def build(self, rootdir, cached=True):
"""Rebuilds the tool binaries. By default, we try to reuse the existing build."""
- print ("Nothing to do to rebuild the tool binaries.")
+ print("Nothing to do to rebuild the tool binaries.")
def setup(self, rootdir):
"""
"""
# pass
- def run(execcmd, filename, binary, num_id, timeout):
+ def run(self, execcmd, filename, binary, num_id, timeout, batchinfo):
"""Compile that test code and anaylse it with the Tool if needed (a cache system should be used)"""
# pass
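# Illustrative only: a concrete tool wrapper would override run() roughly like
# this, delegating the compile/execute/cache work to the run_cmd helper whose
# body appears in the later hunks. The class and base-class names, the cachefile
# layout and the exact run_cmd signature are assumptions made for this sketch,
# not part of the patch.
class MyToolWrapper(AbstractTool):  # hypothetical subclass of the stubbed base class
    def run(self, execcmd, filename, binary, num_id, timeout, batchinfo):
        cachefile = f'{binary}_{num_id}'
        run_cmd(buildcmd=f"mpicc {filename} -o {binary}",
                execcmd=execcmd,
                cachefile=cachefile,
                filename=filename,
                binary=binary,
                timeout=timeout,
                batchinfo=batchinfo)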
# 'OutOfInitFini':'BInitFini',
'CommunicatorLeak':'BResLeak', 'DatatypeLeak':'BResLeak', 'GroupLeak':'BResLeak', 'OperatorLeak':'BResLeak', 'TypeLeak':'BResLeak', 'RequestLeak':'BResLeak',
'MissingStart':'BReqLifecycle', 'MissingWait':'BReqLifecycle',
+ 'MissingEpoch':'BEpochLifecycle','DoubleEpoch':'BEpochLifecycle',
'LocalConcurrency':'BLocalConcurrency',
# scope: communicator
'CallMatching':'DMatch',
'GlobalConcurrency':'DGlobalConcurrency',
# larger scope
-# 'BufferingHazard':'EBufferingHazard',
+ 'BufferingHazard':'EBufferingHazard',
'OK':'FOK'}
error_scope = {
'BResLeak':'single process',
# 'BInitFini':'single process',
'BReqLifecycle':'single process',
+ 'BEpochLifecycle':'single process',
'BLocalConcurrency':'single process',
'CMatch':'multi-processes',
'DRace':'multi-processes',
'DMatch':'multi-processes',
'DGlobalConcurrency':'multi-processes',
-# 'EBufferingHazard':'system',
+ 'EBufferingHazard':'system',
'FOK':'correct executions'
}
'EBufferingHazard':'Buffering hazard',
'FOK':"Correct execution",
- 'aislinn':'Aislinn','civl':'CIVL','hermes':'Hermes', 'isp':'ISP','itac':'ITAC', 'simgrid':'Mc SimGrid', 'smpi':'SMPI','smpivg':'SMPI+VG', 'mpisv':'MPI-SV', 'must':'MUST', 'parcoach':'PARCOACH'
+ 'aislinn':'Aislinn', 'civl':'CIVL', 'hermes':'Hermes', 'isp':'ISP', 'itac':'ITAC', 'simgrid':'Mc SimGrid', 'smpi':'SMPI', 'smpivg':'SMPI+VG', 'mpisv':'MPI-SV', 'must':'MUST', 'parcoach':'PARCOACH'
}
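# Illustrative lookup against the tables above (a sketch, not part of the patch):
# a test's detailed outcome is first mapped to its error class, and the class in
# turn gives the scope a tool must reason about to detect it. The third table,
# whose opening line is elided from this excerpt, maps codes and tool ids to
# human-readable labels.
detail = 'MissingWait'
error_class = possible_details[detail]   # -> 'BReqLifecycle'
scope = error_scope[error_class]         # -> 'single process'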
def parse_one_code(filename):
state = 2
else:
raise ValueError(f"Unexpected end of MBI_TESTS header at line {line_num}: \n{line}")
- if state == 1 and re.match("\s+\$ ?.*", line):
- m = re.match('\s+\$ ?(.*)', line)
+ if state == 1 and re.match(r'\s+\$ ?.*', line):
+ m = re.match(r'\s+\$ ?(.*)', line)
cmd = m.group(1)
nextline = next(input_file)
detail = 'OK'
if detail not in possible_details:
raise ValueError(
f"\n{filename}:{line_num}: MBI parse error: Detailled outcome {detail} is not one of the allowed ones.")
- test = {'filename': filename, 'id': test_num, 'cmd': cmd, 'expect': expect, 'detail': detail}
+
+ nextline = next(input_file)
+ m = re.match(r'[ |]*(.*)', nextline)
+ if not m:
+ raise ValueError(f"\n{filename}:{line_num}: MBI parse error: Expected diagnostic of the test not found.\n")
+ diagnostic = m.group(1)
+
+ test = {'filename': filename, 'id': test_num, 'cmd': cmd, 'expect': expect, 'detail': detail, 'diagnostic': diagnostic}
res.append(test.copy())
test_num += 1
line_num += 1
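# Usage sketch for parse_one_code() (assuming the function returns the
# accumulated res list, which is not shown in this excerpt): each entry carries
# the fields filled in above, including the newly parsed diagnostic. The
# filename is hypothetical.
for test in parse_one_code('InvalidParam_Buffer_MPI_Send_nok.c'):
    print(test['id'], test['cmd'], test['expect'], test['detail'], test['diagnostic'])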
if outcome == 'timeout':
res_category = 'timeout'
if elapsed is None:
- diagnostic = f'hard timeout'
+ diagnostic = 'hard timeout'
else:
diagnostic = f'timeout after {elapsed} sec'
elif outcome == 'failure' or outcome == 'segfault':
res_category = 'failure'
- diagnostic = f'tool error, or test not run'
+ diagnostic = 'tool error, or test not run'
elif outcome == 'UNIMPLEMENTED':
res_category = 'unimplemented'
- diagnostic = f'coverage issue'
+ diagnostic = 'coverage issue'
elif outcome == 'other':
res_category = 'other'
- diagnostic = f'inconclusive run'
+ diagnostic = 'inconclusive run'
elif expected == 'OK':
if outcome == 'OK':
res_category = 'TRUE_NEG'
- diagnostic = f'correctly reported no error'
+ diagnostic = 'correctly reported no error'
else:
res_category = 'FALSE_POS'
- diagnostic = f'reported an error in a correct code'
+ diagnostic = 'reported an error in a correct code'
elif expected == 'ERROR':
if outcome == 'OK':
res_category = 'FALSE_NEG'
- diagnostic = f'failed to detect an error'
+ diagnostic = 'failed to detect an error'
else:
res_category = 'TRUE_POS'
- diagnostic = f'correctly detected an error'
+ diagnostic = 'correctly detected an error'
else:
raise ValueError(f"Unexpected expectation: {expected} (must be OK or ERROR)")
"""
if os.path.exists(f'{cachefile}.txt') and os.path.exists(f'{cachefile}.elapsed') and os.path.exists(f'{cachefile}.md5sum'):
hash_md5 = hashlib.md5()
- with open(filename, 'rb') as sourcefile :
+ with open(filename, 'rb') as sourcefile:
for chunk in iter(lambda: sourcefile.read(4096), b""):
hash_md5.update(chunk)
newdigest = hash_md5.hexdigest()
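# The digest computed above feeds a cache-reuse decision along these lines
# (the actual reuse branch is elided from this excerpt; treat this as a
# sketch): the cached output is only reused if the source file is unchanged.
with open(f'{cachefile}.md5sum') as md5file:
    olddigest = md5file.read()
if olddigest == newdigest:
    with open(f'{cachefile}.elapsed') as infile:
        elapsed = infile.read()
    with open(f'{cachefile}.txt', errors='replace') as infile:
        output = infile.read()
    # ...return the cached result instead of recompiling and re-running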
output = f"Compiling {binary}.c (batchinfo:{batchinfo})\n\n"
output += f"$ {buildcmd}\n"
- compil = subprocess.run(buildcmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ compil = subprocess.run(buildcmd, shell=True, check=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
if compil.stdout is not None:
output += str(compil.stdout, errors='replace')
if compil.returncode != 0:
pid = process.pid
pgid = os.getpgid(pid) # We need that to forcefully kill subprocesses when leaving
- outcome = None
while True:
if poll_obj.poll(5): # Something to read? Do check the timeout status every 5 sec if not
line = process.stdout.readline()
if read_line_lambda is not None:
read_line_lambda(line, process)
if time.time() - start_time > timeout:
- outcome = 'timeout'
with open(f'{cachefile}.timeout', 'w') as outfile:
outfile.write(f'{time.time() - start_time} seconds')
break
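# The pgid captured above exists so that, once the loop exits (on timeout or
# otherwise), any leftover subprocesses can be killed as a group. A possible
# teardown, as a sketch (assumes the signal module is imported):
if process.poll() is None:               # child still alive after the loop
    try:
        os.killpg(pgid, signal.SIGTERM)  # signal the whole process group
    except ProcessLookupError:
        pass                             # the group already terminated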
outfile.write(output)
with open(f'{cachefile}.md5sum', 'w') as outfile:
hashed = hashlib.md5()
- with open(filename, 'rb') as sourcefile :
+ with open(filename, 'rb') as sourcefile:
for chunk in iter(lambda: sourcefile.read(4096), b""):
hashed.update(chunk)
outfile.write(hashed.hexdigest())