#!/usr/bin/env python
# vim: set sw=2 ts=2 softtabstop=2 expandtab:
"""
Script to recompute analyses on existing results
"""
import argparse
import logging
import os
from BoogieRunner import AnalyserFactory
import traceback
import re
import yaml
import sys

_logger = None

def entryPoint(args):
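  """
  Recompute the analyses for every result in yaml_old_results using the
  named analyser and write the merged results to yaml_output. Returns 0 on
  success and 1 on error, suitable for use as a process exit code.
  """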
  global _logger
  parser = argparse.ArgumentParser(description=__doc__)
  parser.add_argument("-l", "--log-level", type=str, default="info", dest="log_level", choices=['debug','info','warning','error'])
  parser.add_argument("-s", "--search-workdir-regex", default="", dest="search_workdir_regex",
    help="Substitute workdir matching this regex")
  parser.add_argument("-r", "--replace-workdir-regex", default="", dest="replace_workdir_regex",
    help="Replace matched workdir with this (can use backrefs)")
  parser.add_argument("--allow-new-fields-only", default=False, dest="allow_new_fields_only", action='store_true',
    help="When getting new results from the analyser only allow new fields to be added")
  parser.add_argument("analyser", help="Analyser name (e.g. Boogaloo)")
  parser.add_argument("yaml_old_results")
  parser.add_argument("yaml_output")
  pargs = parser.parse_args(args)

  logLevel = getattr(logging, pargs.log_level.upper(), None)
  if logLevel == logging.DEBUG:
    logFormat = '%(levelname)s:%(threadName)s: %(filename)s:%(lineno)d %(funcName)s() : %(message)s'
  else:
    logFormat = '%(levelname)s:%(threadName)s: %(message)s'
  logging.basicConfig(level=logLevel, format=logFormat)
  _logger = logging.getLogger(__name__)
  # Compute absolute paths
  oldResultsPath = os.path.abspath(pargs.yaml_old_results)
  outputPath = os.path.abspath(pargs.yaml_output)

  if not os.path.exists(oldResultsPath):
    _logger.error('Old results file "{}" does not exist'.format(oldResultsPath))
    return 1

  if os.path.exists(outputPath):
    _logger.error('{} already exists'.format(outputPath))
    return 1

  if oldResultsPath == outputPath:
    _logger.error('Input file cannot be the same as the output file')
    return 1

  # Try to get the analyser class
  try:
    analyserClass = AnalyserFactory.getAnalyserClass(pargs.analyser)
  except Exception as e:
    _logger.error('Failed to load analyser {}'.format(pargs.analyser))
    if logLevel == logging.DEBUG:
      raise e
    return 1

  # Try to load old results in
  oldResults = None
  with open(oldResultsPath, 'r') as f:
    oldResults = yaml.load(f)

  if not isinstance(oldResults, list):
    _logger.error('Expected top level data structure to be a list in {}'.format(oldResultsPath))
    return 1

  _logger.info('Loaded {} results'.format(len(oldResults)))
  newResults = [ ]

  # Iterate over the results
  for index, r in enumerate(oldResults):
    assert isinstance(r, dict)

    # Handle the case of error reports being in the result list (generated
    # when boogie-batch-runner is terminated)
    if ('log_file' not in r) and ('error' in r) and ('program' in r):
      _logger.warning(('Found error report in results for program "{}",' +
        ' copying result over without processing').format(r['program']))
      newResults.append(r)
      continue

    logFileName = os.path.basename(r['log_file'])
    logFileDir = os.path.dirname(r['log_file'])
    logFileDir = getWorkingDirectory(logFileDir,
                                     pargs.search_workdir_regex,
                                     pargs.replace_workdir_regex)
    patchedLogFilePath = os.path.join(logFileDir, logFileName)

    if not os.path.exists(patchedLogFilePath):
      _logger.error('Could not find log file {}'.format(patchedLogFilePath))
      return 1

    originalLogFilePath = r['log_file']
    # Patch the log file path for the analyser
    r['log_file'] = patchedLogFilePath

    # Create the analyser and reanalyse the result
    analyser = analyserClass(r)
    newResult = analyser.getAnalysesDict()

    # Undo the patch to the log file path
    assert 'log_file' in newResult
    newResult['log_file'] = originalLogFilePath

    # Merge the old and new results
    mergedResult = merge(r, newResult, logFileDir, pargs.allow_new_fields_only)
    newResults.append(mergedResult)

  # Write the results to file
  _logger.info('Writing updated results to {}'.format(outputPath))
  with open(outputPath, 'w') as f:
    # Use the analyser name from the command line; the loop-local 'analyser'
    # variable is unbound if no result was reanalysed
    f.write('# Updated results using analyser {}\n'.format(pargs.analyser))
    f.write(yaml.dump(newResults, default_flow_style=False))

  return 0

def merge(oldResult, updatedAnalyses, workingDirectory, allowNewFieldsOnly):
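  """
  Return a copy of oldResult updated with the fields from updatedAnalyses.
  workingDirectory is only used to give context in log messages. If
  allowNewFieldsOnly is True, any change to an existing field's value aborts
  the script; only brand-new fields may be introduced.
  """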
  _logger.info('Merging {}'.format(oldResult['program']))
  newResult = oldResult.copy()
  for k, v in updatedAnalyses.items():
    _logger.debug('Updating with {}:{}'.format(k, v))
    newResult[k] = v

  # Compute new or changed fields
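  # (set difference over the items() pairs; note this assumes every field
  # value is hashable)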
  newOrChanged = set(newResult.items()) - set(oldResult.items())
  for k, v in newOrChanged:
    if k in oldResult:
      _logger.warning('[{}] Key {} changed "{}" => "{}"'.format(workingDirectory, k, oldResult[k], v))
      if allowNewFieldsOnly:
        _logger.error('Changing field values is disallowed by --allow-new-fields-only')
        sys.exit(1)
    else:
      _logger.info('[{}] New key added {}: "{}"'.format(workingDirectory, k, v))
  return newResult

def getWorkingDirectory(originalworkDir, searchRegex, replaceRegex):
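  """
  Rewrite originalworkDir by substituting the first match of searchRegex
  with replaceRegex (which may use backreferences). If either regex is
  empty, originalworkDir is returned unchanged.
  """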
  assert isinstance(originalworkDir, str)
  assert isinstance(searchRegex, str)
  assert isinstance(replaceRegex, str)

  if len(searchRegex) == 0 or len(replaceRegex) == 0:
    # Don't use regexes to change the working directory specified
    return originalworkDir

  r = re.compile(searchRegex)
  newWorkDir = r.sub(replaceRegex, originalworkDir, count=1)
  return newWorkDir

if __name__ == '__main__':
  sys.exit(entryPoint(sys.argv[1:]))