"""
Script listing and counting Athena messages between two points in a log file,
typically between start and end of event loop.
"""
from __future__ import print_function

import argparse
import json
import logging
import os
import re
import sys
from collections import OrderedDict
# Messages matching any of these regular expressions are counted separately
# under the 'ignored' key instead of under their severity level. They are
# known, expected messages that would otherwise inflate the counts.
default_ignore_patterns = [
    r'TrigCOOLUpd.*INFO.*Reload',
    r'TrigCOOLUpd.*INFO.*Invalidated',
    r'warn.*frontier.c.*Request',
    r'warn.*frontier.c.*Trying',
    r'AthenaHiveEventLoopMgr.*INFO.*processing event',
    r'IOVDbFolder.*INFO.*tag.*resolved to',
    r'IOVDbSvc.*INFO.*Opening.*connection',
    r'IOVDbSvc.*INFO.*Disconnecting from',
    r'RootDatabase\.open.*INFO.*File version',
    r'Domain.*INFO.*DbDatabase',
    r'INFO Database being retired',
    r'StorageSvc.*INFO Building shape according to reflection information',
]
# Command-line interface.
# NOTE(review): reconstructed from a corrupted source. The keyword arguments
# nargs/action/default are inferred from how the options are used later in the
# script (args.verbose / args.printMessages / args.saveAll tested as booleans,
# args.ignore extended with a list) -- confirm against the original CLI.
parser = argparse.ArgumentParser(usage='%(prog)s [options] LogFiles')
parser.add_argument('logfiles',
                    metavar='LogFiles',
                    nargs='+',
                    help='Log file(s) to analyse. Multiple space-separated '
                         'file names may be given.')
parser.add_argument('-s', '--startPattern',
                    help='Pattern to match start point for log analysis. '
                         'If empty, log files are analysed from first line.')
parser.add_argument('-e', '--endPattern',
                    help='Pattern to match end point for log analysis. '
                         'If empty, log files are analysed until last line.')
parser.add_argument('-i', '--ignore',
                    action='append',
                    default=[],
                    # Fixed missing space between the two concatenated
                    # sentences of the help text.
                    help='Add an ignore pattern to the default ones. ' +
                         'The option can be specified multiple times. '
                         'Defaults are: {:s}'.format(str(default_ignore_patterns)))
parser.add_argument('-p', '--printMessages',
                    action='store_true',
                    help='Print the messages found in analysed files')
parser.add_argument('--saveAll',
                    action='store_true',
                    help='Store all the messages into the output JSON file')
parser.add_argument('-v', '--verbose',
                    action='store_true',
                    help='Increase output verbosity')
def extract_messages(lines, start, end, ignore):
    """Sort log lines into per-severity message lists.

    NOTE(review): reconstructed from a corrupted source; the control flow
    between the surviving statements (in particular whether the end pattern
    stops or merely suspends the analysis) is inferred -- confirm against
    the original script.

    Parameters
    ----------
    lines : iterable of str
        Log lines to analyse.
    start : compiled regex or None
        Analysis starts at the first line matching this pattern;
        from the first line if None.
    end : compiled regex or None
        Analysis stops at the first line matching this pattern;
        runs to the last line if None.
    ignore : list of compiled regexes
        Lines matching any of these are collected under 'ignored'.

    Returns
    -------
    OrderedDict
        Maps each severity name ('FATAL' ... 'VERBOSE'), plus 'other' and
        'ignored', to the list of matching lines.
    """
    patterns = [re.compile(p) for p in
                ['FATAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'VERBOSE']]
    result = OrderedDict()
    for p in patterns:
        result[p.pattern] = []
    result['other'] = []
    result['ignored'] = []
    # If a start pattern is given, stay idle until it is first seen.
    running = False if start else True
    for line in lines:
        if not running and start and start.search(line):
            running = True
        if running and end and end.search(line):
            break
        if not running:
            continue
        # Known benign messages are counted separately, not per severity.
        if any(i.search(line) for i in ignore):
            result['ignored'].append(line)
            continue
        matched = False
        for p in patterns:
            if p.search(line):
                result[p.pattern].append(line)
                matched = True
                break
        if not matched:
            result['other'].append(line)
    return result
def make_summary(result):
    """Count the messages collected per category.

    Parameters
    ----------
    result : mapping of str to list
        Category name -> list of messages (as built by extract_messages).

    Returns
    -------
    OrderedDict
        Category name -> number of messages, plus an 'all' entry holding
        the total over all categories.
    """
    summary = OrderedDict()
    for p in result.keys():
        summary[p] = len(result[p])
    # Compute the total before adding 'all' so it is not counted twice.
    total = sum(summary.values())
    summary['all'] = total
    return summary
def print_result(summary, full_result, print_messages=False):
    """Log the per-category message counts and optionally the messages.

    Parameters
    ----------
    summary : mapping of str to int
        Category name -> message count (as built by make_summary).
    full_result : mapping of str to list of str
        Category name -> collected log lines.
    print_messages : bool
        If True, also print every collected message per category.
    """
    summary_str = 'Found the following number of messages:\n'
    for p, n in summary.items():
        summary_str += '{:8d} {:s} messages\n'.format(n, p)
    logging.info(summary_str)
    if print_messages:
        for p, lines in full_result.items():
            logging.info('##### The following %s messages were found #####', p)
            # NOTE(review): log lines are assumed to keep their trailing
            # newline, so they are joined without a separator -- confirm.
            print(''.join(lines))
def save_summary_to_json(result, filename):
    """Write the message-count summary to *filename* as indented JSON."""
    logging.info('Saving results to %s', filename)
    with open(filename, 'w') as f:
        json.dump(result, f, indent=4)
def save_all_to_json(full_result, filename):
    """Write all collected messages (per category) to *filename* as indented JSON."""
    logging.info('Saving results to %s', filename)
    with open(filename, 'w') as f:
        json.dump(full_result, f, indent=4)
def main():
    """Entry point: parse options, analyse each log file, save the results.

    NOTE(review): reconstructed from a corrupted source; the per-file
    sequencing (extract -> summarise -> print -> save) is inferred from the
    surviving statements -- confirm against the original script.
    """
    args = parser.parse_args()
    logging.basicConfig(stream=sys.stdout,
                        format='messageCounter %(levelname)-8s %(message)s',
                        level=logging.DEBUG if args.verbose else logging.INFO)
    # Compile the optional start/end anchors and all ignore patterns once,
    # outside the per-file loop.
    start = re.compile(args.startPattern) if args.startPattern else None
    end = re.compile(args.endPattern) if args.endPattern else None
    args.ignore.extend(default_ignore_patterns)
    ignore = [re.compile(p) for p in args.ignore]
    for fname in args.logfiles:
        if not os.path.isfile(fname):
            logging.error('Cannot open file %s, skipping', fname)
            continue
        logging.info('Analysing file %s', fname)
        with open(fname, encoding='utf-8') as f:
            messages = extract_messages(f, start, end, ignore)
        summary = make_summary(messages)
        print_result(summary, messages, args.printMessages)
        out_file_name = 'MessageCount.{:s}.json'.format(fname)
        save_summary_to_json(summary, out_file_name)
        if args.saveAll:
            all_out_file_name = 'Messages.{:s}.json'.format(fname)
            save_all_to_json(messages, all_out_file_name)


if '__main__' in __name__:
    main()