ATLAS Offline Software
check_log.py
Go to the documentation of this file.
1 #!/usr/bin/env python
2 #
3 # Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
4 #
5 """Tool to check for error messages in a log file.
6 
7 By default ERROR, FATAL and CRITICAL messages are considered.
8 The config file may be used to provide patterns of lines to exclude from this check
9 (known problems or false positives). If no config file is provided, all errors will be shown."""
10 
11 import re
12 import argparse
13 import sys
14 import os
15 
# Error keywords.  Each category maps to a list of regex fragments that are
# OR-ed together ('|'.join) when scanning the log file.
regexMap = {}
regexMap['error/fatal'] = [
    r'^ERROR ', '^ERROR:', ' ERROR ', ' FATAL ', 'CRITICAL ', 'ABORT_CHAIN',
    r'^Exception\:',
    r'^Caught signal',
    r'^Core dump',
    r'tcmalloc\: allocation failed',
    r'athenaHLT.py\: error',
    r'HLTMPPU.*Child Issue',
    r'HLTMPPU.*Configuration Issue',
    r'There was a crash',
    r'illegal instruction',
    r'failure loading library',
    r'Cannot allocate memory',
    r'Attempt to free invalid pointer',
    r'CUDA error',
]

# Messages that must never appear, regardless of severity level.
regexMap['prohibited'] = [
    r'inconsistent use of tabs and spaces in indentation',
    r'glibc detected',
    r'in state: CONTROLREADY$',
    r'(^\s*|^\d\d:\d\d:\d\d\s*)missing data: ',
    r'(^\s*|^\d\d:\d\d:\d\d\s*)missing conditions data: ',
    # NOTE: removed stray zero-width-joiner characters that had crept into the
    # escaped parentheses of the next two patterns; '\(s\)' now matches the
    # literal text 'alg(s)' as intended.
    r'(^\s*|^\d\d:\d\d:\d\d\s*)can be produced by alg\(s\): ',
    r'(^\s*|^\d\d:\d\d:\d\d\s*)required by tool: ',
    r'pure virtual method called',
    r'Selected dynamic Aux atribute.*not found in the registry',
]

# Floating-point exceptions reported by FPEAuditor.
regexMap['fpe'] = [
    r'FPEAuditor.*WARNING FPE',
]

# Add list of all builtin Python errors (ValueError, KeyError, ...).
# Keep the 'builtins' module name intact instead of shadowing it with
# the list returned by dir().
import builtins
builtinErrors = [b for b in dir(builtins) if 'Error' in b]
regexMap['python error'] = builtinErrors

# Traceback keywords
backtrace = [
    r'Traceback',
    r'Shortened traceback',
    r'stack trace',
    r'^Algorithm stack',
    r'^#\d+\s*0x\w+ in ',
]
regexMap['backtrace'] = backtrace

# FPEAuditor traceback keywords
fpeTracebackStart = [r'FPEAuditor.*INFO FPE stacktrace']
fpeTracebackCont = [
    ' in function : ',
    ' included from : ',
    ' in library : ',
]
regexMap['fpe'].extend(fpeTracebackStart)

# Warning keywords
regexMap['warning'] = ['WARNING ']

# Sanity check: an empty list would be joined into the empty regex, which
# matches every line.  Fail fast.  (The unreachable sys.exit(1) that
# followed the raise has been removed.)
for key, exprlist in regexMap.items():
    if not exprlist:
        raise RuntimeError(f'Empty regex list for category \'{key}\' -- will match everything!')
83 
def get_parser():
    """Build and return the argument parser for this tool."""

    def _wide_formatter(prog):
        # Widen the help layout a little so the long option names fit.
        return argparse.HelpFormatter(prog, max_help_position=40, width=100)

    p = argparse.ArgumentParser(description=__doc__,
                                formatter_class=_wide_formatter)
    p.add_argument('logfile', metavar='<logfile>', nargs='+',
                   help='log file(s) to scan')
    p.add_argument('--config', metavar='<file>',
                   help='specify config file')
    p.add_argument('--showexcludestats', action='store_true',
                   help='print summary table with number of matches for each exclude pattern')
    p.add_argument('--printpatterns', action='store_true',
                   help='print the list of warning/error patterns being searched for')
    p.add_argument('--warnings', action='store_true',
                   help='check for WARNING messages')
    p.add_argument('--errors', action='store_true',
                   help='check for ERROR messages')
    return p
103 
104 
def main():
    """Scan every log file named on the command line.

    Returns the number of files in which problematic messages were found;
    the caller uses this as the process exit status.
    """
    parser = get_parser()
    args = parser.parse_args()
    if not (args.errors or args.warnings):
        parser.error('at least one of --errors or --warnings must be enabled')

    # Exclude patterns come from the config file, if one was given.
    excludes = parseConfig(args) if args.config else []

    failures = 0
    first = True
    for logfile in args.logfile:
        if not first:
            print()  # blank line between reports of consecutive files
        first = False
        failures += scanLogfile(args, logfile, excludes)

    return failures
120 
121 
def parseConfig(args):
    """Parse the config file named by ``args.config`` into a list of
    regex patterns to exclude from the check (ignorePattern).

    Lines of the form ``ignore '<pattern>'`` contribute one pattern each.
    """
    ignorePattern = []

    # Fetch the config file into the working directory (ATLAS helper script).
    # The return code is deliberately ignored, e.g. if the file is already local.
    os.system(f"get_files -data -symlink {args.config} > /dev/null")
    with open(args.config) as f:
        print('Ignoring warnings/error patterns defined in ' + args.config)
        for aline in f:
            if 'ignore' in aline:
                # Take everything after the first 'ignore' keyword.
                # (The previous str.strip('ignore') treated its argument as a
                # character SET and also removed i/g/n/o/r/e from the END of
                # unquoted patterns, silently truncating them.)
                line = aline.split('ignore', 1)[1].strip()
                if line.startswith('\'') and line.endswith('\''):
                    line = line[1:-1]
                ignorePattern.append(line)
    return ignorePattern
136 
137 
def scanLogfile(args, logfile, ignorePattern=None):
    """Scan one log file and print a report.

    Returns 1 if a problematic message was found in ``logfile``, 0 otherwise.
    ``ignorePattern`` is a list of regex strings (from parseConfig) for lines
    to exclude from the check.
    """
    # Two fixes relative to the previous version:
    #  * no mutable default argument (was ignorePattern=[]);
    #  * an empty pattern list used to be joined into the empty regex '',
    #    which matches EVERY line, so the traceback-following logic below was
    #    silently disabled whenever no config file was given.
    if ignorePattern is None:
        ignorePattern = []
    igLevels = re.compile('|'.join(ignorePattern)) if ignorePattern else None

    def _ignored(text):
        # A line can only be ignored when exclude patterns were provided.
        return igLevels is not None and igLevels.search(text)

    tPattern = re.compile('|'.join(backtrace))
    fpeStartPattern = re.compile('|'.join(fpeTracebackStart))
    fpeContPattern = re.compile('|'.join(fpeTracebackCont))
    ignoreDict = None

    categories = []
    if args.warnings is True:
        categories += ['warning']
    if args.errors is True:
        categories += ['error/fatal', 'prohibited', 'python error', 'fpe', 'backtrace']

    patterns = {
        cat: re.compile('|'.join(regexMap[cat])) for cat in categories
    }
    resultsA = {cat: [] for cat in categories}
    with open(logfile, encoding='utf-8') as f:
        tracing = False
        fpeTracing = False

        for line in f:
            # First check if we need to start or continue following a trace.
            # Tracing only makes sense for errors.
            if args.errors:
                if tPattern.search(line) and not _ignored(line):
                    tracing = True
                elif fpeStartPattern.search(line) and not _ignored(line):
                    fpeTracing = True
                elif line == '\n':
                    # A blank line terminates any trace being followed.
                    tracing = False
                    fpeTracing = False

            if tracing:
                # Save all lines after a backtrace even if they don't belong to backtrace
                resultsA['backtrace'].append(line)
            elif fpeTracing:
                # Continue following FPE so long as recognised
                if fpeStartPattern.search(line) or fpeContPattern.search(line):
                    resultsA['fpe'].append(line)
                else:
                    fpeTracing = False
            else:
                for cat in categories:
                    if patterns[cat].search(line):
                        resultsA[cat].append(line)

    ignoreDict = {}
    results = {cat: [] for cat in categories}
    if args.config is None:
        # No config file: nothing to filter, report everything found.
        results = resultsA
    else:
        if args.showexcludestats:
            separateIgnoreRegex = [re.compile(line) for line in ignorePattern]
            ignoreDict = {line: 0 for line in ignorePattern}  # counts of ignored errors/warnings

        # Filter messages matching an exclude pattern
        for cat, messages in resultsA.items():
            for res in messages:
                if not _ignored(res):
                    results[cat].append(res)
                elif args.showexcludestats:
                    for i, rex in enumerate(separateIgnoreRegex):
                        if rex.search(res):
                            ignoreDict[ignorePattern[i]] += 1

    # Report results
    found_bad_message = False
    for cat in categories:

        if args.printpatterns:
            print(f'check_log.py - Checking for {cat} messages with pattern: {str(patterns[cat])} in '+logfile+'\n')
        if len(results[cat]) > 0:
            print(f'Found {len(results[cat])} {cat} message(s) in {logfile}:')
            for msg in results[cat]:
                print(msg.strip('\n'))
            found_bad_message = True

        if ignoreDict:
            print('Ignored:')
            for s in ignoreDict:
                if ignoreDict[s] > 0:
                    print(str(ignoreDict[s]) + "x " + s)
            print('\n')

    if found_bad_message:
        print(f'FAILURE : problematic message found in {logfile}')
        return 1

    print(f'No error/warning messages found in {logfile}')
    return 0
231 
232 
if __name__ == "__main__":
    # The exit status is the number of log files that contained problems.
    sys.exit(main())
dumpHVPathFromNtuple.append
bool append
Definition: dumpHVPathFromNtuple.py:91
search
void search(TDirectory *td, const std::string &s, std::string cwd, node *n)
recursive directory search for TH1 and TH2 and TProfiles
Definition: hcg.cxx:738
check_log.main
def main()
Definition: check_log.py:105
plotBeamSpotVxVal.range
range
Definition: plotBeamSpotVxVal.py:195
beamspotman.dir
string dir
Definition: beamspotman.py:623
TCS::join
std::string join(const std::vector< std::string > &v, const char c=',')
Definition: Trigger/TrigT1/L1Topo/L1TopoCommon/Root/StringUtils.cxx:10
Trk::open
@ open
Definition: BinningType.h:40
Muon::print
std::string print(const MuPatSegment &)
Definition: MuonTrackSteering.cxx:28
check_log.parseConfig
def parseConfig(args)
Definition: check_log.py:122
str
Definition: BTagTrackIpAccessor.cxx:11
check_log.get_parser
def get_parser()
Definition: check_log.py:84
check_log.scanLogfile
def scanLogfile(args, logfile, ignorePattern=[])
Definition: check_log.py:138