ATLAS Offline Software
check_log.py
#!/usr/bin/env python
#
# Copyright (C) 2002-2025 CERN for the benefit of the ATLAS collaboration
#
"""Tool to check for error messages in a log file.

By default ERROR, FATAL and CRITICAL messages are considered.
The config file may be used to provide patterns of lines to exclude from this check
(known problems or false positives). If no config file is provided, all errors will be shown."""

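# Illustrative usage (derived from the command-line options defined in get_parser() below):
#   check_log.py --errors [--warnings] [--config <file>] [--showexcludestats] <logfile> [<logfile> ...]
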
import re
import argparse
import sys
import os

# Error keywords
regexMap = {}
regexMap['error/fatal'] = [
    r'^ERROR ', '^ERROR:', ' ERROR ', ' FATAL ', 'CRITICAL ', 'ABORT_CHAIN',
    r'^Exception\:',
    r'^Caught signal',
    r'^Core dump',
    r'tcmalloc\: allocation failed',
    r'athenaHLT.py\: error',
    r'HLTMPPU.*Child Issue',
    r'HLTMPPU.*Configuration Issue',
    r'There was a crash',
    r'illegal instruction',
    r'failure loading library',
    r'Cannot allocate memory',
    r'Attempt to free invalid pointer',
    r'CUDA error',
]

regexMap['prohibited'] = [
    r'inconsistent use of tabs and spaces in indentation',
    r'glibc detected',
    r'in state: CONTROLREADY$',
    r'(^\s*|^\d\d:\d\d:\d\d\s*)missing data: ',
    r'(^\s*|^\d\d:\d\d:\d\d\s*)missing conditions data: ',
    r'(^\s*|^\d\d:\d\d:\d\d\s*)can be produced by alg\(s\): ',
    r'(^\s*|^\d\d:\d\d:\d\d\s*)required by tool: ',
    r'pure virtual method called',
    r'Selected dynamic Aux atribute.*not found in the registry',
]

regexMap['fpe'] = [
    r'FPEAuditor.*WARNING FPE',
]

# Add list of all builtin Python errors
import builtins
builtins = dir(builtins)
builtinErrors = [b for b in builtins if 'Error' in b]
regexMap['python error'] = builtinErrors
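# (builtinErrors picks up names such as 'TypeError', 'ValueError', 'KeyError', 'FloatingPointError', ...)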

# Traceback keywords
backtrace = [
    r'Traceback',
    r'Shortened traceback',
    r'stack trace',
    r'^Algorithm stack',
    r'^#\d+\s*0x\w+ in ',
]
regexMap['backtrace'] = backtrace

# FPEAuditor traceback keywords
fpeTracebackStart = [r'FPEAuditor.*INFO FPE stacktrace']
fpeTracebackCont = [
    ' in function : ',
    ' included from : ',
    ' in library : ',
]
regexMap['fpe'].extend(fpeTracebackStart)

# Warning keywords
regexMap['warning'] = ['WARNING ']
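# Each category list above is OR-joined into a single regex in scanLogfile() below,
# e.g. re.compile('|'.join(regexMap['warning'])) here amounts to re.compile('WARNING ').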

# Sanity check: an empty pattern list would produce a regex that matches every line
for key, exprlist in regexMap.items():
    if not exprlist:
        raise RuntimeError(f'Empty regex list for category \'{key}\' -- will match everything!')

def get_parser():
    parser = argparse.ArgumentParser(description=__doc__, formatter_class=
                                     lambda prog: argparse.HelpFormatter(
                                         prog, max_help_position=40, width=100))

    parser.add_argument('logfile', metavar='<logfile>', nargs='+',
                        help='log file(s) to scan')
    parser.add_argument('--config', metavar='<file>',
                        help='specify config file')
    parser.add_argument('--showexcludestats', action='store_true',
                        help='print summary table with number of matches for each exclude pattern')
    parser.add_argument('--printpatterns', action='store_true',
                        help='print the list of warning/error patterns being searched for')
    parser.add_argument('--warnings', action='store_true',
                        help='check for WARNING messages')
    parser.add_argument('--errors', action='store_true',
                        help='check for ERROR messages')

    return parser


def main():
    parser = get_parser()

    args = parser.parse_args()
    if not (args.errors or args.warnings):
        parser.error('at least one of --errors or --warnings must be enabled')

    ignorePattern = parseConfig(args) if args.config else []
    rc = 0
    for i, lf in enumerate(args.logfile):
        if i > 0:
            print()
        rc += scanLogfile(args, lf, ignorePattern)

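    # rc counts how many of the scanned log files contained problematic messages;
    # main()'s return value is used as the process exit code below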
    return rc


def parseConfig(args):
    """Parses the config file provided into a list (ignorePattern)"""
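    # Relevant config lines are expected to look like (format inferred from the parsing below):
    #   ignore 'some regex pattern'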
    ignorePattern = []

    os.system(f"get_files -data -symlink {args.config} > /dev/null")
    with open(args.config) as f:
        print('Ignoring warnings/error patterns defined in ' + args.config)
        for aline in f:
            if 'ignore' in aline:
                # Note: str.strip('ignore') strips those characters (not the word) from both
                # ends; the surrounding quotes protect the pattern itself from being trimmed
                line = aline.strip('ignore').strip()
                if line.startswith('\'') and line.endswith('\''):
                    line = line[1:-1]
                ignorePattern.append(line)
    return ignorePattern


def scanLogfile(args, logfile, ignorePattern=None):
    """Scan one log file and print report"""
    tPattern = re.compile('|'.join(backtrace))
    fpeStartPattern = re.compile('|'.join(fpeTracebackStart))
    fpeContPattern = re.compile('|'.join(fpeTracebackCont))
    ignoreDict = None

    categories = []
    if args.warnings is True:
        categories += ['warning']
    if args.errors is True:
        categories += ['error/fatal', 'prohibited', 'python error', 'fpe', 'backtrace']

    # If ignorePattern is empty, igLevels.search would match anything;
    # protect by using a pattern that can never match
    if not ignorePattern:
        ignorePattern = ['(?!)']
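    # ('(?!)' is a negative lookahead on the empty pattern, so it fails at every position and never matches)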

    igLevels = re.compile('|'.join(ignorePattern))

    patterns = {
        cat: re.compile('|'.join(regexMap[cat])) for cat in categories
    }
    resultsA = {cat: [] for cat in categories}
    with open(logfile, encoding='utf-8') as f:
        tracing = False
        fpeTracing = False

        for line in f:
            # First check if we need to start or continue following a trace
            # Tracing only makes sense for errors
            if args.errors:
                if tPattern.search(line) and not igLevels.search(line):
                    tracing = True
                elif fpeStartPattern.search(line) and not igLevels.search(line):
                    fpeTracing = True
                elif line == '\n':
                    tracing = False
                    fpeTracing = False

            if tracing:
                # Save all lines after a backtrace even if they don't belong to backtrace
                resultsA['backtrace'].append(line)
            elif fpeTracing:
                # Continue following FPE so long as recognised
                if fpeStartPattern.search(line) or fpeContPattern.search(line):
                    resultsA['fpe'].append(line)
                else:
                    fpeTracing = False
            else:
                for cat in categories:
                    if patterns[cat].search(line):
                        resultsA[cat].append(line)

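    # resultsA now maps each category to the raw list of matching lines (before any exclude patterns are applied)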
    ignoreDict = {}
    results = {cat: [] for cat in categories}
    if args.config is None:
        results = resultsA
    else:
        if args.showexcludestats:
            separateIgnoreRegex = [re.compile(line) for line in ignorePattern]
            ignoreDict = {line: 0 for line in ignorePattern}  # stores counts of ignored errors/warnings

        # Filter messages
        for cat, messages in resultsA.items():
            for res in messages:
                if not igLevels.search(res):
                    results[cat].append(res)
                elif args.showexcludestats:
                    for i in range(len(separateIgnoreRegex)):
                        if separateIgnoreRegex[i].search(res):
                            ignoreDict[ignorePattern[i]] += 1


    # Report results
    found_bad_message = False
    for cat in categories:

        if args.printpatterns:
            print(f'check_log.py - Checking for {cat} messages with pattern: {str(patterns[cat])} in '+logfile+'\n')
        if len(results[cat]) > 0:
            print(f'Found {len(results[cat])} {cat} message(s) in {logfile}:')
            for msg in results[cat]: print(msg.strip('\n'))
            found_bad_message = True

    if ignoreDict:
        print('Ignored:')
        for s in ignoreDict:
            if ignoreDict[s] > 0:
                print(str(ignoreDict[s]) + "x " + s)
        print('\n')

    if found_bad_message:
        print(f'FAILURE : problematic message found in {logfile}')
        return 1

    print(f'No error/warning messages found in {logfile}')
    return 0


if __name__ == "__main__":
    sys.exit(main())