check_reflex.py
# Copyright (C) 2002-2025 CERN for the benefit of the ATLAS collaboration

# @file PyUtils.scripts.check_reflex
# @purpose a script to check the definitions of (reflex) plugins
#          across multiple so-called 'rootmap' files
# @author Sebastien Binet
# @date February 2010

__doc__ = """
a script to check the definitions of (reflex) plugins across multiple so-called 'rootmap' files
"""
__author__ = "Sebastien Binet"


import PyUtils.acmdlib as acmdlib

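# Example usage (a sketch, assuming the standard 'acmd' command-line driver
# provided by PyUtils, which dispatches to the sub-commands registered
# through acmdlib):
#
#   acmd chk-rflx --check-all-dups --detailed-dump
#   acmd chk-rflx --capabilities libAthenaServices.so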
@acmdlib.command(name='chk-rflx')
@acmdlib.argument(
    '--capabilities',
    nargs='?',
    default=None,
    help="Dump the capabilities of a given library (e.g. libAthenaServices.so)")
@acmdlib.argument(
    '--dups',
    dest='chk_dups',
    default=None,
    help="Check whether there are any duplicates among the dictionaries of a given library")
@acmdlib.argument(
    '--dump-content',
    action='store_true',
    default=False,
    help="Dump the content of all the known plugins (dicts. and components)")
@acmdlib.argument(
    "--dso",
    dest = "dump_dso",
    action = "store_true",
    default = False,
    help = "Dump all the dsomap/rootmap files known to the Dso repository")
@acmdlib.argument(
    "--libs",
    dest = "dump_libs",
    action = "store_true",
    default = False,
    help = "Dump all the libraries known to the Dso repository")
@acmdlib.argument(
    "--check-dict-dups",
    action = "store_true",
    default = False,
    help = "Check whether there are any duplicates among dictionaries")
@acmdlib.argument(
    "--check-pf-dups",
    action = "store_true",
    default = False,
    help = "Check whether there are any duplicates among components declared to the PluginSvc")
@acmdlib.argument(
    "--check-all-dups",
    action = "store_true",
    default = False,
    help = "Check dictionaries *and* components")
@acmdlib.argument(
    "--detailed-dump",
    action = "store_true",
    default = False,
    help = "Perform a detailed dump if duplicates are found")
@acmdlib.argument(
    "--pedantic",
    action = "store_true",
    default = False,
    help = """Pedantic mode: a component found in 2 libraries which have
    the same name (the usual case of a developer working on a (set of) package(s))
    is still reported as a duplicate""")
@acmdlib.argument(
    "-l",
    "--level",
    default = "INFO",
    help = "Logging level (aka verbosity)")
def main(args):
    """a script to check the definitions of (reflex) plugins
    across multiple so-called 'rootmap' files
    """
    exitcode = 0

    print (":"*80)
    print ("::: chk-rflx :::")

    import os
    import PyUtils.Dso as Dso

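    # Known, tolerated duplications: maps a dictionary/component name to the
    # tuple of libraries in which it is allowed to appear more than once.
    # Duplicates fully covered by this table are reported but do not cause a
    # non-zero exit code.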
    _suppression_dct = {
        'TMath' : ('libCore.so', 'libMathCore.so'),
        'string': ('libGaudiKernelDict.so',
                   'libCore.so',
                   'liblcg_PyCoolDict.so',
                   'libSTLAddRflx.so'),
        '__pf__::CNV_71_9631': ('libDataModelTestDataReadCnvPoolCnv.so',
                                'libDataModelTestDataWriteCnvPoolCnv.so',),
        '__pf__::CNV_71_9632': ('libDataModelTestDataReadCnvPoolCnv.so',
                                'libDataModelTestDataWriteCnvPoolCnv.so',),
        '__pf__::CNV_71_9633': ('libDataModelTestDataReadCnvPoolCnv.so',
                                'libDataModelTestDataWriteCnvPoolCnv.so',),
        '__pf__::CNV_71_9634': ('libDataModelTestDataReadCnvPoolCnv.so',
                                'libDataModelTestDataWriteCnvPoolCnv.so',),
        '__pf__::CNV_71_9639': ('libDataModelTestDataReadCnvPoolCnv.so',
                                'libDataModelTestDataWriteCnvPoolCnv.so',),

        '__pf__::RootCollection': ('liblcg_RootCollection.so',
                                   'libAthAnalysisTools.so',),
        }

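    # small helper: pretty-print a {name: [libraries]} mapping; with
    # detailedDump the full library paths are shown, otherwise only basenames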
    def print_db( db, detailedDump = False ):
        if detailedDump : fct = lambda x: x
        else: fct = os.path.basename
        keys = sorted(db.keys())
        for k in keys:
            print ("%s:" % k)
            libs = sorted(db[k])
            for lib in libs:
                print (" ",fct(lib))
        return

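    # the Dso repository: an index over all the dsomap/rootmap files it knows
    # about, mapping Reflex dictionaries and plugin components to the
    # libraries which provide them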
    dsodb = Dso.DsoDb()

    if args.capabilities:
        libname = args.capabilities
        try:
            capabilities = dsodb.capabilities(libname)
            print ("::: capabilities of [%s]" % (libname,))
            print (os.linesep.join([" %s"%c for c in capabilities]))
        except ValueError:
            exitcode = 1
            pass

    if args.chk_dups:
        libname = args.chk_dups
        try:
            print ("::: checking duplicates for [%s]..." % (libname,))
            dups = dsodb.duplicates(libname, pedantic=args.pedantic)
            for k in dups:
                print (" -",k)
                print (os.linesep.join([" %s"%v for v in dups[k]]))
            if len(dups.keys())>0:
                exitcode = 1
        except ValueError:
            exitcode = 1
            pass

    if args.dump_content:
        print ("::: dumping content of all known plugins...")
        entries = dsodb.content(pedantic=args.pedantic)
        print_db(entries, args.detailed_dump)
        print ("::: known entries:",len(entries.keys()))

    if args.dump_libs:
        print ("::: dumping all known libraries...")
        libs = dsodb.libs(detailedDump=args.detailed_dump)
        for lib in libs:
            print (" -",lib)
        print ("::: known libs:",len(libs))

    if args.dump_dso:
        print ("::: dumping all known dso/rootmap files...")
        dso_files = [dso for dso in dsodb.dsoFiles]
        dso_files.sort()
        for dso_file in dso_files:
            if not args.detailed_dump:
                dso_file = os.path.basename(dso_file)
            print (" -",dso_file)
        print ("::: known dsos:",len(dso_files))

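    # the three duplicate checks below share the same logic: every duplicated
    # entry is printed, but only entries not fully covered by _suppression_dct
    # mark the run as failed (non-zero exit code)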
    if args.check_dict_dups:
        print ("::: checking dict. duplicates...")
        dups = dsodb.dictDuplicates(pedantic=args.pedantic)
        suppression_log = []
        for k in dups:
            v = dups[k]
            # mark as error only if it isn't a known dup'
            if k in _suppression_dct:
                suppressed = [os.path.basename(ii) in _suppression_dct[k]
                              for ii in v]
                if all(suppressed):
                    suppression_log.append(k[:])
                    pass
                else:
                    # that's a new one !!
                    exitcode = 1
            else:
                # that's a new one !!
                exitcode = 1
                # print ("---> NOT ignoring [%s]" % (k,))
        print_db(dups, args.detailed_dump)
        if len(suppression_log):
            print ("-"*40)
            print ("## ignoring the following dups':")
            for k in suppression_log:
                print (" -",k)
            print ("-"*40)
        print ("## all dups:",len(dups.keys()))
        print ("## dups:",len(dups.keys())-len(suppression_log))

    if args.check_pf_dups:
        print ("::: checking (plugin factories) components dups...")
        dups = dsodb.pfDuplicates(pedantic=args.pedantic)
        suppression_log = []
        for k in dups:
            v = dups[k]
            # mark as error only if it isn't a known dup'
            if k in _suppression_dct:
                suppressed = [os.path.basename(ii) in _suppression_dct[k]
                              for ii in v]
                if all(suppressed):
                    suppression_log.append(k[:])
                    pass
                else:
                    # that's a new one !!
                    exitcode = 1
            else:
                # that's a new one !!
                exitcode = 1
                # print ("---> NOT ignoring [%s]" % (k,))
        print_db(dups, args.detailed_dump)
        if len(suppression_log):
            print ("-"*40)
            print ("## ignoring the following dups':")
            for k in suppression_log:
                print (" -",k)
            print ("-"*40)
        print ("## all dups:",len(dups.keys()))
        print ("## dups:",len(dups.keys())-len(suppression_log))

    if args.check_all_dups:
        print ("::: checking all components dups...")
        dups = dsodb.pfDuplicates(pedantic=args.pedantic)
        dups.update(dsodb.dictDuplicates(pedantic=args.pedantic))

        suppression_log = []
        for k in dups:
            v = dups[k]
            # mark as error only if it isn't a known dup'
            if k in _suppression_dct:
                suppressed = [os.path.basename(ii) in _suppression_dct[k]
                              for ii in v]
                if all(suppressed):
                    suppression_log.append(k[:])
                    pass
                else:
                    # that's a new one !!
                    exitcode = 1
            else:
                # that's a new one !!
                exitcode = 1
                # print ("---> NOT ignoring [%s]" % (k,))
        print_db(dups, args.detailed_dump)
        if len(suppression_log):
            print ("-"*40)
            print ("## ignoring the following dups':")
            for k in suppression_log:
                print (" -",k)
            print ("-"*40)
        print ("## all dups:",len(dups.keys()))
        print ("## dups:",len(dups.keys())-len(suppression_log))

    if exitcode:
        print ("::: ERROR !!")
    else:
        print ("::: All good.")

    print (":"*80)
    return exitcode