# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration

# @file PyAthena.FilePeekerLib
# @purpose provide components to peek into pool files
# @author Sebastien Binet
# @date February 2010

__author__ = "Sebastien Binet"
__doc__ = "provide components to peek into pool files"


import AthenaPython.PyAthena as PyAthena
StatusCode = PyAthena.StatusCode
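
# Usage sketch (hypothetical jobOptions fragment, not part of this module;
# the exact scheduling depends on the job configuration in use):
#
#   from AthenaPython.FilePeekerLib import FilePeeker
#   job += FilePeeker('FilePeeker',
#                     infname='my.data.pool.root',    # input file to inspect
#                     outfname='my.data.infos.pkl')   # where finalize() stores the dict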


def _import_ROOT():
    import ROOT
    ROOT.gROOT.SetBatch(True)
    return ROOT


def toiter(beg, end):
    while beg != end:
        yield beg.__deref__()
        beg.__preinc__()
    return
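
# Example: toiter adapts a C++ (begin, end) iterator pair into a Python
# generator; e.g. for a coral::AttributeList 'al' (illustration only):
#
#   for attr in toiter(al.begin(), al.end()):
#       print(attr.specification().name())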

def _create_file_infos():
    """simple helper function to create consistent dicts for in-file metadata
    """
    d = {
        'nentries' : 0, # to handle empty files
        'run_number': [],
        'run_type': ['N/A'],
        'evt_type': [],
        'evt_number': [],
        'lumi_block': [],
        'mc_channel_number': [],
        'beam_type': ['N/A'], # XXX fixme
        'beam_energy': ['N/A'], # XXX fixme
        'stream_tags': [],
        'metadata_items': [],
        'eventdata_items': [],
        'stream_names': None,
        'geometry': None,
        'conditions_tag': None,
        'det_descr_tags': None,

        'metadata': {},
        'tag_info': {},
        }
    return d


class sg_versioned_key(object):
    def __init__(self, sgkey):
        self.raw_key = sgkey
    @property
    def key(self):
        return self.raw_key.split(";")[-1]
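
# Example: strip the StoreGate version prefix from a versioned key;
# e.g. (illustrative) a raw key of the form ';00;StreamAOD':
#
#   assert sg_versioned_key(';00;StreamAOD').key == 'StreamAOD'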

class FilePeeker(PyAthena.Alg):
    """utility algorithm to inspect a file's content
    """
    def __init__(self, name='FilePeeker', **kw):

        super(FilePeeker, self).__init__(name, **kw)
        self.infname = kw.get('infname', 'not-there.pool')
        self.outfname= kw.get('outfname', None)

        # flag to enable the bwd-compat fallback mechanism...
        self._old_file_flag = False

        # flag to handle event-less files without beginRun...
        self._begin_run_flag = False

        # all data, collected over the events analyzed
        self._peeked_events = []

    def initialize(self):

        import AthenaPython.PyAthena as PyAthena
        self.peeked_data = _create_file_infos()

        # load our pythonizations:
        for cls_name in ('EventStreamInfo',
                         'EventType',
                         'PyEventType',
                         ):
            cls = getattr(PyAthena, cls_name) # noqa: F841

        _info = self.msg.info
        _info("retrieving various stores...")
        for store_name in ('evtStore', 'inputStore',
                           'tagStore', 'metaStore',):
            _info('retrieving [%s]...', store_name)
            o = getattr(self, store_name) # noqa: F841
            _info('retrieving [%s]... [done]', store_name)

        _info("retrieving various stores... [done]")

        import os
        self.infname = os.path.basename(self.infname)

        return StatusCode.Success

    def start(self):
        self._begin_run_flag = True
        # retrieving data available at start...
        self.peeked_data.update(self._do_peeking(peek_evt_data=False))
        self.print_summary()
        return StatusCode.Success

    def stop(self):
        if not self._begin_run_flag:
            # retrieving data for event-less jobs, where no beginRun is called
            self.peeked_data.update(self._do_peeking(peek_evt_data=False))
            self.print_summary()

        return StatusCode.Success

    @property
    def evtStore(self):
        import AthenaPython.PyAthena as PyAthena
        return PyAthena.py_svc('StoreGateSvc/StoreGateSvc')

    @property
    def metaStore(self):
        import AthenaPython.PyAthena as PyAthena
        return PyAthena.py_svc('StoreGateSvc/MetaDataStore')

    @property
    def tagStore(self):
        import AthenaPython.PyAthena as PyAthena
        return PyAthena.py_svc('StoreGateSvc/TagMetaDataStore')

    @property
    def inputStore(self):
        import AthenaPython.PyAthena as PyAthena
        return PyAthena.py_svc('StoreGateSvc/InputMetaDataStore')

    def execute(self):
        self.peeked_data.update(self._do_peeking(peek_evt_data=True))
        self.print_summary()
        self._peeked_events.append(dict(self.peeked_data))
        return StatusCode.Success

    def process_metadata(self, store, metadata_name):
        msg = self.msg
        try:
            obj = store[metadata_name]
        except KeyError:
            msg.warning('could not retrieve [%s]', metadata_name)
            return
        if str(obj).find('MetaCont') >= 0:
            obj = obj.get(obj.sources()[0])
        msg.info('processing container [%s]', obj.folderName())
        data = []
        payloads = obj.payloadContainer()
        payloads_sz = payloads.size()
        if hasattr(payloads, 'at'):
            # HACK for bug #77976
            _tmp = payloads
            payloads = []
            for ii in range(payloads_sz):
                payloads.append(_tmp.at(ii))
            pass
        for ii,payload in zip(range(payloads_sz), payloads):
            # print ("-->",ii,payload,type(payload),'\n' )
            if not payload:
                msg.info("**error** null-pointer ?")
                continue
            # names
            chan_names = []
            sz = payload.name_size()
            msg.info('==names== (sz: %s)', sz)
            for idx in range(sz):
                chan = payload.chanNum(idx)
                chan_name = payload.chanName(chan)
                #msg.info( '--> (%s, %s)', idx, chan_name)
                chan_names.append(chan_name)

            if 1: # we don't really care about those...
                # iovs
                sz = payload.iov_size()
                msg.info('==iovs== (sz: %s)',sz)
                for idx in range(sz):
                    chan = payload.chanNum(idx)
                    iov_range = payload.iovRange(chan)
                    iov_start = iov_range.start()
                    iov_stop  = iov_range.stop()
                    if 0:
                        msg.info( '(%s, %s) => (%s, %s) valid=%s runEvt=%s',
                                  iov_start.run(),
                                  iov_start.event(),
                                  iov_stop.run(),
                                  iov_stop.event(),
                                  iov_start.isValid(),
                                  iov_start.isRunEvent())

            # attrs
            attrs = [] # can't use a dict as spec.name() isn't unique
            sz = payload.size()
            msg.info('==attrs== (sz: %s)', sz)
            for idx in range(sz):
                chan = payload.chanNum(idx)
                #msg.info("idx: %i chan: %s", idx, chan)
                attr_list = payload.attributeList(chan)
                attr_data = []
                for a in list(toiter(attr_list.begin(), attr_list.end())):
                    #msg.info((a,dir(a),type(a)))
                    spec   = a.specification()
                    a_type = spec.typeName()
                    if a_type.find('string') >= 0:
                        a_data = a.data['string']()
                        try:
                            a_data = eval(a_data,{},{})
                        except Exception:
                            # swallow and keep as a string
                            pass
                    else:
                        a_data = a.data[a_type]()
                    #msg.info("%s: %s %s", spec.name(), a_data, type(a_data) )
                    attr_data.append( (spec.name(), a_data) )
                attrs.append(dict(attr_data))
                # msg.info(attrs[-1])
            if len(attrs) == len(chan_names):
                data.append(dict(zip(chan_names,attrs)))
            else:
                if len(attrs):
                    if len(attrs) == 1:
                        data.append(attrs[0])
                    else:
                        data.append(attrs)
                else:
                    data.append(chan_names)
            pass # loop over payloads...

        #payload.dump()

        return data

    def finalize(self):
        _info = self.msg.info
        peeked_data = dict(self.peeked_data)
        if self.outfname:
            oname = self.outfname
            import os
            oname = os.path.expanduser(os.path.expandvars(oname))
            _info('storing peeked file infos into [%s]...', oname)
            if os.path.exists(oname):
                os.remove(oname)

            import PyUtils.dbsqlite as dbsqlite
            db = dbsqlite.open(oname, flags='w')

            # merge and collect data from all processed events (if any)
            if self._peeked_events:
                peeked_data = self._peeked_events.pop(0)
                for d in self._peeked_events:
                    for k in ('run_number',
                              'evt_number',
                              'lumi_block',
                              'run_type',
                              'beam_type',
                              'beam_energy',
                              'stream_tags',):
                        v = d[k]
                        if isinstance(v, list) and len(v)>0:
                            v = v[0]
                        peeked_data[k].append(v)
            #_info('peeked_data:')
            #_info(str(peeked_data))
            db['fileinfos'] = peeked_data
            db.close()
            _info('storing peeked file infos into [%s]... [done]', oname)
        self.print_summary(peeked_data)
        return StatusCode.Success

    def print_summary(self, data=None):
        if data is None:
            data = self.peeked_data
        _info = self.msg.info

        _info(':::::: summary ::::::')
        _info(' - nbr events: %s', data['nentries'])
        _info(' - run numbers: %s', data['run_number'])
        _info(' - evt numbers: %s', data['evt_number'])
        _info(' - lumiblocks: %s', data['lumi_block'])
        _info(' - evt types: %s', data['evt_type'])
        _info(' - item list: %s', len(data['eventdata_items']))
        _info(' - stream names: %s', data['stream_names'])
        _info(' - stream tags: %s', data['stream_tags'])
        _info(' - geometry: %s', data['geometry'])
        _info(' - conditions tag: %s', data['conditions_tag'])
        _info(' - metadata items: %s', len(data['metadata_items']))
        _info(' - tag-info: %s', data['tag_info'].keys())
        return

    def _do_peeking(self, peek_evt_data=False):
        """the real function doing all the work of peeking at the input file
        @return a dict of peeked-at data
        """
        peeked_data = {}
        import AthenaPython.PyAthena as PyAthena
        _info = self.msg.info
        _error= self.msg.error

        def _get_detdescr_tags(evt_type):
            ddt = evt_type.get_detdescr_tags().split()
            # det_descr_tags is of the form:
            # [ 'key1', 'val1', 'key2', 'val2', ... ]
            ddt = dict(zip(ddt[0::2],  # select 'key?'
                           ddt[1::2])) # select 'val?'
            return ddt
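        # Example (illustrative values): a tag string like
        #   'GeoAtlas ATLAS-R2-2016-01-00-01 IOVDbGlobalTag OFLCOND-MC16-SDR-25'
        # becomes
        #   {'GeoAtlas': 'ATLAS-R2-2016-01-00-01',
        #    'IOVDbGlobalTag': 'OFLCOND-MC16-SDR-25'}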

        store = self.inputStore
        esi_keys = store.keys('EventStreamInfo')
        nentries = None
        ddt = None
        if len(esi_keys) >= 1:
            sg_key = esi_keys[-1]
            nentries = 0
            stream_names = esi_keys[:]
            for sg_key in esi_keys:
                esi = store.retrieve('EventStreamInfo', sg_key)
                _info('=== [EventStreamInfo#%s] ===', sg_key)
                nentries += esi.getNumberOfEvents()

                evt_types = PyAthena.EventStreamInfo.evt_types(esi)
                if len(evt_types) > 0:
                    evt_type = evt_types[0]
                    peeked_data['evt_type'] = evt_type.bit_mask
                    ddt = _get_detdescr_tags(evt_type)
                    peeked_data['det_descr_tags'] = ddt
                    from past.builtins import long
                    peeked_data['mc_channel_number'] = [long(evt_type.mc_channel_number())]

                def _make_item_list(item):
                    sgkey= item[1]
                    clid = item[0]
                    _typename = store._pyclidsvc.typename
                    return (_typename(clid) or str(clid), # str or keep the int?
                            sgkey)
                item_list = esi.item_list()
                item_list = list(map(_make_item_list, item_list))
                peeked_data['eventdata_items'] = item_list
                # print ("======",len(item_list))
                peeked_data['lumi_block'] = esi.lumi_blocks()
                peeked_data['run_number'] = esi.run_numbers()
                #peeked_data['evt_number'] = esi.event_number()
                peeked_data['stream_names'] = esi.processing_tags()
                # handle event-less files
                if not peeked_data['stream_names']:
                    stream_names = [sg_versioned_key(s).key for s in stream_names]
                    peeked_data['stream_names'] = list(set(stream_names))
                    pass
                # more event-less files handling - POOL-from-BS event-less files
                # see bug#98568
                if len(esi.run_numbers()) == 0:
                    bsmd_keys = store.keys("ByteStreamMetadataContainer")
                    if len(bsmd_keys) == 1:
                        bsmd = store[bsmd_keys[0]][0]
                        peeked_data['lumi_block'] = [bsmd.getLumiBlock()]
                        peeked_data['run_number'] = [bsmd.getRunNumber()]
                        bs_metadata = {}
                        for md in bsmd.getFreeMetaDataStrings():
                            if md.startswith('Event type:'):
                                k = 'evt_type'
                                v = []
                                if 'is sim' in md:   v.append('IS_SIMULATION')
                                else:                v.append('IS_DATA')
                                if 'is atlas' in md: v.append('IS_ATLAS')
                                else:                v.append('IS_TESTBEAM')
                                if 'is physics' in md: v.append('IS_PHYSICS')
                                else:                  v.append('IS_CALIBRATION')
                                bs_metadata[k] = tuple(v)
                            elif md.startswith('GeoAtlas:'):
                                k = 'geometry'
                                v = md.split('GeoAtlas:')[1].strip()
                                bs_metadata[k] = v
                            elif md.startswith('IOVDbGlobalTag:'):
                                k = 'conditions_tag'
                                v = md.split('IOVDbGlobalTag:')[1].strip()
                                bs_metadata[k] = v
                            elif '=' in md:
                                # split on the first '=' only: values may contain '='
                                k,v = md.split('=', 1)
                                bs_metadata[k] = v
                            pass
                        peeked_data['evt_type'] = bs_metadata.get('evt_type', [])
                        peeked_data['geometry'] = bs_metadata.get('geometry', None)
                        peeked_data['conditions_tag'] = bs_metadata.get('conditions_tag', None)
                        peeked_data['bs_metadata'] = bs_metadata
                    pass
                pass

        # hack to retrieve the number of events if no EventStreamInfo
        # was present in the input file
        if nentries is None:
            ROOT = _import_ROOT()
            root_files = list(ROOT.gROOT.GetListOfFiles())
            root_files = [root_file for root_file in root_files
                          if root_file.GetName().count(self.infname)]
            if len(root_files)==1:
                root_file = root_files[0]
                data_hdr = root_file.Get("POOLContainer")
                if not data_hdr:
                    data_hdr = root_file.Get("POOLContainer_DataHeader")
                nentries = data_hdr.GetEntriesFast() if bool(data_hdr) \
                           else None
                del data_hdr # only defined in this branch
            else:
                _info('could not find correct ROOT file (looking for [%s])',
                      self.infname)
                nentries = None
            del root_files
        peeked_data['nentries'] = nentries

        # retrieve the GUID
        def _get_guid():
            guid = None
            ROOT = _import_ROOT()
            root_files = list(ROOT.gROOT.GetListOfFiles())
            root_files = [root_file for root_file in root_files
                          if root_file.GetName().count(self.infname)]
            if len(root_files)==0:
                _info('could not find correct ROOT file (looking for [%s])',
                      self.infname)
                return

            root_file = root_files[0]
            pool = root_file.Get("##Params")
            import re
            # Pool parameters are of the form:
            # '[NAME=somevalue][VALUE=thevalue]'
            pool_token = re.compile(r'[\[]NAME=(?P<name>.*?)[\]]'
                                    r'[\[]VALUE=(?P<value>.*?)[\]]').match
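            # Example (illustrative): an entry such as
            #   '[NAME=FID][VALUE=<some-guid>]'
            # yields match.groupdict() == {'name': 'FID', 'value': '<some-guid>'}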
            params = []
            for i in range(pool.GetEntries()):
                if pool.GetEntry(i)>0:
                    match = pool_token(pool.db_string)
                    if not match:
                        continue
                    d = match.groupdict()
                    params.append((d['name'], d['value']))
                    if d['name'].lower() == 'fid':
                        guid = d['value']
            return guid
        guid = _get_guid()
        if guid:
            peeked_data['file_guid'] = guid

        metadata_items = [(self.inputStore._pyclidsvc.typename(p.clID()),
                           p.name())
                          for p in self.inputStore.proxies()]
        peeked_data['metadata_items'] = metadata_items
        metadata = {}
        def maybe_get(o, idx, default=None):
            try:
                return o[idx]
            except (IndexError, TypeError):
                # TypeError: process_metadata may have returned None
                return default
        def maybe_float(o):
            try:
                return float(o)
            except ValueError:
                return o

        def mergeMultipleDict(inDicts):
            outDict={}
            for d in inDicts:
                for k,o in d.items():
                    if k not in outDict:
                        outDict[k]=o
            if len(outDict)==0:
                return None
            else:
                return outDict
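        # Example: mergeMultipleDict keeps the first occurrence of each key,
        # e.g. (illustrative):
        #   mergeMultipleDict([{'a': 1}, {'a': 2, 'b': 3}]) == {'a': 1, 'b': 3}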

        for k in self.inputStore.keys('IOVMetaDataContainer'):
            v = self.process_metadata(self.inputStore, k)
            metadata[k] = maybe_get(v, -1)
        peeked_data['metadata'] = metadata

        taginfo = {}
        if self.metaStore.contains('IOVMetaDataContainer','/TagInfo'):
            v = self.process_metadata(self.metaStore, '/TagInfo')
            # guard against empty/failed payloads: keep taginfo a dict
            taginfo = mergeMultipleDict(v or []) or {}
        else:
            if '/TagInfo' in metadata:
                taginfo = metadata['/TagInfo'].copy()
            else:
                self._old_file_flag = True
                # no tag info whatsoever...
                # try detdescr_tags ?
                if ddt:
                    peeked_data['det_descr_tags'] = ddt
                    peeked_data['geometry'] = ddt.get('GeoAtlas', None)
                    peeked_data['conditions_tag'] = ddt.get('IOVDbGlobalTag', None)
        peeked_data['tag_info'] = taginfo
        if taginfo:
            peeked_data['det_descr_tags'] = taginfo
            peeked_data['geometry'] = taginfo.get('GeoAtlas', None)
            peeked_data['conditions_tag'] = taginfo.get('IOVDbGlobalTag', None)
            peeked_data['beam_type'] = [taginfo.get('beam_type','N/A')]
            peeked_data['beam_energy']= [maybe_float(taginfo.get('beam_energy',
                                                                 'N/A'))]

        if 'geometry' not in peeked_data:
            peeked_data['geometry'] = None
        if 'conditions_tag' not in peeked_data:
            peeked_data['conditions_tag'] = None
        if 'det_descr_tags' not in peeked_data:
            peeked_data['det_descr_tags'] = {}

        # event-less simulated DAOD files
        if not self._begin_run_flag:
            if taginfo and taginfo.get('project_name',None) == 'IS_SIMULATION':
                peeked_data['evt_type'] = ('IS_SIMULATION', 'IS_ATLAS', 'IS_PHYSICS')
                if '/Simulation/Parameters' in metadata:
                    peeked_data['run_number'] = [(metadata.get('/Simulation/Parameters') or {}).get('RunNumber',None)]

        if peek_evt_data is False:
            return peeked_data

        # access directly the EventInfo
        store = self.evtStore
        evt_info_keys = store.keys('EventInfo')
        if len(evt_info_keys) != 1:
            _info('expected exactly one EventInfo, got: %s', evt_info_keys)
            _info(" ==> we'll use [%s]", evt_info_keys[0])
        sg_key = evt_info_keys[0]
        ei = store.retrieve('EventInfo', sg_key)
        _info('=== [EventInfo#%s] ===', sg_key)
        eid = ei.event_ID()

        dh_keys = [k for k in store.keys('DataHeader')
                   # remove DH-keys for backnavigation
                   if not k.startswith('[DB=')]
        if len(dh_keys) != 1:
            _error('expected exactly one DataHeader key after filtering, got: %s',
                   dh_keys)
            _error('content of store: %s', store.keys('DataHeader'))
            raise RuntimeError('expected exactly one DataHeader key')

        sg_key = dh_keys[0]
        _info('=== [DataHeader#%s] ===', sg_key)
        dh = store.retrieve('DataHeader', sg_key)

        def _make_item_list(dhe):
            sgkey= dhe.getKey()
            clid = dhe.getPrimaryClassID()
            _typename = store._pyclidsvc.typename
            return (_typename(clid) or str(clid), # str or keep the int?
                    sgkey)
        dhes = []
        if hasattr(dh, 'at'):
            # HACK for bug #77976
            for ii in range(len(dh)):
                dhes.append(dh.at(ii))
            pass
        else:
            dhes = list(dh.elements())
        #item_list = map(_make_item_list, dh)
        item_list = []
        for i,dhe in enumerate(dhes):
            sgkey = dhe.getKey()
            clid  = dhe.getPrimaryClassID()
            _typename = store._pyclidsvc.typename
            try:
                clid_name = _typename(clid)
                if clid_name:
                    clid = clid_name
            except Exception as err:
                self.msg.info("no typename for clid [%s] (%s)", clid, err)
            item_list.append((str(clid), sgkey))

        # -- event-type
        evt_type = ei.event_type()
        det_descr_tags = _get_detdescr_tags(evt_type)

        peeked_data.update({
            'run_number':      [eid.run_number()],
            'evt_number':      [eid.event_number()],
            'run_type':        ['N/A'],
            'evt_type':        evt_type.bit_mask,
            'det_descr_tags':  det_descr_tags,
            'geometry':        det_descr_tags.get('GeoAtlas', None),
            'conditions_tag':  det_descr_tags.get('IOVDbGlobalTag', None),
            'lumi_block':      [eid.lumi_block()],
            'stream_names':    [dh.getProcessTag()],
            'eventdata_items': item_list,
            'beam_type':       [det_descr_tags.get('beam_type','N/A')],
            'beam_energy':     [maybe_float(det_descr_tags.get('beam_energy',
                                                               'N/A'))],
            })

        trigger_info = ei.trigger_info()
        stream_tags = trigger_info.streamTags() if trigger_info else []
        stags = []
        for st in stream_tags: # don't use list-comprehensions b/c of ROOT bugs
            st_type = st.type()
            st_name = st.name()
            obeys_lbk = bool(st.obeysLumiblock())
            stags.append(dict(stream_type=st_type,
                              stream_name=st_name,
                              obeys_lbk=obeys_lbk))
        peeked_data['stream_tags'] = stags
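        # stags ends up as a list of dicts, one per trigger stream tag,
        # e.g. (illustrative): [{'stream_type': 'physics',
        #                        'stream_name': 'Main',
        #                        'obeys_lbk': True}]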

        # -- old files compat
        if self._old_file_flag or 1:  # NB: 'or 1' forces this fallback to always run

            metadata_items = [(self.inputStore._pyclidsvc.typename(p.clID()),
                               p.name())
                              for p in self.inputStore.proxies()]
            peeked_data['metadata_items'] = metadata_items
            metadata = {}
            for k in self.inputStore.keys('IOVMetaDataContainer'):
                v = self.process_metadata(self.inputStore, k)
                metadata[k] = maybe_get(v, -1)
            peeked_data['metadata'] = metadata

            taginfo = {}
            if self.metaStore.contains('IOVMetaDataContainer','/TagInfo'):
                v = self.process_metadata(self.metaStore, '/TagInfo')
                taginfo = mergeMultipleDict(v or [])
            if taginfo:
                # we want to keep the AtlasRelease from when the file was produced
                atlas_release = metadata.get('/TagInfo', taginfo)
                atlas_release = atlas_release.get('AtlasRelease',
                                                  taginfo['AtlasRelease'])
                taginfo['AtlasRelease'] = atlas_release
                peeked_data['det_descr_tags'] = taginfo
                peeked_data['tag_info'] = taginfo

            ddt = peeked_data['det_descr_tags']
            peeked_data['geometry'] = ddt.get('GeoAtlas', None)
            peeked_data['conditions_tag'] = ddt.get('IOVDbGlobalTag', None)

            peeked_data['beam_type'] = [ddt.get('beam_type','N/A')]
            beam_ene = maybe_float(ddt.get('beam_energy','N/A'))
            peeked_data['beam_energy'] = [beam_ene]

            pass # old files compat

        return peeked_data

    pass # class FilePeeker


class FilePeekerSvc(PyAthena.Svc):
    """a service to spy for file meta-data and store this collected data into
    the pool file, in a python-pickle friendly format
    """

    def __init__(self, name='FilePeekerSvc', **kw):
        kw['name'] = name
        super(FilePeekerSvc, self).__init__(**kw)

    def initialize(self):
        # register with the incident svc
        svc = PyAthena.py_svc('IncidentSvc', iface='IIncidentSvc')
        if not svc:
            self.msg.error('unable to get the incident svc')
            return StatusCode.Failure

        for incident in ('EndEvent',
                         'BeginInputFile',):
            svc.addListener(self, incident)
            pass

        return StatusCode.Success

    def finalize(self):
        return StatusCode.Success

    def handle(self, incident):
        tp = incident.type()
        if tp == 'EndEvent':
            pass
        elif tp == 'BeginInputFile':
            self.msg.info('input file name: [%s]', incident.fileName())
            pass
        else:
            pass
        return

    pass # class FilePeekerSvc
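
# Reading back the data written by FilePeeker.finalize (a minimal sketch,
# assuming the shelve-like flags of PyUtils.dbsqlite and an output file
# named 'my.data.infos.pkl'):
#
#   import PyUtils.dbsqlite as dbsqlite
#   db = dbsqlite.open('my.data.infos.pkl', flags='r')
#   infos = db['fileinfos']
#   print(infos['run_number'], infos['nentries'])
#   db.close()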