ATLAS Offline Software
MetaReader.py
1 # Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
2 
3 import os
4 import re
5 from fnmatch import fnmatchcase
6 from AthenaCommon.Logging import logging
7 from AthenaConfiguration.AthConfigFlags import isGaudiEnv
8 from PyUtils.PoolFile import isRNTuple
9 from ROOT import gSystem
10 
11 msg = logging.getLogger('MetaReader')
12 
13 # compile the regex needed in _convert_value() outside it to optimize the code.
14 regexEventStreamInfo = re.compile(r'^EventStreamInfo(_p\d+)?$')
15 regexIOVMetaDataContainer = re.compile(r'^IOVMetaDataContainer(_p\d+)?$')
16 regexByteStreamMetadataContainer = re.compile(r'^ByteStreamMetadataContainer(_p\d+)?$')
17 regexXAODCutBookkeeperContainer = re.compile(r'^xAOD::CutBookkeeperContainer(_v\d+)?$')
18 regexXAODCutBookkeeperContainerAux = re.compile(r'^xAOD::CutBookkeeperAuxContainer(_v\d+)?$')
19 regexXAODEventFormat = re.compile(r'^xAOD::EventFormat(_v\d+)?$')
20 regexXAODFileMetaData = re.compile(r'^xAOD::FileMetaData(_v\d+)?$')
21 regexXAODFileMetaDataAux = re.compile(r'^xAOD::FileMetaDataAuxInfo(_v\d+)?$')
22 regexXAODFileMetaDataAuxDyn = re.compile(r'^(xAOD::)?FileMetaData.*AuxDyn(\.[a-zA-Z0-9]+)?$')
23 regexXAODTriggerMenu = re.compile(r'^DataVector<xAOD::TriggerMenu(_v\d+)?>$') # Run 2
24 regexXAODTriggerMenuAux = re.compile(r'^xAOD::TriggerMenuAuxContainer(_v\d+)?$') # Run 2
25 regexXAODTriggerMenuJson = re.compile(r'^DataVector<xAOD::TriggerMenuJson(_v\d+)?>$') # Run 3
26 regexXAODTriggerMenuJsonAux = re.compile(r'^xAOD::TriggerMenuJsonAuxContainer(_v\d+)?$') # Run 3
27 regexXAODTruthMetaData = re.compile(r'^DataVector<xAOD::TruthMetaData(_v\d+)?>$')
28 regexXAODTruthMetaDataAux = re.compile(r'^xAOD::TruthMetaDataAuxContainer(_v\d+)?$')
29 regex_cppname = re.compile(r'^([\w:]+)(<.*>)?$')
30 # regex_persistent_class = re.compile(r'^([a-zA-Z]+_p\d+::)*[a-zA-Z]+_p\d+$')
31 regex_persistent_class = re.compile(r'^([a-zA-Z]+(_[pv]\d+)?::)*[a-zA-Z]+_[pv]\d+$')
32 regex_BS_files = re.compile(r'^(\w+):.*((\.D?RAW\..*)|(\.data$))')
33 regex_URI_scheme = re.compile(r'^([A-Za-z0-9\+\.\-]+)\:')
34 
35 lite_primary_keys_to_keep = [
36  'lumiBlockNumbers', 'runNumbers', 'mc_event_number', 'mc_channel_number',
37  'eventTypes', 'processingTags', 'itemList']
38 lite_TagInfo_keys_to_keep = [
39  'beam_energy', 'beam_type', 'GeoAtlas', 'IOVDbGlobalTag',
40  'AODFixVersion', 'project_name', 'mc_campaign']
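# Illustrative sketch (hypothetical values) of what the 'lite' filtering based on
# lite_TagInfo_keys_to_keep retains from the '/TagInfo' folder of a POOL file:
#   {'beam_type': 'collisions', 'beam_energy': '6500000',
#    'GeoAtlas': 'ATLAS-R2-2016-01-00-01', 'IOVDbGlobalTag': 'OFLCOND-MC16-SDR-25',
#    'project_name': 'mc16_13TeV'}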
41 
42 trigger_keys = [
43  'TriggerConfigInfo',
44  'TriggerMenu', 'TriggerMenuJson_BG', 'TriggerMenuJson_HLT', 'TriggerMenuJson_HLTMonitoring', 'TriggerMenuJson_HLTPS', 'TriggerMenuJson_L1', 'TriggerMenuJson_L1PS',
45  '/TRIGGER/HLT/Groups', '/TRIGGER/HLT/HltConfigKeys', '/TRIGGER/HLT/Menu', '/TRIGGER/HLT/PrescaleKey', '/TRIGGER/HLT/Prescales',
46  '/TRIGGER/LVL1/ItemDef', '/TRIGGER/LVL1/Lvl1ConfigKey', '/TRIGGER/LVL1/Menu', '/TRIGGER/LVL1/Prescales', '/TRIGGER/LVL1/Thresholds',
47  'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenu', 'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_BG', 'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_HLT',
48  'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_HLTMonitoring', 'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_HLTPS',
49  'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_L1', 'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_L1PS',
50 ]
51 
52 
53 def read_metadata(filenames, file_type = None, mode = 'lite', promote = None, meta_key_filter = None,
54  unique_tag_info_values = True, ignoreNonExistingLocalFiles=False):
55  """
56  This tool is independent of the Athena framework and returns the metadata from the given file(s).
57  :param filenames: the input file, or list of files, from which the metadata is to be extracted.
58  :param file_type: the type of file: 'POOL' or 'BS' (bytestream: RAW, DRAW). Auto-detected if not given.
59  :param mode: one of 'tiny', 'lite' (default), 'peeker', 'iov' or 'full'. 'tiny' returns only the keys
60  'file_guid', 'file_size', 'file_type' and 'nentries'; the other modes add progressively more metadata.
61  :return: a dictionary of metadata for each given input file, keyed by filename.
62  """
63 
64  # make the mode available in the _convert methods
65  global _gbl_mode
66  _gbl_mode = mode
67 
68  from RootUtils import PyROOTFixes # noqa F401
69 
70  # Check if the input is a file or a list of files.
71  if isinstance(filenames, str):
72  filenames = [filenames]
73 
74  # Check if file_type is an allowed value
75  if file_type is not None:
76  if file_type not in ('POOL', 'BS'):
77  raise NameError('Allowed values for \'file_type\' parameter are: "POOL" or "BS": you provided "' + file_type + '"')
78  else:
79  msg.info('Forced file_type: {0}'.format(file_type))
80 
81  # Check the value of mode parameter
82  if mode not in ('tiny', 'lite', 'full', 'peeker', 'iov'):
83  raise NameError('Allowed values for "mode" parameter are: "tiny", "lite", "peeker", "iov" or "full"')
84 
85  if meta_key_filter is None:
86  meta_key_filter = []
87 
88  # Disable 'full' and 'iov' in non-Gaudi environments
89  if not isGaudiEnv():
90  if mode in ('full', 'iov'):
91  raise NameError('The following modes are not available in AnalysisBase: "iov" and "full"')
92 
93  msg.info('Current mode used: {0}'.format(mode))
94  msg.info('Current filenames: {0}'.format(filenames))
95 
96  if mode != 'full' and mode != 'iov' and len(meta_key_filter) > 0:
97  raise NameError('The meta_key_filter option can only be used with the "full" or "iov" modes')
98  if meta_key_filter:
99  msg.info('Filter used: {0}'.format(meta_key_filter))
100 
101  # create the storage object for metadata.
102  meta_dict = {}
103 
104  # ----- retrieve metadata from all filename or filenames --------------------------------------------------------#
105  for filename in filenames:
106  meta_dict[filename] = {}
107  current_file_type = None
108  # Determine the file_type of the input and store this information into meta_dict
109  if not file_type:
110  if os.path.isfile(filename):
111 
112  if ignoreNonExistingLocalFiles and not regex_URI_scheme.match(filename) and gSystem.AccessPathName(filename): # NB: AccessPathName() returns True when the path is NOT accessible
113  msg.warn('Ignoring inaccessible file: {}'.format(filename))
114  continue
115 
116  with open(filename, 'rb') as binary_file:
117  magic_file = binary_file.read(4)
118 
119  if magic_file == 'root' or magic_file == b'root':
120  current_file_type = 'POOL'
121  meta_dict[filename]['file_type'] = 'POOL'
122 
123  else:
124  current_file_type = 'BS'
125  meta_dict[filename]['file_type'] = 'BS'
126 
127  # add information about the file_size of the input filename
128  meta_dict[filename]['file_size'] = os.path.getsize(filename)
129 
130  # determine the file type for the remote input files
131  else:
132  if regex_BS_files.match(filename):
133  current_file_type = 'BS'
134  meta_dict[filename]['file_type'] = 'BS'
135  else:
136  current_file_type = 'POOL'
137  meta_dict[filename]['file_type'] = 'POOL'
138 
139  # add information about the file_size of the input filename
140  meta_dict[filename]['file_size'] = None # None -> we can't read the file size for a remote file
141 
142  else:
143  current_file_type = file_type
144 
145  # ----- retrieves metadata from POOL files ------------------------------------------------------------------#
146  if current_file_type == 'POOL':
147 
148  if ignoreNonExistingLocalFiles and not regex_URI_scheme.match(filename) and gSystem.AccessPathName(filename): # NB: AccessPathName() returns True when the path is NOT accessible
149  msg.warn('Ignoring inaccessible file: {}'.format(filename))
150  continue
151 
152  import ROOT
153  # open the file using ROOT.TFile
154  current_file = ROOT.TFile.Open( _get_pfn(filename) )
155 
156  # get auto flush setting from the main EventData TTree
157  from PyUtils.PoolFile import PoolOpts
158  collectionTree = current_file.Get(PoolOpts.TTreeNames.EventData)
159  if isinstance(collectionTree, ROOT.TTree):
160  meta_dict[filename]['auto_flush'] = collectionTree.GetAutoFlush()
161 
162  # read and add the 'GUID' value
163  meta_dict[filename]['file_guid'] = _read_guid(filename)
164 
165  # read and add compression level and algorithm
166  meta_dict[filename]['file_comp_alg'] = current_file.GetCompressionAlgorithm()
167  meta_dict[filename]['file_comp_level'] = current_file.GetCompressionLevel()
168 
169  if isRNTuple( current_file.Get(PoolOpts.RNTupleNames.MetaData) ):
170  msg.warning(
171  "Reading in-file metadata from RNTuple currently has only limited support"
172  )
173  meta_dict[filename]["nentries"] = dataheader_nentries(current_file)
174 
175  def get_raw_md(filename):
176  """Helper function to read the raw metadata from RNTuple.
177  We use a subprocess because RNTupleReader writes its output to
178  std::ostream, which is not captured by PyROOT.
179 
180  Returns the raw metadata as a json-like string, but one cannot
181  assume it is valid json.
182 
183  Known constructs that make the output invalid JSON:
184  - double quotes are not escaped in string values
185  - single-quoted strings
186  - nested json objects and lists inside single-quoted strings
187  """
188  import subprocess
189  import sys
190 
191  raw_md = f"""
192 from ROOT.Experimental import RNTupleReader
193 from ROOT import TFile
194 
195 
196 def read_md(infile):
197  file_handle = TFile.Open(infile)
198  md = file_handle.Get("MetaData")
199  reader = RNTupleReader.Open(md)
200  reader.Show(0)
201 
202 read_md("{filename}")
203  """
204  result = subprocess.run(
205  [sys.executable, "-c", raw_md],
206  capture_output=True,
207  text=True,
208  )
209  raw_data = "".join(result.stdout.split())
210  return raw_data.replace("\x00", '""')
211 
212  def extract_keys(json_like_string, keys):
213  """Helper for extracting key-value pairs from json-like string"""
214  import json
215 
216  result = {}
217  for key in keys:
218  if key == "m_eventTypes":
219  pattern = rf'"{key}":(\[\{{.*?\}}\])'
220  elif "beamEnergy" in key:
221  pattern = rf'"{key}":(\b[+]?([0-9]*\.[0-9]+|[0-9]+\.?[0-9]*)[eE][+]?([0-9]+)\b)'
222  else:
223  pattern = rf'"{key}"\s*:\s*(\[[^\]]*\]|"[^"]*"|\d+)'
224  match = re.search(pattern, json_like_string)
225  if match:
226  try:
227  result[key] = json.loads(match.group(1))
228  except json.JSONDecodeError:
229  pass
230  return result
231 
232  # metadata keys which can be relatively reliably extracted from RNTuple
233  keys_to_extract = [
234  "m_numberOfEvents",
235  "m_runNumbers",
236  "m_lumiBlockNumbers",
237  "m_processingTags",
238  "m_itemList",
239  "m_eventTypes",
240  "m_branchNames",
241  "m_classNames",
242  "FileMetaDataAuxDyn:amiTag",
243  "FileMetaDataAuxDyn:AODFixVersion",
244  "FileMetaDataAuxDyn:AODCalibVersion",
245  "FileMetaDataAuxDyn:beamEnergy",
246  "FileMetaDataAuxDyn:beamType",
247  "FileMetaDataAuxDyn:conditionsTag",
248  "FileMetaDataAuxDyn:dataYear",
249  "FileMetaDataAuxDyn:generatorsInfo",
250  "FileMetaDataAuxDyn:geometryVersion",
251  "FileMetaDataAuxDyn:isDataOverlay",
252  "FileMetaDataAuxDyn:mcCampaign",
253  "FileMetaDataAuxDyn:mcProcID",
254  "FileMetaDataAuxDyn:simFlavour",
255  "productionRelease",
256  "dataType",
257  ]
258 
259  result = extract_keys(get_raw_md(filename), keys_to_extract)
260 
261  item_list = []
262  from CLIDComps.clidGenerator import clidGenerator
263 
264  cgen = clidGenerator("")
265  for item in result["m_itemList"]:
266  item_list.append((cgen.getNameFromClid(item["_0"]), item["_1"].encode("utf-8")))
267  meta_dict[filename]["itemList"] = item_list
268  event_types = []
269  for event_type in result["m_eventTypes"]:
270  fields = {
271  key.removeprefix("m_"): value
272  for key, value in event_type.items()
273  }
274  fields = _convert_event_type_bitmask(fields)
275  fields = _convert_event_type_user_type(fields)
276  event_types.extend(fields["type"])
277  meta_dict[filename]["eventTypes"] = event_types
278  meta_dict[filename]["numberOfEvents"] = result["m_numberOfEvents"]
279  meta_dict[filename]["runNumbers"] = result["m_runNumbers"]
280  meta_dict[filename]["lumiBlockNumbers"] = result["m_lumiBlockNumbers"]
281  meta_dict[filename]["processingTags"] = result["m_processingTags"]
282 
283  meta_dict[filename]["EventFormat"] = {}
284  ef_items = {}
285  for branch_name, class_name in dict(
286  zip(
287  result["m_branchNames"],
288  result["m_classNames"],
289  )
290  ).items():
291  ef_items[branch_name] = class_name
292  meta_dict[filename]["EventFormat"] = ef_items
293 
294  meta_dict[filename]["FileMetaData"] = {}
295  for key in keys_to_extract:
296  try:
297  meta_dict[filename]["FileMetaData"][key.split(":")[1]] = (
298  result[key]
299  )
300  except (IndexError, KeyError):
301  continue
302  msg.debug(f"Read metadata from RNTuple: {meta_dict[filename]}")
303  return meta_dict
304 
305  # ----- read extra metadata required for 'lite' and 'full' modes ----------------------------------------#
306  if mode != 'tiny':
307  # select, from all the trees in the file, the one that contains the metadata, namely "MetaData"
308  metadata_tree = current_file.Get('MetaData')
309  # read the list of branches stored in the "MetaData" tree
310  metadata_branches = metadata_tree.GetListOfBranches()
311  nr_of_branches = metadata_branches.GetEntriesFast()
312 
313  # object to store the names of metadata containers and their corresponding class name.
314  meta_dict[filename]['metadata_items'] = {}
315 
316  # create a container for the list of filters used for the lite version
317  meta_filter = {}
318 
319  # set the filters for name
320  if mode == 'lite':
321  if isGaudiEnv():
322  meta_filter = {
323  '/TagInfo': 'IOVMetaDataContainer_p1',
324  'IOVMetaDataContainer_p1__TagInfo': 'IOVMetaDataContainer_p1',
325  '*': 'EventStreamInfo_p*'
326  }
327  else:
328  meta_filter = {
329  'FileMetaData': '*',
330  'FileMetaDataAux.': 'xAOD::FileMetaDataAuxInfo_v1',
331  }
332 
333  # add the additional filters used for the 'peeker' mode
334  if mode == 'peeker':
335  meta_filter.update({
336  'TriggerMenu': 'DataVector<xAOD::TriggerMenu_v1>', # R2 trigger metadata format AOD (deprecated)
337  'TriggerMenuAux.': 'xAOD::TriggerMenuAuxContainer_v1',
338  'DataVector<xAOD::TriggerMenu_v1>_TriggerMenu': 'DataVector<xAOD::TriggerMenu_v1>', # R2 trigger metadata format ESD (deprecated)
339  'xAOD::TriggerMenuAuxContainer_v1_TriggerMenuAux.': 'xAOD::TriggerMenuAuxContainer_v1',
340  'TriggerMenuJson_HLT': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format AOD
341  'TriggerMenuJson_HLTAux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
342  'TriggerMenuJson_HLTMonitoring': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format AOD
343  'TriggerMenuJson_HLTMonitoringAux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
344  'TriggerMenuJson_HLTPS': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format AOD
345  'TriggerMenuJson_HLTPSAux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
346  'TriggerMenuJson_L1': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format AOD
347  'TriggerMenuJson_L1Aux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
348  'TriggerMenuJson_L1PS': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format AOD
349  'TriggerMenuJson_L1PSAux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
350  'CutBookkeepers': 'xAOD::CutBookkeeperContainer_v1',
351  'CutBookkeepersAux.': 'xAOD::CutBookkeeperAuxContainer_v1',
352  'FileMetaData': '*',
353  'FileMetaDataAux.': 'xAOD::FileMetaDataAuxInfo_v1',
354  'TruthMetaData': '*',
355  'TruthMetaDataAux.': 'xAOD::TruthMetaDataAuxContainer_v1',
356  'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_HLT': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format ESD
357  'xAOD::TriggerMenuJsonAuxContainer_v1_TriggerMenuJson_HLTAux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
358  'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_HLTMonitoring': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format ESD
359  'xAOD::TriggerMenuJsonAuxContainer_v1_TriggerMenuJson_HLTMonitoringAux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
360  'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_HLTPS': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format ESD
361  'xAOD::TriggerMenuJsonAuxContainer_v1_TriggerMenuJson_HLTPSAux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
362  'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_L1': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format ESD
363  'xAOD::TriggerMenuJsonAuxContainer_v1_TriggerMenuJson_L1Aux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
364  'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_L1PS': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format ESD
365  'xAOD::TriggerMenuJsonAuxContainer_v1_TriggerMenuJson_L1PSAux.': 'xAOD::TriggerMenuJsonAuxContainer_v1'
366  })
367 
368  if isGaudiEnv():
369  meta_filter.update({
370  '/TagInfo': 'IOVMetaDataContainer_p1',
371  'IOVMetaDataContainer_p1__TagInfo': 'IOVMetaDataContainer_p1',
372  '/Simulation/Parameters': 'IOVMetaDataContainer_p1',
373  '/Digitization/Parameters': 'IOVMetaDataContainer_p1',
374  '/EXT/DCS/MAGNETS/SENSORDATA': 'IOVMetaDataContainer_p1',
375  '*': 'EventStreamInfo_p*'
376  })
377 
378  if (mode == 'full' or mode == 'iov') and meta_key_filter:
379  meta_filter = {f: '*' for f in meta_key_filter}
380  # store the persistent class instances for the metadata containers present in the POOL/ROOT file.
381  persistent_instances = {}
382  dynamic_fmd_items = {}
383 
384  # Protect non-Gaudi environments from metadata classes they don't know about
385  if not isGaudiEnv():
386  metadata_tree.SetBranchStatus("*", False)
387 
388  for i in range(0, nr_of_branches):
389  branch = metadata_branches.At(i)
390  name = branch.GetName()
391  if name == 'index_ref':
392  # skip the index branch
393  continue
394 
395  class_name = branch.GetClassName()
396 
397  if regexIOVMetaDataContainer.match(class_name):
398  name = name.replace('IOVMetaDataContainer_p1_', '').replace('_', '/')
399 
400  if regexIOVMetaDataContainer.match(class_name):
401  meta_dict[filename]['metadata_items'][name] = 'IOVMetaDataContainer'
402  elif regexByteStreamMetadataContainer.match(class_name):
403  meta_dict[filename]['metadata_items'][name] = 'ByteStreamMetadataContainer'
404  elif regexEventStreamInfo.match(class_name):
405  meta_dict[filename]['metadata_items'][name] = 'EventStreamInfo'
406  elif regexXAODFileMetaData.match(class_name):
407  meta_dict[filename]['metadata_items'][name] = 'FileMetaData'
408  elif regexXAODTruthMetaData.match(class_name):
409  meta_dict[filename]['metadata_items'][name] = 'TruthMetaData'
410  else:
411  type_name = class_name
412  if not type_name:
413  try:
414  type_name = branch.GetListOfLeaves()[0].GetTypeName()
415  except IndexError:
416  pass
417  meta_dict[filename]['metadata_items'][name] = type_name
418 
419  if len(meta_filter) > 0:
420  keep = False
421  for filter_key, filter_class in meta_filter.items():
422  if (filter_key.replace('/', '_') in name.replace('/', '_') or filter_key == '*') and fnmatchcase(class_name, filter_class):
423  if 'CutBookkeepers' in filter_key:
424  keep = filter_key == name
425  if keep:
426  break
427  else:
428  keep = True
429  break
430 
431  if not keep:
432  continue
433  else:
434  # CutBookkeepers should always be filtered:
435  if 'CutBookkeepers' in name and name not in ['CutBookkeepers', 'CutBookkeepersAux.']:
436  continue
437 
438  if not isGaudiEnv():
439  metadata_tree.SetBranchStatus(f"{name}*", True)
440 
441  # assign the corresponding persistent class based on the name of the metadata container
442  if regexEventStreamInfo.match(class_name):
443  if class_name.endswith('_p1'):
444  persistent_instances[name] = ROOT.EventStreamInfo_p1()
445  elif class_name.endswith('_p2'):
446  persistent_instances[name] = ROOT.EventStreamInfo_p2()
447  else:
448  persistent_instances[name] = ROOT.EventStreamInfo_p3()
449  elif regexIOVMetaDataContainer.match(class_name):
450  persistent_instances[name] = ROOT.IOVMetaDataContainer_p1()
451  elif regexXAODEventFormat.match(class_name):
452  persistent_instances[name] = ROOT.xAOD.EventFormat_v1()
453  elif regexXAODTriggerMenu.match(class_name) and _check_project() not in ['AthGeneration']:
454  persistent_instances[name] = ROOT.xAOD.TriggerMenuContainer_v1()
455  elif regexXAODTriggerMenuAux.match(class_name) and _check_project() not in ['AthGeneration']:
456  persistent_instances[name] = ROOT.xAOD.TriggerMenuAuxContainer_v1()
457  elif regexXAODTriggerMenuJson.match(class_name) and _check_project() not in ['AthGeneration']:
458  persistent_instances[name] = ROOT.xAOD.TriggerMenuJsonContainer_v1()
459  elif regexXAODTriggerMenuJsonAux.match(class_name) and _check_project() not in ['AthGeneration']:
460  persistent_instances[name] = ROOT.xAOD.TriggerMenuJsonAuxContainer_v1()
461  elif regexXAODCutBookkeeperContainer.match(class_name):
462  persistent_instances[name] = ROOT.xAOD.CutBookkeeperContainer_v1()
463  elif regexXAODCutBookkeeperContainerAux.match(class_name):
464  persistent_instances[name] = ROOT.xAOD.CutBookkeeperAuxContainer_v1()
465  elif regexXAODFileMetaData.match(class_name):
466  persistent_instances[name] = ROOT.xAOD.FileMetaData_v1()
467  elif regexXAODFileMetaDataAux.match(class_name):
468  persistent_instances[name] = ROOT.xAOD.FileMetaDataAuxInfo_v1()
469  elif regexXAODTruthMetaData.match(class_name):
470  persistent_instances[name] = ROOT.xAOD.TruthMetaDataContainer_v1()
471  elif regexXAODTruthMetaDataAux.match(class_name):
472  persistent_instances[name] = ROOT.xAOD.TruthMetaDataAuxContainer_v1()
473 
474  if name in persistent_instances:
475  branch.SetAddress(ROOT.AddressOf(persistent_instances[name]))
476 
477  # This creates a dict to store the dynamic attributes of the xAOD::FileMetaData
478  dynamicFMD = regexXAODFileMetaDataAuxDyn.match(name)
479  if dynamicFMD:
480  dynamicName = dynamicFMD.group().split('.')[-1]
481  dynamicType = regex_cppname.match(class_name)
482  if dynamicType:
483  # this should be a string
484  dynamic_fmd_items[dynamicName] = ROOT.std.string()
485  branch.SetAddress(ROOT.AddressOf(dynamic_fmd_items[dynamicName]))
486  else:
487  dynamic_fmd_items[dynamicName] = None
488 
489 
490  metadata_tree.GetEntry(0)
491 
492  # This loads the dynamic attributes of the xAOD::FileMetaData from the TTree
493  for key in dynamic_fmd_items:
494  if dynamic_fmd_items[key] is None:
495  try:
496  if key.startswith("is"):
497  # this is probably a boolean
498  dynamic_fmd_items[key] = getattr(metadata_tree, key) != '\x00'
499  else:
500  # this should be a float
501  dynamic_fmd_items[key] = getattr(metadata_tree, key)
502  except AttributeError:
503  # should not happen, but just ignore missing attributes
504  pass
505  else:
506  # convert ROOT.std.string objects to python equivalent
507  dynamic_fmd_items[key] = str(dynamic_fmd_items[key])
508 
509  # clean the meta-dict if the meta_key_filter flag is used, to return only the key of interest
510  if meta_key_filter:
511  meta_dict[filename] = {}
512 
513  # read the metadata
514  for name, content in persistent_instances.items():
515  key = name
516  if hasattr(content, 'm_folderName'):
517  key = content.m_folderName
518 
519  # Some transition AODs contain both the Run2 and Run3 metadata formats. We only wish to read the Run3 format if such a file is encountered.
520  has_r3_trig_meta = ('TriggerMenuJson_HLT' in persistent_instances or 'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_HLT' in persistent_instances)
521  aux = None
522  if key.startswith('TriggerMenuJson_') and not key.endswith('Aux.'): # interface container for the menu (AOD)
523  aux = persistent_instances[key+'Aux.']
524  elif key.startswith('DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_') and not key.endswith('Aux.'): # interface container for the menu (ESD)
525  menuPart = key.split('_')[-1]
526  aux = persistent_instances['xAOD::TriggerMenuJsonAuxContainer_v1_TriggerMenuJson_'+menuPart+'Aux.']
527  elif key == 'TriggerMenu' and 'TriggerMenuAux.' in persistent_instances and not has_r3_trig_meta: # AOD case (legacy support, HLT and L1 menus)
528  aux = persistent_instances['TriggerMenuAux.']
529  elif key == 'DataVector<xAOD::TriggerMenu_v1>_TriggerMenu' and 'xAOD::TriggerMenuAuxContainer_v1_TriggerMenuAux.' in persistent_instances and not has_r3_trig_meta: # ESD case (legacy support, HLT and L1 menus)
530  aux = persistent_instances['xAOD::TriggerMenuAuxContainer_v1_TriggerMenuAux.']
531  elif (key == 'CutBookkeepers'
532  and 'CutBookkeepersAux.' in persistent_instances):
533  aux = persistent_instances['CutBookkeepersAux.']
534  elif key == 'CutBookkeepersAux.':
535  continue # Extracted using the interface object
536  elif (key == 'FileMetaData'
537  and 'FileMetaDataAux.' in persistent_instances):
538  aux = persistent_instances['FileMetaDataAux.']
539  elif (key == 'xAOD::FileMetaData_v1_FileMetaData'
540  and 'xAOD::FileMetaDataAuxInfo_v1_FileMetaDataAux.' in persistent_instances):
541  aux = persistent_instances['xAOD::FileMetaDataAuxInfo_v1_FileMetaDataAux.']
542  elif (key == 'TruthMetaData'
543  and 'TruthMetaDataAux.' in persistent_instances):
544  aux = persistent_instances['TruthMetaDataAux.']
545  elif key == 'TruthMetaDataAux.':
546  continue # Extracted using the interface object
547  elif 'Menu' in key and key.endswith('Aux.'):
548  continue # Extracted using the interface object
549 
550  return_obj = _convert_value(content, aux)
551 
552  if 'TriggerMenuJson' in key or ('TriggerMenu' in key and not has_r3_trig_meta):
553  if 'RAWTriggerMenuJson' in return_obj:
554  meta_dict[filename][key] = return_obj['RAWTriggerMenuJson']
555  del return_obj['RAWTriggerMenuJson']
556  if 'TriggerConfigInfo' not in meta_dict[filename]:
557  meta_dict[filename]['TriggerConfigInfo'] = {}
558  if 'dbkey' in return_obj:
559  meta_dict[filename]['TriggerConfigInfo'][key.split('_')[-1]] = {
560  'key' : return_obj['dbkey'],
561  'name': return_obj['name']
562  }
563  del return_obj['dbkey']
564  del return_obj['name']
565  if 'TriggerMenu' not in meta_dict[filename]:
566  meta_dict[filename]['TriggerMenu'] = {}
567  meta_dict[filename]['TriggerMenu'].update(return_obj)
568  elif "FileMetaData" in key:
569  if "FileMetaData" not in meta_dict[filename]:
570  meta_dict[filename]["FileMetaData"] = dynamic_fmd_items
571  meta_dict[filename]["FileMetaData"].update(return_obj)
572  else:
573  meta_dict[filename][key] = return_obj
574 
575  try:
576  # get the number of events from EventStreamInfo
577  esi_dict = next(key for key, value in meta_dict[filename].items()
578  if isinstance(value, dict) and "numberOfEvents" in value and
579  meta_dict[filename]["metadata_items"][key] == "EventStreamInfo")
580  msg.debug(f"{esi_dict=}")
581  meta_dict[filename]["nentries"] = meta_dict[filename][esi_dict]["numberOfEvents"]
582  except StopIteration as err:
583  msg.debug(f"Caught {err=}, {type(err)=}, falling back on opening the DataHeader"
584  " Container to read the number of entries")
585  meta_dict[filename]['nentries'] = dataheader_nentries(current_file)
586  msg.debug(f"{meta_dict[filename]['nentries']=}")
587 
588  if unique_tag_info_values and mode=='iov':
589  unique_tag_info_values = False
590  msg.info('disabling "unique_tag_info_values" option for "iov" mode')
591 
592  # This is a required workaround that temporarily fixes ATEAM-560, which originated from ATEAM-531
593  # ATEAM-560: https://its.cern.ch/jira/browse/ATEAM-560
594  # ATEAM-531: https://its.cern.ch/jira/browse/ATEAM-531
595  # This change removes all duplicate values present in some files due
596  # to the improper merging of two IOVMetaDataContainers.
597  if unique_tag_info_values:
598  msg.info('MetaReader is called with the parameter "unique_tag_info_values" set to True. '
599  'This is a workaround to remove all duplicate values from "/TagInfo" key')
600  if '/TagInfo' in meta_dict[filename]:
601  for key, value in meta_dict[filename]['/TagInfo'].items():
602  if isinstance(value, list) and value:
603  if len(unique_values := set(value)) > 1:
604  msg.warn(
605  f"Found multiple values for {key}: {value}. "
606  "Looking for possible duplicates."
607  )
608  maybe_ok = False
609  if key == "AMITag":
610  # curate duplicates like: ['s3681_q453', 's3681_q453_'] or ["s3681_q453", "q453_s3681"]
611  unique_amitags = set()
612  for amitags in unique_values:
613  unique_amitags.add(
614  "_".join({tag for tag in amitags.split("_") if tag})
615  )
616  if len(unique_amitags) == 1:
617  maybe_ok = True
618  elif key == "beam_energy":
619  # handle duplicates like: ['6500000', '6500000.0'] or [3, "3"]
620  unique_energies = set()
621  for energy in unique_values:
622  try:
623  energy = int(energy)
624  except ValueError:
625  try:
626  energy = float(energy)
627  except ValueError:
628  pass
629  unique_energies.add(energy)
630  if len(unique_energies) == 1:
631  maybe_ok = True
632  elif key in ["AtlasRelease", "IOVDbGlobalTag", "AODFixVersion"]:
633  maybe_ok = True
634  if maybe_ok:
635  msg.warn(
636  f"Multiple values for {key} may mean the same, or "
637  "the input file was produced in multi-step job. "
638  f"Ignoring all but the first entry: {key} = {value[0]}"
639  )
640  else:
641  raise ValueError(
642  f"{key} from /TagInfo contains more than 1 unique value: {value}"
643  )
644 
645  meta_dict[filename]['/TagInfo'][key] = value[0]
646 
647  if promote is None:
648  promote = mode == 'lite' or mode == 'peeker'
649 
650  # Filter the data and create a prettier output for the 'lite' mode
651  if mode == 'lite':
652  meta_dict = make_lite(meta_dict)
653 
654  if mode == 'peeker':
655  meta_dict = make_peeker(meta_dict)
656 
657  if promote:
658  meta_dict = promote_keys(meta_dict, mode)
659 
660  # In AnalysisBase the itemList must be grabbed another way
661  if not isGaudiEnv():
662  if isinstance(collectionTree, ROOT.TTree):
663  meta_dict[filename]['itemList'] = [ (b.GetClassName(), b.GetName()) for b in collectionTree.GetListOfBranches() ]
664 
665  # ----- retrieves metadata from bytestream (BS) files (RAW, DRAW) ------------------------------------------#
666  elif current_file_type == 'BS':
667 
668  if ignoreNonExistingLocalFiles and not regex_URI_scheme.match(filename) and not os.path.isfile(filename):
669  msg.warn('Ignoring inaccessible file: {}'.format(filename))
670  continue
671 
672  import eformat
673 
674  # store the number of entries
675  bs = eformat.istream(filename)
676  meta_dict[filename]['nentries'] = bs.total_events
677 
678  # store the 'guid' value
679  data_reader = eformat.EventStorage.pickDataReader(filename)
680  assert data_reader, 'problem picking a data reader for file [%s]' % filename
681 
682  # set auto flush equivalent, which for BS is always 1
683  meta_dict[filename]['auto_flush'] = 1
684 
685  if hasattr(data_reader, 'GUID'):
686  meta_dict[filename]['file_guid'] = data_reader.GUID()
687 
688  # compression level and algorithm, for BS always ZLIB
689  meta_dict[filename]['file_comp_alg'] = 1
690  meta_dict[filename]['file_comp_level'] = 1
691 
692 
693  # for any mode other than 'tiny', grab the remaining metadata
694  # ------------------------------------------------------------------------------------------------------#
695  if mode != "tiny":
696  bs_metadata = {}
697 
698  for md in data_reader.freeMetaDataStrings():
699  if md.startswith('Event type:'):
700  k = 'eventTypes'
701  v = []
702  if 'is sim' in md:
703  v.append('IS_SIMULATION')
704  else:
705  v.append('IS_DATA')
706 
707  if 'is atlas' in md:
708  v.append('IS_ATLAS')
709  else:
710  v.append('IS_TESTBEAM')
711 
712  if 'is physics' in md:
713  v.append('IS_PHYSICS')
714  else:
715  v.append('IS_CALIBRATION')
716 
717  bs_metadata[k] = tuple(v)
718 
719  elif md.startswith('GeoAtlas:'):
720  k = 'geometry'
721  v = md.split('GeoAtlas:')[1].strip()
722  bs_metadata[k] = v
723 
724  elif md.startswith('IOVDbGlobalTag:'):
725  k = 'conditions_tag'
726  v = md.split('IOVDbGlobalTag:')[1].strip()
727  bs_metadata[k] = v
728 
729  elif '=' in md:
730  k, v = md.split('=')
731  bs_metadata[k] = v
732 
733  bs_metadata['detectorMask'] = data_reader.detectorMask()
734  bs_metadata['runNumbers'] = data_reader.runNumber()
735  bs_metadata['lumiBlockNumbers'] = data_reader.lumiblockNumber()
736  bs_metadata['projectTag'] = data_reader.projectTag()
737  bs_metadata['stream'] = data_reader.stream()
738  #bs_metadata['beamType'] = getattr(data_reader, 'beamType')()
739  beamTypeNbr = data_reader.beamType()
740  # According to info from Rainer and Guiseppe the beam type is
741  # 0: no beam
742  # 1: protons
743  # 2: ions
744  if (beamTypeNbr==0): bs_metadata['beamType'] = 'cosmics'
745  elif (beamTypeNbr==1 or beamTypeNbr==2): bs_metadata['beamType'] = 'collisions'
746  else: bs_metadata['beamType'] = 'unknown'
747 
748  bs_metadata['beamEnergy'] = data_reader.beamEnergy()
749 
750  meta_dict[filename]['eventTypes'] = bs_metadata.get('eventTypes', [])
751  meta_dict[filename]['GeoAtlas'] = bs_metadata.get('geometry', None)
752  meta_dict[filename]['conditions_tag'] = bs_metadata.get('conditions_tag', None)
753  meta_dict[filename]['project_name'] = bs_metadata.get('projectTag', None)
754 
755  # Promote up one level
756  meta_dict[filename]['detectorMask'] = [bs_metadata.get('detectorMask', None)]
757  meta_dict[filename]['runNumbers'] = [bs_metadata.get('runNumbers', None)]
758  meta_dict[filename]['lumiBlockNumbers'] = [bs_metadata.get('lumiBlockNumbers', None)]
759  meta_dict[filename]['beam_type'] = bs_metadata.get('beamType', None)
760  meta_dict[filename]['beam_energy'] = bs_metadata.get('beamEnergy', None)
761  meta_dict[filename]['stream'] = bs_metadata.get('stream', None)
762 
763  if not data_reader.good():
764  # event-less file...
765  meta_dict[filename]['runNumbers'].append(bs_metadata.get('run_number', 0))
766  meta_dict[filename]['lumiBlockNumbers'].append(bs_metadata.get('LumiBlock', 0))
767 
768  msg.debug(f"{meta_dict[filename]=}")
769  msg.debug(f"{len(bs)=}")
770  if len(bs):
771  evt = bs[0]
772  try:
773  evt.check()
774  meta_dict[filename]['processingTags'] = [tag.name for tag in evt.stream_tag()]
775  meta_dict[filename]['evt_number'] = [evt.global_id()]
776  meta_dict[filename]['run_type'] = [eformat.helper.run_type2string(evt.run_type())]
777  # ATLASRECTS-7126: If there is no valid lumiblock information
778  # in the ByteStream header, get the info from the first event.
779  if meta_dict[filename]['lumiBlockNumbers'] == [0]:
780  msg.debug('Taking the luminosity block info from the first event (%i)', evt.lumi_block())
781  meta_dict[filename]['lumiBlockNumbers'] = [evt.lumi_block()]
782  # ATLASRECTS-7126: If there is no valid run number information
783  # in the ByteStream header, get the info from the first event.
784  if meta_dict[filename]['runNumbers'] == [0]:
785  msg.debug('Taking the run number info from the first event (%i)', evt.run_no())
786  meta_dict[filename]['runNumbers'] = [evt.run_no()]
787  except RuntimeError as err:
788  msg.error("Issue while reading the first event of BS file %r: %r", filename, err)
789  else:
790  msg.debug(f"{meta_dict[filename]=}")
791  else:
792  msg.warn(f"Event-less BS {filename=}, will not read metadata information from the first event")
793 
794  # fix for ATEAM-122
795  if len(bs_metadata.get('eventTypes', '')) == 0: # see: ATMETADATA-6
796  evt_type = ['IS_DATA', 'IS_ATLAS']
797  if bs_metadata.get('stream', '').startswith('physics_'):
798  evt_type.append('IS_PHYSICS')
799  elif bs_metadata.get('stream', '').startswith('calibration_'):
800  evt_type.append('IS_CALIBRATION')
801  elif bs_metadata.get('projectTag', '').endswith('_calib'):
802  evt_type.append('IS_CALIBRATION')
803  else:
804  evt_type.append('Unknown')
805 
806  meta_dict[filename]['eventTypes'] = evt_type
807 
808  if mode == 'full':
809  meta_dict[filename]['bs_metadata'] = bs_metadata
810 
811  # ------ Throw an error if the user provide other file types -------------------------------------------------#
812  else:
813  msg.error('Unknown filetype for {0} - there is no metadata interface for type {1}'.format(filename, current_file_type))
814  return None
815 
816  return meta_dict
817 
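# --- Usage sketch (illustrative only, not part of the MetaReader API) --------------------------#
# A minimal example of how read_metadata() is typically called; the input file name below is
# hypothetical and the keys shown are the ones documented for the 'tiny'/'lite' modes above.
def _example_read_metadata_usage():
    """Illustrative sketch: peek at a local POOL file in 'lite' mode."""
    md = read_metadata('myAOD.pool.root', mode='lite')   # hypothetical file name
    file_md = md['myAOD.pool.root']                      # per-file metadata dictionary
    return file_md['file_guid'], file_md['nentries'], file_md.get('eventTypes')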
818 
819 def _check_project():
820  import os
821  if 'AthSimulation_DIR' in os.environ:
822  return 'AthSimulation'
823  if 'AthGeneration_DIR' in os.environ:
824  return 'AthGeneration'
825  return 'Athena'
826 
827 
828 def _get_pfn(filename):
829  """
830  Extract the actual filename if LFN or PFN notation is used
831  """
832  pfx = filename[0:4]
833  if pfx == 'PFN:':
834  return filename[4:]
835  if pfx == 'LFN:':
836  import subprocess, os
837  os.environ['POOL_OUTMSG_LEVEL'] = 'Error'
838  output = subprocess.check_output(['FClistPFN','-l',filename[4:]],text=True).split('\n')
839  if len(output) == 2:
840  return output[0]
841  msg.error( 'FClistPFN({0}) returned unexpected number of lines:'.format(filename) )
842  msg.error( '\n'.join(output) )
843  return filename
844 
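# Illustrative examples (inputs are hypothetical) of the prefix handling in _get_pfn() above:
#   _get_pfn('PFN:/eos/atlas/somefile.pool.root')  ->  '/eos/atlas/somefile.pool.root'
#   _get_pfn('LFN:some_logical_name')              ->  physical name resolved via FClistPFN
#   _get_pfn('somefile.pool.root')                 ->  returned unchanged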
845 
846 def _read_guid(filename):
847  """
848  Extracts the GUID (Globally Unique Identifier, used in POOL files and Grid catalogs) from a POOL file.
849  :param filename: the input file
850  :return: the guid value, None if unavailable
851  """
852  import ROOT
853  root_file = ROOT.TFile.Open( _get_pfn(filename) )
854  params = root_file.Get('##Params')
855  try:
856  from ROOT import RNTuple as rnt
857  except ImportError:
858  from ROOT.Experimental import RNTuple as rnt
859  if not params:
860  return
861  if not isinstance(params, ROOT.TTree) and not isinstance(params, rnt) and not isinstance(params, ROOT.TDirectory):
862  raise NotImplementedError(f"Cannot extract GUID from object {params!r} of type {type(params)!r}")
863 
864  regex = re.compile(r'\[NAME=(\w+)\]\[VALUE=(.*)\]', re.ASCII)
865  fid = None
866 
867  if isinstance(params, ROOT.TTree):
868  for entry in params:
869  param = entry.GetLeaf('db_string').GetValueString()
870  result = regex.match(param)
871  if result and result.group(1) == 'FID' :
872  # don't exit yet, it's the last FID entry that counts
873  fid = result.group(2)
874  elif isinstance(params, rnt):
875  try:
876  from ROOT import RNTupleReader
877  except ImportError:
878  from ROOT.Experimental import RNTupleReader
879  reader = RNTupleReader.Open(params)
880  try:
881  entry = reader.CreateEntry()
882  except AttributeError:
883  entry = reader.GetModel().CreateEntry()
884  for idx in range(reader.GetNEntries()):
885  reader.LoadEntry(idx, entry)
886  try:
887  result = regex.match(str(entry['db_string']))
888  except (AttributeError, TypeError) as err:
889  # Early RNTuple implementation doesn't allow reading
890  # strings on the python side, might be triggering it...
891  msg.error(f"Cannot read FID from ##Params in RNTuple w/ ROOT error: {err}")
892  return None
893  if result and result.group(1) == 'FID' :
894  # don't exit yet, it's the last FID entry that counts
895  fid = result.group(2)
896  elif isinstance(params, ROOT.TDirectory):
897  for key in params.GetListOfKeys():
898  param = params.Get(key.GetName())
899  result = regex.match(str(param))
900  if result and result.group(1) == 'FID' :
901  # don't exit yet, it's the last FID entry that counts
902  fid = result.group(2)
903 
904  return fid
905 
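# Illustrative sketch of a '##Params' db_string entry matched by the regex in _read_guid() above
# (the GUID value shown is made up):
#   '[NAME=FID][VALUE=01234567-89AB-CDEF-0123-456789ABCDEF]'  ->  fid = '01234567-89AB-CDEF-0123-456789ABCDEF'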
906 
907 def _extract_fields(obj):
908  result = {}
909 
910  for meth in dir(obj):
911  if not meth.startswith('_'):
912  if meth.startswith('m_'):
913 
914  field_name = str(meth)[2:]
915  field_value = getattr(obj, meth)
916 
917  result[field_name] = _convert_value(field_value)
918 
919  return result
920 
921 
922 def _convert_value(value, aux = None):
923  cl=value.__class__
924 
925  if hasattr(cl, '__cpp_name__'):
926  result = regex_cppname.match(cl.__cpp_name__)
927  if result:
928  cpp_type = result.group(1)
929  if cpp_type == 'vector' or cpp_type == 'std::vector':
930  return [_convert_value(val) for val in value]
931  elif cpp_type == 'set' or cpp_type == 'std::set':
932  return {_convert_value(val) for val in value}
933  elif cpp_type == 'pair' or cpp_type == 'std::pair':
934  return _convert_value(value.first), _convert_value(value.second)
935 
936  # elif cpp_type == 'long':
937  # return int(value)
938 
939  elif cpp_type == 'string' or cpp_type == 'std::string':
940  return str(value)
941 
942  elif cl.__cpp_name__ == "_Bit_reference":
943  return bool(value)
944 
945  # special case which extracts data in a better format from IOVPayloadContainer_p1 class
946  elif cl.__cpp_name__ == 'IOVMetaDataContainer_p1':
947  return _extract_fields_iovmdc(value)
948 
949  elif cl.__cpp_name__ == 'IOVPayloadContainer_p1':
950  global _gbl_mode
951  if _gbl_mode == 'iov':
952  return _extract_iov_detailed(value)
953  else:
954  return _extract_fields_iov( value, range(value.m_attrIndexes.size()) )
955 
956  elif cl.__cpp_name__ == 'xAOD::EventFormat_v1':
957  return _extract_fields_ef(value)
958  elif cl.__cpp_name__ == 'xAOD::CutBookkeeperContainer_v1':
959  return _extract_fields_cbk(interface=value, aux=aux)
960  elif cl.__cpp_name__ == 'xAOD::FileMetaData_v1':
961  return _extract_fields_fmd(interface=value, aux=aux)
962  elif cl.__cpp_name__ == 'DataVector<xAOD::TruthMetaData_v1>':
963  return _extract_fields_tmd(interface=value, aux=aux)
964 
965  elif cl.__cpp_name__ == 'DataVector<xAOD::TriggerMenu_v1>' :
966  return _extract_fields_triggermenu(interface=value, aux=aux)
967 
968  elif cl.__cpp_name__ == 'DataVector<xAOD::TriggerMenuJson_v1>' :
969  return _extract_fields_triggermenujson(interface=value, aux=aux)
970 
971  elif (cl.__cpp_name__ == 'EventStreamInfo_p1' or
972  cl.__cpp_name__ == 'EventStreamInfo_p2' or
973  cl.__cpp_name__ == 'EventStreamInfo_p3'):
974  return _extract_fields_esi(value)
975 
976  elif (cl.__cpp_name__ == 'EventType_p1' or
977  cl.__cpp_name__ == 'EventType_p3'):
978  fields = _extract_fields(value)
979  fields = _convert_event_type_bitmask(fields)
980  fields = _convert_event_type_user_type(fields)
981  return fields
982 
983  elif regex_persistent_class.match(cl.__cpp_name__):
984  return _extract_fields(value)
985 
986  return value
987 
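# Rough sketch (illustrative, not exhaustive) of the conversions performed by _convert_value() above:
#   std::vector<std::string>              ->  python list of str
#   std::set<unsigned int>                ->  python set of int
#   std::pair<unsigned int, std::string>  ->  python 2-tuple
#   IOVMetaDataContainer_p1               ->  dict built via _extract_fields_iovmdc()
#   EventStreamInfo_p1/_p2/_p3            ->  dict built via _extract_fields_esi()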
988 
989 def _get_attribute_val(iov_container, attr_name, attr_idx):
990  type_idx = attr_idx.typeIndex()
991  obj_idx = attr_idx.objIndex()
992 
993  attr_value = None
994 
995  if type_idx == 0:
996  attr_value = bool(iov_container.m_bool[obj_idx])
997  elif type_idx == 1:
998  attr_value = int(iov_container.m_char[obj_idx])
999  elif type_idx == 2:
1000  attr_value = int(iov_container.m_unsignedChar[obj_idx])
1001  elif type_idx == 3:
1002  attr_value = int(iov_container.m_short[obj_idx])
1003  elif type_idx == 4:
1004  attr_value = int(iov_container.m_unsignedShort[obj_idx])
1005  elif type_idx == 5:
1006  attr_value = int(iov_container.m_int[obj_idx])
1007  elif type_idx == 6:
1008  attr_value = int(iov_container.m_unsignedInt[obj_idx])
1009  elif type_idx == 7:
1010  attr_value = int(iov_container.m_long[obj_idx])
1011  elif type_idx == 8:
1012  attr_value = int(iov_container.m_unsignedLong[obj_idx])
1013  elif type_idx == 9:
1014  attr_value = int(iov_container.m_longLong[obj_idx])
1015  elif type_idx == 10:
1016  attr_value = int(iov_container.m_unsignedLongLong[obj_idx])
1017  elif type_idx == 11:
1018  attr_value = float(iov_container.m_float[obj_idx])
1019  elif type_idx == 12:
1020  attr_value = float(iov_container.m_double[obj_idx])
1021  elif type_idx == 13:
1022  # skipping this type because in IOVPayloadContainer_p1.h (line 120) it is commented out and not considered
1023  pass
1024  elif type_idx == 14:
1025  attr_value = str(iov_container.m_string[obj_idx])
1026  # Cleaning class name from value
1027  if attr_value.startswith('IOVMetaDataContainer_p1_'):
1028  attr_value = attr_value.replace('IOVMetaDataContainer_p1_', '')
1029  if attr_value.startswith('_'):
1030  attr_value = attr_value.replace('_', '/')
1031  # Now it is clean
1032  elif type_idx == 15:
1033  attr_value = int(iov_container.m_date[obj_idx])
1034  elif type_idx == 16:
1035  attr_value = int(iov_container.m_timeStamp[obj_idx])
1036  else:
1037  raise ValueError('Unknown type id {0} for attribute {1}'.format(type_idx, attr_name))
1038 
1039  return attr_value
1040 
1041 
1042 def _extract_fields_iov( iov_container, idx_range ):
1043  result = {}
1044 
1045  for idx in idx_range:
1046  attr_idx = iov_container.m_attrIndexes[idx]
1047  name_idx = attr_idx.nameIndex()
1048  attr_name = iov_container.m_attrName[name_idx]
1049  attr_value = _get_attribute_val(iov_container, attr_name, attr_idx)
1050 
1051  if attr_name not in result:
1052  result[attr_name] = [attr_value]
1053  else:
1054  result[attr_name].append(attr_value)
1055 
1056  max_element_count = 0
1057  for content in result.values():
1058  if len(content) > max_element_count:
1059  max_element_count = len(content)
1060 
1061  if max_element_count <= 1:
1062  for name, content in result.items():
1063  if len(content) > 0:
1064  result[name] = content[0]
1065  else:
1066  result[name] = None
1067 
1068  return result
1069 
1070 
1071 def _extract_iov_detailed(iov_container):
1072  def iovtostr(t):
1073  # break iov time into high and low halves (run number usually in the higher half)
1074  return "({h}:{l})".format(h=t>>32, l=t&(2^32-1))
1075 
1076  def extract_list_collection(iov_container, listCollection ):
1077  result = {}
1078  ln = 0
1079  for list in listCollection.m_attrLists:
1080  ln = ln + 1
1081  lname = 'List {ln}: iov=[{s} ,{e}]; Channel#={ch}'.format(
1082  ln=ln, s=iovtostr(list.m_range.m_start),
1083  e=iovtostr(list.m_range.m_stop),
1084  ch=list.m_channelNumber )
1085  result[ lname ] = _extract_fields_iov( iov_container, range(list.m_firstIndex, list.m_lastIndex) )
1086  return result
1087 
1088  result = {}
1089  pn = 0
1090  for listCollection in iov_container.m_payloadVec:
1091  pn = pn + 1
1092  pname = 'IOV range {n}: [{s}, {e}]'.format(n=pn, s=iovtostr(listCollection.m_start),
1093  e=iovtostr(listCollection.m_stop))
1094  result[ pname ] = extract_list_collection(iov_container, listCollection )
1095  return result
1096 
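# Worked example (values are illustrative) of the IOV packing unpacked by iovtostr() above:
# an IOV time stores the run number in the upper 32 bits and typically the lumi block (or event)
# in the lower 32 bits, so
#   t = (358031 << 32) | 12   gives   iovtostr(t) == '(358031:12)'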
1097 
1098 def _extract_fields_iovmdc(value):
1099  return _convert_value(value.m_payload)
1100 
1101 
1102 def _extract_fields_esi(value):
1103  result = {}
1104 
1105  result['eventTypes'] = []
1106  for eventType in value.m_eventTypes:
1107  result['eventTypes'].append(_convert_value(eventType))
1108 
1109  result['numberOfEvents'] = value.m_numberOfEvents
1110  result['runNumbers'] = list(value.m_runNumbers)
1111  result['lumiBlockNumbers'] = list(value.m_lumiBlockNumbers)
1112  result['processingTags'] = [str(v) for v in value.m_processingTags]
1113  result['itemList'] = []
1114 
1115  # Get the class name in the repository with CLID <clid>
1116  from CLIDComps.clidGenerator import clidGenerator
1117  cgen = clidGenerator("")
1118  for clid, sgkey in value.m_itemList:
1119  result['itemList'].append((cgen.getNameFromClid(clid), sgkey))
1120 
1121  return result
1122 
1123 
1124 def _extract_fields_ef(value):
1125  result = {}
1126 
1127  for ef_element in value:
1128  result[ef_element.first] = ef_element.second.className()
1129 
1130  return result
1131 
1132 
1133 def _extract_fields_cbk(interface=None, aux=None):
1134  """Extract CutBookkeeper content into dictionary
1135 
1136  This function takes the CutBookkeeperContainer_v1 and CutBookkeeperAuxContainer_v1 objects.
1137  It makes sure that the interface object uses the auxiliary object as store.
1138  Args:
1139  interface (CutBookkeeperContainer_v1): the interface class
1140  aux (CutBookkeeperAuxContainer_v1): auxiliary container object
1141  Returns
1142  dict: with the cycle number and last stream
1143  """
1144  if not interface or not aux:
1145  return {}
1146  interface.setStore(aux)
1147 
1148  max_cycle = -1
1149  input_stream = ''
1150 
1151  for cbk in interface:
1152  current_cycle = int(cbk.cycle())
1153  if current_cycle > max_cycle:
1154  max_cycle = current_cycle
1155  input_stream = str(cbk.inputStream())
1156 
1157  result = {
1158  'currentCutCycle': max_cycle,
1159  'currentCutInputStream': input_stream,
1160  }
1161  return result
1162 
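# Illustrative sketch (hypothetical values) of the dictionary returned by _extract_fields_cbk():
#   {'currentCutCycle': 2, 'currentCutInputStream': 'StreamAOD'}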
1163 
1164 def _extract_fields_fmd(interface=None, aux=None):
1165  """Turn static FileMetaData content into dictionary
1166 
1167  This function takes the FileMetaData_v1 and FileMetaDataAuxInfo_v1 objects.
1168  It makes sure that the interface object uses the auxiliary object as store.
1169  Next the two static variables of FileMetaDataAuxInfo_v1 are retrieved and
1170  added to the dictionary that is returned.
1171  Args:
1172  interface (FileMetaData_v1): the interface class
1173  aux (FileMetaDataAuxInfo_v1): auxiliary container object
1174  Returns
1175  dict: with the production release and dataType
1176  """
1177  import ROOT
1178  if not interface or not aux:
1179  return {}
1180  interface.setStore(aux)
1181  metaContent = {
1182  "productionRelease": ROOT.std.string(),
1183  "dataType": ROOT.std.string(),
1184  "runNumbers": ROOT.std.vector('unsigned int')(),
1185  "lumiBlocks": ROOT.std.vector('unsigned int')(),
1186  }
1187  # Note: using this for dynamic attributes returns empty content
1188  for k, v in metaContent.items():
1189  try:
1190  interface.value(getattr(interface, k), v)
1191  except AttributeError:
1192  interface.value(k, v)
1193  # Now return python objects
1194  result = {k: str(v) for k, v in metaContent.items() if type(v) is ROOT.std.string}
1195  result.update({k: list(v) for k, v in metaContent.items() if type(v) is ROOT.std.vector('unsigned int')})
1196  return result
1197 
1198 
1199 def _extract_fields_tmd(interface=None, aux=None):
1200  import ROOT
1201  BadAuxVarException = ROOT.SG.ExcBadAuxVar
1202  """Extract TruthMetaData content into dictionary
1203 
1204  This function takes the TruthMetaDataContainer_v1 and TruthMetaDataAuxContainer_v1 objects.
1205  It makes sure that the interface object uses the auxiliary object as store.
1206  Args:
1207  interface (TruthMetaDataContainer_v1): the interface class
1208  aux (TruthMetaDataAuxContainer_v1): auxiliary container object
1209  Returns
1210  dict
1211  """
1212  if not interface or not aux:
1213  return {}
1214  interface.setStore(aux)
1215 
1216  # return the first as we do not really expect more than one
1217  result = {}
1218  for tmd in interface:
1219  result['mcChannelNumber'] = tmd.mcChannelNumber()
1220 
1221  try:
1222  result['weightNames'] = list(tmd.weightNames())
1223  except BadAuxVarException:
1224  result['weightNames'] = []
1225 
1226  try:
1227  result['lhefGenerator'] = str(tmd.lhefGenerator())
1228  except BadAuxVarException:
1229  result['lhefGenerator'] = ''
1230 
1231  try:
1232  result['generators'] = str(tmd.generators())
1233  except BadAuxVarException:
1234  result['generators'] = ''
1235 
1236  try:
1237  result['evgenProcess'] = str(tmd.evgenProcess())
1238  except BadAuxVarException:
1239  result['evgenProcess'] = ''
1240 
1241  try:
1242  result['evgenTune'] = str(tmd.evgenTune())
1243  except BadAuxVarException:
1244  result['evgenTune'] = ''
1245 
1246  try:
1247  result['hardPDF'] = str(tmd.hardPDF())
1248  except BadAuxVarException:
1249  result['hardPDF'] = ''
1250 
1251  try:
1252  result['softPDF'] = str(tmd.softPDF())
1253  except BadAuxVarException:
1254  result['softPDF'] = ''
1255 
1256  return result
1257 
1258 
1259 """ Note: Deprecated. Legacy support for Run 2 AODs produced in release 21 or in release 22 prior to April 2021
1260 """
1261 def _extract_fields_triggermenu(interface, aux):
1262  if aux is None:
1263  return {}
1264 
1265  L1Items = []
1266  HLTChains = []
1267 
1268  try:
1269  interface.setStore( aux )
1270  if interface.size() > 0:
1271  # We make the assumption that the first stored SMK is
1272  # representative of all events in the input collection.
1273  firstMenu = interface.at(0)
1274  L1Items = [ _convert_value(item) for item in firstMenu.itemNames() ]
1275  HLTChains = [ _convert_value(chain) for chain in firstMenu.chainNames() ]
1276  except Exception as err: # noqa: F841
1277  msg.warn('Problem reading xAOD::TriggerMenu:')
1278 
1279  result = {}
1280  result['L1Items'] = L1Items
1281  result['HLTChains'] = HLTChains
1282 
1283  return result
1284 
1285 def _extract_fields_triggermenujson(interface, aux):
1286  result = {}
1287 
1288  try:
1289  interface.setStore( aux )
1290  if interface.size() > 0:
1291  # We make the assumption that the first stored SMK is
1292  # representative of all events in the input collection.
1293  firstMenu = interface.at(0)
1294  import json
1295  decoded = json.loads(firstMenu.payload())
1296  result['RAWTriggerMenuJson'] = firstMenu.payload()
1297  result['name'] = firstMenu.name()
1298  result['dbkey'] = firstMenu.key()
1299  if decoded['filetype'] == 'hltmenu':
1300  result['HLTChains'] = [ _convert_value(chain) for chain in decoded['chains'] ]
1301  elif decoded['filetype'] == 'l1menu':
1302  result['L1Items'] = [ _convert_value(item) for item in decoded['items'] ]
1303  elif decoded['filetype'] in ['bunchgroupset', 'hltprescale', 'l1prescale', 'hltmonitoringsummary']:
1304  return result
1305 
1306  else:
1307  msg.warn('Got an xAOD::TriggerMenuJson called {0} but only expecting hltmenu or l1menu'.format(decoded['filetype']))
1308  return {}
1309 
1310  except Exception as err: # noqa: F841
1311  msg.warn('Problem reading xAOD::TriggerMenuJson')
1312 
1313  return result
1314 
1315 def _convert_event_type_user_type(value):
1316  if 'user_type' in value:
1317  items = value['user_type'].split('#')[3:]
1318  for i in range(0, len(items), 2):
1319  value[items[i]] = _convert_value(items[i+1])
1320  return value
1321 
1322 def _convert_event_type_bitmask(value):
1323 
1324  types = None
1325  for key in value:
1326  if key == 'bit_mask':
1327  val = value[key]
1328 
1329  bitmask_length = len(val)
1330 
1331  is_simulation = False
1332  is_testbeam = False
1333  is_calibration = False
1334 
1335  if bitmask_length > 0: # ROOT.EventType.IS_SIMULATION
1336  is_simulation = val[0]
1337 
1338  if bitmask_length > 1: # ROOT.EventType.IS_TESTBEAM
1339  is_testbeam = val[1]
1340 
1341  if bitmask_length > 2: # ROOT.EventType.IS_CALIBRATION:
1342  is_calibration = val[2]
1343 
1344  types = [
1345  'IS_SIMULATION' if is_simulation else 'IS_DATA',
1346  'IS_TESTBEAM' if is_testbeam else 'IS_ATLAS',
1347  'IS_CALIBRATION' if is_calibration else 'IS_PHYSICS'
1348  ]
1349 
1350  value['type'] = types
1351  return value
1352 
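# Illustrative example (assumed input) of the bit-mask conversion above:
#   {'bit_mask': [True, False, False]}  ->  value['type'] == ['IS_SIMULATION', 'IS_ATLAS', 'IS_PHYSICS']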
1353 
1354 def make_lite(meta_dict):
1355  for filename, file_content in meta_dict.items():
1356  for key in file_content:
1357  if key in meta_dict[filename]['metadata_items'] and regexEventStreamInfo.match(meta_dict[filename]['metadata_items'][key]):
1358  for item in list(meta_dict[filename][key]):
1359  if item not in lite_primary_keys_to_keep:
1360  meta_dict[filename][key].pop(item)
1361 
1362  if '/TagInfo' in file_content:
1363 
1364 
1365  for item in list(meta_dict[filename]['/TagInfo']):
1366  if item not in lite_TagInfo_keys_to_keep:
1367  meta_dict[filename]['/TagInfo'].pop(item)
1368  return meta_dict
1369 
1370 
1371 def make_peeker(meta_dict):
1372  for filename, file_content in meta_dict.items():
1373  for key in file_content:
1374  if key in meta_dict[filename]['metadata_items'] and regexEventStreamInfo.match(meta_dict[filename]['metadata_items'][key]):
1375  keys_to_keep = [
1376  'lumiBlockNumbers',
1377  'runNumbers',
1378  'mc_event_number',
1379  'mc_channel_number',
1380  'eventTypes',
1381  'processingTags',
1382  'itemList'
1383  ]
1384  for item in list(meta_dict[filename][key]):
1385  if item not in keys_to_keep:
1386  meta_dict[filename][key].pop(item)
1387 
1388  if '/TagInfo' in file_content:
1389  keys_to_keep = [
1390  'beam_energy',
1391  'beam_type',
1392  'GeoAtlas',
1393  'IOVDbGlobalTag',
1394  'AODFixVersion',
1395  'AMITag',
1396  'project_name',
1397  'triggerStreamOfFile',
1398  'AtlasRelease',
1399  'specialConfiguration',
1400  'mc_campaign',
1401  'hepmc_version',
1402  'generators',
1403  'data_year',
1404  ]
1405  for item in list(meta_dict[filename]['/TagInfo']):
1406  if item not in keys_to_keep:
1407  meta_dict[filename]['/TagInfo'].pop(item)
1408 
1409  if '/Simulation/Parameters' in file_content:
1410  keys_to_keep = [
1411  'G4Version',
1412  'TruthStrategy',
1413  'SimBarcodeOffset',
1414  'RegenerationIncrement',
1415  'TRTRangeCut',
1416  'SimulationFlavour',
1417  'Simulator',
1418  'PhysicsList',
1419  'SimulatedDetectors',
1420  ]
1421  for item in list(meta_dict[filename]['/Simulation/Parameters']):
1422  if item not in keys_to_keep:
1423  meta_dict[filename]['/Simulation/Parameters'].pop(item)
1424 
1425  if '/Digitization/Parameters' in file_content:
1426  keys_to_keep = [
1427  'numberOfCollisions',
1428  'intraTrainBunchSpacing',
1429  'BeamIntensityPattern',
1430  'physicsList',
1431  'digiSteeringConf',
1432  'pileUp',
1433  'DigitizedDetectors',
1434  ]
1435  for item in list(meta_dict[filename]['/Digitization/Parameters']):
1436  if item not in keys_to_keep:
1437  meta_dict[filename]['/Digitization/Parameters'].pop(item)
1438 
1439  if 'CutBookkeepers' in file_content:
1440  keys_to_keep = [
1441  'currentCutCycle',
1442  'currentCutInputStream',
1443  ]
1444  for item in list(meta_dict[filename]['CutBookkeepers']):
1445  if item not in keys_to_keep:
1446  meta_dict[filename]['CutBookkeepers'].pop(item)
1447 
1448  if 'TruthMetaData' in file_content:
1449  keys_to_keep = [
1450  'mcChannelNumber',
1451  'weightNames',
1452  ]
1453  for item in list(meta_dict[filename]['TruthMetaData']):
1454  if item not in keys_to_keep:
1455  meta_dict[filename]['TruthMetaData'].pop(item)
1456 
1457  return meta_dict
1458 
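# Illustrative sketch, not part of the original module: a hypothetical helper
# showing which keys survive the peeker pruning on a hand-built metadata
# dictionary. The 'StreamAOD' key and all values are placeholders.
def _example_make_peeker():
    md = {'example.pool.root': {
        'metadata_items': {'StreamAOD': 'EventStreamInfo_p3'},
        'StreamAOD': {'runNumbers': [123456], 'numberOfEvents': 10},
        '/TagInfo': {'GeoAtlas': 'ATLAS-R3S-2021-03-02-00', 'non_peeker_key': 'dropped'},
    }}
    md = make_peeker(md)
    # 'numberOfEvents' and 'non_peeker_key' are removed; 'runNumbers',
    # 'GeoAtlas' and 'metadata_items' remain.
    return md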
1459 
1460 def promote_keys(meta_dict, mode):
1461  for filename, file_content in meta_dict.items():
1462  md = meta_dict[filename]
1463  for key in file_content:
1464  if key in md['metadata_items'] and regexEventStreamInfo.match(md['metadata_items'][key]):
1465  md.update(md[key])
1466 
1467  if 'eventTypes' in md and len(md['eventTypes']):
1468  et = md['eventTypes'][0]
1469  md['mc_event_number'] = et.get('mc_event_number', md['runNumbers'][0])
1470  if 'mc_channel_number' in et:
1471  md['mc_channel_number'] = et.get('mc_channel_number', None)
1472  md['eventTypes'] = et['type']
1473 
1474  # For very old files
1475  if 'GeoAtlas' in et:
1476  md['GeoAtlas'] = et.get('GeoAtlas', None)
1477  if 'IOVDbGlobalTag' in et:
1478  md['IOVDbGlobalTag'] = et.get('IOVDbGlobalTag', None)
1479 
1480  if 'lumiBlockNumbers' in md[key]:
1481  md['lumiBlockNumbers'] = md[key]['lumiBlockNumbers']
1482 
1483  if 'processingTags' in md[key]:
1484  md['processingTags'] = md[key]['processingTags']
1485 
1486  meta_dict[filename].pop(key)
1487  break
1488 
1489  if not isGaudiEnv() and key in md['metadata_items'] and 'FileMetaData' in key:
1490  if 'beamType' in md[key]:
1491  md['beam_type'] = md[key]['beamType']
1492 
1493  if 'runNumbers' in md[key]:
1494  md['runNumbers'] = md[key]['runNumbers']
1495 
1496  if 'mcProcID' in md[key]:
1497  md['mc_channel_number'] = int(md[key]['mcProcID'])
1498 
1499  if 'mcCampaign' in md[key]:
1500  md['mc_campaign'] = md[key]['mcCampaign']
1501 
1502  if 'dataYear' in md[key]:
1503  md['data_year'] = int(md[key]['dataYear'])
1504 
1505  if 'lumiBlocks' in md[key]:
1506  md['lumiBlockNumbers'] = md[key]['lumiBlocks']
1507 
1508  if mode == 'peeker' and 'amiTag' in md[key]:
1509  md['AMITag'] = md[key]['amiTag']
1510 
1511  if 'beamEnergy' in md[key]:
1512  md['beam_energy'] = int(md[key]['beamEnergy'])
1513 
1514  if 'geometryVersion' in md[key]:
1515  md['GeoAtlas'] = md[key]['geometryVersion']
1516 
1517  # EventType checks
1518  md['eventTypes'] = []
1519  if mode == 'peeker' and 'simFlavour' in md[key]:
1520  md['SimulationFlavour'] = md[key]['simFlavour']
1521 
1522  if 'simFlavour' in md[key] and ('FullG4' in md[key]['simFlavour'] or 'ATLFAST' in md[key]['simFlavour']):
1523  md['eventTypes'].append('IS_SIMULATION')
1524  else:
1525  md['eventTypes'].append('IS_DATA')
1526 
1527  if 'GeoAtlas' in md and 'ATLAS' in md['GeoAtlas']:
1528  md['eventTypes'].append('IS_ATLAS')
1529  # this is probably safe to assume for all files used in AnalysisBase
1530  md['eventTypes'].append('IS_PHYSICS')
1531  else:
1532  md['eventTypes'].append('IS_TESTBEAM')
1533 
1534  if 'dataType' in md[key]:
1535  md['processingTags'] = [md[key]['dataType']]
1536 
1537  if mode == 'peeker':
1538  if 'productionRelease' in md[key]:
1539  md['AtlasRelease'] = md[key]['productionRelease']
1540 
1541  if 'generatorsInfo' in md[key]:
1542  md['generators'] = md[key]['generatorsInfo']
1543 
1544  if mode == 'lite':
1545  meta_dict[filename].pop(key)
1546  break
1547 
1548  if '/TagInfo' in file_content:
1549  md.update(md['/TagInfo'])
1550  md.pop('/TagInfo')
1551 
1552  if '/Generation/Parameters' in file_content:
1553  md.update(md['/Generation/Parameters'])
1554  md.pop('/Generation/Parameters')
1555 
1556  if '/Simulation/Parameters' in file_content:
1557  md.update(md['/Simulation/Parameters'])
1558  md.pop('/Simulation/Parameters')
1559 
1560  if '/Digitization/Parameters' in file_content:
1561  md.update(md['/Digitization/Parameters'])
1562  md.pop('/Digitization/Parameters')
1563 
1564  if 'CutBookkeepers' in file_content:
1565  md.update(md['CutBookkeepers'])
1566  md.pop('CutBookkeepers')
1567 
1568  return meta_dict
1569 
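# Illustrative sketch, not part of the original module: a hypothetical helper
# showing how promote_keys() flattens an EventStreamInfo-like payload to the
# top level of a file's entry. The 'StreamAOD' key and all values are
# placeholders for this example.
def _example_promote_keys():
    md = {'example.pool.root': {
        'metadata_items': {'StreamAOD': 'EventStreamInfo_p3'},
        'StreamAOD': {
            'runNumbers': [123456],
            'lumiBlockNumbers': [1, 2],
            'processingTags': ['StreamAOD'],
            'eventTypes': [{'type': ['IS_DATA', 'IS_ATLAS', 'IS_PHYSICS']}],
        },
    }}
    md = promote_keys(md, mode='peeker')
    # md['example.pool.root'] now carries 'runNumbers', 'lumiBlockNumbers',
    # 'processingTags', 'eventTypes' and 'mc_event_number' at the top level;
    # the 'StreamAOD' payload itself has been popped.
    return md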
1570 
1571 def convert_itemList(metadata, layout):
1572  """
1573  This function rearranges the itemList values to match the format of 'eventdata_items', 'eventdata_itemsList'
1574  or 'eventdata_itemsDic' generated by the legacy file peeker tool.
1575  :param metadata: a dictionary obtained using read_metadata method.
1576  The mode for read_metadata must be 'peeker' or 'full'.
1577  :param layout: the mode in which the data will be converted:
1578  * for 'eventdata_items' use: layout= None
1579  * for 'eventdata_itemsList' use: layout= '#join'
1580  * for 'eventdata_itemsDic' use: layout= 'dict'
1581  """
1582 
1583  # Find the itemsList:
1584  item_list = None
1585 
1586  if 'itemList' in metadata:
1587  item_list = metadata['itemList']
1588  else:
1589 
1590  current_key = None
1591 
1592  for key in metadata:
1593  if 'metadata_items' in metadata and key in metadata['metadata_items'] and metadata['metadata_items'][key] == 'EventStreamInfo_p3':
1594  current_key = key
1595  break
1596  if current_key is not None:
1597  item_list = metadata[current_key]['itemList']
1598 
1599  if item_list is not None:
1600 
1601  if layout is None:
1602  return item_list
1603 
1604  elif layout == '#join':
1605  return [k + '#' + v for k, v in item_list if k]
1606 
1607 
1608  elif layout == 'dict':
1609  from collections import defaultdict
1610  dic = defaultdict(list)
1611 
1612  for k, v in item_list:
1613  dic[k].append(v)
1614 
1615  return dict(dic)
1616 
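# Illustrative sketch, not part of the original module: a hypothetical helper
# showing the three layouts on a hand-built metadata dictionary. The item
# list content is invented for the example.
def _example_convert_itemList():
    md = {'itemList': [('EventInfo', 'EventInfo'),
                       ('xAOD::JetContainer', 'AntiKt4EMTopoJets')]}
    pairs = convert_itemList(md, layout=None)      # list of (type, key) pairs, unchanged
    joined = convert_itemList(md, layout='#join')  # ['EventInfo#EventInfo', 'xAOD::JetContainer#AntiKt4EMTopoJets']
    grouped = convert_itemList(md, layout='dict')  # {'EventInfo': ['EventInfo'], 'xAOD::JetContainer': ['AntiKt4EMTopoJets']}
    return pairs, joined, grouped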
1617 
1618 def dataheader_nentries(infile):
1619  """Extract number of entries from DataHeader.
1620 
1621  infile: ROOT TFile object or filename string
1622  return: number of entries reported by the DataHeader object in infile,
1623  or None if the file contains no DataHeader object
1624  """
1625  import ROOT
1626  from PyUtils.PoolFile import PoolOpts
1627  if not isinstance(infile, ROOT.TFile):
1628  infile = ROOT.TFile.Open(infile)
1629 
1630  for name in {PoolOpts.TTreeNames.DataHeader, PoolOpts.RNTupleNames.DataHeader}:
1631  obj = infile.Get(name)
1632  msg.debug(f"dataheader_nentries: {name=}, {obj=}, {type(obj)=}")
1633  if not obj:
1634  continue
1635  if isinstance(obj, ROOT.TTree):
1636  return obj.GetEntriesFast()
1637  else:
1638  # check early to avoid scary ROOT read errors
1639  if ROOT.gROOT.GetVersionInt() < 63100:
1640  raise RuntimeError("ROOT ver. 6.31/01 or greater needed to read RNTuple files")
1641  if isRNTuple(obj):
1642  return ROOT.Experimental.RNTupleReader.Open(obj).GetNEntries()
1643  else:
1644  raise NotImplementedError(f"Keys of type {type(obj)!r} not supported")
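# Illustrative sketch, not part of the original module: a hypothetical helper
# wrapping dataheader_nentries(). 'filename' is assumed to point at an
# existing POOL file.
def _example_dataheader_nentries(filename):
    n = dataheader_nentries(filename)
    if n is None:
        msg.info('%s has no DataHeader', filename)
    return n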