ATLAS Offline Software
MetaReader.py
1 # Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
2 
3 import os
4 import re
5 from fnmatch import fnmatchcase
6 from AthenaCommon.Logging import logging
7 from AthenaConfiguration.AthConfigFlags import isGaudiEnv
8 from PyUtils.PoolFile import isRNTuple
9 from ROOT import gSystem
10 
11 msg = logging.getLogger('MetaReader')
12 
13 # compile the regexes needed in _convert_value() outside of it to optimize the code.
14 regexEventStreamInfo = re.compile(r'^EventStreamInfo(_p\d+)?$')
15 regexIOVMetaDataContainer = re.compile(r'^IOVMetaDataContainer(_p\d+)?$')
16 regexByteStreamMetadataContainer = re.compile(r'^ByteStreamMetadataContainer(_p\d+)?$')
17 regexXAODCutBookkeeperContainer = re.compile(r'^xAOD::CutBookkeeperContainer(_v\d+)?$')
18 regexXAODCutBookkeeperContainerAux = re.compile(r'^xAOD::CutBookkeeperAuxContainer(_v\d+)?$')
19 regexXAODEventFormat = re.compile(r'^xAOD::EventFormat(_v\d+)?$')
20 regexXAODFileMetaData = re.compile(r'^xAOD::FileMetaData(_v\d+)?$')
21 regexXAODFileMetaDataAux = re.compile(r'^xAOD::FileMetaDataAuxInfo(_v\d+)?$')
22 regexXAODFileMetaDataAuxDyn = re.compile(r'^(xAOD::)?FileMetaData.*AuxDyn(\.[a-zA-Z0-9]+)?$')
23 regexXAODTriggerMenu = re.compile(r'^DataVector<xAOD::TriggerMenu(_v\d+)?>$') # Run 2
24 regexXAODTriggerMenuAux = re.compile(r'^xAOD::TriggerMenuAuxContainer(_v\d+)?$') # Run 2
25 regexXAODTriggerMenuJson = re.compile(r'^DataVector<xAOD::TriggerMenuJson(_v\d+)?>$') # Run 3
26 regexXAODTriggerMenuJsonAux = re.compile(r'^xAOD::TriggerMenuJsonAuxContainer(_v\d+)?$') # Run 3
27 regexXAODTruthMetaData = re.compile(r'^DataVector<xAOD::TruthMetaData(_v\d+)?>$')
28 regexXAODTruthMetaDataAux = re.compile(r'^xAOD::TruthMetaDataAuxContainer(_v\d+)?$')
29 regex_cppname = re.compile(r'^([\w:]+)(<.*>)?$')
30 # regex_persistent_class = re.compile(r'^([a-zA-Z]+_p\d+::)*[a-zA-Z]+_p\d+$')
31 regex_persistent_class = re.compile(r'^([a-zA-Z]+(_[pv]\d+)?::)*[a-zA-Z]+_[pv]\d+$')
32 regex_BS_files = re.compile(r'^(\w+):.*((\.D?RAW\..*)|(\.data$))')
33 regex_URI_scheme = re.compile(r'^([A-Za-z0-9\+\.\-]+)\:')
34 
35 lite_primary_keys_to_keep = [
36  'lumiBlockNumbers', 'runNumbers', 'mc_event_number', 'mc_channel_number',
37  'eventTypes', 'processingTags', 'itemList']
38 lite_TagInfo_keys_to_keep = [
39  'beam_energy', 'beam_type', 'GeoAtlas', 'IOVDbGlobalTag',
40  'AODFixVersion', 'project_name', 'mc_campaign']
41 
42 trigger_keys = [
43  'TriggerConfigInfo',
44  'TriggerMenu', 'TriggerMenuJson_BG', 'TriggerMenuJson_HLT', 'TriggerMenuJson_HLTMonitoring', 'TriggerMenuJson_HLTPS', 'TriggerMenuJson_L1', 'TriggerMenuJson_L1PS',
45  '/TRIGGER/HLT/Groups', '/TRIGGER/HLT/HltConfigKeys', '/TRIGGER/HLT/Menu', '/TRIGGER/HLT/PrescaleKey', '/TRIGGER/HLT/Prescales',
46  '/TRIGGER/LVL1/ItemDef', '/TRIGGER/LVL1/Lvl1ConfigKey', '/TRIGGER/LVL1/Menu', '/TRIGGER/LVL1/Prescales', '/TRIGGER/LVL1/Thresholds',
47  'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenu', 'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_BG', 'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_HLT',
48  'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_HLTMonitoring', 'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_HLTPS',
49  'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_L1', 'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_L1PS',
50 ]
51 
52 
53 def read_metadata(filenames, file_type = None, mode = 'lite', promote = None, meta_key_filter = None,
54  unique_tag_info_values = True, ignoreNonExistingLocalFiles=False):
55  """
56  This tool is independent of the Athena framework and returns the metadata from a given file.
57  :param filenames: the input file, or list of files, from which metadata needs to be extracted.
58  :param file_type: the type of file: 'POOL' or 'BS' (bytestream: RAW, DRAW). If None, the type is auto-detected.
59  :param mode: how much metadata to return: 'tiny' returns only the keys 'file_guid', 'file_size', 'file_type'
60  and 'nentries'; 'lite' (the default), 'peeker', 'full' and 'iov' return increasingly detailed metadata.
61  :return: a dictionary of metadata, keyed by input file name.
62  """
63 
64  # make the mode available in the _convert methods
65  global _gbl_mode
66  _gbl_mode = mode
67 
68  from RootUtils import PyROOTFixes # noqa F401
69 
70  # Check if the input is a file or a list of files.
71  if isinstance(filenames, str):
72  filenames = [filenames]
73 
74  # Check if file_type is an allowed value
75  if file_type is not None:
76  if file_type not in ('POOL', 'BS'):
77  raise NameError('Allowed values for \'file_type\' parameter are: "POOL" or "BS": you provided "' + file_type + '"')
78  else:
79  msg.info('Forced file_type: {0}'.format(file_type))
80 
81  # Check the value of mode parameter
82  if mode not in ('tiny', 'lite', 'full', 'peeker', 'iov'):
83  raise NameError('Allowed values for "mode" parameter are: "tiny", "lite", "peeker", "iov" or "full"')
84 
85  if meta_key_filter is None:
86  meta_key_filter = []
87 
88  # Disable 'full' and 'iov' in non-Gaudi environments
89  if not isGaudiEnv():
90  if mode in ('full', 'iov'):
91  raise NameError('The following modes are not available in AnalysisBase: "iov" and "full"')
92 
93  msg.info('Current mode used: {0}'.format(mode))
94  msg.info('Current filenames: {0}'.format(filenames))
95 
96  if mode != 'full' and mode != 'iov' and len(meta_key_filter) > 0:
97  raise NameError('The meta_key_filter option can only be used with the "full" or "iov" modes')
98  if meta_key_filter:
99  msg.info('Filter used: {0}'.format(meta_key_filter))
100 
101  # create the storage object for metadata.
102  meta_dict = {}
103 
104  # ----- retrieve metadata from each of the input files ----------------------------------------------------------#
105  for filename in filenames:
106  meta_dict[filename] = {}
107  current_file_type = None
108  # Determine the file_type of the input and store this information into meta_dict
109  if not file_type:
110  if os.path.isfile(filename):
111 
112  if ignoreNonExistingLocalFiles and not regex_URI_scheme.match(filename) and gSystem.AccessPathName(filename): # NB: AccessPathName() returns True if the path is NOT accessible
113  msg.warn('Ignoring inaccessible file: {}'.format(filename))
114  continue
115 
116  with open(filename, 'rb') as binary_file:
117  magic_file = binary_file.read(4)
118 
119  if magic_file == 'root' or magic_file == b'root':
120  current_file_type = 'POOL'
121  meta_dict[filename]['file_type'] = 'POOL'
122 
123  else:
124  current_file_type = 'BS'
125  meta_dict[filename]['file_type'] = 'BS'
126 
127  # add information about the file_size of the input filename
128  meta_dict[filename]['file_size'] = os.path.getsize(filename)
129 
130  # determine the file type for the remote input files
131  else:
132  if regex_BS_files.match(filename):
133  current_file_type = 'BS'
134  meta_dict[filename]['file_type'] = 'BS'
135  else:
136  current_file_type = 'POOL'
137  meta_dict[filename]['file_type'] = 'POOL'
138 
139  # add information about the file_size of the input filename
140  meta_dict[filename]['file_size'] = None # None -> we can't read the file size for a remote file
141 
142  else:
143  current_file_type = file_type
144 
145  # ----- retrieves metadata from POOL files ------------------------------------------------------------------#
146  if current_file_type == 'POOL':
147 
148  if ignoreNonExistingLocalFiles and not regex_URI_scheme.match(filename) and gSystem.AccessPathName(filename): # NB: AccessPathName() returns True if the path is NOT accessible
149  msg.warn('Ignoring inaccessible file: {}'.format(filename))
150  continue
151 
152  import ROOT
153  # open the file using ROOT.TFile
154  current_file = ROOT.TFile.Open( _get_pfn(filename) )
155 
156  # get auto flush setting from the main EventData TTree
157  from PyUtils.PoolFile import PoolOpts
158  collectionTree = current_file.Get(PoolOpts.TTreeNames.EventData)
159  if isinstance(collectionTree, ROOT.TTree):
160  meta_dict[filename]['auto_flush'] = collectionTree.GetAutoFlush()
161 
162  # read and add the 'GUID' value
163  meta_dict[filename]['file_guid'] = _read_guid(filename)
164 
165  # read and add compression level and algorithm
166  meta_dict[filename]['file_comp_alg'] = current_file.GetCompressionAlgorithm()
167  meta_dict[filename]['file_comp_level'] = current_file.GetCompressionLevel()
168 
169  if isRNTuple( current_file.Get(PoolOpts.RNTupleNames.MetaData) ):
170  msg.warning(
171  "Reading in-file metadata from RNTuple is currently of limited support"
172  )
173  meta_dict[filename]["nentries"] = dataheader_nentries(current_file)
174 
175  def get_raw_md(filename):
176  """Helper function to read the raw metadata from RNTuple.
177  We use a subprocess because RNTupleReader writes its output to
178  std::ostream, which is not captured by PyROOT.
179 
180  Returns the raw metadata as a json-like string, but one cannot
181  assume it is valid json.
182 
183  Known issues of invalid json constructs:
184  - double quotes are not escaped in string values
185  - single-quoted strings
186  - nested json objects and lists inside single-quoted strings
187  """
188  import subprocess
189  import sys
190 
191  raw_md = f"""
192 from ROOT.Experimental import RNTupleReader
193 from ROOT import TFile
194 
195 
196 def read_md(infile):
197  file_handle = TFile.Open(infile)
198  md = file_handle.Get("MetaData")
199  reader = RNTupleReader.Open(md)
200  reader.Show(0)
201 
202 read_md("{filename}")
203  """
204  result = subprocess.run(
205  [sys.executable, "-c", raw_md],
206  capture_output=True,
207  text=True,
208  )
209  raw_data = "".join(result.stdout.split())
210  return raw_data.replace("\x00", '""')
211 
212  def extract_keys(json_like_string, keys):
213  """Helper for extracting key-value pairs from json-like string"""
214  import json
215 
216  result = {}
217  for key in keys:
218  if key == "m_eventTypes":
219  pattern = rf'"{key}":(\[\{{.*?\}}\])'
220  elif "beamEnergy" in key:
221  pattern = rf'"{key}":(\b[+]?([0-9]*\.[0-9]+|[0-9]+\.?[0-9]*)[eE][+]?([0-9]+)\b)'
222  else:
223  pattern = rf'"{key}"\s*:\s*(\[[^\]]*\]|"[^"]*"|\d+)'
224  match = re.search(pattern, json_like_string)
225  if match:
226  try:
227  result[key] = json.loads(match.group(1))
228  except json.JSONDecodeError:
229  pass
230  return result
231 
232  # metadata keys which can be relatively reliably extracted from RNTuple
233  keys_to_extract = [
234  "m_numberOfEvents",
235  "m_runNumbers",
236  "m_lumiBlockNumbers",
237  "m_processingTags",
238  "m_itemList",
239  "m_eventTypes",
240  "m_branchNames",
241  "m_classNames",
242  "FileMetaDataAuxDyn:amiTag",
243  "FileMetaDataAuxDyn:AODFixVersion",
244  "FileMetaDataAuxDyn:AODCalibVersion",
245  "FileMetaDataAuxDyn:beamEnergy",
246  "FileMetaDataAuxDyn:beamType",
247  "FileMetaDataAuxDyn:conditionsTag",
248  "FileMetaDataAuxDyn:dataYear",
249  "FileMetaDataAuxDyn:generatorsInfo",
250  "FileMetaDataAuxDyn:geometryVersion",
251  "FileMetaDataAuxDyn:isDataOverlay",
252  "FileMetaDataAuxDyn:mcCampaign",
253  "FileMetaDataAuxDyn:mcProcID",
254  "FileMetaDataAuxDyn:simFlavour",
255  "productionRelease",
256  "dataType",
257  ]
258 
259  result = extract_keys(get_raw_md(filename), keys_to_extract)
260 
261  item_list = []
262  from CLIDComps.clidGenerator import clidGenerator
263 
264  cgen = clidGenerator("")
265  for item in result["m_itemList"]:
266  item_list.append((cgen.getNameFromClid(item["_0"]), item["_1"].encode("utf-8")))
267  meta_dict[filename]["itemList"] = item_list
268  event_types = []
269  for event_type in result["m_eventTypes"]:
270  fields = {
271  key.removeprefix("m_"): value
272  for key, value in event_type.items()
273  }
274  fields = _convert_event_type_bitmask(fields)
275  fields = _convert_event_type_user_type(fields)
276  event_types.extend(fields["type"])
277  meta_dict[filename]["eventTypes"] = event_types
278  meta_dict[filename]["numberOfEvents"] = result["m_numberOfEvents"]
279  meta_dict[filename]["runNumbers"] = result["m_runNumbers"]
280  meta_dict[filename]["lumiBlockNumbers"] = result["m_lumiBlockNumbers"]
281  meta_dict[filename]["processingTags"] = result["m_processingTags"]
282 
283  meta_dict[filename]["EventFormat"] = {}
284  ef_items = {}
285  for branch_name, class_name in dict(
286  zip(
287  result["m_branchNames"],
288  result["m_classNames"],
289  )
290  ).items():
291  ef_items[branch_name] = class_name
292  meta_dict[filename]["EventFormat"] = ef_items
293 
294  meta_dict[filename]["FileMetaData"] = {}
295  for key in keys_to_extract:
296  try:
297  meta_dict[filename]["FileMetaData"][key.split(":")[1]] = (
298  result[key]
299  )
300  except (IndexError, KeyError):
301  continue
302  msg.debug(f"Read metadata from RNTuple: {meta_dict[filename]}")
303  return meta_dict
304 
305  # ----- read extra metadata required for 'lite' and 'full' modes ----------------------------------------#
306  if mode != 'tiny':
307  # select the one tree that contains the in-file metadata, named "MetaData"
308  metadata_tree = current_file.Get('MetaData')
309  # read the list of branches stored in the "MetaData" tree
310  metadata_branches = metadata_tree.GetListOfBranches()
311  nr_of_branches = metadata_branches.GetEntriesFast()
312 
313  # object to store the names of metadata containers and their corresponding class name.
314  meta_dict[filename]['metadata_items'] = {}
315 
316  # create a container for the list of filters used for the lite version
317  meta_filter = {}
318 
319  # set the name filters for the 'lite' mode
320  if mode == 'lite':
321  if isGaudiEnv():
322  meta_filter = {
323  '/TagInfo': 'IOVMetaDataContainer_p1',
324  'IOVMetaDataContainer_p1__TagInfo': 'IOVMetaDataContainer_p1',
325  '*': 'EventStreamInfo_p*'
326  }
327  else:
328  meta_filter = {
329  'FileMetaData': '*',
330  'FileMetaDataAux.': 'xAOD::FileMetaDataAuxInfo_v1',
331  }
332 
333  # add the extra name filters for the 'peeker' mode
334  if mode == 'peeker':
335  meta_filter.update({
336  'TriggerMenu': 'DataVector<xAOD::TriggerMenu_v1>', # R2 trigger metadata format AOD (deprecated)
337  'TriggerMenuAux.': 'xAOD::TriggerMenuAuxContainer_v1',
338  'DataVector<xAOD::TriggerMenu_v1>_TriggerMenu': 'DataVector<xAOD::TriggerMenu_v1>', # R2 trigger metadata format ESD (deprecated)
339  'xAOD::TriggerMenuAuxContainer_v1_TriggerMenuAux.': 'xAOD::TriggerMenuAuxContainer_v1',
340  'TriggerMenuJson_HLT': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format AOD
341  'TriggerMenuJson_HLTAux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
342  'TriggerMenuJson_HLTMonitoring': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format AOD
343  'TriggerMenuJson_HLTMonitoringAux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
344  'TriggerMenuJson_HLTPS': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format AOD
345  'TriggerMenuJson_HLTPSAux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
346  'TriggerMenuJson_L1': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format AOD
347  'TriggerMenuJson_L1Aux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
348  'TriggerMenuJson_L1PS': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format AOD
349  'TriggerMenuJson_L1PSAux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
350  'CutBookkeepers': 'xAOD::CutBookkeeperContainer_v1',
351  'CutBookkeepersAux.': 'xAOD::CutBookkeeperAuxContainer_v1',
352  'FileMetaData': '*',
353  'FileMetaDataAux.': 'xAOD::FileMetaDataAuxInfo_v1',
354  'TruthMetaData': '*',
355  'TruthMetaDataAux.': 'xAOD::TruthMetaDataAuxContainer_v1',
356  'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_HLT': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format ESD
357  'xAOD::TriggerMenuJsonAuxContainer_v1_TriggerMenuJson_HLTAux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
358  'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_HLTMonitoring': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format ESD
359  'xAOD::TriggerMenuJsonAuxContainer_v1_TriggerMenuJson_HLTMonitoringAux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
360  'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_HLTPS': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format ESD
361  'xAOD::TriggerMenuJsonAuxContainer_v1_TriggerMenuJson_HLTPSAux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
362  'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_L1': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format ESD
363  'xAOD::TriggerMenuJsonAuxContainer_v1_TriggerMenuJson_L1Aux.': 'xAOD::TriggerMenuJsonAuxContainer_v1',
364  'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_L1PS': 'DataVector<xAOD::TriggerMenuJson_v1>', # R3 trigger metadata format ESD
365  'xAOD::TriggerMenuJsonAuxContainer_v1_TriggerMenuJson_L1PSAux.': 'xAOD::TriggerMenuJsonAuxContainer_v1'
366  })
367 
368  if isGaudiEnv():
369  meta_filter.update({
370  '/TagInfo': 'IOVMetaDataContainer_p1',
371  'IOVMetaDataContainer_p1__TagInfo': 'IOVMetaDataContainer_p1',
372  '/Simulation/Parameters': 'IOVMetaDataContainer_p1',
373  '/Digitization/Parameters': 'IOVMetaDataContainer_p1',
374  '/EXT/DCS/MAGNETS/SENSORDATA': 'IOVMetaDataContainer_p1',
375  '*': 'EventStreamInfo_p*'
376  })
377 
378  if (mode == 'full' or mode == 'iov') and meta_key_filter:
379  meta_filter = {f: '*' for f in meta_key_filter}
380  # store the persistent class instances for each metadata container found in the POOL/ROOT file.
381  persistent_instances = {}
382  dynamic_fmd_items = {}
383 
384  # Protect non-Gaudi environments from metadata classes they don't know about
385  if not isGaudiEnv():
386  metadata_tree.SetBranchStatus("*", False)
387 
388  for i in range(0, nr_of_branches):
389  branch = metadata_branches.At(i)
390  name = branch.GetName()
391  if name == 'index_ref':
392  # skip the index branch
393  continue
394 
395  class_name = branch.GetClassName()
396 
397  if regexIOVMetaDataContainer.match(class_name):
398  name = name.replace('IOVMetaDataContainer_p1_', '').replace('_', '/')
399 
400  if regexIOVMetaDataContainer.match(class_name):
401  meta_dict[filename]['metadata_items'][name] = 'IOVMetaDataContainer'
402  elif regexByteStreamMetadataContainer.match(class_name):
403  meta_dict[filename]['metadata_items'][name] = 'ByteStreamMetadataContainer'
404  elif regexEventStreamInfo.match(class_name):
405  meta_dict[filename]['metadata_items'][name] = 'EventStreamInfo'
406  elif regexXAODFileMetaData.match(class_name):
407  meta_dict[filename]['metadata_items'][name] = 'FileMetaData'
408  elif regexXAODTruthMetaData.match(class_name):
409  meta_dict[filename]['metadata_items'][name] = 'TruthMetaData'
410  else:
411  type_name = class_name
412  if not type_name:
413  try:
414  type_name = branch.GetListOfLeaves()[0].GetTypeName()
415  except IndexError:
416  pass
417  meta_dict[filename]['metadata_items'][name] = type_name
418 
419  if len(meta_filter) > 0:
420  keep = False
421  for filter_key, filter_class in meta_filter.items():
422  if (filter_key.replace('/', '_') in name.replace('/', '_') or filter_key == '*') and fnmatchcase(class_name, filter_class):
423  if 'CutBookkeepers' in filter_key:
424  keep = filter_key == name
425  if keep:
426  break
427  else:
428  keep = True
429  break
430 
431  if not keep:
432  continue
433  else:
434  # CutBookkeepers should always be filtered:
435  if 'CutBookkeepers' in name and name not in ['CutBookkeepers', 'CutBookkeepersAux.']:
436  continue
437 
438  if not isGaudiEnv():
439  metadata_tree.SetBranchStatus(f"{name}*", True)
440 
441  # assign the corresponding persistent class based on the name of the metadata container
442  if regexEventStreamInfo.match(class_name):
443  if class_name.endswith('_p1'):
444  persistent_instances[name] = ROOT.EventStreamInfo_p1()
445  elif class_name.endswith('_p2'):
446  persistent_instances[name] = ROOT.EventStreamInfo_p2()
447  else:
448  persistent_instances[name] = ROOT.EventStreamInfo_p3()
449  elif regexIOVMetaDataContainer.match(class_name):
450  persistent_instances[name] = ROOT.IOVMetaDataContainer_p1()
451  elif regexXAODEventFormat.match(class_name):
452  persistent_instances[name] = ROOT.xAOD.EventFormat_v1()
453  elif regexXAODTriggerMenu.match(class_name) and _check_project() not in ['AthGeneration']:
454  persistent_instances[name] = ROOT.xAOD.TriggerMenuContainer_v1()
455  elif regexXAODTriggerMenuAux.match(class_name) and _check_project() not in ['AthGeneration']:
456  persistent_instances[name] = ROOT.xAOD.TriggerMenuAuxContainer_v1()
457  elif regexXAODTriggerMenuJson.match(class_name) and _check_project() not in ['AthGeneration']:
458  persistent_instances[name] = ROOT.xAOD.TriggerMenuJsonContainer_v1()
459  elif regexXAODTriggerMenuJsonAux.match(class_name) and _check_project() not in ['AthGeneration']:
460  persistent_instances[name] = ROOT.xAOD.TriggerMenuJsonAuxContainer_v1()
461  elif regexXAODCutBookkeeperContainer.match(class_name):
462  persistent_instances[name] = ROOT.xAOD.CutBookkeeperContainer_v1()
463  elif regexXAODCutBookkeeperContainerAux.match(class_name):
464  persistent_instances[name] = ROOT.xAOD.CutBookkeeperAuxContainer_v1()
465  elif regexXAODFileMetaData.match(class_name):
466  persistent_instances[name] = ROOT.xAOD.FileMetaData_v1()
467  elif regexXAODFileMetaDataAux.match(class_name):
468  persistent_instances[name] = ROOT.xAOD.FileMetaDataAuxInfo_v1()
469  elif regexXAODTruthMetaData.match(class_name):
470  persistent_instances[name] = ROOT.xAOD.TruthMetaDataContainer_v1()
471  elif regexXAODTruthMetaDataAux.match(class_name):
472  persistent_instances[name] = ROOT.xAOD.TruthMetaDataAuxContainer_v1()
473 
474  if name in persistent_instances:
475  branch.SetAddress(ROOT.AddressOf(persistent_instances[name]))
476 
477  # This creates a dict to store the dynamic attributes of the xAOD::FileMetaData
478  dynamicFMD = regexXAODFileMetaDataAuxDyn.match(name)
479  if dynamicFMD:
480  dynamicName = dynamicFMD.group().split('.')[-1]
481  dynamicType = regex_cppname.match(class_name)
482  if dynamicType:
483  # this should be a string
484  dynamic_fmd_items[dynamicName] = ROOT.std.string()
485  branch.SetAddress(ROOT.AddressOf(dynamic_fmd_items[dynamicName]))
486  else:
487  dynamic_fmd_items[dynamicName] = None
488 
489 
490  metadata_tree.GetEntry(0)
491 
492  # This loads the dynamic attributes of the xAOD::FileMetaData from the TTree
493  for key in dynamic_fmd_items:
494  if dynamic_fmd_items[key] is None:
495  try:
496  if key.startswith("is"):
497  # this is probably a boolean
498  dynamic_fmd_items[key] = getattr(metadata_tree, key) != '\x00'
499  else:
500  # this should be a float
501  dynamic_fmd_items[key] = getattr(metadata_tree, key)
502  except AttributeError:
503  # should not happen, but just ignore missing attributes
504  pass
505  else:
506  # convert ROOT.std.string objects to python equivalent
507  dynamic_fmd_items[key] = str(dynamic_fmd_items[key])
508 
509  # clean the meta-dict if the meta_key_filter flag is used, to return only the keys of interest
510  if meta_key_filter:
511  meta_dict[filename] = {}
512 
513  # read the metadata
514  for name, content in persistent_instances.items():
515  key = name
516  if hasattr(content, 'm_folderName'):
517  key = content.m_folderName
518 
519  # Some transition AODs contain both the Run2 and Run3 metadata formats. We only wish to read the Run3 format if such a file is encountered.
520  has_r3_trig_meta = ('TriggerMenuJson_HLT' in persistent_instances or 'DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_HLT' in persistent_instances)
521  aux = None
522  if key.startswith('TriggerMenuJson_') and not key.endswith('Aux.'): # interface container for the menu (AOD)
523  aux = persistent_instances[key+'Aux.']
524  elif key.startswith('DataVector<xAOD::TriggerMenuJson_v1>_TriggerMenuJson_') and not key.endswith('Aux.'): # interface container for the menu (ESD)
525  menuPart = key.split('_')[-1]
526  aux = persistent_instances['xAOD::TriggerMenuJsonAuxContainer_v1_TriggerMenuJson_'+menuPart+'Aux.']
527  elif key == 'TriggerMenu' and 'TriggerMenuAux.' in persistent_instances and not has_r3_trig_meta: # AOD case (legacy support, HLT and L1 menus)
528  aux = persistent_instances['TriggerMenuAux.']
529  elif key == 'DataVector<xAOD::TriggerMenu_v1>_TriggerMenu' and 'xAOD::TriggerMenuAuxContainer_v1_TriggerMenuAux.' in persistent_instances and not has_r3_trig_meta: # ESD case (legacy support, HLT and L1 menus)
530  aux = persistent_instances['xAOD::TriggerMenuAuxContainer_v1_TriggerMenuAux.']
531  elif (key == 'CutBookkeepers'
532  and 'CutBookkeepersAux.' in persistent_instances):
533  aux = persistent_instances['CutBookkeepersAux.']
534  elif key == 'CutBookkeepersAux.':
535  continue # Extracted using the interface object
536  elif (key == 'FileMetaData'
537  and 'FileMetaDataAux.' in persistent_instances):
538  aux = persistent_instances['FileMetaDataAux.']
539  elif (key == 'xAOD::FileMetaData_v1_FileMetaData'
540  and 'xAOD::FileMetaDataAuxInfo_v1_FileMetaDataAux.' in persistent_instances):
541  aux = persistent_instances['xAOD::FileMetaDataAuxInfo_v1_FileMetaDataAux.']
542  elif (key == 'TruthMetaData'
543  and 'TruthMetaDataAux.' in persistent_instances):
544  aux = persistent_instances['TruthMetaDataAux.']
545  elif key == 'TruthMetaDataAux.':
546  continue # Extracted using the interface object
547  elif 'Menu' in key and key.endswith('Aux.'):
548  continue # Extracted using the interface object
549 
550  return_obj = _convert_value(content, aux)
551 
552  if 'TriggerMenuJson' in key or ('TriggerMenu' in key and not has_r3_trig_meta):
553  if 'RAWTriggerMenuJson' in return_obj:
554  meta_dict[filename][key] = return_obj['RAWTriggerMenuJson']
555  del return_obj['RAWTriggerMenuJson']
556  if 'TriggerConfigInfo' not in meta_dict[filename]:
557  meta_dict[filename]['TriggerConfigInfo'] = {}
558  if 'dbkey' in return_obj:
559  meta_dict[filename]['TriggerConfigInfo'][key.split('_')[-1]] = {
560  'key' : return_obj['dbkey'],
561  'name': return_obj['name']
562  }
563  del return_obj['dbkey']
564  del return_obj['name']
565  if 'TriggerMenu' not in meta_dict[filename]:
566  meta_dict[filename]['TriggerMenu'] = {}
567  meta_dict[filename]['TriggerMenu'].update(return_obj)
568  elif "FileMetaData" in key:
569  if "FileMetaData" not in meta_dict[filename]:
570  meta_dict[filename]["FileMetaData"] = dynamic_fmd_items
571  meta_dict[filename]["FileMetaData"].update(return_obj)
572  else:
573  meta_dict[filename][key] = return_obj
574 
575  try:
576  # get the number of events from EventStreamInfo
577  esi_dict = next(key for key, value in meta_dict[filename].items()
578  if isinstance(value, dict) and "numberOfEvents" in value and
579  meta_dict[filename]["metadata_items"][key] == "EventStreamInfo")
580  msg.debug(f"{esi_dict=}")
581  meta_dict[filename]["nentries"] = meta_dict[filename][esi_dict]["numberOfEvents"]
582  except StopIteration as err:
583  msg.debug(f"Caught {err=}, {type(err)=}, falling back on opening the DataHeader"
584  " Container to read the number of entries")
585  meta_dict[filename]['nentries'] = dataheader_nentries(current_file)
586  msg.debug(f"{meta_dict[filename]['nentries']=}")
587 
588  if unique_tag_info_values and mode=='iov':
589  unique_tag_info_values = False
590  msg.info('disabling "unique_tag_info_values" option for "iov" mode')
591 
592  # This is a required workaround that temporarily fixes ATEAM-560, which originated from ATEAM-531
593  # ATEAM-560: https://its.cern.ch/jira/browse/ATEAM-560
594  # ATEAM-531: https://its.cern.ch/jira/browse/ATEAM-531
595  # This change removes the duplicate values present in some files due
596  # to the improper merging of two IOVMetaDataContainers.
597  if unique_tag_info_values:
598  msg.info('MetaReader is called with the parameter "unique_tag_info_values" set to True. '
599  'This is a workaround to remove all duplicate values from "/TagInfo" key')
600  if '/TagInfo' in meta_dict[filename]:
601  for key, value in meta_dict[filename]['/TagInfo'].items():
602  if isinstance(value, list) and value:
603  if len(unique_values := set(value)) > 1:
604  msg.warn(
605  f"Found multiple values for {key}: {value}. "
606  "Looking for possible duplicates."
607  )
608  maybe_ok = False
609  if key == "AMITag":
610  # curate duplicates like: ['s3681_q453', 's3681_q453_'] or ["s3681_q453", "q453_s3681"]
611  unique_amitags = set()
612  for amitags in unique_values:
613  unique_amitags.add(
614  "_".join({tag for tag in amitags.split("_") if tag})
615  )
616  if len(unique_amitags) == 1:
617  maybe_ok = True
618  elif key == "beam_energy":
619  # handle duplicates like: ['6500000', '6500000.0'] or [3, "3"]
620  unique_energies = set()
621  for energy in unique_values:
622  try:
623  energy = int(energy)
624  except ValueError:
625  try:
626  energy = float(energy)
627  except ValueError:
628  pass
629  unique_energies.add(energy)
630  if len(unique_energies) == 1:
631  maybe_ok = True
632  elif key in ["AtlasRelease", "IOVDbGlobalTag", "AODFixVersion"]:
633  maybe_ok = True
634  if maybe_ok:
635  msg.warn(
636  f"Multiple values for {key} may mean the same, or "
637  "the input file was produced in multi-step job. "
638  f"Ignoring all but the first entry: {key} = {value[0]}"
639  )
640  else:
641  raise ValueError(
642  f"{key} from /TagInfo contains more than 1 unique value: {value}"
643  )
644 
645  meta_dict[filename]['/TagInfo'][key] = value[0]
646 
647  if promote is None:
648  promote = mode == 'lite' or mode == 'peeker'
649 
650  # Filter the data and create a prettier output for the 'lite' mode
651  if mode == 'lite':
652  meta_dict = make_lite(meta_dict)
653 
654  if mode == 'peeker':
655  meta_dict = make_peeker(meta_dict)
656 
657  if promote:
658  meta_dict = promote_keys(meta_dict, mode)
659 
660  # In AnalysisBase the itemList must be grabbed another way
661  if not isGaudiEnv():
662  if isinstance(collectionTree, ROOT.TTree):
663  meta_dict[filename]['itemList'] = [ (b.GetClassName(), b.GetName()) for b in collectionTree.GetListOfBranches() ]
664 
665  # ----- retrieves metadata from bytestream (BS) files (RAW, DRAW) ------------------------------------------#
666  elif current_file_type == 'BS':
667 
668  if ignoreNonExistingLocalFiles and not regex_URI_scheme.match(filename) and not os.path.isfile(filename):
669  msg.warn('Ignoring not accessible file: {}'.format(filename))
670  continue
671 
672  import eformat
673 
674  # store the number of entries
675  bs = eformat.istream(filename)
676  meta_dict[filename]['nentries'] = bs.total_events
677 
678  # store the 'guid' value
679  data_reader = eformat.EventStorage.pickDataReader(filename)
680  assert data_reader, 'problem picking a data reader for file [%s]' % filename
681 
682  # set auto flush equivalent, which for BS is always 1
683  meta_dict[filename]['auto_flush'] = 1
684 
685  if hasattr(data_reader, 'GUID'):
686  meta_dict[filename]['file_guid'] = data_reader.GUID()
687 
688  # compression level and algorithm, for BS always ZLIB
689  meta_dict[filename]['file_comp_alg'] = 1
690  meta_dict[filename]['file_comp_level'] = 1
691 
692 
693  # for any mode other than 'tiny', also read the bytestream metadata strings
694  # ------------------------------------------------------------------------------------------------------#
695  if mode != "tiny":
696  bs_metadata = {}
697 
698  for md in data_reader.freeMetaDataStrings():
699  if md.startswith('Event type:'):
700  k = 'eventTypes'
701  v = []
702  if 'is sim' in md:
703  v.append('IS_SIMULATION')
704  else:
705  v.append('IS_DATA')
706 
707  if 'is atlas' in md:
708  v.append('IS_ATLAS')
709  else:
710  v.append('IS_TESTBEAM')
711 
712  if 'is physics' in md:
713  v.append('IS_PHYSICS')
714  else:
715  v.append('IS_CALIBRATION')
716 
717  bs_metadata[k] = tuple(v)
718 
719  elif md.startswith('GeoAtlas:'):
720  k = 'geometry'
721  v = md.split('GeoAtlas:')[1].strip()
722  bs_metadata[k] = v
723 
724  elif md.startswith('IOVDbGlobalTag:'):
725  k = 'conditions_tag'
726  v = md.split('IOVDbGlobalTag:')[1].strip()
727  bs_metadata[k] = v
728 
729  elif '=' in md:
730  k, v = md.split('=')
731  bs_metadata[k] = v
732 
733  bs_metadata['detectorMask'] = data_reader.detectorMask()
734  bs_metadata['runNumbers'] = data_reader.runNumber()
735  bs_metadata['lumiBlockNumbers'] = data_reader.lumiblockNumber()
736  bs_metadata['projectTag'] = data_reader.projectTag()
737  bs_metadata['stream'] = data_reader.stream()
738  #bs_metadata['beamType'] = getattr(data_reader, 'beamType')()
739  beamTypeNbr= data_reader.beamType()
740  #According to info from Rainer and Giuseppe the beam type is
741  #0: no beam
742  #1: protons
743  #2: ions
744  if (beamTypeNbr==0): bs_metadata['beamType'] = 'cosmics'
745  elif (beamTypeNbr==1 or beamTypeNbr==2): bs_metadata['beamType'] = 'collisions'
746  else: bs_metadata['beamType'] = 'unknown'
747 
748  bs_metadata['beamEnergy'] = data_reader.beamEnergy()
749 
750  meta_dict[filename]['eventTypes'] = bs_metadata.get('eventTypes', [])
751  meta_dict[filename]['GeoAtlas'] = bs_metadata.get('geometry', None)
752  meta_dict[filename]['conditions_tag'] = bs_metadata.get('conditions_tag', None)
753  meta_dict[filename]['project_name'] = bs_metadata.get('projectTag', None)
754 
755  # Promote up one level
756  meta_dict[filename]['detectorMask'] = [bs_metadata.get('detectorMask', None)]
757  meta_dict[filename]['runNumbers'] = [bs_metadata.get('runNumbers', None)]
758  meta_dict[filename]['lumiBlockNumbers'] = [bs_metadata.get('lumiBlockNumbers', None)]
759  meta_dict[filename]['beam_type'] = bs_metadata.get('beamType', None)
760  meta_dict[filename]['beam_energy'] = bs_metadata.get('beamEnergy', None)
761  meta_dict[filename]['stream'] = bs_metadata.get('stream', None)
762 
763  if not data_reader.good():
764  # event-less file...
765  meta_dict[filename]['runNumbers'].append(bs_metadata.get('run_number', 0))
766  meta_dict[filename]['lumiBlockNumbers'].append(bs_metadata.get('LumiBlock', 0))
767 
768  msg.debug(f"{meta_dict[filename]=}")
769  msg.debug(f"{len(bs)=}")
770  if len(bs):
771  evt = bs[0]
772  try:
773  evt.check()
774  meta_dict[filename]['processingTags'] = [tag.name for tag in evt.stream_tag()]
775  meta_dict[filename]['evt_number'] = [evt.global_id()]
776  meta_dict[filename]['run_type'] = [eformat.helper.run_type2string(evt.run_type())]
777  # ATLASRECTS-7126: If there is no valid lumiblock information
778  # in the ByteStream header, get the info from the first event.
779  if meta_dict[filename]['lumiBlockNumbers'] == [0]:
780  msg.debug('Taking the luminosity block info from the first event (%i)', evt.lumi_block())
781  meta_dict[filename]['lumiBlockNumbers'] = [evt.lumi_block()]
782  # ATLASRECTS-7126: If there is no valid run number information
783  # in the ByteStream header, get the info from the first event.
784  if meta_dict[filename]['runNumbers'] == [0]:
785  msg.debug('Taking the run number info from the first event (%i)', evt.run_no())
786  meta_dict[filename]['runNumbers'] = [evt.run_no()]
787  except RuntimeError as err:
788  msg.error("Issue while reading the first event of BS file %r: %r", filename, err)
789  else:
790  msg.debug(f"{meta_dict[filename]=}")
791  else:
792  msg.warn(f"Event-less BS {filename=}, will not read metadata information from the first event")
793 
794  # fix for ATEAM-122
795  if len(bs_metadata.get('eventTypes', '')) == 0: # see: ATMETADATA-6
796  evt_type = ['IS_DATA', 'IS_ATLAS']
797  if bs_metadata.get('stream', '').startswith('physics_'):
798  evt_type.append('IS_PHYSICS')
799  elif bs_metadata.get('stream', '').startswith('calibration_'):
800  evt_type.append('IS_CALIBRATION')
801  elif bs_metadata.get('projectTag', '').endswith('_calib'):
802  evt_type.append('IS_CALIBRATION')
803  else:
804  evt_type.append('Unknown')
805 
806  meta_dict[filename]['eventTypes'] = evt_type
807 
808  if mode == 'full':
809  meta_dict[filename]['bs_metadata'] = bs_metadata
810 
811  # ------ Throw an error if the user provides an unsupported file type --------------------------------------------#
812  else:
813  msg.error('Unknown filetype for {0} - there is no metadata interface for type {1}'.format(filename, current_file_type))
814  return None
815 
816  return meta_dict
817 
818 
819 def _check_project():
820  import os
821  if 'AthSimulation_DIR' in os.environ:
822  return 'AthSimulation'
823  if 'AthGeneration_DIR' in os.environ:
824  return 'AthGeneration'
825  return 'Athena'
826 
827 
828 def _get_pfn(filename):
829  """
830  Extract the actual filename if LFN or PFN notation is used
831  """
832  pfx = filename[0:4]
833  if pfx == 'PFN:':
834  return filename[4:]
835  if pfx == 'LFN:':
836  import subprocess, os
837  os.environ['POOL_OUTMSG_LEVEL'] = 'Error'
838  output = subprocess.check_output(['FClistPFN','-l',filename[4:]],text=True).split('\n')
839  if len(output) == 2:
840  return output[0]
841  msg.error( 'FClistPFN({0}) returned unexpected number of lines:'.format(filename) )
842  msg.error( '\n'.join(output) )
843  return filename
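# Illustrative behaviour (sketch): _get_pfn('PFN:/tmp/myAOD.pool.root') returns '/tmp/myAOD.pool.root',
# _get_pfn('myAOD.pool.root') returns 'myAOD.pool.root', and 'LFN:' names are resolved via FClistPFN.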
844 
845 
846 def _read_guid(filename):
847  """
848  Extracts the "guid" (Globally Unique Identifier) in POOL files and Grid catalogs) value from a POOL file.
849  :param filename: the input file
850  :return: the guid value, None if unavailable
851  """
852  import ROOT
853  root_file = ROOT.TFile.Open( _get_pfn(filename) )
854  params = root_file.Get('##Params')
855  if not params:
856  return
857  if not isinstance(params, ROOT.TTree):
858  raise NotImplementedError(f"Cannot extract GUID from object {params!r} of type {type(params)!r}")
859 
860  regex = re.compile(r'\[NAME=(\w+)\]\[VALUE=(.*)\]', re.ASCII)
861  fid = None
862 
863  for entry in params:
864  param = entry.GetLeaf('db_string').GetValueString()
865 
866  result = regex.match(param)
867  if result and result.group(1) == 'FID' :
868  # don't exit yet, it's the last FID entry that counts
869  fid = result.group(2)
870 
871  return fid
872 
873 
874 def _extract_fields(obj):
875  result = {}
876 
877  for meth in dir(obj):
878  if not meth.startswith('_'):
879  if meth.startswith('m_'):
880 
881  field_name = str(meth)[2:]
882  field_value = getattr(obj, meth)
883 
884  result[field_name] = _convert_value(field_value)
885 
886  return result
887 
888 
889 def _convert_value(value, aux = None):
890  cl=value.__class__
891 
892  if hasattr(cl, '__cpp_name__'):
893  result = regex_cppname.match(cl.__cpp_name__)
894  if result:
895  cpp_type = result.group(1)
896  if cpp_type == 'vector' or cpp_type == 'std::vector':
897  return [_convert_value(val) for val in value]
898  elif cpp_type == 'set' or cpp_type == 'std::set':
899  return {_convert_value(val) for val in value}
900  elif cpp_type == 'pair' or cpp_type == 'std::pair':
901  return _convert_value(value.first), _convert_value(value.second)
902 
903  # elif cpp_type == 'long':
904  # return int(value)
905 
906  elif cpp_type == 'string' or cpp_type == 'std::string':
907  return str(value)
908 
909  elif cl.__cpp_name__ == "_Bit_reference":
910  return bool(value)
911 
912  # special case which extracts data in a better format from IOVPayloadContainer_p1 class
913  elif cl.__cpp_name__ == 'IOVMetaDataContainer_p1':
914  return _extract_fields_iovmdc(value)
915 
916  elif cl.__cpp_name__ == 'IOVPayloadContainer_p1':
917  global _gbl_mode
918  if _gbl_mode == 'iov':
919  return _extract_iov_detailed(value)
920  else:
921  return _extract_fields_iov( value, range(value.m_attrIndexes.size()) )
922 
923  elif cl.__cpp_name__ == 'xAOD::EventFormat_v1':
924  return _extract_fields_ef(value)
925  elif cl.__cpp_name__ == 'xAOD::CutBookkeeperContainer_v1':
926  return _extract_fields_cbk(interface=value, aux=aux)
927  elif cl.__cpp_name__ == 'xAOD::FileMetaData_v1':
928  return _extract_fields_fmd(interface=value, aux=aux)
929  elif cl.__cpp_name__ == 'DataVector<xAOD::TruthMetaData_v1>':
930  return _extract_fields_tmd(interface=value, aux=aux)
931 
932  elif cl.__cpp_name__ == 'DataVector<xAOD::TriggerMenu_v1>' :
933  return _extract_fields_triggermenu(interface=value, aux=aux)
934 
935  elif cl.__cpp_name__ == 'DataVector<xAOD::TriggerMenuJson_v1>' :
936  return _extract_fields_triggermenujson(interface=value, aux=aux)
937 
938  elif (cl.__cpp_name__ == 'EventStreamInfo_p1' or
939  cl.__cpp_name__ == 'EventStreamInfo_p2' or
940  cl.__cpp_name__ == 'EventStreamInfo_p3'):
941  return _extract_fields_esi(value)
942 
943  elif (cl.__cpp_name__ == 'EventType_p1' or
944  cl.__cpp_name__ == 'EventType_p3'):
945  fields = _extract_fields(value)
946  fields = _convert_event_type_bitmask(fields)
947  fields = _convert_event_type_user_type(fields)
948  return fields
949 
950  elif regex_persistent_class.match(cl.__cpp_name__):
951  return _extract_fields(value)
952 
953  return value
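# Illustrative example of the conversions above (a sketch, not executed here):
#   v = ROOT.std.vector('std::string')()
#   v.push_back('tag')
#   _convert_value(v)  # -> ['tag']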
954 
955 
956 def _get_attribute_val(iov_container, attr_name, attr_idx):
957  type_idx = attr_idx.typeIndex()
958  obj_idx = attr_idx.objIndex()
959 
960  attr_value = None
961 
962  if type_idx == 0:
963  attr_value = bool(iov_container.m_bool[obj_idx])
964  elif type_idx == 1:
965  attr_value = int(iov_container.m_char[obj_idx])
966  elif type_idx == 2:
967  attr_value = int(iov_container.m_unsignedChar[obj_idx])
968  elif type_idx == 3:
969  attr_value = int(iov_container.m_short[obj_idx])
970  elif type_idx == 4:
971  attr_value = int(iov_container.m_unsignedShort[obj_idx])
972  elif type_idx == 5:
973  attr_value = int(iov_container.m_int[obj_idx])
974  elif type_idx == 6:
975  attr_value = int(iov_container.m_unsignedInt[obj_idx])
976  elif type_idx == 7:
977  attr_value = int(iov_container.m_long[obj_idx])
978  elif type_idx == 8:
979  attr_value = int(iov_container.m_unsignedLong[obj_idx])
980  elif type_idx == 9:
981  attr_value = int(iov_container.m_longLong[obj_idx])
982  elif type_idx == 10:
983  attr_value = int(iov_container.m_unsignedLongLong[obj_idx])
984  elif type_idx == 11:
985  attr_value = float(iov_container.m_float[obj_idx])
986  elif type_idx == 12:
987  attr_value = float(iov_container.m_double[obj_idx])
988  elif type_idx == 13:
989  # skipping this type because in IOVPayloadContainer_p1.h (line 120) it is commented out and not considered
990  pass
991  elif type_idx == 14:
992  attr_value = str(iov_container.m_string[obj_idx])
993  # Cleaning class name from value
994  if attr_value.startswith('IOVMetaDataContainer_p1_'):
995  attr_value = attr_value.replace('IOVMetaDataContainer_p1_', '')
996  if attr_value.startswith('_'):
997  attr_value = attr_value.replace('_', '/')
998  # Now it is clean
999  elif type_idx == 15:
1000  attr_value = int(iov_container.m_date[obj_idx])
1001  elif type_idx == 16:
1002  attr_value = int(iov_container.m_timeStamp[obj_idx])
1003  else:
1004  raise ValueError('Unknown type id {0} for attribute {1}'.format(type_idx, attr_name))
1005 
1006  return attr_value
1007 
1008 
1009 def _extract_fields_iov( iov_container, idx_range ):
1010  result = {}
1011 
1012  for idx in idx_range:
1013  attr_idx = iov_container.m_attrIndexes[idx]
1014  name_idx = attr_idx.nameIndex()
1015  attr_name = iov_container.m_attrName[name_idx]
1016  attr_value = _get_attribute_val(iov_container, attr_name, attr_idx)
1017 
1018  if attr_name not in result:
1019  result[attr_name] = [attr_value]
1020  else:
1021  result[attr_name].append(attr_value)
1022 
1023  max_element_count = 0
1024  for content in result.values():
1025  if len(content) > max_element_count:
1026  max_element_count = len(content)
1027 
1028  if max_element_count <= 1:
1029  for name, content in result.items():
1030  if len(content) > 0:
1031  result[name] = content[0]
1032  else:
1033  result[name] = None
1034 
1035  return result
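# Illustrative result shape (sketch, attribute names are examples): if every attribute occurs at
# most once the values are flattened to scalars, e.g. {'beam_type': 'collisions'}; otherwise all
# values are kept as lists.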
1036 
1037 
1038 def _extract_iov_detailed(iov_container):
1039  def iovtostr(t):
1040  # break iov time into high and low halves (run number usually in the higher half)
1041  return "({h}:{l})".format(h=t>>32, l=t&(2^32-1))
1042 
1043  def extract_list_collection(iov_container, listCollection ):
1044  result = {}
1045  ln = 0
1046  for list in listCollection.m_attrLists:
1047  ln = ln + 1
1048  lname = 'List {ln}: iov=[{s} ,{e}]; Channel#={ch}'.format(
1049  ln=ln, s=iovtostr(list.m_range.m_start),
1050  e=iovtostr(list.m_range.m_stop),
1051  ch=list.m_channelNumber )
1052  result[ lname ] = _extract_fields_iov( iov_container, range(list.m_firstIndex, list.m_lastIndex) )
1053  return result
1054 
1055  result = {}
1056  pn = 0
1057  for listCollection in iov_container.m_payloadVec:
1058  pn = pn + 1
1059  pname = 'IOV range {n}: [{s}, {e}]'.format(n=pn, s=iovtostr(listCollection.m_start),
1060  e=iovtostr(listCollection.m_stop))
1061  result[ pname ] = extract_list_collection(iov_container, listCollection )
1062  return result
1063 
1064 
1065 def _extract_fields_iovmdc(value):
1066  return _convert_value(value.m_payload)
1067 
1068 
1069 def _extract_fields_esi(value):
1070  result = {}
1071 
1072  result['eventTypes'] = []
1073  for eventType in value.m_eventTypes:
1074  result['eventTypes'].append(_convert_value(eventType))
1075 
1076  result['numberOfEvents'] = value.m_numberOfEvents
1077  result['runNumbers'] = list(value.m_runNumbers)
1078  result['lumiBlockNumbers'] = list(value.m_lumiBlockNumbers)
1079  result['processingTags'] = [str(v) for v in value.m_processingTags]
1080  result['itemList'] = []
1081 
1082  # Get the class name in the repository with CLID <clid>
1083  from CLIDComps.clidGenerator import clidGenerator
1084  cgen = clidGenerator("")
1085  for clid, sgkey in value.m_itemList:
1086  result['itemList'].append((cgen.getNameFromClid(clid), sgkey))
1087 
1088  return result
1089 
1090 
1091 def _extract_fields_ef(value):
1092  result = {}
1093 
1094  for ef_element in value:
1095  result[ef_element.first] = ef_element.second.className()
1096 
1097  return result
1098 
1099 
1100 def _extract_fields_cbk(interface=None, aux=None):
1101  """Extract CutBookkeeper content into dictionary
1102 
1103  This function takes the CutBookkeeperContainer_v1 and CutBookkeeperAuxContainer_v1 objects.
1104  It makes sure that the interface object uses the auxiliary object as store.
1105  Args:
1106  interface (CutBookkeeperContainer_v1): the interface class
1107  aux (CutBookkeeperAuxContainer_v1): auxiliary container object
1108  Returns:
1109  dict: with the cycle number and last stream
1110  """
1111  if not interface or not aux:
1112  return {}
1113  interface.setStore(aux)
1114 
1115  max_cycle = -1
1116  input_stream = ''
1117 
1118  for cbk in interface:
1119  current_cycle = int(cbk.cycle())
1120  if current_cycle > max_cycle:
1121  max_cycle = current_cycle
1122  input_stream = str(cbk.inputStream())
1123 
1124  result = {
1125  'currentCutCycle': max_cycle,
1126  'currentCutInputStream': input_stream,
1127  }
1128  return result
1129 
1130 
1131 def _extract_fields_fmd(interface=None, aux=None):
1132  """Turn static FileMetaData content into dictionary
1133 
1134  This function takes the FileMetaData_v1 and FileMetaDataAuxInfo_v1 objects.
1135  It makes sure that the interface object uses the auxiliary object as store.
1136  Next the static variables of FileMetaDataAuxInfo_v1 are retrieved and
1137  added to the dictionary that is returned.
1138  Args:
1139  interface (FileMetaData_v1): the interface class
1140  aux (FileMetaDataAuxInfo_v1): auxiliary container object
1141  Returns:
1142  dict: with the static content, e.g. the production release and dataType
1143  """
1144  import ROOT
1145  if not interface or not aux:
1146  return {}
1147  interface.setStore(aux)
1148  metaContent = {
1149  "productionRelease": ROOT.std.string(),
1150  "dataType": ROOT.std.string(),
1151  "runNumbers": ROOT.std.vector('unsigned int')(),
1152  "lumiBlocks": ROOT.std.vector('unsigned int')(),
1153  }
1154  # Note: using this for dynamic attributes returns empty content
1155  for k, v in metaContent.items():
1156  try:
1157  interface.value(getattr(interface, k), v)
1158  except AttributeError:
1159  interface.value(k, v)
1160  # Now return python objects
1161  result = {k: str(v) for k, v in metaContent.items() if type(v) is ROOT.std.string}
1162  result.update({k: list(v) for k, v in metaContent.items() if type(v) is ROOT.std.vector('unsigned int')})
1163  return result
1164 
1165 
1166 def _extract_fields_tmd(interface=None, aux=None):
1167  """Extract TruthMetaData content into dictionary
1168 
1169  This function takes the TruthMetaDataContainer_v1 and TruthMetaDataAuxContainer_v1 objects.
1170  It makes sure that the interface object uses the auxiliary object as store.
1171  Args:
1172  interface (TruthMetaDataContainer_v1): the interface class
1173  aux (TruthMetaDataAuxContainer_v1): auxiliary container object
1174  Returns:
1175  dict
1176  """
1177  import ROOT
1178  BadAuxVarException = ROOT.SG.ExcBadAuxVar
1179  if not interface or not aux:
1180  return {}
1181  interface.setStore(aux)
1182 
1183  # return the first as we do not really expect more than one
1184  result = {}
1185  for tmd in interface:
1186  result['mcChannelNumber'] = tmd.mcChannelNumber()
1187 
1188  try:
1189  result['weightNames'] = list(tmd.weightNames())
1190  except BadAuxVarException:
1191  result['weightNames'] = []
1192 
1193  try:
1194  result['lhefGenerator'] = str(tmd.lhefGenerator())
1195  except BadAuxVarException:
1196  result['lhefGenerator'] = ''
1197 
1198  try:
1199  result['generators'] = str(tmd.generators())
1200  except BadAuxVarException:
1201  result['generators'] = ''
1202 
1203  try:
1204  result['evgenProcess'] = str(tmd.evgenProcess())
1205  except BadAuxVarException:
1206  result['evgenProcess'] = ''
1207 
1208  try:
1209  result['evgenTune'] = str(tmd.evgenTune())
1210  except BadAuxVarException:
1211  result['evgenTune'] = ''
1212 
1213  try:
1214  result['hardPDF'] = str(tmd.hardPDF())
1215  except BadAuxVarException:
1216  result['hardPDF'] = ''
1217 
1218  try:
1219  result['softPDF'] = str(tmd.softPDF())
1220  except BadAuxVarException:
1221  result['softPDF'] = ''
1222 
1223  return result
1224 
1225 
1226 """ Note: Deprecated. Legacy support for Run 2 AODs produced in release 21 or in release 22 prior to April 2021
1227 """
1228 def _extract_fields_triggermenu(interface, aux):
1229  if aux is None:
1230  return {}
1231 
1232  L1Items = []
1233  HLTChains = []
1234 
1235  try:
1236  interface.setStore( aux )
1237  if interface.size() > 0:
1238  # We make the assumption that the first stored SMK is
1239  # representative of all events in the input collection.
1240  firstMenu = interface.at(0)
1241  L1Items = [ _convert_value(item) for item in firstMenu.itemNames() ]
1242  HLTChains = [ _convert_value(chain) for chain in firstMenu.chainNames() ]
1243  except Exception as err: # noqa: F841
1244  msg.warn('Problem reading xAOD::TriggerMenu:')
1245 
1246  result = {}
1247  result['L1Items'] = L1Items
1248  result['HLTChains'] = HLTChains
1249 
1250  return result
1251 
1252 def _extract_fields_triggermenujson(interface, aux):
1253  result = {}
1254 
1255  try:
1256  interface.setStore( aux )
1257  if interface.size() > 0:
1258  # We make the assumption that the first stored SMK is
1259  # representative of all events in the input collection.
1260  firstMenu = interface.at(0)
1261  import json
1262  decoded = json.loads(firstMenu.payload())
1263  result['RAWTriggerMenuJson'] = firstMenu.payload()
1264  result['name'] = firstMenu.name()
1265  result['dbkey'] = firstMenu.key()
1266  if decoded['filetype'] == 'hltmenu':
1267  result['HLTChains'] = [ _convert_value(chain) for chain in decoded['chains'] ]
1268  elif decoded['filetype'] == 'l1menu':
1269  result['L1Items'] = [ _convert_value(item) for item in decoded['items'] ]
1270  elif decoded['filetype'] in ['bunchgroupset', 'hltprescale', 'l1prescale', 'hltmonitoringsummary']:
1271  return result
1272 
1273  else:
1274  msg.warn('Got an xAOD::TriggerMenuJson called {0} but only expecting hltmenu or l1menu'.format(decoded['filetype']))
1275  return {}
1276 
1277  except Exception as err: # noqa: F841
1278  msg.warn('Problem reading xAOD::TriggerMenuJson')
1279 
1280  return result
1281 
1282 def _convert_event_type_user_type(value):
1283  if 'user_type' in value:
1284  items = value['user_type'].split('#')[3:]
1285  for i in range(0, len(items), 2):
1286  value[items[i]] = _convert_value(items[i+1])
1287  return value
1288 
1289 def _convert_event_type_bitmask(value):
1290 
1291  types = None
1292  for key in value:
1293  if key == 'bit_mask':
1294  val = value[key]
1295 
1296  bitmask_length = len(val)
1297 
1298  is_simulation = False
1299  is_testbeam = False
1300  is_calibration = False
1301 
1302  if bitmask_length > 0: # ROOT.EventType.IS_SIMULATION
1303  is_simulation = val[0]
1304 
1305  if bitmask_length > 1: # ROOT.EventType.IS_TESTBEAM
1306  is_testbeam = val[1]
1307 
1308  if bitmask_length > 2: # ROOT.EventType.IS_CALIBRATION:
1309  is_calibration = val[2]
1310 
1311  types = [
1312  'IS_SIMULATION' if is_simulation else 'IS_DATA',
1313  'IS_TESTBEAM' if is_testbeam else 'IS_ATLAS',
1314  'IS_CALIBRATION' if is_calibration else 'IS_PHYSICS'
1315  ]
1316 
1317  value['type'] = types
1318  return value
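# Illustrative example (sketch): a bit_mask of [1, 0, 0] (IS_SIMULATION set, IS_TESTBEAM and
# IS_CALIBRATION unset) yields value['type'] == ['IS_SIMULATION', 'IS_ATLAS', 'IS_PHYSICS'].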
1319 
1320 
1321 def make_lite(meta_dict):
1322  for filename, file_content in meta_dict.items():
1323  for key in file_content:
1324  if key in meta_dict[filename]['metadata_items'] and regexEventStreamInfo.match(meta_dict[filename]['metadata_items'][key]):
1325  for item in list(meta_dict[filename][key]):
1326  if item not in lite_primary_keys_to_keep:
1327  meta_dict[filename][key].pop(item)
1328 
1329  if '/TagInfo' in file_content:
1330 
1331 
1332  for item in list(meta_dict[filename]['/TagInfo']):
1333  if item not in lite_TagInfo_keys_to_keep:
1334  meta_dict[filename]['/TagInfo'].pop(item)
1335  return meta_dict
1336 
1337 
1338 def make_peeker(meta_dict):
1339  for filename, file_content in meta_dict.items():
1340  for key in file_content:
1341  if key in meta_dict[filename]['metadata_items'] and regexEventStreamInfo.match(meta_dict[filename]['metadata_items'][key]):
1342  keys_to_keep = [
1343  'lumiBlockNumbers',
1344  'runNumbers',
1345  'mc_event_number',
1346  'mc_channel_number',
1347  'eventTypes',
1348  'processingTags',
1349  'itemList'
1350  ]
1351  for item in list(meta_dict[filename][key]):
1352  if item not in keys_to_keep:
1353  meta_dict[filename][key].pop(item)
1354 
1355  if '/TagInfo' in file_content:
1356  keys_to_keep = [
1357  'beam_energy',
1358  'beam_type',
1359  'GeoAtlas',
1360  'IOVDbGlobalTag',
1361  'AODFixVersion',
1362  'AMITag',
1363  'project_name',
1364  'triggerStreamOfFile',
1365  'AtlasRelease',
1366  'specialConfiguration',
1367  'mc_campaign',
1368  'hepmc_version',
1369  'generators',
1370  'data_year',
1371  ]
1372  for item in list(meta_dict[filename]['/TagInfo']):
1373  if item not in keys_to_keep:
1374  meta_dict[filename]['/TagInfo'].pop(item)
1375 
1376  if '/Simulation/Parameters' in file_content:
1377  keys_to_keep = [
1378  'G4Version',
1379  'TruthStrategy',
1380  'SimBarcodeOffset',
1381  'RegenerationIncrement',
1382  'TRTRangeCut',
1383  'SimulationFlavour',
1384  'Simulator',
1385  'PhysicsList',
1386  'SimulatedDetectors',
1387  ]
1388  for item in list(meta_dict[filename]['/Simulation/Parameters']):
1389  if item not in keys_to_keep:
1390  meta_dict[filename]['/Simulation/Parameters'].pop(item)
1391 
1392  if '/Digitization/Parameters' in file_content:
1393  keys_to_keep = [
1394  'numberOfCollisions',
1395  'intraTrainBunchSpacing',
1396  'BeamIntensityPattern',
1397  'physicsList',
1398  'digiSteeringConf',
1399  'pileUp',
1400  'DigitizedDetectors',
1401  ]
1402  for item in list(meta_dict[filename]['/Digitization/Parameters']):
1403  if item not in keys_to_keep:
1404  meta_dict[filename]['/Digitization/Parameters'].pop(item)
1405 
1406  if 'CutBookkeepers' in file_content:
1407  keys_to_keep = [
1408  'currentCutCycle',
1409  'currentCutInputStream',
1410  ]
1411  for item in list(meta_dict[filename]['CutBookkeepers']):
1412  if item not in keys_to_keep:
1413  meta_dict[filename]['CutBookkeepers'].pop(item)
1414 
1415  if 'TruthMetaData' in file_content:
1416  keys_to_keep = [
1417  'mcChannelNumber',
1418  'weightNames',
1419  ]
1420  for item in list(meta_dict[filename]['TruthMetaData']):
1421  if item not in keys_to_keep:
1422  meta_dict[filename]['TruthMetaData'].pop(item)
1423 
1424  return meta_dict
1425 
1426 
1427 def promote_keys(meta_dict, mode):
1428  for filename, file_content in meta_dict.items():
1429  md = meta_dict[filename]
1430  for key in file_content:
1431  if key in md['metadata_items'] and regexEventStreamInfo.match(md['metadata_items'][key]):
1432  md.update(md[key])
1433 
1434  if 'eventTypes' in md and len(md['eventTypes']):
1435  et = md['eventTypes'][0]
1436  md['mc_event_number'] = et.get('mc_event_number', md['runNumbers'][0])
1437  if 'mc_channel_number' in et:
1438  md['mc_channel_number'] = et.get('mc_channel_number', None)
1439  md['eventTypes'] = et['type']
1440 
1441  # For very old files
1442  if 'GeoAtlas' in et:
1443  md['GeoAtlas'] = et.get('GeoAtlas', None)
1444  if 'IOVDbGlobalTag' in et:
1445  md['IOVDbGlobalTag'] = et.get('IOVDbGlobalTag', None)
1446 
1447  if 'lumiBlockNumbers' in md[key]:
1448  md['lumiBlockNumbers'] = md[key]['lumiBlockNumbers']
1449 
1450  if 'processingTags' in md[key]:
1451  md['processingTags'] = md[key]['processingTags']
1452 
1453  meta_dict[filename].pop(key)
1454  break
1455 
1456  if not isGaudiEnv() and key in md['metadata_items'] and 'FileMetaData' in key:
1457  if 'beamType' in md[key]:
1458  md['beam_type'] = md[key]['beamType']
1459 
1460  if 'runNumbers' in md[key]:
1461  md['runNumbers'] = md[key]['runNumbers']
1462 
1463  if 'mcProcID' in md[key]:
1464  md['mc_channel_number'] = int(md[key]['mcProcID'])
1465 
1466  if 'mcCampaign' in md[key]:
1467  md['mc_campaign'] = md[key]['mcCampaign']
1468 
1469  if 'dataYear' in md[key]:
1470  md['data_year'] = int(md[key]['dataYear'])
1471 
1472  if 'lumiBlocks' in md[key]:
1473  md['lumiBlockNumbers'] = md[key]['lumiBlocks']
1474 
1475  if mode == 'peeker' and 'amiTag' in md[key]:
1476  md['AMITag'] = md[key]['amiTag']
1477 
1478  if 'beamEnergy' in md[key]:
1479  md['beam_energy'] = int(md[key]['beamEnergy'])
1480 
1481  if 'geometryVersion' in md[key]:
1482  md['GeoAtlas'] = md[key]['geometryVersion']
1483 
1484  # EventType checks
1485  md['eventTypes'] = []
1486  if mode == 'peeker' and 'simFlavour' in md[key]:
1487  md['SimulationFlavour'] = md[key]['simFlavour']
1488 
1489  if 'simFlavour' in md[key] and ('FullG4' in md[key]['simFlavour'] or 'ATLFAST' in md[key]['simFlavour']):
1490  md['eventTypes'].append('IS_SIMULATION')
1491  else:
1492  md['eventTypes'].append('IS_DATA')
1493 
1494  if 'GeoAtlas' in md and 'ATLAS' in md['GeoAtlas']:
1495  md['eventTypes'].append('IS_ATLAS')
1496  # this is probably safe to assume for all files used in AnalysisBase
1497  md['eventTypes'].append('IS_PHYSICS')
1498  else:
1499  md['eventTypes'].append('IS_TESTBEAM')
1500 
1501  if 'dataType' in md[key]:
1502  md['processingTags'] = [md[key]['dataType']]
1503 
1504  if mode == 'peeker':
1505  if 'productionRelease' in md[key]:
1506  md['AtlasRelease'] = md[key]['productionRelease']
1507 
1508  if 'generatorsInfo' in md[key]:
1509  md['generators'] = md[key]['generatorsInfo']
1510 
1511  if mode == 'lite':
1512  meta_dict[filename].pop(key)
1513  break
1514 
1515  if '/TagInfo' in file_content:
1516  md.update(md['/TagInfo'])
1517  md.pop('/TagInfo')
1518 
1519  if '/Generation/Parameters' in file_content:
1520  md.update(md['/Generation/Parameters'])
1521  md.pop('/Generation/Parameters')
1522 
1523  if '/Simulation/Parameters' in file_content:
1524  md.update(md['/Simulation/Parameters'])
1525  md.pop('/Simulation/Parameters')
1526 
1527  if '/Digitization/Parameters' in file_content:
1528  md.update(md['/Digitization/Parameters'])
1529  md.pop('/Digitization/Parameters')
1530 
1531  if 'CutBookkeepers' in file_content:
1532  md.update(md['CutBookkeepers'])
1533  md.pop('CutBookkeepers')
1534 
1535  return meta_dict
1536 
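As a rough illustration, promote_keys hoists the EventStreamInfo-derived fields and the /TagInfo entries to the top level of each file's dictionary; the input below is hand-built and hypothetical, and the import path is assumed:

from PyUtils.MetaReader import promote_keys  # assumed import path within an Athena release

meta = {
    'myAOD.pool.root': {                                   # hypothetical file name
        'metadata_items': {'StreamAOD': 'EventStreamInfo_p3'},
        'StreamAOD': {
            'runNumbers': [284500],
            'lumiBlockNumbers': [1],
            'processingTags': ['StreamAOD'],
            'eventTypes': [{'type': ['IS_SIMULATION'],
                            'mc_channel_number': 410470,
                            'mc_event_number': 1}],
            'itemList': [],
        },
        '/TagInfo': {'beam_type': 'collisions', 'GeoAtlas': 'ATLAS-R2-2016-01-00-01'},
    }
}
promoted = promote_keys(meta, mode='lite')
# Top-level access afterwards, e.g. promoted['myAOD.pool.root']['runNumbers'],
# ...['mc_channel_number'] and ...['GeoAtlas']; the promoted sub-dictionaries
# ('StreamAOD', '/TagInfo') are popped from the per-file entry.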
1537 
1538 def convert_itemList(metadata, layout):
1539  """
1540  This function will rearrange the itemList values to match the format of 'eventdata_items', 'eventdata_itemsList'
1541  or 'eventdata_itemsDic' generated with the legacy file peeker tool
1542  :param metadata: a dictionary obtained using read_metadata method.
1543  The mode for read_metadata must be 'peeker' or 'full'
1544  :param layout: the mode in which the data will be converted:
1545  * for 'eventdata_items' use: layout= None
1546  * for 'eventdata_itemsList' use: layout= '#join'
1547  * for 'eventdata_itemsDic' use: layout= 'dict'
1548  """
1549 
1550  # Find the itemsList:
1551  item_list = None
1552 
1553  if 'itemList' in metadata:
1554  item_list = metadata['itemList']
1555  else:
1556 
1557  current_key = None
1558 
1559  for key in metadata:
1560  if 'metadata_items' in metadata and key in metadata['metadata_items'] and metadata['metadata_items'][key] == 'EventStreamInfo_p3':
1561  current_key = key
1562  break
1563  if current_key is not None:
1564  item_list = metadata[current_key]['itemList']
1565 
1566  if item_list is not None:
1567 
1568  if layout is None:
1569  return item_list
1570 
1571  elif layout == '#join':
1572  return [k + '#' + v for k, v in item_list if k]
1573 
1574 
1575  elif layout == 'dict':
1576  from collections import defaultdict
1577  dic = defaultdict(list)
1578 
1579  for k, v in item_list:
1580  dic[k].append(v)
1581 
1582  return dict(dic)
1583 
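A short sketch of the three layout modes, with a hand-built dictionary carrying only the relevant itemList key (the container/key pairs are hypothetical):

from PyUtils.MetaReader import convert_itemList  # assumed import path within an Athena release

md = {'itemList': [('xAOD::ElectronContainer', 'Electrons'),
                   ('xAOD::MuonContainer', 'Muons'),
                   ('xAOD::MuonContainer', 'CalibratedMuons')]}

convert_itemList(md, layout=None)     # the (type, key) pairs as stored
convert_itemList(md, layout='#join')  # ['xAOD::ElectronContainer#Electrons', ...]
convert_itemList(md, layout='dict')   # {'xAOD::ElectronContainer': ['Electrons'],
                                      #  'xAOD::MuonContainer': ['Muons', 'CalibratedMuons']}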
1584 
1585 def dataheader_nentries(infile):
1586  """Extract number of entries from DataHeader.
1587 
1588  infile ROOT TFile object or filename string
1589  return Number of entries as returned by DataHeader object in infile,
1590  None in absence of DataHeader object
1591  """
1592  import ROOT
1593  from PyUtils.PoolFile import PoolOpts
1594  if not isinstance(infile, ROOT.TFile):
1595  infile = ROOT.TFile.Open(infile)
1596 
1597  for name in {PoolOpts.TTreeNames.DataHeader, PoolOpts.RNTupleNames.DataHeader}:
1598  obj = infile.Get(name)
1599  msg.debug(f"dataheader_nentries: {name=}, {obj=}, {type(obj)=}")
1600  if not obj:
1601  continue
1602  if isinstance(obj, ROOT.TTree):
1603  return obj.GetEntriesFast()
1604  else:
1605  # check early to avoid scary ROOT read errors
1606  if ROOT.gROOT.GetVersionInt() < 63100:
1607  raise RuntimeError("ROOT ver. 6.31/01 or greater needed to read RNTuple files")
1608  if isRNTuple(obj):
1609  return ROOT.Experimental.RNTupleReader.Open(obj).GetNEntries()
1610  else:
1611  raise NotImplementedError(f"Keys of type {type(obj)!r} not supported")
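A usage sketch, assuming an Athena release providing PyUtils and ROOT; the file name is hypothetical:

from PyUtils.MetaReader import dataheader_nentries  # assumed import path within an Athena release

n = dataheader_nentries('myAOD.pool.root')  # also accepts an already-open ROOT.TFile
if n is None:
    print('no DataHeader object found in the file')
else:
    print(f'DataHeader reports {n} entries')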