ATLAS Offline Software
python.Util Namespace Reference

Functions

def getTableName (name)
 
def toCSV (fileName, metadata, HLTTriggers, readL1=False)
 
def toJson (fileName, metadata, L1Triggers, HLTTriggers)
 
def toROOT (fileName, triggers)
 
def getMetadata (inputFile)
 
def populateTriggers (inputFile, metadata, globalGroupDict, filter)
 
def slice_dictionary (directory, object_key)
 
def populateScanTriggers (inputFile, metadata)
 
def getGlobalGroup (inputFile, filter)
 
def readDBFromAMI (amiTag)
 
def exploreTree (inputFile, dumpSummary=False, underflowThreshold=0.1, overflowThreshold=0.1, maxRanges=5, skipRanges=-1)
 
def getWalltime (inputFile, rootName)
 
def getAlgorithmTotalTime (inputFile, rootName)
 
def convert (entry)
 
def getFileName (tableName, rootName)
 
def getHistogramPrefix (tableName, rootName)
 

Variables

 log
 

Function Documentation

◆ convert()

def python.Util.convert (   entry)
@brief Format an entry for saving, using scientific notation for very large or very small numbers

Definition at line 142 of file TrigCostAnalysis/python/Util.py.

142 def convert(entry):
143     ''' @brief Format an entry for saving, using scientific notation for very large or very small numbers'''
144     if type(entry) is float or type(entry) is int:
145         # Avoid scientific notation for small numbers and 0
146         if entry == 0:
147             return 0
148         elif fabs(entry) > 10000 or fabs(entry) < 0.0001:
149             return "{:.4e}".format(entry)
150         elif int(entry) == entry:
151             # Get rid of unnecessary 0
152             return int(entry)
153         else:
154             return "{:.4}".format(entry)
155 
156     return entry
157 
158 
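
A quick illustration of the formatting behaviour (plain Python; the import path TrigCostAnalysis.Util is an assumption based on the file location quoted above):

    from TrigCostAnalysis.Util import convert  # assumed import path

    convert(0)          # -> 0            (zero is kept as-is)
    convert(42.0)       # -> 42           (redundant .0 dropped)
    convert(3.14159)    # -> '3.142'      (4 significant digits)
    convert(12345.678)  # -> '1.2346e+04' (|x| > 10000: scientific notation)
    convert(0.00005)    # -> '5.0000e-05' (|x| < 0.0001: scientific notation)
    convert("HLT_e26")  # -> 'HLT_e26'    (non-numeric entries pass through unchanged)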

◆ exploreTree()

def python.Util.exploreTree (   inputFile,
  dumpSummary = False,
  underflowThreshold = 0.1,
  overflowThreshold = 0.1,
  maxRanges = 5,
  skipRanges = -1 
)
@brief Explore the ROOT file's directory tree to find tables with histograms to be saved as CSV

For each directory found, a TableConstructor object is created.
Expected directory tree:
    rootDir
        table1Dir
            entry1Dir
                hist1
                hist2
                ...
            entry2Dir
                hist1
                ...
        table2Dir
        ...
        walltimeHist

@param[in] inputFile ROOT.TFile object with histograms

Definition at line 18 of file TrigCostAnalysis/python/Util.py.

18 def exploreTree(inputFile, dumpSummary=False, underflowThreshold=0.1, overflowThreshold=0.1, maxRanges=5, skipRanges=-1):
19     ''' @brief Explore the ROOT file's directory tree to find tables with histograms to be saved as CSV
20 
21     For each directory found, a TableConstructor object is created.
22     Expected directory tree:
23         rootDir
24             table1Dir
25                 entry1Dir
26                     hist1
27                     hist2
28                     ...
29                 entry2Dir
30                     hist1
31                     ...
32             table2Dir
33             ...
34             walltimeHist
35 
36     @param[in] inputFile ROOT.TFile object with histograms
37     '''
38 
39     processingWarnings = []
40     rangeCounter = 0
41     rangesToSkip = skipRanges
42     for timeRange in inputFile.GetListOfKeys():
43         if timeRange.GetName() != "metadata" and rangesToSkip > 0:
44             rangesToSkip-=1
45             log.debug("Skipping range {0}".format(timeRange.GetName()))
46             continue
47 
48         if maxRanges > 0 and rangeCounter >= maxRanges:
49             log.info("{0} ranges were processed - exiting the postprocessing".format(rangeCounter))
50             break
51 
52         rangeObj = timeRange.ReadObj()
53         if not rangeObj.IsA().InheritsFrom(ROOT.TDirectory.Class()): continue
54 
55         walltime = getWalltime(inputFile, timeRange.GetName())
56 
57         for table in rangeObj.GetListOfKeys():
58             tableObj = table.ReadObj()
59             if not tableObj.IsA().InheritsFrom(ROOT.TDirectory.Class()): continue
60             log.info("Processing Table %s", table.GetName())
61             # Find and create the Table Constructor for this specific Table
62             try:
63                 className = table.GetName() + "_TableConstructor"
64                 exec("from TrigCostAnalysis." + className + " import " + className)
65                 t = eval(className + "(tableObj, underflowThreshold, overflowThreshold)")
66 
67                 if table.GetName() == "Chain_HLT" or table.GetName() == "Chain_Algorithm_HLT":
68                     t.totalTime = getAlgorithmTotalTime(inputFile, rangeObj.GetName())
69 
70                 if table.GetName() == "Global_HLT":
71                     t.lbLength = walltime
72 
73                 if table.GetName() == "Algorithm_HLT":
74                     t.dumpSummary = dumpSummary
75 
76                 fileName = getFileName(table.GetName(), timeRange.GetName())
77                 histPrefix = getHistogramPrefix(table.GetName(), timeRange.GetName())
78 
79                 t.fillTable(histPrefix)
80                 t.normalizeColumns(walltime)
81                 t.saveToFile(fileName)
82 
83                 processingWarnings += t.getWarningMsgs()
84 
85             except (ValueError):
86                 log.error("Processing of table {0} failed!".format(table.GetName()))
87                 return []
88             except (NameError, ImportError):
89                 log.warning("Class {0} not defined - directory {1} will not be processed"
90                             .format(table.GetName()+"_TableConstructor", table.GetName()))
91 
92         rangeCounter += 1
93         log.debug("Range {0} was processed".format(timeRange.GetName()))
94 
95     # add summary of the histograms with the most overflow
96     summary = createOverflowSummary(processingWarnings)
97     summary["Summary"] += ["Underflow threshold: {0}".format(underflowThreshold), "Overflow threshold: {0}".format(overflowThreshold)]
98     return processingWarnings + [summary]
99 
100 
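
A minimal driver sketch for exploreTree; the file name and import path are assumptions, and the input is expected to be a trigger cost ROOT file laid out as in the directory tree shown above. The return value is the list of processing warnings plus an overflow summary.

    # Sketch only: file name and import path are assumptions.
    import ROOT
    from TrigCostAnalysis.Util import exploreTree

    inputFile = ROOT.TFile("TrigCostRoot_Results.root", "READ")
    # Process at most 3 time ranges; one CSV per table and range is written
    # via the corresponding TableConstructor.
    warnings = exploreTree(inputFile, dumpSummary=True, maxRanges=3)
    for w in warnings:
        print(w)
    inputFile.Close()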

◆ getAlgorithmTotalTime()

def python.Util.getAlgorithmTotalTime (   inputFile,
  rootName 
)
@brief Extract total time [s] of algorithms from histogram

@param[in] inputFile ROOT TFile to look for histogram
@param[in] rootName Name of the root directory to search for tables

@return total execution time [s] value if found else 0

Definition at line 124 of file TrigCostAnalysis/python/Util.py.

124 def getAlgorithmTotalTime(inputFile, rootName):
125     ''' @brief Extract total time [s] of algorithms from histogram
126 
127     @param[in] inputFile ROOT TFile to look for histogram
128     @param[in] rootName Name of the root directory to search for tables
129 
130     @return total execution time [s] value if found else 0
131     '''
132 
133     totalTime = 0
134     alg = inputFile.Get(rootName).Get("Global_HLT").Get("Total")
135     hist = alg.Get(rootName + "_Global_HLT_Total_AlgTime_perEvent")
136     for i in range(1, hist.GetXaxis().GetNbins()):
137         totalTime += hist.GetBinContent(i) * hist.GetXaxis().GetBinCenterLog(i)
138 
139     return totalTime * 1e-3
140 
141 

◆ getFileName()

def python.Util.getFileName (   tableName,
  rootName 
)
@brief Get name of file to save the table

@param[in] tableName Table name
@param[in] rootName  Name of table's root directory

@return Filename for given table

Definition at line 159 of file TrigCostAnalysis/python/Util.py.

159 def getFileName(tableName, rootName):
160     '''@brief Get name of file to save the table
161 
162     @param[in] tableName Table name
163     @param[in] rootName Name of table's root directory
164 
165     @return Filename for given table
166     '''
167     return "Table_" + tableName + "_" + rootName + ".csv"
168 
169 

◆ getGlobalGroup()

def python.Util.getGlobalGroup (   inputFile,
  filter 
)

Definition at line 246 of file RatesAnalysis/python/Util.py.

246 def getGlobalGroup(inputFile, filter):
247     for key in inputFile.GetListOfKeys():
248         if key.GetName() == 'All':
249             for subdirKey in key.ReadObj().GetListOfKeys():
250                 if not subdirKey.GetName() == "Rate_Group_HLT" : pass
251                 for globalsKey in subdirKey.ReadObj().GetListOfKeys():
252                     if filter in globalsKey.GetName():
253                         groupsDict = slice_dictionary(globalsKey.ReadObj(), "data")
254                         return groupsDict
255 
256 

◆ getHistogramPrefix()

def python.Util.getHistogramPrefix (   tableName,
  rootName 
)
@brief Construct the histogram name prefix for a given table

@param[in] tableName Table name
@param[in] rootName  Name of table's root directory

@return Histogram prefix for given table

Definition at line 170 of file TrigCostAnalysis/python/Util.py.

170 def getHistogramPrefix(tableName, rootName):
171     '''@brief Construct the histogram name prefix for a given table
172 
173     @param[in] tableName Table name
174     @param[in] rootName Name of table's root directory
175 
176     @return Histogram prefix for given table
177     '''
178 
179     return rootName + '_' + tableName + '_'
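
Both helpers are simple string builders; together they determine where a table is written and which histograms fill it. The values below follow directly from the two return statements (the table and range names are only examples):

    # Example values only; import path assumed from "TrigCostAnalysis/python/Util.py".
    from TrigCostAnalysis.Util import getFileName, getHistogramPrefix

    getFileName("Algorithm_HLT", "All")         # -> "Table_Algorithm_HLT_All.csv"
    getHistogramPrefix("Algorithm_HLT", "All")  # -> "All_Algorithm_HLT_"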

◆ getMetadata()

def python.Util.getMetadata (   inputFile)
Get metadata for rates.json file

Definition at line 132 of file RatesAnalysis/python/Util.py.

132 def getMetadata(inputFile):
133     '''Get metadata for rates.json file'''
134     metatree = inputFile.Get("metadata")
135     if metatree is None:
136         return None
137 
138     metatree.GetEntry(0)
139     metadata = {}
140 
141     metadata['runNumber'] = metatree.runNumber
142 
143     metadata['targetMu'] = metatree.targetMu
144     metadata['targetBunches'] = metatree.targetBunches
145     metadata['targetLumi'] = metatree.targetLumi
146 
147     metadata['masterKey'] = metatree.masterKey
148     metadata['lvl1PrescaleKey'] = metatree.lvl1PrescaleKey
149     metadata['hltPrescaleKey'] = metatree.hltPrescaleKey
150 
151     metadata['AtlasProject'] = str(metatree.AtlasProject)
152     metadata['AtlasVersion'] = str(metatree.AtlasVersion)
153 
154     metadata['bunchCrossingRate'] = metatree.bunchCrossingRate
155 
156     metadata['multiSliceDiJet'] = metatree.multiSliceDiJet
157 
158     prescales = {}
159     lowers = {}
160     express = {}
161     for i in range(0, metatree.triggers.size()):
162         prescale = metatree.prescales.at(i)
163         expressPrescale = metatree.express.at(i)
164         # Handle group prescale values
165         prescales[metatree.triggers.at(i)] = prescale if prescale >= -1 else "Multiple"
166         lowers[metatree.triggers.at(i)] = str(metatree.lowers.at(i))
167         express[metatree.triggers.at(i)] = expressPrescale if expressPrescale >= -1 else "Multiple"
168 
169     metadata['prescales'] = prescales
170     metadata['lowers'] = lowers
171     metadata['express'] = express
172 
173     chainid = {}
174     chaingroup = {}
175     for i in range(0, metatree.hltChainIDGroup.size()):
176         chainid[metatree.hltChainIDGroup.at(i).at(0)] = metatree.hltChainIDGroup.at(i).at(1)
177         chaingroup[metatree.hltChainIDGroup.at(i).at(0)] = metatree.hltChainIDGroup.at(i).at(2)
178 
179     metadata['chainID'] = chainid
180     metadata['chainGroup'] = chaingroup
181 
182     itemid = {}
183     for i in range(0, metatree.l1ItemID.size()):
184         itemid[metatree.l1ItemID.at(i).at(0)] = metatree.l1ItemID.at(i).at(1)
185 
186     metadata['itemID'] = itemid
187 
188     bunchGroups = []
189     for bg in metatree.bunchGroups:
190         bunchGroups.append(bg)
191     metadata['bunchGroups'] = bunchGroups
192 
193     return metadata
194 
195 
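
A brief usage sketch; the file name is a placeholder for a RatesAnalysis output file and the import path is an assumption:

    import ROOT
    from RatesAnalysis.Util import getMetadata  # assumed import path

    f = ROOT.TFile("rates.root", "READ")        # placeholder file name
    metadata = getMetadata(f)
    if metadata is not None:
        print(metadata['runNumber'], metadata['targetMu'], metadata['targetLumi'])
        print(len(metadata['prescales']), "triggers with prescale information")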

◆ getTableName()

def python.Util.getTableName (   name)

Definition at line 14 of file RatesAnalysis/python/Util.py.

14 def getTableName(name):
15     tabName = "Table_Rate_"
16     if name == "HLT" or name == "L1":
17         tabName += "Chain" + name
18     else:
19         tabName += name
20 
21     tabName += "_HLT_All.csv"
22 
23     return tabName
24 
25 
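
The resulting names follow directly from the concatenation above, for example:

    getTableName("HLT")    # -> "Table_Rate_ChainHLT_HLT_All.csv"
    getTableName("L1")     # -> "Table_Rate_ChainL1_HLT_All.csv"
    getTableName("Group")  # -> "Table_Rate_Group_HLT_All.csv"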

◆ getWalltime()

def python.Util.getWalltime (   inputFile,
  rootName 
)
@brief Extract walltime value from histogram

@param[in] inputFile ROOT TFile to look for histogram
@param[in] rootName Name of the root directory to search for tables

@return walltime value if found, else 0 (and an error is logged)

Definition at line 101 of file TrigCostAnalysis/python/Util.py.

101 def getWalltime(inputFile, rootName):
102     ''' @brief Extract walltime value from histogram
103 
104     @param[in] inputFile ROOT TFile to look for histogram
105     @param[in] rootName Name of the root directory to search for tables
106 
107     @return walltime value if found, else 0 (and an error is logged)
108     '''
109 
110     dirObj = inputFile.Get(rootName)
111     if not dirObj.IsA().InheritsFrom(ROOT.TDirectory.Class()): return 0
112     for hist in dirObj.GetListOfKeys():
113         if '_walltime' in hist.GetName():
114             histObj = hist.ReadObj()
115             if histObj.IsA().InheritsFrom(ROOT.TProfile.Class()):
116                 return histObj.Integral()
117             else:
118                 return histObj.GetBinContent(1)
119 
120     log.error("Walltime not found")
121     return 0
122 
123 
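
A brief usage sketch; the file and range names are placeholders and the import path is an assumption:

    import ROOT
    from TrigCostAnalysis.Util import getWalltime  # assumed import path

    f = ROOT.TFile("TrigCostRoot_Results.root", "READ")  # placeholder file name
    walltime = getWalltime(f, "All")  # 0 if no '_walltime' histogram is found in the range
    print("walltime:", walltime)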

◆ populateScanTriggers()

def python.Util.populateScanTriggers (   inputFile,
  metadata 
)

Definition at line 232 of file RatesAnalysis/python/Util.py.

232 def populateScanTriggers(inputFile, metadata):
233     from .RatesScanTrigger import RatesScanTrigger
234     triggerList = []
235     for key in inputFile.GetListOfKeys():
236         if key.GetName() == 'ScanTriggers':
237             for scanName in key.ReadObj().GetListOfKeys():
238                 numerator_dict = slice_dictionary(scanName.ReadObj(), "rateVsThreshold")
239                 if len(numerator_dict) == 0:
240                     log.error(f"Empty dictionary in populateScanTriggers for scan {scanName}")
241                     continue
242                 else:
243                     triggerList.append(RatesScanTrigger(scanName.GetName(), metadata, numerator_dict))
244     return triggerList
245 

◆ populateTriggers()

def python.Util.populateTriggers (   inputFile,
  metadata,
  globalGroupDict,
  filter 
)

Definition at line 196 of file RatesAnalysis/python/Util.py.

196 def populateTriggers(inputFile, metadata, globalGroupDict, filter):
197     # Fix the names of groups that are not GLOBAL groups
198     def getTriggerName(name, filter):
199         if "Group" in filter and "GLOBAL" not in name:
200             return name.replace('_', ':', 1)
201         else:
202             return name
203 
204     from .RatesTrigger import RatesTrigger
205     triggerList = []
206     for key in inputFile.GetListOfKeys():
207         if key.GetName() == 'All':
208             for subdirKey in key.ReadObj().GetListOfKeys():
209                 if filter not in subdirKey.GetName(): continue
210                 for triggerKey in subdirKey.ReadObj().GetListOfKeys():
211                     numeratorDict = slice_dictionary(triggerKey.ReadObj(), "data")
212                     for suffix, data in numeratorDict.items():
213                         try:
214                             triggerList.append(RatesTrigger(getTriggerName(triggerKey.GetName(), filter)+suffix, metadata, data, globalGroupDict[suffix], suffix))
215                         except ValueError:
216                             log.error("Cannot create a new trigger for {0}".format(triggerKey.GetName()))
217                             return []
218     return triggerList
219 
220 
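
The RatesAnalysis helpers in this namespace are naturally chained together. The sketch below shows one plausible composition, not taken from any driver script: the file name is a placeholder, the import path is an assumption, and the filter strings ("GLOBAL", "ChainHLT", "ChainL1") are guesses inferred from the table names built by getTableName.

    # Sketch only: file name, import path and filter strings are assumptions.
    import ROOT
    from RatesAnalysis.Util import (getMetadata, getGlobalGroup, populateTriggers,
                                    getTableName, toCSV)

    f = ROOT.TFile("rates.root", "READ")
    metadata = getMetadata(f)

    # Rates of the GLOBAL groups, keyed by slice suffix (see slice_dictionary below)
    globalGroups = getGlobalGroup(f, "GLOBAL")

    hltTriggers = populateTriggers(f, metadata, globalGroups, "ChainHLT")
    l1Triggers = populateTriggers(f, metadata, globalGroups, "ChainL1")

    toCSV(getTableName("HLT"), metadata, hltTriggers)
    toCSV(getTableName("L1"), metadata, l1Triggers, readL1=True)

toJson and toROOT accept the same trigger lists; note that toJson additionally reads 'details', 'JIRA' and 'amiTag' entries from the metadata dictionary, which getMetadata does not fill.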

◆ readDBFromAMI()

def python.Util.readDBFromAMI (   amiTag)
Read which database was used, based on the AMI tag

Definition at line 257 of file RatesAnalysis/python/Util.py.

257 def readDBFromAMI(amiTag):
258     ''' Read which database was used, based on the AMI tag '''
259     try:
260         import pyAMI.client
261         import pyAMI.atlas.api as AtlasAPI
262     except ModuleNotFoundError:
263         log.warning("Unable to import AMIClient from pyAMI. Maybe you didn't do localSetupPyAMI?")
264         return ""
265 
266     amiclient = pyAMI.client.Client('atlas')
267     AtlasAPI.init()
268 
269     command = [ 'AMIGetAMITagInfo', '-amiTag="%s"' % amiTag, '-cached' ]
270     amiTagInfo = amiclient.execute(command, format = 'dict_object').get_rows('amiTagInfo')[0]
271 
272     return amiTagInfo['DBserver'] if "DBserver" in amiTagInfo else None
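
A brief usage sketch; the AMI tag value is illustrative and a working pyAMI setup is required:

    from RatesAnalysis.Util import readDBFromAMI  # assumed import path

    dbServer = readDBFromAMI("x123")  # illustrative AMI tag
    print(dbServer)  # 'DBserver' field of the AMI tag info, None if absent, "" if pyAMI is unavailable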

◆ slice_dictionary()

def python.Util.slice_dictionary (   directory,
  object_key 
)

Definition at line 221 of file RatesAnalysis/python/Util.py.

221 def slice_dictionary(directory, object_key):
222     slices_dict = {}
223     for hist in directory.GetListOfKeys():
224         if str(hist.GetName()).startswith(object_key):
225             try:
226                 slice_index = "_"+str(hist.GetName()).split("_")[1]
227             except IndexError:
228                 slice_index = ""
229             slices_dict[slice_index] = hist.ReadObj()
230     return slices_dict
231 
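
The keys of the returned dictionary are the per-slice suffixes of the histogram names. A self-contained illustration (the file name and histogram layout are only for demonstration; the import path is an assumption):

    import ROOT
    from RatesAnalysis.Util import slice_dictionary  # assumed import path

    f = ROOT.TFile("demo.root", "RECREATE")
    for name in ("data", "data_1", "data_2"):
        ROOT.TH1F(name, name, 10, 0, 10).Write()
    f.Close()

    f = ROOT.TFile("demo.root", "READ")
    slices = slice_dictionary(f, "data")
    print(sorted(slices))  # ['', '_1', '_2']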

◆ toCSV()

def python.Util.toCSV (   fileName,
  metadata,
  HLTTriggers,
  readL1 = False 
)

Definition at line 26 of file RatesAnalysis/python/Util.py.

26 def toCSV(fileName, metadata, HLTTriggers, readL1=False):
27     import csv
28 
29     with open(fileName, mode='w') as outputCSV_file:
30         rates_csv_writer = csv.writer(outputCSV_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
31 
32         rates_csv_writer.writerow(['Name','Active Time [s]','Group','Weighted PS Rate [Hz]','Weighted PS Rate Err [Hz]', \
33             'Unique Rate [Hz]','Unique Rate Err [Hz]','Express Rate [Hz]','Express Rate Err [Hz]','Prescale','Express Prescale','ID', \
34             'Raw Active Events','Raw Pass Events','Active Events','Input Rate [Hz]','Pass Fraction after PS [%]','Pass Weighted PS'])
35         rates_csv_writer.writerow(['Trigger name','Integrated length of all lumi blocks which contributed events to this rates prediction.','The group this chain belongs to.','Rate after applying all prescale(s) as weights.','Error on rate after applying all prescale(s) as weights','Total rate without this chain rate','Error on unique rate','Express stream rate','Error on express rate','The prescale of this chain. Only displayed for simple combinations.','The prescale of the chain including express prescale','The CPTID or HLT Chain ID','Raw underlying statistics on the number events processed for this chain.','Raw underlying statistics on the number events passed by this chain.','Number of events in which the chain - or at least one chain in the combination - was executed.','Input rate to this chain or combination of chains. At L1 this will be the collision frequency for the bunch pattern.','Fraction of events which pass this trigger after prescale.','Number of events this chain or combination passed after applying prescales as weighting factors.'])
36 
37         for trig in HLTTriggers:
38 
39             group_name = chain_id = ""
40             if "ChainL1" in fileName:
41                 group_name = "None"
42                 chain_id = metadata["itemID"].get(trig.name)
43             elif "ChainHLT" in fileName:
44                 group_name = metadata["chainGroup"].get(trig.name)
45                 chain_id = metadata["chainID"].get(trig.name)
46             elif "Group" in fileName:
47                 chain_id = 0
48                 group_name = "All" if "GLOBAL" in trig.name else group_name
49 
50             if float(trig.rateDenominator)==0:
51                 print("float(trig.rateDenominator) is ZERO! This shouldn't happen")
52             if float(trig.activeWeighted)==0:
53                 passFrac_afterPS=0
54             else:
55                 passFrac_afterPS=100*float(trig.passWeighted)/float(trig.activeWeighted)
56 
57             isL1 = trig.name.startswith("L1_")
58             rates_csv_writer.writerow([trig.name,"%.4f" % trig.rateDenominator,group_name,"%.4f" % trig.rate,"%.4f" % trig.rateErr, \
59                 "%.4f" % trig.rateUnique,"%.4f" % trig.rateUniqueErr, ("%.4f" % trig.rateExpress if not isL1 else "-"), ("%.4f" % trig.rateExpressErr if not isL1 else "-"), \
60                 trig.prescale, (trig.expressPrescale if not isL1 else "-"), chain_id, "%.0f" % trig.activeRaw,"%.0f" % trig.passRaw,"%.4f" % trig.activeWeighted, \
61                 "%.4f" % (float(trig.activeWeighted)/float(trig.rateDenominator)),"%.4f" % passFrac_afterPS,"%.4f" % trig.passWeighted])
62 
63 
64 

◆ toJson()

def python.Util.toJson (   fileName,
  metadata,
  L1Triggers,
  HLTTriggers 
)

Definition at line 65 of file RatesAnalysis/python/Util.py.

65 def toJson(fileName, metadata, L1Triggers, HLTTriggers):
66     import json
67     l1 = {}
68     for trig in L1Triggers:
69         trig.export(l1)
70     hlt = {}
71     for trig in HLTTriggers:
72         trig.export(hlt)
73 
74     level = {}
75     level['L1'] = l1
76     level['HLT'] = hlt
77 
78     jsonDict = {}
79     jsonDict['PredictionLumi'] = metadata['targetLumi']
80     for k,v in metadata.items():
81         if k.startswith("n_evts"):
82             jsonDict[k] = v
83     jsonDict['AtlasProject'] = metadata['AtlasProject']
84     jsonDict['AtlasVersion'] = metadata['AtlasVersion']
85     jsonDict['triggerMenuSetup'] = metadata['masterKey']
86     jsonDict['L1PrescaleSet'] = metadata['lvl1PrescaleKey']
87     jsonDict['HLTPrescaleSet'] = metadata['hltPrescaleKey']
88     jsonDict['bunchgroup'] = metadata['bunchGroups']
89     jsonDict['level'] = level
90 
91     with open(fileName, 'w') as outFile:
92         json.dump(obj=jsonDict, fp=outFile, indent=2, sort_keys=True)
93 
94 
95     metajsonData = [
96         {'PredictionLumi' : metadata['targetLumi']},
97         {'TargetMu' : metadata['targetMu']},
98         {'RunNumber' : metadata['runNumber']},
99         {'Details' : metadata['details']},
100         {'JIRA' : metadata['JIRA']},
101         {'AMITag' : metadata['amiTag']},
102         {'SMK' : metadata['masterKey']},
103         {'DB' : readDBFromAMI(metadata['amiTag']) if metadata['amiTag'] else None},
104         {'LVL1PSK' : metadata['lvl1PrescaleKey']},
105         {'HLTPSK' : metadata['hltPrescaleKey']},
106         {'AtlasProject' : metadata['AtlasProject']},
107         {'AtlasVersion' : metadata['AtlasVersion']}
108     ]
109     for k,v in metadata.items():
110         if k.startswith("n_evts"):
111             metajsonData += [{k : v}]   # append one {key: value} entry per event-count field
112 
113 
114     metajsonDict = {}
115     metajsonDict['text'] = 'metadata'
116     metajsonDict['children'] = metajsonData
117 
118     with open('metadata.json', 'w') as outMetaFile:
119         json.dump(obj=metajsonDict, fp=outMetaFile, indent=2, sort_keys=True)
120 
121 

◆ toROOT()

def python.Util.toROOT (   fileName,
  triggers 
)

Definition at line 122 of file RatesAnalysis/python/Util.py.

122 def toROOT(fileName, triggers):
123     mydict = {}
124     for trigger in triggers:
125         trigger.export(mydict)
126     from ROOT import TFile
127     with TFile.Open(fileName, 'RECREATE') as fout:
128         for key, scanDict in mydict.items():
129             fout.WriteObject(scanDict['rate'], f"{key}_rate")
130 
131 

Variable Documentation

◆ log

python.Util.log

Definition at line 12 of file RatesAnalysis/python/Util.py.
