 |
ATLAS Offline Software
|
|
| def | getTableName (name) |
| |
| def | toCSV (fileName, metadata, HLTTriggers, readL1=False) |
| |
| def | toJson (fileName, metadata, L1Triggers, HLTTriggers) |
| |
| def | toROOT (fileName, triggers) |
| |
| def | getMetadata (inputFile) |
| |
| def | populateTriggers (inputFile, metadata, globalGroupDict, filter) |
| |
| def | slice_dictionary (directory, object_key) |
| |
| def | populateScanTriggers (inputFile, metadata) |
| |
| def | getGlobalGroup (inputFile, filter) |
| |
| def | readDBFromAMI (amiTag) |
| |
| def | exploreTree (inputFile, dumpSummary=False, underflowThreshold=0.1, overflowThreshold=0.1, maxRanges=5, skipRanges=-1) |
| |
| def | getWalltime (inputFile, rootName) |
| |
| def | getAlgorithmTotalTime (inputFile, rootName) |
| |
| def | convert (entry) |
| |
| def | getFileName (tableName, rootName) |
| |
| def | getHistogramPrefix (tableName, rootName) |
| |
◆ convert()
| def python.Util.convert |
( |
|
entry | ) |
|
@brief Save entry number in scientific notation
Definition at line 142 of file TrigCostAnalysis/python/Util.py.
143 ''' @brief Save entry number in scientific notation'''
144 if type(entry)
is float
or type(entry)
is int:
148 elif fabs(entry) > 10000
or fabs(entry) < 0.0001:
149 return "{:.4e}".
format(entry)
150 elif int(entry) == entry:
154 return "{:.4}".
format(entry)
◆ exploreTree()
| def python.Util.exploreTree |
( |
|
inputFile, |
|
|
|
dumpSummary = False, |
|
|
|
underflowThreshold = 0.1, |
|
|
|
overflowThreshold = 0.1, |
|
|
|
maxRanges = 5, |
|
|
|
skipRanges = -1 |
|
) |
| |
@brief Explore ROOT Tree to find tables with histograms to be saved in csv
For each directory found, a TableConstructor object is created.
Expected directory tree:
rootDir
table1Dir
entry1Dir
hist1
hist2
...
entry2Dir
hist1
...
table2Dir
...
walltimeHist
@param[in] inputFile ROOT.TFile object with histograms
Definition at line 18 of file TrigCostAnalysis/python/Util.py.
18 def exploreTree(inputFile, dumpSummary=False, underflowThreshold=0.1, overflowThreshold=0.1, maxRanges=5, skipRanges=-1):
19 ''' @brief Explore ROOT Tree to find tables with histograms to be saved in csv
21 Per each found directory TableConstructor object is created.
22 Expected directory tree:
36 @param[in] inputFile ROOT.TFile object with histograms
39 processingWarnings = []
41 rangesToSkip = skipRanges
42 for timeRange
in inputFile.GetListOfKeys():
43 if timeRange.GetName() !=
"metadata" and rangesToSkip > 0:
45 log.debug(
"Skipping range {0}".
format(timeRange.GetName()))
48 if maxRanges > 0
and rangeCounter >= maxRanges:
49 log.info(
"{0} ranges were processed - exiting the postprocessing".
format(rangeCounter))
52 rangeObj = timeRange.ReadObj()
53 if not rangeObj.IsA().InheritsFrom(ROOT.TDirectory.Class()):
continue
55 walltime =
getWalltime(inputFile, timeRange.GetName())
57 for table
in rangeObj.GetListOfKeys():
58 tableObj = table.ReadObj()
59 if not tableObj.IsA().InheritsFrom(ROOT.TDirectory.Class()):
continue
60 log.info(
"Processing Table %s", table.GetName())
63 className = table.GetName() +
"_TableConstructor"
64 exec(
"from TrigCostAnalysis." + className +
" import " + className)
65 t = eval(className +
"(tableObj, underflowThreshold, overflowThreshold)")
67 if table.GetName() ==
"Chain_HLT" or table.GetName() ==
"Chain_Algorithm_HLT":
70 if table.GetName() ==
"Global_HLT":
73 if table.GetName() ==
"Algorithm_HLT":
74 t.dumpSummary = dumpSummary
76 fileName =
getFileName(table.GetName(), timeRange.GetName())
79 t.fillTable(histPrefix)
80 t.normalizeColumns(walltime)
81 t.saveToFile(fileName)
83 processingWarnings += t.getWarningMsgs()
86 log.error(
"Processing of table {0} failed!".
format(table.GetName()))
88 except (NameError, ImportError):
89 log.warning(
"Class {0} not defined - directory {1} will not be processed"
90 .
format(table.GetName()+
"_TableConstructor", table.GetName()))
93 log.debug(
"Range {0} was processed".
format(timeRange.GetName()))
97 summary[
"Summary"] += [
"Underflow threshold: {0}".
format(underflowThreshold),
"Overflow threshold: {0}".
format(overflowThreshold)]
98 return processingWarnings + [summary]
◆ getAlgorithmTotalTime()
| def python.Util.getAlgorithmTotalTime |
( |
|
inputFile, |
|
|
|
rootName |
|
) |
| |
@brief Extract total time [s] of algorithms from histogram
@param[in] inputFile ROOT TFile to look for histogram
@param[in] rootName Name of the root directory to search for tables
@return total execution time [s] value if found else 0
Definition at line 124 of file TrigCostAnalysis/python/Util.py.
125 ''' @brief Extract total time [s] of algorithms from histogram
127 @param[in] inputFile ROOT TFile to look for histogram
128 @param[in] rootName Name of the root directory to search for tables
130 @return total execution time [s] value if found else 0
134 alg = inputFile.Get(rootName).
Get(
"Global_HLT").
Get(
"Total")
135 hist = alg.Get(rootName +
"_Global_HLT_Total_AlgTime_perEvent")
136 for i
in range(1, hist.GetXaxis().GetNbins()):
137 totalTime += hist.GetBinContent(i) * hist.GetXaxis().GetBinCenterLog(i)
139 return totalTime * 1e-3
◆ getFileName()
| def python.Util.getFileName |
( |
|
tableName, |
|
|
|
rootName |
|
) |
| |
@brief Get name of file to save the table
@param[in] tableName Table name
@param[in] rootName Name of table's root directory
@return Filename for given table
Definition at line 159 of file TrigCostAnalysis/python/Util.py.
160 '''@brief Get name of file to save the table
162 @param[in] tableName Table name
163 @param[in] rootName Name of table's root directory
165 @return Filename for given table
167 return "Table_" + tableName +
"_" + rootName +
".csv"
◆ getGlobalGroup()
| def python.Util.getGlobalGroup |
( |
|
inputFile, |
|
|
|
filter |
|
) |
| |
Definition at line 246 of file RatesAnalysis/python/Util.py.
247 for key
in inputFile.GetListOfKeys():
248 if key.GetName() ==
'All':
249 for subdirKey
in key.ReadObj().GetListOfKeys():
250 if not subdirKey.GetName() ==
"Rate_Group_HLT" :
pass
251 for globalsKey
in subdirKey.ReadObj().GetListOfKeys():
252 if filter
in globalsKey.GetName():
◆ getHistogramPrefix()
| def python.Util.getHistogramPrefix |
( |
|
tableName, |
|
|
|
rootName |
|
) |
| |
@brief Construct full histogram name
@param[in] tableName Table name
@param[in] rootName Name of table's root directory
@return Histogram prefix for given table
Definition at line 170 of file TrigCostAnalysis/python/Util.py.
171 '''@brief Construct full histogram name
173 @param[in] tableName Table name
174 @param[in] rootName Name of table's root directory
176 @return Histogram prefix for given table
179 return rootName +
'_' + tableName +
'_'
◆ getMetadata()
| def python.Util.getMetadata |
( |
|
inputFile | ) |
|
Get metadata for rates.json file
Definition at line 132 of file RatesAnalysis/python/Util.py.
133 '''Get metadata for rates.json file'''
134 metatree = inputFile.Get(
"metadata")
141 metadata[
'runNumber'] = metatree.runNumber
143 metadata[
'targetMu'] = metatree.targetMu
144 metadata[
'targetBunches'] = metatree.targetBunches
145 metadata[
'targetLumi'] = metatree.targetLumi
147 metadata[
'masterKey'] = metatree.masterKey
148 metadata[
'lvl1PrescaleKey'] = metatree.lvl1PrescaleKey
149 metadata[
'hltPrescaleKey'] = metatree.hltPrescaleKey
151 metadata[
'AtlasProject'] =
str(metatree.AtlasProject)
152 metadata[
'AtlasVersion'] =
str(metatree.AtlasVersion)
154 metadata[
'bunchCrossingRate'] = metatree.bunchCrossingRate
156 metadata[
'multiSliceDiJet'] = metatree.multiSliceDiJet
161 for i
in range(0, metatree.triggers.size()):
162 prescale = metatree.prescales.at(i)
163 expressPrescale = metatree.express.at(i)
165 prescales[metatree.triggers.at(i)] = prescale
if prescale >= -1
else "Multiple"
166 lowers[metatree.triggers.at(i)] =
str(metatree.lowers.at(i))
167 express[metatree.triggers.at(i)] = expressPrescale
if expressPrescale >= -1
else "Multiple"
169 metadata[
'prescales'] = prescales
170 metadata[
'lowers'] = lowers
171 metadata[
'express'] = express
175 for i
in range(0, metatree.hltChainIDGroup.size()):
176 chainid[metatree.hltChainIDGroup.at(i).at(0)] = metatree.hltChainIDGroup.at(i).at(1)
177 chaingroup[metatree.hltChainIDGroup.at(i).at(0)] = metatree.hltChainIDGroup.at(i).at(2)
179 metadata[
'chainID'] = chainid
180 metadata[
'chainGroup'] = chaingroup
183 for i
in range(0, metatree.l1ItemID.size()):
184 itemid[metatree.l1ItemID.at(i).at(0)] = metatree.l1ItemID.at(i).at(1)
186 metadata[
'itemID'] = itemid
189 for bg
in metatree.bunchGroups:
190 bunchGroups.append(bg)
191 metadata[
'bunchGroups'] = bunchGroups
◆ getTableName()
| def python.Util.getTableName |
( |
|
name | ) |
|
Definition at line 14 of file RatesAnalysis/python/Util.py.
15 tabName =
"Table_Rate_"
16 if name ==
"HLT" or name ==
"L1":
17 tabName +=
"Chain" + name
21 tabName +=
"_HLT_All.csv"
◆ getWalltime()
| def python.Util.getWalltime |
( |
|
inputFile, |
|
|
|
rootName |
|
) |
| |
@brief Extract walltime value from histogram
@param[in] inputFile ROOT TFile to look for histogram
@param[in] rootName Name of the root directory to search for tables
@return walltime value if found; otherwise 0 is returned and an error is logged
Definition at line 101 of file TrigCostAnalysis/python/Util.py.
102 ''' @brief Extract walltime value from histogram
104 @param[in] inputFile ROOT TFile to look for histogram
105 @param[in] rootName Name of the root directory to search for tables
107 @return walltime value if found else 0 and an error
110 dirObj = inputFile.Get(rootName)
111 if not dirObj.IsA().InheritsFrom(ROOT.TDirectory.Class()):
return 0
112 for hist
in dirObj.GetListOfKeys():
113 if '_walltime' in hist.GetName():
114 histObj = hist.ReadObj()
115 if histObj.IsA().InheritsFrom(ROOT.TProfile.Class()):
116 return histObj.Integral()
118 return histObj.GetBinContent(1)
120 log.error(
"Walltime not found")
◆ populateScanTriggers()
| def python.Util.populateScanTriggers |
( |
|
inputFile, |
|
|
|
metadata |
|
) |
| |
Definition at line 232 of file RatesAnalysis/python/Util.py.
233 from .RatesScanTrigger
import RatesScanTrigger
235 for key
in inputFile.GetListOfKeys():
236 if key.GetName() ==
'ScanTriggers':
237 for scanName
in key.ReadObj().GetListOfKeys():
239 if len(numerator_dict) == 0:
240 log.error(f
"Empty dictionary in populateScanTriggers for scan {scanName}")
243 triggerList.append(
RatesScanTrigger(scanName.GetName(), metadata, numerator_dict))
◆ populateTriggers()
| def python.Util.populateTriggers |
( |
|
inputFile, |
|
|
|
metadata, |
|
|
|
globalGroupDict, |
|
|
|
filter |
|
) |
| |
Definition at line 196 of file RatesAnalysis/python/Util.py.
198 def getTriggerName(name, filter):
199 if "Group" in filter
and "GLOBAL" not in name:
200 return name.replace(
'_',
':', 1)
204 from .RatesTrigger
import RatesTrigger
206 for key
in inputFile.GetListOfKeys():
207 if key.GetName() ==
'All':
208 for subdirKey
in key.ReadObj().GetListOfKeys():
209 if filter
not in subdirKey.GetName():
continue
210 for triggerKey
in subdirKey.ReadObj().GetListOfKeys():
212 for suffix, data
in numeratorDict.items():
214 triggerList.append(
RatesTrigger(getTriggerName(triggerKey.GetName(), filter)+suffix, metadata, data, globalGroupDict[suffix], suffix))
216 log.error(
"Cannot create a new trigger for {0}".
format(triggerKey.GetName()))
◆ readDBFromAMI()
| def python.Util.readDBFromAMI |
( |
|
amiTag | ) |
|
Read the database that was used, based on the AMI tag
Definition at line 257 of file RatesAnalysis/python/Util.py.
258 ''' Read used database based on AMI tag '''
261 import pyAMI.atlas.api
as AtlasAPI
262 except ModuleNotFoundError:
263 log.warning(
"Unable to import AMIClient from pyAMI. Maybe you didn't do localSetupPyAMI?")
266 amiclient = pyAMI.client.Client(
'atlas')
269 command = [
'AMIGetAMITagInfo',
'-amiTag="%s"' % amiTag,
'-cached' ]
270 amiTagInfo = amiclient.execute(command, format =
'dict_object').get_rows(
'amiTagInfo')[0]
272 return amiTagInfo[
'DBserver']
if "DBserver" in amiTagInfo
else None
◆ slice_dictionary()
| def python.Util.slice_dictionary |
( |
|
directory, |
|
|
|
object_key |
|
) |
| |
Definition at line 221 of file RatesAnalysis/python/Util.py.
223 for hist
in directory.GetListOfKeys():
224 if str(hist.GetName()).startswith(object_key):
226 slice_index =
"_"+
str(hist.GetName()).
split(
"_")[1]
229 slices_dict[slice_index] = hist.ReadObj()
◆ toCSV()
| def python.Util.toCSV |
( |
|
fileName, |
|
|
|
metadata, |
|
|
|
HLTTriggers, |
|
|
|
readL1 = False |
|
) |
| |
Definition at line 26 of file RatesAnalysis/python/Util.py.
26 def toCSV(fileName, metadata, HLTTriggers, readL1=False):
29 with open(fileName, mode=
'w')
as outputCSV_file:
30 rates_csv_writer = csv.writer(outputCSV_file, delimiter=
',', quotechar=
'"', quoting=csv.QUOTE_MINIMAL)
32 rates_csv_writer.writerow([
'Name',
'Active Time [s]',
'Group',
'Weighted PS Rate [Hz]',
'Weighted PS Rate Err [Hz]', \
33 'Unique Rate [Hz]',
'Unique Rate Err [Hz]',
'Express Rate [Hz]',
'Express Rate Err [Hz]',
'Prescale',
'Express Prescale',
'ID', \
34 'Raw Active Events',
'Raw Pass Events',
'Active Events',
'Input Rate [Hz]',
'Pass Fraction after PS [%]',
'Pass Weighted PS'])
35 rates_csv_writer.writerow([
'Trigger name',
'Integrated length of all lumi blocks which contributed events to this rates prediction.',
'The group this chain belongs to.',
'Rate after applying all prescale(s) as weights.',
'Error on rate after applying all prescale(s) as weights',
'Total rate without this chain rate',
'Error on unique rate',
'Express stream rate',
'Error on express rate',
'The prescale of this chain. Only displayed for simple combinations.',
'The prescale of the chain including express prescale',
'The CPTID or HLT Chain ID',
'Raw underlying statistics on the number events processed for this chain.',
'Raw underlying statistics on the number events passed by this chain.',
'Number of events in which the chain - or at least one chain in the combination - was executed.',
'Input rate to this chain or combination of chains. At L1 this will be the collision frequency for the bunch pattern.',
'Fraction of events which pass this trigger after prescale.',
'Number of events this chain or combination passed after applying prescales as weighting factors.'])
37 for trig
in HLTTriggers:
39 group_name = chain_id =
""
40 if "ChainL1" in fileName:
42 chain_id = metadata[
"itemID"].
get(trig.name)
43 elif "ChainHLT" in fileName:
44 group_name = metadata[
"chainGroup"].
get(trig.name)
45 chain_id = metadata[
"chainID"].
get(trig.name)
46 elif "Group" in fileName:
48 group_name =
"All" if "GLOBAL" in trig.name
else group_name
50 if float(trig.rateDenominator)==0:
51 print(
"float(trig.rateDenominator) is ZERO! This shouldn't happen")
52 if float(trig.activeWeighted)==0:
55 passFrac_afterPS=100*
float(trig.passWeighted)/
float(trig.activeWeighted)
57 isL1 = trig.name.startswith(
"L1_")
58 rates_csv_writer.writerow([trig.name,
"%.4f" % trig.rateDenominator,group_name,
"%.4f" % trig.rate,
"%.4f" % trig.rateErr, \
59 "%.4f" % trig.rateUnique,
"%.4f" % trig.rateUniqueErr, (
"%.4f" % trig.rateExpress
if not isL1
else "-"), (
"%.4f" % trig.rateExpressErr
if not isL1
else "-"), \
60 trig.prescale, (trig.expressPrescale
if not isL1
else "-"), chain_id,
"%.0f" % trig.activeRaw,
"%.0f" % trig.passRaw,
"%.4f" % trig.activeWeighted, \
61 "%.4f" % (
float(trig.activeWeighted)/
float(trig.rateDenominator)),
"%.4f" % passFrac_afterPS,
"%.4f" % trig.passWeighted])
◆ toJson()
| def python.Util.toJson |
( |
|
fileName, |
|
|
|
metadata, |
|
|
|
L1Triggers, |
|
|
|
HLTTriggers |
|
) |
| |
Definition at line 65 of file RatesAnalysis/python/Util.py.
65 def toJson(fileName, metadata, L1Triggers, HLTTriggers):
68 for trig
in L1Triggers:
71 for trig
in HLTTriggers:
79 jsonDict[
'PredictionLumi'] = metadata[
'targetLumi']
80 for k,v
in metadata.items():
81 if k.startswith(
"n_evts"):
83 jsonDict[
'AtlasProject'] = metadata[
'AtlasProject']
84 jsonDict[
'AtlasVersion'] = metadata[
'AtlasVersion']
85 jsonDict[
'triggerMenuSetup'] = metadata[
'masterKey']
86 jsonDict[
'L1PrescaleSet'] = metadata[
'lvl1PrescaleKey']
87 jsonDict[
'HLTPrescaleSet'] = metadata[
'hltPrescaleKey']
88 jsonDict[
'bunchgroup'] = metadata[
'bunchGroups']
89 jsonDict[
'level'] = level
91 with open(fileName,
'w')
as outFile:
92 json.dump(obj=jsonDict, fp=outFile, indent=2, sort_keys=
True)
96 {
'PredictionLumi' : metadata[
'targetLumi']},
97 {
'TargetMu' : metadata[
'targetMu']},
98 {
'RunNumber' : metadata[
'runNumber']},
99 {
'Details' : metadata[
'details']},
100 {
'JIRA' : metadata[
'JIRA']},
101 {
'AMITag' : metadata[
'amiTag']},
102 {
'SMK' : metadata[
'masterKey']},
103 {
'DB' :
readDBFromAMI(metadata[
'amiTag'])
if metadata[
'amiTag']
else None},
104 {
'LVL1PSK' : metadata[
'lvl1PrescaleKey']},
105 {
'HLTPSK' : metadata[
'hltPrescaleKey']},
106 {
'AtlasProject' : metadata[
'AtlasProject']},
107 {
'AtlasVersion' : metadata[
'AtlasVersion']}
109 for k,v
in metadata.items():
110 if k.startswith(
"n_evts"):
111 metajsonData+={jsonDict[k] : v}
115 metajsonDict[
'text'] =
'metadata'
116 metajsonDict[
'children'] = metajsonData
118 with open(
'metadata.json',
'w')
as outMetaFile:
119 json.dump(obj=metajsonDict, fp=outMetaFile, indent=2, sort_keys=
True)
◆ toROOT()
| def python.Util.toROOT |
( |
|
fileName, |
|
|
|
triggers |
|
) |
| |
Definition at line 122 of file RatesAnalysis/python/Util.py.
122 def toROOT(fileName, triggers):
124 for trigger
in triggers:
125 trigger.export(mydict)
126 from ROOT
import TFile
127 with TFile.Open(fileName,
'RECREATE')
as fout:
128 for key, scanDict
in mydict.items():
129 fout.WriteObject(scanDict[
'rate'], f
"{key}_rate")
◆ log
def toCSV(fileName, metadata, HLTTriggers, readL1=False)
Used to calculate a rate scan as a function of some threshold value.
def slice_dictionary(directory, object_key)
def exploreTree(inputFile, dumpSummary=False, underflowThreshold=0.1, overflowThreshold=0.1, maxRanges=5, skipRanges=-1)
def getGlobalGroup(inputFile, filter)
def getFileName(tableName, rootName)
def readDBFromAMI(amiTag)
def populateTriggers(inputFile, metadata, globalGroupDict, filter)
Used to calculate the rate for a single trigger at L1 or the HLT.
def getAlgorithmTotalTime(inputFile, rootName)
T * Get(TFile &f, const std::string &n, const std::string &dir="", const chainmap_t *chainmap=0, std::vector< std::string > *saved=0)
get a histogram given a path, and an optional initial directory if histogram is not found,...
def getHistogramPrefix(tableName, rootName)
def getMetadata(inputFile)
void print(char *figname, TCanvas *c1)
def toJson(fileName, metadata, L1Triggers, HLTTriggers)
T * get(TKey *tobj)
get a TObject* from a TKey* (why can't a TObject be a TKey?)
def populateScanTriggers(inputFile, metadata)
def getWalltime(inputFile, rootName)
def toROOT(fileName, triggers)