ATLAS Offline Software
def | getTableName (name) |
|
def | toCSV (fileName, metadata, HLTTriggers, readL1=False) |
|
def | toJson (fileName, metadata, L1Triggers, HLTTriggers) |
|
def | getMetadata (inputFile) |
|
def | populateTriggers (inputFile, metadata, globalGroup, filter) |
|
def | getGlobalGroup (inputFile, filter) |
|
def | readDBFromAMI (amiTag) |
|
def | exploreTree (inputFile, dumpSummary=False, underflowThreshold=0.1, overflowThreshold=0.1, maxRanges=5, skipRanges=-1) |
|
def | getWalltime (inputFile, rootName) |
|
def | getAlgorithmTotalTime (inputFile, rootName) |
|
def | convert (entry) |
|
def | getFileName (tableName, rootName) |
|
def | getHistogramPrefix (tableName, rootName) |
|
◆ convert()
def python.Util.convert |
( |
|
entry | ) |
|
@brief Save entry number in scientific notation
Definition at line 142 of file TrigCostAnalysis/python/Util.py.
143 ''' @brief Save entry number in scientific notation'''
144 if type(entry)
is float
or type(entry)
is int:
148 elif fabs(entry) > 10000
or fabs(entry) < 0.0001:
149 return "{:.4e}".
format(entry)
150 elif int(entry) == entry:
154 return "{:.4}".
format(entry)
◆ exploreTree()
def python.Util.exploreTree |
( |
|
inputFile, |
|
|
|
dumpSummary = False , |
|
|
|
underflowThreshold = 0.1 , |
|
|
|
overflowThreshold = 0.1 , |
|
|
|
maxRanges = 5 , |
|
|
|
skipRanges = -1 |
|
) |
| |
@brief Explore ROOT Tree to find tables with histograms to be saved in csv
For each directory found, a TableConstructor object is created.
Expected directory tree:
rootDir
table1Dir
entry1Dir
hist1
hist2
...
entry2Dir
hist1
...
table2Dir
...
walltimeHist
@param[in] inputFile ROOT.TFile object with histograms
Definition at line 18 of file TrigCostAnalysis/python/Util.py.
18 def exploreTree(inputFile, dumpSummary=False, underflowThreshold=0.1, overflowThreshold=0.1, maxRanges=5, skipRanges=-1):
19 ''' @brief Explore ROOT Tree to find tables with histograms to be saved in csv
21 Per each found directory TableConstructor object is created.
22 Expected directory tree:
36 @param[in] inputFile ROOT.TFile object with histograms
39 processingWarnings = []
41 rangesToSkip = skipRanges
42 for timeRange
in inputFile.GetListOfKeys():
43 if timeRange.GetName() !=
"metadata" and rangesToSkip > 0:
45 log.debug(
"Skipping range {0}".
format(timeRange.GetName()))
48 if maxRanges > 0
and rangeCounter >= maxRanges:
49 log.info(
"{0} ranges were processed - exiting the postprocessing".
format(rangeCounter))
52 rangeObj = timeRange.ReadObj()
53 if not rangeObj.IsA().InheritsFrom(ROOT.TDirectory.Class()):
continue
55 walltime =
getWalltime(inputFile, timeRange.GetName())
57 for table
in rangeObj.GetListOfKeys():
58 tableObj = table.ReadObj()
59 if not tableObj.IsA().InheritsFrom(ROOT.TDirectory.Class()):
continue
60 log.info(
"Processing Table %s", table.GetName())
63 className = table.GetName() +
"_TableConstructor"
64 exec(
"from TrigCostAnalysis." + className +
" import " + className)
65 t = eval(className +
"(tableObj, underflowThreshold, overflowThreshold)")
67 if table.GetName() ==
"Chain_HLT" or table.GetName() ==
"Chain_Algorithm_HLT":
70 if table.GetName() ==
"Global_HLT":
73 if table.GetName() ==
"Algorithm_HLT":
74 t.dumpSummary = dumpSummary
76 fileName =
getFileName(table.GetName(), timeRange.GetName())
79 t.fillTable(histPrefix)
80 t.normalizeColumns(walltime)
81 t.saveToFile(fileName)
83 processingWarnings += t.getWarningMsgs()
86 log.error(
"Processing of table {0} failed!".
format(table.GetName()))
88 except (NameError, ImportError):
89 log.warning(
"Class {0} not defined - directory {1} will not be processed"
90 .
format(table.GetName()+
"_TableConstructor", table.GetName()))
93 log.debug(
"Range {0} was processed".
format(timeRange.GetName()))
97 summary[
"Summary"] += [
"Underflow threshold: {0}".
format(underflowThreshold),
"Overflow threshold: {0}".
format(overflowThreshold)]
98 return processingWarnings + [summary]
◆ getAlgorithmTotalTime()
def python.Util.getAlgorithmTotalTime |
( |
|
inputFile, |
|
|
|
rootName |
|
) |
| |
@brief Extract total time [s] of algorithms from histogram
@param[in] inputFile ROOT TFile to look for histogram
@param[in] rootName Name of the root directory to search for tables
@return total execution time [s] value if found else 0
Definition at line 124 of file TrigCostAnalysis/python/Util.py.
125 ''' @brief Extract total time [s] of algorithms from histogram
127 @param[in] inputFile ROOT TFile to look for histogram
128 @param[in] rootName Name of the root directory to search for tables
130 @return total execution time [s] value if found else 0
134 alg = inputFile.Get(rootName).
Get(
"Global_HLT").
Get(
"Total")
135 hist = alg.Get(rootName +
"_Global_HLT_Total_AlgTime_perEvent")
136 for i
in range(1, hist.GetXaxis().GetNbins()):
137 totalTime += hist.GetBinContent(i) * hist.GetXaxis().GetBinCenterLog(i)
139 return totalTime * 1e-3
◆ getFileName()
def python.Util.getFileName |
( |
|
tableName, |
|
|
|
rootName |
|
) |
| |
@brief Get name of file to save the table
@param[in] tableName Table name
@param[in] rootName Name of table's root directory
@return Filename for given table
Definition at line 159 of file TrigCostAnalysis/python/Util.py.
160 '''@brief Get name of file to save the table
162 @param[in] tableName Table name
163 @param[in] rootName Name of table's root directory
165 @return Filename for given table
167 return "Table_" + tableName +
"_" + rootName +
".csv"
◆ getGlobalGroup()
def python.Util.getGlobalGroup |
( |
|
inputFile, |
|
|
|
filter |
|
) |
| |
Definition at line 203 of file RatesAnalysis/python/Util.py.
204 for key
in inputFile.GetListOfKeys():
205 if key.GetName() ==
'All':
206 for subdirKey
in key.ReadObj().GetListOfKeys():
207 if not subdirKey.GetName() ==
"Rate_Group_HLT" :
pass
208 for globalsKey
in subdirKey.ReadObj().GetListOfKeys():
209 if filter
in globalsKey.GetName():
210 for hist
in globalsKey.ReadObj().GetListOfKeys():
211 if hist.GetName() ==
'data':
212 return hist.ReadObj()
◆ getHistogramPrefix()
def python.Util.getHistogramPrefix |
( |
|
tableName, |
|
|
|
rootName |
|
) |
| |
@brief Construct full histogram name
@param[in] tableName Table name
@param[in] rootName Name of table's root directory
@return Histogram prefix for given table
Definition at line 170 of file TrigCostAnalysis/python/Util.py.
171 '''@brief Construct full histogram name
173 @param[in] tableName Table name
174 @param[in] rootName Name of table's root directory
176 @return Histogram prefix for given table
179 return rootName +
'_' + tableName +
'_'
◆ getMetadata()
def python.Util.getMetadata |
( |
|
inputFile | ) |
|
Get metadata for rates.json file
Definition at line 118 of file RatesAnalysis/python/Util.py.
119 '''Get metadata for rates.json file'''
120 metatree = inputFile.Get(
"metadata")
127 metadata[
'runNumber'] = metatree.runNumber
129 metadata[
'targetMu'] = metatree.targetMu
130 metadata[
'targetBunches'] = metatree.targetBunches
131 metadata[
'targetLumi'] = metatree.targetLumi
133 metadata[
'masterKey'] = metatree.masterKey
134 metadata[
'lvl1PrescaleKey'] = metatree.lvl1PrescaleKey
135 metadata[
'hltPrescaleKey'] = metatree.hltPrescaleKey
137 metadata[
'AtlasProject'] =
str(metatree.AtlasProject)
138 metadata[
'AtlasVersion'] =
str(metatree.AtlasVersion)
143 for i
in range(0, metatree.triggers.size()):
144 prescale = metatree.prescales.at(i)
145 expressPrescale = metatree.express.at(i)
147 prescales[metatree.triggers.at(i)] = prescale
if prescale >= -1
else "Multiple"
148 lowers[metatree.triggers.at(i)] =
str(metatree.lowers.at(i))
149 express[metatree.triggers.at(i)] = expressPrescale
if expressPrescale >= -1
else "Multiple"
151 metadata[
'prescales'] = prescales
152 metadata[
'lowers'] = lowers
153 metadata[
'express'] = express
157 for i
in range(0, metatree.hltChainIDGroup.size()):
158 chainid[metatree.hltChainIDGroup.at(i).at(0)] = metatree.hltChainIDGroup.at(i).at(1)
159 chaingroup[metatree.hltChainIDGroup.at(i).at(0)] = metatree.hltChainIDGroup.at(i).at(2)
161 metadata[
'chainID'] = chainid
162 metadata[
'chainGroup'] = chaingroup
165 for i
in range(0, metatree.l1ItemID.size()):
166 itemid[metatree.l1ItemID.at(i).at(0)] = metatree.l1ItemID.at(i).at(1)
168 metadata[
'itemID'] = itemid
171 for bg
in metatree.bunchGroups:
172 bunchGroups.append(bg)
173 metadata[
'bunchGroups'] = bunchGroups
◆ getTableName()
def python.Util.getTableName |
( |
|
name | ) |
|
Definition at line 14 of file RatesAnalysis/python/Util.py.
15 tabName =
"Table_Rate_"
16 if name ==
"HLT" or name ==
"L1":
17 tabName +=
"Chain" + name
21 tabName +=
"_HLT_All.csv"
◆ getWalltime()
def python.Util.getWalltime |
( |
|
inputFile, |
|
|
|
rootName |
|
) |
| |
@brief Extract walltime value from histogram
@param[in] inputFile ROOT TFile to look for histogram
@param[in] rootName Name of the root directory to search for tables
@return walltime value if found else 0 and an error
Definition at line 101 of file TrigCostAnalysis/python/Util.py.
102 ''' @brief Extract walltime value from histogram
104 @param[in] inputFile ROOT TFile to look for histogram
105 @param[in] rootName Name of the root directory to search for tables
107 @return walltime value if found else 0 and an error
110 dirObj = inputFile.Get(rootName)
111 if not dirObj.IsA().InheritsFrom(ROOT.TDirectory.Class()):
return 0
112 for hist
in dirObj.GetListOfKeys():
113 if '_walltime' in hist.GetName():
114 histObj = hist.ReadObj()
115 if histObj.IsA().InheritsFrom(ROOT.TProfile.Class()):
116 return histObj.Integral()
118 return histObj.GetBinContent(1)
120 log.error(
"Walltime not found")
◆ populateTriggers()
def python.Util.populateTriggers |
( |
|
inputFile, |
|
|
|
metadata, |
|
|
|
globalGroup, |
|
|
|
filter |
|
) |
| |
Definition at line 178 of file RatesAnalysis/python/Util.py.
180 def getTriggerName(name, filter):
181 if "Group" in filter
and "GLOBAL" not in name:
182 return name.replace(
'_',
':', 1)
186 from .RatesTrigger
import RatesTrigger
188 for key
in inputFile.GetListOfKeys():
189 if key.GetName() ==
'All':
190 for subdirKey
in key.ReadObj().GetListOfKeys():
191 if filter
not in subdirKey.GetName():
continue
192 for triggerKey
in subdirKey.ReadObj().GetListOfKeys():
193 for hist
in triggerKey.ReadObj().GetListOfKeys():
194 if hist.GetName() ==
'data':
196 triggerList.append(
RatesTrigger(getTriggerName(triggerKey.GetName(), filter), metadata, hist.ReadObj(), globalGroup))
198 log.error(
"Cannot create a new trigger for {0}".
format(triggerKey.GetName()))
◆ readDBFromAMI()
def python.Util.readDBFromAMI |
( |
|
amiTag | ) |
|
Read used database based on AMI tag
Definition at line 215 of file RatesAnalysis/python/Util.py.
216 ''' Read used database based on AMI tag '''
219 import pyAMI.atlas.api
as AtlasAPI
220 except ModuleNotFoundError:
221 log.warning(
"Unable to import AMIClient from pyAMI. Maybe you didn't do localSetupPyAMI?")
224 amiclient = pyAMI.client.Client(
'atlas')
227 command = [
'AMIGetAMITagInfo',
'-amiTag="%s"' % amiTag,
'-cached' ]
228 amiTagInfo = amiclient.execute(command, format =
'dict_object').get_rows(
'amiTagInfo')[0]
230 return amiTagInfo[
'DBserver']
if "DBserver" in amiTagInfo
else None
◆ toCSV()
def python.Util.toCSV |
( |
|
fileName, |
|
|
|
metadata, |
|
|
|
HLTTriggers, |
|
|
|
readL1 = False |
|
) |
| |
Definition at line 26 of file RatesAnalysis/python/Util.py.
26 def toCSV(fileName, metadata, HLTTriggers, readL1=False):
29 with open(fileName, mode=
'w')
as outputCSV_file:
30 rates_csv_writer = csv.writer(outputCSV_file, delimiter=
',', quotechar=
'"', quoting=csv.QUOTE_MINIMAL)
32 rates_csv_writer.writerow([
'Name',
'Active Time [s]',
'Group',
'Weighted PS Rate [Hz]',
'Weighted PS Rate Err [Hz]', \
33 'Unique Rate [Hz]',
'Unique Rate Err [Hz]',
'Express Rate [Hz]',
'Express Rate Err [Hz]',
'Prescale',
'Express Prescale',
'ID', \
34 'Raw Active Events',
'Raw Pass Events',
'Active Events',
'Input Rate [Hz]',
'Pass Fraction after PS [%]',
'Pass Weighted PS'])
35 rates_csv_writer.writerow([
'Trigger name',
'Integrated length of all lumi blocks which contributed events to this rates prediction.',
'The group this chain belongs to.',
'Rate after applying all prescale(s) as weights.',
'Error on rate after applying all prescale(s) as weights',
'Total rate without this chain rate',
'Error on unique rate',
'Express stream rate',
'Error on express rate',
'The prescale of this chain. Only displayed for simple combinations.',
'The prescale of the chain including express prescale',
'The CPTID or HLT Chain ID',
'Raw underlying statistics on the number events processed for this chain.',
'Raw underlying statistics on the number events passed by this chain.',
'Number of events in which the chain - or at least one chain in the combination - was executed.',
'Input rate to this chain or combination of chains. At L1 this will be the collision frequency for the bunch pattern.',
'Fraction of events which pass this trigger after prescale.',
'Number of events this chain or combination passed after applying prescales as weighting factors.'])
37 for trig
in HLTTriggers:
39 group_name = chain_id =
""
40 if "ChainL1" in fileName:
42 chain_id = metadata[
"itemID"].
get(trig.name)
43 elif "ChainHLT" in fileName:
44 group_name = metadata[
"chainGroup"].
get(trig.name)
45 chain_id = metadata[
"chainID"].
get(trig.name)
46 elif "Group" in fileName:
48 group_name =
"All" if "GLOBAL" in trig.name
else group_name
50 if float(trig.rateDenominator)==0:
51 print(
"float(trig.rateDenominator) is ZERO! This shouldn't happen")
52 if float(trig.activeWeighted)==0:
55 passFrac_afterPS=100*
float(trig.passWeighted)/
float(trig.activeWeighted)
57 isL1 = trig.name.startswith(
"L1_")
58 rates_csv_writer.writerow([trig.name,
"%.4f" % trig.rateDenominator,group_name,
"%.4f" % trig.rate,
"%.4f" % trig.rateErr, \
59 "%.4f" % trig.rateUnique,
"%.4f" % trig.rateUniqueErr, (
"%.4f" % trig.rateExpress
if not isL1
else "-"), (
"%.4f" % trig.rateExpressErr
if not isL1
else "-"), \
60 trig.prescale, (trig.expressPrescale
if not isL1
else "-"), chain_id,
"%.0f" % trig.activeRaw,
"%.0f" % trig.passRaw,
"%.4f" % trig.activeWeighted, \
61 "%.4f" % (
float(trig.activeWeighted)/
float(trig.rateDenominator)),
"%.4f" % passFrac_afterPS,
"%.4f" % trig.passWeighted])
◆ toJson()
def python.Util.toJson |
( |
|
fileName, |
|
|
|
metadata, |
|
|
|
L1Triggers, |
|
|
|
HLTTriggers |
|
) |
| |
Definition at line 65 of file RatesAnalysis/python/Util.py.
65 def toJson(fileName, metadata, L1Triggers, HLTTriggers):
68 for trig
in L1Triggers:
71 for trig
in HLTTriggers:
79 jsonDict[
'PredictionLumi'] = metadata[
'targetLumi']
80 jsonDict[
'n_evts'] = metadata[
'n_evts']
81 jsonDict[
'AtlasProject'] = metadata[
'AtlasProject']
82 jsonDict[
'AtlasVersion'] = metadata[
'AtlasVersion']
83 jsonDict[
'triggerMenuSetup'] = metadata[
'masterKey']
84 jsonDict[
'L1PrescaleSet'] = metadata[
'lvl1PrescaleKey']
85 jsonDict[
'HLTPrescaleSet'] = metadata[
'hltPrescaleKey']
86 jsonDict[
'bunchgroup'] = metadata[
'bunchGroups']
87 jsonDict[
'level'] = level
89 with open(fileName,
'w')
as outFile:
90 json.dump(obj=jsonDict, fp=outFile, indent=2, sort_keys=
True)
94 {
'PredictionLumi' : metadata[
'targetLumi']},
95 {
'TargetMu' : metadata[
'targetMu']},
96 {
'RunNumber' : metadata[
'runNumber']},
97 {
'NEvents' : metadata[
'n_evts']},
98 {
'Details' : metadata[
'details']},
99 {
'JIRA' : metadata[
'JIRA']},
100 {
'AMITag' : metadata[
'amiTag']},
101 {
'SMK' : metadata[
'masterKey']},
102 {
'DB' :
readDBFromAMI(metadata[
'amiTag'])
if metadata[
'amiTag']
else None},
103 {
'LVL1PSK' : metadata[
'lvl1PrescaleKey']},
104 {
'HLTPSK' : metadata[
'hltPrescaleKey']},
105 {
'AtlasProject' : metadata[
'AtlasProject']},
106 {
'AtlasVersion' : metadata[
'AtlasVersion']}
111 metajsonDict[
'text'] =
'metadata'
112 metajsonDict[
'children'] = metajsonData
114 with open(
'metadata.json',
'w')
as outMetaFile:
115 json.dump(obj=metajsonDict, fp=outMetaFile, indent=2, sort_keys=
True)
◆ log
def toCSV(fileName, metadata, HLTTriggers, readL1=False)
def exploreTree(inputFile, dumpSummary=False, underflowThreshold=0.1, overflowThreshold=0.1, maxRanges=5, skipRanges=-1)
def getGlobalGroup(inputFile, filter)
def getFileName(tableName, rootName)
def readDBFromAMI(amiTag)
Used to calculate the rate for a single trigger at L1 or the HLT.
def getAlgorithmTotalTime(inputFile, rootName)
T * Get(TFile &f, const std::string &n, const std::string &dir="", const chainmap_t *chainmap=0, std::vector< std::string > *saved=0)
get a histogram given a path, and an optional initial directory if histogram is not found,...
def getHistogramPrefix(tableName, rootName)
def getMetadata(inputFile)
void print(char *figname, TCanvas *c1)
def toJson(fileName, metadata, L1Triggers, HLTTriggers)
T * get(TKey *tobj)
get a TObject* from a TKey* (why can't a TObject be a TKey?)
def populateTriggers(inputFile, metadata, globalGroup, filter)
def getWalltime(inputFile, rootName)