8 @brief Utility functions used by RatesPostProcessing
11 from AthenaCommon.Logging
import logging
12 log = logging.getLogger(
'RatesPostProcessing')
# NOTE(review): fragment of a table-name builder — the enclosing `def` and at
# least one branch are missing from this extraction, and the original file's
# line numbers are fused into the text. It appears to build names like
# "Table_Rate_ChainHLT_HLT_All.csv" / "Table_Rate_ChainL1_HLT_All.csv" from a
# level name ("HLT" or "L1") — confirm against the full source.
15 tabName =
"Table_Rate_"
16 if name ==
"HLT" or name ==
"L1":
17 tabName +=
"Chain" + name
21 tabName +=
"_HLT_All.csv"
def toCSV(fileName, metadata, HLTTriggers, readL1=False):
    """Write one rates table (one row per trigger/group) to a CSV file.

    Parameters
    ----------
    fileName : str
        Output path. The table type is encoded in the name ("ChainL1",
        "ChainHLT", "Group") and selects which metadata lookups fill the
        Group/ID columns.
    metadata : dict
        Rates metadata; may provide "itemID", "chainID" and "chainGroup"
        dictionaries keyed by trigger name.
    HLTTriggers : iterable
        Trigger objects exposing the per-row rate/statistics attributes
        (rate, rateErr, rateUnique, activeWeighted, ...).
    readL1 : bool
        Unused here; kept for backward compatibility with existing callers.
    """
    import csv  # local import keeps the function self-contained
    with open(fileName, mode='w') as outputCSV_file:
        rates_csv_writer = csv.writer(outputCSV_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
        # Column headers, followed by a second row of human-readable descriptions.
        rates_csv_writer.writerow(['Name', 'Active Time [s]', 'Group', 'Weighted PS Rate [Hz]', 'Weighted PS Rate Err [Hz]',
                                   'Unique Rate [Hz]', 'Unique Rate Err [Hz]', 'Express Rate [Hz]', 'Express Rate Err [Hz]',
                                   'Prescale', 'Express Prescale', 'ID',
                                   'Raw Active Events', 'Raw Pass Events', 'Active Events', 'Input Rate [Hz]',
                                   'Pass Fraction after PS [%]', 'Pass Weighted PS'])
        rates_csv_writer.writerow(['Trigger name',
                                   'Integrated length of all lumi blocks which contributed events to this rates prediction.',
                                   'The group this chain belongs to.',
                                   'Rate after applying all prescale(s) as weights.',
                                   'Error on rate after applying all prescale(s) as weights',
                                   'Total rate without this chain rate',
                                   'Error on unique rate',
                                   'Express stream rate',
                                   'Error on express rate',
                                   'The prescale of this chain. Only displayed for simple combinations.',
                                   'The prescale of the chain including express prescale',
                                   'The CPTID or HLT Chain ID',
                                   'Raw underlying statistics on the number events processed for this chain.',
                                   'Raw underlying statistics on the number events passed by this chain.',
                                   'Number of events in which the chain - or at least one chain in the combination - was executed.',
                                   'Input rate to this chain or combination of chains. At L1 this will be the collision frequency for the bunch pattern.',
                                   'Fraction of events which pass this trigger after prescale.',
                                   'Number of events this chain or combination passed after applying prescales as weighting factors.'])
        for trig in HLTTriggers:
            group_name = chain_id = ""
            # The table type (encoded in the file name) decides which metadata
            # lookups provide the Group and ID columns.
            if "ChainL1" in fileName:
                chain_id = metadata["itemID"].get(trig.name)
            elif "ChainHLT" in fileName:
                group_name = metadata["chainGroup"].get(trig.name)
                chain_id = metadata["chainID"].get(trig.name)
            elif "Group" in fileName:
                group_name = "All" if "GLOBAL" in trig.name else group_name

            if float(trig.rateDenominator) == 0:
                print("float(trig.rateDenominator) is ZERO! This shouldn't happen")
            # Fix: guard both divisions. A zero active-weight or zero
            # denominator previously led to a ZeroDivisionError which aborted
            # the whole table; emit 0 for the affected columns instead.
            if float(trig.activeWeighted) == 0:
                passFrac_afterPS = 0
            else:
                passFrac_afterPS = 100 * float(trig.passWeighted) / float(trig.activeWeighted)
            if float(trig.rateDenominator) == 0:
                inputRate = 0
            else:
                inputRate = float(trig.activeWeighted) / float(trig.rateDenominator)

            # Express-stream columns only apply to HLT chains; print "-" for L1.
            isL1 = trig.name.startswith("L1_")
            rates_csv_writer.writerow([trig.name, "%.4f" % trig.rateDenominator, group_name,
                                       "%.4f" % trig.rate, "%.4f" % trig.rateErr,
                                       "%.4f" % trig.rateUnique, "%.4f" % trig.rateUniqueErr,
                                       ("%.4f" % trig.rateExpress if not isL1 else "-"),
                                       ("%.4f" % trig.rateExpressErr if not isL1 else "-"),
                                       trig.prescale, (trig.expressPrescale if not isL1 else "-"), chain_id,
                                       "%.0f" % trig.activeRaw, "%.0f" % trig.passRaw, "%.4f" % trig.activeWeighted,
                                       "%.4f" % inputRate, "%.4f" % passFrac_afterPS, "%.4f" % trig.passWeighted])
# NOTE(review): lossy extraction of toJson — the initialisation of `jsonDict`,
# the bodies of the per-trigger export loops, and the branch filling the
# "n_evts*" entries are missing here (the original file's line numbers are
# fused into the text). Only comments are added; the surviving code is left
# byte-identical. Restore from the full source before editing.
65 def toJson(fileName, metadata, L1Triggers, HLTTriggers):
# Per-trigger export loops (bodies not visible in this extraction).
68 for trig
in L1Triggers:
71 for trig
in HLTTriggers:
# Copy bookkeeping/configuration metadata into the output dictionary.
79 jsonDict[
'PredictionLumi'] = metadata[
'targetLumi']
80 for k,v
in metadata.items():
81 if k.startswith(
"n_evts"):
83 jsonDict[
'AtlasProject'] = metadata[
'AtlasProject']
84 jsonDict[
'AtlasVersion'] = metadata[
'AtlasVersion']
85 jsonDict[
'triggerMenuSetup'] = metadata[
'masterKey']
86 jsonDict[
'L1PrescaleSet'] = metadata[
'lvl1PrescaleKey']
87 jsonDict[
'HLTPrescaleSet'] = metadata[
'hltPrescaleKey']
88 jsonDict[
'bunchgroup'] = metadata[
'bunchGroups']
# NOTE(review): `level` is not defined in the visible code — presumably set in
# one of the missing lines; confirm against the full source.
89 jsonDict[
'level'] = level
# Serialise the assembled dictionary to the requested file.
91 with open(fileName,
'w')
as outFile:
92 json.dump(obj=jsonDict, fp=outFile, indent=2, sort_keys=
True)
# NOTE(review): fragment of a metadata-JSON writer — the enclosing `def` and
# the opening of the list literal these dicts belong to (presumably
# `metajsonData = [ ... ]`) are missing from this extraction. Only comments
# are added; the surviving code is left byte-identical.
96 {
'PredictionLumi' : metadata[
'targetLumi']},
97 {
'TargetMu' : metadata[
'targetMu']},
98 {
'RunNumber' : metadata[
'runNumber']},
99 {
'Details' : metadata[
'details']},
100 {
'JIRA' : metadata[
'JIRA']},
101 {
'AMITag' : metadata[
'amiTag']},
102 {
'SMK' : metadata[
'masterKey']},
# Database alias is resolved from the AMI tag only when a tag is present.
103 {
'DB' :
readDBFromAMI(metadata[
'amiTag'])
if metadata[
'amiTag']
else None},
104 {
'LVL1PSK' : metadata[
'lvl1PrescaleKey']},
105 {
'HLTPSK' : metadata[
'hltPrescaleKey']},
106 {
'AtlasProject' : metadata[
'AtlasProject']},
107 {
'AtlasVersion' : metadata[
'AtlasVersion']}
# Append one entry per "n_evts*" metadata key.
109 for k,v
in metadata.items():
110 if k.startswith(
"n_evts"):
# NOTE(review): `metajsonData += {…}` looks wrong if metajsonData is a list —
# `+=` with a dict iterates its keys. Likely intended `+= [{…}]`; verify
# against the full source (cannot be changed in a comments-only pass).
111 metajsonData+={jsonDict[k] : v}
115 metajsonDict[
'text'] =
'metadata'
116 metajsonDict[
'children'] = metajsonData
# Write the wrapper dict to a fixed 'metadata.json' in the working directory.
118 with open(
'metadata.json',
'w')
as outMetaFile:
119 json.dump(obj=metajsonDict, fp=outMetaFile, indent=2, sort_keys=
True)
# NOTE(review): fragment of a ROOT-output writer — the enclosing `def` and the
# initialisation of `mydict` are missing from this extraction. Each trigger
# exports itself into `mydict`; each entry's 'rate' object is then written to
# a freshly recreated ROOT file. Only comments are added below.
124 for trigger
in triggers:
125 trigger.export(mydict)
# Local import of ROOT keeps the dependency out of module import time.
126 from ROOT
import TFile
127 with TFile.Open(fileName,
'RECREATE')
as fout:
128 for key, scanDict
in mydict.items():
129 fout.WriteObject(scanDict[
'rate'], f
"{key}_rate")
# NOTE(review): fragment of the metadata reader — the enclosing `def`, the
# `metadata`/`prescales`/`lowers`/`express`/`chainid`/`chaingroup`/`itemid`
# dict initialisations and several lines are missing from this extraction.
# It reads the "metadata" TTree of the rates ROOT file into a plain dict.
# Only comments are added; the surviving code is left byte-identical.
133 '''Get metadata for rates.json file'''
134 metatree = inputFile.Get(
"metadata")
# Scalar run/prediction parameters copied straight off the tree.
141 metadata[
'runNumber'] = metatree.runNumber
143 metadata[
'targetMu'] = metatree.targetMu
144 metadata[
'targetBunches'] = metatree.targetBunches
145 metadata[
'targetLumi'] = metatree.targetLumi
147 metadata[
'masterKey'] = metatree.masterKey
148 metadata[
'lvl1PrescaleKey'] = metatree.lvl1PrescaleKey
149 metadata[
'hltPrescaleKey'] = metatree.hltPrescaleKey
151 metadata[
'AtlasProject'] =
str(metatree.AtlasProject)
152 metadata[
'AtlasVersion'] =
str(metatree.AtlasVersion)
154 metadata[
'bunchCrossingRate'] = metatree.bunchCrossingRate
156 metadata[
'multiSliceDiJet'] = metatree.multiSliceDiJet
# Per-trigger prescale / lower-item / express maps, indexed by trigger name.
161 for i
in range(0, metatree.triggers.size()):
162 prescale = metatree.prescales.at(i)
163 expressPrescale = metatree.express.at(i)
# NOTE(review): values below -1 appear to be a sentinel meaning the prescale
# varied ("Multiple") — confirm against the tree-filling code.
165 prescales[metatree.triggers.at(i)] = prescale
if prescale >= -1
else "Multiple"
166 lowers[metatree.triggers.at(i)] =
str(metatree.lowers.at(i))
167 express[metatree.triggers.at(i)] = expressPrescale
if expressPrescale >= -1
else "Multiple"
169 metadata[
'prescales'] = prescales
170 metadata[
'lowers'] = lowers
171 metadata[
'express'] = express
# HLT chain ID and group, stored as rows of (name, id, group).
175 for i
in range(0, metatree.hltChainIDGroup.size()):
176 chainid[metatree.hltChainIDGroup.at(i).at(0)] = metatree.hltChainIDGroup.at(i).at(1)
177 chaingroup[metatree.hltChainIDGroup.at(i).at(0)] = metatree.hltChainIDGroup.at(i).at(2)
179 metadata[
'chainID'] = chainid
180 metadata[
'chainGroup'] = chaingroup
# L1 item IDs, stored as rows of (name, id).
183 for i
in range(0, metatree.l1ItemID.size()):
184 itemid[metatree.l1ItemID.at(i).at(0)] = metatree.l1ItemID.at(i).at(1)
186 metadata[
'itemID'] = itemid
# Copy the bunch-group sizes out of the ROOT vector into a Python list.
189 for bg
in metatree.bunchGroups:
190 bunchGroups.append(bg)
191 metadata[
'bunchGroups'] = bunchGroups
def getTriggerName(name, filter):
    """Map a stored histogram/key name back to the trigger name used in metadata.

    Group rates are stored with ':' replaced by '_' (ROOT key names cannot
    contain ':'), so for group tables the first underscore is converted back,
    e.g. "RATE_SingleMuon" -> "RATE:SingleMuon". GLOBAL entries and plain
    chain names are returned unchanged.

    NOTE: the parameter name `filter` shadows the builtin; it is kept for
    backward compatibility with existing callers.
    """
    if "Group" in filter and "GLOBAL" not in name:
        return name.replace('_', ':', 1)
    # Fix: without this fallback the function implicitly returned None for
    # chain names and GLOBAL entries; return the name unchanged instead.
    return name
# NOTE(review): fragment of a trigger-population routine — the enclosing `def`
# and the try/except frame around the RatesTrigger construction (the
# log.error at the end is presumably the except branch) are missing from this
# extraction. Only comments are added; the surviving code is left
# byte-identical.
204 from .RatesTrigger
import RatesTrigger
# Walk the 'All' directory of the input ROOT file, keeping only subdirectories
# matching the requested table filter.
206 for key
in inputFile.GetListOfKeys():
207 if key.GetName() ==
'All':
208 for subdirKey
in key.ReadObj().GetListOfKeys():
209 if filter
not in subdirKey.GetName():
continue
210 for triggerKey
in subdirKey.ReadObj().GetListOfKeys():
# One RatesTrigger per (trigger, numerator-suffix) combination.
212 for suffix, data
in numeratorDict.items():
214 triggerList.append(
RatesTrigger(getTriggerName(triggerKey.GetName(), filter)+suffix, metadata, data, globalGroupDict[suffix], suffix))
# Presumably the except branch of a missing try/except — confirm.
216 log.error(
"Cannot create a new trigger for {0}".
format(triggerKey.GetName()))
# NOTE(review): fragment of a histogram-slice collector — the enclosing `def`
# and the `slices_dict` initialisation are missing from this extraction.
# Histograms whose names start with `object_key` are keyed by the "_<token>"
# slice tag taken from the second underscore-separated field of the name.
223 for hist
in directory.GetListOfKeys():
224 if str(hist.GetName()).startswith(object_key):
226 slice_index =
"_"+
str(hist.GetName()).
split(
"_")[1]
229 slices_dict[slice_index] = hist.ReadObj()
# NOTE(review): fragment of the scan-trigger population routine — the
# enclosing `def` and the construction of `numerator_dict` are missing from
# this extraction. Only comments are added; code left byte-identical.
233 from .RatesScanTrigger
import RatesScanTrigger
# Walk the 'ScanTriggers' directory of the input ROOT file.
235 for key
in inputFile.GetListOfKeys():
236 if key.GetName() ==
'ScanTriggers':
237 for scanName
in key.ReadObj().GetListOfKeys():
# Warn on scans with no numerator histograms (missing control flow around
# this check is not visible here — confirm against the full source).
239 if len(numerator_dict) == 0:
240 log.error(f
"Empty dictionary in populateScanTriggers for scan {scanName}")
243 triggerList.append(
RatesScanTrigger(scanName.GetName(), metadata, numerator_dict))
# NOTE(review): fragment of a global-groups collector — the enclosing `def`
# and the body of the final `if` are missing from this extraction. Only
# comments are added; code left byte-identical.
247 for key
in inputFile.GetListOfKeys():
248 if key.GetName() ==
'All':
249 for subdirKey
in key.ReadObj().GetListOfKeys():
# NOTE(review): `pass` here is a no-op — the guard looks like it was meant to
# `continue` past non-"Rate_Group_HLT" directories; as written every
# subdirectory is processed. Verify against the full source (cannot be
# changed in a comments-only pass).
250 if not subdirKey.GetName() ==
"Rate_Group_HLT" :
pass
251 for globalsKey
in subdirKey.ReadObj().GetListOfKeys():
252 if filter
in globalsKey.GetName():
# NOTE(review): fragment of readDBFromAMI — the enclosing `def`, the `try`
# opening (the `except ModuleNotFoundError` below implies one), the
# `import pyAMI.client`, and the early `return None` on import failure are
# missing from this extraction. Only comments are added; code byte-identical.
258 ''' Read used database based on AMI tag '''
# Best-effort import: pyAMI is only available after localSetupPyAMI.
261 import pyAMI.atlas.api
as AtlasAPI
262 except ModuleNotFoundError:
263 log.warning(
"Unable to import AMIClient from pyAMI. Maybe you didn't do localSetupPyAMI?")
266 amiclient = pyAMI.client.Client(
'atlas')
# Query the AMI tag info (cached) and extract the DB server field, if any.
269 command = [
'AMIGetAMITagInfo',
'-amiTag="%s"' % amiTag,
'-cached' ]
270 amiTagInfo = amiclient.execute(command, format =
'dict_object').get_rows(
'amiTagInfo')[0]
272 return amiTagInfo[
'DBserver']
if "DBserver" in amiTagInfo
else None