'''
@brief Utility functions used by RatesPostProcessing
'''

from AthenaCommon.Logging import logging
log = logging.getLogger('RatesPostProcessing')
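
# This module provides the post-processing helpers for the rates analysis output:
# CSV export (toCSV), JSON export (toJson), extraction of the metadata tree stored
# in the input ROOT file, and a lookup of the database recorded for an AMI tag
# (readDBFromAMI).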


# e.g. getTableName("HLT") yields "Table_Rate_ChainHLT_HLT_All.csv", matching the
# "ChainHLT" filename pattern tested in toCSV() below.
def getTableName(name):  # enclosing signature and final return assumed from the body
  tabName = "Table_Rate_"
  if name == "HLT" or name == "L1":
    tabName += "Chain" + name

  tabName += "_HLT_All.csv"
  return tabName


def toCSV(fileName, metadata, HLTTriggers, readL1=False):
  import csv

  with open(fileName, mode='w') as outputCSV_file:
    rates_csv_writer = csv.writer(outputCSV_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
    rates_csv_writer.writerow(['Name', 'Active Time [s]', 'Group', 'Weighted PS Rate [Hz]', 'Weighted PS Rate Err [Hz]',
                               'Unique Rate [Hz]', 'Unique Rate Err [Hz]', 'Express Rate [Hz]', 'Express Rate Err [Hz]',
                               'Prescale', 'Express Prescale', 'ID',
                               'Raw Active Events', 'Raw Pass Events', 'Active Events', 'Input Rate [Hz]',
                               'Pass Fraction after PS [%]', 'Pass Weighted PS'])
    rates_csv_writer.writerow(['Trigger name',
                               'Integrated length of all lumi blocks which contributed events to this rates prediction.',
                               'The group this chain belongs to.',
                               'Rate after applying all prescale(s) as weights.',
                               'Error on rate after applying all prescale(s) as weights.',
                               'Rate unique to this chain: the total rate minus the rate without this chain.',
                               'Error on unique rate.',
                               'Express stream rate.',
                               'Error on express rate.',
                               'The prescale of this chain. Only displayed for simple combinations.',
                               'The prescale of the chain including the express prescale.',
                               'The CTPID or HLT Chain ID.',
                               'Raw underlying statistics on the number of events processed for this chain.',
                               'Raw underlying statistics on the number of events passed by this chain.',
                               'Number of events in which the chain - or at least one chain in the combination - was executed.',
                               'Input rate to this chain or combination of chains. At L1 this will be the collision frequency for the bunch pattern.',
                               'Fraction of events which pass this trigger after prescale.',
                               'Number of events this chain or combination passed after applying prescales as weighting factors.'])

    # Each trigger object is expected to expose the rate quantities written out below
    # (rate, rateErr, rateUnique, prescale, activeWeighted, ...), typically the
    # RatesTrigger objects built further down in this module.
    for trig in HLTTriggers:

      group_name = chain_id = ""
      if "ChainL1" in fileName:
        chain_id = metadata["itemID"].get(trig.name)
      elif "ChainHLT" in fileName:
        group_name = metadata["chainGroup"].get(trig.name)
        chain_id = metadata["chainID"].get(trig.name)
      elif "Group" in fileName:
        group_name = "All" if "GLOBAL" in trig.name else group_name

      if float(trig.rateDenominator) == 0:
        print("float(trig.rateDenominator) is ZERO! This shouldn't happen")
      if float(trig.activeWeighted) == 0:
        passFrac_afterPS = 0  # fallback value assumed when no weighted active events were recorded
      else:
        passFrac_afterPS = 100 * float(trig.passWeighted) / float(trig.activeWeighted)

      isL1 = trig.name.startswith("L1_")

      rates_csv_writer.writerow([trig.name, "%.4f" % trig.rateDenominator, group_name,
                                 "%.4f" % trig.rate, "%.4f" % trig.rateErr,
                                 "%.4f" % trig.rateUnique, "%.4f" % trig.rateUniqueErr,
                                 ("%.4f" % trig.rateExpress if not isL1 else "-"),
                                 ("%.4f" % trig.rateExpressErr if not isL1 else "-"),
                                 trig.prescale, (trig.expressPrescale if not isL1 else "-"), chain_id,
                                 "%.0f" % trig.activeRaw, "%.0f" % trig.passRaw, "%.4f" % trig.activeWeighted,
                                 "%.4f" % (float(trig.activeWeighted) / float(trig.rateDenominator)),
                                 "%.4f" % passFrac_afterPS, "%.4f" % trig.passWeighted])


def toJson(fileName, metadata, L1Triggers, HLTTriggers):
  import json

  # Per-trigger export and the assembly of the 'level' structure used below are
  # not shown here.
  for trig in L1Triggers:
    ...
  for trig in HLTTriggers:
    ...

  jsonDict = {}
  jsonDict['PredictionLumi'] = metadata['targetLumi']
  jsonDict['n_evts'] = metadata['n_evts']
  jsonDict['AtlasProject'] = metadata['AtlasProject']
  jsonDict['AtlasVersion'] = metadata['AtlasVersion']
  jsonDict['triggerMenuSetup'] = metadata['masterKey']
  jsonDict['L1PrescaleSet'] = metadata['lvl1PrescaleKey']
  jsonDict['HLTPrescaleSet'] = metadata['hltPrescaleKey']
  jsonDict['bunchgroup'] = metadata['bunchGroups']
  jsonDict['level'] = level

  with open(fileName, 'w') as outFile:
    json.dump(obj=jsonDict, fp=outFile, indent=2, sort_keys=True)
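
  # Alongside the rates file written above, a companion metadata summary is assembled
  # below and written to 'metadata.json'.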
  metajsonData = [
    {'PredictionLumi' : metadata['targetLumi']},
    {'TargetMu' : metadata['targetMu']},
    {'RunNumber' : metadata['runNumber']},
    {'NEvents' : metadata['n_evts']},
    {'Details' : metadata['details']},
    {'JIRA' : metadata['JIRA']},
    {'AMITag' : metadata['amiTag']},
    {'SMK' : metadata['masterKey']},
    {'DB' : readDBFromAMI(metadata['amiTag']) if metadata['amiTag'] else None},
    {'LVL1PSK' : metadata['lvl1PrescaleKey']},
    {'HLTPSK' : metadata['hltPrescaleKey']},
    {'AtlasProject' : metadata['AtlasProject']},
    {'AtlasVersion' : metadata['AtlasVersion']}
  ]

  metajsonDict = {}
  metajsonDict['text'] = 'metadata'
  metajsonDict['children'] = metajsonData

  with open('metadata.json', 'w') as outMetaFile:
    json.dump(obj=metajsonDict, fp=outMetaFile, indent=2, sort_keys=True)
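
# toJson() thus writes two files: the rates file requested by the caller, whose top-level
# keys are PredictionLumi, n_evts, AtlasProject, AtlasVersion, triggerMenuSetup,
# L1PrescaleSet, HLTPrescaleSet, bunchgroup and level, and a fixed 'metadata.json'
# holding run and sample bookkeeping (prediction luminosity, target mu, run number,
# AMI tag, prescale keys, ...) as a {'text': 'metadata', 'children': [...]} tree.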


def getMetadata(inputFile):  # enclosing signature assumed from the docstring and usage
  '''Get metadata for rates.json file'''
  metatree = inputFile.Get("metadata")

  metadata = {}
  metadata['runNumber'] = metatree.runNumber

  metadata['targetMu'] = metatree.targetMu
  metadata['targetBunches'] = metatree.targetBunches
  metadata['targetLumi'] = metatree.targetLumi

  metadata['masterKey'] = metatree.masterKey
  metadata['lvl1PrescaleKey'] = metatree.lvl1PrescaleKey
  metadata['hltPrescaleKey'] = metatree.hltPrescaleKey

  metadata['AtlasProject'] = str(metatree.AtlasProject)
  metadata['AtlasVersion'] = str(metatree.AtlasVersion)

  prescales = {}
  lowers = {}
  express = {}
  for i in range(0, metatree.triggers.size()):
    prescale = metatree.prescales.at(i)
    expressPrescale = metatree.express.at(i)

    prescales[metatree.triggers.at(i)] = prescale if prescale >= -1 else "Multiple"
    lowers[metatree.triggers.at(i)] = str(metatree.lowers.at(i))
    express[metatree.triggers.at(i)] = expressPrescale if expressPrescale >= -1 else "Multiple"

  metadata['prescales'] = prescales
  metadata['lowers'] = lowers
  metadata['express'] = express

  chainid = {}
  chaingroup = {}
  for i in range(0, metatree.hltChainIDGroup.size()):
    chainid[metatree.hltChainIDGroup.at(i).at(0)] = metatree.hltChainIDGroup.at(i).at(1)
    chaingroup[metatree.hltChainIDGroup.at(i).at(0)] = metatree.hltChainIDGroup.at(i).at(2)

  metadata['chainID'] = chainid
  metadata['chainGroup'] = chaingroup

  itemid = {}
  for i in range(0, metatree.l1ItemID.size()):
    itemid[metatree.l1ItemID.at(i).at(0)] = metatree.l1ItemID.at(i).at(1)

  metadata['itemID'] = itemid

  bunchGroups = []
  for bg in metatree.bunchGroups:
    bunchGroups.append(bg)
  metadata['bunchGroups'] = bunchGroups

  return metadata  # (final return assumed)
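
# The metadata dictionary assembled above carries the run conditions (runNumber,
# targetMu, targetBunches, targetLumi), the menu and prescale keys (masterKey,
# lvl1PrescaleKey, hltPrescaleKey), the release (AtlasProject, AtlasVersion), the
# per-trigger lookup tables (prescales, lowers, express, chainID, chainGroup, itemID)
# and the bunch groups; toCSV() and toJson() above look entries up by these keys.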


def getTriggerName(name, filter):
  if "Group" in filter and "GLOBAL" not in name:
    return name.replace('_', ':', 1)
  else:
    return name  # (fallback branch assumed)
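
# Example (with a hypothetical histogram name): getTriggerName('RATE_SingleMuon', 'Group')
# returns 'RATE:SingleMuon', while plain chain names and GLOBAL entries come back unchanged.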


def populateTriggers(inputFile, metadata, globalGroup, filter):  # enclosing name and signature assumed
  from .RatesTrigger import RatesTrigger

  triggerList = []
  for key in inputFile.GetListOfKeys():
    if key.GetName() == 'All':
      for subdirKey in key.ReadObj().GetListOfKeys():
        if filter not in subdirKey.GetName(): continue
        for triggerKey in subdirKey.ReadObj().GetListOfKeys():
          for hist in triggerKey.ReadObj().GetListOfKeys():
            if hist.GetName() == 'data':
              try:  # try/except structure and exception type assumed
                triggerList.append(RatesTrigger(getTriggerName(triggerKey.GetName(), filter), metadata, hist.ReadObj(), globalGroup))
              except Exception:
                log.error("Cannot create a new trigger for {0}".format(triggerKey.GetName()))
  return triggerList
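
# The traversal above mirrors the layout of the rates output file: an 'All' directory
# containing one subdirectory per rate type (matched against 'filter'), each holding
# per-trigger directories whose 'data' object is the histogram handed to RatesTrigger.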


def getGlobalGroup(inputFile, filter):  # enclosing name and signature assumed
  for key in inputFile.GetListOfKeys():
    if key.GetName() == 'All':
      for subdirKey in key.ReadObj().GetListOfKeys():
        # As written this check has no effect ('pass' rather than 'continue')
        if not subdirKey.GetName() == "Rate_Group_HLT": pass
        for globalsKey in subdirKey.ReadObj().GetListOfKeys():
          if filter in globalsKey.GetName():
            for hist in globalsKey.ReadObj().GetListOfKeys():
              if hist.GetName() == 'data':
                return hist.ReadObj()


def readDBFromAMI(amiTag):
  ''' Read used database based on AMI tag '''
  try:
    import pyAMI.client
    import pyAMI.atlas.api as AtlasAPI
  except ModuleNotFoundError:
    log.warning("Unable to import AMIClient from pyAMI. Maybe you didn't do localSetupPyAMI?")
    return None

  amiclient = pyAMI.client.Client('atlas')

  command = ['AMIGetAMITagInfo', '-amiTag="%s"' % amiTag, '-cached']
  amiTagInfo = amiclient.execute(command, format='dict_object').get_rows('amiTagInfo')[0]

  return amiTagInfo['DBserver'] if "DBserver" in amiTagInfo else None
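
# Minimal usage sketch (the tag string below is a placeholder, and a working pyAMI
# setup is assumed):
#
#   dbServer = readDBFromAMI('x123')
#   if dbServer is None:
#       log.info("No DBserver information available for this AMI tag")
#
# The helper returns the 'DBserver' field of the AMI tag info when it is present,
# and None when pyAMI cannot be imported or the field is missing.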