ATLAS Offline Software
ExtractEBRunDetails.py
#!/usr/bin/env python
#
# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
#

import eformat
from xml.dom import minidom
from collections import OrderedDict
from DQUtils.sugar import RunLumi

from AthenaCommon.Logging import logging
log = logging.getLogger('ExtractRunDetails.py')


# Read the start timestamp of the first lumiblock and the end timestamp of the last lumiblock
def readTimestampsOfLb(runNumber, lbStart=-1, lbEnd=-1):
    '''
    Returns a dictionary with the start and end timestamps of each lumiblock, read from the COOL database
    '''
    from TrigConfStorage.TriggerCoolUtil import TriggerCoolUtil
    dbconn = TriggerCoolUtil.GetConnection("CONDBR2")

    # based on TriggerCoolUtil.getRunStartTime
    f = dbconn.getFolder( "/TRIGGER/LUMI/LBLB" )

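    # COOL IOVs are keyed by (run << 32) + lumiblock, so (runNumber << 32) addresses
    # the first lumiblock of the run and ((runNumber+1) << 32) - 1 the last one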
    since = RunLumi(runNumber, lbStart) if lbStart >= 0 else (runNumber<<32)
    until = RunLumi(runNumber, lbEnd) if lbEnd >= 0 else ((runNumber+1)<<32)-1

    from PyCool import cool
    objs = f.browseObjects(since, until, cool.ChannelSelection(0) )

    lbCounter = lbStart
    timestampDict = OrderedDict()
    while objs.goToNext():
        objCurrRef = objs.currentRef()
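        # StartTime/EndTime are stored in nanoseconds; divide by 1000 to get microseconds,
        # which is the convention used for the timestamps passed to the pbeast queries below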
        timestampDict[lbCounter] = {"start": int(objCurrRef.payload()["StartTime"]/1000), "end": int(objCurrRef.payload()["EndTime"]/1000)}
        lbCounter += 1

    startTime = timestampDict[lbStart]["start"]
    endTime = timestampDict[lbEnd]["end"]

    from time import ctime
    log.info("Read start and end of run {0} from COOL: {1} - {2}".format(runNumber, ctime(startTime/1E6).replace(' ','_'), ctime(endTime/1E6).replace(' ','_')))
    log.debug("Timestamps: {0} - {1} ".format(startTime, endTime))

    return timestampDict


# Read the published deadtime value from IS, based on the start and end timestamps of the run
def readDeadtimeFromIS(startOfRun, endOfRun, server="https://atlasop.cern.ch"):
    try:
        import libpbeastpy
        pbeast = libpbeastpy.ServerProxy(server)
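        # Note: the '[9]' index is escaped in the query string, while the returned
        # data dictionary is keyed by the literal, unescaped object name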
        deadTimeData = pbeast.get_data(
            'ATLAS', 'CtpBusyInfo', 'ctpcore_objects/CtpcoreBusyInfoObject/fraction',
            'L1CT.CTP.Instantaneous.BusyFractions/ctpcore_objects\\[9\\]',
            True, startOfRun, endOfRun)[0].data['L1CT.CTP.Instantaneous.BusyFractions/ctpcore_objects[9]']

        totalDeadtime = 0
        for entry in deadTimeData:
            totalDeadtime += entry.value

        avgDeadtime = totalDeadtime/len(deadTimeData)

        log.debug("Average deadtime per event during this EB run was {0}".format(avgDeadtime))
        return avgDeadtime

    except ImportError:
        log.error("The pbeast python library was not found! Remember to set up the tdaq release")
        return -1
    except RuntimeError:
        log.error("Error when reading from Pbeast! Remember to set up the pbeast server SSO: export PBEAST_SERVER_SSO_SETUP_TYPE=AutoUpdateKerberos")
        return -1


# Read the average physics deadtime per lumiblock for a given item from TRP
def getPhysicsDeadtimePerLB(startOfRun, endOfRun, lbRangesDict, itemName="L1_TAU8--enabled", server="https://atlasop.cern.ch"):
    try:
        import libpbeastpy
        pbeast = libpbeastpy.ServerProxy(server)
        physicsDT = pbeast.get_data('ATLAS', 'L1_Rate', 'DT', 'ISS_TRP.' + itemName, False, startOfRun, endOfRun, 0, True)[0].data['ISS_TRP.' + itemName]

        entryCounter = 1
        deadtimePerLb = {}
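        # Walk through the TRP time series once: bin each sample into the lumiblock whose
        # time window contains its timestamp, skip empty (None) samples, and store the
        # per-lumiblock average; lumiblocks without any samples fall back to 1.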
        for lbRange in lbRangesDict:
            avgDt = 0
            counter = 0
            while entryCounter < len(physicsDT) and physicsDT[entryCounter].ts > lbRangesDict[lbRange]["start"] and physicsDT[entryCounter].ts < lbRangesDict[lbRange]["end"]:
                if type(physicsDT[entryCounter].value) is not float: # None type
                    entryCounter += 1
                    continue
                avgDt += physicsDT[entryCounter].value
                counter += 1
                entryCounter += 1

            deadtimePerLb[lbRange] = avgDt/counter if counter > 0 else 1.

        return deadtimePerLb

    except ImportError:
        log.error("The pbeast python library was not found! Remember to set up the tdaq release")
        return {}
    except RuntimeError:
        log.error("Error when reading from Pbeast! Remember to set up the pbeast server SSO: export PBEAST_SERVER_SSO_SETUP_TYPE=AutoUpdateKerberos")
        return {}


# Create an xml node storing luminosity values
# Compared to Run 2, only the deadtime value was carried over, because only this value is used in further analysis
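# Resulting structure (value is illustrative): <lumivalues><deadtime>0.123</deadtime></lumivalues>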
def createDeadtimeNode(xmlRoot, totalDeadtime):
    lumiValNode = xmlRoot.createElement('lumivalues')

    deadtimeNode = xmlRoot.createElement('deadtime')
    deadtimeNode.appendChild(xmlRoot.createTextNode(str(round(totalDeadtime, 3))))

    lumiValNode.appendChild(deadtimeNode)

    return lumiValNode


# Parse the dictionary of constructed chains and their seeds into an xml tree
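# Note: this helper is not invoked in the __main__ block below. Resulting structure:
#   <filters> <filter> <filter_name>chain</filter_name> <lv1_filter>item</lv1_filter> ... </filter> ... </filters>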
def createFiltersNodes(xmlRoot, chainsDict):
    filters = xmlRoot.createElement('filters')
    for chain in chainsDict:
        f = xmlRoot.createElement('filter')
        fname = xmlRoot.createElement('filter_name')
        fname.appendChild(xmlRoot.createTextNode(chain))
        f.appendChild(fname)

        for item in chainsDict[chain]:
            itemNode = xmlRoot.createElement('lv1_filter')
            itemNode.appendChild(xmlRoot.createTextNode(item))
            f.appendChild(itemNode)

        filters.appendChild(f)

    return filters


# Save lumiblock details (lumi, pileup, number of events) for each lumiblock, read from the COOL database
def createLumiBlockNodes(xmlRoot, runNumber, lbStart, lbEnd, deadTimeData):
    from PyCool import cool
    from TrigConfStorage.TriggerCoolUtil import TriggerCoolUtil
    db = TriggerCoolUtil.GetConnection('CONDBR2')
    folder = db.getFolder("/TRIGGER/LUMI/OnlPrefLumi")
    folderIterator = folder.browseObjects(RunLumi(runNumber, lbStart), RunLumi(runNumber, lbEnd), cool.ChannelSelection())

    # Read number of events for each lumiblock
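    # Event counts come from the SFO (Sub-Farm Output) database, for the physics_EnhancedBias stream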
    from CoolRunQuery.AtlRunQuerySFO import GetSFO_NeventsAll
    from CoolRunQuery.utils.AtlRunQueryUtils import coolDbConn
    cursor = coolDbConn.GetSFODBConnection().cursor()
    nEventsPerLB = dict(GetSFO_NeventsAll(cursor, [runNumber])[runNumber]['physics_EnhancedBias'])

    lbNodeList = xmlRoot.createElement('lb_list')

    i = lbStart
    while folderIterator.goToNext():
        payload = folderIterator.currentRef().payload()
        nEvents = nEventsPerLB[i] if i in nEventsPerLB else 0
        lbNode = createLbNode(xmlRoot, i, payload["LBAvInstLumi"], payload["LBAvEvtsPerBX"], nEvents, deadTimeData[i])
        lbNodeList.appendChild(lbNode)
        log.debug("Lumiblock {0} lumi {1} pileup {2} events {3}".format(i, payload["LBAvInstLumi"], payload["LBAvEvtsPerBX"], nEvents))

        i += 1

    return lbNodeList


# Create a node for a single lumiblock entry
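# e.g. <lb id="100" lumi="20.123" mu="52.3" deadtime="0.05">250000</lb> (values are illustrative)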
def createLbNode(xmlRoot, lbId, lumi, mu, nEvents, avgDeadtime):
    l = xmlRoot.createElement('lb')
    l.setAttribute('id', str(lbId))
    l.setAttribute('lumi', str(round(lumi, 3 if lumi > 1 else 6)))
    l.setAttribute('mu', str(round(mu, 3 if mu > 1 else 6)))
    l.setAttribute('deadtime', str(round(avgDeadtime, 3)))
    l.appendChild(xmlRoot.createTextNode(str(nEvents)))

    return l


# Save bunchgroup set details
def createBunchGroupNodes(xmlRoot, key, db):
    from TrigConfIO.L1TriggerConfigAccess import BunchGroupSetAccess
    bgset = BunchGroupSetAccess(dbalias = db, bgskey = key)

    bgNodeList = xmlRoot.createElement('bunchgroups')
    for bunchgroup in bgset["bunchGroups"]:
        bgLength = 0
        for entry in bgset["bunchGroups"][bunchgroup]["bcids"]:
            bgLength += entry["length"]

        bgNode = createBgNode(xmlRoot, bgset["bunchGroups"][bunchgroup]["id"], bgset["bunchGroups"][bunchgroup]["name"], bgLength)
        bgNodeList.appendChild(bgNode)

        log.debug("Bunchgroup {0} length: {1}".format(bgset["bunchGroups"][bunchgroup]["name"], bgLength))

    return bgNodeList


# Create a node for a single bunchgroup entry
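# e.g. <bunchgroup id="1" name="Paired">2340</bunchgroup> (values are illustrative)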
def createBgNode(xmlRoot, bgId, name, length):
    bgNode = xmlRoot.createElement('bunchgroup')
    bgNode.setAttribute('id', str(bgId))
    bgNode.setAttribute('name', name)
    bgNode.appendChild(xmlRoot.createTextNode(str(length)))

    return bgNode


if __name__ == '__main__':
    from argparse import ArgumentParser
    parser = ArgumentParser()
    parser.add_argument('--loglevel', type=int, default=3, help='Verbosity level: 1 - VERBOSE, 2 - DEBUG, 3 - INFO')
    parser.add_argument('--physicsDeadtimeItem', type=str, default="L1_eEM26M--enabled", help='Physics Deadtime Item')
    parser.add_argument('-s','--server', default='https://atlasop.cern.ch', help="Pbeast server url. For GPN: https://atlasop.cern.ch, for P1: http://pc-tdq-bst-05.cern.ch:8080")
    parser.add_argument('flags', nargs='*', help='Config flag overrides')
    args = parser.parse_args()

    from AthenaConfiguration.AllConfigFlags import initConfigFlags
    flags = initConfigFlags()
    flags.fillFromArgs(args.flags)
    flags.lock()
    log.setLevel(args.loglevel)

    runNumber = flags.Input.RunNumbers[0]
    lumiblocks = set()
    for inputFile in flags.Input.Files:
        log.debug("Processing file {0}".format(inputFile))
        bsfile = eformat.istream(inputFile)
        event = bsfile[0] # we only need to analyse one event - each file contains data from a single lumiblock

        lumiblocks.add(event.lumi_block())

    # Retrieve keys to read the BG configuration - we don't expect it to change within one EB run
    from TrigConfigSvc.TrigConfigSvcCfg import getTrigConfFromCool
    configKeys = getTrigConfFromCool(runNumber, min(lumiblocks))

    # XML document base
    root = minidom.Document()
    xml = root.createElement('trigger')

    # Read the start and end of lumiblocks from COOL to retrieve deadtime from IS
    log.info("Retrieving timestamps of lumiblocks {0} to {1}".format(min(lumiblocks), max(lumiblocks)))

    lbRanges = readTimestampsOfLb(runNumber, min(lumiblocks), max(lumiblocks))
    (startOfRun, endOfRun) = (lbRanges[min(lumiblocks)]["start"], lbRanges[max(lumiblocks)]["end"])
    lumiValNode = createDeadtimeNode(root, readDeadtimeFromIS(startOfRun, endOfRun, args.server))
    xml.appendChild(lumiValNode)

    # Retrieve bunchgroup data
    bgNode = createBunchGroupNodes(root, configKeys["BGSK"], configKeys["DB"])
    xml.appendChild(bgNode)

    # Retrieve lumiblocks info
    deadTimeData = getPhysicsDeadtimePerLB(startOfRun, endOfRun, lbRanges, args.physicsDeadtimeItem, args.server)
    lumiNode = createLumiBlockNodes(root, runNumber, min(lumiblocks), max(lumiblocks), deadTimeData)
    xml.appendChild(lumiNode)

    # Write to file
    filename = "enhanced_bias_run_" + str(runNumber) + ".xml"
    root.appendChild(xml)
    xml_str = root.toprettyxml(indent = " ")
    with open(filename, "w") as f:
        f.write(xml_str)
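# Example invocation (file path and lumiblock are illustrative):
#   python ExtractEBRunDetails.py --loglevel 2 "Input.Files=['/path/to/EB_run.RAW._lb0100._SFO-1._0001.data']"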