ATLAS Offline Software
Chain_HLT_TableConstructor.py
#!/usr/bin/env python
#
# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
#

from TrigCostAnalysis.TableConstructorBase import TableConstructorBase, Column
from AthenaCommon.Logging import logging
log = logging.getLogger('Chain_HLT')

'''
@file Chain_HLT_TableConstructor.py
@brief Contains the TableConstructor class for the Chain_HLT table. Defines
       what should be saved in the table and how it is filled.
'''
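
# This module is part of the TrigCostAnalysis post-processing. The framework is
# assumed to construct the class below once for the Chain_HLT table, fill it
# once per chain via fillColumns(), and finalise the derived columns with
# postProcessing() (see the illustrative sketch at the bottom of this file).
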
class Chain_HLT_TableConstructor(TableConstructorBase):
    ''' @brief Class representing Chain_HLT table
    '''
    def __init__(self, tableObj, underflowThreshold, overflowThreshold):
        super().__init__(tableObj, underflowThreshold, overflowThreshold)
        self.totalTime = 0 # Total time of algorithms execution
        self.expectedHistograms = ["Group_perCall",
                                   "Chain_perEvent",
                                   "AlgCalls_perEvent",
                                   "Time_perEvent",
                                   "Time_perCall",
                                   "UniqueTime_perCall",
                                   "ChainPassed_perEvent",
                                   "Request_perEvent",
                                   "NetworkRequest_perEvent",
                                   "CachedROBSize_perEvent",
                                   "NetworkROBSize_perEvent"]

    def defineColumns(self):
        self.columns['name'] = Column("Name", "Chain name")
        self.columns['group'] = Column("Group", "Bandwidth group this chain is associated to")
        self.columns['events'] = Column("Raw Active Events", "Raw underlying statistics on the number of events in which this chain was executed")
        self.columns['eventsWeighted'] = Column("Active Events", "Number of events in which this chain was executed")
        self.columns['time'] = Column("Time Per Event [ms]", "Average execution time of this chain per event")
        self.columns['chainRate'] = Column("Execute Rate [Hz]", "Number of chain executions normalised to the wall time for this run range", True)
        self.columns['passFraction'] = Column("Pass Fraction [%]", "Percentage of active events in which this chain passed")
        self.columns["callsSlow"] = Column("Calls > 1000 ms", "Number of chain executions which were particularly slow (> 1000 ms)")
        self.columns['totalTime'] = Column("Total Chain Time [s]", "Total time used by this chain for this run range")
        self.columns['totalTimeFrac'] = Column("Total Chain Time [%]", "Total chain time as a percentage of the total time of all chains in this run range")
        self.columns["totalUniqTime"] = Column("Total Unique Time [s]", "Total time used by algorithms unique to this chain for this run range")
        self.columns['totalUniqTimeFrac'] = Column("Total Unique Time [%]", "Total unique chain time as a percentage of the total time of all chains in this run range")
        self.columns['algPerEvent'] = Column("Run Algs/Event", "Average number of algorithms executed by this chain per event")
        self.columns["dataRate"] = Column("Data Request Rate [Hz]", "Rate of data requests to ROSes (one request can be to many ROSes) from this chain in this run range", True)
        self.columns["retrievedDataRate"] = Column("Retrieved Data Request Rate [Hz]", "Rate of data requests with at least one network ROS request from this chain in this run range", True)
        self.columns["cachedDataSizeRate"] = Column("Cached ROB Rate [kB/s]", "Average rate of cached ROB data fetched by this chain in this run range", True)
        self.columns["retrievedDataSizeRate"] = Column("Retrieved ROB Rate [kB/s]", "Average rate of retrieved ROB data fetched by this chain in this run range", True)
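
        # Note: the trailing 'True' passed to some Column() calls above is
        # assumed to mark rate columns that the framework normalises to the
        # wall time of the processed run range.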

    def fillColumns(self, itemName):
        # Chain executions slower than 1000 ms: integrate the per-call time
        # histogram from the 1000 ms bin upwards
        timePerCall = self.getHistogram("Time_perCall")
        slowCalls = timePerCall.Integral(timePerCall.FindBin(1000.), timePerCall.GetNbinsX())

        self.columns["name"].addValue(itemName)
        self.columns["events"].addValue(self.getHistogram("Chain_perEvent").GetEntries())
        self.columns["eventsWeighted"].addValue(self.getHistogram("Chain_perEvent").Integral())
        self.columns["time"].addValue(self.getHistogram("Time_perEvent").GetMean())
        self.columns["chainRate"].addValue(self.getHistogram("Chain_perEvent").Integral())
        self.columns['passFraction'].addValue(self.getHistogram("ChainPassed_perEvent").Integral())
        self.columns["callsSlow"].addValue(slowCalls)
        # Total (unique) chain time: x-weighted integral of the per-call time histograms, converted from ms to s
        self.columns['totalTime'].addValue(self.getXWeightedIntegral("Time_perCall", isLog=True) * 1e-3)
        # self.columns['totalTimeFrac'] is filled in postProcessing()
        self.columns['totalUniqTime'].addValue(self.getXWeightedIntegral("UniqueTime_perCall", isLog=True) * 1e-3)
        # self.columns['totalUniqTimeFrac'] is filled in postProcessing()
        self.columns["algPerEvent"].addValue(self.getHistogram("AlgCalls_perEvent").GetMean())
        self.columns["dataRate"].addValue(self.getXWeightedIntegral("Request_perEvent", isLog=False))
        self.columns["retrievedDataRate"].addValue(self.getXWeightedIntegral("NetworkRequest_perEvent", isLog=False))
        self.columns["cachedDataSizeRate"].addValue(self.getXWeightedIntegral("CachedROBSize_perEvent", isLog=False))
        self.columns["retrievedDataSizeRate"].addValue(self.getXWeightedIntegral("NetworkROBSize_perEvent", isLog=False))

        # Collect the labels of all bandwidth groups this chain belongs to
        groupHist = self.getHistogram("Group_perCall")
        groupLabels = []
        for i in range(1, groupHist.GetNbinsX() + 1):  # ROOT bins are 1-indexed, include the last bin
            label = groupHist.GetXaxis().GetBinLabel(i)
            if not label: break
            groupLabels.append(label)

        self.columns['group'].addValue(", ".join(groupLabels))

    def postProcessing(self):
        totalTimeEntries = self.columns["totalTime"].content
        if sum(totalTimeEntries) == 0:
            log.error("No histograms for the Chain HLT summary were found")
            raise ValueError("No histograms for the Chain HLT summary were found")

        # Express each chain's total (and unique) time as a percentage of the
        # total time of all chains (self.totalTime, expected to be set by the framework)
        for entry in totalTimeEntries:
            self.columns["totalTimeFrac"].addValue(100 * entry / self.totalTime)

        totalUniqTimeEntries = self.columns["totalUniqTime"].content

        for entry in totalUniqTimeEntries:
            self.columns["totalUniqTimeFrac"].addValue(100 * entry / self.totalTime)

        # Convert the raw pass counts stored by fillColumns() into a per-chain pass fraction [%]
        passChainEntries = self.columns["passFraction"].content
        totalChains = self.columns["eventsWeighted"].content

        for i in range(len(passChainEntries)):
            passFrac = 0 if totalChains[i] == 0 else 100 * passChainEntries[i] / totalChains[i]
            self.columns["passFraction"].content[i] = passFrac
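

# ---------------------------------------------------------------------------
# Illustrative sketches only - not part of the ATLAS source. Names, signatures
# and the behaviour of the base-class helper sketched below are assumptions
# made to clarify the calculations above.
#
# getXWeightedIntegral(name, isLog), used in fillColumns(), is assumed to sum
# bin-content times bin-centre over the named 1D histogram, taking the
# logarithmic bin centre for log-binned axes. Applied to "Time_perCall"
# (binned in ms) this gives the total time of all calls, hence the * 1e-3
# conversion to seconds above.
def _xWeightedIntegralSketch(histogram, isLog=False):
    ''' Hypothetical stand-in for TableConstructorBase.getXWeightedIntegral() '''
    axis = histogram.GetXaxis()
    total = 0.0
    for i in range(1, histogram.GetNbinsX() + 1):  # ROOT bins are 1-indexed
        centre = axis.GetBinCenterLog(i) if isLog else axis.GetBinCenter(i)
        total += centre * histogram.GetBinContent(i)
    return total

# Assumed call order when the framework builds the Chain_HLT table
# (hypothetical driver, shown only to make the sequence explicit):
#
#   table = Chain_HLT_TableConstructor(tableObj, underflowThreshold, overflowThreshold)
#   table.totalTime = totalTimeOfAllChains   # total time of all chains, set externally (assumption)
#   table.defineColumns()
#   for chainName in chainNames:
#       table.fillColumns(chainName)
#   table.postProcessing()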