ATLAS Offline Software
yearwise_efficiency.py
#!/usr/bin/env python
# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration

"""
Plot trigger and reconstruction efficiencies over entire data-periods.
"""

import numpy as np
import ROOT as R
import python_tools as pt
import ZLumiScripts.tools.zlumi_mc_cf as dq_cf
import math
from array import array
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--year', type=str, help='A year number (15-25), "run3", or multiple years joined with underscores (e.g. 22_23)')
parser.add_argument('--channel', type=str, help='Zee or Zmumu')
parser.add_argument('--indir', type=str, help='Input directory for CSV files')
parser.add_argument('--outdir', type=str, help='Output directory for plots')

args = parser.parse_args()
year = args.year
channel = args.channel
indir = args.indir
outdir = args.outdir
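# Illustrative invocation (the paths below are placeholders, not taken from this
# file; a trailing slash on --outdir matters because the output file name is
# appended by plain string concatenation):
#   python yearwise_efficiency.py --year run3 --channel Zmumu \
#       --indir /path/to/csvs/ --outdir /path/to/plots/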

if year == "run3":
    years = ["22", "23", "24", "25"]
    out_tag = "run3"
    time_format = "%m/%y"
    xtitle = 'Month / Year'
    date_tag = "Run 3,#kern[-0.5]{ }#sqrt{s} = 13.6 TeV"
    labelsize = 55
    multiyear = True
elif len(year.split("_")) > 1:
    years = year.split("_")
    out_tag = "data" + year
    time_format = "%m/%y"
    xtitle = 'Month / Year'
    date_tag = "Run 3,#kern[-0.5]{ }#sqrt{s} = 13.6 TeV"
    labelsize = 55
    multiyear = True
else:
    years = [year]
    out_tag = "data" + year
    time_format = "%d/%m"
    xtitle = 'Date in 20' + year
    date_tag = "Data 20" + year + ",#kern[-0.5]{ }#sqrt{s} = 13.6 TeV"
    labelsize = 22
    multiyear = False

def main():
    plot_efficiency(channel, years)
    plot_efficiency_comb(channel, years)

def plot_efficiency_comb(channel, years):

    arr_date = []
    arr_combeff = []
    arr_comberr = []
    run_num = []

    for year in years:
        grl = pt.get_grl(year)

        for run in grl:
            livetime, zlumi, zerr, olumi, timestamp, dfz_small = pt.get_dfz(args.indir, year, run, channel)
            # Cut out short runs
            if livetime < pt.lblivetimecut:
                if livetime >= 0.: print(f"Skip Run {run} because of live time {livetime/60:.1f} min")
                continue

            dfz_small['CombEff'] = dfz_small[channel + 'EffComb']
            dfz_small['CombErr'] = dfz_small[channel + 'ErrComb']
            dfz_small['OffMu'] = dfz_small['OffMu']

            # Scale event-level efficiency with FMC
            campaign = "mc23a"
            dfz_small['CombEff'] *= dq_cf.correction(dfz_small['OffMu'], channel, campaign, int(run))
            dfz_small['CombErr'] *= dq_cf.correction(dfz_small['OffMu'], channel, campaign, int(run))

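            # The block below forms a livetime-weighted per-run average,
            #   eff_run = sum_i(eff_i * LBLive_i) / livetime,
            # and combines the per-LB uncertainties in quadrature,
            #   err_run = sqrt(sum_i((err_i * LBLive_i)^2)) / livetime,
            # where i runs over the luminosity blocks in dfz_small and livetime
            # is the value returned by pt.get_dfz (assumed to equal sum_i LBLive_i).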
            # Calculate average event-level efficiency
            dfz_small['CombEff'] *= dfz_small['LBLive']
            comb_eff_avg = dfz_small['CombEff'].sum()/livetime

            # Calculate average event-level efficiency error
            dfz_small['CombErr'] *= dfz_small['LBLive']
            dfz_small['CombErr'] *= dfz_small['CombErr']
            comb_err_avg = math.sqrt(dfz_small['CombErr'].sum())/livetime

            arr_date.append(timestamp)
            arr_combeff.append(comb_eff_avg)
            arr_comberr.append(comb_err_avg)
            run_num.append(run)

    arr_date = array('d', arr_date)

    arr_combeff = np.array(arr_combeff)
    arr_comberr = np.array(arr_comberr)

    if channel == "Zee":
        ymin, ymax = 0.56, 0.74
    elif channel == "Zmumu":
        ymin, ymax = 0.74, 0.80

    comb_graph = R.TGraphErrors(len(arr_date), arr_date, arr_combeff, R.nullptr, arr_comberr)
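    # The x values are the per-run timestamps from pt.get_dfz, assumed to be
    # Unix epoch seconds; the time-axis calls below make ROOT render them as
    # calendar dates using time_format, with the offset pinned to the epoch in GMT.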
    comb_graph.GetHistogram().SetYTitle("Efficiency")
    comb_graph.GetHistogram().GetYaxis().SetRangeUser(ymin, ymax)
    comb_graph.GetXaxis().SetTimeDisplay(2)
    comb_graph.GetXaxis().SetNdivisions(9, R.kFALSE)
    comb_graph.GetXaxis().SetTimeFormat(time_format)
    comb_graph.GetXaxis().SetTimeOffset(0, "gmt")
    comb_graph.SetMarkerSize(1)

    if multiyear:
        c1 = R.TCanvas("c1", "c1", 2000, 1200)
    else:
        c1 = R.TCanvas()

    comb_graph.Draw("ap")

    leg = R.TLegend(0.645, 0.4, 0.805, 0.6)
    if channel == "Zee":
        pt.drawAtlasLabel(0.2, ymax-0.06, "Internal")
    elif channel == "Zmumu":
        pt.drawAtlasLabel(0.2, ymax-0.4, "Internal")

    pt.drawText(0.2, ymax-0.46, date_tag, size=labelsize)
    pt.drawText(0.2, ymax-0.52, pt.plotlabel[channel] + " counting", size=labelsize)

    leg.SetBorderSize(0)
    leg.SetTextSize(0.07)
    leg.AddEntry(comb_graph, "#varepsilon_{event}^{" + pt.plotlabel[channel] + "}", "ep")

    leg.Draw()

    comb_graph.GetHistogram().SetXTitle("Date")
    c1.SaveAs(outdir + channel + "_eventeff_vs_time_" + out_tag + ".pdf")

def plot_efficiency(channel, years):

    print("Efficiency Plots vs Time for years: ", years)

    arr_date = []
    arr_trigeff = []
    arr_trigerr = []
    arr_recoeff = []
    arr_recoerr = []
    run_num = []

    trigeff_vs_runlength = R.TH2D("trigeff_vs_runlength",
        "Trigger efficiency vs. Run Length;Run Length [h]; Trigger Efficiency;N_{run}",
        48, 0., 24., 80, .6, 1.0)
    recoeff_vs_runlength = R.TH2D("recoeff_vs_runlength",
        "Reconstruction efficiency vs. Run Length;Run Length [h]; Reconstruction Efficiency;N_{run}",
        48, 0., 24., 60, .8, 1.0)

    for year in years:
        grl = pt.get_grl(year)

        for run in grl:
            livetime, zlumi, zerr, olumi, timestamp, dfz_small = pt.get_dfz(args.indir, year, run, channel)
            # Cut out short runs
            if livetime < pt.lblivetimecut:
                if livetime >= 0.: print(f"Skip Run {run} because of live time {livetime/60:.1f} min")
                continue

            dfz_small['TrigEff'] = dfz_small[channel + 'EffTrig']
            dfz_small['TrigErr'] = dfz_small[channel + 'ErrTrig']
            dfz_small['RecoEff'] = dfz_small[channel + 'EffReco']
            dfz_small['RecoErr'] = dfz_small[channel + 'ErrReco']

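            # The per-run averages below use the same livetime weighting and
            # quadrature error propagation as plot_efficiency_comb, applied
            # separately to the trigger and reconstruction columns.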
            # Calculate average trigger efficiency
            dfz_small['TrigEff'] *= dfz_small['LBLive']
            trig_eff_avg = dfz_small['TrigEff'].sum()/livetime

            # Calculate average reconstruction efficiency
            dfz_small['RecoEff'] *= dfz_small['LBLive']
            reco_eff_avg = dfz_small['RecoEff'].sum()/livetime

            # Calculate average trigger efficiency error
            dfz_small['TrigErr'] *= dfz_small['LBLive']
            dfz_small['TrigErr'] *= dfz_small['TrigErr']
            trig_err_avg = math.sqrt(dfz_small['TrigErr'].sum())/livetime

            # Calculate average reconstruction efficiency error
            dfz_small['RecoErr'] *= dfz_small['LBLive']
            dfz_small['RecoErr'] *= dfz_small['RecoErr']
            reco_err_avg = math.sqrt(dfz_small['RecoErr'].sum())/livetime

            arr_date.append(timestamp)
            arr_trigeff.append(trig_eff_avg)
            arr_trigerr.append(trig_err_avg)
            arr_recoeff.append(reco_eff_avg)
            arr_recoerr.append(reco_err_avg)
            run_num.append(run)

            trigeff_vs_runlength.Fill(min(livetime/3600, 23.99), trig_eff_avg)
            recoeff_vs_runlength.Fill(min(livetime/3600, 23.99), reco_eff_avg)
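            # Capping at 23.99 keeps runs longer than 24 h in the last bin of
            # the 0-24 h run-length histograms rather than in the overflow.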

    arr_date = array('d', arr_date)

    arr_trigeff = np.array(arr_trigeff)
    arr_trigerr = np.array(arr_trigerr)
    arr_recoeff = np.array(arr_recoeff)
    arr_recoerr = np.array(arr_recoerr)

    ymin, ymax = 0.61, 0.96
    if channel == "Zee":
        lep = "e"
    elif channel == "Zmumu":
        lep = "#mu"

    trig_graph = R.TGraphErrors(len(arr_date), arr_date, arr_trigeff, R.nullptr, arr_trigerr)
    trig_graph.GetHistogram().SetYTitle("Efficiency")
    trig_graph.GetHistogram().GetYaxis().SetRangeUser(ymin, ymax)
    trig_graph.GetXaxis().SetTimeDisplay(2)
    trig_graph.GetXaxis().SetNdivisions(9, R.kFALSE)
    trig_graph.GetXaxis().SetTimeFormat(time_format)
    trig_graph.GetXaxis().SetTimeOffset(0, "gmt")
    trig_graph.SetMarkerSize(1)

    reco_graph = R.TGraphErrors(len(arr_date), arr_date, arr_recoeff, R.nullptr, arr_recoerr)
    reco_graph.GetHistogram().GetYaxis().SetRangeUser(ymin, ymax)
    reco_graph.GetXaxis().SetTimeDisplay(2)
    reco_graph.GetXaxis().SetNdivisions(9, R.kFALSE)
    reco_graph.GetXaxis().SetTimeFormat(time_format)
    reco_graph.GetXaxis().SetTimeOffset(0, "gmt")
    reco_graph.SetMarkerSize(1)
    reco_graph.SetMarkerStyle(21)
    reco_graph.SetMarkerColor(R.kRed)
    reco_graph.SetLineColor(R.kRed)

    if multiyear:
        c1 = R.TCanvas("c1", "c1", 2000, 1200)
    else:
        c1 = R.TCanvas()

    trig_graph.Draw("ap")
    reco_graph.Draw("p")

    if channel == "Zee":
        leg = R.TLegend(0.645, 0.2, 0.805, 0.4)
        pt.drawAtlasLabel(0.2, ymax-0.64, "Internal")
        pt.drawText(0.2, ymax-0.70, date_tag, size=labelsize)
        pt.drawText(0.2, ymax-0.76, pt.plotlabel[channel] + " counting", size=labelsize)
    elif channel == "Zmumu":
        leg = R.TLegend(0.645, 0.45, 0.805, 0.65)
        pt.drawAtlasLabel(0.2, ymax-0.36, "Internal")
        pt.drawText(0.2, ymax-0.42, date_tag, size=labelsize)
        pt.drawText(0.2, ymax-0.48, pt.plotlabel[channel] + " counting", size=labelsize)

    leg.SetBorderSize(0)
    leg.SetTextSize(0.07)
    leg.AddEntry(reco_graph, "#varepsilon_{reco}^{single-" + lep + "}", "ep")
    leg.AddEntry(trig_graph, "#varepsilon_{trig}^{single-" + lep + "}", "ep")

    leg.Draw()

    trig_graph.GetHistogram().SetXTitle("Date")
    c1.SaveAs(outdir + channel + "_eff_vs_time_" + out_tag + ".pdf")

    c1.SetRightMargin(0.15)
    trigeff_vs_runlength.Draw("colz")
    if channel == "Zee":
        ymin, ymax = 0.73, 0.95
    elif channel == "Zmumu":
        ymin, ymax = 0.6, 0.85
    trigeff_vs_runlength.GetYaxis().SetRangeUser(ymin, ymax)
    pt.drawAtlasLabel(0.2, 0.89, "Internal")
    pt.drawText(0.2, 0.83, date_tag, size=labelsize)
    pt.drawText(0.2, 0.77, pt.plotlabel[channel] + " counting", size=labelsize)
    c1.SaveAs(outdir + channel + "_trigeff_vs_runlength_" + out_tag + ".pdf")

    recoeff_vs_runlength.Draw("colz")
    if channel == "Zee":
        ymin, ymax = 0.8, 0.95
    elif channel == "Zmumu":
        ymin, ymax = 0.92, 1.0
    recoeff_vs_runlength.GetYaxis().SetRangeUser(ymin, ymax)
    pt.drawAtlasLabel(0.2, 0.89, "Internal")
    pt.drawText(0.2, 0.83, date_tag, size=labelsize)
    pt.drawText(0.2, 0.77, pt.plotlabel[channel] + " counting", size=labelsize)
    c1.SaveAs(outdir + channel + "_recoeff_vs_runlength_" + out_tag + ".pdf")

if __name__ == "__main__":
    pt.setAtlasStyle()
    R.gROOT.SetBatch(R.kTRUE)
    main()