ATLAS Offline Software
covarianceToolsLibrary.py
# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration

"""
`covarianceToolsLibrary.py` can be imported directly into your own Python script
to manipulate covariance information and to evaluate goodness of fit.
Examples of how to do this are provided in the `examples/covarianceTool-examples/` directory.

Author: Louie D. Corpe (UCL)
Email: l.corpe@cern.ch
"""

import yaml

import ROOT as r
import yoda
from array import array

import numpy as np
from numpy import linalg as LA
from scipy import stats

r.gErrorIgnoreLevel = r.kError


def setCorrInfo(h, binErrors):
    """
    `h` AO (the Scatter you want to set the correlation info for)
    `binErrors` per-bin dictionary of the systematic error breakdown, stored as the 'ErrorBreakdown' annotation
    """
    corrIn = yaml.dump(binErrors, default_flow_style=True, default_style='', width=1e6)
    h.setAnnotation("ErrorBreakdown", corrIn)


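# Illustrative note (not part of the original module): the bin-error structure
# consumed by setCorrInfo() and returned by getCorrInfo() is a nested dict keyed
# by bin index and then by systematic name, with absolute 'up'/'dn' shifts.
# The systematic names and numbers below are invented for illustration only:
#
#   binErrors = {0: {'stat': {'up': 0.10, 'dn': -0.10},
#                    'syst1': {'up': 0.05, 'dn': -0.04}},
#                1: {'stat': {'up': 0.20, 'dn': -0.20},
#                    'syst1': {'up': 0.07, 'dn': -0.06}}}
#   setCorrInfo(scatter, binErrors)

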
def addSystematicToCorrInfo(h, systName, binErrorsToAdd):
    corr = h.annotation('ErrorBreakdown')
    binErrors = yaml.load(corr)
    if binErrorsToAdd.keys() != binErrors.keys():
        print("[ERROR], could not add ", systName, " to ", h, " because the bins are inconsistent")
        print(binErrors.keys(), " vs ", binErrorsToAdd.keys())
        return
    for k, v in binErrorsToAdd.items():
        binErrors[k][systName] = v
    setCorrInfo(h, binErrors)


def getCorrInfo(h, names=[]):
    if not (h.hasAnnotation('ErrorBreakdown')):
        print("[ERROR] analysis object", h, " has no stored correlation info!")
        return None
    binErrors = yaml.load(h.annotation('ErrorBreakdown'))
    if len(names) > 0:
        binErrorsFiltered = {}
        for k, v in binErrors.items():  # loop through bins
            foundName = False
            for name in names:
                for k2, v2 in v.items():  # loop through systematics
                    if k2 == name:
                        foundName = True
                        binErrorsFiltered.setdefault(k, {})[k2] = v2
                if not foundName:
                    print("[WARNING] cannot find systematic with name ", name)
        binErrors = binErrorsFiltered
    return binErrors


def hasAsymmetricErrors(ao):
    nbins = ao.numPoints()
    corr = yaml.load(ao.annotation('ErrorBreakdown'))
    if len(corr) == 0: return True
    systList = corr[0].keys()
    hasAsymmErrs = False
    for sname in systList:
        for ibin in range(nbins):
            up = ((corr[ibin][sname]['up']))
            dn = ((corr[ibin][sname]['dn']))
            if (up != -1 * dn):
                hasAsymmErrs = True
                break
    return hasAsymmErrs


def makeCovarianceMatrix(ao, ignore_corrs=False):
    nbins = ao.numPoints()
    corr = ao.annotation('ErrorBreakdown') if '1.7' in yoda.version() else yaml.load(ao.annotation('ErrorBreakdown'))
    dummyM = np.outer(range(nbins), range(nbins))
    covM = np.zeros(dummyM.shape)
    if len(corr) == 0:
        for ibin in range(nbins):
            covM[ibin][ibin] = ((ao.points()[ibin].yErrs()[0] + ao.points()[ibin].yErrs()[1]) / 2) ** 2
            if covM[ibin][ibin] == 0: covM[ibin][ibin] = 1
        print("[WARNING], ao ", ao.path, " has no errors. Setting cov matrix to unit... but consider excluding it!")
        return covM
    systList = corr[0].keys()
    for sname in systList:
        systErrs = np.zeros(nbins)
        for ibin in range(nbins):
            systErrs[ibin] = ((abs(corr[ibin][sname]['up'])) + (abs(corr[ibin][sname]['dn']))) * 0.5  # up/dn are considered the same since NoToys assumes symmetric errors
        if (ignore_corrs or 'stat' in sname):
            covM += np.diag(systErrs * systErrs)
        else:
            covM += np.outer(systErrs, systErrs)
    if LA.det(covM) == 0:
        printMatrix(covM)
        print("[ERROR], the above cov matrix is singular for ", ao.path(), " ! exit")
        print("(if this is a cov matrix for a super-AO, perhaps you have several highly correlated distributions in your set. Try vetoing some.)")
    return covM


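# Illustrative note (not part of the original module): makeCovarianceMatrix()
# symmetrises each systematic's per-bin shifts and then adds, per systematic,
# either a fully bin-to-bin correlated block (outer product) or a purely
# diagonal one (for 'stat' components, or when ignore_corrs=True).  A minimal
# numpy sketch of the same construction, with made-up numbers:
#
#   sigma = np.array([0.1, 0.2])               # symmetrised errors of one systematic
#   cov_correlated = np.outer(sigma, sigma)     # [[0.01, 0.02], [0.02, 0.04]]
#   cov_uncorrelated = np.diag(sigma * sigma)   # [[0.01, 0.00], [0.00, 0.04]]
#   total_cov = cov_correlated + cov_uncorrelated  # summed over all systematics

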
def hasValidErrorBreakdown(ao):
    if not ao.hasAnnotation("ErrorBreakdown"): return False
    if len(ao.annotation("ErrorBreakdown")) == 0: return False
    corrs = getCorrInfo(ao)
    for iBin, binErrs in corrs.items():
        binTotal = [0., 0.]
        for syst, err in binErrs.items():
            binTotal[0] = (binTotal[0] ** 2 + err['up'] ** 2) ** 0.5
            binTotal[1] = (binTotal[1] ** 2 + err['dn'] ** 2) ** 0.5
        if binTotal[0] == 0 and binTotal[1] == 0: return False
    return True


def makeCovarianceMatrixFromToys(ao, ntoys=10000, ignore_corrs=False):
    nbins = ao.numPoints()
    corr = yaml.load(ao.annotation('ErrorBreakdown'))
    systList = corr[0].keys()
    toys = np.zeros([ntoys, nbins])

    for itoy in range(ntoys):
        shift = np.zeros(len(systList))
        delta = np.zeros(nbins)
        isyst = 0
        for sname in systList:
            if (ignore_corrs or 'stat' in sname):
                for n in range(nbins):
                    lrand = np.random.normal(0, 1)  # bin-bin independent stat fluctuations
                    s = ((corr[n][sname]['up'])) if lrand >= 0. else ((corr[n][sname]['dn']))
                    delta[n] += abs(lrand) * s
            else:
                lrand = np.random.normal(0, 1)
                shift[isyst] = lrand  # bin-bin correlated fluctuations

                for n in range(nbins):
                    s = ((corr[n][sname]['up'])) if lrand >= 0. else ((corr[n][sname]['dn']))
                    delta[n] += abs(lrand) * s  # linear interpolation
            isyst += 1

        for n in range(nbins):
            toys[itoy][n] = (1. + delta[n])

    cov = np.cov(toys, rowvar=0)
    if LA.det(cov) == 0:
        printMatrix(cov)
        print("[ERROR], the above cov matrix is singular for ", ao.path, " ! exit")
        print("(if this is a cov matrix for a super-AO, perhaps you have several highly correlated distributions in your set. Try vetoing some.)")
    return cov


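# Illustrative note (not part of the original module): this toy-based
# construction is an alternative to the analytic one in makeCovarianceMatrix();
# it samples each systematic with Gaussian pulls and so also picks up
# asymmetric up/down variations.  A hedged usage sketch, assuming 'ao' carries
# an 'ErrorBreakdown' annotation:
#
#   cov_analytic = makeCovarianceMatrix(ao)
#   cov_toys = makeCovarianceMatrixFromToys(ao, ntoys=10000)

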
def plotVariations(ao, outdir, label, threshold=0.05, xLabel=None, title=None):
    if ao.dim() != 2: return
    tgraphs = {}
    tgraphs['all'] = [r.TGraphAsymmErrors(), r.TGraphAsymmErrors(), r.TGraphAsymmErrors(), r.TGraphAsymmErrors()]
    maxErr = 0

    nbins = ao.numPoints()
    corr = ao.annotation('ErrorBreakdown') if '1.7' in yoda.version() else yaml.load(ao.annotation('ErrorBreakdown'))

    if len(corr) == 0: return
    systList = corr[0].keys()

    all_up = [0 for i in range(nbins)]
    all_dn = [0 for i in range(nbins)]
    for sname in systList:
        tgu = r.TGraphAsymmErrors()
        tgu_line = r.TGraphAsymmErrors()
        tgd = r.TGraphAsymmErrors()
        tgd_line = r.TGraphAsymmErrors()
        pt = 0
        draw = 0
        for i in range(nbins):
            nominalx = ao.points()[i].x()
            nominaly = ao.points()[i].y()
            xerr = abs(nominalx - ao.points()[i].xMin())
            tgu.SetPoint(pt, nominalx, 1)
            tgd.SetPoint(pt, nominalx, 1)
            if nominaly == 0: continue
            eup = 1 - (((corr[i][sname]['up'])) + nominaly) / nominaly
            edn = 1 - (((corr[i][sname]['dn'])) + nominaly) / nominaly
            all_up[i] = (all_up[i] ** 2 + eup ** 2) ** 0.5
            all_dn[i] = (all_dn[i] ** 2 + edn ** 2) ** 0.5
            if (abs(eup) > threshold): draw = 1
            if (abs(edn) > threshold): draw = 1
            if abs(all_up[i]) > maxErr: maxErr = abs(all_up[i])
            tgu.SetPointError(pt, xerr, xerr, 0, eup)
            tgd.SetPointError(pt, xerr, xerr, 0, edn)
            tgu_line.SetPoint(pt, nominalx, 1 + eup)
            tgd_line.SetPoint(pt, nominalx, 1 + edn)
            tgu_line.SetPointError(pt, xerr, xerr, 0, 0)
            tgd_line.SetPointError(pt, xerr, xerr, 0, 0)
            tgraphs['all'][0].SetPoint(pt, nominalx, nominaly / nominaly)
            tgraphs['all'][1].SetPoint(pt, nominalx, nominaly / nominaly)
            tgraphs['all'][2].SetPoint(pt, nominalx, 1 - all_dn[i])
            tgraphs['all'][3].SetPoint(pt, nominalx, 1 + all_up[i])
            tgraphs['all'][0].SetPointError(pt, xerr, xerr, 0, -1 * all_dn[i])
            tgraphs['all'][1].SetPointError(pt, xerr, xerr, 0, all_up[i])
            tgraphs['all'][2].SetPointError(pt, xerr, xerr, 0, 0)
            tgraphs['all'][3].SetPointError(pt, xerr, xerr, 0, 0)
            pt += 1
        if (draw): tgraphs[sname] = [tgd, tgu, tgd_line, tgu_line]
    r.gROOT.SetBatch()
    tc = r.TCanvas("c", "c", 500, 500)
    same = 0
    itg = 0
    leg = r.TLegend(.13, .6, .9, .8)
    leg.SetNColumns(2)
    leg.SetBorderSize(0)
    leg.SetFillColor(0)
    leg.SetFillStyle(0)
    leg.SetTextFont(42)
    leg.SetTextSize(0.025)

    for tgname, tg in sorted(tgraphs.items(), key=lambda x: abs(x[1][1].GetErrorYhigh(0)), reverse=True):
        itg += 1
        isCorr = not (('stat' in tgname) or ('all' in tgname))
        if 'nominal' == tgname: continue
        if 'mc' == tgname: continue
        tg[0].SetFillColorAlpha(0 + itg, 0.3)
        tg[1].SetFillColorAlpha(0 + itg, 0.3)
        tg[0].SetMarkerColor(1)  # kBlack
        tg[2].SetMarkerStyle(23)
        tg[2].SetMarkerColor(0 + itg)
        tg[2].SetLineColor(0 + itg)
        tg[3].SetMarkerStyle(22)
        tg[3].SetMarkerColor(0 + itg)
        tg[3].SetLineColor(0 + itg)
        leg.AddEntry(tg[0], tgname, "f")
        if same:
            tg[0].Draw("p2 same")
            tg[1].Draw("p2 same")
            if isCorr: tg[2].Draw("P same")
            if isCorr: tg[3].Draw("P same ")
        else:
            tg[0].SetMaximum((1. + maxErr) * 1.33)
            tg[0].SetMinimum(1. - maxErr)
            tg[0].Draw("AP2")
            tg[1].Draw("P2 same ")
            if isCorr: tg[2].Draw("P same")
            if isCorr: tg[3].Draw("P same")
        same = 1
    leg.Draw()
    lat = r.TLatex()
    leg.SetTextFont(42)
    lat.SetNDC()
    lat.SetTextSize(0.05)
    lat.DrawLatex(0.1, 0.91, "#it{ATLAS} #bf{Internal}")
    lat.SetTextSize(0.035)
    lat.DrawLatex(0.13, 0.83, "#bf{All variations >%1.1f%% anywhere in range:}" % (threshold * 100))
    if xLabel:
        lat.SetTextAlign(33)
        lat.SetTextSize(0.04)
        lat.DrawLatex(0.9, 0.05, "#bf{%s}" % xLabel.replace("$", "").replace("\\text", ""))
    lat.SetTextAngle(90)
    lat.DrawLatex(0.02, 0.9, "#bf{Relative Uncertainty}")
    outfile = "%s/%s-variations" % (outdir, label)
    tc.SaveAs(outfile + ".pdf")
    tc.SaveAs(outfile + ".png")


def getDiagonalCovMatricesFromScatter(scat):
    npts = scat.numPoints()
    cov_up = np.zeros([npts, npts])
    cov_dn = np.zeros([npts, npts])
    for i in range(npts):
        yed_, yeu_ = scat.points()[i].yErrs()
        cov_up[i][i] = yeu_ * yeu_
        cov_dn[i][i] = yed_ * yed_
    return [cov_dn, cov_up]


def getDiagonalCovMatricesFromHisto(scat):
    npts = scat.numBins
    cov_up = np.zeros([npts, npts])
    cov_dn = np.zeros([npts, npts])
    for i in range(npts):
        err = scat.bins[i].xStdErr()
        cov_up[i][i] = err * err
        cov_dn[i][i] = err * err
    return [cov_dn, cov_up]


def scatterToMatrix(h, bw=False):
    npts = h.numPoints()
    dim = h.dim()
    mo = np.zeros([npts, npts])
    for i in range(npts):
        for j in range(npts):
            mo[0][i] = h.points()[i].val(dim)
            if bw:
                width = (abs(h.points()[i].errs(dim)[0]))
                width += (abs(h.points()[i].errs(dim)[1]))
                mo[0][i] *= 1. / width
    return mo


def histoToMatrix(h, bw=False):
    npts = h.numBins
    mo = np.zeros([npts, npts])
    for i in range(npts):
        for j in range(npts):
            mo[0][i] = h.bins[i].sumW()
            if bw:
                width = h.bins[i].xWidth()
                mo[0][i] *= 1. / width
    return mo


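# Note (added for clarity): scatterToMatrix() and histoToMatrix() store the bin
# values in the first row of an npts x npts array, i.e. the result is used as a
# row vector by calculateChi2() and manualChi2(); the remaining rows stay zero.

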
def printMatrix(matrix, outfile=None):
    if isinstance(outfile, str): outfile = open("%s.txt" % outfile, 'w')
    nCols = matrix.shape[0]
    nRows = matrix.shape[1]
    colNames = "X "
    for i in range(nCols):
        colNames += "%d " % i
    if outfile is not None: outfile.write(colNames + "\n")
    else: print(colNames + "\n")

    for j in range(nRows):
        rowString = "%d| " % j
        for i in range(nCols):
            rowString += "%.6f " % matrix[i][j]
        if outfile is not None: outfile.write(rowString + "\n")
        else: print(rowString + "\n")
    if outfile is not None: outfile.write("Eigenvalues %s\n" % (" ".join(["%.3f" % val for val in LA.eigvals((matrix))])))
    else: print("Eigenvalues %s\n" % (" ".join(["%.6f" % val for val in LA.eigvals((matrix))])))


def drawMatrix(matrix, outfile, xLabel=None):

    nCols = matrix.shape[0]
    nRows = matrix.shape[1]
    assert(nCols == nRows)
    nBins = nCols
    r.gROOT.SetBatch()
    lat = r.TLatex()
    lat.SetTextSize(0.02)
    lat.SetTextAngle(45)
    lat.SetTextAlign(22)
    Red = array('d', [1.0, 1.0, 0.0])
    Grn = array('d', [0.0, 1.0, 0.0])
    Blu = array('d', [0.0, 1.0, 1.0])
    Length = array('d', [0.0, 0.5, 1.0])
    r.TColor.CreateGradientColorTable(len(Length), Length, Red, Grn, Blu, 50)
    if ('chi2' in outfile): nRows += 1
    th2 = r.TH2D(outfile.split("/")[-1], outfile.split("/")[-1], nRows, 0, nRows, nBins, 0, nBins)
    th2.SetTitle("")
    totalOverall = 0.
    totalsPerRow = []
    for j in range(nBins):
        th2.GetYaxis().SetBinLabel(j + 1, "%d" % (nBins - j))
        total = 0.
        for i in range(nBins):
            th2.GetXaxis().SetBinLabel(i + 1, "%d" % (i + 1))
            th2.Fill(i, nBins - j - 1, matrix[i][j])
            total += matrix[i][j]
        totalOverall += total
        if ('chi2' in outfile): th2.Fill(nBins, nBins - j - 1, total)
        if ('chi2' in outfile): th2.GetXaxis().SetBinLabel(nBins + 1, "TOTAL =%.2f" % totalOverall)
        totalsPerRow.append(total)
    tc = r.TCanvas("c", "c", 500, 500)
    tc.SetRightMargin(0.2)
    if ('covariance' in outfile): tc.SetLogz()
    th2.Draw("colz")
    for j in range(nBins):
        for i in range(nBins):
            lat.DrawLatex(i + 0.5, nBins - j - 0.5, "%.2f" % matrix[i][j])
        if ('chi2' in outfile): lat.DrawLatex(nBins + 0.5, nBins - j - 0.5, "%.2f" % totalsPerRow[j])
    th2.SetMaximum(max(abs(th2.GetMaximum()), abs(th2.GetMinimum())))
    th2.SetMinimum(-1 * (th2.GetMaximum()))
    th2.SetStats(0)
    lat = r.TLatex()
    lat.SetNDC()
    lat.SetTextSize(0.05)
    lat.DrawLatex(0.1, 0.91, "#it{ATLAS} #bf{Internal}")
    lat.SetTextSize(0.035)
    if xLabel:
        lat.SetTextAlign(33)
        lat.SetTextSize(0.04)
        lat.DrawLatex(0.9, 0.05, "#bf{%s}" % xLabel.replace("$", "").replace("\\text", ""))
        lat.SetTextAngle(90)
        lat.DrawLatex(0.02, 0.9, "#bf{%s}" % xLabel.replace("$", "").replace("\\text", ""))
    tc.SaveAs(outfile + ".pdf")
    tc.SaveAs(outfile + ".png")


def manualChi2(data, mc, cov, verbosity=0):
    if (verbosity > 2):
        print()
        print()
        print("[INFO] Manual Chi2 Calculation, (verbosity)= ", (verbosity > 0))
    Nbins = cov.shape[0]
    chi2 = 0
    for i in range(Nbins):
        nEvData = data[0][i]
        nEvMC = mc[0][i]
        sigmaSq = cov[i][i]
        sigma = (sigmaSq) ** 0.5
        residual = nEvData - nEvMC
        if (verbosity > 2):
            print("[INFO] Manual Chi2 Bin i=%d: data=%f, mc=%f, res=%f vs sigma=%f res/sigma=%.2f, (r/s) ** 2=%.2f" % (i, nEvData, nEvMC, residual, sigma, float(residual) / sigma, (float(residual) / sigma) ** 2))
        chi2 += (residual / sigma) ** 2
    if (verbosity > 2):
        print("[INFO] FINAL MANUAL CHI2= ", chi2)
    return chi2


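# Note (added for clarity): manualChi2() only uses the diagonal of the
# covariance matrix, i.e. it sums (data - mc)^2 / cov[i][i] per bin and
# neglects bin-to-bin correlations; calculateChi2() below uses the full
# inverse covariance (precision) matrix instead.

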
def calculateChi2(data, mc, cov=None, verbosity=0):
    ndf = data.numPoints()
    values = {}
    for name, obj in {'data': data, 'mc': mc}.items():
        if type(obj) in (yoda.Scatter1D, yoda.Scatter2D, yoda.Scatter3D):
            values[name] = scatterToMatrix(obj)
        else:  # Histo*D hopefully !
            values[name] = histoToMatrix(obj, bw=True)

    v = values['data'].copy()
    v -= values['mc'].copy()
    if cov is None:
        covData = makeCovarianceMatrix(data)
        covMC = makeCovarianceMatrix(mc)
        cov = covData + covMC

    if (cov.shape[0] != cov.shape[1]):
        print("[ERROR] the specified covariance matrix is not square!", cov)
        exit(1)
    if (cov.shape[0] != v.shape[0]):
        print("[ERROR] the specified covariance matrix is incompatible with the vector of data-MC", cov, v)
        exit(1)

    precision_matrix = LA.inv(cov.copy())

    if (verbosity > 1):
        print("[INFO]: precision matrix:", precision_matrix)

    vT = v.copy().transpose()
    chi2tmp = (v.dot(precision_matrix)).dot(vT)
    chi2 = chi2tmp[0][0]
    pvalue = (1 - stats.chi2.cdf(chi2, ndf))

    return chi2, ndf, pvalue, chi2ContributionMatrix(v, precision_matrix)


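# Illustrative usage sketch (not part of the original module): calculateChi2()
# evaluates chi2 = (d - m)^T C^-1 (d - m) between a data and an MC analysis
# object.  The file names and analysis-object path below are placeholders:
#
#   import yoda
#   data = yoda.read("data.yoda")["/REF/MY_ANALYSIS/d01-x01-y01"]
#   mc = yoda.read("mc.yoda")["/MY_ANALYSIS/d01-x01-y01"]
#   cov = makeCovarianceMatrix(data) + makeCovarianceMatrix(mc)
#   chi2, ndf, pvalue, contribs = calculateChi2(data, mc, cov)
#   print(chi2 / ndf, pvalue)

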
def correlationMatrix(covMatrix):
    nbins = covMatrix.shape[0]
    corr = np.zeros([nbins, nbins])
    for i in range(nbins):
        sigma_i = covMatrix[i][i]
        for j in range(nbins):
            sigma_j = covMatrix[j][j]
            corr[i][j] = covMatrix[i][j] / np.sqrt(sigma_i * sigma_j)

    return corr


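# Note (added for clarity): this is the usual normalisation
# corr[i][j] = cov[i][j] / sqrt(cov[i][i] * cov[j][j]), so the diagonal is 1
# and off-diagonal entries lie in [-1, 1] for a valid covariance matrix.

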
def updateAOinYODA(ao, infile):
    aos = yoda.read(infile)
    aos[ao.path] = ao
    yoda.write(sorted(aos.values()), infile)


def chi2ContributionMatrix(diff, prec):
    nbins = prec.shape[0]
    diffT = diff.copy().transpose()
    mo = np.zeros([nbins, nbins])
    for j in range(nbins):
        for i in range(nbins):
            mo[i][j] = diff[0][j] * prec[i][j] * diffT[i][0]
    return mo


def chi2ContribsByRow(chi2contribs):
    nbins = chi2contribs.shape[0]
    mo = {}
    for j in range(nbins):
        mo[j] = 0.0
        for i in range(nbins):
            mo[j] += float(chi2contribs[i][j])
    return mo


def makeSuperAO(aoList, verbosity=0):
    aoOut = yoda.core.Scatter2D()
    pointCounter = 0
    corrOut = {}
    pointsOut = []
    allSystematics = []
    for aoIn in aoList:
        corrIn = aoIn.annotation('ErrorBreakdown') if '1.7' in yoda.version() else yaml.load(aoIn.annotation('ErrorBreakdown'))
        if len(corrIn) == 0: continue
        for ipt in range(aoIn.numPoints()):
            for syst in corrIn[ipt].keys():
                if syst not in allSystematics: allSystematics.append(syst)

    for aoIn in sorted(aoList, key=lambda x: x.name):
        corrIn = aoIn.annotation('ErrorBreakdown') if '1.7' in yoda.version() else yaml.load(aoIn.annotation('ErrorBreakdown'))
        if len(corrIn) == 0: continue
        for ipt in range(aoIn.numPoints()):
            aoOut.addPoint(yoda.core.Point2D(pointCounter, aoIn.points()[ipt].y()))
            totalErrorUp = 0
            for syst in allSystematics:
                if syst in corrIn[ipt].keys():
                    corrOut.setdefault(pointCounter, {})[syst] = corrIn[ipt][syst]
                    totalErrorUp += corrIn[ipt][syst]['up'] * corrIn[ipt][syst]['up']
                else:
                    corrOut.setdefault(pointCounter, {})[syst] = {'up': 0.0, 'dn': 0.0}
            pointsOut.append(aoIn.points()[ipt])
            pointCounter += 1
    for i in range(aoOut.numPoints()):
        if (verbosity) > 1: print("[INFO] Super AO points ", aoOut.points()[i])
    corrs = yaml.dump(corrOut, default_flow_style=True, default_style='', width=1e6)
    aoOut.setAnnotation("ErrorBreakdown", corrs)
    name = aoList[0].path.replace(aoList[0].name, "")
    name += "superAO"
    for ao in sorted(aoList, key=lambda x: x.name):
        name += "_%s" % ao.name
    aoOut.setAnnotation("Path", name)
    return aoOut
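

# Illustrative usage sketch (not part of the original module): several
# distributions can be merged into one "super" analysis object so that a single
# combined chi2 can be computed, keeping systematics correlated across them.
# The variable names below are placeholders:
#
#   superData = makeSuperAO([dataAO1, dataAO2])
#   superMC = makeSuperAO([mcAO1, mcAO2])
#   cov = makeCovarianceMatrix(superData) + makeCovarianceMatrix(superMC)
#   chi2, ndf, pvalue, contribs = calculateChi2(superData, superMC, cov)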