ATLAS Offline Software
MonitoringFile_MergeAlgs.cxx
/*
  Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration
*/

#include "DataQualityUtils/MonitoringFile.h"

#include <cmath>
#include <cstring>   // strcmp
#include <iostream>

#include <TH1.h>
#include <TH2.h>
#include <TList.h>

namespace {
  Bool_t IsBinOverflow(const TH1& hist, Int_t bin) {
    // Return true if the bin is an overflow bin.
    Int_t binx, biny, binz;

    hist.GetBinXYZ(bin, binx, biny, binz);
    Int_t dim = hist.GetDimension();

    if (dim == 1) return binx >= hist.GetNbinsX() + 1;
    else if (dim == 2) return (binx >= hist.GetNbinsX() + 1) ||
                              (biny >= hist.GetNbinsY() + 1);
    else if (dim == 3)
      return (binx >= hist.GetNbinsX() + 1) ||
             (biny >= hist.GetNbinsY() + 1) ||
             (binz >= hist.GetNbinsZ() + 1);
    else return false;
  }

  Bool_t IsBinUnderflow(const TH1& hist, Int_t bin) {
    // Return true if the bin is an underflow bin.
    Int_t binx, biny, binz;

    hist.GetBinXYZ(bin, binx, biny, binz);
    Int_t dim = hist.GetDimension();

    if (dim == 1) return (binx <= 0);
    else if (dim == 2) return (binx <= 0 || biny <= 0);
    else if (dim == 3) return (binx <= 0 || biny <= 0 || binz <= 0);
    else return false;
  }
}

namespace dqutils {
  Int_t MonitoringFile::getNumBins(const TH1& hist) {
    // Return the number of histogram bins in the ROOT 1-dim projection scheme.
    // This allows loops over multi-dim histograms without regard for dimensionality.
    Int_t dim = hist.GetDimension();

    if (dim == 1) {
      return (hist.GetNbinsX() + 2);
    }
    if (dim == 2) {
      return (hist.GetNbinsX() + 2) * (hist.GetNbinsY() + 2);
    }
    if (dim == 3) {
      return (hist.GetNbinsX() + 2) * (hist.GetNbinsY() + 2) * (hist.GetNbinsZ() + 2);
    }
    return -1;
  }
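
  // ------------------------------------------------------------------
  // Illustration only (not part of the original file): a minimal sketch, using the
  // anonymous-namespace helpers above, of how the merge routines below walk a
  // histogram of any dimensionality with a single loop over ROOT global bins.
  // The helper name 'exampleSumInRange' is hypothetical.
  inline double exampleSumInRange(const TH1& hist) {
    const Int_t dim = hist.GetDimension();
    // Same cell count as getNumBins(): each axis has Nbins + 2 cells
    // (index 0 = underflow, index Nbins+1 = overflow).
    Int_t ncells = hist.GetNbinsX() + 2;
    if (dim >= 2) ncells *= (hist.GetNbinsY() + 2);
    if (dim >= 3) ncells *= (hist.GetNbinsZ() + 2);

    double sum = 0.;
    for (Int_t bin = 0; bin < ncells; ++bin) {
      if (IsBinUnderflow(hist, bin) || IsBinOverflow(hist, bin)) continue;
      sum += hist.GetBinContent(bin);
    }
    return sum;
  }
  // ------------------------------------------------------------------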

  void MonitoringFile::merge_effAsPerCent(TH2& a, const TH2& b) {
    // Author: Benjamin Trocme
    // a and b are efficiency histograms with the efficiency stored as a percentage;
    // den/num are numbers of events.
    // BinContent = n/d*100
    // BinError   = (1/d^2) * sqrt( d*n*(d-n) )

    // First extract the denominator.
    // It is supposed to be the same for all bins.
    // This has to be done in two steps to avoid the problem
    // of empty bins, from which the number of events cannot be extracted.
    float denA = 0.;
    float denB = 0.;

    for (int ix = 1; ix <= a.GetNbinsX(); ix++) {
      for (int iy = 1; iy <= a.GetNbinsY(); iy++) {
        // Extract ratio and associated errors
        // Warning: these are percentages!
        float efficiencyA = a.GetBinContent(ix, iy) / 100.;
        float efficiencyB = b.GetBinContent(ix, iy) / 100.;
        float efficiencyErrA = a.GetBinError(ix, iy) / 100.;
        float efficiencyErrB = b.GetBinError(ix, iy) / 100.;

        // Compute denominator ("nb of events")
        if (efficiencyErrA != 0 && efficiencyA != 0 &&
            denA == 0) denA = efficiencyA * (1 - efficiencyA) / efficiencyErrA / efficiencyErrA;
        if (efficiencyErrB != 0 && efficiencyB != 0 &&
            denB == 0) denB = efficiencyB * (1 - efficiencyB) / efficiencyErrB / efficiencyErrB;
      }
    }

    float denTot = denA + denB;
    const double nEntries = a.GetEntries() + b.GetEntries();

    for (int ix = 1; ix <= a.GetNbinsX(); ix++) {
      for (int iy = 1; iy <= a.GetNbinsY(); iy++) {
        // Extract ratio and associated errors
        // Warning: these are percentages!
        float efficiencyA = a.GetBinContent(ix, iy) / 100.;
        float efficiencyB = b.GetBinContent(ix, iy) / 100.;
        //float efficiencyErrA = a.GetBinError(ix,iy)/100.;
        //float efficiencyErrB = b.GetBinError(ix,iy)/100.;

        // Compute numerator ("nb of good events") for each histo
        float numA = denA * efficiencyA;
        float numB = denB * efficiencyB;

        // Deduce the merged ratio and the associated error
        float numTot = numA + numB;
        float efficiencyTot = 0.;
        float efficiencyErrTot = 0.;

        if (denTot != 0.) efficiencyTot = numTot / denTot * 100.;
        if (denTot != 0.) efficiencyErrTot = sqrt(numTot * denTot * (denTot - numTot)) / denTot / denTot * 100.;

        a.SetBinContent(ix, iy, efficiencyTot);
        a.SetBinError(ix, iy, efficiencyErrTot);
      }
    }
    a.SetEntries(nEntries);
  }
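
  // ------------------------------------------------------------------
  // Illustration only (not part of the original file): a minimal sketch of the
  // relation used above to recover the event count from a binomial efficiency
  // and its error, dEff = sqrt(eff*(1-eff)/N)  =>  N = eff*(1-eff)/dEff^2.
  // The helper name 'exampleRecoverDenominator' is hypothetical.
  inline float exampleRecoverDenominator(float eff, float dEff) {
    // Undefined for empty bins or eff = 0, mirroring the guards above.
    if (dEff == 0.f || eff == 0.f) return 0.f;
    return eff * (1.f - eff) / (dEff * dEff);
  }
  // Usage sketch: with eff = 0.80 and dEff = 0.04, N = 0.8*0.2/0.0016 = 100 events.
  // ------------------------------------------------------------------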

  void MonitoringFile::merge_perBinEffPerCent(TH1& a, const TH1& b) {
    // This code assumes that the histogram content is the efficiency of a
    // given cut or selection in each bin (e.g. the ratio of a distribution
    // after cut to the distribution before cut, bin by bin) and that these
    // are efficiencies in percent.
    //
    // It also assumes that the error was calculated in a specific way:
    // dEff = sqrt( eff*(1.-eff)/N )  [eff = efficiency, N = number of events in the bin before cuts]
    // dEff = 1-0.159^(1/N) if eff = 0
    // dEff = 1-0.159^(1/N) if eff = 1
    // dEff = 0 means no entries, the bin is ignored
    //

    constexpr double OneSigOneSided = 0.159; // 0.5*(1-0.681), where 0.681 means 68% CL

    // Verify histogram compatibility
    if (a.GetDimension() != b.GetDimension()) {
      std::cerr << "merge_perBinEffPerCent \"" << a.GetName() <<
        "\": attempt to merge histograms of different dimensionality" << std::endl;
      return;
    }

    Int_t ncells = getNumBins(a);

    if (ncells != getNumBins(b)) {
      std::cerr << "merge_perBinEffPerCent \"" << a.GetName() << "\": attempt to merge histograms of different sizes\n";
      return;
    }

    // do not attempt to automatically extend!
    a.SetCanExtend(TH1::kNoAxis);

    const double nEntries = a.GetEntries() + b.GetEntries();

    for (int bin = 0; bin < ncells; bin++) {
      if (IsBinUnderflow(a, bin) || IsBinOverflow(a, bin)) continue;
      float efficiencyA = a.GetBinContent(bin) / 100.;
      float efficiencyB = b.GetBinContent(bin) / 100.;
      float efficiencyErrA = a.GetBinError(bin) / 100.;
      float efficiencyErrB = b.GetBinError(bin) / 100.;

      float efficiencyTot = 0.;
      float efficiencyErrTot = 0.;
      if (efficiencyErrA == 0.) {
        efficiencyTot = efficiencyB;
        efficiencyErrTot = efficiencyErrB;
      } else {
        if (efficiencyErrB == 0.) {
          efficiencyTot = efficiencyA;
          efficiencyErrTot = efficiencyErrA;
        } else {
          float denomA = 0.;
          if (efficiencyA == 0.) {
            denomA = std::log(OneSigOneSided) / std::log(1. - efficiencyErrA);
          } else {
            if (efficiencyA > 0.99) {
              denomA = std::log(OneSigOneSided) / std::log(1. - efficiencyErrA);
            } else {
              denomA = efficiencyA * (1. - efficiencyA) / (efficiencyErrA * efficiencyErrA);
            }
          }

          float denomB = 0.;
          if (efficiencyB == 0.) {
            denomB = std::log(OneSigOneSided) / std::log(1. - efficiencyErrB);
          } else {
            if (efficiencyB > 0.99) {
              denomB = std::log(OneSigOneSided) / std::log(1. - efficiencyErrB);
            } else {
              denomB = efficiencyB * (1. - efficiencyB) / (efficiencyErrB * efficiencyErrB);
            }
          }

          float denom = denomA + denomB;
          efficiencyTot = (denomA * efficiencyA + denomB * efficiencyB) / denom;
          efficiencyErrTot = std::sqrt(efficiencyTot * (1. - efficiencyTot) / denom);
          if (efficiencyTot == 0.) efficiencyErrTot = 1.0 - std::pow(OneSigOneSided, 1.0 / denom);
          if (efficiencyTot > 0.99) efficiencyErrTot = 1.0 - std::pow(OneSigOneSided, 1.0 / denom);
        }
      }
      a.SetBinContent(bin, efficiencyTot * 100.);
      a.SetBinError(bin, efficiencyErrTot * 100.);
    }
    a.SetEntries(nEntries);
  }
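
  // ------------------------------------------------------------------
  // Illustration only (not part of the original file): a minimal sketch of the
  // inversion used above for the eff = 0 or eff = 1 corner cases.
  // From the convention dEff = 1 - 0.159^(1/N) it follows that
  //   N = log(0.159) / log(1 - dEff),
  // which is exactly the expression assigned to denomA/denomB above.
  // The helper name 'exampleEventsFromOneSidedError' is hypothetical.
  inline double exampleEventsFromOneSidedError(double dEff) {
    constexpr double OneSigOneSided = 0.159;
    if (dEff <= 0. || dEff >= 1.) return 0.;  // convention above: dEff = 0 means "no entries"
    return std::log(OneSigOneSided) / std::log(1. - dEff);
  }
  // ------------------------------------------------------------------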

  void MonitoringFile::merge_effAsPerCentAlt(TH1& a, const TH1& b) {
    // Author: Peter Onyisi, after Benjamin Trocme
    // Variation of merge_effAsPerCent.
    // a and b are efficiency histograms with the efficiency stored as a percentage;
    // den/num are numbers of events.
    // BinContent = n/d*100
    // BinError   = (1/d^2) * sqrt( d*n*(d-n) )

    // Verify histogram compatibility
    if (a.GetDimension() != b.GetDimension()) {
      std::cerr << "merge_effAsPerCentAlt \"" << a.GetName() <<
        "\": attempt to merge histograms of different dimensionality\n";
      return;
    }

    Int_t ncells = getNumBins(a);

    if (ncells != getNumBins(b)) {
      std::cerr << "merge_effAsPerCentAlt \"" << a.GetName() <<
        "\": attempt to merge histograms of different bin counts\n";
      return;
    }

    // do not attempt to automatically extend!
    a.SetCanExtend(TH1::kNoAxis);

    // First extract the denominator.
    // It is supposed to be the same for all bins.
    // This has to be done in two steps to avoid the problem
    // of empty bins, from which the number of events cannot be extracted.

    float denA = 0.;
    float denB = 0.;
    for (int bin = 0; bin < ncells; bin++) {
      if (IsBinUnderflow(a, bin) || IsBinOverflow(a, bin)) continue;
      // Extract ratio and associated errors
      // Warning: these are percentages!
      float efficiencyA = a.GetBinContent(bin) / 100.;
      float efficiencyB = b.GetBinContent(bin) / 100.;
      float efficiencyErrA = a.GetBinError(bin) / 100.;
      float efficiencyErrB = b.GetBinError(bin) / 100.;

      // Compute denominator ("nb of events")
      if (efficiencyErrA != 0 && efficiencyA != 0 &&
          denA == 0) denA = efficiencyA * (1 - efficiencyA) / efficiencyErrA / efficiencyErrA;
      if (efficiencyErrB != 0 && efficiencyB != 0 &&
          denB == 0) denB = efficiencyB * (1 - efficiencyB) / efficiencyErrB / efficiencyErrB;
    }

    float denTot = denA + denB;
    const double nEntries = a.GetEntries() + b.GetEntries();

    for (int bin = 0; bin < ncells; bin++) {
      if (IsBinUnderflow(a, bin) || IsBinOverflow(a, bin)) continue;
      // Extract ratio and associated errors
      // Warning: these are percentages!
      float efficiencyA = a.GetBinContent(bin) / 100.;
      float efficiencyB = b.GetBinContent(bin) / 100.;
      //float efficiencyErrA = a.GetBinError(bin)/100.;
      //float efficiencyErrB = b.GetBinError(bin)/100.;

      // Compute numerator ("nb of good events") for each histo
      float numA = denA * efficiencyA;
      float numB = denB * efficiencyB;

      // Deduce the merged ratio and the associated error
      float numTot = numA + numB;
      float efficiencyTot = 0.;
      float efficiencyErrTot = 0.;

      if (denTot != 0.) efficiencyTot = numTot / denTot * 100.;
      if (denTot != 0.) efficiencyErrTot = sqrt(numTot * denTot * (denTot - numTot)) / denTot / denTot * 100.;

      a.SetBinContent(bin, efficiencyTot);
      a.SetBinError(bin, efficiencyErrTot);
    }
    a.SetEntries(nEntries);
  }

  void MonitoringFile::merge_weightedAverage(TH1& a, const TH1& b) {
    // Author: Tobias Golling

    if (a.GetDimension() != b.GetDimension()) {
      std::cerr << "merge_weightedAverage \"" << a.GetName() << "\": attempt to merge histograms of different dimensionality\n";
      return;
    }

    Int_t ncells = getNumBins(a);

    if (ncells != getNumBins(b)) {
      std::cerr << "merge_weightedAverage \"" << a.GetName() << "\": attempt to merge histograms of different sizes\n";
      return;
    }

    // do not attempt to automatically extend!
    a.SetCanExtend(TH1::kNoAxis);

    double nEntries = a.GetEntries();
    nEntries += b.GetEntries();

    // if ( !a.InheritsFrom("TH2") ) {
    for (int bin = 0; bin < ncells; bin++) {
      double y1 = a.GetBinContent(bin);
      double y2 = b.GetBinContent(bin);
      double e1 = a.GetBinError(bin);
      double e2 = b.GetBinError(bin);
      double w1 = 1., w2 = 1.;
      if (e1 > 0) w1 = 1. / (e1 * e1);
      if (e2 > 0) w2 = 1. / (e2 * e2);

      // case 1: both bins have errors -> inverse-variance weighted mean
      if (e1 > 0 && e2 > 0) {
        a.SetBinContent(bin, (w1 * y1 + w2 * y2) / (w1 + w2));
        a.SetBinError(bin, 1. / sqrt(w1 + w2));
      }
      // case 2: only b has an error -> take b
      else if (e2 > 0) {
        a.SetBinContent(bin, y2);
        a.SetBinError(bin, e2);
      }
      // case 3: only a has an error -> keep a
      else if (e1 > 0) {
        a.SetBinContent(bin, y1);
        a.SetBinError(bin, e1);
      }
      // case 4: neither has an error -> plain average, no error
      else {
        a.SetBinContent(bin, (y1 + y2) / 2.);
        a.SetBinError(bin, 0.);
      }
    }

    a.SetEntries(nEntries);

    /*
    } else if ( a.InheritsFrom("TH2") ) {
      try {
        merge_weightedAverage2D( dynamic_cast<TH2&>(a), dynamic_cast<const TH2&>(b) );
      } catch ( const std::bad_cast& err ) {
        // protect against dynamic cast failing
        return;
      }
    }
    */
  }
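
  // ------------------------------------------------------------------
  // Illustration only (not part of the original file): a minimal sketch of the
  // inverse-variance weighting used above for two measurements (y1 +/- e1) and (y2 +/- e2):
  //   y = (y1/e1^2 + y2/e2^2) / (1/e1^2 + 1/e2^2),   e = 1/sqrt(1/e1^2 + 1/e2^2).
  // The struct and helper names are hypothetical.
  struct ExampleWeightedPoint { double value; double error; };
  inline ExampleWeightedPoint exampleWeightedMean(double y1, double e1, double y2, double e2) {
    const double w1 = 1. / (e1 * e1);
    const double w2 = 1. / (e2 * e2);
    return { (w1 * y1 + w2 * y2) / (w1 + w2), 1. / std::sqrt(w1 + w2) };
  }
  // e.g. exampleWeightedMean(10., 1., 12., 2.) gives roughly (10.4, 0.89).
  // ------------------------------------------------------------------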

  void MonitoringFile::merge_weightedAverage2D(TH2& a, const TH2& b) {
    // Author: Frank Berghaus
    for (int binx = 0; binx <= a.GetNbinsX() + 1; binx++) {
      for (int biny = 0; biny <= a.GetNbinsY() + 1; biny++) {
        int bin = a.GetBin(binx, biny);

        double y1 = a.GetBinContent(bin);
        double y2 = b.GetBinContent(bin);
        double e1 = a.GetBinError(bin);
        double e2 = b.GetBinError(bin);
        double w1 = 1., w2 = 1.;
        if (e1 > 0) w1 = 1. / (e1 * e1);
        if (e2 > 0) w2 = 1. / (e2 * e2);

        // case 1: both bins have errors -> inverse-variance weighted mean
        if (e1 > 0 && e2 > 0) {
          a.SetBinContent(bin, (w1 * y1 + w2 * y2) / (w1 + w2));
          a.SetBinError(bin, 1. / sqrt(w1 + w2));
        }
        // case 2: only b has an error -> take b
        else if (e2 > 0) {
          a.SetBinContent(bin, y2);
          a.SetBinError(bin, e2);
        }
        // case 3: only a has an error -> keep a
        else if (e1 > 0) {
          a.SetBinContent(bin, y1);
          a.SetBinError(bin, e1);
        }
        // case 4: neither has an error -> plain average, no error
        else {
          a.SetBinContent(bin, (y1 + y2) / 2.);
          a.SetBinError(bin, 0.);
        }
      }
    }
  }

  void MonitoringFile::merge_weightedEff(TH1& a, const TH1& b) {
    // Author: Arely Cortes Gonzalez
    // This function adds two 1D efficiency histograms,
    // weighting them by their number of entries.
    // The histograms need to have the same binning.
    // It can also be used to add two normalized histograms,
    // keeping the properly weighted normalization.
    // The number of entries of the merged histogram
    // will be equal to the number of entries of 'a' plus the number of entries of 'b'.


    // Getting weights based on the number of entries.
    double entries_a = a.GetEntries();
    double entries_b = b.GetEntries();

    double weight_a = 0.0;
    double weight_b = 0.0;

    if (a.GetDimension() != b.GetDimension()) {
      std::cerr << "merge_weightedEff \"" << a.GetName() << "\": attempt to merge histograms of different dimensionality\n";
      return;
    }

    // Check whether the sumw2 arrays are present - Added by B.Trocme
    bool sumw2 = (a.GetSumw2N() != 0) && (b.GetSumw2N() != 0);

    Int_t ncells = getNumBins(a);

    if (ncells != getNumBins(b)) {
      std::cerr << "merge_weightedEff \"" << a.GetName() << "\": attempt to merge histograms of different sizes\n";
      return;
    }

    if (entries_b == 0) {
      // nothing to merge with, return
      return;
    }
    if (entries_a == 0) {
      // replace my contents with b
      a.Add(&b);
      return;
    }

    // do not attempt to automatically extend!
    a.SetCanExtend(TH1::kNoAxis);

    if (entries_a + entries_b > 0) {
      weight_a = entries_a / (entries_a + entries_b);
      weight_b = entries_b / (entries_a + entries_b);

      for (int bin = 0; bin < ncells; bin++) {
        double binContent_a = a.GetBinContent(bin);
        double binContent_b = b.GetBinContent(bin);

        // Error treatment added by Evan Wulf.
        // Note that the errors are not used in the calculation of the content!
        double binError_a = a.GetBinError(bin);
        double binError_b = b.GetBinError(bin);

        // Filling the histogram with the new weighted values
        float weightedEff = binContent_a * weight_a + binContent_b * weight_b;
        a.SetBinContent(bin, weightedEff);

        // Set errors:
        float weightedError = sqrt(pow(binError_a * weight_a, 2) + pow(binError_b * weight_b, 2));
        a.SetBinError(bin, weightedError);
      }
    }
    // If the original histos did not contain sumw2, delete the sumw2 array created
    // by SetBinError. This may look dirty but this is the recommendation by R.Brun:
    // http://root.cern.ch/phpBB3/viewtopic.php?f=3&t=1620&p=51674&hilit=sumw2#p51674
    // Added by B.Trocme
    if (!sumw2) (a.GetSumw2())->Set(0);
    // Resets the number of entries of a:
    a.SetEntries(entries_a + entries_b);
  }
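
  // ------------------------------------------------------------------
  // Illustration only (not part of the original file): a minimal sketch of the
  // entry-weighted combination used above, content = a*Na/(Na+Nb) + b*Nb/(Na+Nb).
  // The helper name 'exampleEntryWeightedMean' is hypothetical.
  inline double exampleEntryWeightedMean(double effA, double nA, double effB, double nB) {
    if (nA + nB <= 0.) return 0.;
    return (effA * nA + effB * nB) / (nA + nB);
  }
  // e.g. exampleEntryWeightedMean(0.80, 100., 0.90, 300.) = 0.875.
  // ------------------------------------------------------------------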

  void MonitoringFile::merge_Rebinned(TH1& a, TH1& b) {
    // Author: Luca Fiorini
    // This method provides a correct summation for histograms that have different binning,
    // e.g. histograms with TH1::kCanRebin set to true.
    // The method uses TH1::Merge, as explained here:
    // http://root.cern.ch/root/html/TH1.html#TH1:Merge
    // The x axes may have different numbers
    // of bins and different limits, BUT the largest bin width must be
    // a multiple of the smallest bin width and the upper limit must also
    // be a multiple of the bin width.

    TList* list = new TList;

    list->Add(&b);
    a.Merge(list);

    delete list;
  }
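
  // ------------------------------------------------------------------
  // Illustration only (not part of the original file): a minimal usage sketch of
  // the TList-based TH1::Merge call above, assuming two histograms that satisfy
  // the binning constraints just described (coarser bin width a multiple of the
  // finer one, common upper limit). Histogram names are hypothetical.
  inline void exampleMergeRebinned() {
    TH1F fine("example_fine", "fine", 100, 0., 10.);       // 0.1-wide bins
    TH1F coarse("example_coarse", "coarse", 50, 0., 10.);  // 0.2-wide bins
    fine.Fill(1.5);
    coarse.Fill(2.5);
    TList others;
    others.Add(&coarse);
    fine.Merge(&others);  // 'fine' now holds the entries of both histograms
  }
  // ------------------------------------------------------------------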

  void MonitoringFile::merge_eventSample(TH2& a, const TH2& b) {
    // Author: Peter Faulkner
    // Merge histograms containing, for example, the event numbers of events
    // with particular types of errors. Data is inserted/appended row-wise.
    int nbinsx = a.GetNbinsX();
    int nbinsy = a.GetNbinsY();

    if (b.GetNbinsX() != nbinsx || b.GetNbinsY() != nbinsy) return;

    double entries = a.GetEntries();
    for (int biny = 1; biny <= nbinsy; biny++) {
      for (int binx = 1; binx <= nbinsx; binx++) {
        double bVal = b.GetBinContent(binx, biny);
        if (bVal == 0) break;
        for (int binxa = 1; binxa <= nbinsx; binxa++) {
          double aVal = a.GetBinContent(binxa, biny);
          if (aVal == 0) {
            // empty slot: append the new value here
            a.SetBinContent(binxa, biny, bVal);
            entries++;
            break;
          } else if (bVal < aVal) {
            // shift larger values one slot to the right to keep the row sorted, then insert
            for (int bx = nbinsx; bx > binxa; bx--) {
              double v1 = a.GetBinContent(bx - 1, biny);
              if (v1 == 0) continue;
              double v2 = a.GetBinContent(bx, biny);
              if (v2 == 0) entries++;
              a.SetBinContent(bx, biny, v1);
            }
            a.SetBinContent(binxa, biny, bVal);
            break;
          } else if (aVal == bVal) break;  // duplicate: keep only one copy
        }
      }
    }
    a.SetEntries(entries);
  }
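
  // ------------------------------------------------------------------
  // Illustration only (not part of the original file): a small worked example of
  // the row-wise sorted insertion above, with hypothetical event numbers.
  //   row of a before merge : 101  205    0    0
  //   row of b              : 150  205    0    0
  //   row of a after merge  : 101  150  205    0
  // 150 is inserted in order (205 is shifted right), the duplicate 205 is kept
  // only once, and trailing zeros mark unused slots.
  // ------------------------------------------------------------------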

  void MonitoringFile::merge_RMS(TH1& a, const TH1& b) {
    // Merge histograms where bins are filled with RMS-type data:
    // add in quadrature, weighted by the number of events as
    // reconstructed from the errors.

    // Author: Evan Wulf

    if (a.GetDimension() != b.GetDimension()) {
      std::cerr << "merge_RMS \"" << a.GetName() << "\": attempt to merge histograms of different dimensionality\n";
      return;
    }

    Int_t ncells = getNumBins(a);

    if (ncells != getNumBins(b)) {
      std::cerr << "merge_RMS \"" << a.GetName() << "\": attempt to merge histograms of different sizes\n";
      return;
    }

    // do not attempt to automatically extend!
    a.SetCanExtend(TH1::kNoAxis);

    double nEntries = a.GetEntries();
    nEntries += b.GetEntries();

    for (int bin = 0; bin < ncells; bin++) {
      double rms1 = a.GetBinContent(bin);
      double rms2 = b.GetBinContent(bin);
      double e1 = a.GetBinError(bin);
      double e2 = b.GetBinError(bin);

      // Reconstruct the number of events from error = RMS/sqrt(2N)
      double n1 = 0;
      double n2 = 0;

      if (e1 != 0) {
        n1 = pow(rms1 / e1, 2) / 2;
      }
      if (e2 != 0) {
        n2 = pow(rms2 / e2, 2) / 2;
      }

      double ntot = n1 + n2;
      if (ntot <= 0) {
        a.SetBinContent(bin, sqrt((rms1 * rms1) + (rms2 * rms2)));
        a.SetBinError(bin, sqrt((e1 * e1) + (e2 * e2)));
      } else {
        double rmstot = sqrt(((pow(n1 * rms1, 2) / (n1 - 1)) + (pow(n2 * rms2, 2) / (n2 - 1))) * (ntot - 1) / pow(ntot, 2));
        a.SetBinContent(bin, rmstot);
        a.SetBinError(bin, rmstot / sqrt(2 * ntot));
      }
    }

    a.SetEntries(nEntries);
  }
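
  // ------------------------------------------------------------------
  // Illustration only (not part of the original file): a minimal sketch of the
  // event-count reconstruction used above. For a Gaussian sample the error on
  // the RMS is approximately RMS/sqrt(2N), so N = (RMS/error)^2 / 2.
  // The helper name 'exampleEventsFromRmsError' is hypothetical.
  inline double exampleEventsFromRmsError(double rms, double rmsError) {
    if (rmsError == 0.) return 0.;  // convention above: no error means no event count
    return std::pow(rms / rmsError, 2) / 2.;
  }
  // e.g. exampleEventsFromRmsError(2.0, 0.2) = 50 events.
  // ------------------------------------------------------------------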

  void MonitoringFile::merge_RMSpercentDeviation(TH1& a, const TH1& b) {
    // Merge histograms where bins are filled with RMS-type data which has
    // been normalized to a percent deviation by use of a reference, using
    //   content = 100 * (RMS - reference) / reference = (RMS * 100 / reference) - 100
    //   error   = RMSerror * 100 / reference.

    // Once the constant term (100) is added back in, the treatment is the same as in merge_RMS above:

    // add in quadrature, weighted by the number of events as
    // reconstructed from the errors.

    // Author: Evan Wulf

    if (a.GetDimension() != b.GetDimension()) {
      std::cerr << "merge_RMSpercentDeviation \"" << a.GetName() << "\": attempt to merge histograms of different dimensionality\n";
      return;
    }

    Int_t ncells = getNumBins(a);

    if (ncells != getNumBins(b)) {
      std::cerr << "merge_RMSpercentDeviation \"" << a.GetName() << "\": attempt to merge histograms of different sizes\n";
      return;
    }

    double nEntries = a.GetEntries();
    nEntries += b.GetEntries();

    for (int bin = 0; bin < ncells; bin++) {
      double y1 = a.GetBinContent(bin) + 100;
      double y2 = b.GetBinContent(bin) + 100;
      double e1 = a.GetBinError(bin);
      double e2 = b.GetBinError(bin);

      double n1 = 0;
      double n2 = 0;

      if (e1 != 0) {
        n1 = pow(y1 / e1, 2) / 2;
      }
      if (e2 != 0) {
        n2 = pow(y2 / e2, 2) / 2;
      }

      double ntot = n1 + n2;
      if (ntot <= 0) {
        a.SetBinContent(bin, sqrt((y1 * y1) + (y2 * y2)) - 100);
        a.SetBinError(bin, sqrt((e1 * e1) + (e2 * e2)));
      } else {
        double ytot = sqrt(((pow(n1 * y1, 2) / (n1 - 1)) + (pow(n2 * y2, 2) / (n2 - 1))) * (ntot - 1) / pow(ntot, 2));
        a.SetBinContent(bin, ytot - 100);
        a.SetBinError(bin, ytot / sqrt(2 * ntot));
      }
    }

    a.SetEntries(nEntries);
  }

  void MonitoringFile::merge_lowerLB(TH1& a, const TH1& b) {
    // Merge "status" histograms, i.e. histograms filled at the start of a run/LB.
    // The histogram title should contain the LB for which the histo was filled,
    // such that strcmp can pick out the histogram with the lower LB.
    // Be careful not to format your title with %d but rather %4d; otherwise
    // strcmp compares character by character and concludes that "2" > "10".
    // Example in: LArCalorimeter/LArMonTools/src/LArCoverage.cxx
    // Author: B.Trocme
    //

    if (a.GetDimension() != b.GetDimension()) {
      std::cerr << "merge_lowerLB \"" << a.GetName() << "\": attempt to merge histograms of different dimensionality\n";
      return;
    }

    Int_t ncells = getNumBins(a);

    if (ncells != getNumBins(b)) {
      std::cerr << "merge_lowerLB \"" << a.GetName() << "\": attempt to merge histograms of different sizes\n";
      return;
    }

    // do not attempt to automatically extend!
    a.SetCanExtend(TH1::kNoAxis);

    if (strcmp(a.GetTitle(), b.GetTitle()) > 0) {
      // The LB of histo a is greater than the LB of histo b:
      // a is overwritten by b - otherwise do nothing
      a.SetTitle(b.GetTitle());
      /*
      for( int bin = 0; bin < ncells; bin++ ) {
        a.SetBinContent(bin,b.GetBinContent(bin));
        a.SetBinError(bin,b.GetBinError(bin));
      }
      */
      a.Add(&a, &b, 0, 1);
      // If the original histo did not contain sumw2, delete the sumw2 array created
      // by SetBinError. This may look dirty but this is the recommendation by R.Brun:
      // http://root.cern.ch/phpBB3/viewtopic.php?f=3&t=1620&p=51674&hilit=sumw2#p51674
      /*
      if ((b.GetSumw2N()) == 0) (a.GetSumw2())->Set(0);

      a.SetEntries(b.GetEntries()); */
    }
    return;
  }
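
  // ------------------------------------------------------------------
  // Illustration only (not part of the original file): a minimal sketch of the
  // %4d-vs-%d pitfall mentioned above. The titles are hypothetical; with plain %d
  // the lexicographic strcmp ordering disagrees with the numeric LB ordering,
  // while fixed-width padding keeps the two consistent.
  inline void exampleLowerLBTitleOrdering() {
    // Titles as they would be produced by %d vs %4d:
    const char* plain2   = "Coverage - LB 2";
    const char* plain10  = "Coverage - LB 10";
    const char* padded2  = "Coverage - LB    2";
    const char* padded10 = "Coverage - LB   10";
    bool wrongOrder = (strcmp(plain2, plain10) > 0);    // true: '2' > '1', so "LB 2" sorts after "LB 10"
    bool rightOrder = (strcmp(padded2, padded10) < 0);  // true: padding restores the numeric order
    (void)wrongOrder; (void)rightOrder;
  }
  // ------------------------------------------------------------------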

  void MonitoringFile::merge_identical(TH1& a, const TH1& b) {
    // Merge histograms that are expected to be identical in both inputs:
    // simply verify, bin by bin, that contents and errors agree and leave 'a' unchanged.
    // Author: B.Trocme
    //

    if (a.GetDimension() != b.GetDimension()) {
      std::cerr << "merge_identical \"" << a.GetName() << "\": attempt to merge histograms of different dimensionality\n";
      return;
    }

    Int_t ncells = getNumBins(a);

    if (ncells != getNumBins(b)) {
      std::cerr << "merge_identical \"" << a.GetName() << "\": attempt to merge histograms of different sizes\n";
      return;
    }

    // check that all bin contents are identical
    for (Int_t icell = 0; icell < ncells; ++icell) {
      if ((a.GetBinContent(icell) != b.GetBinContent(icell))
          || (a.GetBinError(icell) != b.GetBinError(icell))) {
        std::cerr << "merge_identical \"" << a.GetName() << "\" and \"" << b.GetName() << "\" have different content\n";
        return;
      }
    }

    return;
  }
} // end dqutils namespace