ATLAS Offline Software
MonitoringFile_MergeAlgs.cxx
/*
  Copyright (C) 2002-2025 CERN for the benefit of the ATLAS collaboration
*/


#include <TH1.h>
#include <TH2.h>
#include <TList.h>
#include <bitset>
#include <cmath>
#include <iostream>
#include <stdexcept>

namespace {
Bool_t IsBinOverflow(const TH1& hist, Int_t bin)
{
  // Return true if the bin is an overflow bin.
  Int_t binx, biny, binz;
  hist.GetBinXYZ(bin, binx, biny, binz);
  Int_t dim = hist.GetDimension();

  if ( dim == 1 )
    return binx >= hist.GetNbinsX() + 1;
  else if ( dim == 2 )
    return (binx >= hist.GetNbinsX() + 1) ||
           (biny >= hist.GetNbinsY() + 1);
  else if ( dim == 3 )
    return (binx >= hist.GetNbinsX() + 1) ||
           (biny >= hist.GetNbinsY() + 1) ||
           (binz >= hist.GetNbinsZ() + 1);
  else
    return kFALSE;
}

Bool_t IsBinUnderflow(const TH1& hist, Int_t bin)
{
  // Return true if the bin is an underflow bin.
  Int_t binx, biny, binz;
  hist.GetBinXYZ(bin, binx, biny, binz);
  Int_t dim = hist.GetDimension();

  if ( dim == 1 )
    return (binx <= 0);
  else if ( dim == 2 )
    return (binx <= 0 || biny <= 0);
  else if ( dim == 3 )
    return (binx <= 0 || biny <= 0 || binz <= 0);
  else
    return kFALSE;
}
}

namespace dqutils {

Int_t MonitoringFile::getNumBins( const TH1& hist )
{
  // Return the number of histogram cells in the ROOT 1-dim "global bin" scheme,
  // including under/overflow bins.  This allows loops over multi-dimensional
  // histograms without regard for their dimensionality.
  Int_t dim = hist.GetDimension();
  if (dim == 1) {
    return (hist.GetNbinsX()+2);
  }
  if (dim == 2) {
    return (hist.GetNbinsX()+2)*(hist.GetNbinsY()+2);
  }
  if (dim == 3) {
    return (hist.GetNbinsX()+2)*(hist.GetNbinsY()+2)*(hist.GetNbinsZ()+2);
  }
  return -1;
}
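
// Illustrative note (added for clarity, not part of the original comments):
// each axis carries an underflow and an overflow cell, hence the "+2" per axis.
// For example, a TH2 with 3 x-bins and 2 y-bins has (3+2)*(2+2) = 20 global
// cells, and a loop of the form
//   for (Int_t bin = 0; bin < getNumBins(h); ++bin) { h.GetBinContent(bin); }
// visits every one of them; IsBinUnderflow/IsBinOverflow above are then used
// to skip the under/overflow cells where appropriate.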

void MonitoringFile::merge_effAsPerCent( TH2& a, const TH2& b )
{
  // Author: Benjamin Trocme
  // a and b are efficiency histograms with percentages stored
  // den/num are numbers of events
  // BinContent = n/d*100
  // BinError   = (1/d^2) * sqrt( d*n*(d-n) )

  // First extract the denominator.
  // It is supposed to be the same for all bins.
  // This has to be done in two passes (denominator first, merge below) to avoid
  // the problem of empty bins, from which the number of events cannot be extracted.
  float denA = 0.;
  float denB = 0.;
  for (int ix = 1; ix <= a.GetNbinsX(); ix++){
    for (int iy = 1; iy <= a.GetNbinsY(); iy++){
      // Extract ratio and associated errors
      // Warning : these are percentages!
      float efficiencyA = a.GetBinContent(ix,iy)/100.;
      float efficiencyB = b.GetBinContent(ix,iy)/100.;
      float efficiencyErrA = a.GetBinError(ix,iy)/100.;
      float efficiencyErrB = b.GetBinError(ix,iy)/100.;

      // Compute denominator ("nb of events")
      if (efficiencyErrA != 0 && efficiencyA != 0 && denA == 0) denA = efficiencyA*(1-efficiencyA)/efficiencyErrA/efficiencyErrA;
      if (efficiencyErrB != 0 && efficiencyB != 0 && denB == 0) denB = efficiencyB*(1-efficiencyB)/efficiencyErrB/efficiencyErrB;
    }
  }

  float denTot = denA + denB;
  const double nEntries = a.GetEntries() + b.GetEntries();

  for (int ix = 1; ix <= a.GetNbinsX(); ix++){
    for (int iy = 1; iy <= a.GetNbinsY(); iy++){
      // Extract ratio and associated errors
      // Warning : these are percentages!
      float efficiencyA = a.GetBinContent(ix,iy)/100.;
      float efficiencyB = b.GetBinContent(ix,iy)/100.;
      //float efficiencyErrA = a.GetBinError(ix,iy)/100.;
      //float efficiencyErrB = b.GetBinError(ix,iy)/100.;

      // Compute numerator ("nb of good events") for each histo
      float numA = denA * efficiencyA;
      float numB = denB * efficiencyB;

      // Deduce the merged ratio and the associated error
      float numTot = numA + numB;
      float efficiencyTot = 0.;
      float efficiencyErrTot = 0.;

      if (denTot != 0.) efficiencyTot = numTot/denTot*100.;
      if (denTot != 0.) efficiencyErrTot = std::sqrt(numTot*denTot*(denTot-numTot))/denTot/denTot*100.;

      a.SetBinContent(ix,iy,efficiencyTot);
      a.SetBinError(ix,iy,efficiencyErrTot);
    }
  }
  a.SetEntries( nEntries );
}
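
// Worked example (added illustration; the numbers are hypothetical): if a bin
// of `a` holds 80% with a binomial error of 4%, the first pass reconstructs
// denA = 0.8*(1-0.8)/0.04^2 = 100 events.  If `b` yields denB = 400 with 90%
// in the same bin, the merged bin becomes
//   numTot = 100*0.8 + 400*0.9 = 440,  denTot = 500,
//   content = 440/500*100 = 88%,
//   error   = sqrt(440*500*(500-440))/500^2 * 100 ~ 1.45%,
// which is the usual binomial error sqrt(0.88*0.12/500) expressed in percent.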

void MonitoringFile::merge_perBinEffPerCent( TH1& a, const TH1& b )
{
  // This code assumes that the histogram content is the efficiency of a
  // given cut or selection in each bin (e.g. the ratio of a distribution
  // after cut to the distribution before cut, bin by bin) and that these
  // are efficiencies in percent.
  //
  // It also assumes that the error was calculated in a specific way:
  // dEff = sqrt( eff*(1.-eff)/N )   [eff = efficiency, N = number of events in the bin before cuts]
  // dEff = 1-0.159^(1/N) if eff = 0
  // dEff = 1-0.159^(1/N) if eff = 1
  // dEff = 0 means no entries, the bin is ignored
  //

  constexpr double OneSigOneSided = 0.159; // 0.5*(1-0.681), where 0.681 is the 68% CL
  // Verify histogram compatibility
  if (a.GetDimension() != b.GetDimension()) {
    std::cerr << "merge_perBinEffPerCent \"" << a.GetName() << "\": attempt to merge histograms of different dimensionality" << std::endl;
    return;
  }

  Int_t ncells = getNumBins(a);

  if (ncells != getNumBins(b)) {
    std::cerr << "merge_perBinEffPerCent \"" << a.GetName() << "\": attempt to merge histograms of different sizes\n";
    return;
  }

  // do not attempt to automatically extend!
  a.SetCanExtend(TH1::kNoAxis);

  const double nEntries = a.GetEntries() + b.GetEntries();

  for( int bin = 0; bin < ncells; bin++ ) {
    if (IsBinUnderflow(a, bin) || IsBinOverflow(a, bin)) continue;
    float efficiencyA = a.GetBinContent(bin)/100.;
    float efficiencyB = b.GetBinContent(bin)/100.;
    float efficiencyErrA = a.GetBinError(bin)/100.;
    float efficiencyErrB = b.GetBinError(bin)/100.;

    float efficiencyTot = 0.;
    float efficiencyErrTot = 0.;
    if ( efficiencyErrA == 0. ) {
      efficiencyTot = efficiencyB;
      efficiencyErrTot = efficiencyErrB;
    }
    else {
      if ( efficiencyErrB == 0. ) {
        efficiencyTot = efficiencyA;
        efficiencyErrTot = efficiencyErrA;
      }
      else {
        float denomA = 0.;
        if ( efficiencyA == 0. ){
          denomA = std::log(OneSigOneSided)/std::log(1.-efficiencyErrA);
        }
        else {
          if ( efficiencyA > 0.99) {
            denomA = std::log(OneSigOneSided)/std::log(1.-efficiencyErrA);
          }
          else {
            denomA = efficiencyA*(1.-efficiencyA)/(efficiencyErrA*efficiencyErrA);
          }
        }

        float denomB = 0.;
        if ( efficiencyB == 0. ){
          denomB = std::log(OneSigOneSided)/std::log(1.-efficiencyErrB);
        }
        else {
          if ( efficiencyB > 0.99) {
            denomB = std::log(OneSigOneSided)/std::log(1.-efficiencyErrB);
          }
          else {
            denomB = efficiencyB*(1.-efficiencyB)/(efficiencyErrB*efficiencyErrB);
          }
        }

        float denom = denomA + denomB;
        efficiencyTot = (denomA*efficiencyA + denomB*efficiencyB)/denom;
        efficiencyErrTot = std::sqrt(efficiencyTot*(1.-efficiencyTot)/denom);
        if ( efficiencyTot == 0. ) efficiencyErrTot = 1.0-std::pow(OneSigOneSided,1.0/denom);
        if ( efficiencyTot > 0.99 ) efficiencyErrTot = 1.0-std::pow(OneSigOneSided,1.0/denom);
      }
    }
    a.SetBinContent(bin,efficiencyTot*100.);
    a.SetBinError(bin,efficiencyErrTot*100.);
  }
  a.SetEntries( nEntries );
}
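
// Worked example (added illustration; numbers are hypothetical): for a regular
// bin the number of events before cuts is recovered by inverting the binomial
// error, N = eff*(1-eff)/dEff^2; for bins at eff = 0 or eff ~ 1 it is instead
// recovered from the one-sided limit, N = log(0.159)/log(1-dEff).  E.g. a bin
// with eff = 0 and dEff = 0.168 gives N = log(0.159)/log(0.832) ~ 10.  The two
// inputs are then combined as a weighted mean,
//   effTot = (N_A*eff_A + N_B*eff_B)/(N_A + N_B),
// with the merged error recomputed from effTot and N_A + N_B.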

void MonitoringFile::merge_effAsPerCentAlt( TH1& a, const TH1& b )
{
  // Author: Peter Onyisi, after Benjamin Trocme
  // Variation of merge_effAsPerCent
  // a and b are efficiency histograms with percentages stored
  // den/num are numbers of events
  // BinContent = n/d*100
  // BinError   = (1/d^2) * sqrt( d*n*(d-n) )

  // Verify histogram compatibility
  if (a.GetDimension() != b.GetDimension()) {
    std::cerr << "merge_effAsPerCentAlt \"" << a.GetName() << "\": attempt to merge histograms of different dimensionality\n";
    return;
  }

  Int_t ncells = getNumBins(a);

  if (ncells != getNumBins(b)) {
    std::cerr << "merge_effAsPerCentAlt \"" << a.GetName() << "\": attempt to merge histograms of different bin counts\n";
    return;
  }

  // do not attempt to automatically extend!
  a.SetCanExtend(TH1::kNoAxis);

  // First extract the denominator.
  // It is supposed to be the same for all bins.
  // This has to be done in two passes to avoid the problem of empty bins,
  // from which the number of events cannot be extracted.

  float denA = 0.;
  float denB = 0.;
  for( int bin = 0; bin < ncells; bin++ ) {
    if (IsBinUnderflow(a, bin) || IsBinOverflow(a, bin)) continue;
    // Extract ratio and associated errors
    // Warning : these are percentages!
    float efficiencyA = a.GetBinContent(bin)/100.;
    float efficiencyB = b.GetBinContent(bin)/100.;
    float efficiencyErrA = a.GetBinError(bin)/100.;
    float efficiencyErrB = b.GetBinError(bin)/100.;

    // Compute denominator ("nb of events")
    if (efficiencyErrA != 0 && efficiencyA != 0 && denA == 0) denA = efficiencyA*(1-efficiencyA)/efficiencyErrA/efficiencyErrA;
    if (efficiencyErrB != 0 && efficiencyB != 0 && denB == 0) denB = efficiencyB*(1-efficiencyB)/efficiencyErrB/efficiencyErrB;
  }

  float denTot = denA + denB;
  const double nEntries = a.GetEntries() + b.GetEntries();

  for( int bin = 0; bin < ncells; bin++ ) {
    if (IsBinUnderflow(a, bin) || IsBinOverflow(a, bin)) continue;
    // Extract ratio and associated errors
    // Warning : these are percentages!
    float efficiencyA = a.GetBinContent(bin)/100.;
    float efficiencyB = b.GetBinContent(bin)/100.;
    //float efficiencyErrA = a.GetBinError(bin)/100.;
    //float efficiencyErrB = b.GetBinError(bin)/100.;

    // Compute numerator ("nb of good events") for each histo
    float numA = denA * efficiencyA;
    float numB = denB * efficiencyB;

    // Deduce the merged ratio and the associated error
    float numTot = numA + numB;
    float efficiencyTot = 0.;
    float efficiencyErrTot = 0.;

    if (denTot != 0.) efficiencyTot = numTot/denTot*100.;
    if (denTot != 0.) efficiencyErrTot = std::sqrt(numTot*denTot*(denTot-numTot))/denTot/denTot*100.;

    a.SetBinContent(bin,efficiencyTot);
    a.SetBinError(bin,efficiencyErrTot);
  }
  a.SetEntries( nEntries );
}
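
// Added usage sketch (hypothetical names, not taken from the original file):
// both percentage-efficiency mergers fold the contents of a second histogram
// into the first in place, e.g.
//   TH2F* effMap      = ...;  // efficiency map from the first input, in percent
//   TH2F* effMapOther = ...;  // the same histogram from a second input
//   MonitoringFile::merge_effAsPerCent( *effMap, *effMapOther );
// merge_effAsPerCentAlt applies the same arithmetic to a histogram of any
// dimensionality by looping over ROOT global bins instead of (x,y) indices.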


void MonitoringFile::merge_weightedAverage( TH1& a, const TH1& b )
{

  // Author: Tobias Golling

  if (a.GetDimension() != b.GetDimension()) {
    std::cerr << "merge_weightedAverage \"" << a.GetName() << "\": attempt to merge histograms of different dimensionality\n";
    return;
  }

  Int_t ncells = getNumBins(a);

  if (ncells != getNumBins(b)) {
    std::cerr << "merge_weightedAverage \"" << a.GetName() << "\": attempt to merge histograms of different sizes\n";
    return;
  }

  // do not attempt to automatically extend!
  a.SetCanExtend(TH1::kNoAxis);

  double nEntries = a.GetEntries();
  nEntries += b.GetEntries();

  // if ( !a.InheritsFrom("TH2") ) {
  for( int bin = 0; bin < ncells; bin++ ) {
    double y1 = a.GetBinContent(bin);
    double y2 = b.GetBinContent(bin);
    double e1 = a.GetBinError(bin);
    double e2 = b.GetBinError(bin);
    double w1 = 1., w2 = 1.;
    if (e1 > 0) w1 = 1./(e1*e1);
    if (e2 > 0) w2 = 1./(e2*e2);

    // case 1:
    if (e1 > 0 && e2 > 0){
      a.SetBinContent(bin, (w1*y1 + w2*y2)/(w1 + w2));
      a.SetBinError(bin, 1./std::sqrt(w1 + w2));
    }
    // case 2:
    else if (e2 > 0){
      a.SetBinContent(bin, y2);
      a.SetBinError(bin, e2);
    }
    // case 3:
    else if (e1 > 0){
      a.SetBinContent(bin, y1);
      a.SetBinError(bin, e1);
    }
    // case 4:
    else {
      a.SetBinContent(bin, (y1 + y2)/2.);
      a.SetBinError(bin, 0.);
    }
  }

  a.SetEntries( nEntries );

  /*
  } else if ( a.InheritsFrom("TH2") ) {
    try {
      merge_weightedAverage2D( dynamic_cast<TH2&>(a), dynamic_cast<const TH2&>(b) );
    } catch ( const std::bad_cast& err ) {
      // protect against dynamic cast failing
      return;
    }
  }
  */

}
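
// Worked example (added illustration; numbers are hypothetical): with bin
// contents y1 = 10 +- 1 and y2 = 20 +- 2, the weights are w1 = 1 and w2 = 0.25,
// so the merged bin is (1*10 + 0.25*20)/1.25 = 12 with error 1/sqrt(1.25) ~ 0.89.
// A bin whose error is zero is treated as empty: the other input is taken as-is
// (cases 2 and 3), and if both errors are zero the contents are simply averaged
// with no error assigned (case 4).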


void MonitoringFile::merge_weightedAverage2D( TH2& a, const TH2& b )
{
  // Author: Frank Berghaus
  for( int binx = 0; binx <= a.GetNbinsX()+1; binx++ ) {
    for( int biny = 0; biny <= a.GetNbinsY()+1; biny++ ) {

      int bin = a.GetBin(binx,biny);

      double y1 = a.GetBinContent(bin);
      double y2 = b.GetBinContent(bin);
      double e1 = a.GetBinError(bin);
      double e2 = b.GetBinError(bin);
      double w1 = 1., w2 = 1.;
      if (e1 > 0) w1 = 1./(e1*e1);
      if (e2 > 0) w2 = 1./(e2*e2);

      // case 1:
      if (e1 > 0 && e2 > 0){
        a.SetBinContent(bin, (w1*y1 + w2*y2)/(w1 + w2));
        a.SetBinError(bin, 1./std::sqrt(w1 + w2));
      }
      // case 2:
      else if (e2 > 0){
        a.SetBinContent(bin, y2);
        a.SetBinError(bin, e2);
      }
      // case 3:
      else if (e1 > 0){
        a.SetBinContent(bin, y1);
        a.SetBinError(bin, e1);
      }
      // case 4:
      else {
        a.SetBinContent(bin, (y1 + y2)/2.);
        a.SetBinError(bin, 0.);
      }
    }
  }
}


void MonitoringFile::merge_weightedEff( TH1& a, const TH1& b )
{
  // Author: Arely Cortes Gonzalez
  // This function adds two 1D efficiency histograms,
  // weighting them by the number of entries.
  // The histograms need to have the same binning.
  // It can also be used to add two normalized histograms,
  // keeping the properly weighted normalization.
  // The number of entries of the merged histogram
  // will be equal to the number of entries of 'a' plus the number of entries of 'b'.


  // Getting weights based on number of entries.
  double entries_a = a.GetEntries();
  double entries_b = b.GetEntries();

  double weight_a = 0.0;
  double weight_b = 0.0;

  if (a.GetDimension() != b.GetDimension()) {
    std::cerr << "merge_weightedEff \"" << a.GetName() << "\": attempt to merge histograms of different dimensionality\n";
    return;
  }

  // Check whether the sumw2 arrays are present - Added by B.Trocme
  bool sumw2 = (a.GetSumw2N() != 0) && (b.GetSumw2N() != 0);

  Int_t ncells = getNumBins(a);

  if (ncells != getNumBins(b)) {
    std::cerr << "merge_weightedEff \"" << a.GetName() << "\": attempt to merge histograms of different sizes\n";
    return;
  }

  if (entries_b == 0) {
    // nothing to merge with, return
    return;
  }
  if (entries_a == 0) {
    // replace my contents with b
    a.Add(&b);
    return;
  }

  // do not attempt to automatically extend!
  a.SetCanExtend(TH1::kNoAxis);

  if (entries_a + entries_b > 0)
  {
    weight_a = entries_a / (entries_a + entries_b);
    weight_b = entries_b / (entries_a + entries_b);

    for( int bin = 0; bin < ncells; bin++ ) {
      double binContent_a = a.GetBinContent(bin);
      double binContent_b = b.GetBinContent(bin);

      // Error treatment added by Evan Wulf:
      // Note that the errors are not used in the content calculation!
      double binError_a = a.GetBinError(bin);
      double binError_b = b.GetBinError(bin);

      // Filling the histogram with the new weighted values
      float weightedEff = binContent_a * weight_a + binContent_b * weight_b;
      a.SetBinContent(bin, weightedEff);

      // Set errors:
      float weightedError = std::sqrt( std::pow(binError_a * weight_a,2) + std::pow(binError_b * weight_b,2) );
      a.SetBinError(bin, weightedError);
    }
  }
  // If the original histos did not contain sumw2, delete the sumw2 array created
  // by SetBinError. This may look dirty but this is the recommendation by R.Brun:
  // http://root.cern.ch/phpBB3/viewtopic.php?f=3&t=1620&p=51674&hilit=sumw2#p51674
  // Added by B.Trocme
  if (!sumw2) (a.GetSumw2())->Set(0);
  // Reset the number of entries of a:
  a.SetEntries(entries_a + entries_b);
}
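
// Worked example (added illustration; numbers are hypothetical): if `a` holds
// 1000 entries and `b` holds 3000, the weights are weight_a = 0.25 and
// weight_b = 0.75.  A bin containing 40 in `a` and 60 in `b` becomes
// 0.25*40 + 0.75*60 = 55, and bin errors of 2 and 1 combine to
// sqrt((2*0.25)^2 + (1*0.75)^2) ~ 0.90.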


void MonitoringFile::merge_Rebinned( TH1& a, TH1& b )
{
  // Author: Luca Fiorini
  // This method provides a correct summation for histograms that have different binning,
  // e.g. histograms with TH1::kCanRebin set to true.
  // The method uses TH1::Merge as explained here:
  // http://root.cern.ch/root/html/TH1.html#TH1:Merge
  // The x axes may have different numbers of bins and different limits,
  // BUT the largest bin width must be a multiple of the smallest bin width
  // and the upper limit must also be a multiple of the bin width.

  // If the axes were all extendable, try to match the number of bins between the histograms.
  // This is only needed for TProfile2D ...
  if(a.InheritsFrom("TProfile2D") && b.InheritsFrom("TProfile2D") && a.GetDimension()==b.GetDimension()) {
    auto binsFunc = [](TH1& h, int ax) {
      if(ax==0) return h.GetNbinsX();
      if(ax==1) return h.GetNbinsY();
      if(ax==2) return h.GetNbinsZ();
      return 0;
    };
    for(int i=0;i<a.GetDimension();i++) {
      int n1 = binsFunc(a,i);
      int n2 = binsFunc(b,i);
      if ((n1 == 0) or (n2 == 0)) {
        throw std::runtime_error("MonitoringFile::merge_Rebinned; nbins is zero.");
      }
      if((n2 % n1 == 0) && std::bitset<sizeof(int)*8>(n2/n1).count()==1) { // n2 is 2^N times bigger than n1
        while(binsFunc(a,i) != binsFunc(b,i)) {
          a.LabelsInflate((i==0) ? "x" : ( (i==1) ? "y" : "z"));
        }
      } else if((n1 % n2 == 0) && std::bitset<sizeof(int)*8>(n1/n2).count()==1) { // n1 is 2^N times bigger than n2
        while(binsFunc(a,i) != binsFunc(b,i)) {
          b.LabelsInflate((i==0) ? "x" : ( (i==1) ? "y" : "z"));
        }
      }
    }
  }


  TList *list = new TList;
  list->Add(&b);
  a.Merge(list);

  delete list;

}
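
// Worked example (added illustration): if the x axis of `a` has 100 bins while
// `b` has 400, the ratio 400/100 = 4 is a power of two, so LabelsInflate("x")
// is applied to `a` until the bin counts match (each call doubles the number
// of bins of that axis).  The actual summation is then delegated to TH1::Merge,
// which handles the differing axis ranges itself.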


void MonitoringFile::merge_eventSample( TH2& a, const TH2& b )
{
  // Author: Peter Faulkner
  // Merge histograms containing, for example, event numbers of events
  // with particular types of errors. Data is inserted/appended row-wise.
  int nbinsx = a.GetNbinsX();
  int nbinsy = a.GetNbinsY();
  if (b.GetNbinsX() != nbinsx || b.GetNbinsY() != nbinsy) return;
  double entries = a.GetEntries();
  for( int biny = 1; biny <= nbinsy; biny++ ) {
    for( int binx = 1; binx <= nbinsx; binx++ ) {
      double bVal = b.GetBinContent(binx, biny);
      if (bVal == 0) break;
      for( int binxa = 1; binxa <= nbinsx; binxa++ ) {
        double aVal = a.GetBinContent(binxa, biny);
        if (aVal == 0) {
          a.SetBinContent(binxa, biny, bVal);
          entries++;
          break;
        } else if (bVal < aVal) {
          for( int bx = nbinsx; bx > binxa; bx-- ) {
            double v1 = a.GetBinContent(bx-1, biny);
            if (v1 == 0) continue;
            double v2 = a.GetBinContent(bx, biny);
            if (v2 == 0) entries++;
            a.SetBinContent(bx, biny, v1);
          }
          a.SetBinContent(binxa, biny, bVal);
          break;
        } else if (aVal == bVal) break;
      }
    }
  }
  a.SetEntries(entries);
}
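
// Worked example (added illustration; the event numbers are made up): each row
// of the histogram holds a zero-padded, increasing list of event numbers.  If a
// row of `a` contains [1012, 1077, 0, 0] and the same row of `b` contains
// [1054, 1077, 0, 0], the loop inserts 1054 in order (shifting 1077 to the
// right), skips the duplicate 1077, and leaves the row as [1012, 1054, 1077, 0],
// incrementing the entry count once for the newly inserted value.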

void MonitoringFile::merge_RMS( TH1& a, const TH1& b ) {

  // Merge histograms where bins are filled with RMS-type data:
  // add in quadrature, weighted by the number of events as
  // reconstructed from the errors.

  // Author: Evan Wulf

  if (a.GetDimension() != b.GetDimension()) {
    std::cerr << "merge_RMS \"" << a.GetName() << "\": attempt to merge histograms of different dimensionality\n";
    return;
  }

  Int_t ncells = getNumBins(a);

  if (ncells != getNumBins(b)) {
    std::cerr << "merge_RMS \"" << a.GetName() << "\": attempt to merge histograms of different sizes\n";
    return;
  }

  // do not attempt to automatically extend!
  a.SetCanExtend(TH1::kNoAxis);

  double nEntries = a.GetEntries();
  nEntries += b.GetEntries();

  for( int bin = 0; bin < ncells; bin++ ) {
    double rms1 = a.GetBinContent(bin);
    double rms2 = b.GetBinContent(bin);
    double e1 = a.GetBinError(bin);
    double e2 = b.GetBinError(bin);

    double n1 = 0;
    double n2 = 0;

    if( e1 != 0 ) {
      n1 = std::pow( rms1 / e1, 2) / 2;
    }
    if( e2 != 0 ) {
      n2 = std::pow( rms2 / e2, 2) / 2;
    }

    double ntot = n1 + n2;
    if( ntot <= 0 ) {
      a.SetBinContent( bin, std::sqrt( (rms1*rms1) + (rms2*rms2) ) );
      a.SetBinError( bin, std::sqrt( (e1*e1) + (e2*e2) ) );
    }
    else {
      double rmstot = std::sqrt( ( (std::pow(n1 * rms1,2) / (n1 - 1)) + (std::pow(n2 * rms2, 2) / (n2 - 1)) ) * (ntot - 1) / std::pow(ntot,2) );
      a.SetBinContent( bin, rmstot );
      a.SetBinError( bin, rmstot / std::sqrt( 2 * ntot ) );
    }
  }

  a.SetEntries(nEntries);
}
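
// Worked example (added illustration; numbers are hypothetical): the error on
// an RMS estimate is roughly RMS/sqrt(2n), so the number of events behind each
// bin is recovered as n = (RMS/error)^2 / 2.  A bin with RMS = 4.0 +- 0.4 thus
// corresponds to n1 = 50 events, and one with RMS = 5.0 +- 0.25 to n2 = 200;
// the merged bin is then close to the event-weighted quadrature combination,
// sqrt((50*16 + 200*25)/250) ~ 4.8, with a new error of rmstot/sqrt(2*250).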

void MonitoringFile::merge_RMSpercentDeviation( TH1& a, const TH1& b ) {

  // Merge histograms where bins are filled with RMS-type data which has
  // been normalized to a percent deviation by use of a reference, using
  //   content = 100 * (RMS - reference) / reference = (RMS * 100 / reference) - 100
  //   error   = RMSerror * 100 / reference.

  // Once the constant term (100) is added back in, the treatment is the same as in merge_RMS above:

  // Add in quadrature, weighted by the number of events as
  // reconstructed from the errors.

  // Author: Evan Wulf

  if (a.GetDimension() != b.GetDimension()) {
    std::cerr << "merge_RMSpercentDeviation \"" << a.GetName() << "\": attempt to merge histograms of different dimensionality\n";
    return;
  }

  Int_t ncells = getNumBins(a);

  if (ncells != getNumBins(b)) {
    std::cerr << "merge_RMSpercentDeviation \"" << a.GetName() << "\": attempt to merge histograms of different sizes\n";
    return;
  }

  double nEntries = a.GetEntries();
  nEntries += b.GetEntries();

  for( int bin = 0; bin < ncells; bin++ ) {
    double y1 = a.GetBinContent(bin) + 100;
    double y2 = b.GetBinContent(bin) + 100;
    double e1 = a.GetBinError(bin);
    double e2 = b.GetBinError(bin);

    double n1 = 0;
    double n2 = 0;

    if( e1 != 0 ) {
      n1 = std::pow( y1 / e1, 2) / 2;
    }
    if( e2 != 0 ) {
      n2 = std::pow( y2 / e2, 2) / 2;
    }

    double ntot = n1 + n2;
    if( ntot <= 0 ) {
      a.SetBinContent( bin, std::sqrt( (y1*y1) + (y2*y2) ) - 100 );
      a.SetBinError( bin, std::sqrt( (e1*e1) + (e2*e2) ) );
    }
    else {
      double ytot = std::sqrt( ( (std::pow(n1 * y1,2) / (n1 - 1)) + (std::pow(n2 * y2, 2) / (n2 - 1)) ) * (ntot - 1) / std::pow(ntot,2) );
      a.SetBinContent( bin, ytot - 100);
      a.SetBinError( bin, ytot / std::sqrt( 2 * ntot ) );
    }
  }

  a.SetEntries(nEntries);
}

void MonitoringFile::merge_lowerLB( TH1& a, const TH1& b ) {

  // Merge "status" histograms, i.e. histograms filled at the start of a run or LB.
  // The histogram title should contain the LB for which the histo was filled,
  // so that strcmp can select the histogram of the lower LB.
  // Be careful not to format the title with %d but rather with %4d: otherwise
  // strcmp compares the strings lexicographically and finds "2" > "10".
  // Example in : LArCalorimeter/LArMonTools/src/LArCoverage.cxx
  // Author: B.Trocme
  //

  if (a.GetDimension() != b.GetDimension()) {
    std::cerr << "merge_lowerLB \"" << a.GetName() << "\": attempt to merge histograms of different dimensionality\n";
    return;
  }

  Int_t ncells = getNumBins(a);

  if (ncells != getNumBins(b)) {
    std::cerr << "merge_lowerLB \"" << a.GetName() << "\": attempt to merge histograms of different sizes\n";
    return;
  }

  // do not attempt to automatically extend!
  a.SetCanExtend(TH1::kNoAxis);

  if (strcmp(a.GetTitle(),b.GetTitle())>0){
    // The LB of histo a is greater than the LB of histo b:
    // a is overwritten by b - otherwise do nothing
    a.SetTitle(b.GetTitle());
    /*
    for( int bin = 0; bin < ncells; bin++ ) {
      a.SetBinContent(bin,b.GetBinContent(bin));
      a.SetBinError(bin,b.GetBinError(bin));
    }
    */
    a.Add(&a, &b, 0, 1);
    // If the original histo did not contain sumw2, delete the sumw2 array created
    // by SetBinError. This may look dirty but this is the recommendation by R.Brun:
    // http://root.cern.ch/phpBB3/viewtopic.php?f=3&t=1620&p=51674&hilit=sumw2#p51674
    /*
    if ((b.GetSumw2N()) == 0) (a.GetSumw2())->Set(0);

    a.SetEntries(b.GetEntries()); */
  }
  return;
}
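
// Worked example (added illustration): with titles formatted via %d, a title
// ending in "LB 2" compares greater than one ending in "LB 10", because strcmp
// compares character by character and '2' > '1'.  With %4d the titles end in
// "LB    2" and "LB   10", and the lexicographic comparison then matches the
// numeric ordering, so the histogram of the lower LB is kept as intended.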


void MonitoringFile::merge_identical( TH1& a, const TH1& b ) {

  // Merge histograms that are expected to be identical in both inputs:
  // nothing is copied, but a warning is printed if the two inputs differ
  // in dimensionality, size, or in any bin content or error.
  // Author: B.Trocme
  //

  if (a.GetDimension() != b.GetDimension()) {
    std::cerr << "merge_identical \"" << a.GetName() << "\": attempt to merge histograms of different dimensionality\n";
    return;
  }

  Int_t ncells = getNumBins(a);

  if (ncells != getNumBins(b)) {
    std::cerr << "merge_identical \"" << a.GetName() << "\": attempt to merge histograms of different sizes\n";
    return;
  }

  // check that all bin contents are identical
  for (Int_t icell = 0; icell < ncells; ++icell) {
    if ((a.GetBinContent(icell) != b.GetBinContent(icell))
        || (a.GetBinError(icell) != b.GetBinError(icell))) {
      std::cerr << "merge_identical \"" << a.GetName() << "\" and \"" << b.GetName() << "\" have different content\n";
      return;
    }
  }

  return;
}

} // end dqutils namespace