ATLAS Offline Software
number/trainNN.cxx
/*
  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
*/

#include <TTree.h>
#include <TFile.h>
#include <TCanvas.h>
#include <TH1F.h>
#include <TLegend.h>
#include <iostream>
#include <TPad.h>
#include <string>
#include <cmath>
#include <stdexcept>   // std::runtime_error, thrown below on malformed clusters
#include "../TJetNet.h"
#include "../doNormalization.C"
#include "Riostream.h"
#include "TROOT.h"     // gROOT
#include "TSystem.h"
#include "../TTrainedNetwork.h"
#include "TChain.h"

#include "TMatrixD.h"
#include "TVectorD.h"
#include "trainNN.h"

#include <vector>
using namespace std;

// Logistic activation g(x) = 1/(1+exp(-2x)); defined here for reference.
Double_t sigmoid(Double_t x)
{
  return 1./(1.+exp(-2*x));
}
// Reject clusters flagged as invalid (sizeX==-100), without truth particles,
// or with a multiplicity the network is not trained for.
bool isBadCluster( int sizeX, int nParticles ){

  if(sizeX==-100){return true;}
  if(nParticles==0){return true;}
  if(nParticles!=1 && nParticles!=2 && nParticles!=3){return true;}

  return false;
}

bool skipSingle(int nParticles,int iClus, int dilutionFactor){

  // this reduces the number of single-particle clusters by a factor k
  int k = 50;
  // int k2 = 1;

  if (nParticles==1)
  {
    if ((iClus %(dilutionFactor*k) != 0) && (iClus % (dilutionFactor*k) != 1))
    {
      return true;
    }
  }

  // if (nParticles==2)
  // {
  //   if ((iClus %(dilutionFactor*k2) != 0) && (iClus % (dilutionFactor*k2) != 1))
  //   {
  //     return true;
  //   }
  // }

  return false;
}
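
// How the sample is split and diluted (a summary of the logic above):
//  - iClus is a running index over accepted (cluster, particle) pairs;
//  - iClus%dilutionFactor==0 selects training patterns, ==1 testing patterns;
//  - skipSingle() additionally keeps single-particle clusters only when
//    iClus%(dilutionFactor*k)==0 or 1.
// Worked example with the dilutionFactor of 200 used in main(): single-particle
// clusters enter only when iClus%10000 is 0 (train) or 1 (test), i.e. at 1/50
// of the rate at which 2- and 3-particle clusters enter the two samples.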

// When a track estimate is requested, theta==0 marks clusters without an
// associated track prediction; such clusters are skipped.
bool badTrackInfo(bool useTrackEstimate, double theta){

  if(useTrackEstimate && theta==0){return true;}
  return false;
}

int main()
{
  trainNN(
    "../TrkValidation.root",
    "dummy",
    10000,
    200,
    10,
    10,
    0);
  return 0;
}
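
// Note: main() passes only the first seven arguments; the remaining
// parameters (useTrackEstimate, nPatternsPerUpdate and the learning-rate
// settings) are presumably given default values in trainNN.h.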

void trainNN(TString inputfile,
             TString outputclass,
             int nIterations,
             int dilutionFactor,
             int nodesFirstLayer,
             int nodesSecondLayer,
             int restartTrainingFrom,
             bool useTrackEstimate,
             int nPatternsPerUpdate,
             double learningRate,
             double learningRateDecrease,
             double learningRateMomentum)
{
  double bweight=1;
  double cweight=1.;
  double lweight=1;

  gROOT->SetStyle("Plain");

  cout << "starting with settings: " << endl;
  cout << " nIterations: " << nIterations << endl;
  cout << " dilutionFactor: " << dilutionFactor << endl;
  cout << " nodesFirstLayer: " << nodesFirstLayer << endl;
  cout << " nodesSecondLayer: " << nodesSecondLayer << endl;

  // TFile *file = TFile::Open(inputfile);
  // TTree *simu = (TTree*)file->Get("Validation/NNinput");
  TChain *myChain = new TChain("Validation/NNinput");

  // files.txt / filesOnTrack.txt are expected to contain one
  // myChain->Add("...") line per input file; the textual #include splices
  // the file list into the code at compile time.
  if(!useTrackEstimate){
#include "../files.txt"
  }

  if(useTrackEstimate){
#include "../filesOnTrack.txt"
  }

  TChain* simu=myChain;

  std::cout << " Training sample obtained... " << std::endl;

  vector<int> *NN_sizeX;
  vector<int> *NN_sizeY;
  vector<vector<float> > *NN_matrixOfToT;
  vector<vector<float> > *NN_vectorOfPitchesY;
  vector<int> *NN_ClusterPixLayer;
  vector<int> *NN_ClusterPixBarrelEC;
  vector<float> *NN_phiBS;
  vector<float> *NN_thetaBS;
  vector<float> *NN_etaModule;
  vector<bool> *NN_useTrackInfo;
  vector<int> *NN_columnWeightedPosition;
  vector<int> *NN_rowWeightedPosition;
  vector<vector<float> > *NN_positionX;
  vector<vector<float> > *NN_positionY;
  vector<vector<float> > *NN_theta;
  vector<vector<float> > *NN_phi;

  // List of branches
  TBranch *b_NN_sizeX;
  TBranch *b_NN_sizeY;
  TBranch *b_NN_matrixOfToT;
  TBranch *b_NN_vectorOfPitchesY;
  TBranch *b_NN_ClusterPixLayer;
  TBranch *b_NN_ClusterPixBarrelEC;
  TBranch *b_NN_phiBS;
  TBranch *b_NN_thetaBS;
  TBranch *b_NN_etaModule;
  TBranch *b_NN_useTrackInfo;
  TBranch *b_NN_columnWeightedPosition;
  TBranch *b_NN_rowWeightedPosition;
  TBranch *b_NN_positionX;
  TBranch *b_NN_positionY;
  TBranch *b_NN_theta;
  TBranch *b_NN_phi;

  NN_sizeX = 0;
  NN_sizeY = 0;
  NN_matrixOfToT = 0;
  NN_vectorOfPitchesY = 0;
  NN_ClusterPixLayer = 0;
  NN_ClusterPixBarrelEC = 0;
  NN_phiBS = 0;
  NN_thetaBS = 0;
  NN_etaModule = 0;
  NN_useTrackInfo = 0;
  NN_columnWeightedPosition = 0;
  NN_rowWeightedPosition = 0;
  NN_positionX = 0;
  NN_positionY = 0;
  NN_theta = 0;
  NN_phi = 0;

  // Set branch addresses and branch pointers
  // if (!tree) return 0;
  // TTree* simu = tree;
  // fCurrent = -1;
  simu->SetMakeClass(1);

  simu->SetBranchAddress("NN_sizeX", &NN_sizeX, &b_NN_sizeX);
  simu->SetBranchAddress("NN_sizeY", &NN_sizeY, &b_NN_sizeY);
  simu->SetBranchAddress("NN_matrixOfToT", &NN_matrixOfToT, &b_NN_matrixOfToT);
  simu->SetBranchAddress("NN_vectorOfPitchesY", &NN_vectorOfPitchesY, &b_NN_vectorOfPitchesY);
  simu->SetBranchAddress("NN_ClusterPixLayer", &NN_ClusterPixLayer, &b_NN_ClusterPixLayer);
  simu->SetBranchAddress("NN_ClusterPixBarrelEC", &NN_ClusterPixBarrelEC, &b_NN_ClusterPixBarrelEC);
  simu->SetBranchAddress("NN_phiBS", &NN_phiBS, &b_NN_phiBS);
  simu->SetBranchAddress("NN_thetaBS", &NN_thetaBS, &b_NN_thetaBS);
  simu->SetBranchAddress("NN_etaModule", &NN_etaModule, &b_NN_etaModule);
  simu->SetBranchAddress("NN_useTrackInfo", &NN_useTrackInfo, &b_NN_useTrackInfo);
  simu->SetBranchAddress("NN_columnWeightedPosition", &NN_columnWeightedPosition, &b_NN_columnWeightedPosition);
  simu->SetBranchAddress("NN_rowWeightedPosition", &NN_rowWeightedPosition, &b_NN_rowWeightedPosition);
  simu->SetBranchAddress("NN_positionX", &NN_positionX, &b_NN_positionX);
  simu->SetBranchAddress("NN_positionY", &NN_positionY, &b_NN_positionY);
  simu->SetBranchAddress("NN_theta", &NN_theta, &b_NN_theta);
  simu->SetBranchAddress("NN_phi", &NN_phi, &b_NN_phi);

  cout << "Branches set..." << endl;

  TString filterTrain("Entry$%");
  filterTrain+=dilutionFactor;
  filterTrain+="==0";

  TString filterTest("Entry$%");
  filterTest+=dilutionFactor;
  filterTest+="==1";
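
  // N.B. filterTrain/filterTest are TTree::Draw-style selections
  // ("Entry$%<dilutionFactor>==0/1"), but they are not used anywhere below:
  // the train/test split is done per particle via iClus%dilutionFactor
  // instead. They look like leftovers from an earlier version.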
  int* nneurons;
  int nlayer=3;

  cout << "Getting Max size " << endl;

  // simu->Print();

  simu->GetEntry(0);

  cout << "First entry..." << endl;

  Int_t sizeX=-7;
  Int_t sizeY=-7;

  // loop over the clusters looking for the first cluster properly saved
  for( unsigned int clus =0; clus<NN_sizeX->size(); clus++ ){

    sizeX = (*NN_sizeX)[clus];
    sizeY = (*NN_sizeY)[clus];

    if(sizeX>0) break;
  }

  cout << "Size obtained" << endl;

  // int numberinputs=sizeX*(sizeY+1)+2;
  int numberinputs=sizeX*(sizeY+1)+4;
  if (!useTrackEstimate)
  {
    // numberinputs=sizeX*(sizeY+1)+3;
    numberinputs=sizeX*(sizeY+1)+5;
  }
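
  // Input layout used throughout this file (see the Set*Set calls below):
  //   [0, sizeX*sizeY)                  normalized ToT matrix (flattened)
  //   [sizeX*sizeY, sizeX*sizeY+sizeY)  normalized Y pitches
  //   (sizeX+1)*sizeY                   normalized layer number
  //   (sizeX+1)*sizeY+1                 normalized layer type (barrel/endcap)
  //   +2, +3 (+4)                       phi/theta of the track, or the
  //                                     beam-spot phi/theta plus etaModule
  // Caveat: numberinputs is computed as sizeX*(sizeY+1)+4(+1) while the
  // indices above run up to (sizeX+1)*sizeY+3(+4); the two expressions agree
  // only when sizeX==sizeY, which holds for the square (e.g. 7x7) clusters
  // used here.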

  int numberoutputs=3;

  if (nodesSecondLayer!=0)
  {
    nlayer=4;
    nneurons=new int[4];
  }
  else
  {
    nneurons=new int[3];
  }

  nneurons[0]=numberinputs;
  nneurons[1]=nodesFirstLayer;

  if (nodesSecondLayer!=0)
  {
    nneurons[2]=nodesSecondLayer;
    nneurons[3]=3; // number of output nodes
  }
  else
  {
    nneurons[2]=3; // number of output nodes
  }
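
  // Example geometry: with 7x7 clusters and track inputs, numberinputs is
  // 7*8+4 = 60, so the 10-node hidden layers passed from main() would give
  // a 60-10-10-3 network; without track inputs it would be 61-10-10-3.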

  // float eventWeight(0);
  float trainingError(0);
  float testError(0);

  // setting learning parameters

  cout << " now providing training events " << endl;

  Int_t numberTrainingEvents=0;
  Int_t numberTestingEvents=0;

  int iClus=0;
  int part_0=0;
  int part_1=0;
  int part_2=0;
  int part_3=0;
  // Loop over entries:
  for (Int_t i = 0; i < simu->GetEntries(); i++) {

    if (i % 100000 == 0 ) {
      std::cout << " Counting training / testing events in sample. Looping over event " << i << std::endl;
    }

    simu->GetEntry(i);
    // cout << " Entry obtained with: " << NN_sizeX->size() << " clusters" << endl;
    for( unsigned int clus =0; clus<NN_sizeX->size(); clus++ ){

      std::vector<float> * positionX=0;
      std::vector<float> * thetaTr=0;

      sizeX = (*NN_sizeX)[clus];
      sizeY = (*NN_sizeY)[clus];

      positionX =&(*NN_positionX)[clus];
      thetaTr = &(*NN_theta)[clus];

      int nParticles = positionX->size();
      if (isBadCluster(sizeX, nParticles ) ) continue;

      // loop over the particles
      for( unsigned int P = 0; P < positionX->size(); P++){

        double theta = (*thetaTr)[P];
        if (theta!=theta) continue; // skip NaN theta
        iClus++;

        if ( badTrackInfo(useTrackEstimate, theta ) ) continue;
        if ( skipSingle(nParticles, iClus, dilutionFactor) ) continue;

        if (iClus%dilutionFactor==0) numberTrainingEvents+=1;
        if (iClus%dilutionFactor==1) numberTestingEvents+=1;

        if (iClus%dilutionFactor==1 && nParticles==1 ) part_1++;
        if (iClus%dilutionFactor==1 && nParticles==2 ) part_2++;
        if (iClus%dilutionFactor==1 && nParticles==3 ) part_3++;

      }// end loop over the particles
    }// end loop over clusters
  }// end loop over entries
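
  // This counting pass exists because the TJetNet constructor takes the
  // number of testing and training patterns up front (it presumably
  // allocates its pattern arrays from these numbers), so the sample sizes
  // must be known before any pattern can be copied in.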

  cout << " N. training events: " << numberTrainingEvents <<
          " N. testing events: " << numberTestingEvents <<
          " N. total events: " << iClus << endl;

  cout << " 1 particle clusters: " << part_1 << " 2 particle clusters: " << part_2 << " 3 particle clusters: " << part_3 << endl;

  cout << "now start to setup the network..." << endl;

  TJetNet* jn = new TJetNet( numberTestingEvents, numberTrainingEvents, nlayer, nneurons );

  cout << " setting learning method... " << endl;

  // jn->SetMSTJN(4,12); // Fletcher-Reeves (scaled conjugate gradient)

  // int nPatternsPerUpdate=200;
  // int nPatternsPerUpdate=1;

  jn->SetPatternsPerUpdate( nPatternsPerUpdate );
  jn->SetUpdatesPerEpoch( (int)std::floor((float)numberTrainingEvents/(float)nPatternsPerUpdate) );
  jn->SetUpdatingProcedure( 0 );
  jn->SetErrorMeasure( 0 );
  jn->SetActivationFunction( 1 );
  jn->SetLearningRate( learningRate );             // 0.8
  jn->SetMomentum( learningRateMomentum );         // 0.3; is now 0.5
  jn->SetInitialWeightsWidth( 1. );
  jn->SetLearningRateDecrease( learningRateDecrease ); // 0.992
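
  // With these settings one epoch sees every training pattern once:
  // updatesPerEpoch = floor(numberTrainingEvents / nPatternsPerUpdate);
  // for instance, 1,000,000 training patterns with 200 patterns per update
  // give 5,000 weight updates per epoch.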

  cout << " setting pattern for training events " << endl;

  int trainSampleNumber=0;
  int testSampleNumber=1;

  cout << " copying over training events " << endl;
  int counter=0;
  int counter0=0;
  int counter1=0;

  iClus=0;
  for (Int_t i = 0; i < simu->GetEntries(); i++) {

    if (i % 100000 == 0 ) {
      std::cout << " Copying over training events. Looping over event " << i << std::endl;
    }

    simu->GetEntry(i);
    // loop over clusters
    for( unsigned int clus =0; clus<NN_sizeX->size(); clus++ ){

      vector<float> *matrixOfToT=0;
      vector<float> *vectorOfPitchesY=0;

      Float_t phiBS;
      Float_t thetaBS;
      Float_t etaModule;
      Int_t ClusterPixLayer;
      Int_t ClusterPixBarrelEC;

      std::vector<float> * positionX=0;
      std::vector<float> * thetaTr=0;
      std::vector<float> * phiTr=0;

      sizeX = (*NN_sizeX)[clus];
      sizeY = (*NN_sizeY)[clus];

      matrixOfToT=&(*NN_matrixOfToT)[clus];
      vectorOfPitchesY=&(*NN_vectorOfPitchesY)[clus];

      phiBS = (*NN_phiBS)[clus];
      thetaBS =(*NN_thetaBS)[clus];
      etaModule =(*NN_etaModule)[clus];

      ClusterPixLayer=(*NN_ClusterPixLayer)[clus];
      ClusterPixBarrelEC = (*NN_ClusterPixBarrelEC)[clus];

      positionX =&(*NN_positionX)[clus];
      thetaTr = &(*NN_theta)[clus];
      phiTr = &(*NN_phi)[clus];

      int nParticles = positionX->size();

      if(isBadCluster(sizeX, nParticles ) ) continue;

      for( unsigned int P = 0; P < positionX->size(); P++){

        double theta = (*thetaTr)[P];
        double phi = (*phiTr)[P];

        if (theta!=theta) continue; // skip NaN theta

        iClus++;

        if ( badTrackInfo(useTrackEstimate, theta ) ) continue;
        if ( skipSingle(nParticles, iClus, dilutionFactor) ) continue;

        if (matrixOfToT->size()!=(unsigned int)(sizeX*sizeY))
        {
          std::cout << " Event: " << i << " PROBLEM: matrix size is: " << matrixOfToT->size() << std::endl;
          throw std::runtime_error("Problem in number/trainNN.cxx");
        }

        // loop over the elements of matrixOfToT, which is stored as a flat vector
        for(unsigned int ME =0; ME < matrixOfToT->size(); ME++){

          if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, ME, norm_ToT((*matrixOfToT)[ME]));
          if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, ME, norm_ToT((*matrixOfToT)[ME]));

          if (counter1 == 0) std::cout << " element: " << ME << " ToT set to: " << norm_ToT((*matrixOfToT)[ME]) << std::endl;
        }

        // loop over the vector of Y pitches
        for (int s=0;s<sizeY;s++)
        {
          if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, sizeX*sizeY+s, norm_pitch((*vectorOfPitchesY)[s]));
          if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, sizeX*sizeY+s, norm_pitch((*vectorOfPitchesY)[s]));

          if (counter0 == 0) std::cout << " s: " << s << " pitch set to: " << norm_pitch((*vectorOfPitchesY)[s]) << std::endl;
        }

        if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, (sizeX+1)*sizeY, norm_layerNumber(ClusterPixLayer));
        if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, (sizeX+1)*sizeY+1, norm_layerType(ClusterPixBarrelEC));

        if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, (sizeX+1)*sizeY, norm_layerNumber(ClusterPixLayer));
        if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, (sizeX+1)*sizeY+1, norm_layerType(ClusterPixBarrelEC));

        if (counter0 == 0) std::cout << " ClusterPixLayer " << norm_layerNumber(ClusterPixLayer) << " ClusterPixBarrelEC " << norm_layerType(ClusterPixBarrelEC) << std::endl;

        if (useTrackEstimate)
        {
          if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, (sizeX+1)*sizeY+2, norm_phi(phi) );
          if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, (sizeX+1)*sizeY+3, norm_theta(theta) );

          if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, (sizeX+1)*sizeY+2, norm_phi(phi) );
          if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, (sizeX+1)*sizeY+3, norm_theta(theta) );

          if (counter0==0) std::cout << " phi " << norm_phi(phi) << " theta: " << norm_theta(theta) << std::endl;
        }
        else
        {
          if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, (sizeX+1)*sizeY+2, norm_phiBS(phiBS) );
          if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, (sizeX+1)*sizeY+3, norm_thetaBS(thetaBS) );
          if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, (sizeX+1)*sizeY+4, norm_etaModule(etaModule) );

          if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, (sizeX+1)*sizeY+2, norm_phiBS(phiBS) );
          if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, (sizeX+1)*sizeY+3, norm_thetaBS(thetaBS) );
          if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, (sizeX+1)*sizeY+4, norm_etaModule(etaModule) );

          if (counter0==0) std::cout <<
            " phiBS " << norm_phiBS(phiBS) <<
            " thetaBS: " << norm_thetaBS(thetaBS) <<
            " etaModule: " << norm_etaModule(etaModule) << std::endl;
        }

        if (iClus%dilutionFactor==0) jn->SetOutputTrainSet( counter0, 0, (nParticles==1 ? 1 : 0) );
        if (iClus%dilutionFactor==0) jn->SetOutputTrainSet( counter0, 1, (nParticles==2 ? 1 : 0) );
        if (iClus%dilutionFactor==0) jn->SetOutputTrainSet( counter0, 2, (nParticles>=3 ? 1 : 0) );
        if (iClus%dilutionFactor==0) jn->SetEventWeightTrainSet( counter0, 1 );

        if (iClus%dilutionFactor==1) jn->SetOutputTestSet( counter1, 0, (nParticles==1 ? 1 : 0) );
        if (iClus%dilutionFactor==1) jn->SetOutputTestSet( counter1, 1, (nParticles==2 ? 1 : 0) );
        if (iClus%dilutionFactor==1) jn->SetOutputTestSet( counter1, 2, (nParticles>=3 ? 1 : 0) );
        if (iClus%dilutionFactor==1) jn->SetEventWeightTestSet( counter1, 1 );
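
        // The three outputs form a one-hot encoding of the particle
        // multiplicity (1, 2, >=3), so after training the outputs can be
        // read as scores for each multiplicity hypothesis; all patterns
        // enter with unit event weight.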

        if (iClus%dilutionFactor==0){counter0+=1;}
        if (iClus%dilutionFactor==1){counter1+=1;}

        // counter=counter0;

      }// end loop over the particles
    }// end loop over clusters
  }// end loop over entries
  cout << counter0 << " " << numberTrainingEvents << " " << iClus << endl;

  if (counter0!=numberTrainingEvents)
  {
    cout << " counter up to: " << counter0 << " while events in training sample are " << numberTrainingEvents << endl;
    return;
  }

  if (counter1!=numberTestingEvents)
  {
    cout << " counter up to: " << counter1 << " while events in testing sample are " << numberTestingEvents << endl;
    return;
  }

  jn->Shuffle(true,false);

  if (restartTrainingFrom==0)
  {
    jn->Init();
    // jn->DumpToFile("WeightsInitial.w");
  }
  else
  {
    TString name("Weights");
    name+=restartTrainingFrom;
    name+=".w";

    jn->ReadFromFile(name);
  }
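
  // Note on restarts: ReadFromFile() appears to expect a plain-text
  // JETNET-style weight dump named Weights<N>.w, while the checkpoints
  // written in the training loop below are ROOT files (Weights<epoch>.root).
  // Restarting therefore presumably relies on .w dumps produced elsewhere
  // (the DumpToFile call above is commented out).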

  float minimumError=1e10;
  int epochesWithRisingError=0;
  int epochWithMinimum=0;

  int updatesPerEpoch=jn->GetUpdatesPerEpoch();

  // prepare output stream

  TString directory("weights");
  directory+="_nPat";
  directory+=nPatternsPerUpdate;
  directory+="_rate";
  directory+=(int)(learningRate*100);
  directory+="_rdec";
  directory+=(int)(100*learningRateDecrease);
  directory+="_mom";
  directory+=(int)(100*learningRateMomentum);

  if (useTrackEstimate)
  {
    directory+="_withTracks";
  }

  TString command("mkdir ");
  command+=directory;

  gSystem->Exec(command);
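
  // The output directory encodes the hyperparameters, with rates stored as
  // (truncated) percentages: e.g. nPatternsPerUpdate=200, learningRate=0.8,
  // learningRateDecrease=0.992 and learningRateMomentum=0.5 would give
  // "weights_nPat200_rate80_rdec99_mom50", plus "_withTracks" if track
  // estimates are used.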

  TString nameCronology=directory;
  nameCronology+="/trainingCronology.txt";

  ofstream cronology(nameCronology,ios_base::out); // |ios_base::app);

  cronology << "-------------SETTINGS----------------" << endl;
  cronology << "Epochs: " << jn->GetEpochs() << std::endl;
  cronology << "Updates Per Epoch: " << jn->GetUpdatesPerEpoch() << std::endl;
  cronology << "Updating Procedure: " << jn->GetUpdatingProcedure() << std::endl;
  cronology << "Error Measure: " << jn->GetErrorMeasure() << std::endl;
  cronology << "Patterns Per Update: " << jn->GetPatternsPerUpdate() << std::endl;
  cronology << "Learning Rate: " << jn->GetLearningRate() << std::endl;
  cronology << "Momentum: " << jn->GetMomentum() << std::endl;
  cronology << "Initial Weights Width: " << jn->GetInitialWeightsWidth() << std::endl;
  cronology << "Learning Rate Decrease: " << jn->GetLearningRateDecrease() << std::endl;
  cronology << "Activation Function: " << jn->GetActivationFunction() << std::endl;
  cronology << "-------------LAYOUT------------------" << endl;
  cronology << "Input variables: " << jn->GetInputDim() << endl;
  cronology << "Output variables: " << jn->GetOutputDim() << endl;
  cronology << "Hidden layers: " << jn->GetHiddenLayerDim() << endl;
  cronology << "Layout : ";
  for (Int_t s=0;s<jn->GetHiddenLayerDim()+2;++s)
  {
    cronology << jn->GetHiddenLayerSize(s);
    if (s<jn->GetHiddenLayerDim()+1) cronology << "-";
  }
  cronology << endl;
  cronology << "--------------HISTORY-----------------" << endl;
  cronology << "History of iterations: " << endl;
  cronology.close();

  // prepare training histograms
  TH1F* histoTraining=new TH1F("training","training",(int)std::floor((float)nIterations/10.+0.5),1,std::floor((float)nIterations/10.+1.5));
  TH1F* histoTesting=new TH1F("testing","testing",(int)std::floor((float)nIterations/10.+0.5),1,std::floor((float)nIterations/10.+1.5));
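
  // One histogram bin per 10 epochs: the loop below samples the errors only
  // when epoch%10==0 (or on the first epoch) and fills bin epoch/10.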

  double maximumTrain=0;
  double minimumTrain=1e10;

  for(int epoch=restartTrainingFrom+1;epoch<=nIterations;++epoch)
  {
    if (epoch!=restartTrainingFrom+1)
    {
      trainingError = jn->Train();
    }

    if (epoch%10==0 || epoch==restartTrainingFrom+1)
    {
      cronology.open(nameCronology,ios_base::app);

      testError = jn->TestBTAG();

      if (trainingError>maximumTrain) maximumTrain=trainingError;
      if (testError>maximumTrain) maximumTrain=testError;
      if (trainingError<minimumTrain) minimumTrain=trainingError;
      if (testError<minimumTrain) minimumTrain=testError;

      histoTraining->Fill(epoch/10.,trainingError);
      histoTesting->Fill(epoch/10.,testError);

      if (testError<minimumError)
      {
        minimumError=testError;
        epochesWithRisingError=0;
        epochWithMinimum=epoch;
      }
      else
      {
        epochesWithRisingError+=10;
        // if (trainingError>testError)
        // {
        //   epochWithMinimum=epoch;
        // }
      }

      if (epochesWithRisingError>300)
      {
        if (trainingError<minimumError)
        {
          cout << " End of training. Minimum already on epoch: " << epochWithMinimum << endl;
          cronology << " End of training. Minimum already on epoch: " << epochWithMinimum << endl;
          break;
        }
      }
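
      // Early stopping: the test error is sampled every 10 epochs, so
      // epochesWithRisingError grows in steps of 10; training stops once
      // the test error has not improved for more than 300 epochs (30
      // consecutive samplings), provided the training error itself is below
      // the best test error seen so far.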

      cronology << "Epoch: [" << epoch <<
        "] Error: " << trainingError <<
        " Test: " << testError << endl;

      cout << "Epoch: [" << epoch <<
        "] Error: " << trainingError <<
        " Test: " << testError << endl;

      cronology.close();

      TString name=directory;
      name+="/Weights";
      name+=epoch;
      name+=".root";
      cout << "Writing File... " << endl;
      TFile* file=new TFile(name,"recreate");
      TTrainedNetwork* trainedNetwork=jn->createTrainedNetwork();
      trainedNetwork->Write();
      file->Write();
      file->Close();
      delete file;

      /*
      TFile* file2=new TFile(name);
      trainedNetwork=(TTrainedNetwork*)file2->Get("TTrainedNetwork");
      cout << " hid lay 1 size: " << trainedNetwork->getnHiddenLayerSize()[0] << endl;
      file2->Close();
      delete file2;
      */

      // jn->DumpToFile(name);
    }
  }

  jn->writeNetworkInfo(1);
  jn->writeNetworkInfo(2);
  // jn->writeNetworkInfo(3);
  // jn->writeNetworkInfo(4);
  // jn->writeNetworkInfo(5);

  // cout << " Now try to understand how to get the weights..." << endl;

  Int_t nInput=jn->GetInputDim();

  cout << " create Trained Network object..." << endl;

  TTrainedNetwork* trainedNetwork=jn->createTrainedNetwork();

  // The block below is commented out; the example inputs (nVTX, mass,
  // IP3D, ...) apparently come from the b-tagging trainer this file was
  // adapted from and do not match the pixel-cluster inputs used here.
/*
  cout << " now getting value with trained Network ";

  double inputexample[9]={norm_nVTX(1),
                          norm_nTracksAtVtx(2),
                          norm_nSingleTracks(0),
                          norm_energyFraction(0.6),
                          norm_mass(2500),
                          norm_significance3d(4),
                          norm_IP3D(3),
                          norm_cat_pT(3),
                          norm_cat_eta(1)};

  for (Int_t i=0;i<nInput;++i)
  {
    jn->SetInputs(i,inputexample[i]);
  }

  cronology.open("weights/trainingCronology.txt",ios_base::app);

  jn->Evaluate();

  cronology << "----------------CONSISTENCY CHECK-----------" << endl;
  cout << "Result 0:" << jn->GetOutput(0);
  cronology << "Result 0:" << jn->GetOutput(0);
  cout << " Result 1:" << jn->GetOutput(1);
  cronology << "Result 1:" << jn->GetOutput(1);
  cout << " Result 2:" << jn->GetOutput(2) << endl;
  cronology << " Result 2:" << jn->GetOutput(2) << endl;

  cout << " Reading back old network " << endl;
  jn->readBackTrainedNetwork(trainedNetwork);

  cout << " resetting input " << endl;
  for (Int_t i=0;i<nInput;++i)
  {
    jn->SetInputs(i,inputexample[i]);
  }

  jn->Evaluate();

  cout << "After reading back - Result 0:" << jn->GetOutput(0);
  cronology << "After reading back - Result 0:" << jn->GetOutput(0);
  cout << " After reading back - Result 1:" << jn->GetOutput(1);
  cronology << "After reading back - Result 1:" << jn->GetOutput(1);
  cout << " After reading back - Result 2:" << jn->GetOutput(2) << endl;
  cronology << "After reading back - Result 2:" << jn->GetOutput(2);
*/

  cout << " Now getting histograms from trainingResult" << endl;
  cronology << " Now getting histograms from trainingResult" << endl;

  TNetworkToHistoTool myHistoTool;

  cout << " From network to histo..." << endl;
  std::vector<TH1*> myHistos=myHistoTool.fromTrainedNetworkToHisto(trainedNetwork);

  cout << " From histo to network back..." << endl;
  TTrainedNetwork* trainedNetwork2=myHistoTool.fromHistoToTrainedNetwork(myHistos);

  cout << " reading back " << endl;
  jn->readBackTrainedNetwork(trainedNetwork2);
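
  // Round-trip check: the trained network is serialized to histograms and
  // back, and the result is loaded into the JETNET instance. The histogram
  // representation is presumably what downstream tools read from the
  // calibration files, so this verifies that serialization loses nothing.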

/*
  cout << " resetting input " << endl;
  for (Int_t i=0;i<nInput;++i)
  {
    jn->SetInputs(i,inputexample[i]);
  }

  jn->Evaluate();

  cout << "After reading back - Result 0:" << jn->GetOutput(0);
  cronology << "After reading back - Result 0:" << jn->GetOutput(0);
  cout << " After reading back - Result 1:" << jn->GetOutput(1);
  cronology << "After reading back - Result 1:" << jn->GetOutput(1);
  cout << " After reading back - Result 2:" << jn->GetOutput(2) << endl;
  cronology << "After reading back - Result 2:" << jn->GetOutput(2);

  cout << " Directly from the trainedNetwork read back from HISTOS...!" << endl;

  std::vector<Double_t> inputData;
  for (Int_t u=0;u<nInput;++u)
  {
    inputData.push_back(inputexample[u]);
  }

  std::vector<Double_t> outputData=trainedNetwork2->calculateOutputValues(inputData);

  cout << "After reading back - Result 0:" << outputData[0] << endl;
  cout << " After reading back - Result 1:" << outputData[1] << endl;
  cout << " After reading back - Result 2:" << outputData[2] << endl;
*/

  if (epochWithMinimum!=0)
  {
    cronology << "Minimum stored from Epoch: " << epochWithMinimum << endl;
  }
  else
  {
    cronology << "Minimum not reached" << endl;
  }

  cronology.close();

  if (epochWithMinimum!=0)
  {
    TString name=directory;
    name+="/Weights";
    name+=epochWithMinimum;
    name+=".root";

    std::cout << " reading back from minimum " << endl;

    TFile *_file0 = new TFile(name);
    TTrainedNetwork* trainedNetwork=(TTrainedNetwork*)_file0->Get("TTrainedNetwork");

    cout << " Reading back network with minimum" << endl;
    jn->readBackTrainedNetwork(trainedNetwork);

    TString nameFile=directory;
    nameFile+="/weightMinimum.root";

    TFile* file=new TFile(nameFile,"recreate");
    trainedNetwork->Write();
    file->Write();
    file->Close();
    delete file;

    cout << " -------------------- " << endl;
    cout << " Writing OUTPUT histos " << endl;

    TString histoFName=directory;
    histoFName+="/histoWeights.root";

    TFile* fileHistos=new TFile(histoFName,"recreate");
    TNetworkToHistoTool histoTool;
    std::vector<TH1*> myHistos=histoTool.fromTrainedNetworkToHisto(trainedNetwork);
    std::vector<TH1*>::const_iterator histoBegin=myHistos.begin();
    std::vector<TH1*>::const_iterator histoEnd=myHistos.end();
    for (std::vector<TH1*>::const_iterator histoIter=histoBegin;
         histoIter!=histoEnd;++histoIter)
    {
      (*histoIter)->Write();
    }
    fileHistos->Write();
    fileHistos->Close();
    delete fileHistos;

    // jn->ReadFromFile(name);
  }
  else
  {
    cout << " using network at last iteration (minimum not reached...)" << endl;
  }

  // here you should create the class... Still open how to deal with this...
  // char* myname=const_cast<char*>(static_cast<const char*>(outputclass));
  // ierr=mlpsavecf_(myname);

  TString histoTName=directory;
  histoTName+="/trainingInfo.root";

  TFile* histoFile=new TFile(histoTName,"recreate");
  histoTraining->Write();
  histoTesting->Write();
  histoFile->Write();
  histoFile->Close();
  delete histoFile;

  TCanvas* trainingCanvas=new TCanvas("trainingCanvas","trainingCanvas");
  histoTraining->SetLineColor(2);
  histoTesting->SetLineColor(4);

  histoTraining->GetYaxis()->SetRangeUser(minimumTrain,maximumTrain);
  histoTraining->Draw("l");
  histoTesting->Draw("lsame");
  TString canvasName=directory;
  canvasName+="/trainingCurve.eps";
  trainingCanvas->SaveAs(canvasName);

  TCanvas* mlpa_canvas = new TCanvas("jetnet_canvas","Network analysis");
  mlpa_canvas->Divide(2,4);

  // TCanvas* mlpa_canvas_5=gDirectory->Get("mlpa_canvas_5");
  // mlpa_canvas_5->SetLogy(kTRUE);
  gPad->SetLogy();

  // Use the NN to plot the results for each sample. This gives
  // approximately the same result as DrawNetwork: all entries are used,
  // while DrawNetwork focuses on the test sample. The x-axis range is also
  // set manually.
  TH1F *bg2 = new TH1F("bg2h", "NN output", 50, -.5, 1.5);
  TH1F *bg = new TH1F("bgh", "NN output", 50, -.5, 1.5);
  TH1F *sig = new TH1F("sigh", "NN output", 50, -.5, 1.5);

  // sig = 1 particle; bg = 2 particles; bg2 = 3 particles
  TH1F *bg2test = new TH1F("bg2htest", "NN output", 50, -.5, 1.5);
  TH1F *bgtest = new TH1F("bghtest", "NN output", 50, -.5, 1.5);
  TH1F *sigtest = new TH1F("sightest", "NN output", 50, -.5, 1.5);
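
  // Canvas layout (2x4 pads): pads 1/2 show the 1-particle discriminant for
  // the training/testing sample and pads 5/6 the same normalized; pads 3/4
  // and 7/8 repeat this for the (p1+p2)/(p1+p2+p3) discriminant built in
  // the second loop below.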

  int weight=1;
  iClus=0;
  for (Int_t i = 0; i < simu->GetEntries(); i++) {

    if (i % 100000 == 0 ) {
      std::cout << " First plot. Looping over event " << i << std::endl;
    }

    // if (iClus%dilutionFactor!=0&&iClus%dilutionFactor!=1) continue;

    simu->GetEntry(i);

    for( unsigned int clus =0; clus<NN_sizeX->size(); clus++ ){
      vector<float> *matrixOfToT=0;
      vector<float> *vectorOfPitchesY=0;

      Float_t phiBS;
      Float_t thetaBS;
      Float_t etaModule;
      Int_t ClusterPixLayer;
      Int_t ClusterPixBarrelEC;

      std::vector<float> * positionX=0;
      std::vector<float> * thetaTr=0;
      std::vector<float> * phiTr=0;

      sizeX = (*NN_sizeX)[clus];
      sizeY = (*NN_sizeY)[clus];

      matrixOfToT=&(*NN_matrixOfToT)[clus];
      vectorOfPitchesY=&(*NN_vectorOfPitchesY)[clus];

      phiBS = (*NN_phiBS)[clus];
      thetaBS =(*NN_thetaBS)[clus];
      etaModule =(*NN_etaModule)[clus];

      ClusterPixLayer=(*NN_ClusterPixLayer)[clus];
      ClusterPixBarrelEC = (*NN_ClusterPixBarrelEC)[clus];

      positionX =&(*NN_positionX)[clus];

      int nParticles = positionX->size();
      // if(nParticles==0) continue;

      thetaTr = &(*NN_theta)[clus];
      phiTr = &(*NN_phi)[clus];

      if(isBadCluster(sizeX, nParticles ) ) continue;

      for( unsigned int P = 0; P < positionX->size(); P++){

        iClus++;
        double theta = (*thetaTr)[P];
        double phi = (*phiTr)[P];
        // mirror the track angles for one endcap so that both endcaps are
        // presented to the network with the same angular convention
        if (ClusterPixBarrelEC==2)
        {
          theta=-theta;
          phi=-phi;
        }

        if ( badTrackInfo(useTrackEstimate, theta ) ) continue;
        if ( skipSingle(nParticles, iClus, dilutionFactor) ) continue;

        if (iClus%dilutionFactor==0||iClus%dilutionFactor==1){

          // loop over the elements of matrixOfToT, which is stored as a flat vector
          for(unsigned int ME =0; ME < matrixOfToT->size(); ME++)
          {
            jn->SetInputs( ME, norm_ToT((*matrixOfToT)[ME]));
          }

          for (int s=0;s<sizeY;s++)
          {
            jn->SetInputs( sizeX*sizeY+s, norm_pitch((*vectorOfPitchesY)[s]));
          }

          jn->SetInputs( (sizeX+1)*sizeY, norm_layerNumber(ClusterPixLayer));
          jn->SetInputs( (sizeX+1)*sizeY+1, norm_layerType(ClusterPixBarrelEC));

          if (useTrackEstimate)
          {
            jn->SetInputs( (sizeX+1)*sizeY+2, norm_phi(phi) );
            jn->SetInputs( (sizeX+1)*sizeY+3, norm_theta(theta) );
            // jn->SetInputs( (sizeX+1)*sizeY+4, norm_etaModule(etaModule) );
          }
          else
          {
            jn->SetInputs( (sizeX+1)*sizeY+2, norm_phiBS(phiBS) );
            jn->SetInputs( (sizeX+1)*sizeY+3, norm_thetaBS(thetaBS) );
            jn->SetInputs( (sizeX+1)*sizeY+4, norm_etaModule(etaModule) );
          }

          jn->Evaluate();

          float p1=jn->GetOutput(0);
          float p2=jn->GetOutput(1);
          float p3=jn->GetOutput(2);
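
          // Normalizing the first output to the sum gives the network's
          // estimate of the 1-particle probability,
          //   P(1) = p1/(p1+p2+p3),
          // which is histogrammed below for each true multiplicity
          // (sig: 1, bg: 2, bg2: >=3 particles).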

          if (nParticles==1)
          {
            if (iClus%dilutionFactor==0)
            {
              sig->Fill(p1/(p1+p2+p3),weight);
            }
            else if (iClus%dilutionFactor==1)
            {
              sigtest->Fill(p1/(p1+p2+p3),weight);
            }
          }
          if (nParticles==2)
          {
            if (iClus%dilutionFactor==0)
            {
              bg->Fill(p1/(p1+p2+p3),weight);
            }
            else if (iClus%dilutionFactor==1)
            {
              bgtest->Fill(p1/(p1+p2+p3),weight);
            }
          }
          if (nParticles>=3)
          {
            if (iClus%dilutionFactor==0)
            {
              bg2->Fill(p1/(p1+p2+p3),weight);
            }
            else if (iClus%dilutionFactor==1)
            {
              bg2test->Fill(p1/(p1+p2+p3),weight);
            }
          }

        }
      }// end loop over particles
    }// end loop over clusters
  }// end loop over entries

  // now you need the maximum bin content, to set the y-axis range
  // (scan bins 1..N; bin 0 is the underflow)
  float maximum=1;
  for (Int_t a=1;a<=bg->GetNbinsX();a++)
  {
    if (bg->GetBinContent(a)>maximum)
    {
      maximum=1.2*bg->GetBinContent(a);
    }
  }

  bg2->SetLineColor(kYellow);
  bg2->SetFillStyle(3008); bg2->SetFillColor(kYellow);
  bg->SetLineColor(kBlue);
  bg->SetFillStyle(3008); bg->SetFillColor(kBlue);
  sig->SetLineColor(kRed);
  sig->SetFillStyle(3003); sig->SetFillColor(kRed);
  bg2->SetStats(0);
  bg->SetStats(0);
  sig->SetStats(0);

  bg2test->SetLineColor(kYellow);
  bg2test->SetFillStyle(3008); bg2test->SetFillColor(kYellow);
  bgtest->SetLineColor(kBlue);
  bgtest->SetFillStyle(3008); bgtest->SetFillColor(kBlue);
  sigtest->SetLineColor(kRed);
  sigtest->SetFillStyle(3003); sigtest->SetFillColor(kRed);
  bg2test->SetStats(0);
  bgtest->SetStats(0);
  sigtest->SetStats(0);

  mlpa_canvas->cd(1);
  gPad->SetLogy();

  bg->GetYaxis()->SetRangeUser(1,maximum);
  bgtest->GetYaxis()->SetRangeUser(1,maximum);

  bg->Draw();
  bg2->Draw("same");
  sig->Draw("same");

  TLegend *legend = new TLegend(.75, .80, .95, .95);
  legend->AddEntry(bg2, "particles >=3");
  legend->AddEntry(bg, "particles = 2");
  legend->AddEntry(sig, "particles = 1");
  legend->Draw();

  mlpa_canvas->cd(2);
  gPad->SetLogy();

  bgtest->Draw();
  bg2test->Draw("same");
  sigtest->Draw("same");

  TLegend *legendtest = new TLegend(.75, .80, .95, .95);
  legendtest->AddEntry(bg2test, "particles >=3");
  legendtest->AddEntry(bgtest, "particles = 2");
  legendtest->AddEntry(sigtest, "particles = 1");
  legendtest->Draw();

  mlpa_canvas->cd(5);
  gPad->SetLogy();
  bg->DrawNormalized();
  bg2->DrawNormalized("same");
  sig->DrawNormalized("same");
  legend->Draw();

  mlpa_canvas->cd(6);
  gPad->SetLogy();
  bgtest->DrawNormalized();
  bg2test->DrawNormalized("same");
  sigtest->DrawNormalized("same");
  legendtest->Draw();

  mlpa_canvas->cd(3);
  gPad->SetLogy();

  // Same procedure as above, for the second discriminant.
  TH1F *c_bg2 = new TH1F("c_bg2h", "NN output", 50, -.5, 1.5);
  TH1F *c_bg = new TH1F("c_bgh", "NN output", 50, -.5, 1.5);
  TH1F *c_sig = new TH1F("c_sigh", "NN output", 50, -.5, 1.5);

  TH1F *c_bg2test = new TH1F("c_bg2htest", "NN output", 50, -.5, 1.5);
  TH1F *c_bgtest = new TH1F("c_bghtest", "NN output", 50, -.5, 1.5);
  TH1F *c_sigtest = new TH1F("c_sightest", "NN output", 50, -.5, 1.5);

  iClus=0;
  for (Int_t i = 0; i < simu->GetEntries(); i++) {

    if (i % 100000 == 0 ) {
      std::cout << " Second plot. Looping over event " << i << std::endl;
    }

    simu->GetEntry(i);

    for( unsigned int clus =0; clus<NN_sizeX->size(); clus++ ){

      vector<float> *matrixOfToT=0;
      vector<float> *vectorOfPitchesY=0;

      Float_t phiBS;
      Float_t thetaBS;
      Float_t etaModule;
      Int_t ClusterPixLayer;
      Int_t ClusterPixBarrelEC;

      std::vector<float> * positionX=0;
      std::vector<float> * thetaTr=0;
      std::vector<float> * phiTr=0;

      sizeX = (*NN_sizeX)[clus];
      sizeY = (*NN_sizeY)[clus];

      if(sizeX==-100) continue;

      matrixOfToT=&(*NN_matrixOfToT)[clus];
      vectorOfPitchesY=&(*NN_vectorOfPitchesY)[clus];

      phiBS = (*NN_phiBS)[clus];
      thetaBS =(*NN_thetaBS)[clus];
      etaModule =(*NN_etaModule)[clus];

      ClusterPixLayer=(*NN_ClusterPixLayer)[clus];
      ClusterPixBarrelEC = (*NN_ClusterPixBarrelEC)[clus];

      positionX =&(*NN_positionX)[clus];

      int nParticles = positionX->size();
      if(nParticles==0) continue;

      thetaTr = &(*NN_theta)[clus];
      phiTr = &(*NN_phi)[clus];

      if(isBadCluster(sizeX, nParticles ) ) continue;

      for( unsigned int P = 0; P < positionX->size(); P++){

        iClus++;
        double theta = (*thetaTr)[P];
        double phi = (*phiTr)[P];
        // mirror the track angles for one endcap, as above
        if (ClusterPixBarrelEC==2)
        {
          theta=-theta;
          phi=-phi;
        }

        if ( badTrackInfo(useTrackEstimate, theta ) ) continue;
        if ( skipSingle(nParticles, iClus, dilutionFactor) ) continue;

        if (iClus%dilutionFactor==0||iClus%dilutionFactor==1){

          // loop over the elements of matrixOfToT, which is stored as a flat vector
          for(unsigned int ME =0; ME < matrixOfToT->size(); ME++)
          {
            jn->SetInputs( ME, norm_ToT((*matrixOfToT)[ME]));
          }

          for (int s=0;s<sizeY;s++)
          {
            jn->SetInputs( sizeX*sizeY+s, norm_pitch((*vectorOfPitchesY)[s]));
          }

          jn->SetInputs( (sizeX+1)*sizeY, norm_layerNumber(ClusterPixLayer));
          jn->SetInputs( (sizeX+1)*sizeY+1, norm_layerType(ClusterPixBarrelEC));

          if (useTrackEstimate)
          {
            jn->SetInputs( (sizeX+1)*sizeY+2, norm_phi(phi) );
            jn->SetInputs( (sizeX+1)*sizeY+3, norm_theta(theta) );
            // jn->SetInputs( (sizeX+1)*sizeY+4, norm_etaModule(etaModule) );
          }
          else
          {
            jn->SetInputs( (sizeX+1)*sizeY+2, norm_phiBS(phiBS) );
            jn->SetInputs( (sizeX+1)*sizeY+3, norm_thetaBS(thetaBS) );
            jn->SetInputs( (sizeX+1)*sizeY+4, norm_etaModule(etaModule) );
          }

          jn->Evaluate();

          float p1=jn->GetOutput(0);
          float p2=jn->GetOutput(1);
          float p3=jn->GetOutput(2);

          float discr=(p1+p2)/(p1+p2+p3);
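
          // Second discriminant: the normalized probability of at most two
          // particles, P(1)+P(2) = (p1+p2)/(p1+p2+p3). Low values single
          // out clusters the network considers to contain three or more
          // particles.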

          if (nParticles==1)
          {
            if (iClus%dilutionFactor==0)
            {
              c_sig->Fill(discr,weight);
            }
            else if (iClus%dilutionFactor==1)
            {
              c_sigtest->Fill(discr,weight);
            }
          }
          if (nParticles==2)
          {
            if (iClus%dilutionFactor==0)
            {
              c_bg->Fill(discr,weight);
            }
            else if (iClus%dilutionFactor==1)
            {
              c_bgtest->Fill(discr,weight);
            }
          }
          if (nParticles>=3)
          {
            if (iClus%dilutionFactor==0)
            {
              c_bg2->Fill(discr,weight);
            }
            else if (iClus%dilutionFactor==1)
            {
              c_bg2test->Fill(discr,weight);
            }
          }

        }
      }// end loop over particles
    }// end loop over clusters
  }// end loop over entries

  // now you need the maximum again, for the second set of plots
  maximum=1;
  for (Int_t a=1;a<=c_bg->GetNbinsX();a++)
  {
    if (c_bg->GetBinContent(a)>maximum)
    {
      maximum=1.2*c_bg->GetBinContent(a);
    }
  }
  c_bg2->SetLineColor(kYellow);
  c_bg2->SetFillStyle(3008); c_bg2->SetFillColor(kYellow);
  c_bg->SetLineColor(kBlue);
  c_bg->SetFillStyle(3008); c_bg->SetFillColor(kBlue);
  c_sig->SetLineColor(kRed);
  c_sig->SetFillStyle(3003); c_sig->SetFillColor(kRed);
  c_bg2->SetStats(0);
  c_bg->SetStats(0);
  c_sig->SetStats(0);

  c_bg2test->SetLineColor(kYellow);
  c_bg2test->SetFillStyle(3008); c_bg2test->SetFillColor(kYellow);
  c_bgtest->SetLineColor(kBlue);
  c_bgtest->SetFillStyle(3008); c_bgtest->SetFillColor(kBlue);
  c_sigtest->SetLineColor(kRed);
  c_sigtest->SetFillStyle(3003); c_sigtest->SetFillColor(kRed);
  c_bg2test->SetStats(0);
  c_bgtest->SetStats(0);
  c_sigtest->SetStats(0);

  mlpa_canvas->cd(3);
  gPad->SetLogy();

  c_bg->GetYaxis()->SetRangeUser(1,maximum);
  c_bgtest->GetYaxis()->SetRangeUser(1,maximum);

  c_bg->Draw();
  c_bg2->Draw("same");
  c_sig->Draw("same");

  TLegend *legend2 = new TLegend(.75, .80, .95, .95);
  legend2->AddEntry(c_bg2, "particles >=3");
  legend2->AddEntry(c_bg, "particles = 2");
  legend2->AddEntry(c_sig, "particles = 1");
  legend2->Draw();

  mlpa_canvas->cd(4);
  gPad->SetLogy();

  c_bgtest->Draw();
  c_bg2test->Draw("same");
  c_sigtest->Draw("same");

  TLegend *legend2test = new TLegend(.75, .80, .95, .95);
  legend2test->AddEntry(c_bg2test, "particles >=3");
  legend2test->AddEntry(c_bgtest, "particles = 2");
  legend2test->AddEntry(c_sigtest, "particles = 1");
  legend2test->Draw();

  mlpa_canvas->cd(7);
  gPad->SetLogy();
  c_bg->DrawNormalized();
  c_bg2->DrawNormalized("same");
  c_sig->DrawNormalized("same");
  legend2->Draw();

  mlpa_canvas->cd(8);
  gPad->SetLogy();
  c_bgtest->DrawNormalized();
  c_bg2test->DrawNormalized("same");
  c_sigtest->DrawNormalized("same");
  legend2test->Draw();

  mlpa_canvas->cd(0);

  TString resultName=directory;
  resultName+="/result.eps";
  mlpa_canvas->SaveAs(resultName);
}