ATLAS Offline Software
Loading...
Searching...
No Matches
trainNN.cxx File Reference
#include <TChain.h>
#include <TTree.h>
#include <TFile.h>
#include <TCanvas.h>
#include <TH1F.h>
#include <TLegend.h>
#include <TPad.h>
#include "../TJetNet.h"
#include "../doNormalization.C"
#include "Riostream.h"
#include "../TNetworkToHistoTool.h"
#include <TSystem.h>
#include "../TTrainedNetwork.h"
#include "TMatrixD.h"
#include "TVectorD.h"
#include "trainNN.h"
#include <vector>
#include <utility>
#include <algorithm>
#include <string>
#include <cmath>
#include <iostream>
#include "../files.txt"
#include "../filesOnTrack.txt"

Go to the source code of this file.

Functions

Double_t sigmoid (Double_t x)
bool isBadCluster (int sizeX, int nParticles)
bool skipSingle (int nParticles, int iClus, int dilutionFactor)
bool badTrackInfo (bool useTrackEstimate, double theta)
int main ()
bool binIsFull (int binNumber, bool isX, double errValue, int nParticles, int numberBins)
void trainNN (TString inputfile, TString outputclass, int nIterations, int dilutionFactor, int nodesFirstLayer, int nodesSecondLayer, int restartTrainingFrom, int nParticlesTraining, bool useTrackEstimate, int numberBinsErrorEstimate, bool trainXerrors, int nPatternsPerUpdate, double learningRate, double learningRateDecrease, double learningRateMomentum)

Function Documentation

◆ badTrackInfo()

bool badTrackInfo ( bool useTrackEstimate,
double theta )

Definition at line 78 of file errors/trainNN.cxx.

78 {
79
80 if(useTrackEstimate && theta==0){return true;}
81 return false;
82
83}
Scalar theta() const
theta method

◆ binIsFull()

bool binIsFull ( int binNumber,
bool isX,
double errValue,
int nParticles,
int numberBins )

Definition at line 99 of file errors/trainNN.cxx.

100{
101 double interval=0;
102 if (isX)
103 {
104 interval=errorHalfIntervalX(nParticles);
105 }
106 else
107 {
108 interval=errorHalfIntervalY(nParticles);
109 }
110
111 double min=-interval;
112 double max=interval;
113
114   int realBins=numberBins-2;//exclude the overflow / underflow bins
115
116 double binSize=(max-min)/(double)realBins;
117
118 if (errValue<min)
119 {
120 if (binNumber==0)
121 {
122 return true;
123 }
124 else
125 {
126 return false;
127 }
128 }
129 if (errValue>=max)
130 {
131 if (binNumber==numberBins-1)
132 {
133 return true;
134 }
135 else
136 {
137 return false;
138 }
139 }
140
141 bool found=false;
142
143 for (int u=0;u<realBins;u++)
144 {
145 double minInterval=min+u*binSize;
146 double maxInterval=min+(u+1)*binSize;
147
148 if (minInterval<=errValue && errValue<maxInterval)
149 {
150 if (binNumber==u+1)
151 {
152 return true;
153 }
154 else
155 {
156 found=true;
157 }
158 }
159 }
160
161 if (!found)
162 {
163 std::cout <<" Shouldn't arrive HERE !! errValue: " << errValue << std::endl;
164 }
165
166 return false;
167
168}
double errorHalfIntervalY(const int nParticles)
double errorHalfIntervalX(const int nParticles)
#define min(a, b)
Definition cfImp.cxx:40
#define max(a, b)
Definition cfImp.cxx:41
@ u
Enums for curvilinear frames.
Definition ParamDefs.h:77

◆ isBadCluster()

bool isBadCluster ( int sizeX,
int nParticles )

Definition at line 42 of file errors/trainNN.cxx.

42 {
43
44
45 if(sizeX==-100){return true;}
46 if(nParticles==0){return true;}
47 if(nParticles!=1 && nParticles!=2 && nParticles!=3){return true;}
48
49
50
51 return false;
52
53
54}

◆ main()

int main ( )

Definition at line 85 of file errors/trainNN.cxx.

86{
87 trainNN(
88 "../reduceddatasets/reduceddataset_Cone4H1TopoParticleJets_forNN.root",
89 "dummy",
90 10000,
91 200,
92 10,
93 10,
94 0);
95 return 0;
96}
void trainNN(TString inputfile, TString outputclass, int nIterations, int dilutionFactor, int nodesFirstLayer, int nodesSecondLayer, int restartTrainingFrom, int nParticlesTraining, bool useTrackEstimate, int numberBinsErrorEstimate, bool trainXerrors, int nPatternsPerUpdate, double learningRate, double learningRateDecrease, double learningRateMomentum)

◆ sigmoid()

Double_t sigmoid ( Double_t x)

Definition at line 34 of file errors/trainNN.cxx.

35{
36 return 1./(1.+exp(-2*x));
37}
#define x

◆ skipSingle()

bool skipSingle ( int nParticles,
int iClus,
int dilutionFactor )

Definition at line 57 of file errors/trainNN.cxx.

57 {
58
59
 60   // this reduces the number of single-particle clusters by a factor k
61 int k = 60;
62
63
64 if (nParticles==1)
65 {
66 if ((iClus %(dilutionFactor*k) != 0) && (iClus % (dilutionFactor*k) != 1))
67 {
68 return true;
69 }
70 }
71
72 return false;
73
74}

◆ trainNN()

void trainNN ( TString inputfile,
TString outputclass,
int nIterations,
int dilutionFactor,
int nodesFirstLayer,
int nodesSecondLayer,
int restartTrainingFrom,
int nParticlesTraining,
bool useTrackEstimate,
int numberBinsErrorEstimate,
bool trainXerrors,
int nPatternsPerUpdate,
double learningRate,
double learningRateDecrease,
double learningRateMomentum )

Definition at line 174 of file errors/trainNN.cxx.

188 {
189
190 double bweight=1;
191 double cweight=1.;
192 double lweight=1;
193
194
195
196 gROOT->SetStyle("Plain");
197
198 cout << "starting with settings: " << endl;
199 cout << " nIterations: " << nIterations << endl;
200 cout << " dilutionFactor: " << dilutionFactor << endl;
201 cout << " nodesFirstLayer: " << nodesFirstLayer << endl;
202 cout << " nodesSecondLayer: " << nodesSecondLayer << endl;
203
204
205// TFile *file= TFile::Open(inputfile);
206
207// TTree *simu = (TTree*)file->Get("Validation/NNinput");
208 TChain *myChain = new TChain("Validation/NNinput");
209
210
211if(!useTrackEstimate){
212 #include "../files.txt"
213}
214
215if(useTrackEstimate){
216 #include "../filesOnTrack.txt"
217}
218 TChain* simu=myChain;
219
220 std::cout << " Training sample obtained... " << std::endl;
221
222 vector<int> *NN_sizeX;
223 vector<int> *NN_sizeY;
224 vector<vector<float> > *NN_matrixOfToT;
225 vector<vector<float> > *NN_vectorOfPitchesY;
226 vector<int> *NN_ClusterPixLayer;
227 vector<int> *NN_ClusterPixBarrelEC;
228 vector<float> *NN_phiBS;
229 vector<float> *NN_thetaBS;
230 vector<float> *NN_etaModule;
231 vector<bool> *NN_useTrackInfo;
232 vector<int> *NN_columnWeightedPosition;
233 vector<int> *NN_rowWeightedPosition;
234 vector<double> *NN_localColumnWeightedPosition;
235 vector<double> *NN_localRowWeightedPosition;
236
237 vector<vector<float> > *NN_positionX;
238 vector<vector<float> > *NN_positionY;
239 vector<vector<float> > *NN_position_idX;
240 vector<vector<float> > *NN_position_idY;
241 vector<vector<float> > *NN_theta;
242 vector<vector<float> > *NN_phi;
243
244 // List of branches
245 TBranch *b_NN_sizeX;
246 TBranch *b_NN_sizeY;
247 TBranch *b_NN_matrixOfToT;
248 TBranch *b_NN_vectorOfPitchesY;
249 TBranch *b_NN_ClusterPixLayer;
250 TBranch *b_NN_ClusterPixBarrelEC;
251 TBranch *b_NN_phiBS;
252 TBranch *b_NN_thetaBS;
253 TBranch *b_NN_etaModule;
254 TBranch *b_NN_useTrackInfo;
255 TBranch *b_NN_columnWeightedPosition;
256 TBranch *b_NN_rowWeightedPosition;
257 TBranch *b_NN_localColumnWeightedPosition;
258 TBranch *b_NN_localRowWeightedPosition;
259 TBranch *b_NN_positionX;
260 TBranch *b_NN_positionY;
261 TBranch *b_NN_position_idX;
262 TBranch *b_NN_position_idY;
263 TBranch *b_NN_theta;
264 TBranch *b_NN_phi;
265
266
267
268 NN_sizeX = 0;
269 NN_sizeY = 0;
270 NN_matrixOfToT = 0;
271 NN_vectorOfPitchesY = 0;
272 NN_ClusterPixLayer = 0;
273 NN_ClusterPixBarrelEC = 0;
274 NN_phiBS = 0;
275 NN_thetaBS = 0;
276 NN_etaModule = 0;
277 NN_useTrackInfo = 0;
278 NN_columnWeightedPosition = 0;
279 NN_rowWeightedPosition = 0;
280 NN_localColumnWeightedPosition = 0;
281 NN_localRowWeightedPosition = 0;
282 NN_positionX = 0;
283 NN_positionY = 0;
284 NN_position_idX = 0;
285 NN_position_idY = 0;
286 NN_theta = 0;
287 NN_phi = 0;
288 // Set branch addresses and branch pointers
289 // if (!tree) return 0;
290 // TTree* simu = tree;
291 // fCurrent = -1;
292 simu->SetMakeClass(1);
293
294 simu->SetBranchAddress("NN_sizeX", &NN_sizeX, &b_NN_sizeX);
295 simu->SetBranchAddress("NN_sizeY", &NN_sizeY, &b_NN_sizeY);
296 simu->SetBranchAddress("NN_matrixOfToT", &NN_matrixOfToT, &b_NN_matrixOfToT);
297 simu->SetBranchAddress("NN_vectorOfPitchesY", &NN_vectorOfPitchesY, &b_NN_vectorOfPitchesY);
298 simu->SetBranchAddress("NN_ClusterPixLayer", &NN_ClusterPixLayer, &b_NN_ClusterPixLayer);
299 simu->SetBranchAddress("NN_ClusterPixBarrelEC", &NN_ClusterPixBarrelEC, &b_NN_ClusterPixBarrelEC);
300 simu->SetBranchAddress("NN_phiBS", &NN_phiBS, &b_NN_phiBS);
301 simu->SetBranchAddress("NN_thetaBS", &NN_thetaBS, &b_NN_thetaBS);
302 simu->SetBranchAddress("NN_etaModule", &NN_etaModule, &b_NN_etaModule);
303 simu->SetBranchAddress("NN_useTrackInfo", &NN_useTrackInfo, &b_NN_useTrackInfo);
304 simu->SetBranchAddress("NN_columnWeightedPosition", &NN_columnWeightedPosition, &b_NN_columnWeightedPosition);
305 simu->SetBranchAddress("NN_rowWeightedPosition", &NN_rowWeightedPosition, &b_NN_rowWeightedPosition);
306
307 simu->SetBranchAddress("NN_localColumnWeightedPosition", &NN_localColumnWeightedPosition, &b_NN_localColumnWeightedPosition);
308 simu->SetBranchAddress("NN_localRowWeightedPosition", &NN_localRowWeightedPosition, &b_NN_localRowWeightedPosition);
309
310 simu->SetBranchAddress("NN_positionX", &NN_positionX, &b_NN_positionX);
311 simu->SetBranchAddress("NN_positionY", &NN_positionY, &b_NN_positionY);
312 simu->SetBranchAddress("NN_position_idX", &NN_position_idX, &b_NN_position_idX);
313 simu->SetBranchAddress("NN_position_idY", &NN_position_idY, &b_NN_position_idY);
314
315 simu->SetBranchAddress("NN_theta", &NN_theta, &b_NN_theta);
316 simu->SetBranchAddress("NN_phi", &NN_phi, &b_NN_phi);
317
318
319 cout << "Branches set..." << endl;
320
321 TString name;
322 if(nParticlesTraining == 1 ) name+="WeightsOneTracks.root";
323 if(nParticlesTraining == 2 ) name+="WeightsTwoTracks.root";
324 if(nParticlesTraining == 3 ) name+="WeightsThreeTracks.root";
325
326 if (!useTrackEstimate)
327 {
328 name.ReplaceAll(".root","_noTrack.root");
329 }
330
331 // getting a postion trained network from file
332 TFile *_file0 = new TFile(name);
333 TTrainedNetwork* positionTrainedNetwork=(TTrainedNetwork*)_file0->Get("TTrainedNetwork");
334
335 cout << " Reading back network with minimum" << endl;
336
337
338 TString filterTrain("Entry$%");
339 filterTrain+=dilutionFactor;
340 filterTrain+="==0";
341
342 TString filterTest("Entry$%");
343 filterTest+=dilutionFactor;
344 filterTest+="==1";
345
346 int* nneurons;
347 int nlayer=3;
348
349 simu->GetEntry(0);
350
351 cout << "First entry..." << endl;
352
353 Int_t sizeX=-7;
354 Int_t sizeY=-7;
355
356
357   // loop over the clusters looking for the first cluster properly saved
358 for( unsigned int clus =0; clus<NN_sizeX->size(); clus++ ){
359
360 sizeX = (*NN_sizeX)[clus];
361 sizeY = (*NN_sizeY)[clus];
362
363 if(sizeX>0)break;
364
365 }
366
367 cout << "Size obtained" << endl;
368
369
370
371 int numberinputs=sizeX*(sizeY+1)+4+nParticlesTraining*2;//add also position information
372 if (!useTrackEstimate)
373 {
374 numberinputs=sizeX*(sizeY+1)+5+nParticlesTraining*2;
375 }
376
377 int numberoutputs=nParticlesTraining*numberBinsErrorEstimate;
378 //2 for x and y
379 //nParticlesTraining
380 //numberBinsErrorEstimate
381
382 if (nodesSecondLayer!=0)
383 {
384 nlayer=4;
385 }
386
387 if (nodesSecondLayer!=0)
388 {
389 nneurons=new int[4];
390 }
391 else
392 {
393 nneurons=new int[3];
394 }
395
396 nneurons[0]=numberinputs;
397
398 nneurons[1]=nodesFirstLayer;
399
400 if (nodesSecondLayer!=0)
401 {
402 nneurons[2]=nodesSecondLayer;
403 nneurons[3]=numberoutputs;//number of output nodes
404 }
405 else
406 {
407 nneurons[2]=numberoutputs;//number of output nodes
408 }
409
410 for (int i=0;i<nlayer;i++)
411 {
412 cout << " layer i: " << i << " number neurons: " << nneurons[i] << endl;
413 }
414
415
416 // float eventWeight(0);
417 float trainingError(0);
418 float testError(0);
419
420 //setting learning parameters
421
422 cout << " now providing training events " << endl;
423
424 Long64_t numberTrainingEvents=0;
425 Long64_t numberTestingEvents=0;
426 int iClus=0;
427 int part_0=0;
428 int part_1=0;
429 int part_2=0;
430 int part_3=0;
431
432 int nTotal=simu->GetEntries();
433
434// int nTotal=200;
435
436 // Loop over entries:
437 for (Int_t i = 0; i < nTotal; i++) {
438
439 if (i % 1000000 == 0 ) {
440 std::cout << " Counting training / testing events in sample. Looping over event " << i << std::endl;
441 }
442
443 simu->GetEntry(i);
444
445 for( unsigned int clus =0; clus<NN_sizeX->size(); clus++ ){
446
447 vector<float> *matrixOfToT=0;
448 vector<float> *vectorOfPitchesY=0;
449
450 Float_t phiBS;
451 Float_t thetaBS;
452 Float_t etaModule;
453 Int_t ClusterPixLayer;
454 Int_t ClusterPixBarrelEC;
455
456 std::vector<float> * positionX=0;
457 std::vector<float> * positionY=0;
458 std::vector<float> * thetaTr=0;
459 std::vector<float> * phiTr=0;
460
461
462
463 int sizeX = (*NN_sizeX)[clus];
464 positionX =&(*NN_positionX)[clus];
465 int nParticles = positionX->size();
466 //cout << "------" << endl;
467 if (nParticlesTraining!=nParticles)
468 {
469 continue;
470 }
471 if (isBadCluster(sizeX, nParticles ) )continue;
472
473 thetaTr = &(*NN_theta)[clus];
474 phiTr = &(*NN_phi)[clus];
475
476 // loop over the particles;
477 for( unsigned int P = 0; P < positionX->size(); P++){
478 double theta = (*thetaTr)[P];
479 if (theta!=theta) continue;
480
481 iClus++;
482
483 if ( badTrackInfo(useTrackEstimate, theta ) )continue;
484
485
486
487
488
489 if (iClus%dilutionFactor==0) numberTrainingEvents+=1;
490 if (iClus%dilutionFactor==1) numberTestingEvents+=1;
491
492 if (iClus%dilutionFactor==1 && nParticles==1 ) part_1++;
493 if (iClus%dilutionFactor==1 && nParticles==2 ) part_2++;
494 if (iClus%dilutionFactor==1 && nParticles==3 ) part_3++;
495
496
497
498   }// end loop over the particles
499 }// end loop over cluster
500 }// end Loop over entries
501
502
503
504 cout << " N. training events: " << numberTrainingEvents <<
505 " N. testing events: " << numberTestingEvents << endl;
506
507 cout << "now start to setup the network..." << endl;
508
509 TJetNet* jn = new TJetNet( numberTestingEvents, numberTrainingEvents, nlayer, nneurons );
510
511 cout << " setting learning method... " << endl;
512
513
514 jn->SetPatternsPerUpdate( nPatternsPerUpdate );
515 jn->SetUpdatesPerEpoch( (int)std::floor((float)numberTrainingEvents/(float)nPatternsPerUpdate) );
516 jn->SetUpdatingProcedure( 0 );
517 jn->SetErrorMeasure( 0 );
518 jn->SetActivationFunction( 1 );
519 jn->SetLearningRate( learningRate );//0.8
520 jn->SetMomentum( learningRateMomentum );//0.3 //is now 0.5
521 jn->SetInitialWeightsWidth( 1. );
522 jn->SetLearningRateDecrease( learningRateDecrease );//0.992
523
524
525 //jn->SetActivationFunction(4,nlayer-1-1); This is to have linear OUTPUT.
526
527
528 TH1F* histoControlTestX=new TH1F("histoControlTestX","histoControlTestX",numberBinsErrorEstimate,0,numberBinsErrorEstimate);
529 TH1F* histoControlTestY=new TH1F("histoControlTestY","histoControlTestY",numberBinsErrorEstimate,0,numberBinsErrorEstimate);
530
531
532 int trainSampleNumber=0;
533 int testSampleNumber=1;
534
535
536
537 cout << " copying over training events " << endl;
538 int counter=0;
539 int counter0=0;
540 int counter1=0;
541
542 iClus=0;
543
544
545 for (Int_t i = 0; i < nTotal; i++) {
546
547 if (i % 1000 == 0 ) {
548 std::cout << " Copying over training events. Looping over event " << i << std::endl;
549 }
550
551 simu->GetEntry(i);
552 // loop over clusters
553 for( unsigned int clus =0; clus<NN_sizeX->size(); clus++ ){
554
555 vector<float> *matrixOfToT=0;
556 vector<float> *vectorOfPitchesY=0;
557
558 Float_t phiBS;
559 Float_t thetaBS;
560 Float_t etaModule;
561 Int_t ClusterPixLayer;
562 Int_t ClusterPixBarrelEC;
563
564 std::vector<float> * positionX=0;
565 std::vector<float> * positionY=0;
566
567 std::vector<float> positionX_reorder;
568 std::vector<float> positionY_reorder;
569
570
571
572
573 std::vector<float> * thetaTr=0;
574 std::vector<float> * phiTr=0;
575
576 double localColumnWeightedPosition;// = 0;
577 double localRowWeightedPosition;// = 0;
578
579 sizeX = (*NN_sizeX)[clus];
580 sizeY = (*NN_sizeY)[clus];
581
582 matrixOfToT=&(*NN_matrixOfToT)[clus];
583 vectorOfPitchesY=&(*NN_vectorOfPitchesY)[clus];
584
585 phiBS = (*NN_phiBS)[clus];
586 thetaBS =(*NN_thetaBS)[clus];
587 etaModule =(*NN_etaModule)[clus];
588
589 ClusterPixLayer=(*NN_ClusterPixLayer)[clus];
590 ClusterPixBarrelEC = (*NN_ClusterPixBarrelEC)[clus];
591
592 positionX =&(*NN_positionX)[clus];
593 positionY =&(*NN_positionY)[clus];
594
595 positionX_reorder=*positionX;
596 positionY_reorder.clear();
597
598
599
600 thetaTr = &(*NN_theta)[clus];
601 phiTr = &(*NN_phi)[clus];
602
603
604 localColumnWeightedPosition =(*NN_localColumnWeightedPosition)[clus];
605 localRowWeightedPosition =(*NN_localRowWeightedPosition)[clus];
606
607
608
609 int nParticles = positionX->size();
610 if (nParticlesTraining!=nParticles)
611 {
612 continue;
613 }
614
615 if(isBadCluster(sizeX, nParticles ) )continue;
616
617
618
619
620
621
622 std::sort(positionX_reorder.begin(),
623 positionX_reorder.end());
624
625 for (int o=0;o<positionX->size();o++)
626 {
627 double corry=-1000;
628 for (int e=0;e<positionX->size();e++)
629 {
630 if (fabs(positionX_reorder[o]-(*positionX)[e])<1e-10)
631 {
632 if (fabs(corry+1000)>1e-6)
633 {
634 cout << " Value find more than once! " << endl;
635 for (int p=0;p<positionX->size();p++)
636 {
637 cout << " X n. : " << p << " is: " << (*positionX)[p] << endl;
638 }
639 }
640 corry=(*positionY)[e];
641 }
642 }
643 positionY_reorder.push_back(corry);
644 }
645
646
647
648 for( unsigned int P = 0; P < positionX->size(); P++){
649
650
651 double theta = (*thetaTr)[P];
652 double phi = (*phiTr)[P];
653
654 if (theta!=theta) continue;
655
656 if (ClusterPixBarrelEC==2)
657 {
658 theta=-theta;
659 phi=-phi;
660 }
661
662
663 iClus++;
664
665 if ( badTrackInfo(useTrackEstimate, theta ) )continue;
666
667 // formatting cluster infos for the NN input
668
669 std::vector<Double_t> inputData;
670
671 for(unsigned int ME =0; ME < matrixOfToT->size(); ME++){
672
673 inputData.push_back(norm_ToT((*matrixOfToT)[ME]));
674 }
675
676 for (int s=0;s<sizeY;s++)
677 {
678 inputData.push_back(norm_pitch((*vectorOfPitchesY)[s]));
679 }
680
681 inputData.push_back(norm_layerNumber(ClusterPixLayer));
682 inputData.push_back(norm_layerType(ClusterPixBarrelEC));
683
684 if (useTrackEstimate)
685 {
686 inputData.push_back(norm_phi(phi));
687 inputData.push_back(norm_theta(theta));
688 }
689 else
690 {
691 inputData.push_back(norm_phiBS(phiBS));
692 inputData.push_back(norm_thetaBS(thetaBS));
693 inputData.push_back(norm_etaModule(etaModule));
694 }
695 // cout << "formatted" << endl;
696
697
698 vector<double> outputNN_idX;
699 vector<double> outputNN_idY;
700
701
702 vector<double> resultNN = positionTrainedNetwork->calculateOutputValues(inputData);
703
704
705
706
707 // storing the obtained X and Y position in vectors of position // nParticlesTraining
708
709 if(nParticlesTraining==1){
710 outputNN_idX.push_back(back_posX(resultNN[0]));
711 outputNN_idY.push_back(back_posY(resultNN[1]));
712 }
713
714
715 if(nParticlesTraining==2){
716 outputNN_idX.push_back(back_posX(resultNN[0]));
717 outputNN_idX.push_back(back_posX(resultNN[2]));
718 outputNN_idY.push_back(back_posY(resultNN[1]));
719 outputNN_idY.push_back(back_posY(resultNN[3]));
720 }
721
722 if(nParticlesTraining==3){
723 outputNN_idX.push_back(back_posX(resultNN[0]));
724 outputNN_idX.push_back(back_posX(resultNN[2]));
725 outputNN_idX.push_back(back_posX(resultNN[4]));
726 outputNN_idY.push_back(back_posY(resultNN[1]));
727 outputNN_idY.push_back(back_posY(resultNN[3]));
728 outputNN_idY.push_back(back_posY(resultNN[5]));
729 }
730
731
732
733 // HERE WE NEED THE CONVERSION OF THE POSITION RESPECT WITH CLUSTER CENTER FROM LOCAL POSITION!!!
734 vector<float> outputNN_X ;
735 vector<float> outputNN_Y ;
736
737 for(unsigned int t=0; t < outputNN_idX.size(); t++){
738
739   double PitchX=0.05; //50 micron pitch in X
740
741   // local position with respect to the cluster center (this is the local position corresponding to the cluster center)
742 // defined as the center of matrixOfToT (definito come centro della matrice per la rete neuronale)
743 double centerPosY = localColumnWeightedPosition;
744 double centerPosX = localRowWeightedPosition;
745// cout << sizeX << " " << "centerPosX: " << centerPosX << endl;
746 //questi sono gli unput che la rete restituisce (coordinate rispetto al centro)
747   // output of the NN (with respect to the cluster center)
748 double indexX = outputNN_idX[t];
749 double indexY = outputNN_idY[t];
750 double indexPositionToTranslateY = indexY+(double)(sizeY-1)/2;
751 //cout << indexX << " " << centerPosX << endl;
752 double pos_idX = centerPosX + (double)indexX * PitchX;
753// cout << " pos_idX = " << pos_idX << endl;
754
755 //now for Y it is slightly more complicated (but not much)
756 double positionYFromZero = -100;
757 double positionCenterYFromZero = -100;
758 double actualPositionFromZero=0.;
759
760 //start both indices and positions at zero
761
762 for (int i=0;i<sizeY;i++)
763 {
764 if (indexPositionToTranslateY>=(double)i && indexPositionToTranslateY<=(double)(i+1))
765 {
766 positionYFromZero = actualPositionFromZero + (double)(indexPositionToTranslateY-(double)i+0.5)*(*vectorOfPitchesY)[i];
767 // cout << "positionYFromZero: " << positionYFromZero << endl;
768 }
769 if (i==(sizeY-1)/2)
770 {
771 positionCenterYFromZero = actualPositionFromZero + 0.5* (*vectorOfPitchesY)[i];
772 // cout << "positionCenterYFromZero: " << positionCenterYFromZero << endl;
773 }
774 actualPositionFromZero+=(*vectorOfPitchesY)[i];
775 }
776
777 double pos_idY = centerPosY + positionYFromZero - positionCenterYFromZero;
778// cout << " pos_idY " << pos_idY << endl;
779
780
781 //
782 outputNN_X.push_back(pos_idX);
783 outputNN_Y.push_back(pos_idY);
784 }
785
786
787
788
789 if (matrixOfToT->size()!=sizeX*sizeY)
790 {
791 std::cout << " Event: " << i << " PROBLEM: size Y is: " << matrixOfToT->size() << std::endl;
792 throw std::runtime_error("Error in errors/trainNN.cxx");
793 }
794
795 // loop over elements of matrixOfTot which is actually a vector
796 for(unsigned int ME =0; ME < matrixOfToT->size(); ME++){
797
798 if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, ME, norm_ToT((*matrixOfToT)[ME]));
799 if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, ME, norm_ToT((*matrixOfToT)[ME]));
800
801 if (counter1 == 0) std::cout << " element: " << ME << " ToT set to: " << norm_ToT((*matrixOfToT)[ME]) << std::endl;
802
803 }
804
805
806 // loop over vector of pitches
807 for (int s=0;s<sizeY;s++)
808 {
809 if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, sizeX*sizeY+s, norm_pitch((*vectorOfPitchesY)[s]));
810 if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, sizeX*sizeY+s, norm_pitch((*vectorOfPitchesY)[s]));
811
812 if (counter0 == 0) std::cout << " s: " << s << " pitch set to: " << norm_pitch((*vectorOfPitchesY)[s]) << std::endl;
813 }
814
815 if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, (sizeX+1)*sizeY, norm_layerNumber(ClusterPixLayer));
816 if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, (sizeX+1)*sizeY+1, norm_layerType(ClusterPixBarrelEC));
817
818 if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, (sizeX+1)*sizeY, norm_layerNumber(ClusterPixLayer));
819 if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, (sizeX+1)*sizeY+1, norm_layerType(ClusterPixBarrelEC));
820
821
822
823 if (counter0 == 0) std::cout << " ClusterPixLayer " << norm_layerNumber(ClusterPixLayer) << " ClusterPixBarrelEC " << norm_layerType(ClusterPixBarrelEC) << std::endl;
824
825 if (useTrackEstimate)
826 {
827 if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, (sizeX+1)*sizeY+2, norm_phi(phi) );
828 if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, (sizeX+1)*sizeY+3, norm_theta(theta) );
829
830 if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, (sizeX+1)*sizeY+2, norm_phi(phi) );
831 if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, (sizeX+1)*sizeY+3, norm_theta(theta) );
832
833 if (counter0==0) std::cout << " phi " << norm_phi(phi) << " theta: " << norm_theta(theta) << std::endl;
834
835 }
836 else
837 {
838 if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, (sizeX+1)*sizeY+2, norm_phiBS(phiBS) );
839 if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, (sizeX+1)*sizeY+3, norm_thetaBS(thetaBS) );
840 if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, (sizeX+1)*sizeY+4, norm_etaModule(etaModule) );
841
842 if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, (sizeX+1)*sizeY+2, norm_phiBS(phiBS) );
843 if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, (sizeX+1)*sizeY+3, norm_thetaBS(thetaBS) );
844 if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, (sizeX+1)*sizeY+4, norm_etaModule(etaModule) );
845
846
847 if (counter0==0) std::cout <<
848 " phiBS " << norm_phiBS(phiBS) <<
849 " thetaBS: " << norm_thetaBS(thetaBS) <<
850 " etaModule: " << norm_etaModule(etaModule) << std::endl;
851 }
852
853 int addNumber=5;
854 if (useTrackEstimate) addNumber=4;
855
856
857
858 for (int o=0;o<nParticlesTraining;o++)
859 {
860 if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, (sizeX+1)*sizeY+addNumber+2*o,norm_posX(outputNN_idX[o]) );
861 if (iClus%dilutionFactor==0) jn->SetInputTrainSet( counter0, (sizeX+1)*sizeY+addNumber+2*o+1,norm_posY(outputNN_idY[o]) );
862
863 if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, (sizeX+1)*sizeY+addNumber+2*o,norm_posX(outputNN_idX[o]) );
864 if (iClus%dilutionFactor==1) jn->SetInputTestSet( counter1, (sizeX+1)*sizeY+addNumber+2*o+1,norm_posY(outputNN_idY[o]) );
865
866
867
868 if (counter==0) std::cout << " n. " << o <<
869 " posX: " << norm_posX((outputNN_idX)[o]) <<
870 " posY: " << norm_posY((outputNN_idY)[o]) << std::endl;
871
872 }
873
874
875
876// cout << " nParticlesTraining " << nParticlesTraining << endl;
877
878
879 for (int r=0;r<nParticlesTraining;r++)
880 {
881
882// cout << " numberBinsErrorEstimate " << numberBinsErrorEstimate << endl;
883 // if (counter==0) std::cout << " particle: " << r << std::endl;
884
885 for (int u=0;u<numberBinsErrorEstimate;u++)
886 {
887 if (trainXerrors)
888 {
889// cout << " outputNN_X["<<r<<"]="<<outputNN_X[r] <<
890// " positionX_reorder["<<r<<"] "<< positionX_reorder[r] << endl;
891 bool full=binIsFull(u,true,(outputNN_X[r]-positionX_reorder[r]),nParticlesTraining,numberBinsErrorEstimate);
892 int nValueFull=0;
893 if (full) nValueFull=1;
894 if (iClus%dilutionFactor==0) jn->SetOutputTrainSet(counter0, r*numberBinsErrorEstimate+u, nValueFull);
895 if (iClus%dilutionFactor==1) jn->SetOutputTestSet(counter1, r*numberBinsErrorEstimate+u, nValueFull);
896
897 if (counter==0) std::cout << " X bin: " << u << " gl: "<< r*2*numberBinsErrorEstimate+u << " val: " << nValueFull;
898 histoControlTestX->Fill(u+1,nValueFull);
899 }
900 }
901 for (int u=0;u<numberBinsErrorEstimate;u++)
902 {
903 if (!trainXerrors)
904 {
905// cout << " outputNN_Y["<<r<<"]="<<outputNN_Y[r] <<
906// " positionY_reorder["<<r<<"] "<< positionY_reorder[r] << endl;
907
908 bool full=binIsFull(u,false,(outputNN_Y[r]-positionY_reorder[r]),nParticlesTraining,numberBinsErrorEstimate);
909 int nValueFull=0;
910 if (full) nValueFull=1;
911 if (iClus%dilutionFactor==0) jn->SetOutputTrainSet(counter0, r*numberBinsErrorEstimate+u, nValueFull);
912 if (iClus%dilutionFactor==1) jn->SetOutputTestSet(counter1, r*numberBinsErrorEstimate+u, nValueFull);
913
914 if (counter0==0) std::cout << " Y bin: " << u << " gl: " << r*2*numberBinsErrorEstimate+numberBinsErrorEstimate+u << " val: " << nValueFull;
915 if (iClus%dilutionFactor==0) histoControlTestY->Fill(u+1,nValueFull);
916 }
917
918 }
919 }
920
921 if (counter0==0) std::cout << std::endl << " total number of bins: " << numberoutputs << std::endl;
922
923
924
925 if (iClus%dilutionFactor==0) jn->SetEventWeightTrainSet( counter0, 1 );
926 if (iClus%dilutionFactor==1) jn->SetEventWeightTestSet( counter1, 1 );
927
928 counter+=1;
929
930 if (iClus%dilutionFactor==0) counter0+=1;
931 if (iClus%dilutionFactor==1) counter1+=1;
932
933
934 }
935 }
936 }
937
938 if (counter0!=numberTrainingEvents)
939 {
940 cout << " counter up to: " << counter0 << " while events in training sample are " << numberTrainingEvents << endl;
941 return;
942 }
943
944 if (counter1!=numberTestingEvents)
945 {
946 cout << " counter up to: " << counter1 << " while events in testing sample are " << numberTestingEvents << endl;
947 return;
948 }
949
950
951
952
953
954
955
956
957 jn->Shuffle(true,false);
958
959 std::cout << " Potts units are: " << jn->GetPottsUnits() << std::endl;
960
961
962 cout << " setting pattern for training events " << endl;
963
964 if (restartTrainingFrom==0)
965 {
966 jn->Init();
967 }
968 else
969 {
970 TString name("Weights");
971 name+=restartTrainingFrom;
972 name+=".w";
973
974 jn->ReadFromFile(name);
975 }
976
977
978
979 float minimumError=1e10;
980 int epochesWithRisingError=0;
981 int epochWithMinimum=0;
982
983 int updatesPerEpoch=jn->GetUpdatesPerEpoch();
984
985 TString directory("weights");
986 directory+="_nPat";
987 directory+=nPatternsPerUpdate;
988 directory+="_rate";
989 directory+=(int)(learningRate*100);
990 directory+="_rdec";
991 directory+=(int)(100*learningRateDecrease);
992 directory+="_mom";
993 directory+=(int)(100*learningRateMomentum);
994 directory+="_";
995 directory+=nParticlesTraining;
996 directory+="_";
997 if (trainXerrors)
998 {
999 directory+="X";
1000 }
1001 else
1002 {
1003 directory+="Y";
1004 }
1005
1006 if (useTrackEstimate)
1007 {
1008 directory+="_withTracks";
1009 }
1010
1011 TString command("mkdir ");
1013
1014 gSystem->Exec(command);
1015
1016 TString nameCronology=directory;
1017
1018 nameCronology+="/trainingCronology.txt";
1019
1020 //prepare output stream
1021
1022 ofstream cronology(nameCronology,ios_base::out);//|ios_base::app);
1023
1024 cronology << "-------------SETTINGS----------------" << endl;
1025 cronology << "Epochs: " << jn->GetEpochs() << std::endl;
1026 cronology << "Updates Per Epoch: " << jn->GetUpdatesPerEpoch() << std::endl;
1027 cronology << "Updating Procedure: " << jn->GetUpdatingProcedure() << std::endl;
1028 cronology << "Error Measure: " << jn->GetErrorMeasure() << std::endl;
1029 cronology << "Patterns Per Update: " << jn->GetPatternsPerUpdate() << std::endl;
1030 cronology << "Learning Rate: " << jn->GetLearningRate() << std::endl;
1031 cronology << "Momentum: " << jn->GetMomentum() << std::endl;
1032 cronology << "Initial Weights Width: " << jn->GetInitialWeightsWidth() << std::endl;
1033 cronology << "Learning Rate Decrease: " << jn->GetLearningRateDecrease() << std::endl;
1034 cronology << "Activation Function: " << jn->GetActivationFunction() << std::endl;
1035 cronology << "-------------LAYOUT------------------" << endl;
1036 cronology << "Input variables: " << jn->GetInputDim() << endl;
1037 cronology << "Output variables: " << jn->GetOutputDim() << endl;
1038 cronology << "Hidden layers: " << jn->GetHiddenLayerDim() << endl;
1039 cronology << "Layout : ";
1040 for (Int_t s=0;s<jn->GetHiddenLayerDim()+2;++s)
1041 {
1042 cronology << jn->GetHiddenLayerSize(s);
1043 if (s<jn->GetHiddenLayerDim()+1) cronology << "-";
1044 }
1045 cronology << endl;
1046 cronology << "--------------HISTORY-----------------" << endl;
1047 cronology << "History of iterations: " << endl;
1048 cronology.close();
1049
1050 //prepare training histo
1051 TH1F* histoTraining=new TH1F("training","training",(int)std::floor((float)nIterations/10.+0.5),1,std::floor((float)nIterations/10.+1.5));
1052 TH1F* histoTesting=new TH1F("testing","testing",(int)std::floor((float)nIterations/10.+0.5),1,std::floor((float)nIterations/10.+1.5));
1053
1054 double maximumTrain=0;
1055 double minimumTrain=1e10;
1056
1057 for(int epoch=restartTrainingFrom+1;epoch<=nIterations;++epoch)
1058 {
1059 if (epoch!=restartTrainingFrom+1)
1060 {
1061 trainingError = jn->Train();
1062 }
1063
1064 if (epoch%10==0 || epoch==restartTrainingFrom+1)
1065 {
1066
1067 cronology.open(nameCronology,ios_base::app);
1068
1069 testError = jn->Test();
1070
1071 if (trainingError>maximumTrain) maximumTrain=trainingError;
1072 if (testError>maximumTrain) maximumTrain=testError;
1073 if (trainingError<minimumTrain) minimumTrain=trainingError;
1074 if (testError<minimumTrain) minimumTrain=testError;
1075
1076
1077 histoTraining->Fill(epoch/10.,trainingError);
1078 histoTesting->Fill(epoch/10.,testError);
1079
1080 if (testError<minimumError)
1081 {
1082 minimumError=testError;
1083 epochesWithRisingError=0;
1084 epochWithMinimum=epoch;
1085 }
1086 else
1087 {
1088 epochesWithRisingError+=10;
1089
1090 }
1091
1092
1093 if (epochesWithRisingError>300)
1094 {
1095 if (trainingError<minimumError)
1096 {
1097 cout << " End of training. Minimum already on epoch: " << epochWithMinimum << endl;
1098 cronology << " End of training. Minimum already on epoch: " << epochWithMinimum << endl;
1099 break;
1100 }
1101 }
1102
1103 cronology << "Epoch: [" << epoch <<
1104 "] Error: " << trainingError <<
1105 " Test: " << testError << endl;
1106
1107 cout << "Epoch: [" << epoch <<
1108 "] Error: " << trainingError <<
1109 " Test: " << testError << endl;
1110
1111 cronology.close();
1112
1113 TString name=directory;
1114 name+="/Weights";
1115 name+=epoch;
1116 name+=".root";
1117
1118 TFile* file=new TFile(name,"recreate");
1119 TTrainedNetwork* trainedNetwork=jn->createTrainedNetwork();
1120
1121
1122
1123 trainedNetwork->Write();
1124 histoControlTestX->Write();
1125 histoControlTestY->Write();
1126 file->Write();
1127 file->Close();
1128 delete file;
1129
1130
1131
1132 // jn->DumpToFile(name);
1133 }
1134 }
1135
1136 jn->writeNetworkInfo(1);
1137 jn->writeNetworkInfo(2);
1138
1139
1140
1141
1143 Int_t nInput=jn->GetInputDim();
1144
1145 cout << " create Trained Network object..." << endl;
1146
1147 TTrainedNetwork* trainedNetwork=jn->createTrainedNetwork();
1148
1149
1150
1151 cout << " Now getting histograms from trainingResult" << endl;
1152 cronology << " Now getting histograms from trainingResult" << endl;
1153
1154 TNetworkToHistoTool myHistoTool;
1155
1156 cout << " From network to histo..." << endl;
1157 std::vector<TH1*> myHistos=myHistoTool.fromTrainedNetworkToHisto(trainedNetwork);
1158
1159 cout << " From histo to network back..." << endl;
1160 TTrainedNetwork* trainedNetwork2=myHistoTool.fromHistoToTrainedNetwork(myHistos);
1161
1162 cout << " reading back " << endl;
1163 jn->readBackTrainedNetwork(trainedNetwork2);
1164
1165
1166
1167
1168
1169 if (epochWithMinimum!=0)
1170 {
1171 cronology << "Minimum stored from Epoch: " << epochWithMinimum << endl;
1172 } else
1173 {
1174 cronology << "Minimum not reached" << endl;
1175 }
1176
1177 cronology.close();
1178
1179 if (epochWithMinimum!=0)
1180 {
1181
1182 TString name=directory;
1183 name+="/Weights";
1184 name+=epochWithMinimum;
1185 name+=".root";
1186 std::cout << " reading back from minimum " << endl;
1187
1188
1189 TFile *_file0 = new TFile(name);
1190 TTrainedNetwork* trainedNetwork=(TTrainedNetwork*)_file0->Get("TTrainedNetwork");
1191
1192 cout << " Reading back network with minimum" << endl;
1193 jn->readBackTrainedNetwork(trainedNetwork);
1194
1195 TString nameFile=directory;
1196 nameFile+="/weightMinimum.root";
1197
1198 TFile* file=new TFile(nameFile,"recreate");
1199 trainedNetwork->Write();
1200 file->Write();
1201 file->Close();
1202 delete file;
1203
1204 cout << " -------------------- " << endl;
1205 cout << " Writing OUTPUT histos " << endl;
1206 TString histoFName=directory;
1207 histoFName+="/histoWeights.root";
1208
1209 TFile* fileHistos=new TFile(histoFName,"recreate");
1210 TNetworkToHistoTool histoTool;
1211 std::vector<TH1*> myHistos=histoTool.fromTrainedNetworkToHisto(trainedNetwork);
1212 std::vector<TH1*>::const_iterator histoBegin=myHistos.begin();
1213 std::vector<TH1*>::const_iterator histoEnd=myHistos.end();
1214 for (std::vector<TH1*>::const_iterator histoIter=histoBegin;
1215 histoIter!=histoEnd;++histoIter)
1216 {
1217 (*histoIter)->Write();
1218 }
1219 fileHistos->Write();
1220 fileHistos->Close();
1221 delete fileHistos;
1222
1223
1224
1225 }
1226 else
1227 {
1228 cout << " using network at last iteration (minimum not reached..." << endl;
1229 }
1230
1231 //here you should create the class... Still open how to deal with this...
1232 // char* myname=const_cast<char*>(static_cast<const char*>(outputclass));
1233 // ierr=mlpsavecf_(myname);
1234
1235 TString histoTName=directory;
1236 histoTName+="/trainingInfo.root";
1237
1238 TFile* histoFile=new TFile(histoTName,"recreate");
1239 histoTraining->Write();
1240 histoTesting->Write();
1241 histoFile->Write();
1242 histoFile->Close();
1243 delete histoFile;
1244
1245 TCanvas* trainingCanvas=new TCanvas("trainingCanvas","trainingCanvas");
1246 histoTraining->SetLineColor(2);
1247 histoTesting->SetLineColor(4);
1248
1249 histoTraining->GetYaxis()->SetRangeUser(minimumTrain,maximumTrain);
1250 histoTraining->Draw("l");
1251 histoTesting->Draw("lsame");
1252 TString canvasName=directory;
1253 canvasName+="/trainingCurve.eps";
1254 trainingCanvas->SaveAs(canvasName);
1255
1256
1257 TCanvas* mlpa_canvas = new TCanvas("jetnet_canvas","Network analysis");
1258 mlpa_canvas->Divide(2,4);
1259
1260
1261
1262}
Scalar phi() const
phi method
static Double_t P(Double_t *tt, Double_t *par)
double norm_pitch(const double input, bool addIBL=false)
double norm_posX(const double input, const bool recenter=false)
double norm_layerNumber(const double input)
double norm_thetaBS(const double input)
double norm_layerType(const double input)
double norm_ToT(const double input)
double back_posX(const double input, const bool recenter=false)
double back_posY(const double input)
double norm_posY(const double input)
double norm_phi(const double input)
double norm_phiBS(const double input)
double norm_theta(const double input)
double norm_etaModule(const double input)
Int_t GetPottsUnits()
Definition TJetNet.cxx:1197
Int_t GetInputDim(void) const
Definition TJetNet.h:54
void Init(void)
Definition TJetNet.cxx:670
Double_t GetLearningRateDecrease(void)
Definition TJetNet.cxx:1192
void SetMomentum(Double_t aValue)
Definition TJetNet.cxx:1118
Int_t GetUpdatingProcedure(void)
Definition TJetNet.cxx:1151
void SetLearningRateDecrease(Double_t aValue)
Definition TJetNet.cxx:1130
void SetErrorMeasure(Int_t aValue)
Definition TJetNet.cxx:1085
void SetOutputTestSet(Int_t aPatternInd, Int_t aOutputInd, Double_t aValue)
Definition TJetNet.h:196
Double_t GetMomentum(void)
Definition TJetNet.cxx:1182
void SetInitialWeightsWidth(Double_t aValue)
Definition TJetNet.cxx:1124
void SetEventWeightTestSet(Int_t aPatternInd, Double_t aValue)
Definition TJetNet.cxx:758
Int_t GetPatternsPerUpdate(void)
Definition TJetNet.cxx:1172
Double_t Test(void)
Definition TJetNet.cxx:320
Double_t GetInitialWeightsWidth(void)
Definition TJetNet.cxx:1187
Int_t GetHiddenLayerSize(Int_t number) const
Definition TJetNet.h:56
void SetInputTrainSet(Int_t aPatternInd, Int_t aInputInd, Double_t aValue)
Definition TJetNet.cxx:728
Int_t GetHiddenLayerDim(void) const
Definition TJetNet.h:55
void SetLearningRate(Double_t aValue)
Definition TJetNet.cxx:1111
Int_t GetUpdatesPerEpoch(void)
Definition TJetNet.cxx:1146
Int_t GetEpochs(void)
Definition TJetNet.h:78
void writeNetworkInfo(Int_t typeOfInfo=0)
Definition TJetNet.cxx:664
Double_t Train(void)
Definition TJetNet.cxx:618
void SetActivationFunction(Int_t aValue)
Definition TJetNet.cxx:1091
void ReadFromFile(TString aFileName="fort.8")
Definition TJetNet.cxx:962
void Shuffle(Bool_t aShuffleTrainSet=true, Bool_t aShuffleTestSet=true)
Definition TJetNet.cxx:1222
Int_t GetErrorMeasure(void)
Definition TJetNet.cxx:1156
void SetUpdatingProcedure(Int_t aValue)
Definition TJetNet.cxx:1078
void readBackTrainedNetwork(const TTrainedNetwork *)
Definition TJetNet.cxx:207
Double_t GetLearningRate(void)
Definition TJetNet.cxx:1177
void SetInputTestSet(Int_t aPatternInd, Int_t aInputInd, Double_t aValue)
Definition TJetNet.cxx:740
void SetUpdatesPerEpoch(Int_t aValue)
Definition TJetNet.cxx:1071
void SetEventWeightTrainSet(Int_t aPatternInd, Double_t aValue)
Definition TJetNet.cxx:752
void SetOutputTrainSet(Int_t aPatternInd, Int_t aOutputInd, Double_t aValue)
Definition TJetNet.cxx:734
TTrainedNetwork * createTrainedNetwork() const
Definition TJetNet.cxx:104
Int_t GetOutputDim(void) const
Definition TJetNet.h:57
Int_t GetActivationFunction(void) const
Definition TJetNet.cxx:1161
void SetPatternsPerUpdate(Int_t aValue)
Definition TJetNet.cxx:1105
std::vector< TH1 * > fromTrainedNetworkToHisto(TTrainedNetwork *) const
TTrainedNetwork * fromHistoToTrainedNetwork(std::vector< TH1 * > &) const
std::vector< Double_t > calculateOutputValues(std::vector< Double_t > &input) const
bool badTrackInfo(bool useTrackEstimate, double theta)
bool binIsFull(int binNumber, bool isX, double errValue, int nParticles, int numberBins)
bool isBadCluster(int sizeX, int nParticles)
int r
Definition globals.cxx:22
str directory
Definition DeMoScan.py:78
TH1F(name, title, nxbins, bins_par2, bins_par3=None, path='', **kwargs)
void sort(typename DataModel_detail::iterator< DVL > beg, typename DataModel_detail::iterator< DVL > end)
Specialization of sort for DataVector/List.
TFile * file