gROOT->SetStyle("Plain");

cout << "starting with settings: " << endl;
cout << " nIterations: " << nIterations << endl;
cout << " dilutionFactor: " << dilutionFactor << endl;
cout << " nodesFirstLayer: " << nodesFirstLayer << endl;
cout << " nodesSecondLayer: " << nodesSecondLayer << endl;
TChain *myChain = new TChain("Validation/NNinput");

// pick the input file list depending on whether the track estimate is used
if (!useTrackEstimate) {
#include "../files.txt"
}
if (useTrackEstimate) {
#include "../filesOnTrack.txt"
}

TChain* simu = myChain;

std::cout << " Training sample obtained... " << std::endl;
vector<int>            *NN_sizeX;
vector<int>            *NN_sizeY;
vector<vector<float> > *NN_matrixOfToT;
vector<vector<float> > *NN_vectorOfPitchesY;
vector<int>            *NN_ClusterPixLayer;
vector<int>            *NN_ClusterPixBarrelEC;
vector<float>          *NN_phiBS;
vector<float>          *NN_thetaBS;
vector<float>          *NN_etaModule;
vector<bool>           *NN_useTrackInfo;
vector<int>            *NN_columnWeightedPosition;
vector<int>            *NN_rowWeightedPosition;
vector<vector<float> > *NN_positionX;
vector<vector<float> > *NN_positionY;
vector<vector<float> > *NN_position_idX;
vector<vector<float> > *NN_position_idY;
vector<vector<float> > *NN_theta;
vector<vector<float> > *NN_phi;
// one TBranch pointer per input branch (only part of the list appears in this excerpt)
TBranch *b_NN_matrixOfToT;
TBranch *b_NN_vectorOfPitchesY;
TBranch *b_NN_ClusterPixLayer;
TBranch *b_NN_ClusterPixBarrelEC;
TBranch *b_NN_thetaBS;
TBranch *b_NN_etaModule;
TBranch *b_NN_useTrackInfo;
TBranch *b_NN_columnWeightedPosition;
TBranch *b_NN_rowWeightedPosition;
TBranch *b_NN_positionX;
TBranch *b_NN_positionY;
TBranch *b_NN_position_idX;
TBranch *b_NN_position_idY;
// zero the branch pointers before handing them to SetBranchAddress
// (only part of the list appears in this excerpt)
NN_vectorOfPitchesY = 0;
NN_ClusterPixLayer = 0;
NN_ClusterPixBarrelEC = 0;
NN_columnWeightedPosition = 0;
NN_rowWeightedPosition = 0;
simu->SetMakeClass(1);

simu->SetBranchAddress("NN_sizeX", &NN_sizeX, &b_NN_sizeX);
simu->SetBranchAddress("NN_sizeY", &NN_sizeY, &b_NN_sizeY);
simu->SetBranchAddress("NN_matrixOfToT", &NN_matrixOfToT, &b_NN_matrixOfToT);
simu->SetBranchAddress("NN_vectorOfPitchesY", &NN_vectorOfPitchesY, &b_NN_vectorOfPitchesY);
simu->SetBranchAddress("NN_ClusterPixLayer", &NN_ClusterPixLayer, &b_NN_ClusterPixLayer);
simu->SetBranchAddress("NN_ClusterPixBarrelEC", &NN_ClusterPixBarrelEC, &b_NN_ClusterPixBarrelEC);
simu->SetBranchAddress("NN_phiBS", &NN_phiBS, &b_NN_phiBS);
simu->SetBranchAddress("NN_thetaBS", &NN_thetaBS, &b_NN_thetaBS);
simu->SetBranchAddress("NN_etaModule", &NN_etaModule, &b_NN_etaModule);
simu->SetBranchAddress("NN_useTrackInfo", &NN_useTrackInfo, &b_NN_useTrackInfo);
simu->SetBranchAddress("NN_columnWeightedPosition", &NN_columnWeightedPosition, &b_NN_columnWeightedPosition);
simu->SetBranchAddress("NN_rowWeightedPosition", &NN_rowWeightedPosition, &b_NN_rowWeightedPosition);
simu->SetBranchAddress("NN_positionX", &NN_positionX, &b_NN_positionX);
simu->SetBranchAddress("NN_positionY", &NN_positionY, &b_NN_positionY);
simu->SetBranchAddress("NN_position_idX", &NN_position_idX, &b_NN_position_idX);
simu->SetBranchAddress("NN_position_idY", &NN_position_idY, &b_NN_position_idY);
simu->SetBranchAddress("NN_theta", &NN_theta, &b_NN_theta);
simu->SetBranchAddress("NN_phi", &NN_phi, &b_NN_phi);
cout << "Branches set..." << endl;

TString filterTrain("Entry$%");
filterTrain += dilutionFactor;

TString filterTest("Entry$%");
filterTest += dilutionFactor;
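// The "Entry$%<dilutionFactor>" selection splits the chain into interleaved
// subsamples: entries with remainder 0 are used for training and remainder 1 for
// testing, matching the iClus%dilutionFactor checks further down. (Any comparison
// suffix appended to these filter strings falls outside this excerpt.)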
cout << "First entry..." << endl;

// read the cluster sizes of the first entry; they define the input layout of the network
for (unsigned int clus = 0; clus < NN_sizeX->size(); clus++) {
  sizeX = (*NN_sizeX)[clus];
  sizeY = (*NN_sizeY)[clus];
}

cout << "Size obtained" << endl;

int numberinputs = sizeX*(sizeY+1)+4;
if (!useTrackEstimate) {
  numberinputs = sizeX*(sizeY+1)+5;
}
int numberoutputs = 2*nParticlesTraining;

if (nodesSecondLayer != 0) {
  // (switch to a four-layer topology; body not shown in this excerpt)
}

if (nodesSecondLayer != 0) {
  // (allocate nneurons for the larger topology; body not shown in this excerpt)
}

nneurons[0] = numberinputs;
nneurons[1] = nodesFirstLayer;

if (nodesSecondLayer != 0) {
  nneurons[2] = nodesSecondLayer;
  nneurons[3] = numberoutputs;
} else {
  nneurons[2] = numberoutputs;
}
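// Resulting topology, as far as this excerpt shows: sizeX*(sizeY+1)+4 input nodes
// (one more without the track estimate), one or two hidden layers of
// nodesFirstLayer / nodesSecondLayer nodes, and 2*nParticlesTraining output nodes,
// i.e. one x and one y position per particle.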
for (int i = 0; i < nlayer; i++) {
  cout << " layer i: " << i << " number neurons: " << nneurons[i] << endl;
}
float trainingError(0);

cout << " now providing training events " << endl;

Int_t numberTrainingEvents = 0;
Int_t numberTestingEvents = 0;
int totalN = simu->GetEntries();

// first pass over the sample: count the training and testing patterns
for (Int_t i = 0; i < totalN; i++) {

  if (i % 50000 == 0) {
    std::cout << " Counting training / testing events in sample. Looping over event " << i << std::endl;
  }
  for (unsigned int clus = 0; clus < NN_sizeX->size(); clus++) {

    vector<float> *matrixOfToT = 0;
    vector<float> *vectorOfPitchesY = 0;

    Int_t ClusterPixLayer;
    Int_t ClusterPixBarrelEC;

    std::vector<float> *positionX = 0;
    std::vector<float> *positionY = 0;
    std::vector<float> *thetaTr = 0;
    std::vector<float> *phiTr = 0;

    std::vector<float> positionX_reorder;
    std::vector<float> positionY_reorder;

    sizeX = (*NN_sizeX)[clus];
    positionX = &(*NN_positionX)[clus];
    int nParticles = positionX->size();

    thetaTr = &(*NN_theta)[clus];
    phiTr = &(*NN_phi)[clus];

    // only clusters with the requested particle multiplicity are used
    if (nParticlesTraining != nParticles) {
      // (body not shown in this excerpt)
    }

    for (unsigned int P = 0; P < positionX->size(); P++) {
      double theta = (*thetaTr)[P];
      // ... (per-particle checks not shown in this excerpt)
    }

    // clusters with remainder 0 go to training, remainder 1 to testing
    if (iClus%dilutionFactor==0) numberTrainingEvents += 1;
    if (iClus%dilutionFactor==1) numberTestingEvents += 1;

    if (iClus%dilutionFactor==1 && nParticles==1) part_1++;
    if (iClus%dilutionFactor==1 && nParticles==2) part_2++;
    if (iClus%dilutionFactor==1 && nParticles==3) part_3++;
// ... (the counting loops close here)

cout << " N. training events: " << numberTrainingEvents
     << " N. testing events: " << numberTestingEvents << endl;
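// The counts from this first pass are used below to construct the TJetNet object,
// which needs the numbers of training and testing patterns up front.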
cout << "now start to setup the network..." << endl;

TJetNet* jn = new TJetNet(numberTestingEvents, numberTrainingEvents, nlayer, nneurons);

cout << " setting learning method... " << endl;

jn->SetUpdatesPerEpoch((int)std::floor((float)numberTrainingEvents / (float)nPatternsPerUpdate));
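// With nPatternsPerUpdate patterns per weight update, this makes one epoch correspond
// to (roughly) a full pass over the training sample:
// floor(numberTrainingEvents / nPatternsPerUpdate) updates per epoch.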
cout << " setting pattern for training events " << endl;

int trainSampleNumber = 0;
int testSampleNumber = 1;

cout << " copying over training events " << endl;

vector<double> inputVar;
vector<double> outputVar;
// second pass over the sample: copy the patterns into the network
for (Int_t i = 0; i < totalN; i++) {

  if (i % 100000 == 0) {
    std::cout << " Copying over training events. Looping over event " << i << std::endl;
  }
  for (unsigned int clus = 0; clus < NN_sizeX->size(); clus++) {

    vector<float> *matrixOfToT = 0;
    vector<float> *vectorOfPitchesY = 0;

    Int_t ClusterPixLayer;
    Int_t ClusterPixBarrelEC;

    std::vector<float> *position_idX = 0;
    std::vector<float> *position_idY = 0;
    std::vector<float> *thetaTr = 0;
    std::vector<float> *phiTr = 0;

    sizeX = (*NN_sizeX)[clus];
    sizeY = (*NN_sizeY)[clus];
    matrixOfToT = &(*NN_matrixOfToT)[clus];
    vectorOfPitchesY = &(*NN_vectorOfPitchesY)[clus];

    phiBS = (*NN_phiBS)[clus];
    thetaBS = (*NN_thetaBS)[clus];
    etaModule = (*NN_etaModule)[clus];

    ClusterPixLayer = (*NN_ClusterPixLayer)[clus];
    ClusterPixBarrelEC = (*NN_ClusterPixBarrelEC)[clus];

    position_idX = &(*NN_position_idX)[clus];
    position_idY = &(*NN_position_idY)[clus];

    int nParticles = position_idX->size();

    if (nParticlesTraining != nParticles) {
      // (body not shown in this excerpt)
    }

    thetaTr = &(*NN_theta)[clus];
    phiTr = &(*NN_phi)[clus];
    for (unsigned int P = 0; P < position_idX->size(); P++) {

      double theta = (*thetaTr)[P];
      double phi   = (*phiTr)[P];

      if (ClusterPixBarrelEC == 2) {
        // (special handling of the angles for this endcap side; body not shown in this excerpt)
      }

      // sanity check on the ToT matrix size
      if (matrixOfToT->size() != sizeX*sizeY) {
        std::cout << " Event: " << i << " PROBLEM: size Y is: " << matrixOfToT->size() << std::endl;
        throw std::runtime_error("Error in positions/trainNN.cxx");
      }
      for (unsigned int ME = 0; ME < matrixOfToT->size(); ME++) {

        // a value that differs from itself can only be NaN
        if ((*matrixOfToT)[ME] != (*matrixOfToT)[ME]) {
          cout << "ME n. " << ME << " is: " << (*matrixOfToT)[ME] << endl;
          throw std::runtime_error("Error in positions/trainNN.cxx");
        }

        if (counter0 == 0) std::cout << " element: " << ME << " ToT set to: " << norm_ToT((*matrixOfToT)[ME]) << std::endl;

        if (iClus%dilutionFactor==1 && counter1==0) inputVar.push_back(norm_ToT((*matrixOfToT)[ME]));
      }  // end of loop over ToT matrix elements
      for (int s = 0; s < sizeY; s++) {

        if ((*vectorOfPitchesY)[s] != (*vectorOfPitchesY)[s]) {
          cout << " pitchY: " << (*vectorOfPitchesY)[s] << endl;
          throw std::runtime_error("Error in positions/trainNN.cxx");
        }

        if (counter0 == 0) std::cout << " s: " << s << " pitch set to: " << norm_pitch((*vectorOfPitchesY)[s]) << std::endl;

        if (iClus%dilutionFactor==1 && counter1==0) inputVar.push_back(norm_pitch((*vectorOfPitchesY)[s]));
      }  // end of loop over pitches
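      // As far as this excerpt shows, each input pattern so far holds sizeX*sizeY
      // normalised ToT values followed by sizeY normalised pitches; the remaining
      // inputs (layer information and, depending on useTrackEstimate, angle/module
      // variables) are appended by the code that follows.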
      if (ClusterPixLayer != ClusterPixLayer || ClusterPixBarrelEC != ClusterPixBarrelEC) {
        cout << " ClusterPixLayer: " << ClusterPixLayer << " ClusterPixBarrelEC " << ClusterPixBarrelEC << endl;
        throw std::runtime_error("Error in positions/trainNN.cxx");
      }

      if (iClus%dilutionFactor==1 && counter1==0) {
        // (store the normalised layer inputs in the test pattern; body not shown in this excerpt)
      }

      if (counter0 == 0) std::cout << " ClusterPixLayer " << norm_layerNumber(ClusterPixLayer)
                                   << " ClusterPixBarrelEC " << norm_layerType(ClusterPixBarrelEC) << std::endl;
      if (useTrackEstimate) {
        // NaN guards on the track angles (the guard conditions themselves are inferred;
        // they fall in an elided part of this listing)
        if (phi != phi) {
          cout << " phi: " << phi << endl;
          throw std::runtime_error("Error in positions/trainNN.cxx");
        }
        if (theta != theta) {
          cout << " theta: " << theta << endl;
          throw std::runtime_error("Error in positions/trainNN.cxx");
        }
        // ... (remainder of the track-estimate branch not shown in this excerpt)
      }
      // (filling of the remaining inputs for the with-track and without-track cases;
      // only these fragments appear in this excerpt)
      if (iClus%dilutionFactor==1 && counter1==0) {
        // ...
      }

      if (iClus%dilutionFactor==1 && counter1==0) {
        // ...
      }

      // if (counter0 == 0) std::cout << ...   (debug printout; truncated in this excerpt)
      // sort the true x positions so the output nodes are filled in a well-defined order
      vector<float> xPositions = *position_idX;
      std::sort(xPositions.begin(), xPositions.end());

      for (int o = 0; o < (int)xPositions.size(); o++) {

        if (xPositions[o] != xPositions[o]) {
          cout << "pos: " << xPositions[o] << endl;
          throw std::runtime_error("Error in positions/trainNN.cxx");
        }

        if (counter0 == 0) std::cout << " output node: " << 2*o << " set to: " << norm_posX(xPositions[o]) << endl;

        if (iClus%dilutionFactor==1 && counter1==0) { outputVar.push_back(norm_posX(xPositions[o])); }
        // find the y position belonging to this (sorted) x position
        double corry = -1000.;  // sentinel value (the initialisation itself is elided in this excerpt)

        for (int e = 0; e < nParticles; e++) {
          if (fabs((*position_idX)[e] - xPositions[o]) < 1e-10) {
            if (fabs(corry + 1000) > 1e-6) {
              cout << " Value found more than once! " << endl;
              for (int p = 0; p < (int)xPositions.size(); p++) {
                cout << " X n. : " << p << " is: " << xPositions[p] << endl;
              }
            }
            corry = (*position_idY)[e];
          }
        }

        if (fabs(corry + 1000) < 1e-6) {
          cout << " could not find original X pos. " << endl;
          throw std::runtime_error("Error in positions/trainNN.cxx");
        }
        if (corry != corry) {  // NaN guard (the condition is inferred; it falls in an elided part of this listing)
          cout << " posY " << corry << endl;
          throw std::runtime_error("Error in positions/trainNN.cxx");
        }

        if (counter0 == 0) std::cout << " output node: " << 2*o+1 << " set to: " << norm_posY(corry) << endl;
        if (iClus%dilutionFactor==1 && counter1==0) { outputVar.push_back(norm_posY(corry)); }
      // ... (the inner loops close here)

      if (iClus%dilutionFactor==0) { counter0 += 1; }
      if (iClus%dilutionFactor==1) { counter1 += 1; }

// ... (the loops over clusters and events close here)

// cross-check that the two passes over the sample agree
if (counter0 != numberTrainingEvents) {
  cout << " counter up to: " << counter0 << " while events in training sample are " << numberTrainingEvents << endl;
}

if (counter1 != numberTestingEvents) {
  cout << " counter up to: " << counter1 << " while events in testing sample are " << numberTestingEvents << endl;
}
if (restartTrainingFrom == 0) {
  // (start the training from scratch; body not shown in this excerpt)
} else {
  TString name("Weights");
  name += restartTrainingFrom;
  // (read the stored weights back; remainder not shown in this excerpt)
}

float minimumError = 1e10;
int epochesWithRisingError = 0;
int epochWithMinimum = 0;
// write the training settings and history to the cronology text file
if (useTrackEstimate) {
  // (the output directory depends on whether track information is used; body not shown in this excerpt)
}

nameCronology += "/trainingCronology.txt";

cronology << "-------------SETTINGS----------------" << endl;
cronology << "Epochs: " << jn->GetEpochs() << std::endl;
cronology << "Momentum: " << jn->GetMomentum() << std::endl;

cronology << "-------------LAYOUT------------------" << endl;
cronology << "Input variables: " << jn->GetInputDim() << endl;
cronology << "Output variables: " << jn->GetOutputDim() << endl;
cronology << "Layout : ";
// (loop over the layers, separating the node counts with dashes; only this fragment is shown)
if (s < jn->GetHiddenLayerDim()+1) cronology << "-";

cronology << "--------------HISTORY-----------------" << endl;
cronology << "History of iterations: " << endl;
TH1F* histoTraining = new TH1F("training", "training",
                               (int)std::floor((float)nIterations/10.+0.5),
                               1, std::floor((float)nIterations/10.+1.5));
TH1F* histoTesting  = new TH1F("testing", "testing",
                               (int)std::floor((float)nIterations/10.+0.5),
                               1, std::floor((float)nIterations/10.+1.5));

double maximumTrain = 0;
double minimumTrain = 1e10;
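// The error histograms have one bin per ten epochs, matching the epoch%10==0
// logging interval used in the training loop below.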
for (int epoch = restartTrainingFrom+1; epoch <= nIterations; ++epoch) {

  if (epoch != restartTrainingFrom+1) {
    trainingError = jn->Train();
  }

  // every 10th epoch (and at the very first one) evaluate the test sample and log the errors
  if (epoch%10==0 || epoch==restartTrainingFrom+1) {

    cronology.open(nameCronology, ios_base::app);

    testError = jn->Test();

    if (trainingError > maximumTrain) maximumTrain = trainingError;
    if (testError > maximumTrain) maximumTrain = testError;
    if (trainingError < minimumTrain) minimumTrain = trainingError;
    if (testError < minimumTrain) minimumTrain = testError;

    histoTraining->Fill(epoch/10., trainingError);
    histoTesting->Fill(epoch/10., testError);

    if (testError < minimumError) {
      minimumError = testError;
      epochesWithRisingError = 0;
      epochWithMinimum = epoch;
    } else {
      epochesWithRisingError += 10;
    }

    // stop if the test error has not improved for more than 300 epochs
    if (epochesWithRisingError > 300) {
      if (trainingError < minimumError) {
        cout << " End of training. Minimum already on epoch: " << epochWithMinimum << endl;
        cronology << " End of training. Minimum already on epoch: " << epochWithMinimum << endl;
        // (leave the training loop; the break itself is not shown in this excerpt)
      }
    }

    cronology << "Epoch: [" << epoch <<
      "] Error: " << trainingError <<
      " Test: " << testError << endl;

    cout << "Epoch: [" << epoch <<
      "] Error: " << trainingError <<
      " Test: " << testError << endl;
    // write a snapshot of the current network
    TFile* file = new TFile(name, "recreate");

    for (int z = 0; z < nParticlesTraining; z++) {
      // (body not shown in this excerpt)
    }

    // compare the network response on the stored test pattern with its target values
    for (int z = 0; z < nParticlesTraining; z++) {
      std::cout << "output TTNet " << z
                << " x: " << myTestOutput[2*z]
                << " y: " << myTestOutput[2*z+1] << endl;
    }

    for (int z = 0; z < nParticlesTraining; z++) {
      std::cout << "should be " << z
                << " x: " << outputVar[2*z]
                << " y: " << outputVar[2*z+1] << endl;
    }
    trainedNetwork->Write();

    // ... (only scattered printouts from the snapshot / read-back code appear in this excerpt)
    cout << " create Trained Network object..." << endl;

    cout << " Now getting histograms from trainingResult" << endl;
    cronology << " Now getting histograms from trainingResult" << endl;

    cout << " From network to histo..." << endl;
    cout << " From histo to network back..." << endl;
    cout << " reading back " << endl;
// ... (end of the training loop)

if (epochWithMinimum != 0) {
  cronology << "Minimum stored from Epoch: " << epochWithMinimum << endl;
} else {
  cronology << "Minimum not reached" << endl;
}

if (epochWithMinimum != 0) {

  name += epochWithMinimum;

  std::cout << " reading back from minimum " << endl;

  TFile *_file0 = new TFile(name);

  cout << " Reading back network with minimum" << endl;
  trainedNetwork->Write();

  cout << " -------------------- " << endl;
  cout << " Writing OUTPUT histos " << endl;

  histoFName += "/histoWeights.root";

  TFile* fileHistos = new TFile(histoFName, "recreate");

  std::vector<TH1*>::const_iterator histoBegin = myHistos.begin();
  std::vector<TH1*>::const_iterator histoEnd = myHistos.end();
  for (std::vector<TH1*>::const_iterator histoIter = histoBegin;
       histoIter != histoEnd; ++histoIter) {
    (*histoIter)->Write();
  }
  fileHistos->Write();
  fileHistos->Close();
} else {
  cout << " using network at last iteration (minimum not reached)..." << endl;
  // (remainder of this branch not shown in this excerpt)
}

histoTName += "/trainingInfo.root";

TFile* histoFile = new TFile(histoTName, "recreate");
histoTraining->Write();
histoTesting->Write();
TCanvas* trainingCanvas = new TCanvas("trainingCanvas", "trainingCanvas");
histoTraining->SetLineColor(2);
histoTesting->SetLineColor(4);
histoTraining->GetYaxis()->SetRangeUser(minimumTrain, maximumTrain);
histoTraining->Draw("l");
histoTesting->Draw("lsame");

canvasName += "/trainingCurve.eps";
trainingCanvas->SaveAs(canvasName);

TCanvas* mlpa_canvas = new TCanvas("jetnet_canvas", "Network analysis");
mlpa_canvas->Divide(2,4);