gROOT->SetStyle("Plain");

cout << "starting with settings: " << endl;
cout << "  nIterations: " << nIterations << endl;
cout << "  dilutionFactor: " << dilutionFactor << endl;
cout << "  nodesFirstLayer: " << nodesFirstLayer << endl;
cout << "  nodesSecondLayer: " << nodesSecondLayer << endl;

// Build the input chain from the appropriate file list.
TChain *myChain = new TChain("Validation/NNinput");

if (!useTrackEstimate) {
#include "../files.txt"
}

if (useTrackEstimate) {
#include "../filesOnTrack.txt"
}

TChain* simu = myChain;

std::cout << " Training sample obtained... " << std::endl;
vector<int> *NN_sizeX;
vector<int> *NN_sizeY;
vector<vector<float> > *NN_matrixOfToT;
vector<vector<float> > *NN_vectorOfPitchesY;
vector<int> *NN_ClusterPixLayer;
vector<int> *NN_ClusterPixBarrelEC;
vector<float> *NN_phiBS;
vector<float> *NN_thetaBS;
vector<float> *NN_etaModule;
vector<bool> *NN_useTrackInfo;
vector<int> *NN_columnWeightedPosition;
vector<int> *NN_rowWeightedPosition;
vector<double> *NN_localColumnWeightedPosition;
vector<double> *NN_localRowWeightedPosition;
vector<vector<float> > *NN_positionX;
vector<vector<float> > *NN_positionY;
vector<vector<float> > *NN_position_idX;
vector<vector<float> > *NN_position_idY;
vector<vector<float> > *NN_theta;
vector<vector<float> > *NN_phi;
TBranch *b_NN_sizeX;
TBranch *b_NN_sizeY;
TBranch *b_NN_matrixOfToT;
TBranch *b_NN_vectorOfPitchesY;
TBranch *b_NN_ClusterPixLayer;
TBranch *b_NN_ClusterPixBarrelEC;
TBranch *b_NN_phiBS;
TBranch *b_NN_thetaBS;
TBranch *b_NN_etaModule;
TBranch *b_NN_useTrackInfo;
TBranch *b_NN_columnWeightedPosition;
TBranch *b_NN_rowWeightedPosition;
TBranch *b_NN_localColumnWeightedPosition;
TBranch *b_NN_localRowWeightedPosition;
TBranch *b_NN_positionX;
TBranch *b_NN_positionY;
TBranch *b_NN_position_idX;
TBranch *b_NN_position_idY;
TBranch *b_NN_theta;
TBranch *b_NN_phi;
// Zero-initialise the branch buffers before attaching them to the chain
// (the remaining NN_* pointers are reset in the same way).
NN_vectorOfPitchesY = 0;
NN_ClusterPixLayer = 0;
NN_ClusterPixBarrelEC = 0;
NN_columnWeightedPosition = 0;
NN_rowWeightedPosition = 0;
NN_localColumnWeightedPosition = 0;
NN_localRowWeightedPosition = 0;
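// Attach each branch of the NNinput tree to its local buffer.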
simu->SetMakeClass(1);

simu->SetBranchAddress("NN_sizeX", &NN_sizeX, &b_NN_sizeX);
simu->SetBranchAddress("NN_sizeY", &NN_sizeY, &b_NN_sizeY);
simu->SetBranchAddress("NN_matrixOfToT", &NN_matrixOfToT, &b_NN_matrixOfToT);
simu->SetBranchAddress("NN_vectorOfPitchesY", &NN_vectorOfPitchesY, &b_NN_vectorOfPitchesY);
simu->SetBranchAddress("NN_ClusterPixLayer", &NN_ClusterPixLayer, &b_NN_ClusterPixLayer);
simu->SetBranchAddress("NN_ClusterPixBarrelEC", &NN_ClusterPixBarrelEC, &b_NN_ClusterPixBarrelEC);
simu->SetBranchAddress("NN_phiBS", &NN_phiBS, &b_NN_phiBS);
simu->SetBranchAddress("NN_thetaBS", &NN_thetaBS, &b_NN_thetaBS);
simu->SetBranchAddress("NN_etaModule", &NN_etaModule, &b_NN_etaModule);
simu->SetBranchAddress("NN_useTrackInfo", &NN_useTrackInfo, &b_NN_useTrackInfo);
simu->SetBranchAddress("NN_columnWeightedPosition", &NN_columnWeightedPosition, &b_NN_columnWeightedPosition);
simu->SetBranchAddress("NN_rowWeightedPosition", &NN_rowWeightedPosition, &b_NN_rowWeightedPosition);
simu->SetBranchAddress("NN_localColumnWeightedPosition", &NN_localColumnWeightedPosition, &b_NN_localColumnWeightedPosition);
simu->SetBranchAddress("NN_localRowWeightedPosition", &NN_localRowWeightedPosition, &b_NN_localRowWeightedPosition);
simu->SetBranchAddress("NN_positionX", &NN_positionX, &b_NN_positionX);
simu->SetBranchAddress("NN_positionY", &NN_positionY, &b_NN_positionY);
simu->SetBranchAddress("NN_position_idX", &NN_position_idX, &b_NN_position_idX);
simu->SetBranchAddress("NN_position_idY", &NN_position_idY, &b_NN_position_idY);
simu->SetBranchAddress("NN_theta", &NN_theta, &b_NN_theta);
simu->SetBranchAddress("NN_phi", &NN_phi, &b_NN_phi);

cout << "Branches set..." << endl;
if (nParticlesTraining == 1) name += "WeightsOneTracks.root";
if (nParticlesTraining == 2) name += "WeightsTwoTracks.root";
if (nParticlesTraining == 3) name += "WeightsThreeTracks.root";

if (!useTrackEstimate)
{
  name.ReplaceAll(".root", "_noTrack.root");
}

TFile *_file0 = new TFile(name);
// ...

cout << " Reading back network with minimum" << endl;

TString filterTrain("Entry$%");
filterTrain += dilutionFactor;

TString filterTest("Entry$%");
filterTest += dilutionFactor;
cout << "First entry..." << endl;

// Read the cluster sizes from the first entry.
for (unsigned int clus = 0; clus < NN_sizeX->size(); clus++) {
  sizeX = (*NN_sizeX)[clus];
  sizeY = (*NN_sizeY)[clus];
}

cout << "Size obtained" << endl;

int numberinputs = sizeX*(sizeY+1)+4+nParticlesTraining*2;
if (!useTrackEstimate)
{
  numberinputs = sizeX*(sizeY+1)+5+nParticlesTraining*2;
}

int numberoutputs = nParticlesTraining*numberBinsErrorEstimate;

// Network layout (nlayer layers, sizes in nneurons[]).
if (nodesSecondLayer != 0)
{
  // ...
}

if (nodesSecondLayer != 0)
{
  // ...
}

nneurons[0] = numberinputs;
nneurons[1] = nodesFirstLayer;

if (nodesSecondLayer != 0)
{
  nneurons[2] = nodesSecondLayer;
  nneurons[3] = numberoutputs;
}
else
{
  nneurons[2] = numberoutputs;
}

for (int i = 0; i < nlayer; i++)
{
  cout << " layer i: " << i << " number neurons: " << nneurons[i] << endl;
}

float trainingError(0);
float testError(0);

cout << " now providing training events " << endl;
Long64_t numberTrainingEvents = 0;
Long64_t numberTestingEvents = 0;

int nTotal = simu->GetEntries();

for (Int_t i = 0; i < nTotal; i++) {

  if (i % 1000000 == 0) {
    std::cout << " Counting training / testing events in sample. Looping over event " << i << std::endl;
  }

  simu->GetEntry(i);

  for (unsigned int clus = 0; clus < NN_sizeX->size(); clus++) {

    vector<float> *matrixOfToT = 0;
    vector<float> *vectorOfPitchesY = 0;

    Int_t ClusterPixLayer;
    Int_t ClusterPixBarrelEC;

    std::vector<float> *positionX = 0;
    std::vector<float> *positionY = 0;
    std::vector<float> *thetaTr = 0;
    std::vector<float> *phiTr = 0;

    int sizeX = (*NN_sizeX)[clus];
    positionX = &(*NN_positionX)[clus];
    int nParticles = positionX->size();

    if (nParticlesTraining != nParticles)
    {
      // ...
    }

    thetaTr = &(*NN_theta)[clus];
    phiTr = &(*NN_phi)[clus];

    for (unsigned int P = 0; P < positionX->size(); P++) {
      double theta = (*thetaTr)[P];
      // ...
    }

    if (iClus%dilutionFactor == 0) numberTrainingEvents += 1;
    if (iClus%dilutionFactor == 1) numberTestingEvents += 1;

    if (iClus%dilutionFactor == 1 && nParticles == 1) part_1++;
    if (iClus%dilutionFactor == 1 && nParticles == 2) part_2++;
    if (iClus%dilutionFactor == 1 && nParticles == 3) part_3++;

    // ...
  }
}

cout << " N. training events: " << numberTrainingEvents
     << " N. testing events: " << numberTestingEvents << endl;

cout << "now start to setup the network..." << endl;
TJetNet* jn = new TJetNet(numberTestingEvents, numberTrainingEvents, nlayer, nneurons);

cout << " setting learning method... " << endl;

jn->SetUpdatesPerEpoch((int)std::floor((float)numberTrainingEvents/(float)nPatternsPerUpdate));
// ...

TH1F* histoControlTestX = new TH1F("histoControlTestX", "histoControlTestX",
                                   numberBinsErrorEstimate, 0, numberBinsErrorEstimate);
TH1F* histoControlTestY = new TH1F("histoControlTestY", "histoControlTestY",
                                   numberBinsErrorEstimate, 0, numberBinsErrorEstimate);

int trainSampleNumber = 0;
int testSampleNumber = 1;

cout << " copying over training events " << endl;
for (Int_t i = 0; i < nTotal; i++) {

  if (i % 1000 == 0) {
    std::cout << " Copying over training events. Looping over event " << i << std::endl;
  }

  simu->GetEntry(i);

  for (unsigned int clus = 0; clus < NN_sizeX->size(); clus++) {

    vector<float> *matrixOfToT = 0;
    vector<float> *vectorOfPitchesY = 0;

    Int_t ClusterPixLayer;
    Int_t ClusterPixBarrelEC;

    std::vector<float> *positionX = 0;
    std::vector<float> *positionY = 0;

    std::vector<float> positionX_reorder;
    std::vector<float> positionY_reorder;

    std::vector<float> *thetaTr = 0;
    std::vector<float> *phiTr = 0;

    double localColumnWeightedPosition;
    double localRowWeightedPosition;

    sizeX = (*NN_sizeX)[clus];
    sizeY = (*NN_sizeY)[clus];

    matrixOfToT = &(*NN_matrixOfToT)[clus];
    vectorOfPitchesY = &(*NN_vectorOfPitchesY)[clus];

    phiBS = (*NN_phiBS)[clus];
    thetaBS = (*NN_thetaBS)[clus];
    etaModule = (*NN_etaModule)[clus];

    ClusterPixLayer = (*NN_ClusterPixLayer)[clus];
    ClusterPixBarrelEC = (*NN_ClusterPixBarrelEC)[clus];

    positionX = &(*NN_positionX)[clus];
    positionY = &(*NN_positionY)[clus];

    positionX_reorder = *positionX;
    positionY_reorder.clear();

    thetaTr = &(*NN_theta)[clus];
    phiTr = &(*NN_phi)[clus];

    localColumnWeightedPosition = (*NN_localColumnWeightedPosition)[clus];
    localRowWeightedPosition = (*NN_localRowWeightedPosition)[clus];

    int nParticles = positionX->size();
    if (nParticlesTraining != nParticles)
    {
      // ...
    }

    // Sort the X positions and reorder the Y positions to match.
    std::sort(positionX_reorder.begin(), positionX_reorder.end());

    for (int o = 0; o < positionX->size(); o++)
    {
      double corry = -1000;
      for (int e = 0; e < positionX->size(); e++)
      {
        if (fabs(positionX_reorder[o]-(*positionX)[e]) < 1e-10)
        {
          if (fabs(corry+1000) > 1e-6)
          {
            cout << " Value find more than once! " << endl;
            for (int p = 0; p < positionX->size(); p++)
            {
              cout << " X n. : " << p << " is: " << (*positionX)[p] << endl;
            }
          }
          corry = (*positionY)[e];
        }
      }
      positionY_reorder.push_back(corry);
    }

    for (unsigned int P = 0; P < positionX->size(); P++) {

      double theta = (*thetaTr)[P];
      double phi = (*phiTr)[P];

      if (ClusterPixBarrelEC == 2)
      {
        // ...
      }
      // ...
    }
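    // Assemble the per-cluster input vector (normalised ToT entries, then
    // normalised pitches) and convert the network output resultNN back into
    // per-particle position estimates.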
    std::vector<Double_t> inputData;

    for (unsigned int ME = 0; ME < matrixOfToT->size(); ME++) {
      inputData.push_back(norm_ToT((*matrixOfToT)[ME]));
    }

    for (int s = 0; s < sizeY; s++)
    {
      inputData.push_back(norm_pitch((*vectorOfPitchesY)[s]));
    }
    // ...

    if (useTrackEstimate)
    {
      // ...
    }
    // ... (resultNN is computed from inputData here)

    vector<double> outputNN_idX;
    vector<double> outputNN_idY;

    if (nParticlesTraining == 1) {
      outputNN_idX.push_back(back_posX(resultNN[0]));
      outputNN_idY.push_back(back_posY(resultNN[1]));
    }

    if (nParticlesTraining == 2) {
      outputNN_idX.push_back(back_posX(resultNN[0]));
      outputNN_idX.push_back(back_posX(resultNN[2]));
      outputNN_idY.push_back(back_posY(resultNN[1]));
      outputNN_idY.push_back(back_posY(resultNN[3]));
    }

    if (nParticlesTraining == 3) {
      outputNN_idX.push_back(back_posX(resultNN[0]));
      outputNN_idX.push_back(back_posX(resultNN[2]));
      outputNN_idX.push_back(back_posX(resultNN[4]));
      outputNN_idY.push_back(back_posY(resultNN[1]));
      outputNN_idY.push_back(back_posY(resultNN[3]));
      outputNN_idY.push_back(back_posY(resultNN[5]));
    }

    // Translate the pixel-index estimates into local positions.
    vector<float> outputNN_X;
    vector<float> outputNN_Y;

    for (unsigned int t = 0; t < outputNN_idX.size(); t++) {

      double centerPosY = localColumnWeightedPosition;
      double centerPosX = localRowWeightedPosition;

      double indexX = outputNN_idX[t];
      double indexY = outputNN_idY[t];
      double indexPositionToTranslateY = indexY + (double)(sizeY-1)/2;

      double pos_idX = centerPosX + (double)indexX * PitchX;

      double positionYFromZero = -100;
      double positionCenterYFromZero = -100;
      double actualPositionFromZero = 0.;

      for (int i = 0; i < sizeY; i++)
      {
        if (indexPositionToTranslateY >= (double)i && indexPositionToTranslateY <= (double)(i+1))
        {
          positionYFromZero = actualPositionFromZero
            + (double)(indexPositionToTranslateY-(double)i+0.5)*(*vectorOfPitchesY)[i];
        }
        // ...
        positionCenterYFromZero = actualPositionFromZero + 0.5*(*vectorOfPitchesY)[i];
        // ...
        actualPositionFromZero += (*vectorOfPitchesY)[i];
      }

      double pos_idY = centerPosY + positionYFromZero - positionCenterYFromZero;

      // ...
      outputNN_X.push_back(pos_idX);
      outputNN_Y.push_back(pos_idY);
    }
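    // Fill the JETNET input pattern for this cluster: counter0 indexes the
    // training patterns, counter1 the testing patterns (selected by
    // iClus%dilutionFactor).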
    if (matrixOfToT->size() != sizeX*sizeY)
    {
      std::cout << " Event: " << i << " PROBLEM: size Y is: " << matrixOfToT->size() << std::endl;
      throw std::runtime_error("Error in errors/trainNN.cxx");
    }

    for (unsigned int ME = 0; ME < matrixOfToT->size(); ME++) {
      // ...
      if (counter1 == 0) std::cout << " element: " << ME
                                   << " ToT set to: " << norm_ToT((*matrixOfToT)[ME]) << std::endl;
    }

    for (int s = 0; s < sizeY; s++)
    {
      // ...
      if (counter0 == 0) std::cout << " s: " << s
                                   << " pitch set to: " << norm_pitch((*vectorOfPitchesY)[s]) << std::endl;
    }

    // ...
    if (counter0 == 0) std::cout << " ClusterPixLayer " << norm_layerNumber(ClusterPixLayer)
                                 << " ClusterPixBarrelEC " << norm_layerType(ClusterPixBarrelEC) << std::endl;

    if (useTrackEstimate)
    {
      // ...
    }

    if (counter0 == 0) std::cout << /* ... */ std::endl;

    int addNumber = 5;
    if (useTrackEstimate) addNumber = 4;

    // Append the NN position estimates as the last inputs.
    for (int o = 0; o < nParticlesTraining; o++)
    {
      if (iClus%dilutionFactor == 0) jn->SetInputTrainSet(counter0, (sizeX+1)*sizeY+addNumber+2*o, norm_posX(outputNN_idX[o]));
      if (iClus%dilutionFactor == 0) jn->SetInputTrainSet(counter0, (sizeX+1)*sizeY+addNumber+2*o+1, norm_posY(outputNN_idY[o]));
      if (iClus%dilutionFactor == 1) jn->SetInputTestSet(counter1, (sizeX+1)*sizeY+addNumber+2*o, norm_posX(outputNN_idX[o]));
      if (iClus%dilutionFactor == 1) jn->SetInputTestSet(counter1, (sizeX+1)*sizeY+addNumber+2*o+1, norm_posY(outputNN_idY[o]));

      if (counter0 == 0) std::cout << " n. " << o
                                   << " posX: " << norm_posX((outputNN_idX)[o])
                                   << " posY: " << norm_posY((outputNN_idY)[o]) << std::endl;
    }
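    // Build the classification targets: for each particle, binIsFull() decides
    // which of the numberBinsErrorEstimate error bins the residual between the
    // NN position and the true position falls into, separately for X and Y.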
    for (int r = 0; r < nParticlesTraining; r++)
    {
      // ...
      for (int u = 0; u < numberBinsErrorEstimate; u++)
      {
        double nValueFull = 0;
        bool full = binIsFull(u, true, (outputNN_X[r]-positionX_reorder[r]),
                              nParticlesTraining, numberBinsErrorEstimate);
        if (full) nValueFull = 1;

        if (iClus%dilutionFactor == 0) jn->SetOutputTrainSet(counter0, r*numberBinsErrorEstimate+u, nValueFull);
        if (iClus%dilutionFactor == 1) jn->SetOutputTestSet(counter1, r*numberBinsErrorEstimate+u, nValueFull);

        if (counter0 == 0) std::cout << " X bin: " << u
                                     << " gl: " << r*2*numberBinsErrorEstimate+u
                                     << " val: " << nValueFull;
        histoControlTestX->Fill(u+1, nValueFull);
      }

      for (int u = 0; u < numberBinsErrorEstimate; u++)
      {
        double nValueFull = 0;
        bool full = binIsFull(u, false, (outputNN_Y[r]-positionY_reorder[r]),
                              nParticlesTraining, numberBinsErrorEstimate);
        if (full) nValueFull = 1;

        if (iClus%dilutionFactor == 0) jn->SetOutputTrainSet(counter0, r*numberBinsErrorEstimate+u, nValueFull);
        if (iClus%dilutionFactor == 1) jn->SetOutputTestSet(counter1, r*numberBinsErrorEstimate+u, nValueFull);

        if (counter0 == 0) std::cout << " Y bin: " << u
                                     << " gl: " << r*2*numberBinsErrorEstimate+numberBinsErrorEstimate+u
                                     << " val: " << nValueFull;
        if (iClus%dilutionFactor == 0) histoControlTestY->Fill(u+1, nValueFull);
      }
    }

    if (counter0 == 0) std::cout << std::endl << " total number of bins: " << numberoutputs << std::endl;

    // ...
    if (iClus%dilutionFactor == 0) counter0 += 1;
    if (iClus%dilutionFactor == 1) counter1 += 1;

    // ...
  } // end loop over clusters
} // end loop over events

if (counter0 != numberTrainingEvents)
{
  cout << " counter up to: " << counter0
       << " while events in training sample are " << numberTrainingEvents << endl;
  // ...
}

if (counter1 != numberTestingEvents)
{
  cout << " counter up to: " << counter1
       << " while events in testing sample are " << numberTestingEvents << endl;
  // ...
}
std::cout << " Potts units are: " << jn->GetPottsUnits() << std::endl;
// ...

cout << " setting pattern for training events " << endl;

if (restartTrainingFrom == 0)
{
  // ...
}
else
{
  TString name("Weights");
  name += restartTrainingFrom;
  // ...
}

float minimumError = 1e10;
int epochesWithRisingError = 0;
int epochWithMinimum = 0;

// ...
if (useTrackEstimate)
{
  // ...
}
nameCronology += "/trainingCronology.txt";

// ... (the ofstream "cronology" is opened on nameCronology here)

cronology << "-------------SETTINGS----------------" << endl;
cronology << "Epochs: " << jn->GetEpochs() << std::endl;
// ...
cronology << "Momentum: " << jn->GetMomentum() << std::endl;
// ...
cronology << "-------------LAYOUT------------------" << endl;
cronology << "Input variables: " << jn->GetInputDim() << endl;
cronology << "Output variables: " << jn->GetOutputDim() << endl;
// ...
cronology << "Layout : ";
for (Int_t s = 0; s < jn->GetHiddenLayerDim()+2; ++s)
{
  // ...
  if (s < jn->GetHiddenLayerDim()+1) cronology << "-";
}
// ...
cronology << "--------------HISTORY-----------------" << endl;
cronology << "History of iterations: " << endl;
// ...

TH1F* histoTraining = new TH1F("training", "training",
                               (int)std::floor((float)nIterations/10.+0.5), 1,
                               std::floor((float)nIterations/10.+1.5));
TH1F* histoTesting = new TH1F("testing", "testing",
                              (int)std::floor((float)nIterations/10.+0.5), 1,
                              std::floor((float)nIterations/10.+1.5));
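// Main training loop: one jn->Train() call per epoch; every 10 epochs the test
// error is evaluated, the epoch with the smallest test error is recorded, and
// the training ends early once the test error has kept rising for more than
// 300 epochs.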
double maximumTrain = 0;
double minimumTrain = 1e10;

for (int epoch = restartTrainingFrom+1; epoch <= nIterations; ++epoch)
{
  if (epoch != restartTrainingFrom+1)
  {
    trainingError = jn->Train();
  }

  if (epoch%10 == 0 || epoch == restartTrainingFrom+1)
  {
    cronology.open(nameCronology, ios_base::app);

    testError = jn->Test();

    if (trainingError > maximumTrain) maximumTrain = trainingError;
    if (testError > maximumTrain) maximumTrain = testError;
    if (trainingError < minimumTrain) minimumTrain = trainingError;
    if (testError < minimumTrain) minimumTrain = testError;

    histoTraining->Fill(epoch/10., trainingError);
    histoTesting->Fill(epoch/10., testError);

    if (testError < minimumError)
    {
      minimumError = testError;
      epochesWithRisingError = 0;
      epochWithMinimum = epoch;
    }
    else
    {
      epochesWithRisingError += 10;
      // ...
    }

    if (epochesWithRisingError > 300)
    {
      if (trainingError < minimumError)
      {
        cout << " End of training. Minimum already on epoch: " << epochWithMinimum << endl;
        cronology << " End of training. Minimum already on epoch: " << epochWithMinimum << endl;
        // ...
      }
      // ...
    }

    cronology << "Epoch: [" << epoch
              << "] Error: " << trainingError
              << " Test: " << testError << endl;

    cout << "Epoch: [" << epoch
         << "] Error: " << trainingError
         << " Test: " << testError << endl;

    // ...
  }
}
TFile* file = new TFile(name, "recreate");
// ...
trainedNetwork->Write();
histoControlTestX->Write();
histoControlTestY->Write();
// ...

cout << " create Trained Network object..." << endl;
// ...

cout << " Now getting histograms from trainingResult" << endl;
cronology << " Now getting histograms from trainingResult" << endl;
// ...

cout << " From network to histo..." << endl;
// ...

cout << " From histo to network back..." << endl;
// ...

cout << " reading back " << endl;
// ...

if (epochWithMinimum != 0)
{
  cronology << "Minimum stored from Epoch: " << epochWithMinimum << endl;
}
else
{
  cronology << "Minimum not reached" << endl;
}
// ...

if (epochWithMinimum != 0)
{
  // ...
  name += epochWithMinimum;
  // ...

  std::cout << " reading back from minimum " << endl;

  TFile *_file0 = new TFile(name);
  // ...

  cout << " Reading back network with minimum" << endl;
  // ...
  trainedNetwork->Write();
  // ...

  cout << " -------------------- " << endl;
  cout << " Writing OUTPUT histos " << endl;

  // ...
  histoFName += "/histoWeights.root";

  TFile* fileHistos = new TFile(histoFName, "recreate");
  // ...
  std::vector<TH1*>::const_iterator histoBegin = myHistos.begin();
  std::vector<TH1*>::const_iterator histoEnd = myHistos.end();
  for (std::vector<TH1*>::const_iterator histoIter = histoBegin;
       histoIter != histoEnd; ++histoIter)
  {
    (*histoIter)->Write();
  }
  fileHistos->Write();
  fileHistos->Close();
  // ...
}
else
{
  cout << " using network at last iteration (minimum not reached..." << endl;
  // ...
}

// ...
histoTName += "/trainingInfo.root";

TFile* histoFile = new TFile(histoTName, "recreate");
histoTraining->Write();
histoTesting->Write();
// ...

TCanvas* trainingCanvas = new TCanvas("trainingCanvas", "trainingCanvas");
histoTraining->SetLineColor(2);
histoTesting->SetLineColor(4);
histoTraining->GetYaxis()->SetRangeUser(minimumTrain, maximumTrain);
histoTraining->Draw("l");
histoTesting->Draw("lsame");

// ...
canvasName += "/trainingCurve.eps";
trainingCanvas->SaveAs(canvasName);

TCanvas* mlpa_canvas = new TCanvas("jetnet_canvas", "Network analysis");
mlpa_canvas->Divide(2,4);