8 #include "TTimeStamp.h"
// NOTE(review): member-initializer fragment of the default TJetNet ctor —
// data-set pointers start null, flags start false (sampled view; the ctor
// header and remaining initializers are not visible here).
24 m_pInputTrainSet(nullptr),
25 m_pOutputTrainSet(nullptr),
26 m_pInputTestSet(nullptr),
27 m_pOutputTestSet(nullptr),
// Fix: ROOT's boolean constant is kFALSE — "kFalse" is not declared in
// Rtypes.h and would not compile; matches the spelling used elsewhere in
// this file ("m_IsInitialized = kFALSE;" at orig line 74).
32 m_IsInitialized(kFALSE),
34 m_NormalizeOutput(false)
39 const Int_t aLayersCnt,
const Int_t* aLayers )
// Entry banner (debug only).
53 if( m_Debug ){ std::cout <<
"=====> Entering TJetNet::TJetNet(...)" << std::endl; }
// Record the set sizes and topology passed by the caller.
55 m_TestSetCnt = aTestCount;
56 m_TrainSetCnt = aTrainCount;
57 m_LayerCount = aLayersCnt;
// Copy the per-layer unit counts into an owned array.
// NOTE(review): loop index `i` is used without a visible declaration here —
// presumably declared earlier in the function or as a member; confirm in the
// full file.
59 if( m_LayerCount > 0 )
62 m_pLayers =
new Int_t[ m_LayerCount ];
63 for(
i = 0;
i < m_LayerCount; ++
i )
65 m_pLayers[
i ] = aLayers[
i ];
// Derived dimensions: first layer = input, last = output, the rest hidden.
69 m_InputDim = m_pLayers[ 0 ];
70 m_OutputDim = m_pLayers[ m_LayerCount - 1 ];
71 m_HiddenLayerDim = m_LayerCount-2;
// Network starts uninitialized and unlocked (Init() must run before use).
74 m_IsInitialized = kFALSE;
75 m_InitLocked = kFALSE;
// Allocate input/output pattern storage for both train and test sets.
77 m_pInputTrainSet =
new TNeuralDataSet( m_TrainSetCnt, GetInputDim() );
78 m_pInputTestSet =
new TNeuralDataSet( m_TestSetCnt, GetInputDim() );
79 m_pOutputTrainSet =
new TNeuralDataSet( m_TrainSetCnt, GetOutputDim() );
80 m_pOutputTestSet =
new TNeuralDataSet( m_TestSetCnt, GetOutputDim() );
// Defaults: no output normalization, sigmoid activation.
82 m_NormalizeOutput=
false;
84 m_enActFunction=afSigmoid;
// Exit banner (debug only).
88 if( m_Debug ){ std::cout <<
"=====> Leaving TJetNet::TJetNet(...)" << std::endl; }
// NOTE(review): entry/exit debug banners of TJetNet::~TJetNet(); the actual
// cleanup (deleting m_pLayers and the four TNeuralDataSet objects allocated
// in the ctor) is not visible in this sampled view — verify it exists.
94 if(
m_Debug ){ std::cout <<
"=====> Entering TJetNet::~TJetNet(...)" << std::endl; }
100 if(
m_Debug ){ std::cout <<
"=====> Leaving TJetNet::~TJetNet(...)" << std::endl; }
// NOTE(review): fragments of a createTrainedNetwork()-style method — snapshots
// the live JETNET weights/thresholds into per-layer TVectorD/TMatrixD objects
// and returns a trained-network object. Sampled view: loop bodies and braces
// are only partially visible.
109 std::vector<Int_t> nHiddenLayerSize;
112 for (Int_t o=0;o<nHidden;++o)
118 std::vector<TVectorD*> thresholdVectors;
119 std::vector<TMatrixD*> weightMatrices;
// One (threshold vector, weight matrix) pair per hidden layer plus one for
// the output layer — hence nHidden+1 iterations.
121 for (Int_t o=0;o<nHidden+1;++o)
123 int sizeActualLayer=(o<nHidden)?nHiddenLayerSize[o]:nOutput;
124 int sizePreviousLayer=(o==0)?nInput:nHiddenLayerSize[o-1];
125 thresholdVectors.push_back(
new TVectorD(sizeActualLayer));
126 weightMatrices.push_back(
new TMatrixD(sizePreviousLayer,sizeActualLayer));
// Fill pass: copy weights/thresholds layer by layer.
129 for (Int_t o=0;o<nHidden+1;++o)
135 cout <<
" Iterating on hidden layer n.: " << o << endl;
139 cout <<
" Considering output layer " << endl;
142 int sizeActualLayer=(o<nHidden)?nHiddenLayerSize[o]:nOutput;
144 for (Int_t
s=0;
s<sizeActualLayer;++
s)
149 cout <<
" To hidden node: " <<
s << endl;
154 cout <<
" To output node: " <<
s << endl;
// First layer: weights come from the input nodes.
158 for (Int_t
p=0;
p<nInput;++
p)
161 cout <<
" W from inp nod: " <<
p <<
"weight: " <<
// The +1 offsets convert these 0-based loop indices to GetWeight()'s
// indexing (JETNET-side indices appear to be 1-based — see the "-1" applied
// after JNINDX later in the file).
163 weightMatrices[o]->operator() (
p,
s) =
GetWeight(o+1,
s+1,
p+1);
// Deeper layers: weights come from the previous hidden layer.
168 for (Int_t
p=0;
p<nHiddenLayerSize[o-1];++
p)
171 cout <<
" W from lay : " << o-1 <<
" nd: " <<
178 cout <<
" Threshold for node " <<
s <<
" : " <<
184 bool linearOutput=
false;
202 return trainedNetwork;
// NOTE(review): fragments of the inverse operation — loading a stored trained
// network back into the live JETNET state. Topology is validated first (the
// four "doesn't match" bail-outs below); only then are weights copied in.
212 std::vector<Int_t> nHiddenLayerSize;
216 cout <<
" Network doesn't match.. not loading.." << endl;
220 for (Int_t o=0;o<nHidden;++o)
225 cout <<
" Network doesn't match... not loading..." << endl;
233 cout <<
" Network doesn't match... not loading.." << endl;
240 cout <<
" Network doesn't match.. not loading.." << endl;
247 std::vector<TMatrixD*> weightMatrices=trainedNetwork->
weightMatrices();
// Mirror of the save loop: hidden layers plus the output layer.
250 for (Int_t o=0;o<nHidden+1;++o)
252 int sizeActualLayer=(o<nHidden)?nHiddenLayerSize[o]:nOutput;
253 int sizePreviousLayer=(o==0)?nInput:nHiddenLayerSize[o-1];
255 for (Int_t
s=0;
s<sizeActualLayer;++
s)
257 Double_t nodeValue=0.;
// Input-layer weights...
260 for (Int_t
p=0;
p<nInput;++
p)
// ...or previous-hidden-layer weights for deeper layers.
267 for (Int_t
p=0;
p<nHiddenLayerSize[o-1];++
p)
277 cout <<
" Setting linear output function " << endl;
281 cout <<
" Successfully read back Trained Network " << endl;
// NOTE(review): fragments of a Print()/dump method — writes the topology and
// the main training hyper-parameters to stdout.
300 std::cout <<
"TJetNet" << std::endl;
301 std::cout <<
"Number of layers: " <<
m_LayerCount << std::endl;
// Per-layer unit counts (loop header not visible in this sampled view).
305 std::cout <<
"\t\tNumber of units in layer " <<
i <<
" : " <<
m_pLayers[
i ] << std::endl;
308 std::cout <<
"Epochs: " <<
GetEpochs() << std::endl;
314 std::cout <<
"Momentum: " <<
GetMomentum() << std::endl;
// NOTE(review): fragments of Test() — accumulates the network error over all
// test patterns. JNDAT1.OIN/OUT are the JETNET common-block input/output
// slots for the pattern currently presented to the net.
324 Double_t fMeanError = 0.0;
329 for( Int_t iPattern = 0; iPattern < NPatterns; iPattern++ )
349 if(
m_Debug ) std::cout <<
"Testing [ " << iPattern <<
" ] - " <<
JNDAT1.OIN[ 0 ]
350 <<
" => " <<
JNDAT1.OUT[ 0 ] << std::endl;
// Normalize: mean of the 1/2 * sum-of-squares error over the pattern count.
354 fMeanError/=2.*NPatterns;
357 std::cout <<
" Test error: " << fMeanError << endl;
// NOTE(review): fragments of a b-tagging style test method — fills per-flavour
// output histograms (C = charm rejection axis, L = light rejection axis),
// writes them to "test.root", then scans bins from the high end to find the
// cut giving each requested b-efficiency and prints the c/light rejections.
369 Double_t fMeanError = 0.0;
385 TString histoEffStringC(
"histoEffC");
386 TString histoEffStringL(
"histoEffL");
389 TString string1=histoEffStringC;
395 TString string2=histoEffStringL;
// Three histograms per family — indexed by flavour (see entries b/c/u below).
401 histoEfficienciesC.push_back(
histo);
402 histoEfficienciesL.push_back(histo2);
405 for( Int_t iPattern = 0; iPattern < NPatterns; iPattern++ )
474 std::cout <<
" Filled 0 " << std::endl;
475 histoEfficienciesC[
active]->Fill( 0 );
490 std::cout <<
" Filled 0 " << std::endl;
491 histoEfficienciesL[
active]->Fill( 0 );
494 if(
m_Debug ) std::cout <<
"Testing [ " << iPattern <<
" ] - " <<
JNDAT1.OIN[ 0 ]
495 <<
" => " <<
JNDAT1.OUT[ 0 ] << std::endl;
499 if (
m_Debug) std::cout <<
" Finished patterns... " << std::endl;
// NOTE(review): output file name is hard-coded and opened in "recreate" mode,
// so each call overwrites test.root.
501 TFile* newFile=
new TFile(
"test.root",
"recreate");
502 histoEfficienciesL[0]->Write();
503 histoEfficienciesL[1]->Write();
504 histoEfficienciesL[2]->Write();
505 histoEfficienciesC[0]->Write();
506 histoEfficienciesC[1]->Write();
507 histoEfficienciesC[2]->Write();
// Two passes: u==0 -> charm rejection, u==1 -> light rejection.
513 for (
int u=0;
u<2;
u++)
518 std::cout <<
"c-rej --> ";
519 myVectorHistos=&histoEfficienciesC;
523 std::cout <<
"l-rej --> ";
524 myVectorHistos=&histoEfficienciesL;
528 if (
m_Debug) std::cout <<
" 1 " << std::endl;
// Total entries per flavour: [0]=b, [1]=c, [2]=u/light (per variable names).
530 Double_t allb=(*myVectorHistos)[0]->GetEntries();
531 Double_t allc=(*myVectorHistos)[1]->GetEntries();
532 Double_t allu=(*myVectorHistos)[2]->GetEntries();
534 if (
m_Debug) std::cout <<
" allb " << allb << std::endl;
536 Double_t allbsofar=0;
// Reset the per-working-point bookkeeping for this pass.
541 for (
int r=0;
r<
eff.size();
r++)
543 ok_eff.push_back(
false);
544 binN_Eff.push_back(0);
// Scan bins from the highest output downward, accumulating b entries until
// each requested efficiency eff[r] is crossed; record the bin in binN_Eff.
547 for (
int s=0;
s<(*myVectorHistos)[0]->GetNbinsX()+1;
s++) {
548 allbsofar+=(*myVectorHistos)[0]->GetBinContent((*myVectorHistos)[0]->GetNbinsX()+1-
s);
549 bool nothingMore(
true);
552 for (
int r=0;
r<
eff.size();
r++)
554 if (
m_Debug) std::cout <<
" actual eff: " << allbsofar / allb << std::endl;
556 if ((!ok_eff[
r]) && allbsofar / allb >
eff[
r])
560 if (
m_Debug) std::cout <<
" bin: " <<
s <<
" eff: " << allbsofar / allb << std::endl;
563 else if (allbsofar / allb <=
eff[
r])
568 if (nothingMore)
break;
// Report pass: for each working point print the achieved efficiency check
// and the c/light rejections (rejection = 1/efficiency, printed below).
572 for (
int r=0;
r<
eff.size();
r++)
575 std::cout <<
" " <<
eff[
r];
// NOTE(review): the integral bounds mix histogram [0]'s and [1]'s GetNbinsX()
// — harmless only if all three histograms share identical binning; confirm.
577 std::cout <<
" check: " << (
double)(*myVectorHistos)[0]->Integral((*myVectorHistos)[0]->GetNbinsX()-binN_Eff[
r],
578 (*myVectorHistos)[1]->GetNbinsX()+1)
581 double effc=(*myVectorHistos)[1]->Integral((*myVectorHistos)[0]->GetNbinsX()-binN_Eff[
r],
582 (*myVectorHistos)[1]->GetNbinsX()+1);
584 double effl=(*myVectorHistos)[2]->Integral((*myVectorHistos)[0]->GetNbinsX()-binN_Eff[
r],
585 (*myVectorHistos)[2]->GetNbinsX()+1);
// NOTE(review): 1/effc and 1/effl divide by a possibly-zero integral —
// would print inf; acceptable for a diagnostic printout but worth guarding.
590 std::cout <<
" c: " << 1/effc;
594 std::cout <<
" l: " << 1/effl;
598 std::cout << std::endl;
// Histograms are owned here — deleted after being written to the file.
603 delete histoEfficienciesC[j];
604 delete histoEfficienciesL[j];
608 fMeanError/=2.*NPatterns;
611 std::cout <<
" Test error: " << fMeanError << endl;
// NOTE(review): fragments of Train() — warns when the update schedule
// (updatesPerEpoch * patternsPerUpdate) does not match the available pattern
// count, then feeds patterns into the JETNET input/output slots.
622 Double_t fMeanError = 0.0;
// Using fewer than half the patterns: probably a misconfiguration — warn.
632 if (updatesPerEpoch*patternsPerUpdate<1./2.*NPatterns)
634 cout <<
"Using only: " << updatesPerEpoch*patternsPerUpdate <<
635 " patterns on available: " << NPatterns << endl;
636 }
// Asking for more patterns than exist: also warn.
else if (updatesPerEpoch*patternsPerUpdate>NPatterns)
638 cout <<
" Trying to use " << updatesPerEpoch*patternsPerUpdate <<
639 " patterns, but available: " << NPatterns << endl;
// Pattern loop bounded by the schedule, not by NPatterns.
643 for( Int_t iPattern = 0; iPattern < updatesPerEpoch*patternsPerUpdate;
646 for( Int_t
i = 0;
i < inputDim;
i++ )
653 for( Int_t j = 0; j < outputDim; j++ )
// NOTE(review): fragments of the info/Init()/training-epoch methods.
666 cout <<
" Invoking info of type: " << typeOfInfo << endl;
// Init(): report each layer's size, then hand control to JETNET's JNINIT.
679 if (
m_Debug ) std::cout <<
"Layer " <<
i + 1 <<
" has " <<
m_pLayers[
i ] <<
" units." << std::endl;
683 cout <<
" calling JNINIT " << endl;
// Optional POTT (softmax-style) output normalization is configured here.
688 std::cout <<
" Setting to normalize output nodes: POTT nodes " << std::endl;
692 cout <<
" finishing calling JNINIT " << endl;
// Per-epoch loop: train error and test error are reported each epoch.
700 Double_t aTrain, aTest;
715 std::cout <<
"[ " <<
m_CurrentEpoch <<
" ] Train: " << aTrain << std::endl;
722 std::cout <<
"Test: " << aTest << std::endl;
// NOTE(review): fragments of the pattern-file dump/load methods — plain
// ofstream/ifstream text I/O; success is tracked via is_open().
782 out.open( aFileName );
823 Int_t iPatternLength;
825 in.open( aFileName );
826 bFlag = Bool_t( in.is_open() );
836 if(
m_Debug ){ std::cout <<
"Updating the Layers Nodes Counters..." << std::endl; }
840 if(
m_Debug ){ std::cout <<
"Layer [ " <<
i + 1 <<
" ] has " <<
m_pLayers[
i ] <<
" units" << std::endl; }
850 if(
m_Debug ){ std::cout <<
"Patterns per line = " << iPatternLength << std::endl; }
854 if(
m_Debug ){ std::cout <<
"Test Set has " <<
m_TestSetCnt <<
" patterns." << std::endl; }
// NOTE(review): eof()-driven read loop — eof() only turns true after a failed
// read, so the last iteration may process stale data; consider checking the
// stream state after extraction in the full file.
872 while( ( j < iPatternLength ) && ( !in.eof() ) )
// Read one output value from the JETNET common-block output slot.
951 return Double_t (
JNDAT1.OUT[ aIndex ] );
// NOTE(review): JETNET writes its weight dump to Fortran unit 8 ("fort.8")
// and reads from unit 12 ("fort.12"); these renames move the dump to/from the
// user-supplied file name. close(8) closes the raw fd, and its return code is
// printed — presumably for debugging.
958 std::cout << close( 8 ) << std::endl;
959 rename(
"./fort.8", aFileName );
965 rename( aFileName,
"./fort.12" );
968 rename(
"./fort.12", aFileName );
// Weight lookup: JNINDX returns a 1-based Fortran index, hence the -1.
975 return Double_t (
JNINT1.W[
JNINDX( aLayerInd, aNodeInd, aConnectedNodeInd )-1 ] );
// Threshold lookup: connected-node index 0 selects the threshold entry.
982 return Double_t (
JNINT1.T[
JNINDX( aLayerInd, aNodeInd, 0 )-1 ] );
// Node ranges are taken as absolute values (sign presumably encodes a flag
// in the caller's convention — confirm in the full file).
993 i1 = TMath::Abs( aNodeA1 );
994 i2 = TMath::Abs( aNodeA2 );
995 j1 = TMath::Abs( aNodeB1 );
996 j2 = TMath::Abs( aNodeB2 );
// NOTE(review): thin accessor fragments over the JETNET Fortran common blocks
// (JNDAT1.MSTJN = integer steering switches, JNDAT1.PARJN = real training
// parameters, JNDAT2.IGFN = per-layer activation-function codes — per the
// JETNET manual; function names are not visible in this sampled view).
1068 return JNDAT1.MSTJN[ 9 + aLayer ];
1074 JNDAT1.MSTJN[ 8 ] = aValue;
1081 JNDAT1.MSTJN[ 4 ] = aValue;
1087 JNDAT1.MSTJN[ 3 ] = aValue;
1094 JNDAT1.MSTJN[ 2 ] = aValue;
1101 JNDAT2.IGFN[ layerN ] = aValue;
1107 JNDAT1.MSTJN[ 1 ] = aValue;
1114 JNDAT1.PARJN[ 0 ] = aValue;
1120 JNDAT1.PARJN[ 1 ] = aValue;
1126 JNDAT1.PARJN[ 3 ] = aValue;
1132 JNDAT1.PARJN[ 10 ] = aValue;
1148 return JNDAT1.MSTJN[ 8 ];
1153 return JNDAT1.MSTJN[ 3 ];
// NOTE(review): this getter returns MSTJN[3] exactly like the one above,
// while the setters write distinct slots ([4] at orig 1081, [3] at 1087) —
// one of these two getters likely should read MSTJN[4]. Verify against the
// getter names in the full file before changing.
1158 return JNDAT1.MSTJN[ 3 ];
1163 return JNDAT1.MSTJN[ 2 ];
1169 return JNDAT2.IGFN[ layerN ];
1174 return JNDAT1.MSTJN[ 1 ];
1179 return JNDAT1.PARJN[ 0 ];
1184 return JNDAT1.PARJN[ 1 ];
1189 return JNDAT1.PARJN[ 3 ];
1194 return JNDAT1.PARJN[ 10 ];
// Generic raw accessors by index (no bounds checking visible).
1204 return JNDAT1.MSTJN[ aIndex ];
1209 return JNDAT1.PARJN[ aIndex ];
1214 JNDAT1.MSTJN[ aIndex ] = aValue;
1219 JNDAT1.PARJN[ aIndex ] = aValue;
// NOTE(review): fragments of a Shuffle(aShuffleTrainSet, aShuffleTestSet)
// method — seeds the shuffle from the wall-clock second (TTimeStamp ts,
// declared outside this view), so results are non-reproducible by design.
1226 Int_t
Seed =
ts.GetSec();
1227 if ( aShuffleTrainSet )
1234 if ( aShuffleTestSet )