ATLAS Offline Software
Loading...
Searching...
No Matches
TJetNet.cxx
Go to the documentation of this file.
1/*
2 Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration
3*/
4
5#include "TJetNet.h"
6#include "jetnet.h"
7#include "TRandom3.h"
8#include "TTimeStamp.h"
9#include <unistd.h>
10#include <cstdio>
11#include <cmath>
12#include "TTrainedNetwork.h"
13#include "TFile.h"
14
15#include "TH1F.h"
16
18
19//Constructors
20//______________________________________________________________________________
22 : m_LayerCount(0),
23 m_pLayers(0),
24 m_pInputTrainSet(nullptr),
25 m_pOutputTrainSet(nullptr),
26 m_pInputTestSet(nullptr),
27 m_pOutputTestSet(nullptr),
28 m_TrainSetCnt(0),
29 m_TestSetCnt(0),
30 m_Epochs(0),
31 m_CurrentEpoch(0),
32 m_IsInitialized(kFalse),
33 m_InitLocked(kFALSE),
34 m_NormalizeOutput(false)
35{
36}
37//______________________________________________________________________________
TJetNet::TJetNet( Int_t aTestCount, Int_t aTrainCount,
                  const Int_t aLayersCnt, const Int_t* aLayers )
#ifdef _DEBUG
  : m_Debug (kTRUE)
#else
  : m_Debug (kFALSE)
#endif
{
  // Creates neural network with aLayersCnt number of layers,
  // aTestCount number of patterns for the test set,
  // aTrainCount patterns for the train set.
  // aLayers contains the information for number of the units in the different layers

  int i;

  if( m_Debug ){ std::cout << "=====> Entering TJetNet::TJetNet(...)" << std::endl; }

  m_TestSetCnt = aTestCount;
  m_TrainSetCnt = aTrainCount;
  m_LayerCount = aLayersCnt; // Get the number of layers

  if( m_LayerCount > 0 )
  {
    //Perform deep copy of the array holding Layers count
    m_pLayers = new Int_t[ m_LayerCount ];
    for( i = 0; i < m_LayerCount; ++i )
    {
      m_pLayers[ i ] = aLayers[ i ];
    }
  }

  // First entry of aLayers is the input-layer width.
  // NOTE(review): if aLayersCnt <= 0, m_pLayers was never allocated and
  // this dereference is undefined -- callers must pass at least one layer.
  m_InputDim = m_pLayers[ 0 ];
  // NOTE(review): the documentation rendering elided several source lines
  // here (presumably m_HiddenLayerDim/m_OutputDim setup and the
  // allocation of the four train/test data sets) -- verify against the
  // repository copy.

  m_IsInitialized = kFALSE;
  m_InitLocked = kFALSE;

  // -1 marks "number of epochs not yet configured".
  SetEpochs( -1 );

  if( m_Debug ){ std::cout << "=====> Leaving TJetNet::TJetNet(...)" << std::endl; }
}
90//______________________________________________________________________________
{
  // Default destructor
  // Releases the layer-size array and the four pattern sets owned by this
  // object (deleting a null pointer is harmless).
  if( m_Debug ){ std::cout << "=====> Entering TJetNet::~TJetNet(...)" << std::endl; }
  delete [] m_pLayers;
  delete m_pInputTestSet;
  delete m_pInputTrainSet;
  delete m_pOutputTestSet;
  delete m_pOutputTrainSet;
  if( m_Debug ){ std::cout << "=====> Leaving TJetNet::~TJetNet(...)" << std::endl; }
}
102//______________________________________________________________________________
103//by Giacinto Piacquadio (18-02-2008)
{
  // Snapshot the current JETNET weights and thresholds into a standalone
  // TTrainedNetwork (one threshold vector and weight matrix per hidden
  // layer plus the output layer); ownership of the new vectors/matrices
  // passes to the returned object.
  // NOTE(review): the signature line is elided in this rendering --
  // presumably "TTrainedNetwork* TJetNet::createTrainedNetwork()".

  Int_t nInput=GetInputDim();
  Int_t nHidden=GetHiddenLayerDim();
  std::vector<Int_t> nHiddenLayerSize;

  for (Int_t o=0;o<nHidden;++o)
  {
    // JETNET layer indices are 1-based.
    nHiddenLayerSize.push_back(GetHiddenLayerSize(o+1));
  }
  Int_t nOutput=GetOutputDim();

  std::vector<TVectorD*> thresholdVectors;
  std::vector<TMatrixD*> weightMatrices;

  // Allocate containers sized per layer: hidden layers first, output last.
  for (Int_t o=0;o<nHidden+1;++o)
  {
    int sizeActualLayer=(o<nHidden)?nHiddenLayerSize[o]:nOutput;
    int sizePreviousLayer=(o==0)?nInput:nHiddenLayerSize[o-1];
    thresholdVectors.push_back(new TVectorD(sizeActualLayer));
    weightMatrices.push_back(new TMatrixD(sizePreviousLayer,sizeActualLayer));
  }

  // Copy every weight and threshold out of the JETNET common blocks
  // (GetWeight/GetThreshold use 1-based layer/node indices).
  for (Int_t o=0;o<nHidden+1;++o)
  {

    if (m_Debug)
      if (o<nHidden)
      {
        cout << " Iterating on hidden layer n.: " << o << endl;
      }
      else
      {
        cout << " Considering output layer " << endl;
      }

    int sizeActualLayer=(o<nHidden)?nHiddenLayerSize[o]:nOutput;

    for (Int_t s=0;s<sizeActualLayer;++s)
    {
      if (o<nHidden)
      {
        if (m_Debug)
          cout << " To hidden node: " << s << endl;
      }
      else
      {
        if (m_Debug)
          cout << " To output node: " << s << endl;
      }
      if (o==0)
      {
        // First layer: connections come from the input nodes.
        for (Int_t p=0;p<nInput;++p)
        {
          if (m_Debug)
            cout << " W from inp nod: " << p << "weight: " <<
              GetWeight(o+1,s+1,p+1) << endl;
          weightMatrices[o]->operator() (p,s) = GetWeight(o+1,s+1,p+1);
        }
      }
      else
      {
        // Deeper layers: connections come from the previous hidden layer.
        for (Int_t p=0;p<nHiddenLayerSize[o-1];++p)
        {
          if (m_Debug)
            cout << " W from lay : " << o-1 << " nd: " <<
              p << " weight: " <<
              GetWeight(o+1,s+1,p+1) << endl;
          weightMatrices[o]->operator() (p,s)=GetWeight(o+1,s+1,p+1);
        }
      }
      if (m_Debug)
        cout << " Threshold for node " << s << " : " <<
          GetThreshold(o+1,s+1) << endl;
      thresholdVectors[o]->operator() (s) = GetThreshold(o+1,s+1);
    }
  }

  // Activation function code 4 on the last layer means a linear output.
  bool linearOutput=false;
  if (this->GetActivationFunction(nHidden)==4)
  {
    linearOutput=true;
  }

  // NOTE(review): two argument lines of this constructor call are elided
  // in this rendering (the call is incomplete as shown) -- verify the
  // full argument list against the repository copy.
  TTrainedNetwork* trainedNetwork=
    new TTrainedNetwork(nInput,
                        nHidden,
                        nOutput,
                        nHiddenLayerSize,
                        thresholdVectors,
                        weightMatrices,
                        linearOutput,

  return trainedNetwork;

}
205//______________________________________________________________________________
206//by Giacinto Piacquadio (18-02-2008)
{
  // Load the weights and thresholds of a previously saved TTrainedNetwork
  // back into the live JETNET common blocks. The topology (input width,
  // every hidden-layer size, output width) must match the current network
  // exactly; otherwise nothing is loaded.
  // NOTE(review): the signature line is elided in this rendering --
  // presumably "void TJetNet::readBackTrainedNetwork(const TTrainedNetwork* trainedNetwork)".

  Int_t nInput=GetInputDim();
  Int_t nHidden=GetHiddenLayerDim();
  std::vector<Int_t> nHiddenLayerSize;

  if (trainedNetwork->getnHidden()!=nHidden)
  {
    cout << " Network doesn't match.. not loading.." << endl;
    return;
  }

  for (Int_t o=0;o<nHidden;++o)
  {
    nHiddenLayerSize.push_back(GetHiddenLayerSize(o+1));
    if (nHiddenLayerSize[o]!=trainedNetwork->getnHiddenLayerSize()[o])
    {
      cout << " Network doesn't match... not loading..." << endl;
      return;
    }
  }
  Int_t nOutput=GetOutputDim();

  if (trainedNetwork->getnInput()!=nInput)
  {
    cout << " Network doesn't match... not loading.." << endl;
    return;
  }


  if (trainedNetwork->getnOutput()!=nOutput)
  {
    cout << " Network doesn't match.. not loading.." << endl;
    return;
  }

  //OK, everything matches... can go on...

  std::vector<TVectorD*> thresholdVectors=trainedNetwork->getThresholdVectors();
  std::vector<TMatrixD*> weightMatrices=trainedNetwork->weightMatrices();
  //ownership remains of the TTrainedNetwork

  // Push every weight/threshold into the JETNET common blocks
  // (mSetWeight/mSetThreshold take 1-based layer/node indices).
  for (Int_t o=0;o<nHidden+1;++o)
  {
    int sizeActualLayer=(o<nHidden)?nHiddenLayerSize[o]:nOutput;
    int sizePreviousLayer=(o==0)?nInput:nHiddenLayerSize[o-1];  // NOTE(review): unused

    for (Int_t s=0;s<sizeActualLayer;++s)
    {
      Double_t nodeValue=0.;  // NOTE(review): unused
      if (o==0)
      {
        for (Int_t p=0;p<nInput;++p)
        {
          mSetWeight(weightMatrices[o]->operator() (p,s),o+1,s+1,p+1);
        }
      }
      else
      {
        for (Int_t p=0;p<nHiddenLayerSize[o-1];++p)
        {
          mSetWeight(weightMatrices[o]->operator() (p,s),o+1,s+1,p+1);
        }
      }
      mSetThreshold(thresholdVectors[o]->operator() (s),o+1,s+1);
    }
  }
  // Restore a linear output layer if the saved network used one.
  if (trainedNetwork->getIfLinearOutput()==true)
  {
    cout << " Setting linear output function " << endl;
    this->SetActivationFunction(nHidden,4);
  }

  cout << " Successfully read back Trained Network " << endl;
}
283//______________________________________________________________________________
284
285void TJetNet::mSetWeight( Double_t weight,Int_t aLayerInd, Int_t aNodeInd, Int_t aConnectedNodeInd )
286{
287 JNINT1.W[ JNINDX( aLayerInd, aNodeInd, aConnectedNodeInd )-1 ]=weight;
288}
289//______________________________________________________________________________
290void TJetNet::mSetThreshold( Double_t threshold, Int_t aLayerInd, Int_t aNodeInd)
291{
292 JNINT1.T[ JNINDX( aLayerInd, aNodeInd, 0 )-1 ]=threshold;
293}
294//______________________________________________________________________________
295void TJetNet::Print( void )
296{
297 // Prints on the screen, information for the neural network
298 Int_t i;
299
300 std::cout << "TJetNet" << std::endl;
301 std::cout << "Number of layers: " << m_LayerCount << std::endl;
302
303 for( i = 0; i < m_LayerCount; i++ )
304 {
305 std::cout << "\t\tNumber of units in layer " << i << " : " << m_pLayers[ i ] << std::endl;
306 }
307
308 std::cout << "Epochs: " << GetEpochs() << std::endl;
309 std::cout << "Updates Per Epoch: " << GetUpdatesPerEpoch() << std::endl;
310 std::cout << "Updating Procedure: " << GetUpdatingProcedure() << std::endl;
311 std::cout << "Error Measure: " << GetErrorMeasure() << std::endl;
312 std::cout << "Patterns Per Update: " << GetPatternsPerUpdate() << std::endl;
313 std::cout << "Learning Rate: " << GetLearningRate() << std::endl;
314 std::cout << "Momentum: " << GetMomentum() << std::endl;
315 std::cout << "Initial Weights Width: " << GetInitialWeightsWidth() << std::endl;
316 std::cout << "Learning Rate Decrease: " << GetLearningRateDecrease() << std::endl;
317 std::cout << "Activation Function: " << GetActivationFunction() << std::endl;
318}
319//______________________________________________________________________________
320Double_t TJetNet::Test( void )
321{
322 // Initiate test cycle of the neural network
323 Int_t NRight = 0;
324 Double_t fMeanError = 0.0;
325 Double_t *TMP;
326 Int_t NPatterns = GetTestSetCnt();
327
328
329 for( Int_t iPattern = 0; iPattern < NPatterns; iPattern++ )
330 {
331
332 for( Int_t i = 0; i < GetInputDim(); i++ )
333 {
334 JNDAT1.OIN[ i ] = float ( GetInputTestSet( iPattern, i ) );
335 }
336
337 NWJNWGT.OWGT = GetEventWeightTestSet( iPattern );
338
339 JNTEST();
340
341 for( Int_t j = 0; j < GetOutputDim(); j++ )
342 {
343 fMeanError+= NWJNWGT.OWGT *
344 std::pow(JNDAT1.OUT[ j ]-float( GetOutputTestSet( iPattern, j )),2)/(float)GetOutputDim();
345 }
346
347
348
349 if( m_Debug ) std::cout << "Testing [ " << iPattern << " ] - " << JNDAT1.OIN[ 0 ]
350 << " => " << JNDAT1.OUT[ 0 ] << std::endl;
351
352 }
353
354 fMeanError/=2.*NPatterns;
355
356 if (m_Debug)
357 std::cout << " Test error: " << fMeanError << endl;
358
359 return fMeanError;
360}
361//
Double_t TJetNet::TestBTAG( void )
{
  // Evaluate the network on the test set (or, with the hard-wired local
  // "test" flag, on the training set). Returns the mean event-weighted
  // squared error. Side effects: fills per-class discriminant histograms,
  // writes them to "test.root" (recreated each call), and prints c-jet and
  // light-jet rejection factors at b-efficiencies 0.5 / 0.6 / 0.7.
  // NOTE(review): the rejection bookkeeping assumes exactly three output
  // nodes (0 = b, 1 = c, 2 = light) -- the vectors are indexed [0..2]
  // below even though the histograms are created per GetOutputDim().

  bool test=false;  // flip by hand to evaluate on the training set instead

  // Initiate test cycle of the neural network
  Int_t NRight = 0;      // NOTE(review): unused
  Double_t fMeanError = 0.0;
  Double_t *TMP;         // NOTE(review): unused and uninitialised
  Int_t NPatterns = GetTestSetCnt();
  if (test)
  {
    NPatterns = GetTrainSetCnt();
  }

  // Target b-efficiency working points for the rejection printout.
  vector<double> eff;
  eff.push_back(0.5);
  eff.push_back(0.6);
  eff.push_back(0.7);

  //test also the b-tagging performance directly during testing !!!
  // One discriminant histogram per true class: ...C uses out0/(out0+out1),
  // ...L uses out0/(out0+out2).
  vector<TH1F*> histoEfficienciesC;
  vector<TH1F*> histoEfficienciesL;
  TString histoEffStringC("histoEffC");
  TString histoEffStringL("histoEffL");
  for (int i=0;i<GetOutputDim();i++)
  {
    TString string1=histoEffStringC;
    string1+=i;
    TH1F* histo=new TH1F(string1,
                         string1,
                         20000,
                         -2,3);
    TString string2=histoEffStringL;
    string2+=i;
    TH1F* histo2=new TH1F(string2,
                          string2,
                          20000,
                          -2,3);
    histoEfficienciesC.push_back(histo);
    histoEfficienciesL.push_back(histo2);
  }

  for( Int_t iPattern = 0; iPattern < NPatterns; iPattern++ )
  {

    // Load inputs; NOTE(review): the per-event weight is (redundantly)
    // re-assigned once per input node inside this loop.
    for( Int_t i = 0; i < GetInputDim(); i++ )
    {
      if (!test)
      {
        JNDAT1.OIN[ i ] = float ( GetInputTestSet( iPattern, i ) );
        NWJNWGT.OWGT = GetEventWeightTestSet( iPattern );
      }
      else
      {
        JNDAT1.OIN[ i ] = float (GetInputTrainSet( iPattern, i ) );
        NWJNWGT.OWGT = GetEventWeightTrainSet( iPattern );
      }

    }

    JNTEST();  // forward pass: fills JNDAT1.OUT

    // "active" = index of the true class (the output whose target is 1).
    int active=0;
    for( Int_t j = 0; j < GetOutputDim(); j++ )
    {
      if (!test)
      {
        fMeanError+= NWJNWGT.OWGT *
          std::pow(JNDAT1.OUT[ j ]-float( GetOutputTestSet( iPattern, j )),2)/(float)GetOutputDim();
      }
      else
      {
        fMeanError+= NWJNWGT.OWGT *
          std::pow(JNDAT1.OUT[ j ]-float( GetOutputTrainSet( iPattern, j )),2)/(float)GetOutputDim();
      }

      if (!test)
      {
        if (fabs(float( GetOutputTestSet( iPattern, j)) - 1) < 1e-4)
        {
          active = j;
        }
      }
      else
      {
        if (fabs(float( GetOutputTrainSet( iPattern, j)) - 1) < 1e-4)
        {
          active = j;
        }
      }
    }

    // b-vs-c discriminant, filled into the true-class histogram.
    if (JNDAT1.OUT[ 0 ] + JNDAT1.OUT[ 1 ] >= 0)
    {
      histoEfficienciesC[active]->Fill( JNDAT1.OUT[ 0 ] /
                                        ( JNDAT1.OUT[ 0 ] + JNDAT1.OUT[ 1 ]),
                                        NWJNWGT.OWGT);
    }
    else
    {
      std::cout << " Filled 0 " << std::endl;
      histoEfficienciesC[active]->Fill( 0 );
    }

    // b-vs-light discriminant.
    if (JNDAT1.OUT[ 0 ] + JNDAT1.OUT[ 2 ] >= 0)
    {
      histoEfficienciesL[active]->Fill( JNDAT1.OUT[ 0 ] /
                                        ( JNDAT1.OUT[ 0 ] + JNDAT1.OUT[ 2 ]),
                                        NWJNWGT.OWGT);
    }
    else
    {
      std::cout << " Filled 0 " << std::endl;
      histoEfficienciesL[active]->Fill( 0 );
    }

    if( m_Debug ) std::cout << "Testing [ " << iPattern << " ] - " << JNDAT1.OIN[ 0 ]
                            << " => " << JNDAT1.OUT[ 0 ] << std::endl;

  }// finish patterns

  if (m_Debug) std::cout << " Finished patterns... " << std::endl;

  // Persist the six histograms. NOTE(review): the TFile object itself is
  // never deleted (Close() releases the file handle but leaks the object).
  TFile* newFile=new TFile("test.root","recreate");
  histoEfficienciesL[0]->Write();
  histoEfficienciesL[1]->Write();
  histoEfficienciesL[2]->Write();
  histoEfficienciesC[0]->Write();
  histoEfficienciesC[1]->Write();
  histoEfficienciesC[2]->Write();
  newFile->Write();
  newFile->Close();

  //for C-jet rejection

  // u==0: c-jet rejection from the b-vs-c histograms;
  // u==1: light-jet rejection from the b-vs-light histograms.
  for (int u=0;u<2;u++)
  {
    vector<TH1F*>* myVectorHistos;  // always set below since u is 0 or 1
    if (u==0)
    {
      std::cout << "c-rej --> ";
      myVectorHistos=&histoEfficienciesC;
    }
    if (u==1)
    {
      std::cout << "l-rej --> ";
      myVectorHistos=&histoEfficienciesL;
    }


    if (m_Debug) std::cout << " 1 " << std::endl;

    Double_t allb=(*myVectorHistos)[0]->GetEntries();
    Double_t allc=(*myVectorHistos)[1]->GetEntries();
    Double_t allu=(*myVectorHistos)[2]->GetEntries();

    if (m_Debug) std::cout << " allb " << allb << std::endl;

    Double_t allbsofar=0;

    // For each working point: the bin (counted from the high edge) at
    // which the integrated b-efficiency first exceeds the target.
    vector<int> binN_Eff;
    vector<bool> ok_eff;

    for (int r=0;r<eff.size();r++)
    {
      ok_eff.push_back(false);
      binN_Eff.push_back(0);
    }

    // Scan the b histogram from the highest bin downwards, accumulating
    // b entries until every working point has been passed.
    for (int s=0;s<(*myVectorHistos)[0]->GetNbinsX()+1;s++) {
      allbsofar+=(*myVectorHistos)[0]->GetBinContent((*myVectorHistos)[0]->GetNbinsX()+1-s);
      bool nothingMore(true);


      for (int r=0;r<eff.size();r++)
      {
        if (m_Debug) std::cout << " actual eff: " << allbsofar / allb << std::endl;

        if ((!ok_eff[r]) && allbsofar / allb > eff[r])
        {
          binN_Eff[r]=s;
          ok_eff[r]=true;
          if (m_Debug) std::cout << " bin: " << s << " eff: " << allbsofar / allb << std::endl;
        }
        else if (allbsofar / allb <= eff[r])
        {
          nothingMore=false;
        }
      }
      if (nothingMore) break;
    }


    // Print, per working point, the achieved efficiency and the inverse
    // background efficiencies (rejections) for c and light jets.
    for (int r=0;r<eff.size();r++)
    {

      std::cout << " " << eff[r];

      std::cout << " check: " << (double)(*myVectorHistos)[0]->Integral((*myVectorHistos)[0]->GetNbinsX()-binN_Eff[r],
                                                                        (*myVectorHistos)[1]->GetNbinsX()+1)
        / (double)allb;

      double effc=(*myVectorHistos)[1]->Integral((*myVectorHistos)[0]->GetNbinsX()-binN_Eff[r],
                                                 (*myVectorHistos)[1]->GetNbinsX()+1);
      effc /= allc;
      double effl=(*myVectorHistos)[2]->Integral((*myVectorHistos)[0]->GetNbinsX()-binN_Eff[r],
                                                 (*myVectorHistos)[2]->GetNbinsX()+1);
      effl /= allu;

      if (effc!=0)
      {
        std::cout << " c: " << 1/effc;
      }
      if (effl!=0)
      {
        std::cout << " l: " << 1/effl;
      }

    }
    std::cout << std::endl;
  }

  // Free the histograms created above (same count as at creation).
  for( Int_t j = 0; j < GetOutputDim(); j++ )
  {
    delete histoEfficienciesC[j];
    delete histoEfficienciesL[j];
  }


  fMeanError/=2.*NPatterns;

  if (m_Debug)
    std::cout << " Test error: " << fMeanError << endl;

  return fMeanError;
}
615
616
617//______________________________________________________________________________
618Double_t TJetNet::Train( void )
619{
620 // Initiate the train phase for the neural network
621 Int_t NRight = 0;
622 Double_t fMeanError = 0.0;
623 Int_t NPatterns = GetTrainSetCnt();
624
625 // cout << " NPatterns is: " << NPatterns << endl;
626
627 Int_t inputDim=GetInputDim();
628 Int_t outputDim=GetOutputDim();
629 Int_t updatesPerEpoch=GetUpdatesPerEpoch();
630 Int_t patternsPerUpdate=GetPatternsPerUpdate();
631
632 if (updatesPerEpoch*patternsPerUpdate<1./2.*NPatterns)
633 {
634 cout << "Using only: " << updatesPerEpoch*patternsPerUpdate <<
635 " patterns on available: " << NPatterns << endl;
636 } else if (updatesPerEpoch*patternsPerUpdate>NPatterns)
637 {
638 cout << " Trying to use " << updatesPerEpoch*patternsPerUpdate <<
639 " patterns, but available: " << NPatterns << endl;
640 return -100;
641 }
642
643 for( Int_t iPattern = 0; iPattern < updatesPerEpoch*patternsPerUpdate;
644 iPattern++ )
645 {
646 for( Int_t i = 0; i < inputDim; i++ )
647 {
648 JNDAT1.OIN[ i ] = float ( GetInputTrainSet( iPattern, i ) );
649 }
650
651 NWJNWGT.OWGT = GetEventWeightTrainSet( iPattern );
652
653 for( Int_t j = 0; j < outputDim; j++ )
654 {
655 JNDAT1.OUT[ j ] = float ( GetOutputTrainSet( iPattern, j ) );
656 }
657
658 JNTRAL();
659 }
660
661 return GetPARJN(8);
662}
663//______________________________________________________________________________
664void TJetNet::writeNetworkInfo(Int_t typeOfInfo)
665{
666 cout << " Invoking info of type: " << typeOfInfo << endl;
667 JNSTAT(typeOfInfo);
668}
669//______________________________________________________________________________
void TJetNet::Init( void )
{
  // Initializes the neuaral network: copies the layer layout into the
  // JETNET common block and calls JNINIT to build/reset the weights.
  Int_t i;
  JNDAT1.MSTJN[ 0 ] = m_LayerCount; // Set the number of layers

  // Set the number of nodes for each layer
  for( i = 0; i < m_LayerCount; i++ )
  {
    if ( m_Debug ) std::cout << "Layer " << i + 1 << " has " << m_pLayers[ i ] << " units." << std::endl;
    JNDAT1.MSTJN[ 9 + i ] = m_pLayers[ i ];
  }

  cout << " calling JNINIT " << endl;
  JNINIT();

  // NOTE(review): this rendering elided the condition guarding the block
  // below (presumably "if (m_NormalizeOutput)") and the statement inside
  // it (presumably the Potts-unit setup) -- verify against the repository.
  {
    std::cout << " Setting to normalize output nodes: POTT nodes " << std::endl;
  }

  cout << " finishing calling JNINIT " << endl;
  m_IsInitialized = kTRUE;
}
695//______________________________________________________________________________
Int_t TJetNet::Epoch( void )
{
  // Initiate one train/test step the network.
  // Trains while epochs remain and prints the test error every second
  // epoch; returns the current epoch counter.

  Double_t aTrain, aTest;
  if ( m_CurrentEpoch < m_Epochs )
  {
    // NOTE(review): a source line is elided here in this rendering
    // (presumably "m_CurrentEpoch++;") -- verify against the repository.
    aTrain = Train();

    if ( m_Debug )
    {


      std::cout << "[ " << m_CurrentEpoch << " ] Train: " << aTrain << std::endl;
    }
    // Evaluate on the test set only every second epoch.
    if ( ( m_CurrentEpoch % 2 ) == 0 )
    {
      aTest = Test();
      std::cout << "[" << m_CurrentEpoch << "]: " << GetPARJN(8) << " ";
      std::cout << "Test: " << aTest << std::endl;
    }
  }
  return m_CurrentEpoch;
}
727//______________________________________________________________________________
728void TJetNet::SetInputTrainSet( Int_t aPatternInd, Int_t aInputInd, Double_t aValue )
729{
730 // Changes the value of the cell corresponding to unit aInputInd in pattern aPatternInd into INPUT TRAIN set
731 m_pInputTrainSet->SetData( aPatternInd, aInputInd, aValue );
732}
733//______________________________________________________________________________
734void TJetNet::SetOutputTrainSet( Int_t aPatternInd, Int_t aOutputInd, Double_t aValue )
735{
736 // Changes the value of the cell corresponding to unit aInputInd in pattern aPatternInd into OUTPUT TRAIN set
737 m_pOutputTrainSet->SetData( aPatternInd, aOutputInd, aValue );
738}
739//______________________________________________________________________________
740void TJetNet::SetInputTestSet( Int_t aPatternInd, Int_t aInputInd, Double_t aValue )
741{
742 // Changes the value of the cell corresponding to unit aInputInd in pattern aPatternInd into INPUT TEST set
743 m_pInputTestSet->SetData( aPatternInd, aInputInd, aValue );
744}
745//______________________________________________________________________________
746Double_t TJetNet::GetOutputTrainSet( Int_t aPatternInd, Int_t aOutputInd )
747{
748 // Returns the value of the cell corresponding to unit aInputInd in pattern aPatternInd into OUTPUT TRAIN set
749 return m_pOutputTrainSet->GetData( aPatternInd, aOutputInd );
750}
751//______________________________________________________________________________
752void TJetNet::SetEventWeightTrainSet( Int_t aPatternInd, Double_t aValue )
753{
754 m_pInputTrainSet->SetEventWeight(aPatternInd,aValue);
755}
756//______________________________________________________________________________
757
758void TJetNet::SetEventWeightTestSet( Int_t aPatternInd, Double_t aValue )
759{
760 m_pInputTestSet->SetEventWeight(aPatternInd,aValue);
761}
762//______________________________________________________________________________
763Double_t TJetNet::GetInputTestSet( Int_t aPatternInd, Int_t aInputInd )
764{
765 // Returns the value of the cell corresponding to unit aInputInd in pattern aPatternInd into INPUT TEST set
766 return m_pInputTestSet->GetData( aPatternInd, aInputInd );
767}
768//______________________________________________________________________________
769Double_t TJetNet::GetOutputTestSet( Int_t aPatternInd, Int_t aOutputInd )
770{
771 // Returns the value of the cell corresponding to unit aInputInd in pattern aPatternInd into OUTPUT TEST set
772 return m_pOutputTestSet->GetData( aPatternInd, aOutputInd );
773}
774//______________________________________________________________________________
775void TJetNet::SaveDataAscii( TString aFileName )
776{
777 // Saves the Input/Output test and train data in plain text file
778 ofstream out;
779 int i, j;
780
781 // Open ASCII file
782 out.open( aFileName );
783
784 //Write the number of layers, including the input and output
785 out << m_LayerCount << std::endl;
786
787 // Write into the file the number of units in input, hidden and output layers
788 for ( i = 0; i < m_LayerCount; i++ ) out << m_pLayers[ i ] << " ";
789 out << std::endl;
790
791 // Write the size of Train and Test sets
792 out << m_TrainSetCnt << " " << m_TestSetCnt << std::endl;
793
794 // Dump the Train set : Input1 Input2 ... InputN Output1 Output2 ... OutputN
795 for ( i = 0; i < m_TrainSetCnt; i++ )
796 {
797 out << GetInputTrainSet( i, 0 );
798 for( j = 1; j < m_pLayers[ 0 ]; j++ ) out << " " << GetInputTrainSet( i, j );
799 for( j = 0; j < m_pLayers[ m_LayerCount - 1 ]; j++ ) out << " " << GetOutputTrainSet( i, j );
800 out << std::endl;
801 }
802
803 // Dump the Test set : Input1 Input2 ... InputN Output1 Output2 ... OutputN
804 for ( i = 0; i < m_TestSetCnt; i++ )
805 {
806 out << GetInputTestSet( i, 0 );
807 for( j = 1; j < m_pLayers[ 0 ]; j++ ) out << " " << GetInputTestSet( i, j );
808 for( j = 0; j < m_pLayers[ m_LayerCount - 1 ]; j++ ) out << " " << GetOutputTestSet( i, j );
809 out << std::endl;
810 }
811 // Close the file
812 out.close();
813}
814//______________________________________________________________________________
void TJetNet::LoadDataAscii( TString aFileName )
{
  // Loads the input/output test/train data from plain text file
  // (the format produced by SaveDataAscii: topology header, set sizes,
  // then one pattern per line, train set first).
  ifstream in;
  int i, j, k, l, m;  // NOTE(review): k is unused
  int aiParam[ 5 ];//iTrainCount, iTestCount, iInputDim, iHiddenDim, iOutputDim;
  // NOTE(review): aiParam and bFlag are set/declared but never used.
  Bool_t bFlag;
  Double_t tmp;
  Int_t iPatternLength;

  in.open( aFileName );
  bFlag = Bool_t( in.is_open() );
  if ( in )
  {
    in >> m_LayerCount;
    if( m_Debug ){ std::cout << "Layers Count Set to " << m_LayerCount << std::endl;}
    i = 0;

    // Replace the previous topology with the one from the file.
    delete [] m_pLayers;
    m_pLayers = new Int_t[ m_LayerCount ];

    if( m_Debug ){ std::cout << "Updating the Layers Nodes Counters..." << std::endl; }
    while( ( i < m_LayerCount ) && ( !in.eof() ) )
    {
      in >> m_pLayers[ i ];
      if( m_Debug ){ std::cout << "Layer [ " << i + 1 << " ] has " << m_pLayers[ i ] << " units" << std::endl; }
      i++;
    }

    m_InputDim = m_pLayers[ 0 ];
    // NOTE(review): source lines elided here in this rendering
    // (presumably "m_OutputDim = m_pLayers[ m_LayerCount - 1 ];" and
    // related bookkeeping) -- verify against the repository copy.

    //Get the patterns count per line
    iPatternLength = m_InputDim + m_OutputDim;
    if( m_Debug ){ std::cout << "Patterns per line = " << iPatternLength << std::endl; }
    in >> m_TrainSetCnt;
    if( m_Debug ){ std::cout << "Train Set has " << m_TrainSetCnt << " patterns." << std::endl; }
    in >> m_TestSetCnt;
    if( m_Debug ){ std::cout << "Test Set has " << m_TestSetCnt << " patterns." << std::endl; }

    delete m_pInputTestSet;
    delete m_pInputTrainSet;
    delete m_pOutputTestSet;
    delete m_pOutputTrainSet;

    // NOTE(review): source lines elided here in this rendering
    // (presumably re-allocation of the four TNeuralDataSet objects with
    // the sizes just read) -- verify against the repository copy.

    i = 0;
    j = 0;

    // Read patterns row by row: first m_TrainSetCnt rows fill the train
    // set, the remaining rows fill the test set; within a row the first
    // m_InputDim values are inputs, the rest are target outputs.
    while( ( i < ( m_TrainSetCnt + m_TestSetCnt ) ) && ( !in.eof() ) )
    {
      j = 0;
      while( ( j < iPatternLength ) && ( !in.eof() ) )
      {
        if( i < m_TrainSetCnt )
        {
          if( j < m_InputDim )
          {
            //Train Input Set
            in >> tmp;
            SetInputTrainSet( i, j, tmp );
          }
          else
          {
            //Train Output Set
            m = j - m_InputDim;
            in >> tmp;
            SetOutputTrainSet( i, m, tmp );
          }
        }
        else
        {
          l = i - m_TrainSetCnt;
          if( j < m_InputDim )
          {
            //Test Input Set
            in >> tmp;
            SetInputTestSet( l, j, tmp );
          }
          else
          {
            //Test Output Set
            m = j - m_InputDim;
            in >> tmp;
            SetOutputTestSet( l, m, tmp );
          }

        }
        j++;
      }
      i++;
    }
  }
  in.close();
}
915//______________________________________________________________________________
void TJetNet::SaveDataRoot( TString aFileName )
{
  // Saves the neural network in ROOT file
  // NOTE(review): not implemented -- the body is empty and aFileName is
  // unused; calling this is a silent no-op.
}
920//______________________________________________________________________________
void TJetNet::LoadDataRoot( TString aFileName )
{
  // Loads the neural network from ROOT file
  // NOTE(review): not implemented -- the body is empty and aFileName is
  // unused; calling this is a silent no-op.
}
925//______________________________________________________________________________
// NOTE(review): the signature line is elided in this rendering --
// presumably "void TJetNet::Evaluate( void )".
{
  //evaluates directly the input provided through SetInputs()
  JNTEST();
}
931//______________________________________________________________________________
932void TJetNet::Evaluate( Int_t aPattern )
933{
934 // Evaluates the network output form the input data specified by the Test Pattern
935 for( Int_t i = 0; i < GetInputDim(); i++ )
936 {
937 JNDAT1.OIN[ i ] = float ( GetInputTestSet( aPattern, i ) );
938 }
939 JNTEST();
940}
941//______________________________________________________________________________
942void TJetNet::SetInputs( Int_t aIndex, Double_t aValue )
943{
944 // Directly sets the inputs of the network
945 JNDAT1.OIN[ aIndex ] = float ( aValue );
946}
947//______________________________________________________________________________
948Double_t TJetNet::GetOutput( Int_t aIndex )
949{
950 // Returns the output of the network
951 return Double_t ( JNDAT1.OUT[ aIndex ] );
952}
953//______________________________________________________________________________
void TJetNet::DumpToFile( TString aFileName )
{
  // Dumps the network data into JETNET specific format.
  // JNDUMP(-8) writes the network to Fortran unit 8 (file "./fort.8");
  // the unit is then closed through the POSIX descriptor and the file
  // renamed to the requested name.
  // NOTE(review): close(8) assumes the Fortran unit number maps to OS
  // file descriptor 8 -- fragile; verify on the target runtime.
  JNDUMP( -8 );
  std::cout << close( 8 ) << std::endl;
  rename( "./fort.8", aFileName );
}
961//______________________________________________________________________________
void TJetNet::ReadFromFile( TString aFileName )
{
  // Loads the network from JETNET specific file.
  // The file is temporarily renamed to "./fort.12" so that JNREAD(-12)
  // can read it from Fortran unit 12; Reinitialize() then rebuilds this
  // object's bookkeeping from the restored common blocks, and the file
  // is renamed back to its original name.
  rename( aFileName, "./fort.12" );
  JNREAD( -12 );
  Reinitialize();
  rename( "./fort.12", aFileName );
  //std::cout << close( 12 ) << std::endl;
}
971//______________________________________________________________________________
972Double_t TJetNet::GetWeight( Int_t aLayerInd, Int_t aNodeInd, Int_t aConnectedNodeInd ) const
973{
974 // Returns the node weight in specific Layer
975 return Double_t ( JNINT1.W[ JNINDX( aLayerInd, aNodeInd, aConnectedNodeInd )-1 ] );
976 //GP: ONE HAS TO PAY ATTENTION TO THIS STUPID -1!!!
977}
978//______________________________________________________________________________
979Double_t TJetNet::GetThreshold( Int_t aLayerInd, Int_t aNodeInd) const
980{
981 //Returns the node threshold in the specific layer
982 return Double_t ( JNINT1.T[ JNINDX( aLayerInd, aNodeInd, 0 )-1 ] );
983 //GP: ONE HAS TO PAY ATTENTION TO THIS STUPID -1!!!
984}
985//______________________________________________________________________________
986void TJetNet::SelectiveFields( Int_t aLayerA, Int_t aNodeA1, Int_t aNodeA2, Int_t aNodeB1, Int_t aNodeB2, Int_t aSwitch )
987{
988 // JetNet Selective Fields
989 Int_t tmp, i1, i2, j1, j2;
990
991 if( ( aLayerA > 0 ) && ( aLayerA < m_LayerCount ) )
992 {
993 i1 = TMath::Abs( aNodeA1 );
994 i2 = TMath::Abs( aNodeA2 );
995 j1 = TMath::Abs( aNodeB1 );
996 j2 = TMath::Abs( aNodeB2 );
997
998 if( i1 > i2 )
999 {
1000 tmp = i1;
1001 i1 = i2;
1002 i2 = i1;
1003 }//if
1004
1005 if( i1 > i2 )
1006 {
1007 tmp = i1;
1008 i1 = i2;
1009 i2 = i1;
1010 }//if
1011
1012 if( ( i1 < m_pLayers[ aLayerA ] ) && ( i2 < m_pLayers[ aLayerA ] ) &&
1013 ( j1 < m_pLayers[ aLayerA - 1 ] ) && ( j2 < m_pLayers[ aLayerA - 1 ] ) )
1014 {
1015 JNSEFI( aLayerA, i1, i2, j1, j2, aSwitch );
1016 }//if
1017 } //if
1018}
1019//______________________________________________________________________________
// NOTE(review): the signature line is elided in this rendering --
// presumably "void TJetNet::Reinitialize( void )".
{
  //Initializes the settings of the network
  // Rebuilds this object's layer bookkeeping from the JETNET common
  // block (used after JNREAD restores a network from file).
  Int_t i;

  m_LayerCount = JNDAT1.MSTJN[ 0 ]; // Set the number of layers

  delete [] m_pLayers;
  m_pLayers = new Int_t[ m_LayerCount ];

  // Set the number of nodes for each layer
  for( i = 0; i < m_LayerCount; i++ )
  {
    m_pLayers[ i ] = JNDAT1.MSTJN[ 9 + i ];
  }

  // NOTE(review): source lines elided here in this rendering (presumably
  // m_OutputDim and related updates) -- verify against the repository.

  m_InputDim = m_pLayers[ 0 ];


}
1047//______________________________________________________________________________
// NOTE(review): the signature line is elided in this rendering --
// presumably "void TJetNet::Normalize( void )".
{
  // Normalizes the inputs of both the train and the test set.
  m_pInputTrainSet->Normalize();
  m_pInputTestSet->Normalize();
}
1054//______________________________________________________________________________
// NOTE(review): the signature line is elided in this rendering --
// presumably "void TJetNet::Randomize( void )".
{
  // Randomizes Inputs and Outputs of both train and test sets
  m_pInputTrainSet->Randomize();
  m_pInputTestSet->Randomize();
  m_pOutputTrainSet->Randomize();
  m_pOutputTestSet->Randomize();
}
1063//______________________________________________________________________________
1064Int_t TJetNet::GetUnitCount( Int_t aLayer )
1065{
1066 // Returns the number of the units in specfic layer
1067 if( ( aLayer > -1 ) && ( aLayer < m_LayerCount ) )
1068 return JNDAT1.MSTJN[ 9 + aLayer ];
1069}
1070//______________________________________________________________________________
// NOTE(review): the signature line is elided in this rendering --
// presumably "void TJetNet::SetUpdatesPerEpoch( Int_t aValue )".
{
  // Sets the number of the updates per epoch
  // Writes MSTJN(9) and re-initialises unless initialisation is locked.
  JNDAT1.MSTJN[ 8 ] = aValue;
  if( !m_InitLocked ) this->Init();
}
1077//______________________________________________________________________________
// NOTE(review): the signature line is elided in this rendering --
// presumably "void TJetNet::SetUpdatingProcedure( Int_t aValue )".
{
  // Set specific weights update function
  // Writes MSTJN(5) and re-initialises unless initialisation is locked.
  JNDAT1.MSTJN[ 4 ] = aValue;
  if( !m_InitLocked ) this->Init();
}
1084//______________________________________________________________________________
1085void TJetNet::SetErrorMeasure( Int_t aValue )
1086{
1087 JNDAT1.MSTJN[ 3 ] = aValue;
1088 if( !m_InitLocked ) this->Init();
1089}
1090//______________________________________________________________________________
// NOTE(review): the signature line is elided in this rendering --
// presumably "void TJetNet::SetActivationFunction( Int_t aValue )".
{
  // Set the kind of activation function used
  // Writes MSTJN(3) and re-initialises unless initialisation is locked.
  JNDAT1.MSTJN[ 2 ] = aValue;
  if( !m_InitLocked ) this->Init();
}
1097//______________________________________________________________________________
void TJetNet::SetActivationFunction( Int_t aValue, Int_t layerN )//layer 0 is first hidden layer
{
  // Set the kind of activation function used for one specific layer
  // (writes JETNET's per-layer IGFN array).
  // NOTE(review): the argument order looks suspect -- readBackTrainedNetwork
  // calls SetActivationFunction(nHidden, 4), which with this body writes
  // IGFN[4] = nHidden rather than IGFN[nHidden] = 4 (compare with
  // GetActivationFunction(layerN)). Verify the intended order before
  // relying on this.
  JNDAT2.IGFN[ layerN ] = aValue;
  if( !m_InitLocked ) this->Init();
}
1104//______________________________________________________________________________
// NOTE(review): the signature line is elided in this rendering --
// presumably "void TJetNet::SetPatternsPerUpdate( Int_t aValue )".
{
  // Writes MSTJN(2), the number of patterns per weight update, and
  // re-initialises unless initialisation is locked.
  JNDAT1.MSTJN[ 1 ] = aValue;
  if( !m_InitLocked ) this->Init();
}
1110//______________________________________________________________________________
1111void TJetNet::SetLearningRate( Double_t aValue )
1112{
1113 // Change the Learning Rate
1114 JNDAT1.PARJN[ 0 ] = aValue;
1115 if( !m_InitLocked ) this->Init();
1116}
1117//______________________________________________________________________________
1118void TJetNet::SetMomentum( Double_t aValue )
1119{
1120 JNDAT1.PARJN[ 1 ] = aValue;
1121 if( !m_InitLocked ) this->Init();
1122}
1123//______________________________________________________________________________
1124void TJetNet::SetInitialWeightsWidth( Double_t aValue )
1125{
1126 JNDAT1.PARJN[ 3 ] = aValue;
1127 if( !m_InitLocked ) this->Init();
1128}
1129//______________________________________________________________________________
1131{
1132 JNDAT1.PARJN[ 10 ] = aValue;
1133 if( !m_InitLocked ) this->Init();
1134}
1135//______________________________________________________________________________
1136void TJetNet::SetPottsUnits(Int_t aValue)
1137{
1138 JNINT2.IPOTT = aValue;
1139}
1140//_____________________________________________________________________________
1142{
1143 m_NormalizeOutput=isTrue;
1144}
1145//______________________________________________________________________________
1147{
1148 return JNDAT1.MSTJN[ 8 ];
1149}
1150//______________________________________________________________________________
1152{
1153 return JNDAT1.MSTJN[ 3 ];
1154}
1155//______________________________________________________________________________
1157{
1158 return JNDAT1.MSTJN[ 3 ];
1159}
1160//______________________________________________________________________________
1162{
1163 return JNDAT1.MSTJN[ 2 ];
1164}
1165//______________________________________________________________________________
1166
1167Int_t TJetNet::GetActivationFunction( Int_t layerN ) const
1168{
1169 return JNDAT2.IGFN[ layerN ];
1170}
1171//______________________________________________________________________________
1173{
1174 return JNDAT1.MSTJN[ 1 ];
1175}
1176//______________________________________________________________________________
1178{
1179 return JNDAT1.PARJN[ 0 ];
1180}
1181//______________________________________________________________________________
1182Double_t TJetNet::GetMomentum( void )
1183{
1184 return JNDAT1.PARJN[ 1 ];
1185}
1186//______________________________________________________________________________
1188{
1189 return JNDAT1.PARJN[ 3 ];
1190}
1191//______________________________________________________________________________
1193{
1194 return JNDAT1.PARJN[ 10 ];
1195}
1196//______________________________________________________________________________
1198{
1199 return JNINT2.IPOTT;
1200}
1201//______________________________________________________________________________
1202Int_t TJetNet::GetMSTJN( Int_t aIndex )
1203{
1204 return JNDAT1.MSTJN[ aIndex ];
1205}
1206//______________________________________________________________________________
1207Double_t TJetNet::GetPARJN( Int_t aIndex )
1208{
1209 return JNDAT1.PARJN[ aIndex ];
1210}
1211//______________________________________________________________________________
1212void TJetNet::SetMSTJN( Int_t aIndex, Int_t aValue )
1213{
1214 JNDAT1.MSTJN[ aIndex ] = aValue;
1215}
1216//______________________________________________________________________________
1217void TJetNet::SetPARJN( Int_t aIndex, Double_t aValue )
1218{
1219 JNDAT1.PARJN[ aIndex ] = aValue;
1220}
1221//______________________________________________________________________________
1222void TJetNet::Shuffle( Bool_t aShuffleTrainSet, Bool_t aShuffleTestSet )
1223{
1224 // Shuffles the train and/or test input/output sets
1225 TTimeStamp ts;
1226 Int_t Seed = ts.GetSec();
1227 if ( aShuffleTrainSet )
1228 {
1229
1230 m_pInputTrainSet->Shuffle( Seed );
1231 m_pOutputTrainSet->Shuffle( Seed );
1232 }
1233 //Shuffle Test Set
1234 if ( aShuffleTestSet )
1235 {
1236 Seed = ts.GetSec();
1237 m_pInputTestSet->Shuffle( Seed );
1238 m_pOutputTestSet->Shuffle( Seed );
1239 }
1240
1241 return;
1242}
1243
1244
1245//EOF
ClassImp(TJetNet) TJetNet
Definition TJetNet.cxx:17
Double_t GetEventWeightTrainSet(Int_t aPatternInd)
Definition TJetNet.h:208
Int_t GetPottsUnits()
Definition TJetNet.cxx:1197
void Randomize(void)
Definition TJetNet.cxx:1055
void SetInputs(Int_t aIndex=0, Double_t aValue=0.0)
Definition TJetNet.cxx:942
virtual ~TJetNet(void)
Definition TJetNet.cxx:91
void NormalizeOutput(bool isTrue=true)
Definition TJetNet.cxx:1141
void mSetWeight(Double_t weight, Int_t aLayerInd, Int_t aNodeInd, Int_t aConnectedNodeInd)
Definition TJetNet.cxx:285
Int_t GetInputDim(void) const
Definition TJetNet.h:54
Int_t Epoch(void)
Definition TJetNet.cxx:696
TNeuralDataSet * m_pInputTrainSet
Array which contains the number of units in each layer.
Definition TJetNet.h:175
void Init(void)
Definition TJetNet.cxx:670
Int_t m_LayerCount
Definition TJetNet.h:172
Int_t m_Epochs
Definition TJetNet.h:185
Double_t GetLearningRateDecrease(void)
Definition TJetNet.cxx:1192
void SetMomentum(Double_t aValue)
Definition TJetNet.cxx:1118
void SetPottsUnits(Int_t aValue)
Definition TJetNet.cxx:1136
void Normalize(void)
Definition TJetNet.cxx:1048
Int_t GetUpdatingProcedure(void)
Definition TJetNet.cxx:1151
Int_t m_InputDim
Definition TJetNet.h:182
void SetLearningRateDecrease(Double_t aValue)
Definition TJetNet.cxx:1130
void LoadDataRoot(TString aFileName="jndata.root")
Definition TJetNet.cxx:921
TNeuralDataSet * m_pOutputTrainSet
Definition TJetNet.h:176
void SetErrorMeasure(Int_t aValue)
Definition TJetNet.cxx:1085
TActivationFunction m_enActFunction
Definition TJetNet.h:170
void SetOutputTestSet(Int_t aPatternInd, Int_t aOutputInd, Double_t aValue)
Definition TJetNet.h:196
Bool_t m_InitLocked
Definition TJetNet.h:189
Int_t m_HiddenLayerDim
Definition TJetNet.h:183
Int_t GetTrainSetCnt(void) const
Definition TJetNet.h:52
void Print(void)
Definition TJetNet.cxx:295
Double_t GetMomentum(void)
Definition TJetNet.cxx:1182
void SetInitialWeightsWidth(Double_t aValue)
Definition TJetNet.cxx:1124
Bool_t m_Debug
Definition TJetNet.h:187
void SetEventWeightTestSet(Int_t aPatternInd, Double_t aValue)
Definition TJetNet.cxx:758
Int_t m_TestSetCnt
Definition TJetNet.h:180
Int_t GetPatternsPerUpdate(void)
Definition TJetNet.cxx:1172
Double_t Test(void)
Definition TJetNet.cxx:320
Double_t GetOutput(Int_t aIndex=0)
Definition TJetNet.cxx:948
Double_t GetThreshold(Int_t aLayerInd, Int_t aNodeInd) const
Definition TJetNet.cxx:979
void DumpToFile(TString aFileName="fort.8")
Definition TJetNet.cxx:954
Int_t GetTestSetCnt(void) const
Definition TJetNet.h:53
Double_t GetInitialWeightsWidth(void)
Definition TJetNet.cxx:1187
Int_t m_TrainSetCnt
Definition TJetNet.h:180
Int_t GetHiddenLayerSize(Int_t number) const
Definition TJetNet.h:56
void SetInputTrainSet(Int_t aPatternInd, Int_t aInputInd, Double_t aValue)
Definition TJetNet.cxx:728
Int_t GetHiddenLayerDim(void) const
Definition TJetNet.h:55
void SetEpochs(const Int_t aEpochs)
Definition TJetNet.h:79
void SetLearningRate(Double_t aValue)
Definition TJetNet.cxx:1111
Double_t GetOutputTestSet(Int_t aPatternInd, Int_t aOutputInd)
Definition TJetNet.cxx:769
Double_t TestBTAG(void)
Definition TJetNet.cxx:362
Int_t GetUpdatesPerEpoch(void)
Definition TJetNet.cxx:1146
Double_t GetInputTrainSet(Int_t aPatternInd, Int_t aInputInd)
Definition TJetNet.h:202
void mSetThreshold(Double_t threshold, Int_t aLayerInd, Int_t aNodeInd)
Definition TJetNet.cxx:290
Int_t GetEpochs(void)
Definition TJetNet.h:78
Int_t * m_pLayers
Definition TJetNet.h:173
void writeNetworkInfo(Int_t typeOfInfo=0)
Definition TJetNet.cxx:664
void SaveDataAscii(TString aFileName="jndata.dat")
Definition TJetNet.cxx:775
Double_t Train(void)
Definition TJetNet.cxx:618
void SetMSTJN(Int_t aIndex, Int_t aValue)
Definition TJetNet.cxx:1212
Bool_t m_NormalizeOutput
Definition TJetNet.h:190
void SetActivationFunction(Int_t aValue)
Definition TJetNet.cxx:1091
void ReadFromFile(TString aFileName="fort.8")
Definition TJetNet.cxx:962
void Shuffle(Bool_t aShuffleTrainSet=true, Bool_t aShuffleTestSet=true)
Definition TJetNet.cxx:1222
Int_t GetErrorMeasure(void)
Definition TJetNet.cxx:1156
void SetUpdatingProcedure(Int_t aValue)
Definition TJetNet.cxx:1078
Double_t GetInputTestSet(Int_t aPatternInd, Int_t aInputInd)
Definition TJetNet.cxx:763
Double_t GetWeight(Int_t aLayerInd, Int_t aNodeInd, Int_t aConnectedNodeInd) const
Definition TJetNet.cxx:972
void readBackTrainedNetwork(const TTrainedNetwork *)
Definition TJetNet.cxx:207
void Evaluate()
Definition TJetNet.cxx:926
TNeuralDataSet * m_pOutputTestSet
Definition TJetNet.h:178
void SetPARJN(Int_t aIndex, Double_t aValue)
Definition TJetNet.cxx:1217
Int_t m_CurrentEpoch
Definition TJetNet.h:186
Int_t GetMSTJN(Int_t aIndex)
Definition TJetNet.cxx:1202
Double_t GetLearningRate(void)
Definition TJetNet.cxx:1177
void Reinitialize(void)
Definition TJetNet.cxx:1020
void SetInputTestSet(Int_t aPatternInd, Int_t aInputInd, Double_t aValue)
Definition TJetNet.cxx:740
void SetUpdatesPerEpoch(Int_t aValue)
Definition TJetNet.cxx:1071
Double_t GetOutputTrainSet(Int_t aPatternInd, Int_t aOutputInd)
Definition TJetNet.cxx:746
void SetEventWeightTrainSet(Int_t aPatternInd, Double_t aValue)
Definition TJetNet.cxx:752
Bool_t m_IsInitialized
Definition TJetNet.h:188
TNeuralDataSet * m_pInputTestSet
Definition TJetNet.h:177
void SetOutputTrainSet(Int_t aPatternInd, Int_t aOutputInd, Double_t aValue)
Definition TJetNet.cxx:734
Double_t GetPARJN(Int_t aIndex)
Definition TJetNet.cxx:1207
Double_t GetEventWeightTestSet(Int_t aPatternInd)
Definition TJetNet.h:213
Int_t GetUnitCount(Int_t aLayer)
Definition TJetNet.cxx:1064
TTrainedNetwork * createTrainedNetwork() const
Definition TJetNet.cxx:104
void SelectiveFields(Int_t aLayerA, Int_t aNodeA1, Int_t aNodeA2, Int_t aNodeB1, Int_t aNodeB2, Int_t aSwitch=0)
Definition TJetNet.cxx:986
void LoadDataAscii(TString aFileName="jndata.dat")
Definition TJetNet.cxx:815
Int_t GetOutputDim(void) const
Definition TJetNet.h:57
Int_t GetActivationFunction(void) const
Definition TJetNet.cxx:1161
void SaveDataRoot(TString aFileName="jndata.root")
Definition TJetNet.cxx:916
TJetNet(void)
Int_t m_OutputDim
Definition TJetNet.h:184
@ afSigmoid
Definition TJetNet.h:151
void SetPatternsPerUpdate(Int_t aValue)
Definition TJetNet.cxx:1105
STL class.
int ts
Definition globals.cxx:24
int r
Definition globals.cxx:22
#define JNDAT2
Definition jetnet.h:65
#define JNINT2
Definition jetnet.h:83
#define JNINT1
Definition jetnet.h:103
#define NWJNWGT
Definition jetnet.h:51
#define JNINIT()
Definition jetnet.h:134
#define JNSTAT(IS)
Definition jetnet.h:167
#define JNTEST()
Definition jetnet.h:148
#define JNINDX(IL, I, J)
Definition jetnet.h:112
#define JNSEFI(ILA, I1, I2, J1, J2, NO)
Definition jetnet.h:173
#define JNTRAL()
Definition jetnet.h:141
#define JNDUMP(NF)
Definition jetnet.h:154
#define JNDAT1
Definition jetnet.h:42
#define JNREAD(NF)
Definition jetnet.h:161