ATLAS Offline Software
Public Member Functions | Private Member Functions | Private Attributes | List of all members
lwtDev::RecurrentStack Class Reference

#include <Stack.h>

Collaboration diagram for lwtDev::RecurrentStack:

Public Member Functions

 RecurrentStack (size_t n_inputs, const std::vector< LayerConfig > &layers)
 
 ~RecurrentStack ()
 
 RecurrentStack (RecurrentStack &)=delete
 
RecurrentStack & operator= (RecurrentStack &)=delete
 
MatrixXd scan (MatrixXd inputs) const
 
size_t n_outputs () const
 

Private Member Functions

size_t add_lstm_layers (size_t n_inputs, const LayerConfig &)
 
size_t add_gru_layers (size_t n_inputs, const LayerConfig &)
 
size_t add_bidirectional_layers (size_t n_inputs, const LayerConfig &)
 
size_t add_embedding_layers (size_t n_inputs, const LayerConfig &)
 

Private Attributes

std::vector< IRecurrentLayer * > m_layers
 
size_t m_n_outputs
 

Detailed Description

Definition at line 173 of file Stack.h.

Constructor & Destructor Documentation

◆ RecurrentStack() [1/2]

lwtDev::RecurrentStack::RecurrentStack ( size_t  n_inputs,
const std::vector< LayerConfig > &  layers 
)

Definition at line 271 of file Stack.cxx.

273  {
274  using namespace lwtDev;
275  const size_t n_layers = layers.size();
276  for (size_t layer_n = 0; layer_n < n_layers; layer_n++) {
277  auto& layer = layers.at(layer_n);
278 
279  // add recurrent layers (now LSTM and GRU!)
280  if (layer.architecture == Architecture::LSTM) {
281  n_inputs = add_lstm_layers(n_inputs, layer);
282  } else if (layer.architecture == Architecture::GRU) {
283  n_inputs = add_gru_layers(n_inputs, layer);
284  } else if (layer.architecture == Architecture::BIDIRECTIONAL) {
285  n_inputs = add_bidirectional_layers(n_inputs, layer);
286  } else if (layer.architecture == Architecture::EMBEDDING) {
287  n_inputs = add_embedding_layers(n_inputs, layer);
288  } else {
289  throw NNConfigurationException("found non-recurrent layer");
290  }
291  }
292  m_n_outputs = n_inputs;
293  }

◆ ~RecurrentStack()

lwtDev::RecurrentStack::~RecurrentStack ( )

Definition at line 294 of file Stack.cxx.

294  {
295  for (auto& layer: m_layers) {
296  delete layer;
297  layer = 0;
298  }
299  }

◆ RecurrentStack() [2/2]

lwtDev::RecurrentStack::RecurrentStack ( RecurrentStack & )
delete

Member Function Documentation

◆ add_bidirectional_layers()

size_t lwtDev::RecurrentStack::add_bidirectional_layers ( size_t  n_inputs,
const LayerConfig &  layer 
)
private

Definition at line 344 of file Stack.cxx.

345  {
346  // nasty hack to get the hands on RNNs: create RNN, fetch it from m_layers and finally pop it
347  if(layer.sublayers.size() != 2)
348  throw NNConfigurationException("Number of sublayers not matching expected number of 2 for bidirectional layers");
349  const LayerConfig forward_layer_conf = layer.sublayers[0];
350  const LayerConfig backward_layer_conf = layer.sublayers[1];
351  size_t n_forward = 0;
352  // fixing nasty -Wunused-but-set-variable warning
353  (void) n_forward;
354  size_t n_backward = 0;
355  if(forward_layer_conf.architecture == Architecture::LSTM)
356  n_forward = add_lstm_layers(n_inputs, forward_layer_conf);
357  else if(forward_layer_conf.architecture == Architecture::GRU)
358  n_forward = add_gru_layers(n_inputs, forward_layer_conf);
359  else
360  throw NNConfigurationException("Bidirectional forward layer type not supported");
361 
362  std::unique_ptr<IRecurrentLayer> forward_layer(m_layers.back());
363  m_layers.pop_back();
364 
365  if(backward_layer_conf.architecture == Architecture::LSTM)
366  n_backward = add_lstm_layers(n_inputs, backward_layer_conf);
367  else if(backward_layer_conf.architecture == Architecture::GRU)
368  n_backward = add_gru_layers(n_inputs, backward_layer_conf);
369  else
370  throw NNConfigurationException("Bidirectional backward layer type not supported");
371 
372  std::unique_ptr<IRecurrentLayer> backward_layer(m_layers.back());
373  backward_layer->m_go_backwards = (!forward_layer->m_go_backwards);
374  m_layers.pop_back();
375 
376  m_layers.push_back(new BidirectionalLayer(std::move(forward_layer),
377  std::move(backward_layer),
378  layer.merge_mode,
379  layer.return_sequence));
380  return n_backward;
381 
382  }

◆ add_embedding_layers()

size_t lwtDev::RecurrentStack::add_embedding_layers ( size_t  n_inputs,
const LayerConfig &  layer 
)
private

Definition at line 384 of file Stack.cxx.

385  {
386  for (const auto& emb: layer.embedding) {
387  size_t n_wt = emb.weights.size();
388  size_t n_cats = n_wt / emb.n_out;
389  MatrixXd mat = build_matrix(emb.weights, n_cats);
390  m_layers.push_back(new EmbeddingLayer(emb.index, mat));
391  n_inputs += emb.n_out - 1;
392  }
393  return n_inputs;
394  }

◆ add_gru_layers()

size_t lwtDev::RecurrentStack::add_gru_layers ( size_t  n_inputs,
const LayerConfig &  layer 
)
private

Definition at line 330 of file Stack.cxx.

331  {
332  auto& comps = layer.components;
333  const auto& z = get_component(comps.at(Component::Z), n_inputs);
334  const auto& r = get_component(comps.at(Component::R), n_inputs);
335  const auto& h = get_component(comps.at(Component::H), n_inputs);
336  m_layers.push_back(
337  new GRULayer(layer.activation, layer.inner_activation,
338  z.W, z.U, z.b,
339  r.W, r.U, r.b,
340  h.W, h.U, h.b));
341  return h.b.rows();
342  }

◆ add_lstm_layers()

size_t lwtDev::RecurrentStack::add_lstm_layers ( size_t  n_inputs,
const LayerConfig &  layer 
)
private

Definition at line 310 of file Stack.cxx.

311  {
312  auto& comps = layer.components;
313  const auto& i = get_component(comps.at(Component::I), n_inputs);
314  const auto& o = get_component(comps.at(Component::O), n_inputs);
315  const auto& f = get_component(comps.at(Component::F), n_inputs);
316  const auto& c = get_component(comps.at(Component::C), n_inputs);
317  const bool& go_backwards = layer.go_backwards;
318  const bool& return_sequence = layer.return_sequence;
319  m_layers.push_back(
320  new LSTMLayer(layer.activation, layer.inner_activation,
321  i.W, i.U, i.b,
322  f.W, f.U, f.b,
323  o.W, o.U, o.b,
324  c.W, c.U, c.b,
325  go_backwards,
326  return_sequence));
327  return o.b.rows();
328  }

◆ n_outputs()

size_t lwtDev::RecurrentStack::n_outputs ( ) const

Definition at line 306 of file Stack.cxx.

306  {
307  return m_n_outputs;
308  }

◆ operator=()

RecurrentStack& lwtDev::RecurrentStack::operator= ( RecurrentStack & )
delete

◆ scan()

MatrixXd lwtDev::RecurrentStack::scan ( MatrixXd  in) const

Definition at line 300 of file Stack.cxx.

300  {
301  for (auto* layer: m_layers) {
302  in = layer->scan(in);
303  }
304  return in;
305  }

Member Data Documentation

◆ m_layers

std::vector<IRecurrentLayer*> lwtDev::RecurrentStack::m_layers
private

Definition at line 183 of file Stack.h.

◆ m_n_outputs

size_t lwtDev::RecurrentStack::m_n_outputs
private

Definition at line 188 of file Stack.h.


The documentation for this class was generated from the following files:
lwtDev::Component::C
@ C
beamspotman.r
def r
Definition: beamspotman.py:676
lwtDev::GRULayer
Definition: Stack.h:278
lwtDev::Component::Z
@ Z
lwtDev::LSTMLayer
Definition: Stack.h:237
lwtDev::build_matrix
MatrixXd build_matrix(const std::vector< double > &weights, size_t n_inputs)
Definition: Stack.cxx:741
lwtDev::EmbeddingLayer
Definition: Stack.h:223
lwtDev::BidirectionalLayer
bidirectional unit
Definition: Stack.h:311
mat
GeoMaterial * mat
Definition: LArDetectorConstructionTBEC.cxx:55
module_driven_slicing.layers
layers
Definition: module_driven_slicing.py:114
lwtDev::Architecture::LSTM
@ LSTM
lwtDev::LayerConfig::architecture
Architecture architecture
Definition: NNLayerConfig.h:64
lwtDev::Component::I
@ I
lwtDev::NNConfigurationException
Definition: Reconstruction/tauRecTools/tauRecTools/lwtnn/Exceptions.h:21
lwtDev::LayerConfig
Definition: NNLayerConfig.h:46
lwtDev::RecurrentStack::add_embedding_layers
size_t add_embedding_layers(size_t n_inputs, const LayerConfig &)
Definition: Stack.cxx:384
lwtDev::Component::O
@ O
lwtDev::RecurrentStack::add_bidirectional_layers
size_t add_bidirectional_layers(size_t n_inputs, const LayerConfig &)
Definition: Stack.cxx:344
lumiFormat.i
int i
Definition: lumiFormat.py:85
z
#define z
h
TRT::Hit::layer
@ layer
Definition: HitInfo.h:79
lwtDev::Component::F
@ F
hist_file_dump.f
f
Definition: hist_file_dump.py:135
lwtDev
Definition: Reconstruction/tauRecTools/Root/lwtnn/Exceptions.cxx:8
lwtDev::RecurrentStack::add_lstm_layers
size_t add_lstm_layers(size_t n_inputs, const LayerConfig &)
Definition: Stack.cxx:310
lwtDev::Architecture::GRU
@ GRU
lwtDev::RecurrentStack::m_n_outputs
size_t m_n_outputs
Definition: Stack.h:188
lwtDev::get_component
DenseComponents get_component(const lwtDev::LayerConfig &layer, size_t n_in)
Definition: Stack.cxx:792
lwtDev::Architecture::BIDIRECTIONAL
@ BIDIRECTIONAL
lwtDev::Architecture::EMBEDDING
@ EMBEDDING
lwtDev::RecurrentStack::m_layers
std::vector< IRecurrentLayer * > m_layers
Definition: Stack.h:183
python.compressB64.c
def c
Definition: compressB64.py:93
lwtDev::Component::R
@ R
lwtDev::Component::H
@ H
lwtDev::RecurrentStack::add_gru_layers
size_t add_gru_layers(size_t n_inputs, const LayerConfig &)
Definition: Stack.cxx:330