ATLAS Offline Software
lwtDev::RecurrentStack Class Reference

#include <Stack.h>

Collaboration diagram for lwtDev::RecurrentStack:

Public Member Functions

 RecurrentStack (size_t n_inputs, const std::vector< LayerConfig > &layers)
 ~RecurrentStack ()
 RecurrentStack (RecurrentStack &)=delete
RecurrentStack & operator= (RecurrentStack &)=delete
MatrixXd scan (MatrixXd inputs) const
size_t n_outputs () const

Private Member Functions

size_t add_lstm_layers (size_t n_inputs, const LayerConfig &)
size_t add_gru_layers (size_t n_inputs, const LayerConfig &)
size_t add_bidirectional_layers (size_t n_inputs, const LayerConfig &)
size_t add_embedding_layers (size_t n_inputs, const LayerConfig &)

Private Attributes

std::vector< IRecurrentLayer * > m_layers
size_t m_n_outputs

Detailed Description
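RecurrentStack holds an ordered set of recurrent layers (LSTM, GRU, bidirectional wrappers and embedding layers) built from a vector of LayerConfig objects. The whole stack is applied to an input sequence with scan(), and the output width of the last layer is reported by n_outputs().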

Definition at line 173 of file Stack.h.

Constructor & Destructor Documentation

◆ RecurrentStack() [1/2]

lwtDev::RecurrentStack::RecurrentStack ( size_t n_inputs,
const std::vector< LayerConfig > & layers )

Definition at line 271 of file Stack.cxx.

{
  using namespace lwtDev;
  const size_t n_layers = layers.size();
  for (size_t layer_n = 0; layer_n < n_layers; layer_n++) {
    auto& layer = layers.at(layer_n);

    // add recurrent layers (LSTM, GRU, bidirectional wrappers, embeddings)
    if (layer.architecture == Architecture::LSTM) {
      n_inputs = add_lstm_layers(n_inputs, layer);
    } else if (layer.architecture == Architecture::GRU) {
      n_inputs = add_gru_layers(n_inputs, layer);
    } else if (layer.architecture == Architecture::BIDIRECTIONAL) {
      n_inputs = add_bidirectional_layers(n_inputs, layer);
    } else if (layer.architecture == Architecture::EMBEDDING) {
      n_inputs = add_embedding_layers(n_inputs, layer);
    } else {
      throw NNConfigurationException("found non-recurrent layer");
    }
  }
  m_n_outputs = n_inputs;
}
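A minimal usage sketch, assuming a std::vector<lwtDev::LayerConfig> has already been filled elsewhere (for example from a parsed network configuration). The helper name get_layer_configs and the input width of 8 are illustrative only:

#include <vector>
#include <Stack.h>   // lwtDev::RecurrentStack, lwtDev::LayerConfig

// Pre-filled recurrent layer configurations (assumption: LSTM, GRU,
// BIDIRECTIONAL or EMBEDDING only; any other architecture makes the
// constructor throw NNConfigurationException).
std::vector<lwtDev::LayerConfig> layer_configs = get_layer_configs(); // hypothetical helper

const size_t n_inputs = 8;                        // width of one input time step (illustrative)
lwtDev::RecurrentStack stack(n_inputs, layer_configs);
const size_t n_out = stack.n_outputs();           // output width of the last layer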

◆ ~RecurrentStack()

lwtDev::RecurrentStack::~RecurrentStack ( )

Definition at line 294 of file Stack.cxx.

{
  for (auto& layer: m_layers) {
    delete layer;
    layer = 0;
  }
}

◆ RecurrentStack() [2/2]

lwtDev::RecurrentStack::RecurrentStack ( RecurrentStack & )
delete

Member Function Documentation

◆ add_bidirectional_layers()

size_t lwtDev::RecurrentStack::add_bidirectional_layers ( size_t n_inputs,
const LayerConfig & layer )
private

Definition at line 344 of file Stack.cxx.

{
  // nasty hack to get our hands on the RNNs: create the RNN, fetch it from m_layers and finally pop it
  if (layer.sublayers.size() != 2)
    throw NNConfigurationException("Number of sublayers not matching expected number of 2 for bidirectional layers");
  const LayerConfig forward_layer_conf = layer.sublayers[0];
  const LayerConfig backward_layer_conf = layer.sublayers[1];
  size_t n_forward = 0;
  // fixing nasty -Wunused-but-set-variable warning
  (void) n_forward;
  size_t n_backward = 0;
  if (forward_layer_conf.architecture == Architecture::LSTM)
    n_forward = add_lstm_layers(n_inputs, forward_layer_conf);
  else if (forward_layer_conf.architecture == Architecture::GRU)
    n_forward = add_gru_layers(n_inputs, forward_layer_conf);
  else
    throw NNConfigurationException("Bidirectional forward layer type not supported");

  std::unique_ptr<IRecurrentLayer> forward_layer(m_layers.back());
  m_layers.pop_back();

  if (backward_layer_conf.architecture == Architecture::LSTM)
    n_backward = add_lstm_layers(n_inputs, backward_layer_conf);
  else if (backward_layer_conf.architecture == Architecture::GRU)
    n_backward = add_gru_layers(n_inputs, backward_layer_conf);
  else
    throw NNConfigurationException("Bidirectional backward layer type not supported");

  std::unique_ptr<IRecurrentLayer> backward_layer(m_layers.back());
  backward_layer->m_go_backwards = (!forward_layer->m_go_backwards);
  m_layers.pop_back();

  m_layers.push_back(new BidirectionalLayer(std::move(forward_layer),
                                            std::move(backward_layer),
                                            layer.merge_mode,
                                            layer.return_sequence));
  return n_backward;
}
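The shape this function expects can be summarised with a small hypothetical pre-check, shown only as a sketch of the validation performed above (it is not part of the class): a bidirectional LayerConfig must carry exactly two sublayers, each of which is an LSTM or a GRU.

#include <Stack.h>   // lwtDev::LayerConfig, lwtDev::Architecture

// Hypothetical helper mirroring the checks made in add_bidirectional_layers.
bool looks_like_valid_bidirectional(const lwtDev::LayerConfig& layer) {
  if (layer.sublayers.size() != 2) return false;   // need forward + backward sublayer
  for (const auto& sub : layer.sublayers) {
    if (sub.architecture != lwtDev::Architecture::LSTM &&
        sub.architecture != lwtDev::Architecture::GRU) return false;
  }
  return true;
}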

◆ add_embedding_layers()

size_t lwtDev::RecurrentStack::add_embedding_layers ( size_t n_inputs,
const LayerConfig & layer )
private

Definition at line 384 of file Stack.cxx.

{
  for (const auto& emb: layer.embedding) {
    size_t n_wt = emb.weights.size();
    size_t n_cats = n_wt / emb.n_out;
    MatrixXd mat = build_matrix(emb.weights, n_cats);
    m_layers.push_back(new EmbeddingLayer(emb.index, mat));
    n_inputs += emb.n_out - 1;
  }
  return n_inputs;
}
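A worked example of the width bookkeeping, with illustrative numbers: an embedding of 10 categories into 4 output dimensions stores 40 weights, so n_cats = 40 / 4 = 10, and the single categorical input column it consumes is replaced by 4 embedded columns, i.e. n_inputs grows by 4 - 1 = 3.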

◆ add_gru_layers()

size_t lwtDev::RecurrentStack::add_gru_layers ( size_t n_inputs,
const LayerConfig & layer )
private

Definition at line 330 of file Stack.cxx.

{
  auto& comps = layer.components;
  const auto& z = get_component(comps.at(Component::Z), n_inputs);
  const auto& r = get_component(comps.at(Component::R), n_inputs);
  const auto& h = get_component(comps.at(Component::H), n_inputs);
  m_layers.push_back(
    new GRULayer(layer.activation, layer.inner_activation,
                 z.W, z.U, z.b,
                 r.W, r.U, r.b,
                 h.W, h.U, h.b));
  return h.b.rows();
}

◆ add_lstm_layers()

size_t lwtDev::RecurrentStack::add_lstm_layers ( size_t n_inputs,
const LayerConfig & layer )
private

Definition at line 310 of file Stack.cxx.

{
  auto& comps = layer.components;
  const auto& i = get_component(comps.at(Component::I), n_inputs);
  const auto& o = get_component(comps.at(Component::O), n_inputs);
  const auto& f = get_component(comps.at(Component::F), n_inputs);
  const auto& c = get_component(comps.at(Component::C), n_inputs);
  const bool& go_backwards = layer.go_backwards;
  const bool& return_sequence = layer.return_sequence;
  m_layers.push_back(
    new LSTMLayer(layer.activation, layer.inner_activation,
                  i.W, i.U, i.b,
                  f.W, f.U, f.b,
                  o.W, o.U, o.b,
                  c.W, c.U, c.b,
                  go_backwards,
                  return_sequence));
  return o.b.rows();
}
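In both add_lstm_layers and add_gru_layers the returned value is the row count of one of the bias vectors, i.e. the number of units in the layer. The constructor feeds this back in as n_inputs for the next layer, and the last such value ends up in m_n_outputs.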

◆ n_outputs()

size_t lwtDev::RecurrentStack::n_outputs ( ) const

Definition at line 306 of file Stack.cxx.

{
  return m_n_outputs;
}

◆ operator=()

RecurrentStack & lwtDev::RecurrentStack::operator= ( RecurrentStack & )
delete

◆ scan()

MatrixXd lwtDev::RecurrentStack::scan ( MatrixXd inputs) const

Definition at line 300 of file Stack.cxx.

{
  for (auto* layer: m_layers) {
    inputs = layer->scan(inputs);
  }
  return inputs;
}
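A minimal call sketch, continuing the constructor sketch above. The orientation (n_inputs rows, one column per time step) is an assumption based on how the recurrent layers in Stack.cxx consume their input, and n_time_steps is an illustrative name:

#include <Eigen/Dense>
using Eigen::MatrixXd;

// Assumed orientation: rows = the n_inputs passed to the constructor,
// columns = time steps of the input sequence.
const size_t n_time_steps = 20;                     // illustrative
MatrixXd sequence = MatrixXd::Zero(n_inputs, n_time_steps);

MatrixXd out = stack.scan(sequence);                // rows = stack.n_outputs()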

Member Data Documentation

◆ m_layers

std::vector<IRecurrentLayer*> lwtDev::RecurrentStack::m_layers
private

Definition at line 183 of file Stack.h.

◆ m_n_outputs

size_t lwtDev::RecurrentStack::m_n_outputs
private

Definition at line 188 of file Stack.h.


The documentation for this class was generated from the following files:
Stack.h
Stack.cxx