#include <SaltModel.h>
◆ SaltModel()

SaltModel::SaltModel(const std::string & name)
◆ ~SaltModel()

SaltModel::~SaltModel() = default
◆ initialize()

void SaltModel::initialize()
Definition at line 17 of file OnnxUtil.cxx.
22   m_env = std::make_unique< Ort::Env >(ORT_LOGGING_LEVEL_WARNING, "");
25   Ort::SessionOptions session_options;
26   session_options.SetIntraOpNumThreads(1);
27   session_options.SetGraphOptimizationLevel(GraphOptimizationLevel::ORT_ENABLE_EXTENDED);
30   m_session = std::make_unique< Ort::Session >(*m_env, fullPathToFile.c_str(), session_options);
31   Ort::AllocatorWithDefaultOptions allocator;
34   size_t num_input_nodes = m_session->GetInputCount();
37   for (std::size_t i = 0; i < num_input_nodes; i++) {
38     auto input_name = m_session->GetInputNameAllocated(i, allocator);
43   size_t num_output_nodes = m_session->GetOutputCount();
44   std::vector<int64_t> output_node_dims;
47   for (std::size_t i = 0; i < num_output_nodes; i++) {
48     auto output_name = m_session->GetOutputNameAllocated(i, allocator);
52     Ort::TypeInfo type_info = m_session->GetOutputTypeInfo(i);
53     auto tensor_info = type_info.GetTensorTypeAndShapeInfo();
55     output_node_dims = tensor_info.GetShape();
◆ runInference() [1/2]

void SaltModel::runInference(const std::vector< std::vector< float >> & node_feat, std::vector< float > & effAllJet) const
Definition at line 64 of file OnnxUtil.cxx.
72   std::vector<float> input_tensor_values;
73   std::vector<int64_t> input_node_dims = {1, static_cast<int>(node_feat.size()), static_cast<int>(node_feat.at(0).size())};
75   for (const auto& it : node_feat){
76     input_tensor_values.insert(input_tensor_values.end(), it.begin(), it.end());
80   auto memory_info = Ort::MemoryInfo::CreateCpu(OrtArenaAllocator, OrtMemTypeDefault);
81   Ort::Value input_tensor = Ort::Value::CreateTensor<float>(memory_info, input_tensor_values.data(), input_tensor_values.size(), input_node_dims.data(), input_node_dims.size());
98   auto output_tensors = session.Run(Ort::RunOptions{nullptr}, input_node_names.data(), &input_tensor, input_node_names.size(), output_node_names.data(), output_node_names.size());
101  float* float_ptr = output_tensors.front().GetTensorMutableData<float>();
102  int num_jets = node_feat.size();
103  effAllJet = {float_ptr, float_ptr + num_jets};
◆ runInference() [2/2]

void SaltModel::runInference(const std::vector< std::vector< float >> & node_feat, std::vector< std::vector< float >> & effAllJetAllWp) const
Definition at line 108 of file OnnxUtil.cxx.
118  std::vector<float> input_tensor_values;
119  std::vector<int64_t> input_node_dims = {1, static_cast<int>(node_feat.size()), static_cast<int>(node_feat.at(0).size())};
121  for (auto& it : node_feat){
122    input_tensor_values.insert(input_tensor_values.end(), it.begin(), it.end());
126  auto memory_info = Ort::MemoryInfo::CreateCpu(OrtArenaAllocator, OrtMemTypeDefault);
127  Ort::Value input_tensor = Ort::Value::CreateTensor<float>(memory_info, input_tensor_values.data(), input_tensor_values.size(), input_node_dims.data(), input_node_dims.size());
144  auto output_tensors = session.Run(Ort::RunOptions{nullptr}, input_node_names.data(), &input_tensor, input_node_names.size(), output_node_names.data(), output_node_names.size());
147  float* float_ptr = output_tensors.front().GetTensorMutableData<float>();
149  int num_jets = node_feat.size();
151  for (int i=0; i<num_jets; i++){
152    std::vector<float> eff_one_jet_tmp;
154    eff_one_jet_tmp.push_back(float_ptr[i*m_num_wp+j]);
156    effAllJetAllWp.push_back(std::move(eff_one_jet_tmp));
◆ m_env

std::unique_ptr< Ort::Env > SaltModel::m_env

private

◆ m_input_node_names

std::vector<std::string> SaltModel::m_input_node_names

private

◆ m_num_wp

int SaltModel::m_num_wp {}

private

◆ m_output_node_names

std::vector<std::string> SaltModel::m_output_node_names

private

◆ m_path_to_onnx

std::string SaltModel::m_path_to_onnx

private

◆ m_session

std::unique_ptr< Ort::Session > SaltModel::m_session

private
The documentation for this class was generated from the following files: