|
ATLAS Offline Software
|
Go to the documentation of this file.
18 ATH_MSG_INFO(
"The total no. of sample crossed the no. of available sample ....");
19 return StatusCode::FAILURE;
28 return StatusCode::SUCCESS;
34 std::vector<float> inputDataVector;
38 inputDataVector.insert(inputDataVector.end(), flatten.begin(), flatten.end());
40 std::vector<int64_t> inputShape = {
m_batchSize, 28, 28};
43 inputData[
"flatten_input"] = std::make_pair(
44 inputShape, std::move(inputDataVector)
48 outputData[
"dense_1/Softmax"] = std::make_pair(
49 std::vector<int64_t>{
m_batchSize, 10}, std::vector<float>{}
54 auto& outputScores = std::get<std::vector<float>>(outputData[
"dense_1/Softmax"].second);
55 auto inRange = [&outputScores](
int idx)->
bool{
return (
idx>=0) and (
idx<std::ssize(outputScores));};
57 for(
int ibatch = 0; ibatch <
m_batchSize; ibatch++){
60 for (
int i = 0;
i < 10;
i++){
61 ATH_MSG_DEBUG(
"Score for class "<<
i <<
" = "<<outputScores[
i] <<
" in batch " << ibatch);
62 int index =
i + ibatch * 10;
70 ATH_MSG_ERROR(
"No maximum found in EvaluateModelWithAthInfer::execute");
71 return StatusCode::FAILURE;
73 ATH_MSG_DEBUG(
"Class: "<<max_index<<
" has the highest score: "<<outputScores[max_index] <<
" in batch " << ibatch);
76 return StatusCode::SUCCESS;
ToolHandle< AthInfer::IAthInferenceTool > m_onnxTool
Tool handler for onnx inference session.
static std::string find_file(const std::string &logical_file_name, const std::string &search_path, SearchType search_type=LocalSearch)
virtual StatusCode execute(const EventContext &ctx) const override
Function executing the algorithm for a single event.
Gaudi::Property< int > m_batchSize
Following properties need to be considered if the .onnx model is evaluated in batch mode.
virtual StatusCode initialize() override
Function initialising the algorithm.
std::vector< std::vector< std::vector< float > > > m_input_tensor_values_notFlat
::StatusCode StatusCode
StatusCode definition for legacy code.
std::map< std::string, InferenceData > OutputDataMap
Gaudi::Property< std::string > m_pixelFileName
Name of the model file to load.
bool inRange(const double *boundaries, const double value, const double tolerance=0.02)
std::vector< std::vector< std::vector< float > > > read_mnist_pixel_notFlat(const std::string &full_path)
std::vector< T > flattenNestedVectors(const std::vector< std::vector< T >> &features)
std::map< std::string, InferenceData > InputDataMap
Namespace holding all of the Onnx Runtime example code.