[Class diagram for AthOnnx::IOnnxRuntimeInferenceTool]
Inheritance: IAlgTool <- asg::IAsgTool (+ ~IAsgTool(), + print()) <- AthOnnx::IOnnxRuntimeInferenceTool.
Public interface: + setBatchSize(), + getBatchSize(), + addInput(), + addOutput(), + inference(), + printModelInfo(); private helper: - createTensor().
Protected data members: # m_numInputs, # m_numOutputs, and the shape containers # m_inputShapes and # m_outputShapes, each of type std::vector< std::vector< int64_t > >.
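For orientation, a minimal usage sketch of the interface is given below. It only relies on the member names shown in the diagram; the exact method signatures (argument order, defaults, return types), the template arguments, the include paths, and the helper function runInference() are assumptions, not a verbatim copy of the header.

    // Sketch only: signatures and includes are assumed from the diagram,
    // not copied from the AthOnnxInterfaces header.
    #include <cstdint>
    #include <vector>
    #include <onnxruntime_cxx_api.h>                       // Ort::Value (assumed)
    #include "AsgTools/ToolHandle.h"                       // assumed include path
    #include "AthOnnxInterfaces/IOnnxRuntimeInferenceTool.h" // assumed include path

    // Hypothetical helper: feed one flat input vector through the tool and
    // collect one flat output vector.
    StatusCode runInference(ToolHandle<AthOnnx::IOnnxRuntimeInferenceTool>& onnxTool,
                            std::vector<float>& inputData,
                            std::vector<float>& outputData)
    {
        // Print the input/output names and shapes discovered from the ONNX model.
        onnxTool->printModelInfo();

        // Derive the batch size from the flattened input length and the model's
        // per-event input shape (assumed semantics of getBatchSize()), then fix it.
        int64_t batchSize = onnxTool->getBatchSize(static_cast<int64_t>(inputData.size()));
        onnxTool->setBatchSize(batchSize);

        // Bind the flat data vectors to the model's first input and first output
        // (assumed templated addInput()/addOutput() signatures filling Ort::Value lists).
        std::vector<Ort::Value> inputTensors;
        std::vector<Ort::Value> outputTensors;
        ATH_CHECK(onnxTool->addInput(inputTensors, inputData, 0, batchSize));
        ATH_CHECK(onnxTool->addOutput(outputTensors, outputData, 0, batchSize));

        // Run the ONNX Runtime session; results are written into outputData.
        ATH_CHECK(onnxTool->inference(inputTensors, outputTensors));
        return StatusCode::SUCCESS;
    }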