Skip to content
Snippets Groups Projects
Commit 0ac0c9a2 authored by Xiangyang Ju's avatar Xiangyang Ju
Browse files

remove redundant packages

parent c3bf27d9
No related branches found
No related tags found
No related merge requests found
Showing
with 0 additions and 384 deletions
Control/AthOnnxruntimeService
/*
Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
*/
// Dictionary header: includes every class that needs a ROOT/Reflex
// dictionary for this package (the class list lives in selection.xml).
#ifndef ATHONNXRUNTIMESERVICE__ATHONNXRUNTIMESERVICE_DICT_H
#define ATHONNXRUNTIMESERVICE__ATHONNXRUNTIMESERVICE_DICT_H
#include "AthOnnxruntimeService/ONNXRuntimeSvc.h"
#endif
// Dear emacs, this is -*- c++ -*-
// Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
#ifndef ATHEXONNXRUNTIME_IONNXRUNTIMESVC_H
#define ATHEXONNXRUNTIME_IONNXRUNTIMESVC_H
// Gaudi include(s).
#include <AsgServices/IAsgService.h>
// ONNX include(s).
#include <core/session/onnxruntime_cxx_api.h>
/// Namespace holding all of the ONNX Runtime example code
namespace AthONNX {
//class IAsgService
/// Service used for managing global objects used by ONNX Runtime
///
/// In order to allow multiple clients to use ONNX Runtime at the same
/// time, this service is used to manage the objects that must only
/// be created once in the Athena process.
///
/// @author Attila Krasznahorkay <Attila.Krasznahorkay@cern.ch>
///
class IONNXRuntimeSvc : virtual public asg::IAsgService{
public:
/// Virtual destructor, to make vtable happy
virtual ~IONNXRuntimeSvc() = default;
/// Declare the interface that this class provides (interface ID, version 1.0)
DeclareInterfaceID (AthONNX::IONNXRuntimeSvc, 1, 0);
/// Return the ONNX Runtime environment object
/// @note Deliberately returns a non-const reference from a const method:
///       the Ort::Env is shared, mutable state owned by the service.
virtual Ort::Env& env() const = 0;
}; // class IONNXRuntimeSvc
} // namespace AthONNX
#endif // ATHEXONNXRUNTIME_IONNXRUNTIMESVC_H
// Dear emacs, this is -*- c++ -*-
// Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
#ifndef ATHONNXRUNTIMESERVICE_ONNXRUNTIMESVC_H
#define ATHONNXRUNTIMESERVICE_ONNXRUNTIMESVC_H
// Local include(s).
#include "AthOnnxruntimeService/IONNXRuntimeSvc.h"
// Framework include(s).
#include <AsgServices/AsgService.h>
// ONNX include(s).
#include <core/session/onnxruntime_cxx_api.h>
// System include(s).
#include <memory>
namespace AthONNX {
/// Service implementing @c AthONNX::IONNXRuntimeSvc
///
/// This is a very simple implementation, just managing the lifetime
/// of some ONNX Runtime C++ objects.
///
/// @author Attila Krasznahorkay <Attila.Krasznahorkay@cern.ch>
///
class ONNXRuntimeSvc : public asg::AsgService, virtual public IONNXRuntimeSvc {
public:
/// @name Function(s) inherited from @c Service
/// @{
/// Constructor: forwards to @c asg::AsgService and declares the
/// @c AthONNX::IONNXRuntimeSvc interface.
ONNXRuntimeSvc (const std::string& name, ISvcLocator* svc);
/// Function initialising the service (creates the Ort::Env object)
virtual StatusCode initialize() override;
/// Function finalising the service (destroys the Ort::Env object)
virtual StatusCode finalize() override;
/// @}
/// @name Function(s) inherited from @c AthONNX::IONNXRuntimeSvc
/// @{
/// Return the ONNX Runtime environment object
virtual Ort::Env& env() const override;
/// @}
private:
/// Global runtime environment for ONNX Runtime
/// (set in initialize(), reset in finalize())
std::unique_ptr< Ort::Env > m_env;
}; // class ONNXRuntimeSvc
} // namespace AthONNX
#endif // ATHONNXRUNTIMESERVICE_ONNXRUNTIMESVC_H
<lcgdict>
<class name="AthONNX::IONNXRuntimeSvc" />
<class name="AthONNX::ONNXRuntimeSvc" />
</lcgdict>
# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
# Declare the package's name.
atlas_subdir( AthOnnxruntimeService )
# External dependencies.
find_package( onnxruntime )
# Component(s) in the package.
# Library shared between the standalone and full-Athena builds.
atlas_add_library( AthOnnxruntimeServiceLib
AthOnnxruntimeService/*.h Root/*.cxx
PUBLIC_HEADERS AthOnnxruntimeService
INCLUDE_DIRS ${ONNXRUNTIME_INCLUDE_DIRS}
LINK_LIBRARIES ${ONNXRUNTIME_LIBRARIES} AsgServicesLib)
# The ROOT dictionary is only needed in standalone (XAOD_STANDALONE) builds.
if (XAOD_STANDALONE)
atlas_add_dictionary( AthOnnxruntimeServiceDict
AthOnnxruntimeService/AthOnnxruntimeServiceDict.h
AthOnnxruntimeService/selection.xml
LINK_LIBRARIES AthOnnxruntimeServiceLib )
endif ()
# The Gaudi component library is only built in full Athena builds.
if (NOT XAOD_STANDALONE)
atlas_add_component( AthOnnxruntimeService
src/*.h src/*.cxx src/components/*.cxx
INCLUDE_DIRS ${ONNXRUNTIME_INCLUDE_DIRS}
LINK_LIBRARIES ${ONNXRUNTIME_LIBRARIES} AthOnnxruntimeServiceLib AthenaBaseComps GaudiKernel AsgServicesLib)
endif ()
# ONNXRUNTIMESERVICE
This package hosts all ONNX Runtime related services,
e.g. `IONNXRuntimeSvc.h` and `ONNXRuntimeSvc.*`.
For an example use case, see
`https://gitlab.cern.ch/atlas/athena/-/blob/main/Control/AthenaExamples/AthExOnnxRuntime/src/CxxApiAlgorithm.h#L66`
// Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
// Local include(s).
#include "AthOnnxruntimeService/ONNXRuntimeSvc.h"
namespace AthONNX {
// Constructor: forward to the AsgService base class and advertise the
// IONNXRuntimeSvc interface so clients can retrieve this service
// through a ServiceHandle<IONNXRuntimeSvc>.
ONNXRuntimeSvc::ONNXRuntimeSvc(const std::string& name, ISvcLocator* svc) :
asg::AsgService(name, svc)
{
declareServiceInterface<AthONNX::IONNXRuntimeSvc>();
}
StatusCode ONNXRuntimeSvc::initialize() {
   // Set up the process-wide ONNX Runtime environment, tagged with this
   // service's name and logging at warning level.
   m_env = std::make_unique<Ort::Env>(ORT_LOGGING_LEVEL_WARNING, name().c_str());
   ATH_MSG_DEBUG( "Ort::Env object created" );
   return StatusCode::SUCCESS;
}
StatusCode ONNXRuntimeSvc::finalize() {
// Delete the environment object.
m_env.reset();
ATH_MSG_DEBUG( "Ort::Env object deleted" );
// Return gracefully.
return StatusCode::SUCCESS;
}
// Accessor for the shared ONNX Runtime environment.
// NOTE(review): m_env is only set in initialize() and reset in finalize();
// calling env() outside that window dereferences a null unique_ptr (UB).
Ort::Env& ONNXRuntimeSvc::env() const {
return *m_env;
}
} // namespace AthONNX
// Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
// Local include(s).
#include <AthOnnxruntimeService/ONNXRuntimeSvc.h>
// Declare the package's components.
// Registers ONNXRuntimeSvc with the Gaudi plugin service so it can be
// instantiated by name from job configuration.
DECLARE_COMPONENT( AthONNX::ONNXRuntimeSvc )
Control/AthOnnxruntimeUtils
// Dear emacs, this is -*- c++ -*-
// Copyright (C) 2002-2023 CERN for the benefit of the ATLAS collaboration
#ifndef ONNX_UTILS_H
#define ONNX_UTILS_H
#include <string>
#include <iostream>
#include <fstream>
#include <arpa/inet.h>
#include <vector>
#include <iterator>
#include <tuple>
// ONNX Runtime include(s).
#include <core/session/onnxruntime_cxx_api.h>
// Local include(s).
#include "AthOnnxruntimeService/IONNXRuntimeSvc.h"
#include "GaudiKernel/ServiceHandle.h"
namespace AthONNX {
/************************Flattening of Input Data***************************/
/***************************************************************************/
/// Flatten a 2-D (vector-of-vectors) feature container into a single
/// contiguous 1-D vector, preserving row order then element order.
///
/// @param features  the nested input values (taken by const reference —
///                  the original pass-by-value copied the whole container)
/// @return a 1-D vector holding all elements in row-major order;
///         empty input yields an empty vector
template<typename T>
inline std::vector<T> FlattenInput_multiD_1D( const std::vector<std::vector<T>>& features){
   // 1. Compute the total size so the output is allocated exactly once.
   //    (std::size_t instead of int: matches size() and cannot go negative.)
   std::size_t total_size = 0;
   for (const auto& feature : features) total_size += feature.size();
   // 2. Create a vector to hold the data.
   std::vector<T> Flatten1D;
   Flatten1D.reserve(total_size);
   // 3. Append each row's elements in order via a bulk range insert.
   for (const auto& feature : features)
      Flatten1D.insert(Flatten1D.end(), feature.begin(), feature.end());
   return Flatten1D;
}
/*********************************Creation of ORT tensor*********************************/
/****************************************************************************************/
/// Wrap an already-flattened data buffer in an ONNX Runtime tensor.
/// The tensor references the caller's buffer (no copy), so flattenData
/// must outlive the returned Ort::Value.
template<typename T>
inline Ort::Value TensorCreator(std::vector<T>& flattenData, std::vector<int64_t>& input_node_dims ){
   // CPU-resident memory description for the tensor.
   auto memInfo = Ort::MemoryInfo::CreateCpu(OrtAllocatorType::OrtArenaAllocator, OrtMemType::OrtMemTypeDefault);
   // Build the tensor directly from the flat buffer plus its shape.
   return Ort::Value::CreateTensor<T>(memInfo,
                                      flattenData.data(),
                                      flattenData.size(),
                                      input_node_dims.data(),
                                      input_node_dims.size());
}
/*********************************Creation of ORT Session*********************************/
/*****************************************************************************************/
//template<typename T>
inline std::unique_ptr< Ort::Session > CreateORTSession(const std::string& modelFile, bool withCUDA=false){
// Set up the ONNX Runtime session.
Ort::SessionOptions sessionOptions;
sessionOptions.SetIntraOpNumThreads( 1 );
if (withCUDA) {
; // does nothing for now until we have a GPU enabled build
}
sessionOptions.SetGraphOptimizationLevel( GraphOptimizationLevel::ORT_ENABLE_EXTENDED );
ServiceHandle< IONNXRuntimeSvc > svc("AthONNX::ONNXRuntimeSvc",
"AthONNX::ONNXRuntimeSvc");
return std::make_unique<Ort::Session>( svc->env(),
modelFile.c_str(),
sessionOptions );
}
/*********************************Input Node Structure of Model*********************************/
/***********************************************************************************************/
/// Query the model's input-node names and shape from an open session.
/// @return tuple of (shape of an input node, one C-string name per input)
/// NOTE(review): the dims vector is overwritten on every loop iteration,
/// so for multi-input models only the LAST input's shape is returned —
/// verify this is intended before relying on it.
inline std::tuple<std::vector<int64_t>, std::vector<const char*> > GetInputNodeInfo(const std::unique_ptr< Ort::Session >& session){
std::vector<int64_t> input_node_dims;
size_t num_input_nodes = session->GetInputCount();
std::vector<const char*> input_node_names(num_input_nodes);
Ort::AllocatorWithDefaultOptions allocator;
for( std::size_t i = 0; i < num_input_nodes; i++ ) {
// release() gives up ownership of the allocated name string so the
// returned char* stays valid after this function — the string is
// never freed (a deliberate, small leak per call).
char* input_name = session->GetInputNameAllocated(i, allocator).release();
input_node_names[i] = input_name;
Ort::TypeInfo type_info = session->GetInputTypeInfo(i);
auto tensor_info = type_info.GetTensorTypeAndShapeInfo();
input_node_dims = tensor_info.GetShape();
}
return std::make_tuple(input_node_dims, input_node_names);
}
/*********************************Output Node Structure of Model*********************************/
/***********************************************************************************************/
/// Query the model's output-node names and shape from an open session.
/// @return tuple of (shape of an output node, one C-string name per output)
/// NOTE(review): as in GetInputNodeInfo, the dims vector is overwritten
/// each iteration, so only the LAST output's shape is returned.
inline std::tuple<std::vector<int64_t>, std::vector<const char*> > GetOutputNodeInfo(const std::unique_ptr< Ort::Session >& session){
//output nodes
std::vector<int64_t> output_node_dims;
size_t num_output_nodes = session->GetOutputCount();
std::vector<const char*> output_node_names(num_output_nodes);
Ort::AllocatorWithDefaultOptions allocator;
for( std::size_t i = 0; i < num_output_nodes; i++ ) {
// release(): the name string is intentionally leaked so the returned
// char* remains valid for the caller (see GetInputNameAllocated docs).
char* output_name = session->GetOutputNameAllocated(i, allocator).release();
output_node_names[i] = output_name;
Ort::TypeInfo type_info = session->GetOutputTypeInfo(i);
auto tensor_info = type_info.GetTensorTypeAndShapeInfo();
output_node_dims = tensor_info.GetShape();
}
return std::make_tuple(output_node_dims, output_node_names);
}
/*********************************Running Inference through ORT*********************************/
/***********************************************************************************************/
/// Run one inference and return a pointer to the first output tensor's
/// float data.
/// WARNING(review): output_tensor (the vector owning the results) is a
/// local and is destroyed when this function returns, so the returned
/// float* dangles. Callers that read it rely on undefined behaviour —
/// fixing this requires returning the Ort::Value(s) instead, which
/// would change the interface.
inline float* Inference(const std::unique_ptr< Ort::Session >& session,std::vector<const char*>& input_node_names, Ort::Value& input_tensor, std::vector<const char*>& output_node_names){
auto output_tensor = session->Run(Ort::RunOptions{nullptr},
input_node_names.data(),
&input_tensor,
input_node_names.size(), /** 1, flatten_input:0 **/
output_node_names.data(),
output_node_names.size()); /** 1, dense_1/Softmax:0 **/
//assert(output_tensor.size() == output_node_names.size() && output_tensor.front().IsTensor());
// Get pointer to output tensor float values
float* floatarr = output_tensor.front().GetTensorMutableData<float>();
return floatarr;
}
/// Run inference using ONNX Runtime I/O binding: inputs and outputs are
/// bound by name to pre-allocated Ort::Value buffers, avoiding extra
/// copies of the output tensors.
///
/// @param session     open session to run (must be non-null)
/// @param inputNames  one name per input tensor; must not be empty
/// @param inputData   input tensors, parallel to inputNames
/// @param outputNames one name per output tensor
/// @param outputData  pre-allocated output tensors, parallel to outputNames
/// @throws std::runtime_error if inputNames is empty
///
/// Marked inline: this function lives in a header, and without inline a
/// program including it from more than one translation unit violates the
/// ODR (multiple-definition link errors).
inline void InferenceWithIOBinding(const std::unique_ptr<Ort::Session>& session,
                                   const std::vector<const char*>& inputNames,
                                   const std::vector<Ort::Value>& inputData,
                                   const std::vector<const char*>& outputNames,
                                   const std::vector<Ort::Value>& outputData){
   if (inputNames.empty()) {
      throw std::runtime_error("Onnxruntime input data mapping cannot be empty");
   }
   assert(inputNames.size() == inputData.size());
   Ort::IoBinding iobinding(*session);
   // Bind every input and output buffer to its node name.
   for(size_t idx = 0; idx < inputNames.size(); ++idx){
      iobinding.BindInput(inputNames[idx], inputData[idx]);
   }
   for(size_t idx = 0; idx < outputNames.size(); ++idx){
      iobinding.BindOutput(outputNames[idx], outputData[idx]);
   }
   session->Run(Ort::RunOptions{nullptr}, iobinding);
}
}
#endif
# Copyright (C) 2002-2023 CERN for the benefit of the ATLAS collaboration
# Declare the package's name.
atlas_subdir( AthOnnxruntimeUtils )
# External dependencies.
find_package( onnxruntime )
# Component(s) in the package.
# Header-only (INTERFACE) library: consumers inherit the include paths
# and link dependencies; no sources are compiled in this package.
atlas_add_library( AthOnnxruntimeUtilsLib
INTERFACE
PUBLIC_HEADERS AthOnnxruntimeUtils
INCLUDE_DIRS ${ONNXRUNTIME_INCLUDE_DIRS}
LINK_LIBRARIES ${ONNXRUNTIME_LIBRARIES} AthOnnxruntimeServiceLib AthenaKernel GaudiKernel AsgServicesLib )
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment