C++ Sample

The C++ samples below show how to:

  • Append the QAic Execution Provider to an ONNX Runtime session (using a model-settings YAML file and optional device ID).

  • Load a compiled model and input data.

  • Run inference and inspect output values.

Load a Model

Contents of sample.cpp

#include <algorithm>
#include <fstream>
#include <iostream>
#include <vector>

#include <onnxruntime_cxx_api.h>
#include <qaic_provider_factory.h>

// Global QAIC / model configuration.
// NOTE(review): these paths point at the stock resnet50 test assets shipped
// with the QAIC ONNX Runtime integration — adjust for your installation.
constexpr int   kAicDeviceId = 0;
constexpr const char* kConfigPath = "/opt/qti-aic/integrations/qaic_onnxrt/tests/resnet50/resnet50.yaml";
constexpr const char* kModelPath  = "/opt/qti-aic/integrations/qaic_onnxrt/tests/resnet50/resnet50-v1-12-batch.onnx";
constexpr const char* kRawPath    = "/opt/qti-aic/integrations/qaic_onnxrt/tests/resnet50/input_goldfish.raw";

// Known model I/O names (must match the tensor names baked into the ONNX graph).
constexpr const char* kInputName  = "data";
constexpr const char* kOutputName = "resnetv17_dense0_fwd";

/// Creates an ONNX Runtime session with the QAIC execution provider appended.
///
/// @param env      Initialized ORT environment (outlives the session).
/// @param session  Receives the created session on success.
/// @return true on success; false if the QAIC EP could not be appended.
///         Session creation failures propagate as Ort::Exception.
bool LoadModel(Ort::Env& env, Ort::Session& session) {
    Ort::SessionOptions opts;
    opts.SetGraphOptimizationLevel(GraphOptimizationLevel::ORT_ENABLE_EXTENDED);

    // Append the QAIC EP configured by the model-settings YAML file.
    OrtStatus* status = OrtSessionOptionsAppendExecutionProvider_QAic(
        opts, kConfigPath, kAicDeviceId);
    if (status != nullptr) {
        // Surface the provider's diagnostic before releasing the status,
        // instead of discarding it — the message says *why* it failed.
        std::cerr << "Failed to append QAIC EP: "
                  << Ort::GetApi().GetErrorMessage(status) << "\n";
        Ort::GetApi().ReleaseStatus(status);
        return false;
    }

    // Create the session; throws Ort::Exception on failure (caught in main).
    session = Ort::Session(env, kModelPath, opts);
    return true;
}

Perform Inference

/// Runs a single inference on the session and prints the first few outputs.
///
/// Reads a raw float32 NCHW tensor (assumed [1, 3, 224, 224] — the ResNet50
/// input shape; TODO confirm against the YAML config) from kRawPath, feeds it
/// as kInputName, and fetches kOutputName.
///
/// @param session  A session created by LoadModel().
/// @return true on success; false on file or output errors.
///         ORT failures propagate as Ort::Exception.
bool RunInference(Ort::Session& session) {
    // Assume ResNet50 input: [1, 3, 224, 224] float32
    std::vector<int64_t> input_shape = {1, 3, 224, 224};
    size_t input_size = 1;
    for (auto d : input_shape) {
        input_size *= static_cast<size_t>(d);
    }

    // Load input tensor from raw file
    std::ifstream fin(kRawPath, std::ios::binary);
    if (!fin) {
        std::cerr << "Failed to open input file: " << kRawPath << "\n";
        return false;
    }

    std::vector<float> input_data(input_size);
    fin.read(reinterpret_cast<char*>(input_data.data()),
            static_cast<std::streamsize>(input_size * sizeof(float)));
    if (!fin) {
        std::cerr << "Failed to read expected bytes from input file\n";
        return false;
    }

    // Wrap input_data in a tensor; CreateTensor does NOT copy, so input_data
    // must stay alive until session.Run() returns (it does — same scope).
    Ort::MemoryInfo mem_info = Ort::MemoryInfo::CreateCpu(
        OrtAllocatorType::OrtArenaAllocator, OrtMemTypeDefault);

    Ort::Value input_tensor = Ort::Value::CreateTensor<float>(
        mem_info,
        input_data.data(),
        input_size,
        input_shape.data(),
        input_shape.size());

    const char* input_names[]  = {kInputName};
    const char* output_names[] = {kOutputName};

    // Run inference
    auto outputs = session.Run(
        Ort::RunOptions{nullptr},
        input_names, &input_tensor, 1,
        output_names, 1);

    if (outputs.empty() || !outputs[0].IsTensor()) {
        std::cerr << "Unexpected output\n";
        return false;
    }

    // Print first few output values
    float* out_data = outputs[0].GetTensorMutableData<float>();
    auto info = outputs[0].GetTensorTypeAndShapeInfo();
    size_t out_size = info.GetElementCount();

    std::cout << "Output size: " << out_size << "\n";
    std::cout << "First 10 output values:\n";
    // BUGFIX: the separator used to test `i + 1 < 10`, so when out_size < 10
    // the final value was followed by ", " and no newline. Compare against
    // the actual number of values printed instead.
    const size_t print_count = std::min<size_t>(10, out_size);
    for (size_t i = 0; i < print_count; ++i) {
        std::cout << out_data[i] << (i + 1 < print_count ? ", " : "\n");
    }

    return true;
}

/// Entry point: build the QAIC-backed session, run one inference.
/// Returns 0 on success, 1 on any failure (including ORT exceptions).
int main() {
    try {
        Ort::Env env(ORT_LOGGING_LEVEL_ERROR, "qaic_resnet50");
        Ort::Session session(nullptr);

        // Short-circuit: inference only runs if the model loaded.
        const bool ok = LoadModel(env, session) && RunInference(session);
        if (ok) {
            return 0;
        }
    } catch (const Ort::Exception& e) {
        std::cerr << "ONNX Runtime error: " << e.what() << "\n";
    } catch (const std::exception& e) {
        std::cerr << "Standard error: " << e.what() << "\n";
    }
    return 1;
}

Build Sample

Contents of CMakeLists.txt

cmake_minimum_required(VERSION 3.10)
project(model_sample CXX)

# Root of the ONNX Runtime build that ships the QAIC provider.
# Adjust this to your environment.
set(ORT_ROOT "/opt/qti-aic/integrations/qaic_onnxrt/onnxruntime_qaic")

# Compiler flags
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)
add_compile_options(-O2 -Wall -Wextra)

# Portable thread support instead of a bare "pthread" link name.
find_package(Threads REQUIRED)

# Library search path for the prebuilt ONNX Runtime shared objects.
# (link_directories is kept — target_link_directories needs CMake >= 3.13.)
link_directories(
    "${ORT_ROOT}/build/Linux/Release"
)

# Executable
add_executable(sample sample.cpp)

# Headers scoped to the target rather than directory-wide:
# ONNX Runtime C++ API plus the QAIC provider factory header.
target_include_directories(sample PRIVATE
    "${ORT_ROOT}/include/onnxruntime/core/session"
    "${ORT_ROOT}/include/onnxruntime/core/providers/qaic"
)

# Libraries (ONNX Runtime core + QAIC provider + system libs).
# Threads::Threads and CMAKE_DL_LIBS replace hard-coded pthread/dl.
target_link_libraries(sample PRIVATE
    onnxruntime
    onnxruntime_providers_shared
    Threads::Threads
    ${CMAKE_DL_LIBS}
)

Build steps:

mkdir build && cd build
cmake ..
make
cd ..

Run Sample

export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/opt/qti-aic/integrations/qaic_onnxrt/onnxruntime_qaic/build/Linux/Release
./sample