Improve the SNPE EP sample with command line option to switch SNPE backend (microsoft#120)

* Improve the sample with a command line option to switch the SNPE backend and set the input file path.
Fix an issue with the Android build: it needs to use libc++_shared.so from the SNPE SDK.

* 1. Update the API call according to the API change in ORT: SessionOptionsAppendExecutionProvider_SNPE -> SessionOptionsAppendExecutionProvider.
2. Format update.
HectorSVC authored and shamaksx committed Sep 28, 2022
1 parent b64c316 commit 101afa2
Showing 3 changed files with 51 additions and 10 deletions.
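
The central API change is visible in the main.cpp diff below: the SNPE-specific SessionOptionsAppendExecutionProvider_SNPE call is replaced by the generic SessionOptionsAppendExecutionProvider, which takes the provider name plus parallel key/value option arrays. For reference, a minimal standalone sketch of the renamed call (not part of the commit; it only mirrors the provider options the sample passes):

```cpp
// Sketch only: register the SNPE EP through the generic
// SessionOptionsAppendExecutionProvider API (ORT C API).
#include <onnxruntime_c_api.h>

#include <cstdlib>

int main() {
  const OrtApi* g_ort = OrtGetApiBase()->GetApi(ORT_API_VERSION);

  OrtSessionOptions* session_options = nullptr;
  OrtStatus* status = g_ort->CreateSessionOptions(&session_options);
  if (status != nullptr) {
    g_ort->ReleaseStatus(status);
    return EXIT_FAILURE;
  }

  // Same provider options the sample uses; "runtime" selects CPU or DSP.
  const char* keys[] = {"runtime", "buffer_type"};
  const char* values[] = {"CPU", "FLOAT"};

  // Generic call that replaces SessionOptionsAppendExecutionProvider_SNPE:
  // provider name, option keys, option values, number of options.
  status = g_ort->SessionOptionsAppendExecutionProvider(session_options, "SNPE",
                                                        keys, values, 2);
  if (status != nullptr) {
    g_ort->ReleaseStatus(status);
    g_ort->ReleaseSessionOptions(session_options);
    return EXIT_FAILURE;
  }

  g_ort->ReleaseSessionOptions(session_options);
  return EXIT_SUCCESS;
}
```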
8 changes: 7 additions & 1 deletion c_cxx/Snpe_EP/CMakeLists.txt
@@ -16,7 +16,13 @@ ADD_EXECUTABLE(snpe_ep_sample main.cpp)
if(HAVE_TENSORRT_PROVIDER_FACTORY_H)
target_compile_definitions(snpe_ep_sample PRIVATE -DHAVE_TENSORRT_PROVIDER_FACTORY_H)
endif()
target_link_libraries(snpe_ep_sample onnxruntime)

set(ORT_LIBS onnxruntime)
if(ANDROID)
list(APPEND ORT_LIBS libc++_shared.so)
endif()
target_link_libraries(snpe_ep_sample ${ORT_LIBS})

if(MSVC)
target_link_directories(snpe_ep_sample PRIVATE ${ONNXRUNTIME_ROOTDIR}/build/Windows/Release/Release)
else()
11 changes: 9 additions & 2 deletions c_cxx/Snpe_EP/README.md
@@ -74,7 +74,12 @@
chairs.raw -- from $SNPE_ROOT/models/inception_v3/data/cropped
imagenet_slim_labels.txt -- from $SNPE_ROOT/models/inception_v3/data
Run snpe_ep_sample.exe, it will output:
Run
```
snpe_ep_sample.exe --cpu chairs.raw
```
it will output:
```
832, 0.299591, studio couch
@@ -103,6 +108,7 @@
adb push [$SNPE_ROOT]/lib/dsp/*.so /data/local/tmp/snpeexample
adb push [$Onnxruntime_ROOT]/build/Android/Release/libonnxruntime.so /data/local/tmp/snpeexample
adb push [$SNPE_ROOT]/models/inception_v3/data/cropped/chairs.raw /data/local/tmp/snpeexample
adb push [$SNPE_ROOT]/models/inception_v3/data/imagenet_slim_labels.txt /data/local/tmp/snpeexample
adb push [$SNPE_ROOT]/models/inception_v3/snpe_inception_v3.onnx /data/local/tmp/snpeexample
adb push ./onnxruntime-inference-examples/c_cxx/Snpe_EP/build_android/snpe_ep_sample /data/local/tmp/snpeexample
```
@@ -115,7 +121,8 @@
chmod +x *
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/snpeexample
export PATH=$PATH:/data/local/tmp/snpeexample
snpe_ep_sample
snpe_ep_sample --cpu chairs.raw
snpe_ep_sample --dsp chairs.raw
```
it will output:
42 changes: 35 additions & 7 deletions c_cxx/Snpe_EP/main.cpp
@@ -16,12 +16,12 @@ bool CheckStatus(const OrtApi* g_ort, OrtStatus* status) {
const char* msg = g_ort->GetErrorMessage(status);
std::cerr << msg << std::endl;
g_ort->ReleaseStatus(status);
throw std::exception();
throw Ort::Exception(msg, OrtErrorCode::ORT_EP_FAIL);
}
return true;
}

void run_ort_snpe_ep() {
void run_ort_snpe_ep(std::string backend, std::string input_path) {
#ifdef _WIN32
const wchar_t* model_path = L"snpe_inception_v3.onnx";
#else
@@ -38,10 +38,10 @@ void run_ort_snpe_ep() {
CheckStatus(g_ort, g_ort->SetSessionGraphOptimizationLevel(session_options, ORT_ENABLE_BASIC));

std::vector<const char*> options_keys = {"runtime", "buffer_type"};
std::vector<const char*> options_values = {"CPU", "FLOAT"}; // set to TF8 if using quantized data
std::vector<const char*> options_values = {backend.c_str(), "FLOAT"}; // set to TF8 if using quantized data

CheckStatus(g_ort, g_ort->SessionOptionsAppendExecutionProvider_SNPE(session_options, options_keys.data(),
options_values.data(), options_keys.size()));
CheckStatus(g_ort, g_ort->SessionOptionsAppendExecutionProvider(session_options, "SNPE", options_keys.data(),
options_values.data(), options_keys.size()));
OrtSession* session;
CheckStatus(g_ort, g_ort->CreateSession(env, model_path, session_options, &session));

@@ -124,7 +124,7 @@ void run_ort_snpe_ep() {
size_t input_data_length = input_data_size * sizeof(float);
std::vector<float> input_data(input_data_size, 1.0);

std::ifstream input_raw_file("chairs.raw", std::ios::binary);
std::ifstream input_raw_file(input_path, std::ios::binary);
input_raw_file.seekg(0, std::ios::end);
const size_t num_elements = input_raw_file.tellg() / sizeof(float);
input_raw_file.seekg(0, std::ios::beg);
@@ -162,7 +162,35 @@ void run_ort_snpe_ep() {
printf("%d, %f, %s \n", max_index, *max, label_table[max_index].c_str());
}

void PrintHelp() {
std::cout << "To run the sample, use the following command:" << std::endl;
std::cout << "Example: ./snpe_ep_sample --cpu <path_to_raw_input>" << std::endl;
std::cout << "To run with the SNPE CPU backend: ./snpe_ep_sample --cpu chairs.raw" << std::endl;
std::cout << "To run with the SNPE DSP backend: ./snpe_ep_sample --dsp chairs.raw" << std::endl;
}

constexpr const char* CPUBACKEND = "--cpu";
constexpr const char* DSPBACKEND = "--dsp";

int main(int argc, char* argv[]) {
run_ort_snpe_ep();
std::string backend = "CPU";

if (argc != 3) {
PrintHelp();
return 1;
}

if (strcmp(argv[1], CPUBACKEND) == 0) {
backend = "CPU";
} else if (strcmp(argv[1], DSPBACKEND) == 0) {
backend = "DSP";
} else {
std::cout << "This sample only supports the CPU and DSP backends." << std::endl;
PrintHelp();
return 1;
}
std::string input_path(argv[2]);

run_ort_snpe_ep(backend, input_path);
return 0;
}
