Improve the SNPE EP sample with command line option to switch SNPE backend #120

Merged · 2 commits · Jun 20, 2022
**c_cxx/Snpe_EP/CMakeLists.txt** (8 changes: 7 additions & 1 deletion)
```diff
@@ -16,7 +16,13 @@ ADD_EXECUTABLE(snpe_ep_sample main.cpp)
 if(HAVE_TENSORRT_PROVIDER_FACTORY_H)
   target_compile_definitions(snpe_ep_sample PRIVATE -DHAVE_TENSORRT_PROVIDER_FACTORY_H)
 endif()
-target_link_libraries(snpe_ep_sample onnxruntime)
+
+set(ORT_LIBS onnxruntime)
+if(ANDROID)
+  list(APPEND ORT_LIBS libc++_shared.so)
+endif()
+target_link_libraries(snpe_ep_sample ${ORT_LIBS})
+
 if(MSVC)
   target_link_directories(snpe_ep_sample PRIVATE ${ONNXRUNTIME_ROOTDIR}/build/Windows/Release/Release)
 else()
```
**c_cxx/Snpe_EP/README.md** (11 changes: 9 additions & 2 deletions)
````diff
@@ -74,7 +74,12 @@
 chairs.raw -- from $SNPE_ROOT/models/inception_v3/data/cropped
 imagenet_slim_labels.txt -- from $SNPE_ROOT/models/inception_v3/data
 
-Run snpe_ep_sample.exe, it will output:
+Run
+```
+snpe_ep_sample.exe --cpu chairs.raw
+```
+
+it will output:
 
 ```
 832, 0.299591, studio couch
@@ -103,6 +108,7 @@
 adb push [$SNPE_ROOT]/lib/dsp/*.so /data/local/tmp/snpeexample
 adb push [$Onnxruntime_ROOT]/build/Android/Release/libonnxruntime.so /data/local/tmp/snpeexample
 adb push [$SNPE_ROOT]/models/inception_v3/data/cropped/chairs.raw /data/local/tmp/snpeexample
+adb push [$SNPE_ROOT]/models/inception_v3/data/imagenet_slim_labels.txt /data/local/tmp/snpeexample
 adb push [$SNPE_ROOT]/models/inception_v3/snpe_inception_v3.onnx /data/local/tmp/snpeexample
 adb push ./onnxruntime-inference-examples/c_cxx/Snpe_EP/build_android/snpe_ep_sample /data/local/tmp/snpeexample
 ```
@@ -115,7 +121,8 @@
 chmod +x *
 export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/snpeexample
 export PATH=$PATH:/data/local/tmp/snpeexample
-snpe_ep_sample
+snpe_ep_sample --cpu chairs.raw
+snpe_ep_sample --dsp chairs.raw
 ```
 
 it will output:
````
**c_cxx/Snpe_EP/main.cpp** (38 changes: 33 additions & 5 deletions)
```diff
@@ -16,12 +16,12 @@ bool CheckStatus(const OrtApi* g_ort, OrtStatus* status) {
     const char* msg = g_ort->GetErrorMessage(status);
     std::cerr << msg << std::endl;
     g_ort->ReleaseStatus(status);
-    throw std::exception();
+    throw Ort::Exception(msg, OrtErrorCode::ORT_EP_FAIL);
   }
   return true;
 }
 
-void run_ort_snpe_ep() {
+void run_ort_snpe_ep(std::string backend, std::string input_path) {
 #ifdef _WIN32
   const wchar_t* model_path = L"snpe_inception_v3.onnx";
 #else
```
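In the hunk above, the message-less `std::exception()` becomes an `Ort::Exception` from `onnxruntime_cxx_api.h`, which carries both the error text and an `OrtErrorCode`. A minimal sketch of that pattern, assuming the same ORT C API setup; note it also copies the message before `ReleaseStatus`, since `GetErrorMessage` returns a pointer owned by the `OrtStatus`:

```cpp
// Sketch of the status-checking pattern, assuming onnxruntime_cxx_api.h
// (which defines Ort::Exception) is on the include path.
#include <iostream>
#include <string>
#include "onnxruntime_cxx_api.h"

bool CheckStatus(const OrtApi* g_ort, OrtStatus* status) {
  if (status != nullptr) {
    // Copy the message before releasing the status: GetErrorMessage returns
    // a pointer into the OrtStatus, which ReleaseStatus frees.
    std::string msg = g_ort->GetErrorMessage(status);
    g_ort->ReleaseStatus(status);
    std::cerr << msg << std::endl;
    throw Ort::Exception(std::move(msg), OrtErrorCode::ORT_EP_FAIL);
  }
  return true;
}
```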
```diff
@@ -38,7 +38,7 @@ void run_ort_snpe_ep() {
   CheckStatus(g_ort, g_ort->SetSessionGraphOptimizationLevel(session_options, ORT_ENABLE_BASIC));
 
   std::vector<const char*> options_keys = {"runtime", "buffer_type"};
-  std::vector<const char*> options_values = {"CPU", "FLOAT"};  // set to TF8 if using quantized data
+  std::vector<const char*> options_values = {backend.c_str(), "FLOAT"};  // set to TF8 if using quantized data
 
   CheckStatus(g_ort, g_ort->SessionOptionsAppendExecutionProvider_SNPE(session_options, options_keys.data(),
                                                                        options_values.data(), options_keys.size()));
```
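The `runtime` key is what the new `backend` parameter feeds: the SNPE EP pairs `options_keys[i]` with `options_values[i]`. A hedged sketch of that wiring, assuming `g_ort`, `session_options`, and `CheckStatus` exist as in the sample (`use_dsp` is a hypothetical flag); `backend` must outlive the call, because `options_values` holds the raw pointer from `c_str()`:

```cpp
// Sketch: choosing the SNPE runtime at run time. Assumes g_ort,
// session_options, and CheckStatus are set up as in the sample above.
std::string backend = use_dsp ? "DSP" : "CPU";  // use_dsp: hypothetical flag
std::vector<const char*> options_keys = {"runtime", "buffer_type"};
// keys[i] pairs with values[i]; backend must stay alive until the call
// returns, since options_values stores the pointer returned by c_str().
std::vector<const char*> options_values = {backend.c_str(), "FLOAT"};
CheckStatus(g_ort, g_ort->SessionOptionsAppendExecutionProvider_SNPE(
                       session_options, options_keys.data(),
                       options_values.data(), options_keys.size()));
```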
```diff
@@ -124,7 +124,7 @@
   size_t input_data_length = input_data_size * sizeof(float);
   std::vector<float> input_data(input_data_size, 1.0);
 
-  std::ifstream input_raw_file("chairs.raw", std::ios::binary);
+  std::ifstream input_raw_file(input_path, std::ios::binary);
   input_raw_file.seekg(0, std::ios::end);
   const size_t num_elements = input_raw_file.tellg() / sizeof(float);
   input_raw_file.seekg(0, std::ios::beg);
```
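Reading from `input_path` instead of the hard-coded `"chairs.raw"` is what lets one binary serve any preprocessed input. A hedged sketch of the same raw-tensor read with a size check the sample does not show; `LoadRawTensor` and `expected_elements` are illustrative names, not part of the PR:

```cpp
// Sketch: load a raw float32 tensor file, verifying the element count
// (e.g. 299*299*3 for Inception v3). Illustrative helper only.
#include <fstream>
#include <iostream>
#include <string>
#include <vector>

bool LoadRawTensor(const std::string& input_path, size_t expected_elements,
                   std::vector<float>& out) {
  std::ifstream file(input_path, std::ios::binary);
  if (!file) {
    std::cerr << "cannot open " << input_path << std::endl;
    return false;
  }
  file.seekg(0, std::ios::end);
  const size_t num_elements = static_cast<size_t>(file.tellg()) / sizeof(float);
  file.seekg(0, std::ios::beg);
  if (num_elements != expected_elements) {
    std::cerr << "unexpected element count: " << num_elements << std::endl;
    return false;
  }
  out.resize(num_elements);
  file.read(reinterpret_cast<char*>(out.data()), num_elements * sizeof(float));
  return static_cast<bool>(file);
}
```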
```diff
@@ -162,7 +162,35 @@
   printf("%d, %f, %s \n", max_index, *max, label_table[max_index].c_str());
 }
 
+void PrintHelp() {
+  std::cout << "To run the sample, use the following command:" << std::endl;
+  std::cout << "Example: ./snpe_ep_sample --cpu <path_to_raw_input>" << std::endl;
+  std::cout << "To run with the SNPE CPU backend: ./snpe_ep_sample --cpu chairs.raw" << std::endl;
+  std::cout << "To run with the SNPE DSP backend: ./snpe_ep_sample --dsp chairs.raw" << std::endl;
+}
+
+constexpr const char* CPUBACKEND = "--cpu";
+constexpr const char* DSPBACKEND = "--dsp";
+
 int main(int argc, char* argv[]) {
-  run_ort_snpe_ep();
+  std::string backend = "CPU";
+
+  if (argc != 3) {
+    PrintHelp();
+    return 1;
+  }
+
+  if (strcmp(argv[1], CPUBACKEND) == 0) {
+    backend = "CPU";
+  } else if (strcmp(argv[1], DSPBACKEND) == 0) {
+    backend = "DSP";
+  } else {
+    std::cout << "This sample only supports the CPU and DSP backends." << std::endl;
+    PrintHelp();
+    return 1;
+  }
+  std::string input_path(argv[2]);
+
+  run_ort_snpe_ep(backend, input_path);
   return 0;
 }
```

Review comment (Member), on the `std::cout` line in the `else` branch: please format the code
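Following up on the reviewer's "please format the code" note, here is a hedged, self-contained sketch of the same argument handling with consistent formatting and the `<cstring>` include that `strcmp` needs; `run_ort_snpe_ep` is assumed to be defined as in the diff above, and the `kCpuBackend`/`kDspBackend` names are this sketch's own:

```cpp
#include <cstring>
#include <iostream>
#include <string>

// Defined in the sample above.
void run_ort_snpe_ep(std::string backend, std::string input_path);

constexpr const char* kCpuBackend = "--cpu";
constexpr const char* kDspBackend = "--dsp";

void PrintHelp() {
  std::cout << "Usage: ./snpe_ep_sample [--cpu|--dsp] <path_to_raw_input>\n"
            << "  --cpu  run with the SNPE CPU backend, e.g. ./snpe_ep_sample --cpu chairs.raw\n"
            << "  --dsp  run with the SNPE DSP backend, e.g. ./snpe_ep_sample --dsp chairs.raw\n";
}

int main(int argc, char* argv[]) {
  if (argc != 3) {
    PrintHelp();
    return 1;
  }
  std::string backend;
  if (std::strcmp(argv[1], kCpuBackend) == 0) {
    backend = "CPU";
  } else if (std::strcmp(argv[1], kDspBackend) == 0) {
    backend = "DSP";
  } else {
    std::cout << "This sample only supports the CPU and DSP backends.\n";
    PrintHelp();
    return 1;
  }
  run_ort_snpe_ep(backend, std::string(argv[2]));
  return 0;
}
```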