Skip to content

Commit

Permalink
Support Android NNAPI. (#622)
Browse files Browse the repository at this point in the history
  • Loading branch information
csukuangfj authored Mar 1, 2024
1 parent f9db33c commit e2397cd
Show file tree
Hide file tree
Showing 4 changed files with 73 additions and 0 deletions.
34 changes: 34 additions & 0 deletions build-android-arm64-v8a.sh
Original file line number Diff line number Diff line change
Expand Up @@ -76,8 +76,42 @@ cmake -DCMAKE_TOOLCHAIN_FILE="$ANDROID_NDK/build/cmake/android.toolchain.cmake"
-DANDROID_ABI="arm64-v8a" \
-DANDROID_PLATFORM=android-21 ..

# Please use -DANDROID_PLATFORM=android-27 if you want to use Android NNAPI

# make VERBOSE=1 -j4
make -j4
make install/strip
cp -fv $onnxruntime_version/jni/arm64-v8a/libonnxruntime.so install/lib
rm -rf install/lib/pkgconfig

# To run the generated binaries on Android, please use the following steps.
#
#
# 1. Copy sherpa-onnx and its dependencies to Android
#
# cd build-android-arm64-v8a/install/lib
# adb push ./lib*.so /data/local/tmp
# cd ../bin
# adb push ./sherpa-onnx /data/local/tmp
#
# 2. Log in to Android
#
# adb shell
# cd /data/local/tmp
# ./sherpa-onnx
#
# which fails with the following error:
#
# CANNOT LINK EXECUTABLE "./sherpa-onnx": library "libsherpa-onnx-core.so" not found: needed by main executable
#
# Please run:
#
# export LD_LIBRARY_PATH=$PWD:$LD_LIBRARY_PATH
#
# and then you can run:
#
# ./sherpa-onnx
#
# It should show the help message of sherpa-onnx.
#
# Please use the above approach to copy model files to your phone.
2 changes: 2 additions & 0 deletions sherpa-onnx/csrc/provider.cc
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@ Provider StringToProvider(std::string s) {
return Provider::kCoreML;
} else if (s == "xnnpack") {
return Provider::kXnnpack;
} else if (s == "nnapi") {
return Provider::kNNAPI;
} else {
SHERPA_ONNX_LOGE("Unsupported string: %s. Fallback to cpu", s.c_str());
return Provider::kCPU;
Expand Down
1 change: 1 addition & 0 deletions sherpa-onnx/csrc/provider.h
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ enum class Provider {
kCUDA = 1, // CUDAExecutionProvider
kCoreML = 2, // CoreMLExecutionProvider
kXnnpack = 3, // XnnpackExecutionProvider
kNNAPI = 4, // NnapiExecutionProvider
};

/**
Expand Down
36 changes: 36 additions & 0 deletions sherpa-onnx/csrc/session.cc
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,10 @@
#include "coreml_provider_factory.h" // NOLINT
#endif

#if __ANDROID_API__ >= 27
#include "nnapi_provider_factory.h"
#endif

namespace sherpa_onnx {

static Ort::SessionOptions GetSessionOptionsImpl(int32_t num_threads,
Expand Down Expand Up @@ -74,6 +78,38 @@ static Ort::SessionOptions GetSessionOptionsImpl(int32_t num_threads,
coreml_flags);
#else
SHERPA_ONNX_LOGE("CoreML is for Apple only. Fallback to cpu!");
#endif
break;
}
case Provider::kNNAPI: {
#if __ANDROID_API__ >= 27
SHERPA_ONNX_LOGE("Current API level %d ", (int32_t)__ANDROID_API__);

// Please see
// https://onnxruntime.ai/docs/execution-providers/NNAPI-ExecutionProvider.html#usage
// to enable different flags
uint32_t nnapi_flags = 0;
// nnapi_flags |= NNAPI_FLAG_USE_FP16;
// nnapi_flags |= NNAPI_FLAG_CPU_DISABLED;
OrtStatus *status = OrtSessionOptionsAppendExecutionProvider_Nnapi(
sess_opts, nnapi_flags);

if (status) {
const auto &api = Ort::GetApi();
const char *msg = api.GetErrorMessage(status);
SHERPA_ONNX_LOGE(
"Failed to enable NNAPI: %s. Available providers: %s. Fallback to "
"cpu",
msg, os.str().c_str());
api.ReleaseStatus(status);
} else {
SHERPA_ONNX_LOGE("Use nnapi");
}
#else
SHERPA_ONNX_LOGE(
"Android NNAPI requires API level >= 27. Current API level %d "
"Fallback to cpu!",
(int32_t)__ANDROID_API__);
#endif
break;
}
Expand Down

0 comments on commit e2397cd

Please sign in to comment.