diff --git a/.github/workflows/android-rknn.yaml b/.github/workflows/android-rknn.yaml new file mode 100644 index 00000000..eaf10666 --- /dev/null +++ b/.github/workflows/android-rknn.yaml @@ -0,0 +1,283 @@ +name: android-rknn + +on: + push: + branches: + - master + paths: + - '.github/workflows/android-rknn.yaml' + - 'cmake/**' + - 'sherpa-onnx/csrc/*' + - 'sherpa-onnx/jni/*' + - 'build-android*.sh' + tags: + - 'v[0-9]+.[0-9]+.[0-9]+*' + pull_request: + branches: + - master + paths: + - '.github/workflows/android-rknn.yaml' + - 'cmake/**' + - 'sherpa-onnx/csrc/*' + - 'sherpa-onnx/jni/*' + - 'build-android*.sh' + + workflow_dispatch: + +concurrency: + group: android-rknn-${{ github.ref }} + cancel-in-progress: true + +jobs: + build-android-rknn-libs: + name: Android rknn libs + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: ccache + uses: hendrikmuhs/ccache-action@v1.2 + with: + key: ${{ matrix.os }}-android-rknn + + - name: Display NDK HOME + shell: bash + run: | + echo "ANDROID_NDK_LATEST_HOME: ${ANDROID_NDK_LATEST_HOME}" + ls -lh ${ANDROID_NDK_LATEST_HOME} + + - name: build android arm64-v8a + shell: bash + run: | + export CMAKE_CXX_COMPILER_LAUNCHER=ccache + export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH" + + export ANDROID_NDK=$ANDROID_NDK_LATEST_HOME + export SHERPA_ONNX_ENABLE_C_API=ON + export SHERPA_ONNX_ENABLE_RKNN=ON + ./build-android-arm64-v8a.sh + mkdir -p jniLibs/arm64-v8a/ + cp -v ./build-android-arm64-v8a/install/lib/*.so ./jniLibs/arm64-v8a/ + cp -v ./build-android-arm64-v8a/install/lib/README.md ./jniLibs/arm64-v8a/ + rm -rf ./build-android-arm64-v8a/ + + - name: build android armv7-eabi + shell: bash + run: | + export CMAKE_CXX_COMPILER_LAUNCHER=ccache + export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH" + + export ANDROID_NDK=$ANDROID_NDK_LATEST_HOME + export SHERPA_ONNX_ENABLE_C_API=ON + export 
SHERPA_ONNX_ENABLE_RKNN=ON + ./build-android-armv7-eabi.sh + mkdir -p ./jniLibs/armeabi-v7a/ + cp -v ./build-android-armv7-eabi/install/lib/*.so ./jniLibs/armeabi-v7a/ + cp -v ./build-android-armv7-eabi/install/lib/README.md ./jniLibs/armeabi-v7a/ + rm -rf ./build-android-armv7-eabi + + - name: Copy files + shell: bash + run: | + SHERPA_ONNX_VERSION=v$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2) + echo "SHERPA_ONNX_VERSION=$SHERPA_ONNX_VERSION" >> "$GITHUB_ENV" + + filename=sherpa-onnx-${SHERPA_ONNX_VERSION}-android-rknn.tar.bz2 + + tar cjvf $filename ./jniLibs + + ls -lh + + - uses: actions/upload-artifact@v4 + with: + name: sherpa-onnx-android-libs-rknn + path: ./jniLibs + + # https://huggingface.co/docs/hub/spaces-github-actions + - name: Publish to huggingface + if: (github.repository_owner == 'csukuangfj' || github.repository_owner == 'k2-fsa') && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + env: + HF_TOKEN: ${{ secrets.HF_TOKEN }} + uses: nick-fields/retry@v3 + with: + max_attempts: 20 + timeout_seconds: 200 + shell: bash + command: | + git config --global user.email "csukuangfj@gmail.com" + git config --global user.name "Fangjun Kuang" + du -h -d1 . + ls -lh + + rm -rf huggingface + export GIT_CLONE_PROTECTION_ACTIVE=false + GIT_LFS_SKIP_SMUDGE=1 git clone https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-libs huggingface + + cd huggingface + + cp -v ../sherpa-onnx-*-android-rknn.tar.bz2 ./ + + git status + git lfs track "*.bz2" + + git add . 
+ + git commit -m "upload sherpa-onnx-${SHERPA_ONNX_VERSION}-android.tar.bz2" + + git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-libs main + + - name: Release android libs + if: (github.repository_owner == 'csukuangfj' || github.repository_owner == 'k2-fsa') && github.event_name == 'push' && contains(github.ref, 'refs/tags/') + uses: svenstaro/upload-release-action@v2 + with: + file_glob: true + overwrite: true + file: sherpa-onnx-*-android-rknn.tar.bz2 + # repo_name: k2-fsa/sherpa-onnx + # repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }} + # tag: v1.11.3 + + build-android-aar-rknn: + needs: [build-android-rknn-libs] + name: Android rknn AAR + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + # https://github.com/actions/setup-java + - uses: actions/setup-java@v4 + with: + distribution: 'temurin' # See 'Supported distributions' for available options + java-version: '21' + + - name: Display NDK HOME + shell: bash + run: | + echo "ANDROID_NDK_LATEST_HOME: ${ANDROID_NDK_LATEST_HOME}" + ls -lh ${ANDROID_NDK_LATEST_HOME} + + - name: Retrieve artifact + uses: actions/download-artifact@v4 + with: + name: sherpa-onnx-android-libs-rknn + path: /tmp/jniLibs + + - name: Show jni libs + shell: bash + run: | + ls -lh /tmp/jniLibs + + # drwxr-xr-x 2 runner docker 4.0K Dec 12 06:56 arm64-v8a + # drwxr-xr-x 2 runner docker 4.0K Dec 12 06:56 armeabi-v7a + + - name: Copy libs + shell: bash + run: | + for arch in arm64-v8a armeabi-v7a; do + cp -v /tmp/jniLibs/$arch/* android/SherpaOnnxAar/sherpa_onnx/src/main/jniLibs/$arch/ + done + + rm -rf android/SherpaOnnxAar/sherpa_onnx/src/main/jniLibs/x86 + rm -rf android/SherpaOnnxAar/sherpa_onnx/src/main/jniLibs/x86_64 + + - name: Check libs + shell: bash + run: | + ls -lh android/SherpaOnnxAar/sherpa_onnx/src/main/jniLibs/* + + - name: Build aar + shell: bash + run: | + cd android/SherpaOnnxAar + + 
./gradlew :sherpa_onnx:assembleRelease + + - name: Display aar + shell: bash + run: | + cd android/SherpaOnnxAar + + ls -lh ./sherpa_onnx/build/outputs/aar/sherpa_onnx-release.aar + cp ./sherpa_onnx/build/outputs/aar/sherpa_onnx-release.aar ../../ + + + - name: Rename aar + shell: bash + run: | + SHERPA_ONNX_VERSION=$(grep "SHERPA_ONNX_VERSION" ./CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2) + echo "SHERPA_ONNX_VERSION=$SHERPA_ONNX_VERSION" >> "$GITHUB_ENV" + + mv sherpa_onnx-release.aar sherpa-onnx-${SHERPA_ONNX_VERSION}-rknn.aar + + - uses: actions/upload-artifact@v4 + with: + name: sherpa-onnx-android-aar + path: ./*.aar + + # https://huggingface.co/docs/hub/spaces-github-actions + - name: Publish to huggingface + if: (github.repository_owner == 'csukuangfj' || github.repository_owner == 'k2-fsa') && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') + env: + HF_TOKEN: ${{ secrets.HF_TOKEN }} + uses: nick-fields/retry@v3 + with: + max_attempts: 20 + timeout_seconds: 200 + shell: bash + command: | + git config --global user.email "csukuangfj@gmail.com" + git config --global user.name "Fangjun Kuang" + du -h -d1 . + ls -lh + + rm -rf huggingface + export GIT_CLONE_PROTECTION_ACTIVE=false + GIT_LFS_SKIP_SMUDGE=1 git clone https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-libs huggingface + + cd huggingface + dst=android/aar + mkdir -p $dst + + cp -v ../*.aar $dst + + git status + git lfs track "*.aar" + + git add . 
+ + git commit -m "upload sherpa-onnx-${SHERPA_ONNX_VERSION}-rknn.aar" + + git push https://csukuangfj:$HF_TOKEN@huggingface.co/csukuangfj/sherpa-onnx-libs main + + - name: Release android aar + if: github.repository_owner == 'csukuangfj' && github.event_name == 'push' && contains(github.ref, 'refs/tags/') + uses: svenstaro/upload-release-action@v2 + with: + file_glob: true + overwrite: true + file: ./*.aar + # repo_name: k2-fsa/sherpa-onnx + # repo_token: ${{ secrets.UPLOAD_GH_SHERPA_ONNX_TOKEN }} + # tag: v1.11.3 + + - name: Release android aar + if: github.repository_owner == 'k2-fsa' && github.event_name == 'push' && contains(github.ref, 'refs/tags/') + uses: svenstaro/upload-release-action@v2 + with: + file_glob: true + overwrite: true + file: ./*.aar diff --git a/build-android-arm64-v8a.sh b/build-android-arm64-v8a.sh index 88ba09ef..c84836f1 100755 --- a/build-android-arm64-v8a.sh +++ b/build-android-arm64-v8a.sh @@ -97,6 +97,23 @@ fi echo "SHERPA_ONNXRUNTIME_LIB_DIR: $SHERPA_ONNXRUNTIME_LIB_DIR" echo "SHERPA_ONNXRUNTIME_INCLUDE_DIR $SHERPA_ONNXRUNTIME_INCLUDE_DIR" +if [ -z $SHERPA_ONNX_ENABLE_RKNN ]; then + SHERPA_ONNX_ENABLE_RKNN=OFF +fi + +if [ $SHERPA_ONNX_ENABLE_RKNN == ON ]; then + rknn_version=2.2.0 + if [ ! 
-d ./librknnrt-android ]; then + rm -fv librknnrt-android.tar.bz2 + wget https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/librknnrt-android.tar.bz2 + tar xvf librknnrt-android.tar.bz2 + rm librknnrt-android.tar.bz2 + fi + + export SHERPA_ONNX_RKNN_TOOLKIT2_LIB_DIR=$PWD/librknnrt-android/v$rknn_version/arm64-v8a/ + export CPLUS_INCLUDE_PATH=$PWD/librknnrt-android/v$rknn_version/include:$CPLUS_INCLUDE_PATH +fi + if [ -z $SHERPA_ONNX_ENABLE_TTS ]; then SHERPA_ONNX_ENABLE_TTS=ON fi @@ -135,6 +152,7 @@ cmake -DCMAKE_TOOLCHAIN_FILE="$ANDROID_NDK/build/cmake/android.toolchain.cmake" -DSHERPA_ONNX_LINK_LIBSTDCPP_STATICALLY=OFF \ -DSHERPA_ONNX_ENABLE_C_API=$SHERPA_ONNX_ENABLE_C_API \ -DCMAKE_INSTALL_PREFIX=./install \ + -DSHERPA_ONNX_ENABLE_RKNN=$SHERPA_ONNX_ENABLE_RKNN \ -DANDROID_ABI="arm64-v8a" \ -DANDROID_PLATFORM=android-21 .. @@ -147,6 +165,11 @@ cmake -DCMAKE_TOOLCHAIN_FILE="$ANDROID_NDK/build/cmake/android.toolchain.cmake" make -j4 make install/strip cp -fv $onnxruntime_version/jni/arm64-v8a/libonnxruntime.so install/lib 2>/dev/null || true + +if [ $SHERPA_ONNX_ENABLE_RKNN == ON ]; then + cp -fv $SHERPA_ONNX_RKNN_TOOLKIT2_LIB_DIR/librknnrt.so install/lib +fi + rm -rf install/share rm -rf install/lib/pkgconfig rm -rf install/lib/lib*.a @@ -186,3 +209,19 @@ fi # It should show the help message of sherpa-onnx. # # Please use the above approach to copy model files to your phone. +# +# ---------------------------------------- +# For android rknn +# ---------------------------------------- +# If you get the following error from the logcat +# 2025-04-15 15:27:43.441 19568-19646 RKNN com.k2fsa.sherpa.onnx E Meet unsupported input dtype for gather +# 2025-04-15 15:27:43.442 19568-19646 RKNN com.k2fsa.sherpa.onnx E Op type:Gather, name: Gather:/Concat_78_2gather, fallback cpu failed. If using rknn, update to the latest toolkit2 and runtime from: https://console.zbox.filez.com/l/I00fc3 (PWD: rknn). 
If using rknn-llm, update from: https://github.com/airockchip/rknn-llm +# 2025-04-15 15:27:43.442 19568-19646 sherpa-onnx com.k2fsa.sherpa.onnx W Return code is: -1 +# 2025-04-15 15:27:43.442 19568-19646 sherpa-onnx com.k2fsa.sherpa.onnx W Failed to run encoder +# +# You need to update /vendor/lib64/librknnrt.so and /vendor/lib/librknnrt.so +# +# adb root +# adb remount /vendor +# adb push ./install/lib/librknnrt.so /vendor/lib64 +# adb push ./install/lib/librknnrt.so /vendor/lib diff --git a/build-android-armv7-eabi.sh b/build-android-armv7-eabi.sh index 77d4615e..2e350cd2 100755 --- a/build-android-armv7-eabi.sh +++ b/build-android-armv7-eabi.sh @@ -98,6 +98,23 @@ fi echo "SHERPA_ONNXRUNTIME_LIB_DIR: $SHERPA_ONNXRUNTIME_LIB_DIR" echo "SHERPA_ONNXRUNTIME_INCLUDE_DIR $SHERPA_ONNXRUNTIME_INCLUDE_DIR" +if [ -z $SHERPA_ONNX_ENABLE_RKNN ]; then + SHERPA_ONNX_ENABLE_RKNN=OFF +fi + +if [ $SHERPA_ONNX_ENABLE_RKNN == ON ]; then + rknn_version=2.2.0 + if [ ! -d ./librknnrt-android ]; then + rm -fv librknnrt-android.tar.bz2 + wget https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/librknnrt-android.tar.bz2 + tar xvf librknnrt-android.tar.bz2 + rm librknnrt-android.tar.bz2 + fi + + export SHERPA_ONNX_RKNN_TOOLKIT2_LIB_DIR=$PWD/librknnrt-android/v$rknn_version/armeabi-v7a/ + export CPLUS_INCLUDE_PATH=$PWD/librknnrt-android/v$rknn_version/include:$CPLUS_INCLUDE_PATH +fi + if [ -z $SHERPA_ONNX_ENABLE_TTS ]; then SHERPA_ONNX_ENABLE_TTS=ON fi @@ -136,6 +153,7 @@ cmake -DCMAKE_TOOLCHAIN_FILE="$ANDROID_NDK/build/cmake/android.toolchain.cmake" -DSHERPA_ONNX_LINK_LIBSTDCPP_STATICALLY=OFF \ -DSHERPA_ONNX_ENABLE_C_API=$SHERPA_ONNX_ENABLE_C_API \ -DCMAKE_INSTALL_PREFIX=./install \ + -DSHERPA_ONNX_ENABLE_RKNN=$SHERPA_ONNX_ENABLE_RKNN \ -DANDROID_ABI="armeabi-v7a" -DANDROID_ARM_NEON=ON \ -DANDROID_PLATFORM=android-21 .. 
@@ -146,6 +164,11 @@ cmake -DCMAKE_TOOLCHAIN_FILE="$ANDROID_NDK/build/cmake/android.toolchain.cmake" make -j4 make install/strip cp -fv $onnxruntime_version/jni/armeabi-v7a/libonnxruntime.so install/lib 2>/dev/null || true + +if [ $SHERPA_ONNX_ENABLE_RKNN == ON ]; then + cp -fv $SHERPA_ONNX_RKNN_TOOLKIT2_LIB_DIR/librknnrt.so install/lib +fi + rm -rf install/share rm -rf install/lib/pkgconfig rm -rf install/lib/lib*.a diff --git a/sherpa-onnx/csrc/online-recognizer-impl.cc b/sherpa-onnx/csrc/online-recognizer-impl.cc index 328081c4..9354ab08 100644 --- a/sherpa-onnx/csrc/online-recognizer-impl.cc +++ b/sherpa-onnx/csrc/online-recognizer-impl.cc @@ -51,7 +51,9 @@ std::unique_ptr OnlineRecognizerImpl::Create( #else SHERPA_ONNX_LOGE( "Please rebuild sherpa-onnx with -DSHERPA_ONNX_ENABLE_RKNN=ON if you " - "want to use rknn. Fallback to CPU"); + "want to use rknn."); + SHERPA_ONNX_EXIT(-1); + return nullptr; #endif } @@ -108,7 +110,9 @@ std::unique_ptr OnlineRecognizerImpl::Create( #else SHERPA_ONNX_LOGE( "Please rebuild sherpa-onnx with -DSHERPA_ONNX_ENABLE_RKNN=ON if you " - "want to use rknn. 
Fallback to CPU"); + "want to use rknn."); + SHERPA_ONNX_EXIT(-1); + return nullptr; #endif } diff --git a/sherpa-onnx/csrc/rknn/online-recognizer-ctc-rknn-impl.h b/sherpa-onnx/csrc/rknn/online-recognizer-ctc-rknn-impl.h index 9edd45a4..32fc6b8d 100644 --- a/sherpa-onnx/csrc/rknn/online-recognizer-ctc-rknn-impl.h +++ b/sherpa-onnx/csrc/rknn/online-recognizer-ctc-rknn-impl.h @@ -55,9 +55,9 @@ class OnlineRecognizerCtcRknnImpl : public OnlineRecognizerImpl { const OnlineRecognizerConfig &config) : OnlineRecognizerImpl(mgr, config), config_(config), - model_( - std::make_unique(config.model_config)), - sym_(mgr, config.model_config.tokens), + model_(std::make_unique( + mgr, config_.model_config)), + sym_(mgr, config_.model_config.tokens), endpoint_(config_.endpoint_config) { InitDecoder(); } diff --git a/sherpa-onnx/csrc/rknn/online-recognizer-transducer-rknn-impl.h b/sherpa-onnx/csrc/rknn/online-recognizer-transducer-rknn-impl.h index c3df5b80..8336ed21 100644 --- a/sherpa-onnx/csrc/rknn/online-recognizer-transducer-rknn-impl.h +++ b/sherpa-onnx/csrc/rknn/online-recognizer-transducer-rknn-impl.h @@ -111,9 +111,33 @@ class OnlineRecognizerTransducerRknnImpl : public OnlineRecognizerImpl { : OnlineRecognizerImpl(mgr, config), config_(config), endpoint_(config_.endpoint_config), - model_( - std::make_unique(mgr, config)) { - // TODO(fangjun): Support Android + model_(std::make_unique( + mgr, config_.model_config)) { + if (!config.model_config.tokens_buf.empty()) { + sym_ = SymbolTable(config.model_config.tokens_buf, false); + } else { + /// assuming tokens_buf and tokens are guaranteed not being both empty + sym_ = SymbolTable(mgr, config.model_config.tokens); + } + + if (sym_.Contains("<unk>")) { + unk_id_ = sym_["<unk>"]; + } + + if (config.decoding_method == "greedy_search") { + decoder_ = std::make_unique( + model_.get(), unk_id_); + } else if (config.decoding_method == "modified_beam_search") { + decoder_ = + std::make_unique( + model_.get(), config.max_active_paths, 
unk_id_); + } else { + SHERPA_ONNX_LOGE( + "Invalid decoding method: '%s'. Support only greedy_search and " + "modified_beam_search.", + config.decoding_method.c_str()); + SHERPA_ONNX_EXIT(-1); + } } std::unique_ptr CreateStream() const override { diff --git a/sherpa-onnx/csrc/rknn/online-zipformer-ctc-model-rknn.cc b/sherpa-onnx/csrc/rknn/online-zipformer-ctc-model-rknn.cc index eeb08007..4a89a618 100644 --- a/sherpa-onnx/csrc/rknn/online-zipformer-ctc-model-rknn.cc +++ b/sherpa-onnx/csrc/rknn/online-zipformer-ctc-model-rknn.cc @@ -252,7 +252,7 @@ OnlineZipformerCtcModelRknn::OnlineZipformerCtcModelRknn( template OnlineZipformerCtcModelRknn::OnlineZipformerCtcModelRknn( Manager *mgr, const OnlineModelConfig &config) - : impl_(std::make_unique(mgr, config)) {} + : impl_(std::make_unique(mgr, config)) {} std::vector> OnlineZipformerCtcModelRknn::GetInitStates() const { diff --git a/sherpa-onnx/csrc/rknn/online-zipformer-transducer-model-rknn.cc b/sherpa-onnx/csrc/rknn/online-zipformer-transducer-model-rknn.cc index 9c4f6ea6..69199944 100644 --- a/sherpa-onnx/csrc/rknn/online-zipformer-transducer-model-rknn.cc +++ b/sherpa-onnx/csrc/rknn/online-zipformer-transducer-model-rknn.cc @@ -435,8 +435,7 @@ OnlineZipformerTransducerModelRknn::OnlineZipformerTransducerModelRknn( template OnlineZipformerTransducerModelRknn::OnlineZipformerTransducerModelRknn( Manager *mgr, const OnlineModelConfig &config) - : impl_(std::make_unique(mgr, config)) { -} + : impl_(std::make_unique(mgr, config)) {} std::vector> OnlineZipformerTransducerModelRknn::GetEncoderInitStates() const { diff --git a/sherpa-onnx/csrc/vad-model.cc b/sherpa-onnx/csrc/vad-model.cc index 676fc1d1..714801d8 100644 --- a/sherpa-onnx/csrc/vad-model.cc +++ b/sherpa-onnx/csrc/vad-model.cc @@ -22,22 +22,34 @@ namespace sherpa_onnx { std::unique_ptr VadModel::Create(const VadModelConfig &config) { -#if SHERPA_ONNX_ENABLE_RKNN if (config.provider == "rknn") { +#if SHERPA_ONNX_ENABLE_RKNN return std::make_unique(config); - 
} +#else + SHERPA_ONNX_LOGE( + "Please rebuild sherpa-onnx with -DSHERPA_ONNX_ENABLE_RKNN=ON if you " + "want to use rknn."); + SHERPA_ONNX_EXIT(-1); + return nullptr; #endif + } return std::make_unique(config); } template std::unique_ptr VadModel::Create(Manager *mgr, const VadModelConfig &config) { -#if SHERPA_ONNX_ENABLE_RKNN if (config.provider == "rknn") { +#if SHERPA_ONNX_ENABLE_RKNN return std::make_unique(mgr, config); - } +#else + SHERPA_ONNX_LOGE( + "Please rebuild sherpa-onnx with -DSHERPA_ONNX_ENABLE_RKNN=ON if you " + "want to use rknn."); + SHERPA_ONNX_EXIT(-1); + return nullptr; #endif + } return std::make_unique(mgr, config); } diff --git a/sherpa-onnx/kotlin-api/OnlineRecognizer.kt b/sherpa-onnx/kotlin-api/OnlineRecognizer.kt index 3aeecdca..43d1b64c 100644 --- a/sherpa-onnx/kotlin-api/OnlineRecognizer.kt +++ b/sherpa-onnx/kotlin-api/OnlineRecognizer.kt @@ -394,6 +394,34 @@ fun getModelConfig(type: Int): OnlineModelConfig? { tokens = "$modelDir/tokens.txt", ) } + + 1000 -> { + val modelDir = "sherpa-onnx-rk3588-streaming-zipformer-bilingual-zh-en-2023-02-20" + return OnlineModelConfig( + transducer = OnlineTransducerModelConfig( + encoder = "$modelDir/encoder.rknn", + decoder = "$modelDir/decoder.rknn", + joiner = "$modelDir/joiner.rknn", + ), + tokens = "$modelDir/tokens.txt", + modelType = "zipformer", + provider = "rknn", + ) + } + + 1001 -> { + val modelDir = "sherpa-onnx-rk3588-streaming-zipformer-small-bilingual-zh-en-2023-02-16" + return OnlineModelConfig( + transducer = OnlineTransducerModelConfig( + encoder = "$modelDir/encoder.rknn", + decoder = "$modelDir/decoder.rknn", + joiner = "$modelDir/joiner.rknn", + ), + tokens = "$modelDir/tokens.txt", + modelType = "zipformer", + provider = "rknn", + ) + } } return null }