diff --git a/.github/workflows/export-whisper-to-onnx.yaml b/.github/workflows/export-whisper-to-onnx.yaml index 4d6f9531..6b7664d5 100644 --- a/.github/workflows/export-whisper-to-onnx.yaml +++ b/.github/workflows/export-whisper-to-onnx.yaml @@ -24,7 +24,7 @@ jobs: - name: Install dependencies shell: bash run: | - python3 -m pip install openai-whisper torch onnxruntime onnx + python3 -m pip install openai-whisper torch onnxruntime==1.15.1 onnx - name: export ${{ matrix.model }} shell: bash diff --git a/.github/workflows/linux.yaml b/.github/workflows/linux.yaml index bbb2c775..1bfb327e 100644 --- a/.github/workflows/linux.yaml +++ b/.github/workflows/linux.yaml @@ -39,12 +39,14 @@ concurrency: jobs: linux: + name: ${{ matrix.build_type }} ${{ matrix.shared_lib }} runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: os: [ubuntu-latest] build_type: [Release, Debug] + shared_lib: [ON, OFF] steps: - uses: actions/checkout@v2 @@ -56,7 +58,7 @@ jobs: run: | mkdir build cd build - cmake -D CMAKE_BUILD_TYPE=${{ matrix.build_type }} -DCMAKE_INSTALL_PREFIX=./install .. + cmake -D CMAKE_BUILD_TYPE=${{ matrix.build_type }} -D BUILD_SHARED_LIBS=${{ matrix.shared_lib }} -DCMAKE_INSTALL_PREFIX=./install .. - name: Build sherpa-onnx for ubuntu shell: bash @@ -88,6 +90,8 @@ jobs: export PATH=$PWD/build/bin:$PATH export EXE=sherpa-onnx-offline + readelf -d build/bin/sherpa-onnx-offline + .github/scripts/test-offline-whisper.sh - name: Test offline CTC diff --git a/CMakeLists.txt b/CMakeLists.txt index 7c8840ee..833e5c20 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -20,6 +20,7 @@ option(SHERPA_ONNX_ENABLE_JNI "Whether to build JNI internface" OFF) option(SHERPA_ONNX_ENABLE_C_API "Whether to build C API" ON) option(SHERPA_ONNX_ENABLE_WEBSOCKET "Whether to build webscoket server/client" ON) option(SHERPA_ONNX_ENABLE_GPU "Enable ONNX Runtime GPU support" OFF) +option(SHERPA_ONNX_LINK_LIBSTDCPP_STATICALLY "True to link libstdc++ statically. 
Used only when BUILD_SHARED_LIBS is ON on Linux" ON) set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib") set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib") @@ -65,6 +66,10 @@ is installed on your system. Otherwise, you will get errors at runtime. Hint: You don't need sudo permission to install CUDA toolkit. Please refer to https://k2-fsa.github.io/k2/installation/cuda-cudnn.html to install CUDA toolkit if you have not installed it.") + if(NOT BUILD_SHARED_LIBS) + message(STATUS "Set BUILD_SHARED_LIBS to ON since SHERPA_ONNX_ENABLE_GPU is ON") + set(BUILD_SHARED_LIBS ON CACHE BOOL "" FORCE) + endif() endif() if(BUILD_SHARED_LIBS AND MSVC) @@ -131,12 +136,22 @@ if(WIN32 AND MSVC) foreach(w IN LISTS disabled_warnings) string(APPEND CMAKE_CXX_FLAGS " ${w} ") endforeach() -endif() + add_compile_options("$<$<C_COMPILER_ID:MSVC>:/utf-8>") + add_compile_options("$<$<CXX_COMPILER_ID:MSVC>:/utf-8>") +endif() list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake/Modules) list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake) +if(NOT BUILD_SHARED_LIBS AND LINUX AND NOT APPLE) + if(SHERPA_ONNX_LINK_LIBSTDCPP_STATICALLY) + message(STATUS "Link libstdc++ statically") + set(CMAKE_CXX_FLAGS " ${CMAKE_CXX_FLAGS} -static-libstdc++ -static-libgcc ") + else() + message(STATUS "Link libstdc++ dynamically") + endif() +endif() include(kaldi-native-fbank) include(onnxruntime) @@ -186,3 +201,4 @@ install( DESTINATION . 
) +message(STATUS "CMAKE_CXX_FLAGS: ${CMAKE_CXX_FLAGS}") diff --git a/c-api-examples/decode-file-c-api.c b/c-api-examples/decode-file-c-api.c index 792b12d6..542cab9c 100644 --- a/c-api-examples/decode-file-c-api.c +++ b/c-api-examples/decode-file-c-api.c @@ -96,6 +96,7 @@ int32_t main(int32_t argc, char *argv[]) { } SherpaOnnxOnlineRecognizerConfig config; + memset(&config, 0, sizeof(config)); config.model_config.debug = 0; config.model_config.num_threads = 1; @@ -195,7 +196,7 @@ int32_t main(int32_t argc, char *argv[]) { DecodeOnlineStream(recognizer, stream); } - SherpaOnnxOnlineRecognizerResult *r = + const SherpaOnnxOnlineRecognizerResult *r = GetOnlineStreamResult(recognizer, stream); if (strlen(r->text)) { @@ -223,7 +224,7 @@ int32_t main(int32_t argc, char *argv[]) { DecodeOnlineStream(recognizer, stream); } - SherpaOnnxOnlineRecognizerResult *r = + const SherpaOnnxOnlineRecognizerResult *r = GetOnlineStreamResult(recognizer, stream); if (strlen(r->text)) { diff --git a/cmake/onnxruntime-linux-x86_64-static.cmake b/cmake/onnxruntime-linux-x86_64-static.cmake new file mode 100644 index 00000000..90045ad4 --- /dev/null +++ b/cmake/onnxruntime-linux-x86_64-static.cmake @@ -0,0 +1,67 @@ +# Copyright (c) 2022-2023 Xiaomi Corporation +message(STATUS "CMAKE_SYSTEM_NAME: ${CMAKE_SYSTEM_NAME}") +message(STATUS "CMAKE_SYSTEM_PROCESSOR: ${CMAKE_SYSTEM_PROCESSOR}") + +if(NOT CMAKE_SYSTEM_NAME STREQUAL Linux) + message(FATAL_ERROR "This file is for Linux only. Given: ${CMAKE_SYSTEM_NAME}") +endif() + +if(NOT CMAKE_SYSTEM_PROCESSOR STREQUAL x86_64) + message(FATAL_ERROR "This file is for x86_64 only. Given: ${CMAKE_SYSTEM_PROCESSOR}") +endif() + +if(BUILD_SHARED_LIBS) + message(FATAL_ERROR "This file is for building static libraries. 
BUILD_SHARED_LIBS: ${BUILD_SHARED_LIBS}") +endif() + +# TODO(fangjun): update the URL +set(onnxruntime_URL "https://huggingface.co/csukuangfj/sherpa-onnx-cmake-deps/resolve/main/onnxruntime-linux-x64-static_lib-1.15.1.tgz") +set(onnxruntime_URL2 "https://huggingface.co/csukuangfj/sherpa-onnx-cmake-deps/resolve/main/onnxruntime-linux-x64-static_lib-1.15.1.tgz") +set(onnxruntime_HASH "SHA256=b64fcf4115e3d02193c7406461d582703ccc1f0c24ad320ef74b07e5f71681c6") + +# If you don't have access to the Internet, +# please download onnxruntime to one of the following locations. +# You can add more if you want. +set(possible_file_locations + ${PROJECT_SOURCE_DIR}/onnxruntime-linux-x64-static_lib-1.15.1.tgz + + $ENV{HOME}/Downloads/onnxruntime-linux-x64-static_lib-1.15.1.tgz + ${PROJECT_SOURCE_DIR}/onnxruntime-linux-x64-static_lib-1.15.1.tgz + ${PROJECT_BINARY_DIR}/onnxruntime-linux-x64-static_lib-1.15.1.tgz + /tmp/onnxruntime-linux-x64-static_lib-1.15.1.tgz + /star-fj/fangjun/download/github/onnxruntime-linux-x64-static_lib-1.15.1.tgz +) + +foreach(f IN LISTS possible_file_locations) + if(EXISTS ${f}) + set(onnxruntime_URL "${f}") + file(TO_CMAKE_PATH "${onnxruntime_URL}" onnxruntime_URL) + message(STATUS "Found local downloaded onnxruntime: ${onnxruntime_URL}") + set(onnxruntime_URL2) + break() + endif() +endforeach() + +FetchContent_Declare(onnxruntime + URL + ${onnxruntime_URL} + ${onnxruntime_URL2} + URL_HASH ${onnxruntime_HASH} +) + +FetchContent_GetProperties(onnxruntime) +if(NOT onnxruntime_POPULATED) + message(STATUS "Downloading onnxruntime from ${onnxruntime_URL}") + FetchContent_Populate(onnxruntime) +endif() +message(STATUS "onnxruntime is downloaded to ${onnxruntime_SOURCE_DIR}") + +# for static libraries, we use onnxruntime_lib_files directly below +include_directories(${onnxruntime_SOURCE_DIR}/include) + +file(GLOB onnxruntime_lib_files "${onnxruntime_SOURCE_DIR}/lib/lib*.a") + +set(onnxruntime_lib_files ${onnxruntime_lib_files} PARENT_SCOPE) + +message(STATUS 
"onnxruntime lib files: ${onnxruntime_lib_files}") +install(FILES ${onnxruntime_lib_files} DESTINATION lib) diff --git a/cmake/onnxruntime-linux-x86_64.cmake b/cmake/onnxruntime-linux-x86_64.cmake index 47dac2af..9f34e06f 100644 --- a/cmake/onnxruntime-linux-x86_64.cmake +++ b/cmake/onnxruntime-linux-x86_64.cmake @@ -10,6 +10,10 @@ if(NOT CMAKE_SYSTEM_PROCESSOR STREQUAL x86_64) message(FATAL_ERROR "This file is for x86_64 only. Given: ${CMAKE_SYSTEM_PROCESSOR}") endif() +if(NOT BUILD_SHARED_LIBS) + message(FATAL_ERROR "This file is for building shared libraries. BUILD_SHARED_LIBS: ${BUILD_SHARED_LIBS}") +endif() + set(onnxruntime_URL "https://github.com/microsoft/onnxruntime/releases/download/v1.15.1/onnxruntime-linux-x64-1.15.1.tgz") set(onnxruntime_URL2 "https://huggingface.co/csukuangfj/sherpa-onnx-cmake-deps/resolve/main/onnxruntime-linux-x64-1.15.1.tgz") set(onnxruntime_HASH "SHA256=5492f9065f87538a286fb04c8542e9ff7950abb2ea6f8c24993a940006787d87") diff --git a/cmake/onnxruntime.cmake b/cmake/onnxruntime.cmake index 4e027e3a..171e4251 100644 --- a/cmake/onnxruntime.cmake +++ b/cmake/onnxruntime.cmake @@ -12,8 +12,10 @@ function(download_onnxruntime) elseif(CMAKE_SYSTEM_NAME STREQUAL Linux AND CMAKE_SYSTEM_PROCESSOR STREQUAL x86_64) if(SHERPA_ONNX_ENABLE_GPU) include(onnxruntime-linux-x86_64-gpu) - else() + elseif(BUILD_SHARED_LIBS) include(onnxruntime-linux-x86_64) + else() + include(onnxruntime-linux-x86_64-static) endif() elseif(CMAKE_SYSTEM_NAME STREQUAL Darwin) if (arm64 IN_LIST CMAKE_OSX_ARCHITECTURES AND x86_64 IN_LIST CMAKE_OSX_ARCHITECTURES) diff --git a/sherpa-onnx/csrc/CMakeLists.txt b/sherpa-onnx/csrc/CMakeLists.txt index b5c4c586..8cc1feb6 100644 --- a/sherpa-onnx/csrc/CMakeLists.txt +++ b/sherpa-onnx/csrc/CMakeLists.txt @@ -97,7 +97,7 @@ endif() target_link_libraries(sherpa-onnx-core kaldi-native-fbank-core) -if(BUILD_SHARED_LIBS OR NOT WIN32) +if(BUILD_SHARED_LIBS OR APPLE OR CMAKE_SYSTEM_PROCESSOR STREQUAL aarch64 OR CMAKE_SYSTEM_PROCESSOR 
STREQUAL arm) target_link_libraries(sherpa-onnx-core onnxruntime) else() target_link_libraries(sherpa-onnx-core ${onnxruntime_lib_files}) @@ -122,10 +122,15 @@ if(SHERPA_ONNX_ENABLE_CHECK) endif() endif() +if(NOT BUILD_SHARED_LIBS AND LINUX AND NOT APPLE) + target_link_libraries(sherpa-onnx-core -pthread -ldl) +endif() + add_executable(sherpa-onnx sherpa-onnx.cc) add_executable(sherpa-onnx-offline sherpa-onnx-offline.cc) add_executable(sherpa-onnx-offline-parallel sherpa-onnx-offline-parallel.cc) + target_link_libraries(sherpa-onnx sherpa-onnx-core) target_link_libraries(sherpa-onnx-offline sherpa-onnx-core) target_link_libraries(sherpa-onnx-offline-parallel sherpa-onnx-core)