Support linking onnxruntime lib statically on Linux (#326)

This commit is contained in:
Fangjun Kuang
2023-09-21 10:15:42 +08:00
committed by GitHub
parent f5c060dd61
commit 532ed142d2
8 changed files with 106 additions and 7 deletions

View File

@@ -24,7 +24,7 @@ jobs:
- name: Install dependencies
shell: bash
run: |
-        python3 -m pip install openai-whisper torch onnxruntime onnx
+        python3 -m pip install openai-whisper torch onnxruntime==1.15.1 onnx
- name: export ${{ matrix.model }}
shell: bash

View File

@@ -39,12 +39,14 @@ concurrency:
jobs:
linux:
name: ${{ matrix.build_type }} ${{ matrix.shared_lib }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
build_type: [Release, Debug]
shared_lib: [ON, OFF]
steps:
- uses: actions/checkout@v2
@@ -56,7 +58,7 @@ jobs:
run: |
mkdir build
cd build
-        cmake -D CMAKE_BUILD_TYPE=${{ matrix.build_type }} -DCMAKE_INSTALL_PREFIX=./install ..
+        cmake -D CMAKE_BUILD_TYPE=${{ matrix.build_type }} -D BUILD_SHARED_LIBS=${{ matrix.shared_lib }} -DCMAKE_INSTALL_PREFIX=./install ..
- name: Build sherpa-onnx for ubuntu
shell: bash
@@ -88,6 +90,8 @@ jobs:
export PATH=$PWD/build/bin:$PATH
export EXE=sherpa-onnx-offline
readelf -d build/bin/sherpa-onnx-offline
.github/scripts/test-offline-whisper.sh
- name: Test offline CTC