* Support using onnxruntime 1.16.0 with CUDA 11.4 on Jetson Orin NX. The pre-built onnxruntime libs are provided by the community using the following command: ```bash ./build.sh --build_shared_lib --config Release --update \ --build --parallel --use_cuda \ --cuda_home /usr/local/cuda \ --cudnn_home /usr/lib/aarch64-linux-gnu 2>&1 | tee my-log.txt ``` See also https://github.com/microsoft/onnxruntime/discussions/11226 --- Info about the board: ``` Model: NVIDIA Orin NX T801-16GB - Jetpack 5.1.4 [L4T 35.6.0] ``` ``` nvidia@nvidia-desktop:~/Downloads$ head -n 1 /etc/nv_tegra_release # R35 (release), REVISION: 6.0, GCID: 37391689, BOARD: t186ref, EABI: aarch64, DATE: Wed Aug 28 09:12:27 UTC 2024 nvidia@nvidia-desktop:~/Downloads$ uname -r 5.10.216-tegra nvidia@nvidia-desktop:~/Downloads$ lsb_release -i -r Distributor ID: Ubuntu Release: 20.04 nvidia@nvidia-desktop:~/Downloads$ nvcc -V nvcc: NVIDIA (R) Cuda compiler driver Copyright (c) 2005-2022 NVIDIA Corporation Built on Wed_Sep_21_10:43:33_PDT_2022 Cuda compilation tools, release 11.8, V11.8.89 Build cuda_11.8.r11.8/compiler.31833905_0 nvidia@nvidia-desktop:~/Downloads$ dpkg -l libcudnn8 Desired=Unknown/Install/Remove/Purge/Hold | Status=Not/Inst/Conf-files/Unpacked/halF-conf/Half-inst/trig-aWait/Trig-pend |/ Err?=(none)/Reinst-required (Status,Err: uppercase=bad) ||/ Name Version Architecture Description +++-==============-====================-============-================================= ii libcudnn8 8.6.0.166-1+cuda11.4 arm64 cuDNN runtime libraries nvidia@nvidia-desktop:~/Downloads$ dpkg -l tensorrt Desired=Unknown/Install/Remove/Purge/Hold | Status=Not/Inst/Conf-files/Unpacked/halF-conf/Half-inst/trig-aWait/Trig-pend |/ Err?=(none)/Reinst-required (Status,Err: uppercase=bad) ||/ Name Version Architecture Description +++-==============-==================-============-================================= ii tensorrt 8.5.2.2-1+cuda11.4 arm64 Meta package for TensorRT ```
120 lines
4.3 KiB
CMake
120 lines
4.3 KiB
CMake
# Copyright (c) 2022-2024 Xiaomi Corporation
|
|
# This file only supports building shared libraries with CUDA enabled on
# 64-bit ARM Linux (e.g., NVIDIA Jetson boards). Validate the configuration
# up-front and fail fast with a clear message otherwise.
message(STATUS "CMAKE_SYSTEM_NAME: ${CMAKE_SYSTEM_NAME}")
message(STATUS "CMAKE_SYSTEM_PROCESSOR: ${CMAKE_SYSTEM_PROCESSOR}")

# Both operands of STREQUAL are quoted so CMake never re-dereferences them
# as variable names (the CMP0054 footgun with unquoted comparisons).
if(NOT "${CMAKE_SYSTEM_NAME}" STREQUAL "Linux")
  message(FATAL_ERROR "This file is for Linux only. Given: ${CMAKE_SYSTEM_NAME}")
endif()

if(NOT "${CMAKE_SYSTEM_PROCESSOR}" STREQUAL "aarch64")
  message(FATAL_ERROR "This file is for aarch64 only. Given: ${CMAKE_SYSTEM_PROCESSOR}")
endif()

if(NOT BUILD_SHARED_LIBS)
  message(FATAL_ERROR "This file is for building shared libraries. BUILD_SHARED_LIBS: ${BUILD_SHARED_LIBS}")
endif()

if(NOT SHERPA_ONNX_ENABLE_GPU)
  message(FATAL_ERROR "This file is for NVIDIA GPU only. Given SHERPA_ONNX_ENABLE_GPU: ${SHERPA_ONNX_ENABLE_GPU}")
endif()
|
|
|
|
# Emitted unconditionally as a WARNING (not STATUS) so the version/board
# pairing guidance is prominent in configure logs. The trailing backslash
# on the first line suppresses the leading newline in the message.
message(WARNING "\
SHERPA_ONNX_LINUX_ARM64_GPU_ONNXRUNTIME_VERSION: ${SHERPA_ONNX_LINUX_ARM64_GPU_ONNXRUNTIME_VERSION}
If you use Jetson nano b01, then please pass
-DSHERPA_ONNX_LINUX_ARM64_GPU_ONNXRUNTIME_VERSION=1.11.0
to cmake (You need to make sure CUDA 10.2 is available on your board).

If you use Jetson Orin NX, then please pass
-DSHERPA_ONNX_LINUX_ARM64_GPU_ONNXRUNTIME_VERSION=1.16.0
to cmake (You need to make sure CUDA 11.4 is available on your board).
")
|
|
|
|
# Short alias for the user-selected onnxruntime version; used below to build
# the download URLs and the local-file candidate paths.
set(v ${SHERPA_ONNX_LINUX_ARM64_GPU_ONNXRUNTIME_VERSION})

# Primary download location plus a mirror (huggingface) fallback.
set(onnxruntime_URL "https://github.com/csukuangfj/onnxruntime-libs/releases/download/v${v}/onnxruntime-linux-aarch64-gpu-${v}.tar.bz2")
set(onnxruntime_URL2 "https://hf-mirror.com/csukuangfj/onnxruntime-libs/resolve/main/onnxruntime-linux-aarch64-gpu-${v}.tar.bz2")

# Quote the expansion so an empty or unusual version string cannot be
# re-interpreted as a variable name in the comparison.
if("${v}" STREQUAL "1.11.0")
  set(onnxruntime_HASH "SHA256=36eded935551e23aead09d4173bdf0bd1e7b01fdec15d77f97d6e34029aa60d7")
else()
  # This is the 1.16.0 checksum. Any other version also falls through to
  # here; the download then fails the URL_HASH check with a mismatch, since
  # only 1.11.0 and 1.16.0 archives are published for this target.
  set(onnxruntime_HASH "SHA256=4c09d5acf2c2682b4eab1dc2f1ad98fc1fde5f5f1960063e337983ba59379a4b")
endif()
|
|
|
|
# If you don't have access to the Internet,
# please download onnxruntime to one of the following locations.
# You can add more if you want.
set(possible_file_locations
  $ENV{HOME}/Downloads/onnxruntime-linux-aarch64-gpu-${v}.tar.bz2
  ${CMAKE_SOURCE_DIR}/onnxruntime-linux-aarch64-gpu-${v}.tar.bz2
  ${CMAKE_BINARY_DIR}/onnxruntime-linux-aarch64-gpu-${v}.tar.bz2
  /tmp/onnxruntime-linux-aarch64-gpu-${v}.tar.bz2
  /star-fj/fangjun/download/github/onnxruntime-linux-aarch64-gpu-${v}.tar.bz2
)

# Prefer a pre-downloaded archive over fetching from the network; the first
# existing candidate wins.
foreach(f IN LISTS possible_file_locations)
  # Quoted: paths (e.g. under $HOME) may contain spaces or list separators.
  if(EXISTS "${f}")
    set(onnxruntime_URL "${f}")
    file(TO_CMAKE_PATH "${onnxruntime_URL}" onnxruntime_URL)
    message(STATUS "Found local downloaded onnxruntime: ${onnxruntime_URL}")
    # Unset the mirror URL so FetchContent only uses the local file.
    set(onnxruntime_URL2)
    break()
  endif()
endforeach()
|
|
|
|
# Make sure the FetchContent module is available even if no earlier file
# included it; include() of a CMake module is idempotent.
include(FetchContent)

# onnxruntime_URL2 may have been unset above (local archive found); an empty
# variable simply expands to nothing in the URL list.
FetchContent_Declare(onnxruntime
  URL
    ${onnxruntime_URL}
    ${onnxruntime_URL2}
  URL_HASH ${onnxruntime_HASH}
)

FetchContent_GetProperties(onnxruntime)
if(NOT onnxruntime_POPULATED)
  message(STATUS "Downloading onnxruntime from ${onnxruntime_URL}")
  # Populate (download + extract) only. The archive contains prebuilt
  # binaries, so there is no CMake project to add via add_subdirectory(),
  # which is why FetchContent_MakeAvailable() is not used here.
  FetchContent_Populate(onnxruntime)
endif()
message(STATUS "onnxruntime is downloaded to ${onnxruntime_SOURCE_DIR}")
|
|
|
|
# Locate libonnxruntime inside the extracted archive only;
# NO_CMAKE_SYSTEM_PATH prevents accidentally picking up a system-wide
# onnxruntime installation.
find_library(location_onnxruntime onnxruntime
  PATHS
  "${onnxruntime_SOURCE_DIR}/lib"
  NO_CMAKE_SYSTEM_PATH
)

message(STATUS "location_onnxruntime: ${location_onnxruntime}")

# Wrap the prebuilt shared library in an imported target so consumers get
# the include directory as a usage requirement.
add_library(onnxruntime SHARED IMPORTED)

set_target_properties(onnxruntime PROPERTIES
  # Quoted: the extracted path may contain spaces.
  IMPORTED_LOCATION "${location_onnxruntime}"
  INTERFACE_INCLUDE_DIRECTORIES "${onnxruntime_SOURCE_DIR}/include"
)
|
|
|
|
# Locate the CUDA execution-provider library from the extracted archive only
# (NO_CMAKE_SYSTEM_PATH avoids any system-wide copy).
find_library(location_onnxruntime_cuda_lib onnxruntime_providers_cuda
  PATHS
  "${onnxruntime_SOURCE_DIR}/lib"
  NO_CMAKE_SYSTEM_PATH
)

add_library(onnxruntime_providers_cuda SHARED IMPORTED)
set_target_properties(onnxruntime_providers_cuda PROPERTIES
  # Quoted: the extracted path may contain spaces.
  IMPORTED_LOCATION "${location_onnxruntime_cuda_lib}"
)
message(STATUS "location_onnxruntime_cuda_lib: ${location_onnxruntime_cuda_lib}")
|
|
|
|
# for libonnxruntime_providers_shared.so
# Locate it from the extracted archive only (NO_CMAKE_SYSTEM_PATH avoids any
# system-wide copy).
find_library(location_onnxruntime_providers_shared_lib onnxruntime_providers_shared
  PATHS
  "${onnxruntime_SOURCE_DIR}/lib"
  NO_CMAKE_SYSTEM_PATH
)
add_library(onnxruntime_providers_shared SHARED IMPORTED)
set_target_properties(onnxruntime_providers_shared PROPERTIES
  # Quoted: the extracted path may contain spaces.
  IMPORTED_LOCATION "${location_onnxruntime_providers_shared_lib}"
)
message(STATUS "location_onnxruntime_providers_shared_lib: ${location_onnxruntime_providers_shared_lib}")
|
|
|
|
# Collect every libonnxruntime* file shipped in the archive (this pattern
# also matches the providers_cuda / providers_shared libraries looked up
# above, plus any version-suffixed names) so they are installed alongside
# the project. GLOB is acceptable here: these are prebuilt artifacts being
# packaged, not compiled sources.
file(GLOB onnxruntime_lib_files "${onnxruntime_SOURCE_DIR}/lib/libonnxruntime*")
message(STATUS "onnxruntime lib files: ${onnxruntime_lib_files}")
install(FILES ${onnxruntime_lib_files} DESTINATION lib)
|