Fix building (#1508)

This commit is contained in:
Fangjun Kuang
2024-11-03 19:47:04 +08:00
committed by GitHub
parent f0cced1f37
commit 6ee8c99c5d
3 changed files with 6 additions and 6 deletions

View File

@@ -123,7 +123,7 @@ std::unique_ptr<OfflineRecognizerImpl> OfflineRecognizerImpl::Create(
   auto model_type =
       LookupCustomModelMetaData(meta_data, "model_type", allocator);
-  if (!model_type.empty()) {
+  if (model_type.empty()) {
     SHERPA_ONNX_LOGE(
         "No model_type in the metadata!\n\n"
         "Please refer to the following URLs to add metadata"

View File

@@ -25,7 +25,7 @@ static std::string GetInputName(Ort::Session *sess, size_t index,
                                 OrtAllocator *allocator) {
   // Note(fangjun): We only tested 1.17.1 and 1.11.0
   // For other versions, we may need to change it
-#if ORT_API_VERSION >= 17
+#if ORT_API_VERSION >= 12
   auto v = sess->GetInputNameAllocated(index, allocator);
   return v.get();
 #else
@@ -40,7 +40,7 @@ static std::string GetOutputName(Ort::Session *sess, size_t index,
                                  OrtAllocator *allocator) {
   // Note(fangjun): We only tested 1.17.1 and 1.11.0
   // For other versions, we may need to change it
-#if ORT_API_VERSION >= 17
+#if ORT_API_VERSION >= 12
   auto v = sess->GetOutputNameAllocated(index, allocator);
   return v.get();
 #else
@@ -106,7 +106,7 @@ Ort::Value GetEncoderOutFrame(OrtAllocator *allocator, Ort::Value *encoder_out,
 void PrintModelMetadata(std::ostream &os, const Ort::ModelMetadata &meta_data) {
   Ort::AllocatorWithDefaultOptions allocator;
-#if ORT_API_VERSION >= 17
+#if ORT_API_VERSION >= 12
   std::vector<Ort::AllocatedStringPtr> v =
       meta_data.GetCustomMetadataMapKeysAllocated(allocator);
   for (const auto &key : v) {
@@ -406,7 +406,7 @@ std::string LookupCustomModelMetaData(const Ort::ModelMetadata &meta_data,
                                       OrtAllocator *allocator) {
   // Note(fangjun): We only tested 1.17.1 and 1.11.0
   // For other versions, we may need to change it
-#if ORT_API_VERSION >= 17
+#if ORT_API_VERSION >= 12
   auto v = meta_data.LookupCustomMetadataMapAllocated(key, allocator);
   return v.get();
 #else

View File

@@ -60,7 +60,7 @@ Ort::SessionOptions GetSessionOptionsImpl(
     case Provider::kCPU:
       break;  // nothing to do for the CPU provider
     case Provider::kXnnpack: {
-#if ORT_API_VERSION >= 17
+#if ORT_API_VERSION >= 12
       if (std::find(available_providers.begin(), available_providers.end(),
                     "XnnpackExecutionProvider") != available_providers.end()) {
         sess_opts.AppendExecutionProvider("XNNPACK");