This repository has been archived on 2025-08-26. You can view files and clone it, but cannot push or open issues or pull requests.
Files
enginex-mr_series-sherpa-onnx/sherpa-onnx/csrc/online-rnn-lm.h
keanu 1a1b9fd236 RNNLM model support lm_num_thread and lm_provider setting (#173)
* rnnlm model inference supports num_threads setting

* rnnlm params: decouple num_thread and provider from the Transducer settings.

* fix Python csrc bug with the offline-lm-config.cc and online-lm-config.cc arguments

* lm_num_threads and lm_provider set default values

---------

Co-authored-by: cuidongcai1035 <cuidongcai1035@wezhuiyi.com>
2023-06-12 15:51:27 +08:00

55 lines
1.3 KiB
C++

// sherpa-onnx/csrc/online-rnn-lm.h
//
// Copyright (c) 2023 Pingfeng Luo
// Copyright (c) 2023 Xiaomi Corporation
#ifndef SHERPA_ONNX_CSRC_ONLINE_RNN_LM_H_
#define SHERPA_ONNX_CSRC_ONLINE_RNN_LM_H_
#include <memory>
#include <utility>
#include <vector>
#include "onnxruntime_cxx_api.h" // NOLINT
#include "sherpa-onnx/csrc/online-lm-config.h"
#include "sherpa-onnx/csrc/online-lm.h"
namespace sherpa_onnx {
// RNN-based neural network language model for rescoring hypotheses during
// online (streaming) decoding. ONNX Runtime details live in the private
// Impl class (pimpl idiom), so including this header does not pull the
// implementation's dependencies into every translation unit.
class OnlineRnnLM : public OnlineLM {
public:
~OnlineRnnLM() override;
// @param config LM configuration (model path, scale, lm_num_threads,
//               lm_provider, ...) used to create the underlying session.
explicit OnlineRnnLM(const OnlineLMConfig &config);
/** Get the initial states of the LM, to be passed to the first
 * ScoreToken() call.
 *
 * @return Return a pair mirroring ScoreToken()'s return shape.
 *         NOTE(review): first element's semantics (initial scores?) are
 *         defined by the OnlineLM base contract — confirm there.
 */
std::pair<Ort::Value, std::vector<Ort::Value>> GetInitStates() override;
/** Score a batch of token sequences.
 *
 * @param x A 2-D tensor of shape (N, L) with data type int64.
 * @param states It contains the states for the LM model
 * @return Return a pair containing
 * - log_prob of NN LM
 * - updated states
 *
 */
std::pair<Ort::Value, std::vector<Ort::Value>> ScoreToken(
Ort::Value x, std::vector<Ort::Value> states) override;
/** This function updates lm_log_prob and nn_lm_scores of hyp
 *
 * @param scale LM score scale applied to the NN LM log-probabilities
 * @param hyp The hypothesis to rescore. It is changed in-place.
 *
 */
void ComputeLMScore(float scale, Hypothesis *hyp) override;
private:
// Pimpl: hides the ONNX Runtime session and model state from this header.
class Impl;
std::unique_ptr<Impl> impl_;
};
} // namespace sherpa_onnx
#endif // SHERPA_ONNX_CSRC_ONLINE_RNN_LM_H_