RNNLM model supports lm_num_threads and lm_provider settings (#173)
* rnnlm model inference supports the num_threads setting * rnnlm params decouple num_threads and provider from the Transducer's settings * fix python csrc bug in offline-lm-config.cc and online-lm-config.cc argument handling * lm_num_threads and lm_provider set default values --------- Co-authored-by: cuidongcai1035 <cuidongcai1035@wezhuiyi.com>
This commit is contained in:
@@ -13,10 +13,13 @@ namespace sherpa_onnx {
|
||||
// Binds OfflineLMConfig to Python as "OfflineLMConfig".
//
// Two constructors are exposed:
//  - (model, scale): the original two-argument form, kept for backward
//    compatibility (no defaults, matching the original binding).
//  - (model, scale, lm_num_threads, lm_provider): additionally configures
//    the RNNLM thread count and ONNX Runtime provider, decoupled from the
//    transducer's own num_threads/provider settings.
void PybindOfflineLMConfig(py::module *m) {
  using PyClass = OfflineLMConfig;
  py::class_<PyClass>(*m, "OfflineLMConfig")
      .def(py::init<const std::string &, float>(), py::arg("model"),
           py::arg("scale"))
      .def(py::init<const std::string &, float, int32_t, const std::string &>(),
           py::arg("model"), py::arg("scale") = 0.5f,
           // Fix: was py::arg("lm-provider"). A hyphen is not valid in a
           // Python identifier, so that keyword argument was unusable from
           // Python; use lm_provider, consistent with OnlineLMConfig.
           py::arg("lm_num_threads") = 1, py::arg("lm_provider") = "cpu")
      .def_readwrite("model", &PyClass::model)
      .def_readwrite("scale", &PyClass::scale)
      .def_readwrite("lm_provider", &PyClass::lm_provider)
      .def_readwrite("lm_num_threads", &PyClass::lm_num_threads)
      .def("__str__", &PyClass::ToString);
}
|
||||
|
||||
|
||||
@@ -13,10 +13,13 @@ namespace sherpa_onnx {
|
||||
// Binds OnlineLMConfig to Python as "OnlineLMConfig".
//
// Exposes a two-argument constructor (model, scale) and an extended form
// that also sets the RNNLM thread count and ONNX Runtime provider; all
// fields are read/write attributes and __str__ maps to ToString().
void PybindOnlineLMConfig(py::module *m) {
  using PyClass = OnlineLMConfig;

  py::class_<PyClass> cls(*m, "OnlineLMConfig");

  // Original constructor: (model, scale), both defaulted.
  cls.def(py::init<const std::string &, float>(), py::arg("model") = "",
          py::arg("scale") = 0.5f);

  // Extended constructor: adds lm_num_threads and lm_provider, so the
  // RNNLM can be configured independently of the transducer.
  cls.def(py::init<const std::string &, float, int32_t, const std::string &>(),
          py::arg("model") = "", py::arg("scale") = 0.5f,
          py::arg("lm_num_threads") = 1, py::arg("lm_provider") = "cpu");

  // Expose every config field as a mutable Python attribute.
  cls.def_readwrite("model", &PyClass::model);
  cls.def_readwrite("scale", &PyClass::scale);
  cls.def_readwrite("lm_provider", &PyClass::lm_provider);
  cls.def_readwrite("lm_num_threads", &PyClass::lm_num_threads);

  cls.def("__str__", &PyClass::ToString);
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user