Simplify multi-tokenizer (#11295)
Signed-off-by: zhengkezhou1 <madzhou1@gmail.com>
Co-authored-by: Liangsheng Yin <lsyincs@gmail.com>
This commit is contained in:
@@ -3006,10 +3006,6 @@ def lru_cache_frozenset(maxsize=128):
|
||||
return decorator
|
||||
|
||||
|
||||
def get_origin_rid(rid):
    """Strip the multi-tokenizer worker prefix from a request id.

    Ids may arrive prefixed as ``"<worker>_<original_rid>"``; return the
    part after the first underscore, or *rid* unchanged when it carries
    no prefix.
    """
    _, sep, origin = rid.partition("_")
    return origin if sep else rid
|
||||
|
||||
|
||||
def apply_module_patch(target_module, target_function, wrappers):
|
||||
original_module, original_function = parse_module_path(
|
||||
target_module, target_function, False
|
||||
|
||||
Reference in New Issue
Block a user