[router] add tokenizer metrics (#9307)
Co-authored-by: Chang Su <chang.s.su@oracle.com>
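
Note: the hunks below call static helpers on a TokenizerMetrics facade that lives in src/metrics.rs, which this diff view does not include. As a minimal compiling sketch of that interface, with signatures inferred from the call sites below (the empty bodies and the grouping comments are placeholders, not the real implementation):

    use std::time::Duration;

    // Sketch only: the actual src/metrics.rs is not shown in this diff.
    pub struct TokenizerMetrics;

    impl TokenizerMetrics {
        // Factory: outcome counters, a vocab-size gauge, a load-latency histogram.
        pub fn record_factory_load(_kind: &str) {}
        pub fn record_factory_error(_reason: &str) {}
        pub fn record_factory_load_duration(_d: Duration) {}
        pub fn set_vocab_size(_kind: &str, _size: usize) {}

        // Encode/decode: request and error counters, size and latency histograms.
        pub fn record_encode_request(_kind: &str) {}
        pub fn record_chars_per_encode(_chars: usize) {}
        pub fn record_tokens_per_encode(_tokens: usize) {}
        pub fn record_encode_duration(_d: Duration) {}
        pub fn record_encode_batch_duration(_d: Duration, _batch_size: usize) {}
        pub fn record_encode_error(_reason: &str) {}
        pub fn record_decode_request(_kind: &str) {}
        pub fn record_tokens_per_decode(_tokens: usize) {}
        pub fn record_decode_duration(_d: Duration) {}
        pub fn record_decode_error(_reason: &str) {}

        // Stop-sequence detection and streaming decode.
        pub fn record_stop_sequence_detected(_kind: &str) {}
        pub fn record_stop_detection_duration(_d: Duration) {}
        pub fn record_partial_match() {}
        pub fn record_stream_token() {}
        pub fn record_stream_step_duration(_d: Duration) {}
        pub fn record_incomplete_utf8() {}
    }
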
@@ -1,9 +1,11 @@
-use super::traits;
+use super::{traits, TokenizerTrait};
+use crate::metrics::TokenizerMetrics;
 use anyhow::{Error, Result};
 use std::fs::File;
 use std::io::Read;
 use std::path::Path;
 use std::sync::Arc;
+use std::time::Instant;
 
 #[cfg(feature = "huggingface")]
 use super::huggingface::HuggingFaceTokenizer;
@@ -22,6 +24,8 @@ pub enum TokenizerType {
 /// - json: HuggingFace tokenizer
 /// - For testing: can return mock tokenizer
 pub fn create_tokenizer_from_file(file_path: &str) -> Result<Arc<dyn traits::Tokenizer>> {
+    let start_time = Instant::now();
+
     // Special case for testing
     if file_path == "mock" || file_path == "test" {
         return Ok(Arc::new(super::mock::MockTokenizer::new()));
@@ -31,6 +35,7 @@ pub fn create_tokenizer_from_file(file_path: &str) -> Result<Arc<dyn traits::Tok
 
     // Check if file exists
     if !path.exists() {
+        TokenizerMetrics::record_factory_error("file_not_found");
         return Err(Error::msg(format!("File not found: {}", file_path)));
     }
 
@@ -40,15 +45,20 @@ pub fn create_tokenizer_from_file(file_path: &str) -> Result<Arc<dyn traits::Tok
         .and_then(std::ffi::OsStr::to_str)
         .map(|s| s.to_lowercase());
 
-    match extension.as_deref() {
+    let result = match extension.as_deref() {
         Some("json") => {
             #[cfg(feature = "huggingface")]
             {
                 let tokenizer = HuggingFaceTokenizer::from_file(file_path)?;
-                Ok(Arc::new(tokenizer))
+
+                TokenizerMetrics::record_factory_load("json");
+                TokenizerMetrics::set_vocab_size("huggingface", tokenizer.vocab_size());
+
+                Ok(Arc::new(tokenizer) as Arc<dyn traits::Tokenizer>)
             }
             #[cfg(not(feature = "huggingface"))]
             {
+                TokenizerMetrics::record_factory_error("huggingface_disabled");
                 Err(Error::msg(
                     "HuggingFace support not enabled. Enable the 'huggingface' feature.",
                 ))
@@ -56,17 +66,27 @@ pub fn create_tokenizer_from_file(file_path: &str) -> Result<Arc<dyn traits::Tok
         }
         Some("model") => {
             // SentencePiece model file
+            TokenizerMetrics::record_factory_error("unsupported_sentencepiece");
             Err(Error::msg("SentencePiece models not yet supported"))
         }
         Some("gguf") => {
             // GGUF format
+            TokenizerMetrics::record_factory_error("unsupported_gguf");
             Err(Error::msg("GGUF format not yet supported"))
         }
         _ => {
             // Try to auto-detect by reading file content
-            auto_detect_tokenizer(file_path)
+            auto_detect_tokenizer(file_path).inspect(|tokenizer| {
+                TokenizerMetrics::record_factory_load("auto_detected");
+                TokenizerMetrics::set_vocab_size("auto_detected", tokenizer.vocab_size());
+            })
         }
-    }
+    };
+
+    if result.is_ok() {
+        TokenizerMetrics::record_factory_load_duration(start_time.elapsed());
+    }
+    result
 }
 
 /// Auto-detect tokenizer type by examining file content

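With the factory changes above, one call to create_tokenizer_from_file records at most one outcome: an error counter on any failure, or a load counter, vocab-size gauge, and load-duration sample on success. A usage sketch (the .json path is hypothetical; the behavior in the comments follows the diff):

    fn demo() -> anyhow::Result<()> {
        // "mock" returns early, before any factory metric is touched.
        let _mock = create_tokenizer_from_file("mock")?;

        // A .json path (with the "huggingface" feature) records
        // record_factory_load("json"), set_vocab_size("huggingface", ...),
        // and, because the result is Ok, record_factory_load_duration.
        let _hf = create_tokenizer_from_file("tokenizer.json")?;

        // A missing file records record_factory_error("file_not_found");
        // no duration is recorded, since only Ok results reach the timer.
        assert!(create_tokenizer_from_file("/no/such/file.json").is_err());
        Ok(())
    }
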
@@ -1,6 +1,8 @@
 use super::traits::{Decoder, Encoder, Encoding, SpecialTokens, Tokenizer as TokenizerTrait};
+use crate::metrics::TokenizerMetrics;
 use anyhow::{Error, Result};
 use std::collections::HashMap;
+use std::time::Instant;
 use tokenizers::tokenizer::Tokenizer as HfTokenizer;
 
 /// HuggingFace tokenizer wrapper
@@ -92,19 +94,36 @@ impl HuggingFaceTokenizer {
 
 impl Encoder for HuggingFaceTokenizer {
     fn encode(&self, input: &str) -> Result<Encoding> {
-        let encoding = self
-            .tokenizer
-            .encode(input, false)
-            .map_err(|e| Error::msg(format!("Encoding failed: {}", e)))?;
+        let start = Instant::now();
+
+        TokenizerMetrics::record_encode_request("huggingface");
+        TokenizerMetrics::record_chars_per_encode(input.len());
 
-        Ok(Encoding::Hf(Box::new(encoding)))
+        self.tokenizer
+            .encode(input, false)
+            .map_err(|e| {
+                TokenizerMetrics::record_encode_error("encoding_failed");
+                Error::msg(format!("Encoding failed: {}", e))
+            })
+            .map(|encoding| {
+                TokenizerMetrics::record_tokens_per_encode(encoding.get_ids().len());
+                TokenizerMetrics::record_encode_duration(start.elapsed());
+                Encoding::Hf(Box::new(encoding))
+            })
     }
 
     fn encode_batch(&self, inputs: &[&str]) -> Result<Vec<Encoding>> {
+        let start = Instant::now();
+
         let encodings = self
             .tokenizer
             .encode_batch(inputs.to_vec(), false)
-            .map_err(|e| Error::msg(format!("Batch encoding failed: {}", e)))?;
+            .map_err(|e| {
+                TokenizerMetrics::record_encode_error("batch_encoding_failed");
+                Error::msg(format!("Batch encoding failed: {}", e))
+            })?;
+
+        TokenizerMetrics::record_encode_batch_duration(start.elapsed(), inputs.len());
 
         Ok(encodings
             .into_iter()
@@ -115,9 +134,20 @@ impl Encoder for HuggingFaceTokenizer {
 
 impl Decoder for HuggingFaceTokenizer {
     fn decode(&self, token_ids: &[u32], skip_special_tokens: bool) -> Result<String> {
+        let start = Instant::now();
+
+        TokenizerMetrics::record_decode_request("huggingface");
+        TokenizerMetrics::record_tokens_per_decode(token_ids.len());
+
         self.tokenizer
             .decode(token_ids, skip_special_tokens)
-            .map_err(|e| Error::msg(format!("Decoding failed: {}", e)))
+            .map_err(|e| {
+                TokenizerMetrics::record_decode_error("decoding_failed");
+                Error::msg(format!("Decoding failed: {}", e))
+            })
+            .inspect(|_| {
+                TokenizerMetrics::record_decode_duration(start.elapsed());
+            })
     }
 }
 

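The encode and decode paths above thread metrics through map_err, map, and inspect so each request records exactly one error counter or one latency sample; Result::inspect (stable since Rust 1.76) runs only on the Ok arm, so durations are recorded for successes only. The same pattern in standalone form (timed, record_error, and record_duration are illustrative names, not part of this commit):

    use std::time::{Duration, Instant};

    // Time a fallible operation: bump an error counter on failure,
    // record a latency sample on success, and pass the Result through.
    fn timed<T, E>(op: impl FnOnce() -> Result<T, E>) -> Result<T, E> {
        let start = Instant::now();
        op().map_err(|e| {
            record_error(); // failure path
            e
        })
        .inspect(|_| record_duration(start.elapsed())) // success path only
    }

    fn record_error() { /* counter increment */ }
    fn record_duration(_d: Duration) { /* histogram observation */ }
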
@@ -1,7 +1,9 @@
 use super::traits;
+use crate::metrics::TokenizerMetrics;
 use anyhow::Result;
 use std::collections::HashSet;
 use std::sync::Arc;
+use std::time::Instant;
 
 /// Output from the sequence decoder
 #[derive(Debug, Clone, PartialEq)]
@@ -93,6 +95,8 @@ impl StopSequenceDecoder {
 
     /// Process a single token
    pub fn process_token(&mut self, token_id: u32) -> Result<SequenceDecoderOutput> {
+        let start = Instant::now();
+
        if self.stopped {
            return Ok(SequenceDecoderOutput::Stopped);
        }
@@ -100,23 +104,30 @@ impl StopSequenceDecoder {
         // Check for token-level stops first
         if self.config.stop_tokens.contains(&token_id) {
             self.stopped = true;
+            TokenizerMetrics::record_stop_sequence_detected("token");
+
             // Flush any jailed text before stopping
             if !self.jail_buffer.is_empty() {
                 let output = self.jail_buffer.clone();
                 self.jail_buffer.clear();
+                TokenizerMetrics::record_stop_detection_duration(start.elapsed());
                 return Ok(SequenceDecoderOutput::StoppedWithText(output));
             }
+            TokenizerMetrics::record_stop_detection_duration(start.elapsed());
             return Ok(SequenceDecoderOutput::Stopped);
         }
 
         if self.config.visible_stop_tokens.contains(&token_id) {
             self.stopped = true;
+            TokenizerMetrics::record_stop_sequence_detected("visible_token");
+
             // Include jailed text plus the stop token
             let stop_text = self
                 .tokenizer
                 .decode(&[token_id], self.skip_special_tokens)?;
             let output = format!("{}{}", self.jail_buffer, stop_text);
             self.jail_buffer.clear();
+            TokenizerMetrics::record_stop_detection_duration(start.elapsed());
             return Ok(SequenceDecoderOutput::StoppedWithText(output));
         }
 
@@ -161,9 +172,12 @@ impl StopSequenceDecoder {
         for stop_seq in &self.config.stop_sequences {
             if let Some(pos) = check_text.find(stop_seq) {
                 self.stopped = true;
+                TokenizerMetrics::record_stop_sequence_detected("string");
+
                 // Output text before the stop sequence
                 let output = check_text[..pos].to_string();
                 self.jail_buffer.clear();
+                TokenizerMetrics::record_stop_detection_duration(start.elapsed());
                 return Ok(if output.is_empty() {
                     SequenceDecoderOutput::Stopped
                 } else {
@@ -176,10 +190,13 @@ impl StopSequenceDecoder {
         for stop_seq in &self.config.visible_stop_sequences {
             if let Some(pos) = check_text.find(stop_seq) {
                 self.stopped = true;
+                TokenizerMetrics::record_stop_sequence_detected("visible_string");
+
                 // Include the stop sequence in output
                 let end_pos = pos + stop_seq.len();
                 let output = check_text[..end_pos].to_string();
                 self.jail_buffer.clear();
+                TokenizerMetrics::record_stop_detection_duration(start.elapsed());
                 return Ok(SequenceDecoderOutput::StoppedWithText(output));
             }
         }
@@ -202,6 +219,8 @@ impl StopSequenceDecoder {
         }
 
         if partial_match_len > 0 {
+            TokenizerMetrics::record_partial_match();
+
             // Split: output safe text, jail the potential match
             let safe_end = check_text.len() - partial_match_len;
             let safe_text = &check_text[..safe_end];
@@ -211,6 +230,8 @@ impl StopSequenceDecoder {
             self.prefix_offset = self.read_offset;
             self.read_offset = self.token_buffer.len();
 
+            TokenizerMetrics::record_stop_detection_duration(start.elapsed());
+
             if safe_text.is_empty() {
                 Ok(SequenceDecoderOutput::Held)
             } else {
@@ -224,6 +245,8 @@ impl StopSequenceDecoder {
         self.prefix_offset = self.read_offset;
         self.read_offset = self.token_buffer.len();
 
+        TokenizerMetrics::record_stop_detection_duration(start.elapsed());
+
         Ok(SequenceDecoderOutput::Text(check_text))
     }
 }
 

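process_token has several early returns, so the diff repeats record_stop_detection_duration(start.elapsed()) in front of each one. An alternative that cannot miss an exit path is an RAII guard that records on drop; a sketch of that option (not what this commit does):

    use std::time::{Duration, Instant};

    // Records a duration when dropped, i.e. on every exit from the
    // enclosing scope, including early returns and `?` propagation.
    struct DurationGuard {
        start: Instant,
        record: fn(Duration),
    }

    impl Drop for DurationGuard {
        fn drop(&mut self) {
            (self.record)(self.start.elapsed());
        }
    }

    fn process_token_like(record: fn(Duration)) {
        let _guard = DurationGuard { start: Instant::now(), record };
        // ...any early return here still records exactly once...
    }
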
@@ -1,8 +1,10 @@
 // src/tokenizer/stream.rs
 
 use super::traits;
+use crate::metrics::TokenizerMetrics;
 use anyhow::Result;
 use std::sync::Arc;
+use std::time::Instant;
 
 const INITIAL_INCREMENTAL_DETOKENIZATION_OFFSET: usize = 5;
 
@@ -43,8 +45,12 @@ impl DecodeStream {
     /// Step appends a token_id to the internal state and tries to produce a text chunk.
     /// Returning `None` means the given id is not enough to produce a chunk.
     pub fn step(&mut self, id: u32) -> Result<Option<String>> {
+        let start = Instant::now();
+
         self.all_token_ids.push(id);
 
+        TokenizerMetrics::record_stream_token();
+
         let prefix_text = self.tokenizer.decode(
             &self.all_token_ids[self.prefix_offset..self.read_offset],
             self.skip_special_tokens,
@@ -61,8 +67,16 @@ impl DecodeStream {
             self.prefix_offset = self.read_offset;
             self.read_offset = self.all_token_ids.len();
 
+            TokenizerMetrics::record_stream_step_duration(start.elapsed());
+
             Ok(Some(new_text))
         } else {
+            if new_text.ends_with("�") {
+                TokenizerMetrics::record_incomplete_utf8();
+            }
+
+            TokenizerMetrics::record_stream_step_duration(start.elapsed());
+
             Ok(None)
         }
     }
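
step returns Ok(None) when the accumulated ids do not yet decode to a complete chunk; the incomplete_utf8 counter above tracks the common cause, a multi-byte character split across tokens, which the tokenizer renders as U+FFFD ("�") until the remaining bytes arrive. A caller-side sketch (DecodeStream and step are from the code above; drain itself is illustrative):

    fn drain(stream: &mut DecodeStream, token_ids: &[u32]) -> anyhow::Result<String> {
        let mut out = String::new();
        for &id in token_ids {
            // Ok(None): the chunk is held back; a later token completes it.
            if let Some(chunk) = stream.step(id)? {
                out.push_str(&chunk);
            }
        }
        Ok(out)
    }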