Upgrade to vllm 0.17.0 corex v4.1 overlay
This commit is contained in:
@@ -7,6 +7,7 @@ import warnings
|
||||
from abc import ABC, abstractmethod
|
||||
from collections import Counter, defaultdict
|
||||
from collections.abc import Awaitable, Callable, Iterable
|
||||
from dataclasses import dataclass
|
||||
from functools import cached_property, lru_cache, partial
|
||||
from itertools import accumulate
|
||||
from pathlib import Path
|
||||
@@ -1024,6 +1025,13 @@ class AsyncMultiModalContentParser(BaseMultiModalContentParser):
|
||||
self._add_placeholder("video", placeholder)
|
||||
|
||||
|
||||
@dataclass
class ChatTemplateConfig:
    """Bundles the chat-template settings used when rendering chat requests.

    Attributes:
        chat_template: An explicit chat template (or path/identifier) to use;
            ``None`` means fall back to the model's own template.
        chat_template_content_format: How message content is rendered into the
            template; ``"auto"`` lets the detection logic decide.
        trust_request_chat_template: Whether a template supplied inside an
            incoming request may be honored (disabled by default).
    """

    # None -> defer to the tokenizer/model-provided template.
    chat_template: str | None = None
    # "auto" defers to content-format detection.
    chat_template_content_format: ChatTemplateContentFormatOption = "auto"
    # Off by default: request-supplied templates are untrusted input.
    trust_request_chat_template: bool = False
|
||||
|
||||
|
||||
def validate_chat_template(chat_template: Path | str | None):
|
||||
"""Raises if the provided chat template appears invalid."""
|
||||
if chat_template is None:
|
||||
|
||||
Reference in New Issue
Block a user