Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 15 additions & 1 deletion langtest/modelhandler/llm_modelhandler.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,13 @@ def load_model(cls: Type[_T], hub: str, path: str, *args, **kwargs) -> _T:
model_type = kwargs.get("model_type", None)
output_schema = kwargs.get("output_schema", None)

# Optional logical task identifier coming from ModelConfig.task_id.
# We pull it out early so it doesn't get passed as a raw keyword
# argument to the underlying LangChain model classes (which would
# raise on unexpected parameters). Instead, we convert it into
# provider-specific request metadata (e.g. OpenAI extra_body.metadata).
task_id = kwargs.pop("task_id", None)

exclude_args = [
"task",
"device",
Expand All @@ -100,6 +107,14 @@ def load_model(cls: Type[_T], hub: str, path: str, *args, **kwargs) -> _T:
try:
cls._update_model_parameters(hub, filtered_kwargs)

# Attach task_id as provider metadata where supported.
# For OpenAI-compatible hubs (openai, azure-openai, openrouter),
# we use the `extra_body={"metadata": {"tags": [...]}}` pattern,
# adding a "task_id:<value>" tag so the identifier is available on
# every completion.
if task_id and hub in ("openai", "azure-openai", "openrouter"):
extra_body = {"metadata": {"tags": [f"task_id:{task_id}"]}}
filtered_kwargs["extra_body"] = extra_body

from .utils import MODEL_CLASSES

if model_type is None and hub in ("azure-openai", "openai"):
Expand Down Expand Up @@ -226,7 +241,6 @@ def predict(self, text: Union[str, dict], prompt: dict, *args, **kwargs):

# prompt configuration
prompt_manager = PromptManager()

prompt_template = prompt_manager.get_prompt()

if prompt_template is None:
Expand Down
3 changes: 3 additions & 0 deletions langtest/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,14 @@ class ModelConfig(TypedDict):
model (str): The name of the model.
type (Literal['chat', 'completion']): The type of the model, either 'chat' or 'completion'.
hub (str): The hub where the model is located.
task_id (Optional[str]): Optional identifier used to track requests on LitellmProxy,
attached as a tag inside the request's `extra_body` metadata.
"""

model: str
type: Literal["chat", "completion"]
hub: str
task_id: Optional[str]


class DatasetConfig(TypedDict):
Expand Down