@@ -164,19 +164,34 @@ class MultiAgentRouter:
         # Avoids errors on models like `gpt-3.5-turbo` which don't support json_schema
         def _supports_structured_outputs(model_name: str) -> bool:
             name = (model_name or "").lower()
-            return any(
-                prefix in name
-                for prefix in [
-                    "gpt-4.1",
-                    "openai/gpt-4.1",
-                    "gpt-4o",
-                    "openai/gpt-4o",
-                    "o3-",
-                    "openai/o3-",
-                    "o4-",
-                    "openai/o4-",
-                ]
-            )
+            # Models that DON'T support structured outputs (exclude these)
+            unsupported_models = [
+                "gpt-3.5-turbo",
+                "gpt-4-turbo",
+                "gpt-4",
+                "text-davinci",
+                "text-curie",
+                "text-babbage",
+                "text-ada",
+                "claude-2",
+                "claude-instant",
+                "claude-v1",
+                "gemini-pro-vision",
+                "text-bison",
+                "chat-bison",
+                "llama-2",
+                "llama-3",
+                "mistral-7b",
+                "mistral-small",
+            ]
+
+            # If it's in the unsupported list, return False
+            if any(unsupported in name for unsupported in unsupported_models):
+                return False
+
+            # Otherwise, assume it supports structured outputs
+            # This includes newer Claude, Gemini, and OpenAI models
+            return True
 
         # Build LiteLLM kwargs with conditional response_format
         lite_llm_kwargs = {
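The hunk ends before `lite_llm_kwargs` is populated, so below is a minimal sketch of how the helper could gate `response_format` when the kwargs are assembled and handed to LiteLLM. The wrapper name, the `schema` parameter, and the dict contents are illustrative assumptions rather than part of the diff; only `_supports_structured_outputs` comes from the change, and `litellm.completion` is the library call it feeds.

    # Hypothetical wrapper: assumes _supports_structured_outputs (from the hunk
    # above) is in scope, and that `schema` stands in for whatever response
    # model the router actually passes.
    from litellm import completion

    def call_with_optional_schema(model_name, messages, schema=None):
        lite_llm_kwargs = {
            "model": model_name,
            "messages": messages,
        }
        # Only request structured outputs when the target model can honor them;
        # older models such as gpt-3.5-turbo would otherwise error on json_schema.
        if schema is not None and _supports_structured_outputs(model_name):
            lite_llm_kwargs["response_format"] = schema
        return completion(**lite_llm_kwargs)

One consequence of the substring check in the new version: a broad entry like "gpt-4" also matches names such as "gpt-4o" and "gpt-4.1", so those models are classified as unsupported unless the list is tightened.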