diff --git a/examples/demos/spike/function_caller_example.py b/examples/demos/spike/function_caller_example.py
index 0578df7d..46c8b9fe 100644
--- a/examples/demos/spike/function_caller_example.py
+++ b/examples/demos/spike/function_caller_example.py
@@ -8,7 +8,7 @@ Todo
 import os
 from dotenv import load_dotenv
-from swarm_models import OpenAIChat, OpenAIFunctionCaller
+from swarms.utils.litellm_wrapper import LiteLLM
 from pydantic import BaseModel
 from typing import List
@@ -30,21 +30,19 @@ load_dotenv()
 api_key = os.getenv("GROQ_API_KEY")

 # Initialize the model
-model = OpenAIChat(
-    openai_api_base="https://api.groq.com/openai/v1",
-    openai_api_key=api_key,
-    model_name="llama-3.1-70b-versatile",
+model = LiteLLM(
+    model_name="groq/llama-3.1-70b-versatile",
     temperature=0.1,
 )

-function_caller = OpenAIFunctionCaller(
+function_caller = LiteLLM(
+    model_name="gpt-4o",
     system_prompt="""You are a college selection final decision maker. Your role is to:
     - Balance all relevant factors and stakeholder input.
     - Only return the output in the schema format.
     """,
-    openai_api_key=os.getenv("OPENAI_API_KEY"),
-    base_model=CollegesRecommendation,
-    # parallel_tool_calls=True,
+    response_format=CollegesRecommendation,
+    temperature=0.1,
 )
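
For context, a minimal sketch of how the migrated example might be exercised end to end. This is an assumption-laden illustration, not part of the diff: it assumes the `swarms.utils.litellm_wrapper.LiteLLM` class exposes a `run(task)` method, and the `CollegesRecommendation` fields and prompts below are hypothetical stand-ins for the schema and prompt defined elsewhere in `function_caller_example.py`.

```python
# Hypothetical usage sketch for the migrated example (not part of the diff).
# Assumes LiteLLM exposes a run(task) method; CollegesRecommendation is a
# simplified stand-in for the schema defined earlier in the example file.
from typing import List

from pydantic import BaseModel
from swarms.utils.litellm_wrapper import LiteLLM


class CollegesRecommendation(BaseModel):
    colleges: List[str]
    reasons: List[str]


# Chat model served through Groq via LiteLLM's provider-prefixed model name.
model = LiteLLM(
    model_name="groq/llama-3.1-70b-versatile",
    temperature=0.1,
)

# Structured-output model: response_format requests output matching the schema.
function_caller = LiteLLM(
    model_name="gpt-4o",
    system_prompt="Return only output matching the CollegesRecommendation schema.",
    response_format=CollegesRecommendation,
    temperature=0.1,
)

print(model.run("Summarize what matters most when choosing a college."))
print(function_caller.run("Recommend colleges for a CS student in California."))
```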