fix: resolve LiteLLM initialization issues in math and calc servers

- Updated initialization logic in `math_server.py` and `calc_server.py`
- Fixed missing or invalid response output caused by improper model handling
pull/819/head
Authored by Pavan Kumar 2 days ago; committed by ascender1729
parent a877cc8a47
commit c75fa97cb7

calc_server.py
@@ -29,5 +29,5 @@ def percentage(value: float, percent: float) -> float:
 if __name__ == "__main__":
     print("Starting Calculation Server on port 6275...")
-    llm = LiteLLM()
+    llm = LiteLLM(system_prompt="You are a financial calculation expert.")
     mcp.run(transport="sse", host="0.0.0.0", port=6275)
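For context, a minimal sketch of what `calc_server.py` might look like around the changed line, assuming a FastMCP server and the repository's LiteLLM wrapper; the import paths, the `@mcp.tool()` registration, and the body of `percentage` are illustrative assumptions, not taken from the diff.

```python
# Hypothetical sketch of calc_server.py after the fix; import paths and the
# tool body are assumptions, only the __main__ block mirrors the diff above.
from mcp.server.fastmcp import FastMCP            # assumed FastMCP location
from swarms.utils.litellm_wrapper import LiteLLM  # assumed wrapper location

mcp = FastMCP("Calculation Server")

@mcp.tool()
def percentage(value: float, percent: float) -> float:
    """Return `percent` percent of `value` (body assumed for illustration)."""
    return value * percent / 100.0

if __name__ == "__main__":
    print("Starting Calculation Server on port 6275...")
    # Give the model a task-specific system prompt instead of the bare default.
    llm = LiteLLM(system_prompt="You are a financial calculation expert.")
    mcp.run(transport="sse", host="0.0.0.0", port=6275)
```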

math_server.py
@@ -66,5 +66,5 @@ def divide(a: float, b: float) -> float:
 if __name__ == "__main__":
     print("Starting Math Server on port 6274...")
-    llm = LiteLLM(model_name="gpt-4o-mini")
+    llm = LiteLLM() # This will use the default model
     mcp.run(transport="sse", host="0.0.0.0", port=6274)
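The math server change goes the other way: the pinned `model_name` is dropped so the wrapper falls back to its default model, which is how the commit addresses the improper model handling. A sketch under the same assumptions as above (import paths and the `divide` body are illustrative):

```python
# Hypothetical sketch of math_server.py after the fix; only the __main__ block
# mirrors the diff above, everything else is assumed for illustration.
from mcp.server.fastmcp import FastMCP            # assumed FastMCP location
from swarms.utils.litellm_wrapper import LiteLLM  # assumed wrapper location

mcp = FastMCP("Math Server")

@mcp.tool()
def divide(a: float, b: float) -> float:
    """Return a / b (the zero guard is assumed, not shown in the diff)."""
    if b == 0:
        raise ValueError("Cannot divide by zero")
    return a / b

if __name__ == "__main__":
    print("Starting Math Server on port 6274...")
    llm = LiteLLM()  # no model_name: rely on the wrapper's default model
    mcp.run(transport="sse", host="0.0.0.0", port=6274)
```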