pull/938/head
harshalmore31 2 months ago
parent 1bad76b5d6
commit aa8f47e951

@@ -3103,7 +3103,9 @@ class Agent:
                 verbose=self.verbose,
             )
         else:
-            tool_response = temp_llm.run(tool_response)
+            tool_response = temp_llm.run(
+                f"Please analyze and summarize the following tool execution output:\n\n{output}"
+            )
 
         # Add the tool response to memory
         self.short_memory.add(
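For context, a minimal, self-contained sketch of what the changed call does. The StubLLM class and the sample output value are hypothetical stand-ins; only temp_llm.run and the summarization prompt come from the diff.

# Hypothetical stand-in for the temp_llm object used in the diff above.
class StubLLM:
    def run(self, prompt: str) -> str:
        # A real LLM would return a natural-language summary of the prompt.
        return f"[summary of a {len(prompt)}-character prompt]"

# Example raw tool output; in the Agent this is the tool execution result.
output = '{"tool": "web_search", "results": ["a", "b", "c"]}'
temp_llm = StubLLM()

# The changed call: instead of passing the tool response through verbatim,
# the LLM is asked to analyze and summarize the raw tool output.
tool_response = temp_llm.run(
    f"Please analyze and summarize the following tool execution output:\n\n{output}"
)
print(tool_response)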

@@ -16,6 +16,7 @@ from swarms.tools.pydantic_to_json import (
 )
 from swarms.tools.tool_parse_exec import parse_and_execute_json
 from swarms.utils.loguru_logger import initialize_logger
+from loguru import logger as loguru_logger
 
 
 logger = initialize_logger(log_folder="base_tool")
@@ -3083,6 +3084,14 @@ class BaseTool(BaseModel):
         Returns:
             Union[str, Dict[str, Any]]: Processed response (text or tool calls)
         """
+        # Validate response
+        if not response:
+            logger.warning("Empty streaming response received")
+            return ""
+        if not hasattr(response, "__iter__"):
+            logger.warning("Non-iterable response received for streaming")
+            return str(response) if response else ""
+
         if hasattr(llm, 'parse_streaming_chunks_with_tools'):
             text_response, tool_calls = llm.parse_streaming_chunks_with_tools(
                 stream=response,
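The added guards can be exercised in isolation. A minimal sketch, assuming only that falsy or non-iterable responses should short-circuit before streaming; validate_streaming_response is a hypothetical helper name, and print stands in for logger.warning so it runs without loguru configuration.

# Hypothetical helper mirroring the validation guards added above.
def validate_streaming_response(response):
    if not response:
        print("Empty streaming response received")
        return ""
    if not hasattr(response, "__iter__"):
        print("Non-iterable response received for streaming")
        return str(response)
    return None  # None means: fall through to the normal streaming path

print(repr(validate_streaming_response(None)))       # '' (empty response)
print(repr(validate_streaming_response(42)))         # '42' (non-iterable)
print(repr(validate_streaming_response(["chunk"])))  # None (valid stream)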
@@ -3116,12 +3125,16 @@ class BaseTool(BaseModel):
         else:
             # Simple fallback streaming
             chunks = []
-            for chunk in response:
-                if hasattr(chunk, "choices") and chunk.choices and chunk.choices[0].delta.content:
-                    content = chunk.choices[0].delta.content
-                    chunks.append(content)
-                    if print_on:
-                        print(content, end="", flush=True)
-            if print_on and chunks:
-                print()
-            return "".join(chunks)
+            try:
+                for chunk in response:
+                    if hasattr(chunk, "choices") and chunk.choices and chunk.choices[0].delta.content:
+                        content = chunk.choices[0].delta.content
+                        chunks.append(content)
+                        if print_on:
+                            print(content, end="", flush=True)
+                if print_on and chunks:
+                    print()
+                return "".join(chunks)
+            except Exception as e:
+                logger.error(f"Error in fallback streaming for agent {agent_name}: {e}")
+                return "".join(chunks) if chunks else ""
