docs cleanup

dependabot/pip/pydantic-2.11.7
Kye Gomez 2 weeks ago
parent 6c0b1fd5b0
commit 3bc92755ff

@@ -15,7 +15,7 @@ Prerequisites:
 """
 import os
-from typing import List, Callable
+from typing import List
 from swarms.structs.agent import Agent
 from swarms.structs.conversation import Conversation
 from swarms.structs.multi_agent_exec import run_agents_concurrently
@@ -24,6 +24,7 @@ from swarms.utils.history_output_formatter import (
     HistoryOutputType,
 )
 def aggregator_agent_task_prompt(
     task: str, workers: List[Agent], conversation: Conversation
 ):
@@ -52,7 +53,7 @@ def aggregator_agent_task_prompt(
 def create_research_agents() -> List[Agent]:
     """Create a team of specialized research agents."""
     # Data Analyst Agent
     data_analyst = Agent(
         agent_name="DataAnalyst",
@@ -70,7 +71,7 @@ def create_research_agents() -> List[Agent]:
         verbose=True,
         output_type="string",
     )
     # Research Specialist Agent
     researcher = Agent(
         agent_name="ResearchSpecialist",
@@ -88,10 +89,10 @@ def create_research_agents() -> List[Agent]:
         verbose=True,
         output_type="string",
     )
     # Strategic Advisor Agent
     strategist = Agent(
         agent_name="StrategicAdvisor",
         agent_description="Expert in strategic planning, business strategy, and decision-making",
         system_prompt="""You are a strategic advisor with expertise in:
         - Strategic planning and business strategy
@@ -106,7 +107,7 @@ def create_research_agents() -> List[Agent]:
         verbose=True,
         output_type="string",
     )
     return [data_analyst, researcher, strategist]
@@ -148,7 +149,7 @@ def aggregate_with_supabase(
 ):
     """
     Aggregate agent responses using Supabase for conversation storage.
     Args:
         workers: List of Agent instances
         task: The task to execute
@@ -158,56 +159,62 @@ def aggregate_with_supabase(
         supabase_url: Supabase project URL
         supabase_key: Supabase API key
     """
     if task is None:
         raise ValueError("Task is required for agent aggregation")
     if not workers:
         raise ValueError("At least one worker agent is required")
     if not all(isinstance(worker, Agent) for worker in workers):
         raise ValueError("All workers must be Agent instances")
     # Set up Supabase conversation storage
     conversation_kwargs = {}
     if backend == "supabase":
         url = supabase_url or os.getenv("SUPABASE_URL")
         key = supabase_key or os.getenv("SUPABASE_ANON_KEY")
         if not url or not key:
             raise ValueError(
                 "Supabase backend requires SUPABASE_URL and SUPABASE_ANON_KEY "
                 "environment variables or explicit parameters"
             )
-        conversation_kwargs.update({
-            "supabase_url": url,
-            "supabase_key": key,
-        })
+        conversation_kwargs.update(
+            {
+                "supabase_url": url,
+                "supabase_key": key,
+            }
+        )
     try:
         # Create conversation with Supabase backend
         conversation = Conversation(
             backend=backend,
             **conversation_kwargs,
             system_prompt="Multi-agent collaboration session with persistent storage",
             time_enabled=True,
         )
-        print(f"✅ Successfully initialized {backend} backend for conversation storage")
+        print(
+            f"✅ Successfully initialized {backend} backend for conversation storage"
+        )
         # Add initial task to conversation
         conversation.add("system", f"Task: {task}")
     except ImportError as e:
         print(f"❌ Backend initialization failed: {e}")
-        print(f"💡 Falling back to in-memory storage")
+        print("💡 Falling back to in-memory storage")
         conversation = Conversation(backend="in-memory")
     # Create aggregator agent
     aggregator_agent = create_aggregator_agent()
-    print(f"🚀 Starting multi-agent execution with {len(workers)} agents...")
+    print(
+        f"🚀 Starting multi-agent execution with {len(workers)} agents..."
+    )
     # Run agents concurrently
     results = run_agents_concurrently(agents=workers, task=task)
@@ -217,7 +224,7 @@ def aggregate_with_supabase(
         print(f"📝 Stored response from {agent.agent_name}")
     print("🔄 Running aggregation analysis...")
     # Generate aggregated analysis
     final_result = aggregator_agent.run(
         task=aggregator_agent_task_prompt(task, workers, conversation)
@@ -225,12 +232,11 @@ def aggregate_with_supabase(
     # Store aggregated result
     conversation.add(
-        content=final_result,
-        role=aggregator_agent.agent_name
+        content=final_result, role=aggregator_agent.agent_name
     )
     print("✅ Aggregation complete!")
     # Return formatted history
     return history_output_formatter(
         conversation=conversation, type=type
@@ -239,41 +245,51 @@ def aggregate_with_supabase(
 # Example usage with real Swarms agents
 if __name__ == "__main__":
-    print("🧪 Testing Swarms Multi-Agent System with Supabase Backend")
-    print("="*70)
+    print(
+        "🧪 Testing Swarms Multi-Agent System with Supabase Backend"
+    )
+    print("=" * 70)
     # Check environment setup
     print("\n⚙️ Environment Setup Check")
     print("-" * 40)
     supabase_url = os.getenv("SUPABASE_URL")
     supabase_key = os.getenv("SUPABASE_ANON_KEY")
     openai_key = os.getenv("OPENAI_API_KEY")
-    print(f"SUPABASE_URL: {'✅ Set' if supabase_url else '❌ Not set'}")
-    print(f"SUPABASE_ANON_KEY: {'✅ Set' if supabase_key else '❌ Not set'}")
-    print(f"OPENAI_API_KEY: {'✅ Set' if openai_key else '❌ Not set'}")
+    print(
+        f"SUPABASE_URL: {'✅ Set' if supabase_url else '❌ Not set'}"
+    )
+    print(
+        f"SUPABASE_ANON_KEY: {'✅ Set' if supabase_key else '❌ Not set'}"
+    )
+    print(
+        f"OPENAI_API_KEY: {'✅ Set' if openai_key else '❌ Not set'}"
+    )
     if not (supabase_url and supabase_key):
         print("\n⚠️ Missing Supabase configuration!")
         print("Please set the following environment variables:")
         print("export SUPABASE_URL=https://your-project.supabase.co")
         print("export SUPABASE_ANON_KEY=your-anon-key")
         print("\nFalling back to demonstration with mock data...")
     if not openai_key:
         print("\n⚠️ Missing OpenAI API key!")
         print("Please set: export OPENAI_API_KEY=your-api-key")
-        print("You can also use other LLM providers (Anthropic, Google, etc.)")
+        print(
+            "You can also use other LLM providers (Anthropic, Google, etc.)"
+        )
     # Example 1: Basic Multi-Agent Research Task
     print("\n📦 Example 1: Multi-Agent Market Research")
     print("-" * 50)
     try:
         # Create research team
         research_team = create_research_agents()
         # Define research task
         research_task = """
         Analyze the current state and future prospects of artificial intelligence
@@ -281,10 +297,12 @@ if __name__ == "__main__":
         regulatory challenges, and investment opportunities. Provide insights
         on key players, emerging technologies, and potential risks.
         """
         print(f"📋 Task: {research_task.strip()}")
-        print(f"👥 Team: {[agent.agent_name for agent in research_team]}")
+        print(
+            f"👥 Team: {[agent.agent_name for agent in research_team]}"
+        )
         if supabase_url and supabase_key and openai_key:
             # Run with real agents and Supabase storage
             result = aggregate_with_supabase(
@@ -295,21 +313,23 @@ if __name__ == "__main__":
                 supabase_url=supabase_url,
                 supabase_key=supabase_key,
             )
             print("\n📊 Research Results:")
             print("=" * 50)
             print(result)
         else:
-            print("❌ Skipping real agent execution due to missing configuration")
+            print(
+                "❌ Skipping real agent execution due to missing configuration"
+            )
     except Exception as e:
         print(f"❌ Error in multi-agent research: {e}")
     # Example 2: Simple Conversation Storage Test
     print("\n📦 Example 2: Direct Conversation Storage Test")
     print("-" * 50)
     try:
         if supabase_url and supabase_key:
             # Test direct conversation with Supabase
@@ -319,37 +339,62 @@ if __name__ == "__main__":
                 supabase_key=supabase_key,
                 time_enabled=True,
             )
             print("✅ Supabase conversation created successfully")
             # Add sample conversation
-            conv.add("user", "What are the latest trends in AI technology?")
-            conv.add("assistant", "Based on current developments, key AI trends include:")
-            conv.add("assistant", "1. Large Language Models (LLMs) advancing rapidly")
-            conv.add("assistant", "2. Multimodal AI combining text, image, and video")
-            conv.add("assistant", "3. AI agents becoming more autonomous and capable")
+            conv.add(
+                "user", "What are the latest trends in AI technology?"
+            )
+            conv.add(
+                "assistant",
+                "Based on current developments, key AI trends include:",
+            )
+            conv.add(
+                "assistant",
+                "1. Large Language Models (LLMs) advancing rapidly",
+            )
+            conv.add(
+                "assistant",
+                "2. Multimodal AI combining text, image, and video",
+            )
+            conv.add(
+                "assistant",
+                "3. AI agents becoming more autonomous and capable",
+            )
             conv.add("user", "How do these trends affect businesses?")
-            conv.add("assistant", "These trends are transforming businesses through automation, enhanced decision-making, and new product capabilities.")
+            conv.add(
+                "assistant",
+                "These trends are transforming businesses through automation, enhanced decision-making, and new product capabilities.",
+            )
             # Test conversation operations
             print(f"📊 Message count: {len(conv.to_dict())}")
-            print(f"🔍 Search results for 'AI': {len(conv.search('AI'))}")
-            print(f"📈 Role distribution: {conv.count_messages_by_role()}")
+            print(
+                f"🔍 Search results for 'AI': {len(conv.search('AI'))}"
+            )
+            print(
+                f"📈 Role distribution: {conv.count_messages_by_role()}"
+            )
             # Export conversation
             conv.export_conversation("supabase_ai_conversation.json")
-            print("💾 Conversation exported to supabase_ai_conversation.json")
+            print(
+                "💾 Conversation exported to supabase_ai_conversation.json"
+            )
         else:
-            print("❌ Skipping Supabase test due to missing configuration")
+            print(
+                "❌ Skipping Supabase test due to missing configuration"
+            )
     except Exception as e:
         print(f"❌ Error in conversation storage test: {e}")
     # Example 3: Agent Creation and Configuration Demo
     print("\n📦 Example 3: Agent Configuration Demo")
     print("-" * 50)
     try:
         if openai_key:
             # Create a simple agent for demonstration
@@ -361,55 +406,71 @@ if __name__ == "__main__":
                 max_loops=1,
                 verbose=False,
             )
             print("✅ Demo agent created successfully")
             print(f"Agent: {demo_agent.agent_name}")
             print(f"Description: {demo_agent.agent_description}")
             # Test simple agent run
             simple_task = "Explain the benefits of using persistent conversation storage in AI applications."
             response = demo_agent.run(simple_task)
-            print(f"\n📝 Agent Response:")
+            print("\n📝 Agent Response:")
             print("-" * 30)
-            print(response[:500] + "..." if len(response) > 500 else response)
+            print(
+                response[:500] + "..."
+                if len(response) > 500
+                else response
+            )
         else:
-            print("❌ Skipping agent demo due to missing OpenAI API key")
+            print(
+                "❌ Skipping agent demo due to missing OpenAI API key"
+            )
     except Exception as e:
         print(f"❌ Error in agent demo: {e}")
     # Summary and Next Steps
-    print("\n" + "="*70)
+    print("\n" + "=" * 70)
     print("🏁 Demo Summary")
-    print("="*70)
+    print("=" * 70)
     print("\n✨ What was demonstrated:")
     print("1. 🏗️ Real Swarms agent creation with specialized roles")
     print("2. 🗄️ Supabase backend integration for persistent storage")
     print("3. 🤝 Multi-agent collaboration and response aggregation")
     print("4. 💾 Conversation export and search capabilities")
     print("5. ⚙️ Proper error handling and graceful fallbacks")
     print("\n🚀 Next Steps to get started:")
     print("1. Set up Supabase project: https://supabase.com")
     print("2. Configure environment variables")
     print("3. Install dependencies: pip install swarms supabase")
     print("4. Customize agents for your specific use cases")
     print("5. Scale to larger agent teams and complex workflows")
     print("\n🔗 Resources:")
     print("- Swarms Documentation: https://docs.swarms.world")
-    print("- Supabase Python Docs: https://supabase.com/docs/reference/python/")
+    print(
+        "- Supabase Python Docs: https://supabase.com/docs/reference/python/"
+    )
     print("- GitHub Repository: https://github.com/kyegomez/swarms")
-    print(f"\n⚙️ Final Configuration Status:")
-    print(f" SUPABASE_URL: {'✅ Set' if supabase_url else '❌ Not set'}")
-    print(f" SUPABASE_ANON_KEY: {'✅ Set' if supabase_key else '❌ Not set'}")
-    print(f" OPENAI_API_KEY: {'✅ Set' if openai_key else '❌ Not set'}")
+    print("\n⚙️ Final Configuration Status:")
+    print(
+        f" SUPABASE_URL: {'✅ Set' if supabase_url else '❌ Not set'}"
+    )
+    print(
+        f" SUPABASE_ANON_KEY: {'✅ Set' if supabase_key else '❌ Not set'}"
+    )
+    print(
+        f" OPENAI_API_KEY: {'✅ Set' if openai_key else '❌ Not set'}"
+    )
     if supabase_url and supabase_key and openai_key:
         print("\n🎉 All systems ready! You can run the full demo.")
     else:
-        print("\n⚠️ Set missing environment variables to run the full demo.")
+        print(
+            "\n⚠️ Set missing environment variables to run the full demo."
+        )

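For reference, a minimal driver for the example above might look like the sketch below. The module path is hypothetical (import from wherever this example file lives in your checkout); the function names and keyword arguments come from the diff above, and the task string is a placeholder.

import os

# hypothetical module path for the example file shown above
from supabase_agent_aggregation_example import (
    aggregate_with_supabase,
    create_research_agents,
)

team = create_research_agents()
result = aggregate_with_supabase(
    workers=team,
    task="Summarize the current state of the AI agent ecosystem.",
    backend="supabase",
    supabase_url=os.getenv("SUPABASE_URL"),
    supabase_key=os.getenv("SUPABASE_ANON_KEY"),
)
print(result)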
@@ -315,6 +315,7 @@ nav:
 - Agent Output Types: "swarms/examples/agent_output_types.md"
 - Agent with Structured Outputs: "swarms/examples/agent_structured_outputs.md"
 - Agents with Vision: "swarms/examples/vision_processing.md"
+- Gradio Chat Interface: "swarms/ui/main.md"
 - Various Model Providers:
   - OpenAI: "swarms/examples/openai_example.md"
   - Anthropic: "swarms/examples/claude.md"
@@ -392,6 +393,7 @@ nav:
 - Swarms API Pricing: "swarms_cloud/api_pricing.md"
 - Swarms API Pricing in Chinese: "swarms_cloud/chinese_api_pricing.md"
 - Swarms Cloud Subscription Tiers: "swarms_cloud/subscription_tiers.md"
+- Swarm Ecosystem APIs:
   - MCS API: "swarms_cloud/mcs_api.md"
   # - CreateNow API: "swarms_cloud/create_api.md"
@@ -403,15 +405,14 @@ nav:
 - Overview: "swarms_platform/index.md"
 - Swarm Platform API Keys: "swarms_platform/apikeys.md"
 - Account Management: "swarms_platform/account_management.md"
-- Swarms Chat Tutorial: "swarms/ui/main.md"
 - Swarms Rust:
   - Overview: "swarms_rs/overview.md"
   - Agents: "swarms_rs/agents.md"
-- Governance:
-  - Resources: "governance/main.md"
-  - Tokenomics: "web3/token.md"
+- Resources:
+  - Overview: "governance/main.md"
+  # - Tokenomics: "web3/token.md"
 # - Prompts API:

@@ -67,7 +67,7 @@ def show_ascii_art():
         Text(ASCII_ART, style=f"bold {COLORS['primary']}"),
         border_style=COLORS["secondary"],
         title="[bold]Welcome to Swarms[/bold]",
-        subtitle="[dim]Power to the Swarms[/dim]",
+        subtitle="[dim]swarms.ai[/dim]",
     )
     console.print(panel)

@@ -78,6 +78,7 @@ class DuckDBConversation(BaseCommunication):
         # Lazy load duckdb with auto-installation
         try:
             import duckdb
             self.duckdb = duckdb
             self.duckdb_available = True
         except ImportError:
@@ -86,19 +87,20 @@ class DuckDBConversation(BaseCommunication):
             try:
                 import subprocess
                 import sys
                 # Install duckdb
-                subprocess.check_call([
-                    sys.executable, "-m", "pip", "install", "duckdb"
-                ])
+                subprocess.check_call(
+                    [sys.executable, "-m", "pip", "install", "duckdb"]
+                )
                 print("✅ DuckDB installed successfully!")
                 # Try importing again
                 import duckdb
                 self.duckdb = duckdb
                 self.duckdb_available = True
                 print("✅ DuckDB loaded successfully!")
             except Exception as e:
                 raise ImportError(
                     f"Failed to auto-install DuckDB. Please install manually with 'pip install duckdb': {e}"

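The hunk above only reflows the install call; the lazy-import-with-auto-install pattern itself is unchanged and the same pattern recurs in the Pulsar, Redis, and Supabase wrappers below. As a rough standalone sketch of that pattern (the helper name and structure are illustrative, not part of the codebase):

import importlib
import subprocess
import sys


def load_or_install(module_name: str, pip_name: str = None):
    """Import a module, attempting a one-time pip install if it is missing."""
    try:
        return importlib.import_module(module_name)
    except ImportError:
        # mirror the wrappers: install into the running interpreter, then retry
        subprocess.check_call(
            [sys.executable, "-m", "pip", "install", pip_name or module_name]
        )
        return importlib.import_module(module_name)


duckdb = load_or_install("duckdb")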
@@ -66,27 +66,37 @@ class PulsarConversation(BaseCommunication):
         # Lazy load Pulsar with auto-installation
         try:
             import pulsar
             self.pulsar = pulsar
             self.pulsar_available = True
         except ImportError:
             # Auto-install pulsar-client if not available
-            print("📦 Pulsar client not found. Installing automatically...")
+            print(
+                "📦 Pulsar client not found. Installing automatically..."
+            )
             try:
                 import subprocess
                 import sys
                 # Install pulsar-client
-                subprocess.check_call([
-                    sys.executable, "-m", "pip", "install", "pulsar-client"
-                ])
+                subprocess.check_call(
+                    [
+                        sys.executable,
+                        "-m",
+                        "pip",
+                        "install",
+                        "pulsar-client",
+                    ]
+                )
                 print("✅ Pulsar client installed successfully!")
                 # Try importing again
                 import pulsar
                 self.pulsar = pulsar
                 self.pulsar_available = True
                 print("✅ Pulsar loaded successfully!")
             except Exception as e:
                 self.pulsar_available = False
                 logger.error(
@@ -646,6 +656,7 @@ class PulsarConversation(BaseCommunication):
         """
         try:
             import pulsar
             pulsar_available = True
         except ImportError:
             logger.error("Pulsar client library is not installed")

@@ -31,6 +31,7 @@ try:
         RedisError,
         TimeoutError,
     )
     REDIS_AVAILABLE = True
 except ImportError:
     # Auto-install Redis at import time
@@ -38,13 +39,13 @@ except ImportError:
     try:
         import subprocess
         import sys
         # Install redis
-        subprocess.check_call([
-            sys.executable, "-m", "pip", "install", "redis"
-        ])
+        subprocess.check_call(
+            [sys.executable, "-m", "pip", "install", "redis"]
+        )
         print("✅ Redis installed successfully!")
         # Try importing again
         import redis
         from redis.exceptions import (
@@ -54,12 +55,15 @@ except ImportError:
             RedisError,
             TimeoutError,
         )
         REDIS_AVAILABLE = True
         print("✅ Redis loaded successfully!")
     except Exception as e:
         REDIS_AVAILABLE = False
-        print(f"❌ Failed to auto-install Redis. Please install manually with 'pip install redis': {e}")
+        print(
+            f"❌ Failed to auto-install Redis. Please install manually with 'pip install redis': {e}"
+        )
 class RedisConnectionError(Exception):
@@ -186,7 +190,11 @@ rdbchecksum yes
         try:
             if self.process:
                 # Send SAVE and BGSAVE commands before stopping if persistence is enabled
-                if self.persist and self.auto_persist and REDIS_AVAILABLE:
+                if (
+                    self.persist
+                    and self.auto_persist
+                    and REDIS_AVAILABLE
+                ):
                     try:
                         r = redis.Redis(
                             host="localhost", port=self.port
@@ -328,14 +336,14 @@ class RedisConversation(BaseStructure):
             RedisOperationError: If Redis operations fail.
         """
         global REDIS_AVAILABLE
         # Check if Redis is available (should be True after module import auto-installation)
         if not REDIS_AVAILABLE:
             raise ImportError(
                 "Redis is not available. Module-level auto-installation failed. "
                 "Please install manually with 'pip install redis'"
             )
         self.redis_available = True
         super().__init__()

@@ -96,29 +96,39 @@ class SupabaseConversation(BaseCommunication):
         # Lazy load Supabase with auto-installation
         try:
             from supabase import Client, create_client
             self.supabase_client = Client
             self.create_client = create_client
             self.supabase_available = True
         except ImportError:
             # Auto-install supabase if not available
-            print("📦 Supabase not found. Installing automatically...")
+            print(
+                "📦 Supabase not found. Installing automatically..."
+            )
             try:
                 import subprocess
                 import sys
                 # Install supabase
-                subprocess.check_call([
-                    sys.executable, "-m", "pip", "install", "supabase"
-                ])
+                subprocess.check_call(
+                    [
+                        sys.executable,
+                        "-m",
+                        "pip",
+                        "install",
+                        "supabase",
+                    ]
+                )
                 print("✅ Supabase installed successfully!")
                 # Try importing again
                 from supabase import Client, create_client
                 self.supabase_client = Client
                 self.create_client = create_client
                 self.supabase_available = True
                 print("✅ Supabase loaded successfully!")
             except Exception as e:
                 self.supabase_available = False
                 if logger:
@@ -179,7 +189,9 @@ class SupabaseConversation(BaseCommunication):
         ) # For thread-safe operations if any (e.g. token calculation)
         try:
-            self.client = self.create_client(supabase_url, supabase_key)
+            self.client = self.create_client(
+                supabase_url, supabase_key
+            )
             if self.enable_logging:
                 self.logger.info(
                     f"Successfully initialized Supabase client for URL: {supabase_url}"

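Assuming the constructor accepts the same keyword arguments that Conversation._initialize_backend passes to it further down in this commit (supabase_url, supabase_key, table_name), direct use of the wrapper would look roughly like this sketch; values are placeholders:

import os

from swarms.communication.supabase_wrap import SupabaseConversation

conv = SupabaseConversation(
    supabase_url=os.getenv("SUPABASE_URL"),
    supabase_key=os.getenv("SUPABASE_ANON_KEY"),
    table_name="conversations",  # default passed by the Conversation wrapper below
)
conv.add(role="user", content="Hello from the Supabase backend")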
@@ -53,41 +53,65 @@ def get_conversation_dir():
 # Define available providers
-providers = Literal["mem0", "in-memory", "supabase", "redis", "sqlite", "duckdb", "pulsar"]
+providers = Literal[
+    "mem0",
+    "in-memory",
+    "supabase",
+    "redis",
+    "sqlite",
+    "duckdb",
+    "pulsar",
+]
 def _create_backend_conversation(backend: str, **kwargs):
     """
     Create a backend conversation instance based on the specified backend type.
     This function uses lazy loading to import backend dependencies only when needed.
     Each backend class handles its own dependency management and error messages.
     Args:
         backend (str): The backend type to create
         **kwargs: Arguments to pass to the backend constructor
     Returns:
         Backend conversation instance
     Raises:
         ImportError: If required packages for the backend are not installed (raised by lazy loading)
         ValueError: If backend is not supported
     """
     try:
         if backend == "supabase":
-            from swarms.communication.supabase_wrap import SupabaseConversation
+            from swarms.communication.supabase_wrap import (
+                SupabaseConversation,
+            )
             return SupabaseConversation(**kwargs)
         elif backend == "redis":
-            from swarms.communication.redis_wrap import RedisConversation
+            from swarms.communication.redis_wrap import (
+                RedisConversation,
+            )
             return RedisConversation(**kwargs)
         elif backend == "sqlite":
-            from swarms.communication.sqlite_wrap import SQLiteConversation
+            from swarms.communication.sqlite_wrap import (
+                SQLiteConversation,
+            )
             return SQLiteConversation(**kwargs)
         elif backend == "duckdb":
-            from swarms.communication.duckdb_wrap import DuckDBConversation
+            from swarms.communication.duckdb_wrap import (
+                DuckDBConversation,
+            )
             return DuckDBConversation(**kwargs)
         elif backend == "pulsar":
-            from swarms.communication.pulsar_struct import PulsarConversation
+            from swarms.communication.pulsar_struct import (
+                PulsarConversation,
+            )
             return PulsarConversation(**kwargs)
         else:
             raise ValueError(
@@ -103,8 +127,10 @@ def _create_backend_conversation(backend: str, **kwargs):
             "duckdb": "pip install duckdb",
             "pulsar": "pip install pulsar-client",
         }
-        install_cmd = backend_deps.get(backend, f"Check documentation for {backend}")
+        install_cmd = backend_deps.get(
+            backend, f"Check documentation for {backend}"
+        )
         logger.error(
             f"Failed to initialize {backend} backend. "
             f"Missing dependencies. Install with: {install_cmd}"
@@ -190,7 +216,6 @@ class Conversation(BaseStructure):
         auto_persist: bool = True,
         redis_data_dir: Optional[str] = None,
         conversations_dir: Optional[str] = None,
         *args,
         **kwargs,
     ):
@@ -202,7 +227,15 @@ class Conversation(BaseStructure):
         self.backend_instance = None
         # Validate backend
-        valid_backends = ["in-memory", "mem0", "supabase", "redis", "sqlite", "duckdb", "pulsar"]
+        valid_backends = [
+            "in-memory",
+            "mem0",
+            "supabase",
+            "redis",
+            "sqlite",
+            "duckdb",
+            "pulsar",
+        ]
         if self.backend not in valid_backends:
             raise ValueError(
                 f"Invalid backend: '{self.backend}'. "
@@ -243,7 +276,13 @@ class Conversation(BaseStructure):
         self.conversations_dir = conversations_dir
         # Initialize backend if using persistent storage
-        if self.backend in ["supabase", "redis", "sqlite", "duckdb", "pulsar"]:
+        if self.backend in [
+            "supabase",
+            "redis",
+            "sqlite",
+            "duckdb",
+            "pulsar",
+        ]:
             try:
                 self._initialize_backend(
                     supabase_url=supabase_url,
@@ -258,7 +297,7 @@ class Conversation(BaseStructure):
                     persist_redis=persist_redis,
                     auto_persist=auto_persist,
                     redis_data_dir=redis_data_dir,
-                    **kwargs
+                    **kwargs,
                 )
             except Exception as e:
                 logger.warning(
@@ -275,7 +314,7 @@ class Conversation(BaseStructure):
     def _initialize_backend(self, **kwargs):
         """
         Initialize the persistent storage backend.
         Args:
             **kwargs: Backend-specific configuration parameters
         """
@@ -297,52 +336,78 @@ class Conversation(BaseStructure):
         # Add backend-specific parameters
         if self.backend == "supabase":
-            supabase_url = kwargs.get("supabase_url") or os.getenv("SUPABASE_URL")
-            supabase_key = kwargs.get("supabase_key") or os.getenv("SUPABASE_ANON_KEY")
+            supabase_url = kwargs.get("supabase_url") or os.getenv(
+                "SUPABASE_URL"
+            )
+            supabase_key = kwargs.get("supabase_key") or os.getenv(
+                "SUPABASE_ANON_KEY"
+            )
             if not supabase_url or not supabase_key:
                 raise ValueError(
                     "Supabase backend requires 'supabase_url' and 'supabase_key' parameters "
                     "or SUPABASE_URL and SUPABASE_ANON_KEY environment variables"
                 )
-            backend_kwargs.update({
-                "supabase_url": supabase_url,
-                "supabase_key": supabase_key,
-                "table_name": kwargs.get("table_name", "conversations"),
-            })
+            backend_kwargs.update(
+                {
+                    "supabase_url": supabase_url,
+                    "supabase_key": supabase_key,
+                    "table_name": kwargs.get(
+                        "table_name", "conversations"
+                    ),
+                }
+            )
         elif self.backend == "redis":
-            backend_kwargs.update({
-                "redis_host": kwargs.get("redis_host", "localhost"),
-                "redis_port": kwargs.get("redis_port", 6379),
-                "redis_db": kwargs.get("redis_db", 0),
-                "redis_password": kwargs.get("redis_password"),
-                "use_embedded_redis": kwargs.get("use_embedded_redis", True),
-                "persist_redis": kwargs.get("persist_redis", True),
-                "auto_persist": kwargs.get("auto_persist", True),
-                "redis_data_dir": kwargs.get("redis_data_dir"),
-                "conversation_id": self.id,
-                "name": self.name,
-            })
+            backend_kwargs.update(
+                {
+                    "redis_host": kwargs.get(
+                        "redis_host", "localhost"
+                    ),
+                    "redis_port": kwargs.get("redis_port", 6379),
+                    "redis_db": kwargs.get("redis_db", 0),
+                    "redis_password": kwargs.get("redis_password"),
+                    "use_embedded_redis": kwargs.get(
+                        "use_embedded_redis", True
+                    ),
+                    "persist_redis": kwargs.get(
+                        "persist_redis", True
+                    ),
+                    "auto_persist": kwargs.get("auto_persist", True),
+                    "redis_data_dir": kwargs.get("redis_data_dir"),
+                    "conversation_id": self.id,
+                    "name": self.name,
+                }
+            )
         elif self.backend in ["sqlite", "duckdb"]:
             db_path = kwargs.get("db_path")
             if db_path:
                 backend_kwargs["db_path"] = db_path
         elif self.backend == "pulsar":
             # Add pulsar-specific parameters
-            backend_kwargs.update({
-                "pulsar_url": kwargs.get("pulsar_url", "pulsar://localhost:6650"),
-                "topic": kwargs.get("topic", f"conversation-{self.id}"),
-            })
+            backend_kwargs.update(
+                {
+                    "pulsar_url": kwargs.get(
+                        "pulsar_url", "pulsar://localhost:6650"
+                    ),
+                    "topic": kwargs.get(
+                        "topic", f"conversation-{self.id}"
+                    ),
+                }
+            )
         # Create the backend instance
         logger.info(f"Initializing {self.backend} backend...")
-        self.backend_instance = _create_backend_conversation(self.backend, **backend_kwargs)
+        self.backend_instance = _create_backend_conversation(
+            self.backend, **backend_kwargs
+        )
         # Log successful initialization
-        logger.info(f"Successfully initialized {self.backend} backend for conversation '{self.name}'")
+        logger.info(
+            f"Successfully initialized {self.backend} backend for conversation '{self.name}'"
+        )
     def setup(self):
         # Set up conversations directory
@@ -473,7 +538,9 @@ class Conversation(BaseStructure):
             )
         else:
             # Fallback to in-memory if mem0 is not available
-            logger.warning("Mem0 provider not available, falling back to in-memory storage")
+            logger.warning(
+                "Mem0 provider not available, falling back to in-memory storage"
+            )
             self.add_in_memory(role, content)
     def add(
@@ -486,9 +553,13 @@ class Conversation(BaseStructure):
         # If using a persistent backend, delegate to it
         if self.backend_instance:
             try:
-                return self.backend_instance.add(role=role, content=content, metadata=metadata)
+                return self.backend_instance.add(
+                    role=role, content=content, metadata=metadata
+                )
             except Exception as e:
-                logger.error(f"Backend add failed: {e}. Falling back to in-memory.")
+                logger.error(
+                    f"Backend add failed: {e}. Falling back to in-memory."
+                )
                 return self.add_in_memory(role, content)
         elif self.provider == "in-memory":
             return self.add_in_memory(role, content)
@@ -570,7 +641,9 @@ class Conversation(BaseStructure):
         """
         if self.backend_instance:
             try:
-                return self.backend_instance.update(index, role, content)
+                return self.backend_instance.update(
+                    index, role, content
+                )
             except Exception as e:
                 logger.error(f"Backend update failed: {e}")
                 raise
@@ -615,7 +688,7 @@ class Conversation(BaseStructure):
                 logger.error(f"Backend search failed: {e}")
                 # Fallback to in-memory search
                 pass
         return [
             message
             for message in self.conversation_history
@@ -630,12 +703,14 @@ class Conversation(BaseStructure):
         """
         if self.backend_instance:
             try:
-                return self.backend_instance.display_conversation(detailed)
+                return self.backend_instance.display_conversation(
+                    detailed
+                )
             except Exception as e:
                 logger.error(f"Backend display failed: {e}")
                 # Fallback to in-memory display
                 pass
         # In-memory display implementation with proper formatting
         for message in self.conversation_history:
             content = message.get("content", "")
@@ -668,21 +743,25 @@ class Conversation(BaseStructure):
         if self.backend_instance:
             try:
-                return self.backend_instance.export_conversation(filename, *args, **kwargs)
+                return self.backend_instance.export_conversation(
+                    filename, *args, **kwargs
+                )
             except Exception as e:
                 logger.error(f"Backend export failed: {e}")
                 # Fallback to in-memory export
                 pass
         # In-memory export implementation
         # If the filename ends with .json, use save_as_json
         if filename.endswith(".json"):
             self.save_as_json(filename)
         else:
             # Simple text export for non-JSON files
-            with open(filename, "w",encoding="utf-8") as f:
+            with open(filename, "w", encoding="utf-8") as f:
                 for message in self.conversation_history:
-                    f.write(f"{message['role']}: {message['content']}\n")
+                    f.write(
+                        f"{message['role']}: {message['content']}\n"
+                    )
     def import_conversation(self, filename: str):
         """Import a conversation history from a file.
@@ -692,7 +771,9 @@ class Conversation(BaseStructure):
         """
         if self.backend_instance:
             try:
-                return self.backend_instance.import_conversation(filename)
+                return self.backend_instance.import_conversation(
+                    filename
+                )
             except Exception as e:
                 logger.error(f"Backend import failed: {e}")
                 # Fallback to in-memory import
@@ -710,7 +791,9 @@ class Conversation(BaseStructure):
             try:
                 return self.backend_instance.count_messages_by_role()
             except Exception as e:
-                logger.error(f"Backend count_messages_by_role failed: {e}")
+                logger.error(
+                    f"Backend count_messages_by_role failed: {e}"
+                )
                 # Fallback to local implementation below
                 pass
         # Initialize counts with expected roles
@@ -720,7 +803,7 @@ class Conversation(BaseStructure):
             "assistant": 0,
             "function": 0,
         }
         # Count messages by role
         for message in self.conversation_history:
             role = message["role"]
@@ -729,8 +812,9 @@ class Conversation(BaseStructure):
             else:
                 # Handle unexpected roles dynamically
                 counts[role] = counts.get(role, 0) + 1
         return counts
     def return_history_as_string(self):
         """Return the conversation history as a string.
@@ -739,12 +823,16 @@ class Conversation(BaseStructure):
         """
         if self.backend_instance:
             try:
-                return self.backend_instance.return_history_as_string()
+                return (
+                    self.backend_instance.return_history_as_string()
+                )
             except Exception as e:
-                logger.error(f"Backend return_history_as_string failed: {e}")
+                logger.error(
+                    f"Backend return_history_as_string failed: {e}"
+                )
                 # Fallback to in-memory implementation
                 pass
         formatted_messages = []
         for message in self.conversation_history:
             formatted_messages.append(
@@ -781,7 +869,7 @@ class Conversation(BaseStructure):
             except Exception as e:
                 logger.error(f"Backend save_as_json failed: {e}")
                 # Fallback to local save implementation below
         # Don't save if saving is disabled
         if not self.save_enabled:
             return
@@ -1000,9 +1088,13 @@ class Conversation(BaseStructure):
         """
         if self.backend_instance:
             try:
-                return self.backend_instance.get_last_message_as_string()
+                return (
+                    self.backend_instance.get_last_message_as_string()
+                )
             except Exception as e:
-                logger.error(f"Backend get_last_message_as_string failed: {e}")
+                logger.error(
+                    f"Backend get_last_message_as_string failed: {e}"
+                )
                 # Fallback to in-memory implementation
                 pass
         elif self.provider == "mem0":
@@ -1025,7 +1117,9 @@ class Conversation(BaseStructure):
             try:
                 return self.backend_instance.return_messages_as_list()
             except Exception as e:
-                logger.error(f"Backend return_messages_as_list failed: {e}")
+                logger.error(
+                    f"Backend return_messages_as_list failed: {e}"
+                )
                 # Fallback to in-memory implementation
                 pass
         return [
@@ -1041,9 +1135,13 @@ class Conversation(BaseStructure):
         """
         if self.backend_instance:
             try:
-                return self.backend_instance.return_messages_as_dictionary()
+                return (
+                    self.backend_instance.return_messages_as_dictionary()
+                )
             except Exception as e:
-                logger.error(f"Backend return_messages_as_dictionary failed: {e}")
+                logger.error(
+                    f"Backend return_messages_as_dictionary failed: {e}"
+                )
                 # Fallback to in-memory implementation
                 pass
         return [
@@ -1099,9 +1197,13 @@ class Conversation(BaseStructure):
         """
         if self.backend_instance:
             try:
-                return self.backend_instance.get_final_message_content()
+                return (
+                    self.backend_instance.get_final_message_content()
+                )
             except Exception as e:
-                logger.error(f"Backend get_final_message_content failed: {e}")
+                logger.error(
+                    f"Backend get_final_message_content failed: {e}"
+                )
                 # Fallback to in-memory implementation
                 pass
         if self.conversation_history:
@@ -1119,7 +1221,9 @@ class Conversation(BaseStructure):
             try:
                 return self.backend_instance.return_all_except_first()
             except Exception as e:
-                logger.error(f"Backend return_all_except_first failed: {e}")
+                logger.error(
+                    f"Backend return_all_except_first failed: {e}"
+                )
                 # Fallback to in-memory implementation
                 pass
         return self.conversation_history[2:]
@@ -1132,9 +1236,13 @@ class Conversation(BaseStructure):
         """
         if self.backend_instance:
             try:
-                return self.backend_instance.return_all_except_first_string()
+                return (
+                    self.backend_instance.return_all_except_first_string()
+                )
             except Exception as e:
-                logger.error(f"Backend return_all_except_first_string failed: {e}")
+                logger.error(
+                    f"Backend return_all_except_first_string failed: {e}"
+                )
                 # Fallback to in-memory implementation
                 pass
         return "\n".join(

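Taken together, the Conversation changes above are purely formatting; the backend-selection API itself is unchanged. A minimal usage sketch, assuming the same environment variables as the example earlier in this commit:

import os

from swarms.structs.conversation import Conversation

conversation = Conversation(
    backend="supabase",
    supabase_url=os.getenv("SUPABASE_URL"),
    supabase_key=os.getenv("SUPABASE_ANON_KEY"),
    time_enabled=True,
)
conversation.add("user", "What are the latest trends in AI technology?")
print(conversation.count_messages_by_role())
conversation.export_conversation("ai_conversation.json")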
@@ -86,7 +86,9 @@ class RedisConversationTester:
         """Initialize Redis server and conversation for testing."""
         try:
             # Try first with external Redis (if available)
-            logger.info("Trying to connect to external Redis server...")
+            logger.info(
+                "Trying to connect to external Redis server..."
+            )
             self.conversation = RedisConversation(
                 system_prompt="Test System Prompt",
                 redis_host="localhost",
@@ -94,10 +96,14 @@ class RedisConversationTester:
                 redis_retry_attempts=1,
                 use_embedded_redis=False, # Try external first
             )
-            logger.info("Successfully connected to external Redis server")
+            logger.info(
+                "Successfully connected to external Redis server"
+            )
             return True
         except Exception as external_error:
-            logger.info(f"External Redis connection failed: {external_error}")
+            logger.info(
+                f"External Redis connection failed: {external_error}"
+            )
             logger.info("Trying to start embedded Redis server...")
             try:
@@ -109,10 +115,14 @@ class RedisConversationTester:
                     redis_retry_attempts=3,
                     use_embedded_redis=True,
                 )
-                logger.info("Successfully started embedded Redis server")
+                logger.info(
+                    "Successfully started embedded Redis server"
+                )
                 return True
             except Exception as embedded_error:
-                logger.error(f"Both external and embedded Redis failed:")
+                logger.error(
+                    "Both external and embedded Redis failed:"
+                )
                 logger.error(f" External: {external_error}")
                 logger.error(f" Embedded: {embedded_error}")
                 return False
@@ -122,10 +132,16 @@ class RedisConversationTester:
         if self.conversation:
             try:
                 # Check if we have an embedded server to stop
-                if hasattr(self.conversation, 'embedded_server') and self.conversation.embedded_server is not None:
+                if (
+                    hasattr(self.conversation, "embedded_server")
+                    and self.conversation.embedded_server is not None
+                ):
                     self.conversation.embedded_server.stop()
                 # Close Redis client if it exists
-                if hasattr(self.conversation, 'redis_client') and self.conversation.redis_client:
+                if (
+                    hasattr(self.conversation, "redis_client")
+                    and self.conversation.redis_client
+                ):
                     self.conversation.redis_client.close()
             except Exception as e:
                 logger.warning(f"Error during cleanup: {str(e)}")
@@ -151,16 +167,22 @@ class RedisConversationTester:
         json_content = {"key": "value", "nested": {"data": 123}}
         self.conversation.add("system", json_content)
         last_message = self.conversation.get_final_message_content()
         # Parse the JSON string back to dict for comparison
         if isinstance(last_message, str):
             try:
                 parsed_content = json.loads(last_message)
-                assert isinstance(parsed_content, dict), "Failed to handle JSON message"
+                assert isinstance(
+                    parsed_content, dict
+                ), "Failed to handle JSON message"
             except json.JSONDecodeError:
-                assert False, "JSON message was not stored as valid JSON"
+                assert (
+                    False
+                ), "JSON message was not stored as valid JSON"
         else:
-            assert isinstance(last_message, dict), "Failed to handle JSON message"
+            assert isinstance(
+                last_message, dict
+            ), "Failed to handle JSON message"
     def test_search(self):
         """Test search functionality."""
@@ -175,7 +197,9 @@ class RedisConversationTester:
         )
         if initial_count > 0:
             self.conversation.delete(0)
-            new_count = len(self.conversation.return_messages_as_list())
+            new_count = len(
+                self.conversation.return_messages_as_list()
+            )
             assert (
                 new_count == initial_count - 1
             ), "Failed to delete message"
@@ -228,7 +252,9 @@ class RedisConversationTester:
         self.conversation.add("user", "token test message")
         time.sleep(1) # Wait for async token counting
         messages = self.conversation.to_dict()
-        assert isinstance(messages, list), "Token counting test completed"
+        assert isinstance(
+            messages, list
+        ), "Token counting test completed"
     def test_cache_operations(self):
         """Test cache operations."""
@@ -254,8 +280,10 @@ class RedisConversationTester:
         try:
             if not self.setup():
-                logger.warning("Failed to setup Redis connection. This is expected on systems without Redis server.")
+                logger.warning(
+                    "Failed to setup Redis connection. This is expected on systems without Redis server."
+                )
                 # Generate a report indicating the limitation
                 setup_failed_md = [
                     "# Redis Conversation Test Results",
@@ -265,9 +293,9 @@ class RedisConversationTester:
                     "## Summary",
                     "❌ **Redis Server Setup Failed**",
                     "",
-                    "The Redis conversation class will work properly when a Redis server is available."
+                    "The Redis conversation class will work properly when a Redis server is available.",
                 ]
                 return "\n".join(setup_failed_md)
         tests = [
@@ -304,12 +332,16 @@ def main():
     # Save results to file
     try:
-        with open("redis_test_results.md", "w", encoding="utf-8") as f:
+        with open(
+            "redis_test_results.md", "w", encoding="utf-8"
+        ) as f:
             f.write(markdown_results)
-        logger.info("Test results have been saved to redis_test_results.md")
+        logger.info(
+            "Test results have been saved to redis_test_results.md"
+        )
     except Exception as e:
         logger.error(f"Failed to save test results: {e}")
     # Also print results to console
     print(markdown_results)

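For completeness, a minimal standalone use of the class under test, mirroring the tester's constructor arguments above (values are placeholders; use_embedded_redis=True asks the wrapper to start its own server when no external Redis is reachable):

from swarms.communication.redis_wrap import RedisConversation

conv = RedisConversation(
    system_prompt="Test System Prompt",
    redis_host="localhost",
    redis_port=6379,
    redis_retry_attempts=3,
    use_embedded_redis=True,
)
conv.add("user", "token test message")
print(conv.return_messages_as_list())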