docs cleanup

dependabot/pip/pydantic-2.11.7
Kye Gomez 2 weeks ago
parent 6c0b1fd5b0
commit 3bc92755ff

@@ -15,7 +15,7 @@ Prerequisites:
 """
 import os
-from typing import List, Callable
+from typing import List
 from swarms.structs.agent import Agent
 from swarms.structs.conversation import Conversation
 from swarms.structs.multi_agent_exec import run_agents_concurrently
@@ -24,6 +24,7 @@ from swarms.utils.history_output_formatter import (
     HistoryOutputType,
 )
 def aggregator_agent_task_prompt(
     task: str, workers: List[Agent], conversation: Conversation
 ):
@@ -180,10 +181,12 @@ def aggregate_with_supabase(
            "environment variables or explicit parameters"
        )
-    conversation_kwargs.update({
-        "supabase_url": url,
-        "supabase_key": key,
-    })
+    conversation_kwargs.update(
+        {
+            "supabase_url": url,
+            "supabase_key": key,
+        }
+    )
     try:
         # Create conversation with Supabase backend
@@ -193,20 +196,24 @@ def aggregate_with_supabase(
             system_prompt="Multi-agent collaboration session with persistent storage",
             time_enabled=True,
         )
-        print(f"✅ Successfully initialized {backend} backend for conversation storage")
+        print(
+            f"✅ Successfully initialized {backend} backend for conversation storage"
+        )
         # Add initial task to conversation
         conversation.add("system", f"Task: {task}")
     except ImportError as e:
         print(f"❌ Backend initialization failed: {e}")
-        print(f"💡 Falling back to in-memory storage")
+        print("💡 Falling back to in-memory storage")
         conversation = Conversation(backend="in-memory")
     # Create aggregator agent
     aggregator_agent = create_aggregator_agent()
-    print(f"🚀 Starting multi-agent execution with {len(workers)} agents...")
+    print(
+        f"🚀 Starting multi-agent execution with {len(workers)} agents..."
+    )
     # Run agents concurrently
     results = run_agents_concurrently(agents=workers, task=task)
@@ -225,8 +232,7 @@ def aggregate_with_supabase(
     # Store aggregated result
     conversation.add(
-        content=final_result,
-        role=aggregator_agent.agent_name
+        content=final_result, role=aggregator_agent.agent_name
     )
     print("✅ Aggregation complete!")
@@ -239,8 +245,10 @@ def aggregate_with_supabase(
 # Example usage with real Swarms agents
 if __name__ == "__main__":
-    print("🧪 Testing Swarms Multi-Agent System with Supabase Backend")
-    print("="*70)
+    print(
+        "🧪 Testing Swarms Multi-Agent System with Supabase Backend"
+    )
+    print("=" * 70)
     # Check environment setup
     print("\n⚙️ Environment Setup Check")
@@ -250,9 +258,15 @@ if __name__ == "__main__":
     supabase_key = os.getenv("SUPABASE_ANON_KEY")
     openai_key = os.getenv("OPENAI_API_KEY")
-    print(f"SUPABASE_URL: {'✅ Set' if supabase_url else '❌ Not set'}")
-    print(f"SUPABASE_ANON_KEY: {'✅ Set' if supabase_key else '❌ Not set'}")
-    print(f"OPENAI_API_KEY: {'✅ Set' if openai_key else '❌ Not set'}")
+    print(
+        f"SUPABASE_URL: {'✅ Set' if supabase_url else '❌ Not set'}"
+    )
+    print(
+        f"SUPABASE_ANON_KEY: {'✅ Set' if supabase_key else '❌ Not set'}"
+    )
+    print(
+        f"OPENAI_API_KEY: {'✅ Set' if openai_key else '❌ Not set'}"
+    )
     if not (supabase_url and supabase_key):
         print("\n⚠️ Missing Supabase configuration!")
@@ -264,7 +278,9 @@ if __name__ == "__main__":
     if not openai_key:
         print("\n⚠️ Missing OpenAI API key!")
         print("Please set: export OPENAI_API_KEY=your-api-key")
-        print("You can also use other LLM providers (Anthropic, Google, etc.)")
+        print(
+            "You can also use other LLM providers (Anthropic, Google, etc.)"
+        )
     # Example 1: Basic Multi-Agent Research Task
     print("\n📦 Example 1: Multi-Agent Market Research")
@@ -283,7 +299,9 @@ if __name__ == "__main__":
     """
     print(f"📋 Task: {research_task.strip()}")
-    print(f"👥 Team: {[agent.agent_name for agent in research_team]}")
+    print(
+        f"👥 Team: {[agent.agent_name for agent in research_team]}"
+    )
     if supabase_url and supabase_key and openai_key:
         # Run with real agents and Supabase storage
@@ -301,7 +319,9 @@ if __name__ == "__main__":
             print(result)
         else:
-            print("❌ Skipping real agent execution due to missing configuration")
+            print(
+                "❌ Skipping real agent execution due to missing configuration"
+            )
     except Exception as e:
         print(f"❌ Error in multi-agent research: {e}")
@@ -323,25 +343,50 @@ if __name__ == "__main__":
             print("✅ Supabase conversation created successfully")
             # Add sample conversation
-            conv.add("user", "What are the latest trends in AI technology?")
-            conv.add("assistant", "Based on current developments, key AI trends include:")
-            conv.add("assistant", "1. Large Language Models (LLMs) advancing rapidly")
-            conv.add("assistant", "2. Multimodal AI combining text, image, and video")
-            conv.add("assistant", "3. AI agents becoming more autonomous and capable")
+            conv.add(
+                "user", "What are the latest trends in AI technology?"
+            )
+            conv.add(
+                "assistant",
+                "Based on current developments, key AI trends include:",
+            )
+            conv.add(
+                "assistant",
+                "1. Large Language Models (LLMs) advancing rapidly",
+            )
+            conv.add(
+                "assistant",
+                "2. Multimodal AI combining text, image, and video",
+            )
+            conv.add(
+                "assistant",
+                "3. AI agents becoming more autonomous and capable",
+            )
             conv.add("user", "How do these trends affect businesses?")
-            conv.add("assistant", "These trends are transforming businesses through automation, enhanced decision-making, and new product capabilities.")
+            conv.add(
+                "assistant",
+                "These trends are transforming businesses through automation, enhanced decision-making, and new product capabilities.",
+            )
             # Test conversation operations
             print(f"📊 Message count: {len(conv.to_dict())}")
-            print(f"🔍 Search results for 'AI': {len(conv.search('AI'))}")
-            print(f"📈 Role distribution: {conv.count_messages_by_role()}")
+            print(
+                f"🔍 Search results for 'AI': {len(conv.search('AI'))}"
+            )
+            print(
+                f"📈 Role distribution: {conv.count_messages_by_role()}"
+            )
             # Export conversation
             conv.export_conversation("supabase_ai_conversation.json")
-            print("💾 Conversation exported to supabase_ai_conversation.json")
+            print(
+                "💾 Conversation exported to supabase_ai_conversation.json"
+            )
         else:
-            print("❌ Skipping Supabase test due to missing configuration")
+            print(
+                "❌ Skipping Supabase test due to missing configuration"
+            )
     except Exception as e:
         print(f"❌ Error in conversation storage test: {e}")
@@ -370,20 +415,26 @@ if __name__ == "__main__":
             simple_task = "Explain the benefits of using persistent conversation storage in AI applications."
             response = demo_agent.run(simple_task)
-            print(f"\n📝 Agent Response:")
+            print("\n📝 Agent Response:")
             print("-" * 30)
-            print(response[:500] + "..." if len(response) > 500 else response)
+            print(
+                response[:500] + "..."
+                if len(response) > 500
+                else response
+            )
         else:
-            print("❌ Skipping agent demo due to missing OpenAI API key")
+            print(
+                "❌ Skipping agent demo due to missing OpenAI API key"
+            )
     except Exception as e:
         print(f"❌ Error in agent demo: {e}")
     # Summary and Next Steps
-    print("\n" + "="*70)
+    print("\n" + "=" * 70)
     print("🏁 Demo Summary")
-    print("="*70)
+    print("=" * 70)
     print("\n✨ What was demonstrated:")
     print("1. 🏗️ Real Swarms agent creation with specialized roles")
@@ -401,15 +452,25 @@ if __name__ == "__main__":
     print("\n🔗 Resources:")
     print("- Swarms Documentation: https://docs.swarms.world")
-    print("- Supabase Python Docs: https://supabase.com/docs/reference/python/")
+    print(
+        "- Supabase Python Docs: https://supabase.com/docs/reference/python/"
+    )
     print("- GitHub Repository: https://github.com/kyegomez/swarms")
-    print(f"\n⚙️ Final Configuration Status:")
-    print(f" SUPABASE_URL: {'✅ Set' if supabase_url else '❌ Not set'}")
-    print(f" SUPABASE_ANON_KEY: {'✅ Set' if supabase_key else '❌ Not set'}")
-    print(f" OPENAI_API_KEY: {'✅ Set' if openai_key else '❌ Not set'}")
+    print("\n⚙️ Final Configuration Status:")
+    print(
+        f" SUPABASE_URL: {'✅ Set' if supabase_url else '❌ Not set'}"
+    )
+    print(
+        f" SUPABASE_ANON_KEY: {'✅ Set' if supabase_key else '❌ Not set'}"
+    )
+    print(
+        f" OPENAI_API_KEY: {'✅ Set' if openai_key else '❌ Not set'}"
+    )
     if supabase_url and supabase_key and openai_key:
         print("\n🎉 All systems ready! You can run the full demo.")
     else:
-        print("\n⚠️ Set missing environment variables to run the full demo.")
+        print(
+            "\n⚠️ Set missing environment variables to run the full demo."
+        )

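The hunks above are formatting-only; for quick reference, the storage flow the example exercises condenses to the sketch below. Parameter names are taken from the diff, while the task text and export filename are illustrative:

import os

from swarms.structs.conversation import Conversation

url = os.getenv("SUPABASE_URL")
key = os.getenv("SUPABASE_ANON_KEY")

try:
    # Persistent backend, mirroring the constructor call in the example.
    conversation = Conversation(
        backend="supabase",
        supabase_url=url,
        supabase_key=key,
        time_enabled=True,
    )
except ImportError:
    # Same fallback the example uses when the supabase package is missing.
    conversation = Conversation(backend="in-memory")

conversation.add("user", "What are the latest trends in AI technology?")
print(conversation.count_messages_by_role())
conversation.export_conversation("conversation_backup.json")  # illustrative filename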
@@ -315,6 +315,7 @@ nav:
     - Agent Output Types: "swarms/examples/agent_output_types.md"
     - Agent with Structured Outputs: "swarms/examples/agent_structured_outputs.md"
     - Agents with Vision: "swarms/examples/vision_processing.md"
+    - Gradio Chat Interface: "swarms/ui/main.md"
     - Various Model Providers:
       - OpenAI: "swarms/examples/openai_example.md"
       - Anthropic: "swarms/examples/claude.md"
@@ -392,6 +393,7 @@ nav:
     - Swarms API Pricing: "swarms_cloud/api_pricing.md"
     - Swarms API Pricing in Chinese: "swarms_cloud/chinese_api_pricing.md"
     - Swarms Cloud Subscription Tiers: "swarms_cloud/subscription_tiers.md"
+    - Swarm Ecosystem APIs:
     - MCS API: "swarms_cloud/mcs_api.md"
     # - CreateNow API: "swarms_cloud/create_api.md"
@@ -403,15 +405,14 @@ nav:
     - Overview: "swarms_platform/index.md"
     - Swarm Platform API Keys: "swarms_platform/apikeys.md"
     - Account Management: "swarms_platform/account_management.md"
-    - Swarms Chat Tutorial: "swarms/ui/main.md"
   - Swarms Rust:
     - Overview: "swarms_rs/overview.md"
     - Agents: "swarms_rs/agents.md"
-  - Governance:
-    - Resources: "governance/main.md"
-    - Tokenomics: "web3/token.md"
+  - Resources:
+    - Overview: "governance/main.md"
+    # - Tokenomics: "web3/token.md"
   # - Prompts API:

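Because these nav hunks move and rename entries (the chat tutorial now appears as "Gradio Chat Interface", Governance collapses into Resources), a quick sanity check that every referenced page exists can catch a broken docs build early. A hypothetical check script, not part of this commit, assuming the config parses with yaml.safe_load (configs using custom !!python tags need a custom loader):

from pathlib import Path

import yaml  # PyYAML

config = yaml.safe_load(Path("mkdocs.yml").read_text(encoding="utf-8"))
docs_root = Path("docs")


def iter_pages(node):
    """Yield every page path referenced in the nav tree."""
    if isinstance(node, str):
        yield node
    elif isinstance(node, dict):
        for value in node.values():
            yield from iter_pages(value)
    elif isinstance(node, list):
        for item in node:
            yield from iter_pages(item)


missing = [
    page
    for page in iter_pages(config.get("nav", []))
    if not page.startswith("http") and not (docs_root / page).exists()
]
print("Missing nav pages:", missing or "none")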
@ -67,7 +67,7 @@ def show_ascii_art():
Text(ASCII_ART, style=f"bold {COLORS['primary']}"), Text(ASCII_ART, style=f"bold {COLORS['primary']}"),
border_style=COLORS["secondary"], border_style=COLORS["secondary"],
title="[bold]Welcome to Swarms[/bold]", title="[bold]Welcome to Swarms[/bold]",
subtitle="[dim]Power to the Swarms[/dim]", subtitle="[dim]swarms.ai[/dim]",
) )
console.print(panel) console.print(panel)

@@ -78,6 +78,7 @@ class DuckDBConversation(BaseCommunication):
         # Lazy load duckdb with auto-installation
         try:
             import duckdb
             self.duckdb = duckdb
             self.duckdb_available = True
         except ImportError:
@@ -88,13 +89,14 @@ class DuckDBConversation(BaseCommunication):
                 import sys
                 # Install duckdb
-                subprocess.check_call([
-                    sys.executable, "-m", "pip", "install", "duckdb"
-                ])
+                subprocess.check_call(
+                    [sys.executable, "-m", "pip", "install", "duckdb"]
+                )
                 print("✅ DuckDB installed successfully!")
                 # Try importing again
                 import duckdb
                 self.duckdb = duckdb
                 self.duckdb_available = True
                 print("✅ DuckDB loaded successfully!")

@@ -66,23 +66,33 @@ class PulsarConversation(BaseCommunication):
         # Lazy load Pulsar with auto-installation
         try:
             import pulsar
             self.pulsar = pulsar
             self.pulsar_available = True
         except ImportError:
             # Auto-install pulsar-client if not available
-            print("📦 Pulsar client not found. Installing automatically...")
+            print(
+                "📦 Pulsar client not found. Installing automatically..."
+            )
             try:
                 import subprocess
                 import sys
                 # Install pulsar-client
-                subprocess.check_call([
-                    sys.executable, "-m", "pip", "install", "pulsar-client"
-                ])
+                subprocess.check_call(
+                    [
+                        sys.executable,
+                        "-m",
+                        "pip",
+                        "install",
+                        "pulsar-client",
+                    ]
+                )
                 print("✅ Pulsar client installed successfully!")
                 # Try importing again
                 import pulsar
                 self.pulsar = pulsar
                 self.pulsar_available = True
                 print("✅ Pulsar loaded successfully!")
@@ -646,6 +656,7 @@ class PulsarConversation(BaseCommunication):
         """
         try:
             import pulsar
             pulsar_available = True
         except ImportError:
             logger.error("Pulsar client library is not installed")

@@ -31,6 +31,7 @@ try:
         RedisError,
         TimeoutError,
     )
     REDIS_AVAILABLE = True
 except ImportError:
     # Auto-install Redis at import time
@@ -40,9 +41,9 @@ except ImportError:
         import sys
         # Install redis
-        subprocess.check_call([
-            sys.executable, "-m", "pip", "install", "redis"
-        ])
+        subprocess.check_call(
+            [sys.executable, "-m", "pip", "install", "redis"]
+        )
         print("✅ Redis installed successfully!")
         # Try importing again
@@ -54,12 +55,15 @@ except ImportError:
             RedisError,
             TimeoutError,
         )
         REDIS_AVAILABLE = True
         print("✅ Redis loaded successfully!")
     except Exception as e:
         REDIS_AVAILABLE = False
-        print(f"❌ Failed to auto-install Redis. Please install manually with 'pip install redis': {e}")
+        print(
+            f"❌ Failed to auto-install Redis. Please install manually with 'pip install redis': {e}"
+        )
 class RedisConnectionError(Exception):
@@ -186,7 +190,11 @@ rdbchecksum yes
         try:
             if self.process:
                 # Send SAVE and BGSAVE commands before stopping if persistence is enabled
-                if self.persist and self.auto_persist and REDIS_AVAILABLE:
+                if (
+                    self.persist
+                    and self.auto_persist
+                    and REDIS_AVAILABLE
+                ):
                     try:
                         r = redis.Redis(
                             host="localhost", port=self.port

@@ -96,24 +96,34 @@ class SupabaseConversation(BaseCommunication):
         # Lazy load Supabase with auto-installation
         try:
             from supabase import Client, create_client
             self.supabase_client = Client
             self.create_client = create_client
             self.supabase_available = True
         except ImportError:
             # Auto-install supabase if not available
-            print("📦 Supabase not found. Installing automatically...")
+            print(
+                "📦 Supabase not found. Installing automatically..."
+            )
             try:
                 import subprocess
                 import sys
                 # Install supabase
-                subprocess.check_call([
-                    sys.executable, "-m", "pip", "install", "supabase"
-                ])
+                subprocess.check_call(
+                    [
+                        sys.executable,
+                        "-m",
+                        "pip",
+                        "install",
+                        "supabase",
+                    ]
+                )
                 print("✅ Supabase installed successfully!")
                 # Try importing again
                 from supabase import Client, create_client
                 self.supabase_client = Client
                 self.create_client = create_client
                 self.supabase_available = True
@@ -179,7 +189,9 @@ class SupabaseConversation(BaseCommunication):
         )  # For thread-safe operations if any (e.g. token calculation)
         try:
-            self.client = self.create_client(supabase_url, supabase_key)
+            self.client = self.create_client(
+                supabase_url, supabase_key
+            )
             if self.enable_logging:
                 self.logger.info(
                     f"Successfully initialized Supabase client for URL: {supabase_url}"

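The duckdb, pulsar, redis, and supabase hunks above all reformat the same lazy-import-with-auto-install pattern. Factored out, the pattern looks roughly like the sketch below; the helper name is hypothetical and the real wrappers inline this logic rather than sharing a function:

import importlib
import subprocess
import sys
from typing import Optional


def lazy_import(module: str, pip_name: Optional[str] = None):
    """Import a module, installing its distribution with pip on first failure."""
    try:
        return importlib.import_module(module)
    except ImportError:
        print(f"📦 {module} not found. Installing automatically...")
        subprocess.check_call(
            [sys.executable, "-m", "pip", "install", pip_name or module]
        )
        return importlib.import_module(module)


# Example: the pulsar module is published on PyPI as "pulsar-client".
# pulsar = lazy_import("pulsar", "pulsar-client")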
@@ -53,7 +53,16 @@ def get_conversation_dir():
 # Define available providers
-providers = Literal["mem0", "in-memory", "supabase", "redis", "sqlite", "duckdb", "pulsar"]
+providers = Literal[
+    "mem0",
+    "in-memory",
+    "supabase",
+    "redis",
+    "sqlite",
+    "duckdb",
+    "pulsar",
+]
 def _create_backend_conversation(backend: str, **kwargs):
     """
@@ -75,19 +84,34 @@ def _create_backend_conversation(backend: str, **kwargs):
     """
     try:
         if backend == "supabase":
-            from swarms.communication.supabase_wrap import SupabaseConversation
+            from swarms.communication.supabase_wrap import (
+                SupabaseConversation,
+            )
             return SupabaseConversation(**kwargs)
         elif backend == "redis":
-            from swarms.communication.redis_wrap import RedisConversation
+            from swarms.communication.redis_wrap import (
+                RedisConversation,
+            )
             return RedisConversation(**kwargs)
         elif backend == "sqlite":
-            from swarms.communication.sqlite_wrap import SQLiteConversation
+            from swarms.communication.sqlite_wrap import (
+                SQLiteConversation,
+            )
             return SQLiteConversation(**kwargs)
         elif backend == "duckdb":
-            from swarms.communication.duckdb_wrap import DuckDBConversation
+            from swarms.communication.duckdb_wrap import (
+                DuckDBConversation,
+            )
             return DuckDBConversation(**kwargs)
         elif backend == "pulsar":
-            from swarms.communication.pulsar_struct import PulsarConversation
+            from swarms.communication.pulsar_struct import (
+                PulsarConversation,
+            )
             return PulsarConversation(**kwargs)
         else:
             raise ValueError(
@@ -104,7 +128,9 @@ def _create_backend_conversation(backend: str, **kwargs):
             "pulsar": "pip install pulsar-client",
         }
-        install_cmd = backend_deps.get(backend, f"Check documentation for {backend}")
+        install_cmd = backend_deps.get(
+            backend, f"Check documentation for {backend}"
+        )
         logger.error(
             f"Failed to initialize {backend} backend. "
             f"Missing dependencies. Install with: {install_cmd}"
@@ -190,7 +216,6 @@ class Conversation(BaseStructure):
         auto_persist: bool = True,
         redis_data_dir: Optional[str] = None,
         conversations_dir: Optional[str] = None,
         *args,
         **kwargs,
     ):
@@ -202,7 +227,15 @@ class Conversation(BaseStructure):
         self.backend_instance = None
         # Validate backend
-        valid_backends = ["in-memory", "mem0", "supabase", "redis", "sqlite", "duckdb", "pulsar"]
+        valid_backends = [
+            "in-memory",
+            "mem0",
+            "supabase",
+            "redis",
+            "sqlite",
+            "duckdb",
+            "pulsar",
+        ]
         if self.backend not in valid_backends:
             raise ValueError(
                 f"Invalid backend: '{self.backend}'. "
@@ -243,7 +276,13 @@ class Conversation(BaseStructure):
         self.conversations_dir = conversations_dir
         # Initialize backend if using persistent storage
-        if self.backend in ["supabase", "redis", "sqlite", "duckdb", "pulsar"]:
+        if self.backend in [
+            "supabase",
+            "redis",
+            "sqlite",
+            "duckdb",
+            "pulsar",
+        ]:
             try:
                 self._initialize_backend(
                     supabase_url=supabase_url,
@@ -258,7 +297,7 @@ class Conversation(BaseStructure):
                     persist_redis=persist_redis,
                     auto_persist=auto_persist,
                     redis_data_dir=redis_data_dir,
-                    **kwargs
+                    **kwargs,
                 )
             except Exception as e:
                 logger.warning(
@@ -297,33 +336,49 @@ class Conversation(BaseStructure):
         # Add backend-specific parameters
         if self.backend == "supabase":
-            supabase_url = kwargs.get("supabase_url") or os.getenv("SUPABASE_URL")
-            supabase_key = kwargs.get("supabase_key") or os.getenv("SUPABASE_ANON_KEY")
+            supabase_url = kwargs.get("supabase_url") or os.getenv(
+                "SUPABASE_URL"
+            )
+            supabase_key = kwargs.get("supabase_key") or os.getenv(
+                "SUPABASE_ANON_KEY"
+            )
             if not supabase_url or not supabase_key:
                 raise ValueError(
                     "Supabase backend requires 'supabase_url' and 'supabase_key' parameters "
                     "or SUPABASE_URL and SUPABASE_ANON_KEY environment variables"
                 )
-            backend_kwargs.update({
-                "supabase_url": supabase_url,
-                "supabase_key": supabase_key,
-                "table_name": kwargs.get("table_name", "conversations"),
-            })
+            backend_kwargs.update(
+                {
+                    "supabase_url": supabase_url,
+                    "supabase_key": supabase_key,
+                    "table_name": kwargs.get(
+                        "table_name", "conversations"
+                    ),
+                }
+            )
         elif self.backend == "redis":
-            backend_kwargs.update({
-                "redis_host": kwargs.get("redis_host", "localhost"),
-                "redis_port": kwargs.get("redis_port", 6379),
-                "redis_db": kwargs.get("redis_db", 0),
-                "redis_password": kwargs.get("redis_password"),
-                "use_embedded_redis": kwargs.get("use_embedded_redis", True),
-                "persist_redis": kwargs.get("persist_redis", True),
-                "auto_persist": kwargs.get("auto_persist", True),
-                "redis_data_dir": kwargs.get("redis_data_dir"),
-                "conversation_id": self.id,
-                "name": self.name,
-            })
+            backend_kwargs.update(
+                {
+                    "redis_host": kwargs.get(
+                        "redis_host", "localhost"
+                    ),
+                    "redis_port": kwargs.get("redis_port", 6379),
+                    "redis_db": kwargs.get("redis_db", 0),
+                    "redis_password": kwargs.get("redis_password"),
+                    "use_embedded_redis": kwargs.get(
+                        "use_embedded_redis", True
+                    ),
+                    "persist_redis": kwargs.get(
+                        "persist_redis", True
+                    ),
+                    "auto_persist": kwargs.get("auto_persist", True),
+                    "redis_data_dir": kwargs.get("redis_data_dir"),
+                    "conversation_id": self.id,
+                    "name": self.name,
+                }
+            )
         elif self.backend in ["sqlite", "duckdb"]:
             db_path = kwargs.get("db_path")
@@ -332,17 +387,27 @@ class Conversation(BaseStructure):
         elif self.backend == "pulsar":
             # Add pulsar-specific parameters
-            backend_kwargs.update({
-                "pulsar_url": kwargs.get("pulsar_url", "pulsar://localhost:6650"),
-                "topic": kwargs.get("topic", f"conversation-{self.id}"),
-            })
+            backend_kwargs.update(
+                {
+                    "pulsar_url": kwargs.get(
+                        "pulsar_url", "pulsar://localhost:6650"
+                    ),
+                    "topic": kwargs.get(
+                        "topic", f"conversation-{self.id}"
+                    ),
+                }
+            )
         # Create the backend instance
         logger.info(f"Initializing {self.backend} backend...")
-        self.backend_instance = _create_backend_conversation(self.backend, **backend_kwargs)
+        self.backend_instance = _create_backend_conversation(
+            self.backend, **backend_kwargs
+        )
         # Log successful initialization
-        logger.info(f"Successfully initialized {self.backend} backend for conversation '{self.name}'")
+        logger.info(
+            f"Successfully initialized {self.backend} backend for conversation '{self.name}'"
+        )
     def setup(self):
         # Set up conversations directory
@@ -473,7 +538,9 @@ class Conversation(BaseStructure):
             )
         else:
             # Fallback to in-memory if mem0 is not available
-            logger.warning("Mem0 provider not available, falling back to in-memory storage")
+            logger.warning(
+                "Mem0 provider not available, falling back to in-memory storage"
+            )
             self.add_in_memory(role, content)
     def add(
@@ -486,9 +553,13 @@ class Conversation(BaseStructure):
         # If using a persistent backend, delegate to it
         if self.backend_instance:
             try:
-                return self.backend_instance.add(role=role, content=content, metadata=metadata)
+                return self.backend_instance.add(
+                    role=role, content=content, metadata=metadata
+                )
             except Exception as e:
-                logger.error(f"Backend add failed: {e}. Falling back to in-memory.")
+                logger.error(
+                    f"Backend add failed: {e}. Falling back to in-memory."
+                )
                 return self.add_in_memory(role, content)
         elif self.provider == "in-memory":
             return self.add_in_memory(role, content)
@@ -570,7 +641,9 @@ class Conversation(BaseStructure):
         """
         if self.backend_instance:
             try:
-                return self.backend_instance.update(index, role, content)
+                return self.backend_instance.update(
+                    index, role, content
+                )
             except Exception as e:
                 logger.error(f"Backend update failed: {e}")
                 raise
@@ -630,7 +703,9 @@ class Conversation(BaseStructure):
         """
         if self.backend_instance:
             try:
-                return self.backend_instance.display_conversation(detailed)
+                return self.backend_instance.display_conversation(
+                    detailed
+                )
             except Exception as e:
                 logger.error(f"Backend display failed: {e}")
         # Fallback to in-memory display
@ -668,7 +743,9 @@ class Conversation(BaseStructure):
if self.backend_instance: if self.backend_instance:
try: try:
return self.backend_instance.export_conversation(filename, *args, **kwargs) return self.backend_instance.export_conversation(
filename, *args, **kwargs
)
except Exception as e: except Exception as e:
logger.error(f"Backend export failed: {e}") logger.error(f"Backend export failed: {e}")
# Fallback to in-memory export # Fallback to in-memory export
@@ -680,9 +757,11 @@ class Conversation(BaseStructure):
             self.save_as_json(filename)
         else:
             # Simple text export for non-JSON files
-            with open(filename, "w",encoding="utf-8") as f:
+            with open(filename, "w", encoding="utf-8") as f:
                 for message in self.conversation_history:
-                    f.write(f"{message['role']}: {message['content']}\n")
+                    f.write(
+                        f"{message['role']}: {message['content']}\n"
+                    )
     def import_conversation(self, filename: str):
         """Import a conversation history from a file.
@@ -692,7 +771,9 @@ class Conversation(BaseStructure):
         """
         if self.backend_instance:
             try:
-                return self.backend_instance.import_conversation(filename)
+                return self.backend_instance.import_conversation(
+                    filename
+                )
             except Exception as e:
                 logger.error(f"Backend import failed: {e}")
         # Fallback to in-memory import
@@ -710,7 +791,9 @@ class Conversation(BaseStructure):
             try:
                 return self.backend_instance.count_messages_by_role()
             except Exception as e:
-                logger.error(f"Backend count_messages_by_role failed: {e}")
+                logger.error(
+                    f"Backend count_messages_by_role failed: {e}"
+                )
                 # Fallback to local implementation below
                 pass
         # Initialize counts with expected roles
@@ -731,6 +814,7 @@ class Conversation(BaseStructure):
                 counts[role] = counts.get(role, 0) + 1
         return counts
     def return_history_as_string(self):
         """Return the conversation history as a string.
@@ -739,9 +823,13 @@ class Conversation(BaseStructure):
         """
         if self.backend_instance:
             try:
-                return self.backend_instance.return_history_as_string()
+                return (
+                    self.backend_instance.return_history_as_string()
+                )
             except Exception as e:
-                logger.error(f"Backend return_history_as_string failed: {e}")
+                logger.error(
+                    f"Backend return_history_as_string failed: {e}"
+                )
                 # Fallback to in-memory implementation
                 pass
@@ -1000,9 +1088,13 @@ class Conversation(BaseStructure):
         """
         if self.backend_instance:
             try:
-                return self.backend_instance.get_last_message_as_string()
+                return (
+                    self.backend_instance.get_last_message_as_string()
+                )
             except Exception as e:
-                logger.error(f"Backend get_last_message_as_string failed: {e}")
+                logger.error(
+                    f"Backend get_last_message_as_string failed: {e}"
+                )
                 # Fallback to in-memory implementation
                 pass
         elif self.provider == "mem0":
@@ -1025,7 +1117,9 @@ class Conversation(BaseStructure):
             try:
                 return self.backend_instance.return_messages_as_list()
             except Exception as e:
-                logger.error(f"Backend return_messages_as_list failed: {e}")
+                logger.error(
+                    f"Backend return_messages_as_list failed: {e}"
+                )
                 # Fallback to in-memory implementation
                 pass
         return [
@@ -1041,9 +1135,13 @@ class Conversation(BaseStructure):
         """
         if self.backend_instance:
             try:
-                return self.backend_instance.return_messages_as_dictionary()
+                return (
+                    self.backend_instance.return_messages_as_dictionary()
+                )
             except Exception as e:
-                logger.error(f"Backend return_messages_as_dictionary failed: {e}")
+                logger.error(
+                    f"Backend return_messages_as_dictionary failed: {e}"
+                )
                 # Fallback to in-memory implementation
                 pass
         return [
@@ -1099,9 +1197,13 @@ class Conversation(BaseStructure):
         """
         if self.backend_instance:
             try:
-                return self.backend_instance.get_final_message_content()
+                return (
+                    self.backend_instance.get_final_message_content()
+                )
             except Exception as e:
-                logger.error(f"Backend get_final_message_content failed: {e}")
+                logger.error(
+                    f"Backend get_final_message_content failed: {e}"
+                )
                 # Fallback to in-memory implementation
                 pass
         if self.conversation_history:
@@ -1119,7 +1221,9 @@ class Conversation(BaseStructure):
             try:
                 return self.backend_instance.return_all_except_first()
             except Exception as e:
-                logger.error(f"Backend return_all_except_first failed: {e}")
+                logger.error(
+                    f"Backend return_all_except_first failed: {e}"
+                )
                 # Fallback to in-memory implementation
                 pass
         return self.conversation_history[2:]
@@ -1132,9 +1236,13 @@ class Conversation(BaseStructure):
         """
         if self.backend_instance:
             try:
-                return self.backend_instance.return_all_except_first_string()
+                return (
+                    self.backend_instance.return_all_except_first_string()
+                )
             except Exception as e:
-                logger.error(f"Backend return_all_except_first_string failed: {e}")
+                logger.error(
+                    f"Backend return_all_except_first_string failed: {e}"
+                )
                 # Fallback to in-memory implementation
                 pass
         return "\n".join(

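For orientation, the _create_backend_conversation dispatch reformatted above maps backend names onto the wrapper classes imported in those hunks. The sketch below re-states that mapping; module paths and class names come from the diff, while the helper function itself is illustrative:

import importlib

# Backend name -> (module, class), as listed in the import hunks above.
BACKEND_WRAPPERS = {
    "supabase": ("swarms.communication.supabase_wrap", "SupabaseConversation"),
    "redis": ("swarms.communication.redis_wrap", "RedisConversation"),
    "sqlite": ("swarms.communication.sqlite_wrap", "SQLiteConversation"),
    "duckdb": ("swarms.communication.duckdb_wrap", "DuckDBConversation"),
    "pulsar": ("swarms.communication.pulsar_struct", "PulsarConversation"),
}


def create_backend(backend: str, **kwargs):
    """Illustrative re-statement of the dispatch in _create_backend_conversation."""
    try:
        module_name, class_name = BACKEND_WRAPPERS[backend]
    except KeyError:
        raise ValueError(f"Unsupported backend: {backend}") from None
    cls = getattr(importlib.import_module(module_name), class_name)
    return cls(**kwargs)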
@@ -86,7 +86,9 @@ class RedisConversationTester:
         """Initialize Redis server and conversation for testing."""
         try:
             # Try first with external Redis (if available)
-            logger.info("Trying to connect to external Redis server...")
+            logger.info(
+                "Trying to connect to external Redis server..."
+            )
             self.conversation = RedisConversation(
                 system_prompt="Test System Prompt",
                 redis_host="localhost",
@@ -94,10 +96,14 @@ class RedisConversationTester:
                 redis_retry_attempts=1,
                 use_embedded_redis=False,  # Try external first
             )
-            logger.info("Successfully connected to external Redis server")
+            logger.info(
+                "Successfully connected to external Redis server"
+            )
             return True
         except Exception as external_error:
-            logger.info(f"External Redis connection failed: {external_error}")
+            logger.info(
+                f"External Redis connection failed: {external_error}"
+            )
             logger.info("Trying to start embedded Redis server...")
             try:
@@ -109,10 +115,14 @@ class RedisConversationTester:
                     redis_retry_attempts=3,
                     use_embedded_redis=True,
                 )
-                logger.info("Successfully started embedded Redis server")
+                logger.info(
+                    "Successfully started embedded Redis server"
+                )
                 return True
             except Exception as embedded_error:
-                logger.error(f"Both external and embedded Redis failed:")
+                logger.error(
+                    "Both external and embedded Redis failed:"
+                )
                 logger.error(f"  External: {external_error}")
                 logger.error(f"  Embedded: {embedded_error}")
                 return False
@@ -122,10 +132,16 @@ class RedisConversationTester:
         if self.conversation:
             try:
                 # Check if we have an embedded server to stop
-                if hasattr(self.conversation, 'embedded_server') and self.conversation.embedded_server is not None:
+                if (
+                    hasattr(self.conversation, "embedded_server")
+                    and self.conversation.embedded_server is not None
+                ):
                     self.conversation.embedded_server.stop()
                 # Close Redis client if it exists
-                if hasattr(self.conversation, 'redis_client') and self.conversation.redis_client:
+                if (
+                    hasattr(self.conversation, "redis_client")
+                    and self.conversation.redis_client
+                ):
                     self.conversation.redis_client.close()
             except Exception as e:
                 logger.warning(f"Error during cleanup: {str(e)}")
@@ -156,11 +172,17 @@ class RedisConversationTester:
         if isinstance(last_message, str):
             try:
                 parsed_content = json.loads(last_message)
-                assert isinstance(parsed_content, dict), "Failed to handle JSON message"
+                assert isinstance(
+                    parsed_content, dict
+                ), "Failed to handle JSON message"
             except json.JSONDecodeError:
-                assert False, "JSON message was not stored as valid JSON"
+                assert (
+                    False
+                ), "JSON message was not stored as valid JSON"
         else:
-            assert isinstance(last_message, dict), "Failed to handle JSON message"
+            assert isinstance(
+                last_message, dict
+            ), "Failed to handle JSON message"
     def test_search(self):
         """Test search functionality."""
@@ -175,7 +197,9 @@ class RedisConversationTester:
         )
         if initial_count > 0:
             self.conversation.delete(0)
-            new_count = len(self.conversation.return_messages_as_list())
+            new_count = len(
+                self.conversation.return_messages_as_list()
+            )
             assert (
                 new_count == initial_count - 1
             ), "Failed to delete message"
@@ -228,7 +252,9 @@ class RedisConversationTester:
         self.conversation.add("user", "token test message")
         time.sleep(1)  # Wait for async token counting
         messages = self.conversation.to_dict()
-        assert isinstance(messages, list), "Token counting test completed"
+        assert isinstance(
+            messages, list
+        ), "Token counting test completed"
     def test_cache_operations(self):
         """Test cache operations."""
@@ -254,7 +280,9 @@ class RedisConversationTester:
         try:
             if not self.setup():
-                logger.warning("Failed to setup Redis connection. This is expected on systems without Redis server.")
+                logger.warning(
+                    "Failed to setup Redis connection. This is expected on systems without Redis server."
+                )
                 # Generate a report indicating the limitation
                 setup_failed_md = [
@@ -265,7 +293,7 @@ class RedisConversationTester:
                     "## Summary",
                     "❌ **Redis Server Setup Failed**",
                     "",
-                    "The Redis conversation class will work properly when a Redis server is available."
+                    "The Redis conversation class will work properly when a Redis server is available.",
                 ]
                 return "\n".join(setup_failed_md)
@@ -304,9 +332,13 @@ def main():
     # Save results to file
     try:
-        with open("redis_test_results.md", "w", encoding="utf-8") as f:
+        with open(
+            "redis_test_results.md", "w", encoding="utf-8"
+        ) as f:
             f.write(markdown_results)
-        logger.info("Test results have been saved to redis_test_results.md")
+        logger.info(
+            "Test results have been saved to redis_test_results.md"
+        )
     except Exception as e:
         logger.error(f"Failed to save test results: {e}")

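The tester hunks above follow an external-then-embedded connection strategy during setup. Stripped of logging, that step amounts to the sketch below; the class and parameter names come from the diff, everything else is illustrative:

from swarms.communication.redis_wrap import RedisConversation


def connect_redis_for_tests() -> RedisConversation:
    """Prefer an already-running Redis server; fall back to the embedded one."""
    try:
        return RedisConversation(
            redis_host="localhost",
            redis_port=6379,
            redis_retry_attempts=1,
            use_embedded_redis=False,  # external server first
        )
    except Exception:
        # No external server reachable; let the wrapper spawn its own.
        return RedisConversation(
            redis_port=6379,
            redis_retry_attempts=3,
            use_embedded_redis=True,
        )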