code formatting cleanup operation

pull/1166/head
Kye Gomez 1 week ago
parent 916fcd5ae3
commit ac2dae1885

1
.gitignore vendored

@ -16,6 +16,7 @@ databases
static/generated
conversations/
next_swarms_update.txt
.pytest_cache
infra.md
runs
Financial-Analysis-Agent_state.json

@ -19,4 +19,5 @@ out = agent.run(
task="What are the top five best energy stocks across nuclear, solar, gas, and other energy sources?",
n=1,
)
print(json.dumps(out, indent=4))

@ -0,0 +1,10 @@
from swarms.utils import LiteLLM, NetworkConnectionError

# Minimal example: run a cloud model and surface connectivity problems.
llm = LiteLLM(model_name="gpt-4o-mini")

try:
    result = llm.run(task="Your task here")
    print(result)
except NetworkConnectionError as e:
    # Raised by the wrapper when the API endpoint cannot be reached.
    print(f"Network issue: {e}")
    print("Trying to use local model")

@ -0,0 +1,149 @@
"""
Example demonstrating network error handling in LiteLLM wrapper.
This example shows how the LiteLLM wrapper handles network connectivity issues
and provides helpful error messages to guide users to use local models like Ollama
when internet connection is unavailable.
"""
from swarms.utils import LiteLLM, NetworkConnectionError
def example_with_network_handling():
    """
    Run a cloud model and fall back to a local Ollama model on network failure.

    Demonstrates catching NetworkConnectionError from LiteLLM.run and
    retrying the same task against a locally hosted model.
    """
    task = "Explain the concept of quantum entanglement in simple terms."

    # Cloud-hosted model — requires internet connectivity.
    cloud_model = LiteLLM(
        model_name="gpt-4o-mini",
        temperature=0.7,
        max_tokens=1000,
    )

    try:
        print(f"Response: {cloud_model.run(task=task)}")
        return
    except NetworkConnectionError as e:
        print(f"Network error detected: {e}")
        print("\nFalling back to local model...")

    # Fallback: a locally served Ollama model with the same settings.
    local_model = LiteLLM(
        model_name="ollama/llama2",
        temperature=0.7,
        max_tokens=1000,
    )
    try:
        print(f"Local model response: {local_model.run(task=task)}")
    except Exception as local_error:
        # The local server may be missing or stopped — give setup hints.
        print(f"Local model error: {local_error}")
        print("\nMake sure Ollama is installed and running:")
        print("1. Install: https://ollama.ai")
        print("2. Run: ollama pull llama2")
        print("3. Start the server if not running")
def example_check_internet_connection():
    """
    Pick a cloud or local model based on a manual connectivity check.

    Uses the LiteLLM.check_internet_connection static method to decide
    which backend to instantiate before making any API calls.
    """
    online = LiteLLM.check_internet_connection()

    if online:
        print("✓ Internet connection available")
        model = LiteLLM(model_name="gpt-4o-mini")
    else:
        print("✗ No internet connection detected")
        print("Using local Ollama model instead...")
        model = LiteLLM(model_name="ollama/llama2")

    # Run the selected model; connectivity can still drop mid-call.
    try:
        answer = model.run(task="What is the meaning of life?")
        print(f"Response: {answer}")
    except NetworkConnectionError as e:
        print(f"Error: {e}")
def example_is_local_model():
    """
    Classify a set of model names as local or cloud-hosted.

    Demonstrates the LiteLLM.is_local_model static helper on a mix of
    cloud provider names and local-server model identifiers.
    """
    candidates = (
        "gpt-4o-mini",
        "ollama/llama2",
        "anthropic/claude-3",
        "ollama/mistral",
        "local/custom-model",
    )

    for name in candidates:
        label = "Local" if LiteLLM.is_local_model(name) else "Cloud"
        print(f"{name}: {label}")
def example_with_custom_base_url():
    """
    Run a local Ollama model through an explicitly configured base URL.

    Shows how to point LiteLLM at a self-hosted server and what to check
    when the connection fails.
    """
    # Ollama's default HTTP endpoint on the local machine.
    ollama = LiteLLM(
        model_name="ollama/llama2",
        base_url="http://localhost:11434",
        temperature=0.7,
    )

    try:
        haiku = ollama.run(task="Write a haiku about programming.")
        print(f"Response: {haiku}")
    except NetworkConnectionError as e:
        print(f"Connection error: {e}")
        print("\nTroubleshooting:")
        print("- Ensure Ollama is running on localhost:11434")
        print("- Check if the model is loaded: ollama list")
        print("- Try: ollama serve")
def _run_example(index, title, func):
    """Print a numbered banner for one example, then execute it.

    Args:
        index (int): 1-based example number; examples after the first are
            preceded by a blank line, matching the original output exactly.
        title (str): Human-readable example title for the banner.
        func (callable): Zero-argument example function to invoke.
    """
    rule = "=" * 70
    # The first banner has no leading newline; later ones separate sections.
    print(rule if index == 1 else "\n" + rule)
    print(f"Example {index}: {title}")
    print(rule)
    func()


if __name__ == "__main__":
    _run_example(
        1,
        "Network Error Handling with Fallback",
        example_with_network_handling,
    )
    _run_example(
        2,
        "Manual Internet Connection Check",
        example_check_internet_connection,
    )
    _run_example(3, "Check if Model is Local", example_is_local_model)
    _run_example(4, "Custom Base URL", example_with_custom_base_url)

@ -5,7 +5,7 @@ build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "swarms"
version = "8.5.3"
version = "8.5.4"
description = "Swarms - TGSC"
license = "MIT"
authors = ["Kye Gomez <kye@swarms.world>"]

@ -26,6 +26,11 @@ from swarms.utils.history_output_formatter import (
history_output_formatter,
)
from swarms.utils.litellm_tokenizer import count_tokens
from swarms.utils.litellm_wrapper import (
LiteLLM,
NetworkConnectionError,
LiteLLMException,
)
from swarms.utils.output_types import HistoryOutputType
from swarms.utils.parse_code import extract_code_from_markdown
from swarms.utils.pdf_to_text import pdf_to_text
@ -52,4 +57,7 @@ __all__ = [
"load_agents_from_markdown",
"dynamic_auto_chunking",
"MarkdownAgentLoader",
"LiteLLM",
"NetworkConnectionError",
"LiteLLMException",
]

@ -4,6 +4,7 @@ import traceback
import uuid
from pathlib import Path
from typing import List, Optional
import socket
import litellm
from pydantic import BaseModel
@ -18,6 +19,12 @@ class LiteLLMException(Exception):
"""
class NetworkConnectionError(Exception):
    """Raised when a network connectivity problem is detected.

    The message attached by callers typically includes troubleshooting
    guidance (e.g. suggesting a local Ollama model as a fallback).
    """
def get_audio_base64(audio_source: str) -> str:
"""
Convert audio data from a URL or local file path to a base64-encoded string.
@ -875,6 +882,69 @@ class LiteLLM:
else:
return False
@staticmethod
def check_internet_connection(
host: str = "8.8.8.8", port: int = 53, timeout: int = 3
) -> bool:
"""
Check if there is an active internet connection.
This method attempts to establish a socket connection to a DNS server
(default is Google's DNS at 8.8.8.8) to verify internet connectivity.
Args:
host (str, optional): The host to connect to for checking connectivity.
Defaults to "8.8.8.8" (Google DNS).
port (int, optional): The port to use for the connection. Defaults to 53 (DNS).
timeout (int, optional): Connection timeout in seconds. Defaults to 3.
Returns:
bool: True if internet connection is available, False otherwise.
"""
try:
socket.setdefaulttimeout(timeout)
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect(
(host, port)
)
return True
except (socket.error, socket.timeout):
return False
@staticmethod
def is_local_model(
model_name: str, base_url: Optional[str] = None
) -> bool:
"""
Determine if the model is a local model (e.g., Ollama, LlamaCPP).
Args:
model_name (str): The name of the model to check.
base_url (str, optional): The base URL if specified. Defaults to None.
Returns:
bool: True if the model is a local model, False otherwise.
"""
local_indicators = [
"ollama",
"llama-cpp",
"local",
"localhost",
"127.0.0.1",
"custom",
]
model_lower = model_name.lower()
is_local_model = any(
indicator in model_lower for indicator in local_indicators
)
is_local_url = base_url is not None and any(
indicator in base_url.lower()
for indicator in local_indicators
)
return is_local_model or is_local_url
def run(
self,
task: str,
@ -1024,10 +1094,74 @@ class LiteLLM:
else:
return response.choices[0].message.content
except (
requests.exceptions.ConnectionError,
requests.exceptions.Timeout,
requests.exceptions.RequestException,
ConnectionError,
TimeoutError,
) as network_error:
# Check if this is a local model
if self.is_local_model(self.model_name, self.base_url):
error_msg = (
f"Network error connecting to local model '{self.model_name}': {str(network_error)}\n\n"
"Troubleshooting steps:\n"
"1. Ensure your local model server (e.g., Ollama, LlamaCPP) is running\n"
"2. Verify the base_url is correct and accessible\n"
"3. Check that the model is properly loaded and available\n"
)
logger.error(error_msg)
raise NetworkConnectionError(
error_msg
) from network_error
# Check internet connectivity
has_internet = self.check_internet_connection()
if not has_internet:
error_msg = (
f"No internet connection detected while trying to use model '{self.model_name}'.\n\n"
"Possible solutions:\n"
"1. Check your internet connection and try again\n"
"2. Reconnect to your network\n"
"3. Use a local model instead (e.g., Ollama):\n"
" - Install Ollama from https://ollama.ai\n"
" - Run: ollama pull llama2\n"
" - Use model_name='ollama/llama2' in your LiteLLM configuration\n"
"\nExample:\n"
" model = LiteLLM(model_name='ollama/llama2')\n"
)
logger.error(error_msg)
raise NetworkConnectionError(
error_msg
) from network_error
else:
# Internet is available but request failed
error_msg = (
f"Network error occurred while connecting to '{self.model_name}': {str(network_error)}\n\n"
"Possible causes:\n"
"1. The API endpoint may be temporarily unavailable\n"
"2. Connection timeout or slow network\n"
"3. Firewall or proxy blocking the connection\n"
"\nConsider using a local model as a fallback:\n"
" model = LiteLLM(model_name='ollama/llama2')\n"
)
logger.error(error_msg)
raise NetworkConnectionError(
error_msg
) from network_error
except LiteLLMException as error:
logger.error(
f"Error in LiteLLM run: {str(error)} Traceback: {traceback.format_exc()}"
)
raise
except Exception as error:
logger.error(
f"Unexpected error in LiteLLM run: {str(error)} Traceback: {traceback.format_exc()}"
)
raise
def __call__(self, task: str, *args, **kwargs):
"""

@ -137,7 +137,7 @@ def test_board_of_directors_swarm_error_handling():
"""Test BoardOfDirectorsSwarm error handling and validation"""
# Test with empty agents list
try:
board_swarm = BoardOfDirectorsSwarm(agents=[])
BoardOfDirectorsSwarm(agents=[])
assert (
False
), "Should have raised ValueError for empty agents list"
@ -153,7 +153,7 @@ def test_board_of_directors_swarm_error_handling():
)
try:
board_swarm = BoardOfDirectorsSwarm(
BoardOfDirectorsSwarm(
agents=[analyst], max_loops=0
)
assert (

@ -139,7 +139,7 @@ def test_concurrent_workflow_error_handling():
"""Test ConcurrentWorkflow error handling and validation"""
# Test with empty agents list
try:
workflow = ConcurrentWorkflow(agents=[])
ConcurrentWorkflow(agents=[])
assert (
False
), "Should have raised ValueError for empty agents list"
@ -148,7 +148,7 @@ def test_concurrent_workflow_error_handling():
# Test with None agents
try:
workflow = ConcurrentWorkflow(agents=None)
ConcurrentWorkflow(agents=None)
assert False, "Should have raised ValueError for None agents"
except ValueError as e:
assert "No agents provided" in str(e)

@ -184,7 +184,7 @@ def test_hierarchical_swarm_error_handling():
"""Test HierarchicalSwarm error handling"""
# Test with empty agents list
try:
swarm = HierarchicalSwarm(agents=[])
HierarchicalSwarm(agents=[])
assert (
False
), "Should have raised ValueError for empty agents list"
@ -200,7 +200,7 @@ def test_hierarchical_swarm_error_handling():
)
try:
swarm = HierarchicalSwarm(agents=[researcher], max_loops=0)
HierarchicalSwarm(agents=[researcher], max_loops=0)
assert (
False
), "Should have raised ValueError for invalid max_loops"

@ -151,7 +151,7 @@ def test_majority_voting_error_handling():
"""Test MajorityVoting error handling and validation"""
# Test with empty agents list
try:
mv = MajorityVoting(agents=[])
MajorityVoting(agents=[])
assert (
False
), "Should have raised ValueError for empty agents list"
@ -167,7 +167,7 @@ def test_majority_voting_error_handling():
)
try:
mv = MajorityVoting(agents=[analyst], max_loops=0)
MajorityVoting(agents=[analyst], max_loops=0)
assert (
False
), "Should have raised ValueError for invalid max_loops"

@ -170,7 +170,7 @@ def test_mixture_of_agents_error_handling():
"""Test MixtureOfAgents error handling and validation"""
# Test with empty agents list
try:
moa = MixtureOfAgents(agents=[])
MixtureOfAgents(agents=[])
assert (
False
), "Should have raised ValueError for empty agents list"
@ -186,7 +186,7 @@ def test_mixture_of_agents_error_handling():
)
try:
moa = MixtureOfAgents(
MixtureOfAgents(
agents=[analyst], aggregator_system_prompt=""
)
assert (

File diff suppressed because it is too large Load Diff

@ -127,7 +127,6 @@ def test_initialization_with_agent_rearrange_flow(sample_agents):
assert router.rearrange_flow == flow
def test_invalid_swarm_type():
"""Test error when invalid swarm type is provided."""
with pytest.raises(ValueError):
@ -639,7 +638,6 @@ def test_handle_rules(sample_agents):
)
def test_update_system_prompt_for_agent_in_swarm(sample_agents):
"""Test update_system_prompt_for_agent_in_swarm method."""
router = SwarmRouter(
@ -893,6 +891,5 @@ def test_swarm_router_config_model():
assert config.multi_agent_collab_prompt is True
if __name__ == "__main__":
pytest.main([__file__, "-v"])

Loading…
Cancel
Save