parent 95001c1f4b
commit d6ef64eb4a
@ -0,0 +1,14 @@
from swarms.structs.heavy_swarm import HeavySwarm

swarm = HeavySwarm(
    worker_model_name="claude-3-5-sonnet-20240620",
    show_dashboard=True,
    question_agent_model_name="gpt-4.1",
    loops_per_agent=1,
)

out = swarm.run(
    "List the top 5 gold and commodity ETFs with the best performance and lowest expense ratios. For each ETF, provide the ticker symbol, full name, current price, 1-year and 5-year returns (in %), and the expense ratio. Also, specify which major brokerages (e.g., Fidelity, Schwab, Vanguard, E*TRADE) offer these ETFs for purchase. Present your findings in a clear, structured table."
)

print(out)
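Review note: running the example above requires credentials for the model providers it references. A minimal sketch, assuming HeavySwarm resolves "claude-3-5-sonnet-20240620" and "gpt-4.1" through the usual Anthropic and OpenAI environment variables (variable names and placeholder values are assumptions, not confirmed by this diff):

# Hypothetical setup for the example; key names/values are placeholders.
import os

os.environ.setdefault("ANTHROPIC_API_KEY", "<your-anthropic-key>")  # worker_model_name
os.environ.setdefault("OPENAI_API_KEY", "<your-openai-key>")        # question_agent_model_name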
@ -1,171 +0,0 @@
import time
import tracemalloc
from functools import wraps
from typing import Any, Callable

import psutil
from pydantic import BaseModel

from swarms.utils.loguru_logger import initialize_logger

logger = initialize_logger(log_folder="calculate_func_metrics")


class FunctionMetrics(BaseModel):
    execution_time: float
    memory_usage: float
    cpu_usage: float
    io_operations: int
    function_calls: int


def profile_func(func):
    """
    Decorator function that profiles the execution of a given function.

    Args:
        func: The function to be profiled.

    Returns:
        A wrapper function that profiles the execution of the given function and returns the result along with the metrics.
    """

    def wrapper(*args, **kwargs):
        # Record the initial time, memory usage, CPU usage, and I/O operations
        start_time = time.time()
        start_mem = psutil.Process().memory_info().rss
        start_cpu = psutil.cpu_percent()
        start_io = (
            psutil.disk_io_counters().read_count
            + psutil.disk_io_counters().write_count
        )

        # Call the function
        result = func(*args, **kwargs)

        # Record the final time, memory usage, CPU usage, and I/O operations
        end_time = time.time()
        end_mem = psutil.Process().memory_info().rss
        end_cpu = psutil.cpu_percent()
        end_io = (
            psutil.disk_io_counters().read_count
            + psutil.disk_io_counters().write_count
        )

        # Calculate the execution time, memory usage, CPU usage, and I/O operations
        execution_time = end_time - start_time
        memory_usage = (end_mem - start_mem) / (
            1024**2
        )  # Convert bytes to MiB
        cpu_usage = end_cpu - start_cpu
        io_operations = end_io - start_io

        # Return the metrics as a FunctionMetrics object
        metrics = FunctionMetrics(
            execution_time=execution_time,
            memory_usage=memory_usage,
            cpu_usage=cpu_usage,
            io_operations=io_operations,
            function_calls=1,  # Each call to the function counts as one function call
        )

        json_data = metrics.model_dump_json(indent=4)

        logger.info(f"Function metrics: {json_data}")

        return result, metrics

    return wrapper


def profile_all(func: Callable) -> Callable:
    """
    A decorator to profile memory usage, CPU usage, and I/O operations
    of a function and log the data using loguru.

    It combines tracemalloc for memory profiling, psutil for CPU and I/O operations,
    and measures execution time.

    Args:
        func (Callable): The function to be profiled.

    Returns:
        Callable: The wrapped function with profiling enabled.
    """

    @wraps(func)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        # Start memory tracking
        tracemalloc.start()

        # Get initial CPU stats
        process = psutil.Process()
        initial_cpu_times = process.cpu_times()

        # Get initial I/O stats if available
        try:
            initial_io_counters = process.io_counters()
            io_tracking_available = True
        except AttributeError:
            logger.warning(
                "I/O counters not available on this platform."
            )
            io_tracking_available = False

        # Start timing the function execution
        start_time = time.time()

        # Execute the function
        result = func(*args, **kwargs)

        # Stop timing
        end_time = time.time()
        execution_time = end_time - start_time

        # Get final CPU stats
        final_cpu_times = process.cpu_times()

        # Get final I/O stats if available
        if io_tracking_available:
            final_io_counters = process.io_counters()
            io_read_count = (
                final_io_counters.read_count
                - initial_io_counters.read_count
            )
            io_write_count = (
                final_io_counters.write_count
                - initial_io_counters.write_count
            )
        else:
            io_read_count = io_write_count = 0

        # Get memory usage statistics
        snapshot = tracemalloc.take_snapshot()
        top_stats = snapshot.statistics("lineno")

        # Calculate CPU usage
        cpu_usage = (
            final_cpu_times.user
            - initial_cpu_times.user
            + final_cpu_times.system
            - initial_cpu_times.system
        )

        # Log the data
        logger.info(f"Execution time: {execution_time:.4f} seconds")
        logger.info(f"CPU usage: {cpu_usage:.2f} seconds")
        if io_tracking_available:
            logger.info(
                f"I/O Operations - Read: {io_read_count}, Write: {io_write_count}"
            )
        logger.info("Top memory usage:")
        for stat in top_stats[:10]:
            logger.info(stat)

        # Stop memory tracking
        tracemalloc.stop()

        return result

    return wrapper
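Review note: this hunk deletes both profiling decorators. A minimal sketch of the API being removed, for context; the decorated functions below are hypothetical, and the behavior shown (profile_func returning a (result, FunctionMetrics) tuple, profile_all returning only the result and logging metrics) is taken from the deleted code above:

# Hypothetical callers illustrating the removed decorators.
@profile_func
def crunch(n: int) -> int:
    return sum(i * i for i in range(n))

result, metrics = crunch(1_000_000)          # wrapper returns (result, FunctionMetrics)
print(metrics.execution_time, metrics.memory_usage)

@profile_all
def crunch_again(n: int) -> int:
    return sum(i * i for i in range(n))

value = crunch_again(1_000_000)              # metrics are logged via loguru; only the result is returned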
@ -1,66 +0,0 @@
import time
from typing import Any, Callable, Type, Union, Tuple
from loguru import logger


def retry_function(
    func: Callable,
    *args: Any,
    max_retries: int = 3,
    delay: float = 1.0,
    backoff_factor: float = 2.0,
    exceptions: Union[
        Type[Exception], Tuple[Type[Exception], ...]
    ] = Exception,
    **kwargs: Any,
) -> Any:
    """
    A function that retries another function if it raises specified exceptions.

    Args:
        func (Callable): The function to retry
        *args: Positional arguments to pass to the function
        max_retries (int): Maximum number of retries before giving up. Defaults to 3.
        delay (float): Initial delay between retries in seconds. Defaults to 1.0.
        backoff_factor (float): Multiplier applied to delay between retries. Defaults to 2.0.
        exceptions (Exception or tuple): Exception(s) that trigger a retry. Defaults to Exception.
        **kwargs: Keyword arguments to pass to the function

    Returns:
        Any: The return value of the function if successful

    Example:
        def fetch_data(url: str) -> dict:
            return requests.get(url).json()

        # Retry the fetch_data function
        result = retry_function(
            fetch_data,
            "https://api.example.com",
            max_retries=3,
            exceptions=(ConnectionError, TimeoutError)
        )
    """
    retries = 0
    current_delay = delay

    while True:
        try:
            return func(*args, **kwargs)
        except exceptions as e:
            retries += 1
            if retries > max_retries:
                logger.error(
                    f"Function {func.__name__} failed after {max_retries} retries. "
                    f"Final error: {str(e)}"
                )
                raise

            logger.warning(
                f"Retry {retries}/{max_retries} for function {func.__name__} "
                f"after error: {str(e)}. "
                f"Waiting {current_delay} seconds..."
            )

            time.sleep(current_delay)
            current_delay *= backoff_factor
@ -0,0 +1,428 @@
"""
Simple workspace management functions for creating files and folders.

Raw utility functions for easy file and folder creation operations.
"""

import json
import yaml
from pathlib import Path
from typing import Optional, Dict, Any


def create_folder(
    folder_name: str, parent_path: Optional[str] = None
) -> Path:
    """
    Create a new folder.

    Args:
        folder_name: Name of the folder to create
        parent_path: Parent directory path. If None, creates in current directory.

    Returns:
        Path object of the created folder
    """
    if parent_path:
        folder_path = Path(parent_path) / folder_name
    else:
        folder_path = Path(folder_name)

    folder_path.mkdir(parents=True, exist_ok=True)
    return folder_path


def file_exists(
    file_name: str, parent_path: Optional[str] = None
) -> bool:
    """
    Check if a file exists.

    Args:
        file_name: Name of the file to check
        parent_path: Parent directory path. If None, checks in current directory.

    Returns:
        True if file exists, False otherwise
    """
    if parent_path:
        file_path = Path(parent_path) / file_name
    else:
        file_path = Path(file_name)

    return file_path.exists() and file_path.is_file()


def update_file(
    file_name: str, content: str, parent_path: Optional[str] = None
) -> Path:
    """
    Update an existing file with new content.

    Args:
        file_name: Name of the file to update
        content: New content to write to the file
        parent_path: Parent directory path. If None, updates in current directory.

    Returns:
        Path object of the updated file

    Raises:
        FileNotFoundError: If file doesn't exist
    """
    if parent_path:
        file_path = Path(parent_path) / file_name
    else:
        file_path = Path(file_name)

    if not file_path.exists():
        raise FileNotFoundError(f"File {file_path} does not exist")

    file_path.write_text(content, encoding="utf-8")
    return file_path


def create_or_update_file(
    file_name: str,
    content: str = "",
    parent_path: Optional[str] = None,
) -> Path:
    """
    Create a new file or update existing file with content.

    Args:
        file_name: Name of the file to create or update
        content: Content to write to the file
        parent_path: Parent directory path. If None, creates/updates in current directory.

    Returns:
        Path object of the created or updated file
    """
    if parent_path:
        file_path = Path(parent_path) / file_name
    else:
        file_path = Path(file_name)

    file_path.parent.mkdir(parents=True, exist_ok=True)
    file_path.write_text(content, encoding="utf-8")
    return file_path


def create_file(
    file_name: str,
    content: str = "",
    parent_path: Optional[str] = None,
) -> Path:
    """
    Create a new file with content.

    Args:
        file_name: Name of the file to create
        content: Content to write to the file
        parent_path: Parent directory path. If None, creates in current directory.

    Returns:
        Path object of the created file
    """
    if parent_path:
        file_path = Path(parent_path) / file_name
    else:
        file_path = Path(file_name)

    if file_path.exists():
        raise FileExistsError(
            f"File {file_path} already exists. Use create_or_update_file() to update existing files."
        )

    file_path.parent.mkdir(parents=True, exist_ok=True)
    file_path.write_text(content, encoding="utf-8")
    return file_path


def create_python_file(
    file_name: str,
    content: str = "",
    parent_path: Optional[str] = None,
) -> Path:
    """
    Create a Python file with content.

    Args:
        file_name: Name of the file (with or without .py extension)
        content: Python code content
        parent_path: Parent directory path

    Returns:
        Path object of the created Python file
    """
    if not file_name.endswith(".py"):
        file_name += ".py"
    return create_file(file_name, content, parent_path)


def create_or_update_python_file(
    file_name: str,
    content: str = "",
    parent_path: Optional[str] = None,
) -> Path:
    """
    Create a Python file or update existing Python file with content.

    Args:
        file_name: Name of the file (with or without .py extension)
        content: Python code content
        parent_path: Parent directory path

    Returns:
        Path object of the created or updated Python file
    """
    if not file_name.endswith(".py"):
        file_name += ".py"
    return create_or_update_file(file_name, content, parent_path)


def create_json_file(
    file_name: str,
    data: Dict[str, Any],
    parent_path: Optional[str] = None,
) -> Path:
    """
    Create a JSON file with data.

    Args:
        file_name: Name of the file (with or without .json extension)
        data: Dictionary data to serialize to JSON
        parent_path: Parent directory path

    Returns:
        Path object of the created JSON file
    """
    if not file_name.endswith(".json"):
        file_name += ".json"
    content = json.dumps(data, indent=2, ensure_ascii=False)
    return create_file(file_name, content, parent_path)


def create_or_update_json_file(
    file_name: str,
    data: Dict[str, Any],
    parent_path: Optional[str] = None,
) -> Path:
    """
    Create a JSON file or update existing JSON file with data.

    Args:
        file_name: Name of the file (with or without .json extension)
        data: Dictionary data to serialize to JSON
        parent_path: Parent directory path

    Returns:
        Path object of the created or updated JSON file
    """
    if not file_name.endswith(".json"):
        file_name += ".json"
    content = json.dumps(data, indent=2, ensure_ascii=False)
    return create_or_update_file(file_name, content, parent_path)


def create_yaml_file(
    file_name: str,
    data: Dict[str, Any],
    parent_path: Optional[str] = None,
) -> Path:
    """
    Create a YAML file with data.

    Args:
        file_name: Name of the file (with or without .yaml/.yml extension)
        data: Dictionary data to serialize to YAML
        parent_path: Parent directory path

    Returns:
        Path object of the created YAML file
    """
    if not (
        file_name.endswith(".yaml") or file_name.endswith(".yml")
    ):
        file_name += ".yaml"
    content = yaml.dump(
        data, default_flow_style=False, allow_unicode=True
    )
    return create_file(file_name, content, parent_path)


def create_or_update_yaml_file(
    file_name: str,
    data: Dict[str, Any],
    parent_path: Optional[str] = None,
) -> Path:
    """
    Create a YAML file or update existing YAML file with data.

    Args:
        file_name: Name of the file (with or without .yaml/.yml extension)
        data: Dictionary data to serialize to YAML
        parent_path: Parent directory path

    Returns:
        Path object of the created or updated YAML file
    """
    if not (
        file_name.endswith(".yaml") or file_name.endswith(".yml")
    ):
        file_name += ".yaml"
    content = yaml.dump(
        data, default_flow_style=False, allow_unicode=True
    )
    return create_or_update_file(file_name, content, parent_path)


def create_markdown_file(
    file_name: str,
    content: str = "",
    parent_path: Optional[str] = None,
) -> Path:
    """
    Create a Markdown file with content.

    Args:
        file_name: Name of the file (with or without .md extension)
        content: Markdown content
        parent_path: Parent directory path

    Returns:
        Path object of the created Markdown file
    """
    if not file_name.endswith(".md"):
        file_name += ".md"
    return create_file(file_name, content, parent_path)


def create_or_update_markdown_file(
    file_name: str,
    content: str = "",
    parent_path: Optional[str] = None,
) -> Path:
    """
    Create a Markdown file or update existing Markdown file with content.

    Args:
        file_name: Name of the file (with or without .md extension)
        content: Markdown content
        parent_path: Parent directory path

    Returns:
        Path object of the created or updated Markdown file
    """
    if not file_name.endswith(".md"):
        file_name += ".md"
    return create_or_update_file(file_name, content, parent_path)


def create_text_file(
    file_name: str,
    content: str = "",
    parent_path: Optional[str] = None,
) -> Path:
    """
    Create a text file with content.

    Args:
        file_name: Name of the file (with or without .txt extension)
        content: Text content
        parent_path: Parent directory path

    Returns:
        Path object of the created text file
    """
    if not file_name.endswith(".txt"):
        file_name += ".txt"
    return create_file(file_name, content, parent_path)


def create_or_update_text_file(
    file_name: str,
    content: str = "",
    parent_path: Optional[str] = None,
) -> Path:
    """
    Create a text file or update existing text file with content.

    Args:
        file_name: Name of the file (with or without .txt extension)
        content: Text content
        parent_path: Parent directory path

    Returns:
        Path object of the created or updated text file
    """
    if not file_name.endswith(".txt"):
        file_name += ".txt"
    return create_or_update_file(file_name, content, parent_path)


def create_empty_file(
    file_name: str, parent_path: Optional[str] = None
) -> Path:
    """
    Create an empty file.

    Args:
        file_name: Name of the file
        parent_path: Parent directory path

    Returns:
        Path object of the created empty file
    """
    return create_file(file_name, "", parent_path)


def create_project_structure(
    structure: Dict[str, Any], parent_path: Optional[str] = None
) -> Dict[str, Path]:
    """
    Create a nested project structure from a dictionary.

    Args:
        structure: Dictionary defining the project structure
        parent_path: Parent directory path

    Returns:
        Dictionary mapping structure keys to created Path objects

    Example:
        structure = {
            "src": {
                "main.py": "print('Hello World')",
                "utils": {
                    "__init__.py": "",
                    "helper.py": "def helper(): pass"
                }
            },
            "tests": {
                "test_main.py": "import unittest"
            },
            "README.md": "# My Project"
        }
    """
    created_paths = {}
    base_path = Path(parent_path) if parent_path else Path.cwd()

    def _create_structure(structure_dict, current_path):
        for key, value in structure_dict.items():
            item_path = current_path / key

            if isinstance(value, dict):
                # It's a folder
                item_path.mkdir(parents=True, exist_ok=True)
                created_paths[key] = item_path
                _create_structure(value, item_path)
            else:
                # It's a file
                content = str(value) if value is not None else ""
                item_path.parent.mkdir(parents=True, exist_ok=True)
                item_path.write_text(content, encoding="utf-8")
                created_paths[key] = item_path

    _create_structure(structure, base_path)
    return created_paths
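Review note: a short usage sketch tying the new helpers together, based only on the functions added in this hunk; the directory name and file contents are made up for illustration:

# Hypothetical example: scaffold a small workspace with the new helpers.
paths = create_project_structure(
    {
        "src": {"__init__.py": "", "app.py": "print('hello')"},
        "README.md": "# Demo",
    },
    parent_path="demo_workspace",
)

create_json_file("config", {"debug": True}, parent_path="demo_workspace")          # writes demo_workspace/config.json
create_or_update_yaml_file("settings", {"retries": 3}, parent_path="demo_workspace")  # writes demo_workspace/settings.yaml

print(file_exists("README.md", parent_path="demo_workspace"))  # True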