multi platform spreadsheet_swarm, deps build clusterops from github source

pull/700/head
Patrick Devaney 2 weeks ago
parent ed0623d7b3
commit c9f69d5fef

@@ -73,7 +73,6 @@ docstring_parser = "0.16" # TODO:
 tiktoken = "*"
 networkx = "*"
 aiofiles = "*"
-clusterops = "*"
 # chromadb = "*"
 reportlab = "*"
 doc-master = "*"
@@ -81,6 +80,7 @@ rich = "*"
 # sentence-transformers = "*"
 swarm-models = "*"
 termcolor = "*"
+clusterops = { git = "https://github.com/patrickbdevaney/clusterops.git", branch = "main" }
 # [tool.poetry.extras]
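The hunk above drops the wildcard PyPI pin and builds clusterops from the patrickbdevaney fork on GitHub instead. A minimal sketch (not part of this commit) of how the import could be guarded so the swarm still loads on platforms where that git-sourced package is not installed:

# Illustrative sketch, not from the diff: guard the clusterops imports so the
# swarm degrades gracefully when the git-sourced package is absent.
try:
    from clusterops import (
        execute_on_gpu,
        execute_with_cpu_cores,
        execute_on_multiple_gpus,
        list_available_gpus,
    )
    CLUSTEROPS_AVAILABLE = True
except ImportError:
    # Fallback: callers can check this flag and run agents in plain threads
    # instead of dispatching through clusterops.
    CLUSTEROPS_AVAILABLE = False

Callers would then branch on CLUSTEROPS_AVAILABLE before routing work to GPU or CPU-core execution.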

@@ -4,26 +4,30 @@ import uuid
 from concurrent.futures import ThreadPoolExecutor
 from datetime import datetime
 from typing import Any, Dict, List, Optional, Union
+import concurrent
 from pydantic import BaseModel, Field
 from tenacity import retry, stop_after_attempt, wait_exponential
-from swarms.structs.agent import Agent
-from swarms.structs.base_swarm import BaseSwarm
-from swarms.utils.file_processing import create_file_in_folder
-import concurrent
 from clusterops import (
     execute_on_gpu,
     execute_with_cpu_cores,
     execute_on_multiple_gpus,
     list_available_gpus,
 )
+from swarms.structs.agent import Agent
+from swarms.structs.base_swarm import BaseSwarm
+from swarms.utils.file_processing import create_file_in_folder
 from swarms.utils.loguru_logger import initialize_logger
 from swarms.structs.swarm_id_generator import generate_swarm_id
 logger = initialize_logger(log_folder="concurrent_workflow")
 class AgentOutputSchema(BaseModel):
     run_id: Optional[str] = Field(
         ..., description="Unique ID for the run"
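The reordered imports above keep the clusterops GPU/CPU dispatch helpers available to the workflow. A hedged sketch of the multi-platform dispatch those helpers make possible; the call signatures shown are assumptions and are not verified against the pinned fork:

from typing import Any, List

from clusterops import (  # assumed importable from the git-sourced package
    execute_on_gpu,
    execute_with_cpu_cores,
    list_available_gpus,
)

def run_agents_multi_platform(agents: List[Any], task: str) -> List[Any]:
    """Dispatch each agent.run(task) to a GPU when one is visible, else to CPU cores."""
    gpus = list_available_gpus()  # assumption: returns ids of visible GPUs
    results = []
    for index, agent in enumerate(agents):
        if gpus:
            # Assumed signature: execute_on_gpu(gpu_id, fn, *args)
            results.append(execute_on_gpu(gpus[index % len(gpus)], agent.run, task))
        else:
            # Assumed signature: execute_with_cpu_cores(core_count, fn, *args)
            results.append(execute_with_cpu_cores(1, agent.run, task))
    return results

On a CPU-only host, list_available_gpus() is assumed to return an empty list, so every agent falls through to the core-pinned path.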

@@ -1,17 +1,14 @@
 import os
 from datetime import datetime
 from uuid import uuid4
 # Import necessary classes from your swarm module
 from swarms.structs.agent import Agent
 from swarms.structs.base_swarm import BaseSwarm
 from swarms.telemetry.capture_sys_data import log_agent_data
 from swarms.utils.file_processing import create_file_in_folder
 from swarms import SpreadSheetSwarm
 # Ensure you have an environment variable or default workspace dir
 workspace_dir = os.getenv("WORKSPACE_DIR", "./workspace")
 def create_agents(num_agents: int):
     """
     Create a list of agent instances.
@@ -27,14 +24,11 @@ def create_agents(num_agents: int):
         agent_name = f"Agent-{i + 1}"
         agents.append(Agent(agent_name=agent_name))
     return agents
 def main():
     # Number of agents to create
     num_agents = 5
     # Create the agents
     agents = create_agents(num_agents)
     # Initialize the swarm with agents and other configurations
     swarm = SpreadSheetSwarm(
         name="Test-Swarm",
@@ -44,35 +38,28 @@ def main():
         max_loops=2,
         workspace_dir=workspace_dir
     )
     # Run a sample task in the swarm (synchronously)
     task = "process_data"
     # Ensure the run method is synchronous
     swarm_metadata = swarm.run(task)  # Assuming this is made synchronous
     # Print swarm metadata after task completion
     print("Swarm Metadata:")
     print(swarm_metadata)
     # Check if CSV file has been created and saved
     if os.path.exists(swarm.save_file_path):
         print(f"Metadata saved to: {swarm.save_file_path}")
     else:
         print(f"Metadata not saved correctly. Check the save path.")
     # Test saving metadata to JSON file
     swarm.data_to_json_file()
     # Test exporting metadata to JSON
     swarm_json = swarm.export_to_json()
     print("Exported JSON metadata:")
     print(swarm_json)
-    # Log agent data
+    # Log agent data (without ClusterOps imports)
     print("Logging agent data:")
     print(log_agent_data(swarm.metadata.model_dump()))
 # Run the synchronous main function
 if __name__ == "__main__":
     main()
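Beyond printing the export, the example could assert that the run actually produced usable artifacts. A small sketch, not in the commit, assuming export_to_json() returns a JSON string and save_file_path points at the autosaved CSV:

import json
import os

def verify_swarm_output(swarm) -> bool:
    """Return True when the autosaved CSV exists and the JSON export parses."""
    csv_ok = os.path.exists(swarm.save_file_path)
    try:
        json.loads(swarm.export_to_json())  # assumption: returns a JSON string
        json_ok = True
    except (TypeError, ValueError):
        json_ok = False
    return csv_ok and json_ok

In main() this could replace the manual print checks with a single print("Artifacts OK:", verify_swarm_output(swarm)).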

@@ -0,0 +1,65 @@
+import os
+from datetime import datetime
+from uuid import uuid4
+# Import necessary classes from your swarm module
+from swarms.structs.agent import Agent
+from swarms.structs.base_swarm import BaseSwarm
+from swarms.telemetry.capture_sys_data import log_agent_data
+from swarms.utils.file_processing import create_file_in_folder
+from swarms import SpreadSheetSwarm
+# Ensure you have an environment variable or default workspace dir
+workspace_dir = os.getenv("WORKSPACE_DIR", "./workspace")
+def create_agents(num_agents: int):
+    """
+    Create a list of agent instances.
+    Args:
+        num_agents (int): The number of agents to create.
+    Returns:
+        List[Agent]: List of created Agent objects.
+    """
+    agents = []
+    for i in range(num_agents):
+        agent_name = f"Agent-{i + 1}"
+        agents.append(Agent(agent_name=agent_name))
+    return agents
+def main():
+    # Number of agents to create
+    num_agents = 5
+    # Create the agents
+    agents = create_agents(num_agents)
+    # Initialize the swarm with agents and other configurations
+    swarm = SpreadSheetSwarm(
+        name="Test-Swarm",
+        description="A swarm for testing purposes.",
+        agents=agents,
+        autosave_on=True,
+        max_loops=2,
+        workspace_dir=workspace_dir
+    )
+    # Run a sample task in the swarm (synchronously)
+    task = "process_data"
+    # Ensure the run method is synchronous
+    swarm_metadata = swarm.run(task)  # Assuming this is made synchronous
+    # Print swarm metadata after task completion
+    print("Swarm Metadata:")
+    print(swarm_metadata)
+    # Check if CSV file has been created and saved
+    if os.path.exists(swarm.save_file_path):
+        print(f"Metadata saved to: {swarm.save_file_path}")
+    else:
+        print(f"Metadata not saved correctly. Check the save path.")
+    # Test saving metadata to JSON file
+    swarm.data_to_json_file()
+    # Test exporting metadata to JSON
+    swarm_json = swarm.export_to_json()
+    print("Exported JSON metadata:")
+    print(swarm_json)
+    # Log agent data
+    print("Logging agent data:")
+    print(log_agent_data(swarm.metadata.model_dump()))
+# Run the synchronous main function
+if __name__ == "__main__":
+    main()