pull/10/head
Kye 1 year ago
parent 78e755d45d
commit c3ae804f9f

@@ -1,5 +1,5 @@
 # from swarms import Swarms, swarm
 from swarms.swarms import HierarchicalSwarm, swarm
-# from swarms.workers.worker_ultra_node import WorkerUltraNode, WorkerUltra, worker_ultra_node
+# from swarms.workers.worker_ultra_node import WorkerUltraNode, WorkerUltra
 from swarms.workers.worker_node import WorkerNode, worker_node
 from swarms.boss.boss_node import BossNode

@@ -16,83 +16,93 @@ from swarms.agents.models.hf import HuggingFaceLLM
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
 class AgentNodeInitializer:
-    """Useful for when you need to spawn an autonomous agent instance as a agent to accomplish complex tasks, it can search the internet or spawn child multi-modality models to process and generate images and text or audio and so on"""
+    """Useful for spawning autonomous agent instances to accomplish complex tasks."""
     def __init__(self,
-                 llm,
-                 tools,
-                 vectorstore,
-                 temperature,
-                 model_type: str=None,
-                 human_in_the_loop=True,
-                 model_id: str = None,
+                 llm: Optional[Any] = None,
+                 tools: Optional[List[BaseTool]] = None,
+                 vectorstore: Optional[List[Any]] = None,
+                 temperature: float = 0.5,
+                 model_type: Optional[str] = None,
+                 human_in_the_loop: bool = True,
+                 model_id: Optional[str] = None,
                  embedding_size: int = 8192,
-                 system_prompt: str = None,
-                 max_iterations: int = None):
+                 system_prompt: Optional[str] = None,
+                 max_iterations: Optional[int] = None,
+                 agent_name: Optional[str] = None,
+                 agent_role: Optional[str] = None,
+                 verbose: bool = False,
+                 openai_api_key: Optional[str] = None):
-        if not llm or not tools or not vectorstore:
-            logging.error("llm, tools, and vectorstore cannot be None.")
-            raise ValueError("llm, tools, and vectorstore cannot be None.")
-        self.llm = llm
-        self.tools = tools
-        self.vectorstore = vectorstore
+        if not openai_api_key and (model_type is None or model_type.lower() == 'openai'):
+            raise ValueError("OpenAI API key cannot be None when model_type is 'openai'")
+        self.llm = llm or self.initialize_llm(model_type, model_id, openai_api_key, temperature)
+        self.tools = tools or []
+        self.vectorstore = vectorstore or []
+        self.agent = None
         self.temperature = temperature
         self.model_type = model_type
         self.human_in_the_loop = human_in_the_loop
-        self.prompt = prompt
         self.model_id = model_id
         self.embedding_size = embedding_size
-        self.system_prompt system_prompt
+        self.system_prompt = system_prompt
+        self.agent_name = agent_name
+        self.agent_role = agent_role
+        self.verbose = verbose
+        self.openai_api_key = openai_api_key
+        self.agent = None
+        self.initialize_agent()
+    def initialize_llm(self, model_type: str, model_id: str, openai_api_key: str, temperature: float):
+        try:
+            if model_type.lower() == 'openai':
+                return ChatOpenAI(openai_api_key=openai_api_key, temperature=temperature)
+            elif model_type.lower() == 'huggingface':
+                return HuggingFaceLLM(model_id=model_id, temperature=temperature)
+            else:
+                raise ValueError("Invalid model_type. It should be either 'openai' or 'huggingface'")
+        except Exception as e:
+            logger.error(f"Failed to initialize language model: {e}")
+            raise e
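A minimal usage sketch of the new constructor (not part of this commit): the tool list, the vectorstore, and the key below are placeholders, and the vectorstore is assumed to be a FAISS store since initialize_agent() in the next hunk still calls vectorstore.as_retriever().

    # Hypothetical example; AgentNodeInitializer is assumed importable from this module.
    node = AgentNodeInitializer(
        tools=my_tools,                # assumed: list of BaseTool instances built elsewhere
        vectorstore=my_vectorstore,    # assumed: FAISS store exposing .as_retriever()
        model_type="openai",
        openai_api_key="sk-...",       # placeholder key
        agent_name="Swarm Agent AI Assistant",
        agent_role="Assistant",
        verbose=True,
    )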
-    def create_agent(self, ai_name="Swarm Agent AI Assistant", ai_role="Assistant", human_in_the_loop=True, search_kwargs={}, verbose=False):
-        logging.info("Creating agent in AgentNode")
+    def initialize_agent(self):
         try:
             self.agent = AutoGPT.from_llm_and_tools(
-                ai_name=ai_name,
-                ai_role=ai_role,
+                ai_name=self.agent_name,
+                ai_role=self.agent_role,
                 tools=self.tools,
                 llm=self.llm,
-                memory=self.vectorstore.as_retriever(search_kwargs=search_kwargs),
-                human_in_the_loop=human_in_the_loop,
+                memory=self.vectorstore.as_retriever(search_kwargs={}),
+                human_in_the_loop=self.human_in_the_loop,
                 chat_history_memory=FileChatMessageHistory("chat_history.txt"),
+                verbose=self.verbose,
             )
-            # self.agent.chain.verbose = verbose
         except Exception as e:
-            logging.error(f"Error while creating agent: {str(e)}")
+            logger.error(f"Error while creating agent: {str(e)}")
             raise e
-    def add_tool(self, tool: Tool):
-        if not isinstance(tool, Tool):
-            logging.error("Tool must be an instance of Tool.")
-            raise TypeError("Tool must be an instance of Tool.")
+    def add_tool(self, tool: BaseTool):
+        if not isinstance(tool, BaseTool):
+            logger.error("Tool must be an instance of BaseTool.")
+            raise TypeError("Tool must be an instance of BaseTool.")
         self.tools.append(tool)
     def run(self, prompt: str) -> str:
-        if not isinstance(prompt, str):
-            logging.error("Prompt must be a string.")
-            raise TypeError("Prompt must be a string.")
         if not prompt:
-            logging.error("Prompt is empty.")
+            logger.error("Prompt is empty.")
             raise ValueError("Prompt is empty.")
         try:
             self.agent.run([f"{prompt}"])
             return "Task completed by AgentNode"
         except Exception as e:
-            logging.error(f"While running the agent: {str(e)}")
+            logger.error(f"While running the agent: {str(e)}")
             raise e
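A matching call sketch for run(), for illustration only: the prompt must be a non-empty string, and the method returns the fixed string "Task completed by AgentNode" rather than the agent's actual output.

    # Hypothetical call; `node` is the instance sketched after the constructor hunk above.
    status = node.run("Draft a summary of the repository's worker and boss nodes")
    print(status)  # prints "Task completed by AgentNode"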
@@ -121,7 +131,6 @@ class AgentNode:
         except Exception as e:
             logging.error(f"Failed to initialize language model: {e}")
     def initialize_tools(self, llm_class):
         if not llm_class:
             logging.error("llm_class not cannot be none")

@@ -20,13 +20,19 @@ logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
 # TODO: Off
 class HierarchicalSwarm:
-    def __init__(self, model_id: str = None,
-                 openai_api_key="",
-                 use_vectorstore=True, embedding_size: int = None, use_async=True,
-                 human_in_the_loop=True, model_type: str = None, boss_prompt: str = None,
-                 worker_prompt:str = None,
-                 temperature=None,
-                 max_iterations=None,
+    def __init__(self,
+                 model_id: str = None,
+                 openai_api_key="",
+                 use_vectorstore=True,
+                 embedding_size: int = None,
+                 use_async=True,
+                 human_in_the_loop=True,
+                 model_type: str = None,
+                 boss_prompt: str = None,
+                 worker_prompt:str = None,
+                 temperature=None,
+                 max_iterations=None,
+                 log_level: str = 'INFO'
                  ):
         #openai_api_key: the openai key. Default is empty
         if not model_id:
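For reference, a hedged construction example against the reformatted signature; every value below is an illustrative placeholder rather than something taken from this PR, and the rest of the constructor's validation is not visible in this hunk.

    # Hypothetical example; HierarchicalSwarm is imported at the top of this diff.
    manager = HierarchicalSwarm(
        model_id="gpt2",               # assumed: only consulted when model_type="huggingface"
        openai_api_key="sk-...",       # placeholder; still used for OpenAI embeddings below
        use_vectorstore=True,
        embedding_size=8192,
        use_async=False,
        human_in_the_loop=True,
        model_type="huggingface",
        temperature=0.5,
        max_iterations=5,
        log_level='INFO',
    )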
@@ -63,8 +69,8 @@ class HierarchicalSwarm:
         """
         try:
             # Initialize language model
-            if self.llm_class == 'openai' or OpenAI:
-                return llm_class(openai_api_key=self.openai_api_key, temperature=self.temperature)
+            if self.llm_class == 'openai':
+                return OpenAI(openai_api_key=self.openai_api_key, temperature=self.temperature)
             elif self.model_type == "huggingface":
                 return HuggingFaceLLM(model_id=self.model_id, temperature=self.temperature)
         except Exception as e:
@@ -109,7 +115,7 @@ class HierarchicalSwarm:
         """
         try:
             embeddings_model = OpenAIEmbeddings(openai_api_key=self.openai_api_key)
-            embedding_size = self.embedding_size or 8192
+            embedding_size = self.embedding_size
             index = faiss.IndexFlatL2(embedding_size)
             return FAISS(embeddings_model.embed_query, index, InMemoryDocstore({}), {})
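With the `or 8192` fallback dropped, embedding_size must now be a real integer or faiss.IndexFlatL2 will fail; if OpenAIEmbeddings defaults to text-embedding-ada-002, its vectors are 1536-dimensional, so the index dimension has to match for inserts and searches to work. A standalone sketch of the same setup, with import paths as used by the LangChain releases this repo appears to target (they may differ in newer versions):

    import faiss
    from langchain.docstore import InMemoryDocstore
    from langchain.embeddings import OpenAIEmbeddings
    from langchain.vectorstores import FAISS

    embeddings_model = OpenAIEmbeddings(openai_api_key="sk-...")  # placeholder key
    embedding_size = 1536  # must match the embedding model's output dimension
    index = faiss.IndexFlatL2(embedding_size)
    store = FAISS(embeddings_model.embed_query, index, InMemoryDocstore({}), {})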
@@ -232,7 +238,7 @@ def swarm(api_key="", objective="", model_type="", model_id=""):
         logging.error("Invalid objective")
         raise ValueError("A valid objective is required")
     try:
-        swarms = HierarchicalSwarm(api_key, model_id, use_async=False, model_type=model_type) # Turn off async
+        swarms = HierarchicalSwarm(api_key, model_id=model_type, use_async=False, model_type=model_type) # Turn off async
         result = swarms.run(objective)
         if result is None:
             logging.error("Failed to run swarms")
