From 8a8d002f335e8b0c03d4afb1578d9b5fcd30c2a4 Mon Sep 17 00:00:00 2001
From: Kye
Date: Thu, 13 Jul 2023 17:15:19 -0400
Subject: [PATCH] worker node error logging + optional modularity

Former-commit-id: 2f84882be42047a0ab2274c6f72dd7c1b1b14912
---
 swarms/agents/workers/WorkerNode.py | 119 ++++++++++++++++++----------
 swarms/swarms.py                    |   8 +-
 2 files changed, 80 insertions(+), 47 deletions(-)

diff --git a/swarms/agents/workers/WorkerNode.py b/swarms/agents/workers/WorkerNode.py
index f7fd465e..f02199f3 100644
--- a/swarms/agents/workers/WorkerNode.py
+++ b/swarms/agents/workers/WorkerNode.py
@@ -13,21 +13,23 @@ import logging
 from pydantic import BaseModel, Extra
 
 logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
+
 class WorkerNode:
     """Useful for when you need to spawn an autonomous agent instance as a worker to accomplish complex tasks, it can search the internet or spawn child multi-modality models to process and generate images and text or audio and so on"""
 
     def __init__(self, llm, tools, vectorstore):
         if not llm or not tools or not vectorstore:
-            raise ValueError("llm, tools, and vectorstore cannot be None")
+            logging.error("llm, tools, and vectorstore cannot be None.")
+            raise ValueError("llm, tools, and vectorstore cannot be None.")
+
         self.llm = llm
         self.tools = tools
         self.vectorstore = vectorstore
         self.agent = None
 
-    def create_agent(self, ai_name, ai_role, human_in_the_loop, search_kwargs):
+    def create_agent(self, ai_name="Swarm Worker AI Assistant", ai_role="Assistant", human_in_the_loop=False, search_kwargs={}, verbose=False):
         logging.info("Creating agent in WorkerNode")
         try:
-
             self.agent = AutoGPT.from_llm_and_tools(
                 ai_name=ai_name,
                 ai_role=ai_role,
@@ -37,88 +39,119 @@ class WorkerNode:
                 human_in_the_loop=human_in_the_loop,
                 chat_history_memory=FileChatMessageHistory("chat_history.txt"),
             )
-            self.agent.chain.verbose = True
-
+            self.agent.chain.verbose = verbose
         except Exception as e:
             logging.error(f"Error while creating agent: {str(e)}")
             raise e
 
+
     def add_tool(self, tool: Tool):
         if not isinstance(tool, Tool):
-            raise TypeError("Tool must be an instance of Tool")
+            logging.error("Tool must be an instance of Tool.")
+            raise TypeError("Tool must be an instance of Tool.")
+
         self.tools.append(tool)
 
     def run(self, prompt: str) -> str:
         if not isinstance(prompt, str):
-            raise TypeError("Prompt must be a string")
+            logging.error("Prompt must be a string.")
+            raise TypeError("Prompt must be a string.")
+
         if not prompt:
-            raise ValueError("Prompt is empty")
+            logging.error("Prompt is empty.")
+            raise ValueError("Prompt is empty.")
+
         try:
-
-
             self.agent.run([f"{prompt}"])
             return "Task completed by WorkerNode"
         except Exception as e:
             logging.error(f"While running the agent: {str(e)}")
             raise e
+
 
-# worker_tool = Tool(
-#     name="WorkerNode AI Agent",
-#     func=WorkerNode.run,
-#     description="Useful for when you need to spawn an autonomous agent instance as a worker to accomplish complex tasks, it can search the internet or spawn child multi-modality models to process and generate images and text or audio and so on"
-# )
-
 class WorkerNodeInitializer:
     def __init__(self, openai_api_key):
         if not openai_api_key:
+            logging.error("OpenAI API key is not provided")
             raise ValueError("openai_api_key cannot be None")
+
         self.openai_api_key = openai_api_key
 
     def initialize_llm(self, llm_class, temperature=0.5):
         if not llm_class:
+            logging.error("llm_class cannot be None")
             raise ValueError("llm_class cannot be None")
-        return llm_class(openai_api_key=self.openai_api_key, temperature=temperature)
+
+        try:
+            return llm_class(openai_api_key=self.openai_api_key, temperature=temperature)
+        except Exception as e:
+            logging.error(f"Failed to initialize language model: {e}")
+            raise
 
     def initialize_tools(self, llm_class):
         if not llm_class:
+            logging.error("llm_class cannot be None")
             raise ValueError("llm_class cannot be none")
-        logging.info('Creating WorkerNode')
-        llm = self.initialize_llm(llm_class)
-        web_search = DuckDuckGoSearchRun()
-        tools = [
-            web_search,
-            WriteFileTool(root_dir=ROOT_DIR),
-            ReadFileTool(root_dir=ROOT_DIR),
-            process_csv,
-            WebpageQATool(qa_chain=load_qa_with_sources_chain(llm)),
-        ]
-        return tools
+
+        try:
+            logging.info('Creating WorkerNode')
+            llm = self.initialize_llm(llm_class)
+            web_search = DuckDuckGoSearchRun()
+
+            tools = [
+                web_search,
+                WriteFileTool(root_dir=ROOT_DIR),
+                ReadFileTool(root_dir=ROOT_DIR),
+                process_csv,
+                WebpageQATool(qa_chain=load_qa_with_sources_chain(llm)),
+            ]
+            if not tools:
+                logging.error("Tools are not initialized")
+                raise ValueError("Tools are not initialized")
+            return tools
+        except Exception as e:
+            logging.error(f"Failed to initialize tools: {e}")
 
     def initialize_vectorstore(self):
-        embeddings_model = OpenAIEmbeddings(openai_api_key=self.openai_api_key)
-        embedding_size = 1536
-        index = faiss.IndexFlatL2(embedding_size)
-        return FAISS(embeddings_model.embed_query, index, InMemoryDocstore({}), {})
+        try:
+            embeddings_model = OpenAIEmbeddings(openai_api_key=self.openai_api_key)
+            embedding_size = 1536
+            index = faiss.IndexFlatL2(embedding_size)
+            return FAISS(embeddings_model.embed_query, index, InMemoryDocstore({}), {})
+        except Exception as e:
+            logging.error(f"Failed to initialize vector store: {e}")
+            raise
 
-    def create_worker_node(self, llm_class=ChatOpenAI):
+    def create_worker_node(self, llm_class=ChatOpenAI, ai_name="Swarm Worker AI Assistant", ai_role="Assistant", human_in_the_loop=False, search_kwargs={}, verbose=False):
         if not llm_class:
-            raise ValueError("llm_class cannot be None")
-        worker_tools = self.initialize_tools(llm_class)
-        vectorstore = self.initialize_vectorstore()
-        worker_node = WorkerNode(llm=self.initialize_llm(llm_class), tools=worker_tools, vectorstore=vectorstore)
-        worker_node.create_agent(ai_name="Swarm Worker AI Assistant", ai_role="Assistant", human_in_the_loop=False, search_kwargs={})
-        return worker_node
+            logging.error("llm_class cannot be None.")
+            raise ValueError("llm_class cannot be None.")
+
+        try:
+            worker_tools = self.initialize_tools(llm_class)
+            vectorstore = self.initialize_vectorstore()
+            worker_node = WorkerNode(llm=self.initialize_llm(llm_class), tools=worker_tools, vectorstore=vectorstore)
+            worker_node.create_agent(ai_name=ai_name, ai_role=ai_role, human_in_the_loop=human_in_the_loop, search_kwargs=search_kwargs, verbose=verbose)
+            return worker_node
+        except Exception as e:
+            logging.error(f"Failed to create worker node: {e}")
+            raise
 
 def worker_node(openai_api_key):
     if not openai_api_key:
-        raise ValueError("openai_api_key cannot be none")
-    initializer = WorkerNodeInitializer(openai_api_key)
-    worker_node = initializer.create_worker_node()
-    return worker_node
+        logging.error("OpenAI API key is not provided")
+        raise ValueError("OpenAI API key is required")
+
+    try:
+        initializer = WorkerNodeInitializer(openai_api_key)
+        worker_node = initializer.create_worker_node()
+        return worker_node
+    except Exception as e:
+        logging.error(f"An error occurred in worker_node: {e}")
+        raise
diff --git a/swarms/swarms.py b/swarms/swarms.py
index c0dae34d..b15bd850 100644
--- a/swarms/swarms.py
+++ b/swarms/swarms.py
@@ -9,7 +9,6 @@ logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %
 
 class Swarms:
     def __init__(self, openai_api_key=""): #openai_api_key: the openai key. Default is empty
-
         if not openai_api_key:
             logging.error("OpenAI key is not provided")
             raise ValueError("OpenAI API key is required")
@@ -24,8 +23,7 @@ class Swarms:
             llm_class(class): The Language model class. Default is OpenAI.
             temperature (float): The Temperature for the language model. Default is 0.5
         """
-        try:
-
+        try:
             # Initialize language model
             return llm_class(openai_api_key=self.openai_api_key, temperature=temperature)
         except Exception as e:
@@ -49,10 +47,11 @@ class Swarms:
             process_csv,
             WebpageQATool(qa_chain=load_qa_with_sources_chain(llm)),
-
             ]
+            assert tools is not None, "tools is not initialized"
             return tools
+
         except Exception as e:
             logging.error(f"Failed to initialize tools: {e}")
             raise
@@ -67,6 +66,7 @@ class Swarms:
             embeddings_model = OpenAIEmbeddings(openai_api_key=self.openai_api_key)
             embedding_size = 1536
             index = faiss.IndexFlatL2(embedding_size)
+
             return FAISS(embeddings_model.embed_query, index, InMemoryDocstore({}), {})
         except Exception as e:
             logging.error(f"Failed to initialize vector store: {e}")
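
Usage sketch: with the optional parameters introduced above, a worker can be spawned with the defaults or configured per call instead of relying on the old hard-coded values. The snippet below is a minimal example, assuming the import path matches the file layout in this patch and that an OpenAI key is exported as OPENAI_API_KEY; the ai_name value is illustrative only and not part of the change.

    import os

    # Module path assumed from swarms/agents/workers/WorkerNode.py above.
    from swarms.agents.workers.WorkerNode import WorkerNodeInitializer, worker_node

    api_key = os.environ["OPENAI_API_KEY"]  # assumed to be set in the environment

    # Default path: the helper builds a fully configured worker in one call.
    node = worker_node(api_key)

    # Configurable path: the new keyword arguments replace the old hard-coded values.
    initializer = WorkerNodeInitializer(api_key)
    custom = initializer.create_worker_node(
        ai_name="Research Worker",   # illustrative name, not part of the patch
        ai_role="Assistant",
        human_in_the_loop=False,
        search_kwargs={},            # forwarded to create_agent
        verbose=True,                # previously hard-coded, now opt-in
    )
    print(custom.run("Find three recent papers on swarm intelligence and summarize them."))

Note that run() logs errors and returns the fixed string "Task completed by WorkerNode" rather than the agent's output, so callers should treat it as a fire-and-forget trigger.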