From 7890beef3b8efaa7d705627e3b8783625c58c837 Mon Sep 17 00:00:00 2001
From: Kye
Date: Thu, 10 Aug 2023 10:30:07 -0400
Subject: [PATCH] documentation

---
 DOCS/workers/VortexAgent.md     | 75 +++++++++++++++++++++++++++++++++
 swarms/workers/vortex_worker.py | 11 +++--
 2 files changed, 80 insertions(+), 6 deletions(-)
 create mode 100644 DOCS/workers/VortexAgent.md

diff --git a/DOCS/workers/VortexAgent.md b/DOCS/workers/VortexAgent.md
new file mode 100644
index 00000000..c947c1c2
--- /dev/null
+++ b/DOCS/workers/VortexAgent.md
@@ -0,0 +1,75 @@
+### Plan:
+
+1. **Example Creation**:
+   - Develop several usage examples, each demonstrating a different configuration or set of parameters for the `VortexWorkerAgent` class.
+
+2. **Documentation**:
+   - Write clear, concise documentation for each method in the class. Describe each method's purpose, input parameters, and return values (if any).
+
+3. **Rules and Guidelines**:
+   - Establish general usage rules and guidelines for using the `VortexWorkerAgent` class effectively, without running into common pitfalls or misconfigurations.
+
+### Code:
+
+#### Examples:
+
+```python
+from langchain.chat_models import ChatOpenAI
+
+from swarms.workers.vortex_worker import VortexWorkerAgent
+
+# Example 1: Basic Initialization
+agent1 = VortexWorkerAgent(openai_api_key="YOUR_OPENAI_API_KEY")
+agent1.run("Help me find resources about renewable energy.")
+
+# Example 2: Custom Name & Role
+agent2 = VortexWorkerAgent(openai_api_key="YOUR_OPENAI_API_KEY", worker_name="EcoHelper", worker_role="Researcher")
+agent2.run("Fetch me the latest data on solar energy advancements.")
+
+# Example 3: Human-in-the-Loop Configuration
+agent3 = VortexWorkerAgent(openai_api_key="YOUR_OPENAI_API_KEY", human_in_the_loop=True)
+agent3.run("Provide me with a summary of the top AI advancements in 2023, and if unsure, ask me.")
+
+# Example 4: Custom LLM & Tools Initialization
+# A chat model backs the QA chain here; WebpageQATool and load_qa_with_sources_chain
+# are assumed to be importable from the project's autogpt tool helpers.
+custom_llm = ChatOpenAI(openai_api_key="YOUR_OPENAI_API_KEY")
+custom_tools = [WebpageQATool(qa_chain=load_qa_with_sources_chain(custom_llm))]
+
+agent4 = VortexWorkerAgent(openai_api_key="YOUR_OPENAI_API_KEY", llm=custom_llm, tools=custom_tools)
+agent4.run("Summarize the key points from https://example.com.")
+```
+
+#### Documentation:
+
+```python
+class VortexWorkerAgent:
+    """An autonomous agent instance that accomplishes complex tasks.
+
+    Args:
+        openai_api_key (str): The API key for OpenAI.
+        llm (Optional[Union[InMemoryDocstore, ChatOpenAI]]): The language model to use. Defaults to ChatOpenAI.
+        tools (Optional[List[Tool]]): Tools to be used by the agent. Defaults to a predefined list.
+        embedding_size (Optional[int]): Size of the embeddings. Defaults to 8192.
+        worker_name (Optional[str]): Name of the worker. Defaults to "Swarm Worker AI Assistant".
+        worker_role (Optional[str]): Role of the worker. Defaults to "Assistant".
+        human_in_the_loop (Optional[bool]): Whether a human is kept in the loop. Defaults to False.
+        search_kwargs (dict): Additional keyword arguments for search. Empty by default.
+        verbose (Optional[bool]): Verbose flag. Defaults to False.
+        chat_history_file (str): File path used to store chat history. Defaults to "chat_history.txt".
+
+    Methods:
+        add_tool(tool: Tool): Adds a new tool to the agent's toolset.
+        run(prompt: str) -> str: Executes a given task or query using the agent.
+    """
+```
+
+#### Rules and Guidelines:
+
+1. **Mandatory OpenAI API Key**: Always initialize the `VortexWorkerAgent` with a valid OpenAI API key; the agent cannot reach the OpenAI API without it.
+
+2. **Custom LLMs & Tools**: When providing a custom LLM or custom tools, make sure they expose the interfaces the agent expects (for example, an LLM such as `ChatOpenAI` and tools built on `langchain`'s `Tool` class) so they work with the rest of the agent's components.
+