From ef4f8bb78669cec8f4d296e9afe18a39c3f4dfe8 Mon Sep 17 00:00:00 2001
From: Kye
Date: Wed, 4 Oct 2023 22:23:55 -0400
Subject: [PATCH] profitpilot

---
 swarms/agents/profitpilot.py     | 13 +++++++++++--
 swarms/tools/interpreter.py      |  8 --------
 swarms/tools/interpreter_tool.py | 24 ++++++++++++++++++++++++
 3 files changed, 35 insertions(+), 10 deletions(-)
 delete mode 100644 swarms/tools/interpreter.py
 create mode 100644 swarms/tools/interpreter_tool.py

diff --git a/swarms/agents/profitpilot.py b/swarms/agents/profitpilot.py
index d70d3aa4..fd961b3a 100644
--- a/swarms/agents/profitpilot.py
+++ b/swarms/agents/profitpilot.py
@@ -17,6 +17,8 @@ from langchain.text_splitter import CharacterTextSplitter
 from langchain.vectorstores import Chroma
 from pydantic import BaseModel, Field
 from swarms.models.prompts.sales import SALES_AGENT_TOOLS_PROMPT, conversation_stages
+from swarms.tools.interpreter_tool import compile
+from swarms.agents.omni_modal_agent import OmniModalAgent
 
 
 # classes
@@ -164,16 +166,23 @@ def setup_knowledge_base(product_catalog: str = None):
 def get_tools(product_catalog):
     # query to get_tools can be used to be embedded and relevant tools found
-    # see here: https://langchain-langchain.vercel.app/docs/use_cases/agents/custom_agent_with_plugin_retrieval#tool-retriever
-    # we only use one tool for now, but this is highly extensible!
     knowledge_base = setup_knowledge_base(product_catalog)
     tools = [
         Tool(
             name="ProductSearch",
             func=knowledge_base.run,
             description="useful for when you need to answer questions about product information",
+        ),
+
+        # Interpreter
+        Tool(
+            name="Code Interpreter",
+            func=compile,
+            description="Useful when you need to run code locally, such as Python, Javascript, Shell, and more.",
         )
+
+        # omnimodal agent
     ]
 
     return tools
 
diff --git a/swarms/tools/interpreter.py b/swarms/tools/interpreter.py
deleted file mode 100644
index 2263b1f4..00000000
--- a/swarms/tools/interpreter.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import interpreter
-
-def compile(task: str):
-    task = interpreter.chat(task)
-    interpreter.chat()
-    interpreter.reset()
-
-
diff --git a/swarms/tools/interpreter_tool.py b/swarms/tools/interpreter_tool.py
new file mode 100644
index 00000000..22758de6
--- /dev/null
+++ b/swarms/tools/interpreter_tool.py
@@ -0,0 +1,24 @@
+import os
+import interpreter
+
+
+def compile(task: str):
+    """
+    Open Interpreter lets LLMs run code (Python, Javascript, Shell, and more) locally. You can chat with Open Interpreter through a ChatGPT-like interface in your terminal by running $ interpreter after installing.
+
+    This provides a natural-language interface to your computer's general-purpose capabilities:
+
+    Create and edit photos, videos, PDFs, etc.
+    Control a Chrome browser to perform research
+    Plot, clean, and analyze large datasets
+    ...etc.
+    ⚠️ Note: You'll be asked to approve code before it's run.
+    """
+    # configure open-interpreter before running the task; os.environ values must be strings
+    os.environ["INTERPRETER_CLI_AUTO_RUN"] = "True"
+    os.environ["INTERPRETER_CLI_FAST_MODE"] = "True"
+    os.environ["INTERPRETER_CLI_DEBUG"] = "True"
+
+    messages = interpreter.chat(task, return_messages=True)
+    interpreter.reset()
+    return messages
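
Below is a minimal usage sketch (not part of the patch) of how the new Code Interpreter tool could be exercised once this change is applied. It assumes open-interpreter and the swarms package are installed, an OPENAI_API_KEY is available for the knowledge-base tool, and the product catalog path and prompt are hypothetical placeholders:

    from swarms.agents.profitpilot import get_tools

    # build the tool list; the catalog path is a placeholder example file
    tools = get_tools("sample_product_catalog.txt")

    # the second entry is the Code Interpreter tool registered by this patch
    code_interpreter = next(t for t in tools if t.name == "Code Interpreter")
    print(code_interpreter.run("Use Python to compute the first ten prime numbers."))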