diff --git a/.github/workflows/python-package-conda.yml b/.github/workflows/python-package-conda.yml deleted file mode 100644 index 51c99bba..00000000 --- a/.github/workflows/python-package-conda.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: Python Package using Conda - -on: [push] - -jobs: - build-linux: - runs-on: ubuntu-latest - strategy: - max-parallel: 5 - - steps: - - uses: actions/checkout@v4 - - name: Set up Python 3.10 - uses: actions/setup-python@v5 - with: - python-version: '3.10' - - name: Add conda to system path - run: | - # $CONDA is an environment variable pointing to the root of the miniconda directory - echo $CONDA/bin >> $GITHUB_PATH - - name: Install dependencies - run: | - conda env update --file environment.yml --name base - - name: Lint with flake8 - run: | - conda install flake8 - # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - name: Test with pytest - run: | - conda install pytest - pytest diff --git a/example.py b/example.py index ec70ecfc..423554bc 100644 --- a/example.py +++ b/example.py @@ -1,4 +1,33 @@ from swarms.structs.agent import Agent +import pinecone +import os +from dotenv import load_dotenv +from datetime import datetime +from sentence_transformers import SentenceTransformer + +# Load environment variables +load_dotenv() + +# Initialize Pinecone +pinecone.init( + api_key=os.getenv("PINECONE_API_KEY"), + environment=os.getenv("PINECONE_ENVIRONMENT"), +) + +# Initialize the embedding model +embedding_model = SentenceTransformer("all-MiniLM-L6-v2") + +# Create or get the index +index_name = "financial-agent-memory" +if index_name not in pinecone.list_indexes(): + pinecone.create_index( + name=index_name, + dimension=384, # Dimension for all-MiniLM-L6-v2 + metric="cosine", + ) + +# Get the index +pinecone_index = pinecone.Index(index_name) # Initialize the agent agent = Agent( @@ -11,6 +40,45 @@ agent = Agent( output_type="all", ) -agent.run("Conduct an analysis of the best real undervalued ETFs") + +def run_agent(task): + # Run the agent and store the interaction + result = agent.run(task) + + # Generate embedding for the document + doc_text = f"Task: {task}\nResult: {result}" + embedding = embedding_model.encode(doc_text).tolist() + + # Store the interaction in Pinecone + pinecone_index.upsert( + vectors=[ + { + "id": str(datetime.now().timestamp()), + "values": embedding, + "metadata": { + "agent_name": agent.agent_name, + "task_type": "financial_analysis", + "timestamp": str(datetime.now()), + "text": doc_text, + }, + } + ] + ) + + return result + + +def query_memory(query_text, top_k=5): + # Generate embedding for the query + query_embedding = embedding_model.encode(query_text).tolist() + + # Query Pinecone + results = pinecone_index.query( + vector=query_embedding, top_k=top_k, 
include_metadata=True + ) + + return results + + # print(out) # print(type(out)) diff --git a/swarms/structs/conversation.py b/swarms/structs/conversation.py index 69c9e638..42d96639 100644 --- a/swarms/structs/conversation.py +++ b/swarms/structs/conversation.py @@ -115,13 +115,16 @@ class Conversation(BaseStructure): } self.cache_lock = threading.Lock() self.conversations_dir = conversations_dir - + self.setup() def setup(self): # Set up conversations directory - self.conversations_dir = self.conversations_dir or os.path.join( - os.path.expanduser("~"), ".swarms", "conversations" + self.conversations_dir = ( + self.conversations_dir + or os.path.join( + os.path.expanduser("~"), ".swarms", "conversations" + ) ) os.makedirs(self.conversations_dir, exist_ok=True) @@ -150,7 +153,9 @@ class Conversation(BaseStructure): self.add(self.user or "User", self.rules) if self.custom_rules_prompt is not None: - self.add(self.user or "User", self.custom_rules_prompt) + self.add( + self.user or "User", self.custom_rules_prompt + ) # If tokenizer then truncate if self.tokenizer is not None: