[PLAYGROUND][Cleanup]

pull/393/head
Kye 1 year ago
parent be46bcf48f
commit db43acbf7d

@@ -1,10 +1,9 @@
 from swarms.agents.multion_agent import MultiOnAgent
 import timeit
 from swarms import Agent, ConcurrentWorkflow, Task
-from swarms.utils.loguru_logger import logger
 # model
-model = MultiOnAgent(multion_api_key="")
+model = MultiOnAgent(multion_api_key="535ae401948b4c59bc1b2c61eec90fe6")
 # out = model.run("search for a recipe")
 agent = Agent(
@@ -15,27 +14,26 @@ agent = Agent(
     system_prompt=None,
 )
-logger.info("[Agent][ID][MultiOnAgent][Initialized][Successfully")
+# logger.info("[Agent][ID][MultiOnAgent][Initialized][Successfully")
 # Task
 task = Task(
     agent=agent,
     description=(
-        "send an email to vyom on superhuman for a partnership with"
-        " multion"
+        "Download https://www.coachcamel.com/"
     ),
 )
 # Swarm
-logger.info(
-    f"Running concurrent workflow with task: {task.description}"
-)
+# logger.info(
+#     f"Running concurrent workflow with task: {task.description}"
+# )
 # Measure execution time
 start_time = timeit.default_timer()
 workflow = ConcurrentWorkflow(
-    max_workers=1,
+    max_workers=20,
     autosave=True,
     print_results=True,
     return_results=True,
@@ -47,4 +45,5 @@ workflow.run()
 # Calculate execution time
 execution_time = timeit.default_timer() - start_time
-logger.info(f"Execution time: {execution_time} seconds")
+# logger.info(f"Execution time: {execution_time} seconds")
+print(f"Execution time: {execution_time} seconds")
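Pieced together from the three hunks above, a minimal sketch of what the cleaned-up script does after this commit; the llm wiring of the Agent and the workflow.add(...) step are assumptions, since the middle of the file is not visible in the diff.

import timeit
from swarms import Agent, ConcurrentWorkflow, Task
from swarms.agents.multion_agent import MultiOnAgent

# MultiOn-backed model (key omitted here)
model = MultiOnAgent(multion_api_key="...")

# Assumed wiring: the hunk only shows system_prompt=None and the closing parenthesis
agent = Agent(llm=model, system_prompt=None)

task = Task(
    agent=agent,
    description="Download https://www.coachcamel.com/",
)

# Time the concurrent workflow
start_time = timeit.default_timer()
workflow = ConcurrentWorkflow(
    max_workers=20,
    autosave=True,
    print_results=True,
    return_results=True,
)
workflow.add(task)  # assumed: the add step is not visible in the hunks
workflow.run()

execution_time = timeit.default_timer() - start_time
print(f"Execution time: {execution_time} seconds")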

@@ -1,28 +0,0 @@
-from swarms import HierarchicalSwarm
-swarm = HierarchicalSwarm(
-    openai_api_key="key",
-    model_type="openai",
-    model_id="gpt-4",
-    use_vectorstore=False,
-    use_async=False,
-    human_in_the_loop=False,
-    logging_enabled=False,
-)
-# run the swarm with an objective
-result = swarm.run("Design a new car")
-# or huggingface
-swarm = HierarchicalSwarm(
-    model_type="huggingface",
-    model_id="tiaueu/falcon",
-    use_vectorstore=True,
-    embedding_size=768,
-    use_async=False,
-    human_in_the_loop=True,
-    logging_enabled=False,
-)
-# Run the swarm with a particular objective
-result = swarm.run("Write a sci-fi short story")

@@ -1,11 +1,17 @@
-from swarms.memory import chroma
-chromadbcl = chroma.ChromaClient()
-chromadbcl.add_vectors(
-    ["This is a document", "BONSAIIIIIII", "the walking dead"]
+from swarms.memory import ChromaDB
+# Initialize the memory
+chroma = ChromaDB(
+    metric="cosine",
+    limit_tokens=1000,
+    verbose=True,
 )
-results = chromadbcl.search_vectors("zombie", limit=1)
+# Add text
+text = "This is a test"
+chroma.add(text)
+# Search for similar text
+similar_text = chroma.query(text)
+print(results)

@@ -14,7 +14,7 @@ api_key = os.environ.get("OPENAI_API_KEY")
 # Initilaize the chromadb client
 chromadb = ChromaDB(
-    metric="cosine",
+    metric="cosine",g
     output="results",
 )

@@ -1,11 +0,0 @@
-from swarms import Orchestrator, Worker
-# Instantiate the Orchestrator with 10 agents
-orchestrator = Orchestrator(
-    Worker, agent_list=[Worker] * 10, task_queue=[]
-)
-# Agent 1 sends a message to Agent 2
-orchestrator.chat(
-    sender_id=1, receiver_id=2, message="Hello, Agent 2!"
-)

@@ -1,4 +1,3 @@
-# Example
 import os

@@ -1,6 +1,5 @@
 from swarms.models import OpenAIChat
-from swarms.swarms import DialogueSimulator
-from swarms.workers.worker import Worker
+from swarms import DialogueSimulator, Worker
 llm = OpenAIChat(
     model_name="gpt-4", openai_api_key="api-key", temperature=0.5

@@ -1,7 +1,14 @@
-from swarms import swarm
-# Use the function
-api_key = "APIKEY"
-objective = "What is the capital of the UK?"
-result = swarm(api_key, objective)
-print(result)  # Prints: "The capital of the UK is London."
+from swarms import Agent, OpenAIChat
+## Initialize the workflow
+agent = Agent(
+    llm=OpenAIChat(),
+    max_loops=1,
+    autosave=True,
+    dashboard=False,
+    streaming_on=True,
+    verbose=True,
+)
+# Run the workflow on a task
+agent("Find a chick fil a equivalent in hayes valley")

@@ -3,7 +3,7 @@ import os
 from dotenv import load_dotenv
 from swarms.models import Anthropic, Gemini, Mixtral, OpenAIChat
-from swarms.swarms import ModelParallelizer
+from swarms import ModelParallelizer
 load_dotenv()

@@ -1,19 +0,0 @@
-from swarms import Orchestrator, Worker
-node = Worker(
-    openai_api_key="",
-    ai_name="Optimus Prime",
-)
-# Instantiate the Orchestrator with 10 agents
-orchestrator = Orchestrator(
-    node, agent_list=[node] * 10, task_queue=[]
-)
-# Agent 7 sends a message to Agent 9
-orchestrator.chat(
-    sender_id=7,
-    receiver_id=9,
-    message="Can you help me with this task?",
-)

@@ -1,19 +0,0 @@
-from ..swarms import HierarchicalSwarm
-# Retrieve your API key from the environment or replace with your actual key
-api_key = "sksdsds"
-# Initialize HierarchicalSwarm with your API key
-swarm = HierarchicalSwarm(openai_api_key=api_key)
-# Define an objective
-objective = """
-Please develop and serve a simple community web service.
-People can signup, login, post, comment.
-Post and comment should be visible at once.
-I want it to have neumorphism-style.
-The ports you can use are 4500 and 6500.
-"""
-# Run HierarchicalSwarm
-swarm.run(objective)

@@ -1,16 +0,0 @@
-from swarms import HierarchicalSwarm
-# Retrieve your API key from the environment or replace with your actual key
-api_key = ""
-# Initialize HierarchicalSwarm with your API key
-swarm = HierarchicalSwarm(api_key)
-# Define an objective
-objective = (
-    "Find 20 potential customers for a HierarchicalSwarm based AI"
-    " Agent automation infrastructure"
-)
-# Run HierarchicalSwarm
-swarm.run(objective)

@@ -1,19 +0,0 @@
-from swarms import HierarchicalSwarm
-# Retrieve your API key from the environment or replace with your actual key
-api_key = "sksdsds"
-# Initialize HierarchicalSwarm with your API key
-swarm = HierarchicalSwarm(openai_api_key=api_key)
-# Define an objective
-objective = """
-Please develop and serve a simple web TODO app.
-The user can list all TODO items and add or delete each TODO item.
-I want it to have neumorphism-style.
-The ports you can use are 4500 and 6500.
-"""
-# Run HierarchicalSwarm
-swarm.run(objective)

@@ -1,19 +0,0 @@
-from swarms.tools.tool import tool
-from swarms.tools.tool_func_doc_scraper import scrape_tool_func_docs
-@tool
-def search_api(query: str) -> str:
-    """Search API
-    Args:
-        query (str): _description_
-    Returns:
-        str: _description_
-    """
-    print(f"Searching API for {query}")
-tool_docs = scrape_tool_func_docs(search_api)
-print(tool_docs)

@@ -1,7 +0,0 @@
-from swarms.models import OpenAIChat
-from swarms.structs.workflow import Workflow
-llm = OpenAIChat()
-workflow = Workflow(llm)

@@ -2,8 +2,8 @@ import os
 from dotenv import load_dotenv
-from swarms.models import OpenAIChat
-from swarms.structs import Agent
+from swarms import OpenAIChat, Agent
+from swarms.tools.tool import tool
 load_dotenv()
@@ -12,24 +12,24 @@ api_key = os.environ.get("OPENAI_API_KEY")
 llm = OpenAIChat(api_key=api_key)
-# @tool
-# def search_api(query: str) -> str:
-#     """Search API
-#     Args:
-#         query (str): _description_
-#     Returns:
-#         str: _description_
-#     """
-#     print(f"Searching API for {query}")
+@tool
+def search_api(query: str) -> str:
+    """Search API
+    Args:
+        query (str): _description_
+    Returns:
+        str: _description_
+    """
+    print(f"Searching API for {query}")
 ## Initialize the workflow
 agent = Agent(
     llm=llm,
     max_loops=5,
-    # tools=[search_api],
+    tools=[search_api],
     dashboard=True,
 )
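Read together, the two hunks above un-comment the tool and hand it to the agent. A minimal sketch of the resulting script follows; the final agent.run(...) call and its prompt are assumptions, since they fall outside the visible hunks.

import os
from dotenv import load_dotenv
from swarms import OpenAIChat, Agent
from swarms.tools.tool import tool

load_dotenv()
api_key = os.environ.get("OPENAI_API_KEY")
llm = OpenAIChat(api_key=api_key)

# Tool enabled by this commit (docstring placeholders left as in the diff)
@tool
def search_api(query: str) -> str:
    """Search API

    Args:
        query (str): _description_

    Returns:
        str: _description_
    """
    print(f"Searching API for {query}")

## Initialize the workflow
agent = Agent(
    llm=llm,
    max_loops=5,
    tools=[search_api],
    dashboard=True,
)

# Assumed usage: not shown in the diff
out = agent.run("Search for the weather in San Francisco")
print(out)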

@@ -1,22 +0,0 @@
-from swarms.tools.tool import tool
-from swarms.tools.tool_func_doc_scraper import scrape_tool_func_docs
-# Define a tool by decorating a function with the tool decorator and providing a docstring
-@tool(return_direct=True)
-def search_api(query: str):
-    """Search the web for the query
-    Args:
-        query (str): _description_
-    Returns:
-        _type_: _description_
-    """
-    return f"Search results for {query}"
-# Scrape the tool func docs to prepare for injection into the agent prompt
-out = scrape_tool_func_docs(search_api)
-print(out)

@@ -1,10 +0,0 @@
-from swarms import Workflow
-from swarms.models import ChatOpenAI
-workflow = Workflow(ChatOpenAI)
-workflow.add("What's the weather in miami")
-workflow.add("Provide details for {{ parent_output }}")
-workflow.add("Summarize the above information: {{ parent_output}}")
-workflow.run()

@@ -37,12 +37,7 @@ class MultiOnAgent(AbstractLLM):
         self.max_steps = max_steps
         self.starting_url = starting_url
-        self.multion = multion.login(
-            use_api=True,
-            multion_api_key=str(multion_api_key),
-            *args,
-            **kwargs,
-        )
     def run(self, task: str, *args, **kwargs):
         """
@@ -56,7 +51,14 @@ class MultiOnAgent(AbstractLLM):
         Returns:
             dict: The response from the browsing task.
         """
-        response = self.multion.browse(
+        multion.login(
+            use_api=True,
+            multion_api_key=str(self.multion_api_key),
+            *args,
+            **kwargs,
+        )
+        response = multion.browse(
             {
                 "cmd": task,
                 "url": self.starting_url,
