[PLAYGROUND][Cleanup]

pull/393/head
Kye · 1 year ago
parent be46bcf48f · commit db43acbf7d

@ -1,10 +1,9 @@
from swarms.agents.multion_agent import MultiOnAgent
import timeit
from swarms import Agent, ConcurrentWorkflow, Task
from swarms.utils.loguru_logger import logger
# model
model = MultiOnAgent(multion_api_key="")
model = MultiOnAgent(multion_api_key="535ae401948b4c59bc1b2c61eec90fe6")
# out = model.run("search for a recipe")
agent = Agent(
@ -15,27 +14,26 @@ agent = Agent(
system_prompt=None,
)
logger.info("[Agent][ID][MultiOnAgent][Initialized][Successfully")
# logger.info("[Agent][ID][MultiOnAgent][Initialized][Successfully")
# Task
task = Task(
agent=agent,
description=(
"send an email to vyom on superhuman for a partnership with"
" multion"
"Download https://www.coachcamel.com/"
),
)
# Swarm
logger.info(
f"Running concurrent workflow with task: {task.description}"
)
# logger.info(
# f"Running concurrent workflow with task: {task.description}"
# )
# Measure execution time
start_time = timeit.default_timer()
workflow = ConcurrentWorkflow(
max_workers=1,
max_workers=20,
autosave=True,
print_results=True,
return_results=True,
@ -47,4 +45,5 @@ workflow.run()
# Calculate execution time
execution_time = timeit.default_timer() - start_time
logger.info(f"Execution time: {execution_time} seconds")
# logger.info(f"Execution time: {execution_time} seconds")
print(f"Execution time: {execution_time} seconds")

@ -1,28 +0,0 @@
from swarms import HierarchicalSwarm
swarm = HierarchicalSwarm(
openai_api_key="key",
model_type="openai",
model_id="gpt-4",
use_vectorstore=False,
use_async=False,
human_in_the_loop=False,
logging_enabled=False,
)
# run the swarm with an objective
result = swarm.run("Design a new car")
# or huggingface
swarm = HierarchicalSwarm(
model_type="huggingface",
model_id="tiaueu/falcon",
use_vectorstore=True,
embedding_size=768,
use_async=False,
human_in_the_loop=True,
logging_enabled=False,
)
# Run the swarm with a particular objective
result = swarm.run("Write a sci-fi short story")

@ -1,11 +1,17 @@
from swarms.memory import chroma
from swarms.memory import ChromaDB
chromadbcl = chroma.ChromaClient()
chromadbcl.add_vectors(
["This is a document", "BONSAIIIIIII", "the walking dead"]
# Initialize the memory
chroma = ChromaDB(
metric="cosine",
limit_tokens=1000,
verbose=True,
)
results = chromadbcl.search_vectors("zombie", limit=1)
# Add text
text = "This is a test"
chroma.add(text)
# Search for similar text
similar_text = chroma.query(text)
print(results)
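
For reference, a minimal end-to-end sketch of the updated ChromaDB memory above, printing the query result directly rather than the removed results variable (it assumes ChromaDB exposes add() and query() exactly as shown in the hunk):

from swarms.memory import ChromaDB

# Initialize the memory store with the same parameters as the updated example
chroma = ChromaDB(metric="cosine", limit_tokens=1000, verbose=True)

# Add a document, then query for similar text and print the match
text = "This is a test"
chroma.add(text)
similar_text = chroma.query(text)
print(similar_text)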

@ -14,7 +14,7 @@ api_key = os.environ.get("OPENAI_API_KEY")
# Initialize the chromadb client
chromadb = ChromaDB(
metric="cosine",
metric="cosine",g
output="results",
)

@ -1,11 +0,0 @@
from swarms import Orchestrator, Worker
# Instantiate the Orchestrator with 10 agents
orchestrator = Orchestrator(
Worker, agent_list=[Worker] * 10, task_queue=[]
)
# Agent 1 sends a message to Agent 2
orchestrator.chat(
sender_id=1, receiver_id=2, message="Hello, Agent 2!"
)

@ -1,6 +1,5 @@
from swarms.models import OpenAIChat
from swarms.swarms import DialogueSimulator
from swarms.workers.worker import Worker
from swarms import DialogueSimulator, Worker
llm = OpenAIChat(
model_name="gpt-4", openai_api_key="api-key", temperature=0.5

@ -1,7 +1,14 @@
from swarms import swarm
from swarms import Agent, OpenAIChat
# Use the function
api_key = "APIKEY"
objective = "What is the capital of the UK?"
result = swarm(api_key, objective)
print(result) # Prints: "The capital of the UK is London."
## Initialize the workflow
agent = Agent(
llm=OpenAIChat(),
max_loops=1,
autosave=True,
dashboard=False,
streaming_on=True,
verbose=True,
)
# Run the workflow on a task
agent("Find a chick fil a equivalent in hayes valley")

@ -3,7 +3,7 @@ import os
from dotenv import load_dotenv
from swarms.models import Anthropic, Gemini, Mixtral, OpenAIChat
from swarms.swarms import ModelParallelizer
from swarms import ModelParallelizer
load_dotenv()
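
This hunk only swaps the ModelParallelizer import path. A hedged usage sketch, assuming (as in other swarms examples) that ModelParallelizer takes a list of llms and exposes a run(task) method:

import os
from dotenv import load_dotenv
from swarms import ModelParallelizer
from swarms.models import Anthropic, OpenAIChat

load_dotenv()

# Assumed interface: a list of llms plus run(task), which fans the task out
# to every model; check the ModelParallelizer docs before relying on this
parallelizer = ModelParallelizer(
    [OpenAIChat(api_key=os.getenv("OPENAI_API_KEY")), Anthropic()]
)
print(parallelizer.run("Summarize the benefits of running models in parallel"))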

@ -1,19 +0,0 @@
from swarms import Orchestrator, Worker
node = Worker(
openai_api_key="",
ai_name="Optimus Prime",
)
# Instantiate the Orchestrator with 10 agents
orchestrator = Orchestrator(
node, agent_list=[node] * 10, task_queue=[]
)
# Agent 7 sends a message to Agent 9
orchestrator.chat(
sender_id=7,
receiver_id=9,
message="Can you help me with this task?",
)

@ -1,19 +0,0 @@
from ..swarms import HierarchicalSwarm
# Retrieve your API key from the environment or replace with your actual key
api_key = "sksdsds"
# Initialize HierarchicalSwarm with your API key
swarm = HierarchicalSwarm(openai_api_key=api_key)
# Define an objective
objective = """
Please develop and serve a simple community web service.
People can signup, login, post, comment.
Post and comment should be visible at once.
I want it to have neumorphism-style.
The ports you can use are 4500 and 6500.
"""
# Run HierarchicalSwarm
swarm.run(objective)

@ -1,16 +0,0 @@
from swarms import HierarchicalSwarm
# Retrieve your API key from the environment or replace with your actual key
api_key = ""
# Initialize HierarchicalSwarm with your API key
swarm = HierarchicalSwarm(api_key)
# Define an objective
objective = (
"Find 20 potential customers for a HierarchicalSwarm based AI"
" Agent automation infrastructure"
)
# Run HierarchicalSwarm
swarm.run(objective)

@ -1,19 +0,0 @@
from swarms import HierarchicalSwarm
# Retrieve your API key from the environment or replace with your actual key
api_key = "sksdsds"
# Initialize HierarchicalSwarm with your API key
swarm = HierarchicalSwarm(openai_api_key=api_key)
# Define an objective
objective = """
Please develop and serve a simple web TODO app.
The user can list all TODO items and add or delete each TODO item.
I want it to have neumorphism-style.
The ports you can use are 4500 and 6500.
"""
# Run HierarchicalSwarm
swarm.run(objective)

@ -1,19 +0,0 @@
from swarms.tools.tool import tool
from swarms.tools.tool_func_doc_scraper import scrape_tool_func_docs
@tool
def search_api(query: str) -> str:
"""Search API
Args:
query (str): _description_
Returns:
str: _description_
"""
print(f"Searching API for {query}")
tool_docs = scrape_tool_func_docs(search_api)
print(tool_docs)

@ -1,7 +0,0 @@
from swarms.models import OpenAIChat
from swarms.structs.workflow import Workflow
llm = OpenAIChat()
workflow = Workflow(llm)

@ -2,8 +2,8 @@ import os
from dotenv import load_dotenv
from swarms.models import OpenAIChat
from swarms.structs import Agent
from swarms import OpenAIChat, Agent
from swarms.tools.tool import tool
load_dotenv()
@ -12,24 +12,24 @@ api_key = os.environ.get("OPENAI_API_KEY")
llm = OpenAIChat(api_key=api_key)
# @tool
# def search_api(query: str) -> str:
# """Search API
@tool
def search_api(query: str) -> str:
"""Search API
# Args:
# query (str): _description_
Args:
query (str): _description_
# Returns:
# str: _description_
# """
# print(f"Searching API for {query}")
Returns:
str: _description_
"""
print(f"Searching API for {query}")
## Initialize the workflow
agent = Agent(
llm=llm,
max_loops=5,
# tools=[search_api],
tools=[search_api],
dashboard=True,
)
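
With the @tool decorator re-enabled, search_api still only prints and returns nothing despite the -> str annotation. A minimal sketch of a tool that returns a value the agent can consume (the body is an illustrative stub, not a real API call):

from swarms.tools.tool import tool

@tool
def search_api(query: str) -> str:
    """Search API.

    Args:
        query (str): The search query.

    Returns:
        str: The search results.
    """
    # Stub result for illustration; a real implementation would call an
    # external search service here
    return f"Search results for {query}"

The agent can then be constructed exactly as in the hunk above, with tools=[search_api].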

@ -1,22 +0,0 @@
from swarms.tools.tool import tool
from swarms.tools.tool_func_doc_scraper import scrape_tool_func_docs
# Define a tool by decorating a function with the tool decorator and providing a docstring
@tool(return_direct=True)
def search_api(query: str):
"""Search the web for the query
Args:
query (str): _description_
Returns:
_type_: _description_
"""
return f"Search results for {query}"
# Scrape the tool func docs to prepare for injection into the agent prompt
out = scrape_tool_func_docs(search_api)
print(out)

@ -1,10 +0,0 @@
from swarms import Workflow
from swarms.models import ChatOpenAI
workflow = Workflow(ChatOpenAI)
workflow.add("What's the weather in miami")
workflow.add("Provide details for {{ parent_output }}")
workflow.add("Summarize the above information: {{ parent_output}}")
workflow.run()

@ -37,12 +37,7 @@ class MultiOnAgent(AbstractLLM):
self.max_steps = max_steps
self.starting_url = starting_url
self.multion = multion.login(
use_api=True,
multion_api_key=str(multion_api_key),
*args,
**kwargs,
)
def run(self, task: str, *args, **kwargs):
"""
@ -56,7 +51,14 @@ class MultiOnAgent(AbstractLLM):
Returns:
dict: The response from the browsing task.
"""
response = self.multion.browse(
multion.login(
use_api=True,
multion_api_key=str(self.multion_api_key),
*args,
**kwargs,
)
response = multion.browse(
{
"cmd": task,
"url": self.starting_url,
