pull/55/head
Kye 1 year ago
parent ce38944312
commit 5259186f37

@@ -0,0 +1,5 @@
+from swarms import GodMode
+
+god_mode = GodMode(num_workers=3, openai_api_key="", ai_name="Optimus Prime")
+task = "What were the winning Boston Marathon times for the past 5 years (ending in 2022)? Generate a table of the year, name, country of origin, and times."
+god_mode.print_responses(task)
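Note that this new example file still calls GodMode with the old keyword-argument constructor, while the refactor further down in this commit changes GodMode to accept a list of LLM callables. A minimal sketch of what the same example might look like against the new GodMode(llms) signature, reusing the langchain Anthropic wrapper from this commit (the model name and API key are placeholders, not values from the diff):

```python
# Hypothetical rewrite of the example above for the new GodMode(llms) API.
# Model name and API key are placeholders.
from langchain.llms import Anthropic

from swarms import GodMode

llms = [
    Anthropic(model="<model_name>", anthropic_api_key="my-api-key")
    for _ in range(3)
]
god_mode = GodMode(llms)

task = (
    "What were the winning Boston Marathon times for the past 5 years "
    "(ending in 2022)? Generate a table of the year, name, country of "
    "origin, and times."
)
god_mode.print_responses(task)
```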

File diff suppressed because one or more lines are too long

@@ -16,4 +16,4 @@ logo2 = """
 \/ \/ \/ \/ \/ \/ \/ \/ \/ \/ \/ \/
 """
-print(logo2)
+# print(logo2)

@@ -1,55 +1,28 @@
 from concurrent.futures import ThreadPoolExecutor
-from tabulate import tabulate
 from termcolor import colored
+from tabulate import tabulate
 
-from swarms.workers.worker import Worker
+import anthropic
+from langchain.llms import Anthropic
 
 
 class GodMode:
-    def __init__(
-        self,
-        num_workers,
-        num_llms,
-        openai_api_key,
-        ai_name
-    ):
-        self.workers = [
-            Worker(
-                openai_api_key=openai_api_key,
-                ai_name=ai_name
-            ) for _ in range(num_workers)
-        ]
-        # self.llms = [LLM() for _ in range(num_llms)]
-        self.all_agents = self.workers # + self.llms
+    def __init__(self, llms):
+        self.llms = llms
 
     def run_all(self, task):
         with ThreadPoolExecutor() as executor:
-            responses = executor.map(
-                lambda agent: agent.run(task) if hasattr(
-                    agent, 'run'
-                ) else agent(task), self.all_agents
-            )
+            responses = executor.map(lambda llm: llm(task), self.llms)
         return list(responses)
 
     def print_responses(self, task):
         responses = self.run_all(task)
         table = []
         for i, response in enumerate(responses):
-            agent_type = "Worker" if i < len(self.workers) else "LLM"
-            table.append([agent_type, response])
-        print(
-            colored(
-                tabulate(
-                    table,
-                    headers=["Agent Type", "Response"],
-                    tablefmt="pretty"
-                ), "cyan")
-        )
+            table.append([f"LLM {i+1}", response])
+        print(colored(tabulate(table, headers=["LLM", "Response"], tablefmt="pretty"), "cyan"))
 
 
 # Usage
-god_mode = GodMode(num_workers=3, openai_api_key="", ai_name="Optimus Prime")
+llms = [Anthropic(model="<model_name>", anthropic_api_key="my-api-key") for _ in range(5)]
 
-task = "What were the winning Boston Marathon times for the past 5 years (ending in 2022)? Generate a table of the year, name, country of origin, and times."
+god_mode = GodMode(llms)
+
+task = f"{anthropic.HUMAN_PROMPT} What are the biggest risks facing humanity?{anthropic.AI_PROMPT}"
 
 god_mode.print_responses(task)
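The core of this change is the fan-out pattern: run_all maps one task across every LLM callable in parallel, and print_responses tabulates the results for comparison. A minimal, self-contained sketch of that pattern, using plain Python callables as stand-ins for the langchain LLM wrappers so it runs without any API keys (the stand-in names are illustrative, not part of this commit):

```python
from concurrent.futures import ThreadPoolExecutor

from tabulate import tabulate
from termcolor import colored

# Stand-in "LLMs": plain callables that take a prompt and return a string.
# In the real GodMode these would be langchain wrappers such as Anthropic.
fake_llms = [
    lambda task, i=i: f"response {i} to: {task}"
    for i in range(3)
]


def run_all(llms, task):
    # Fan the same task out to every callable concurrently.
    with ThreadPoolExecutor() as executor:
        return list(executor.map(lambda llm: llm(task), llms))


def print_responses(llms, task):
    # Collect the responses and render them as one comparison table.
    table = [
        [f"LLM {i + 1}", response]
        for i, response in enumerate(run_all(llms, task))
    ]
    print(colored(tabulate(table, headers=["LLM", "Response"], tablefmt="pretty"), "cyan"))


print_responses(fake_llms, "What are the biggest risks facing humanity?")
```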