Former-commit-id: 602b479511
group-chat
Kye 1 year ago
parent cb32ee6289
commit 4185232e13

@ -44,17 +44,47 @@ We have a small gallery of examples to run here, [for more check out the docs to
- `MultiAgentDebate` is a simple class that enables multi agent collaboration. - `MultiAgentDebate` is a simple class that enables multi agent collaboration.
```python ```python
from swarms import Worker, MultiAgentDebate, select_speaker from swarms.workers import Worker
from swarms.swarms import MultiAgentDebate, select_speaker
from langchain.models import OpenAIChat
# Initialize agents llm = OpenAIChat(
worker1 = Worker(openai_api_key="", ai_name="Optimus Prime") model_name='gpt-4',
worker2 = Worker(openai_api_key="", ai_name="Bumblebee") openai_api_key="api-key",
worker3 = Worker(openai_api_key="", ai_name="Megatron") temperature=0.5
)
node = Worker(
llm=llm,
ai_name="Optimus Prime",
ai_role="Worker in a swarm",
external_tools = None,
human_in_the_loop = False,
temperature = 0.5,
)
node2 = Worker(
llm=llm,
ai_name="Bumble Bee",
ai_role="Worker in a swarm",
external_tools = None,
human_in_the_loop = False,
temperature = 0.5,
)
node3 = Worker(
llm=llm,
    ai_name="Megatron",
ai_role="Worker in a swarm",
external_tools = None,
human_in_the_loop = False,
temperature = 0.5,
)
agents = [ agents = [
worker1, node,
worker2, node2,
worker3 node3
] ]
# Initialize multi-agent debate with the selection function # Initialize multi-agent debate with the selection function

@ -1,4 +0,0 @@
from swarms.models import OpenAIChat
llm = OpenAIChat(openai_api_key="your-openai-api-key")  # SECURITY: real secret key redacted — never commit API keys; load them from an environment variable
out = llm("Hello, I am a robot and I like to talk about robots.")

@ -0,0 +1 @@
from swarms.swarms import GroupChatManager

@ -1,7 +1,9 @@
import torch import torch
from transformers import AutoModelForCausalLM, AutoTokenizer from transformers import AutoModelForCausalLM, AutoTokenizer
from swarms.agents.message import Message from swarms.agents.message import Message
class Mistral: class Mistral:
""" """
Mistral Mistral
@ -150,4 +152,3 @@ class Mistral:
for token in response.split(): for token in response.split():
yield token yield token

@ -2,3 +2,20 @@ from typing import List, Dict, Any, Union
from concurrent.futures import Executor, ThreadPoolExecutor, as_completed from concurrent.futures import Executor, ThreadPoolExecutor, as_completed
from graphlib import TopologicalSorter from graphlib import TopologicalSorter
class Task:
    """A node in a task dependency graph.

    Tasks are linked to each other through ``parents`` (tasks that must
    run before this one) and ``children`` (tasks that depend on this one),
    forming a DAG suitable for topological scheduling. Subclasses must
    implement ``can_execute`` and ``execute``.
    """

    def __init__(
        self,
        id: str,
        parents: Union[List["Task"], None] = None,
        children: Union[List["Task"], None] = None,
    ):
        # Unique identifier of this task within the graph.
        # (Named ``id`` for API compatibility even though it shadows the builtin.)
        self.id = id
        # Kept as-given (possibly None) so callers can distinguish
        # "no dependency information" from "explicitly no dependencies".
        self.parents = parents
        self.children = children

    def can_execute(self):
        """Return whether this task is ready to run. Must be overridden."""
        raise NotImplementedError

    def execute(self):
        """Run the task. Must be overridden."""
        raise NotImplementedError

@ -72,6 +72,7 @@ class GroupChat:
) )
class GroupChatManager(Worker): class GroupChatManager(Worker):
def __init__( def __init__(
self, self,

Loading…
Cancel
Save