pull/362/head^2
Kye 12 months ago
parent aee3a79c01
commit b097e6016d

@@ -108,6 +108,56 @@ generated_data = agent.run(task)
print(generated_data)
```
### `Worker`
The `Worker` is a simple all-in-one agent equipped with an LLM, tools, and RAG. Get started below:
✅ Plug-and-play LLMs: use any LLM from any provider or framework.
✅ Reliable RAG: uses FAISS for efficient retrieval, and the memory layer is modular, so you can swap in any vector DB.
✅ Multi-step, parallel function calling: plug in any tool.
```python
# Importing necessary modules
import os

from dotenv import load_dotenv

from swarms import Worker, OpenAIChat, tool

# Loading environment variables from .env file
load_dotenv()

# Retrieving the OpenAI API key from environment variables
api_key = os.getenv("OPENAI_API_KEY")


# Create a tool
@tool
def search_api(query: str):
    """Search the API for the given query."""


# Creating a Worker instance
worker = Worker(
    name="My Worker",
    role="Worker",
    human_in_the_loop=False,
    tools=[search_api],
    temperature=0.5,
    llm=OpenAIChat(openai_api_key=api_key),
)

# Running the worker with a prompt
out = worker.run(
    "Hello, how are you? Create an image of how you are doing!"
)

# Printing the output
print(out)
```
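The `search_api` stub in the example above runs but returns nothing. As a purely illustrative sketch (not part of this diff), a tool body typically returns a string the LLM can read back:

```python
# Hypothetical tool body, assuming the @tool decorator wraps a plain function
# whose string return value is fed back to the LLM
@tool
def search_api(query: str) -> str:
    """Search an external API for the given query."""
    # Replace this placeholder with a real HTTP call
    return f"No results found for: {query}"
```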
------

@@ -1,12 +1,16 @@
# Importing necessary modules
import os

from dotenv import load_dotenv

from swarms.agents.worker_agent import Worker
from swarms import OpenAIChat

# Loading environment variables from .env file
load_dotenv()

# Retrieving the OpenAI API key from environment variables
api_key = os.getenv("OPENAI_API_KEY")

# Creating a Worker instance
worker = Worker(
    name="My Worker",
    role="Worker",
@@ -14,9 +18,13 @@ worker = Worker(
    tools=[],
    temperature=0.5,
    llm=OpenAIChat(openai_api_key=api_key),
    verbose=True,
)

# Running the worker with a prompt
out = worker.run(
    "Hello, how are you? Create an image of how you are doing!"
)

# Printing the output
print(out)

@@ -0,0 +1,5 @@
"""
This tutorial shows you how to integrate swarms with LangChain.
"""

@@ -15,6 +15,7 @@ from swarms.agents.stopping_conditions import (
)
from swarms.agents.tool_agent import ToolAgent
from swarms.agents.worker_agent import Worker
from swarms.agents.agent_wrapper import agent_wrapper
__all__ = [
"AbstractAgent",
@@ -32,4 +33,5 @@ __all__ = [
"check_exit",
"check_end",
"Worker",
"agent_wrapper",
]
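Since `agent_wrapper` is now listed in `__all__`, it should be importable from the agents package; a quick sanity check, assuming this is `swarms/agents/__init__.py`:

```python
# Verify the new export is reachable alongside the existing ones
from swarms.agents import Worker, agent_wrapper

print(callable(agent_wrapper))  # True
print(Worker.__name__)          # "Worker"
```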

@@ -0,0 +1,25 @@
from swarms.structs.agent import Agent


def agent_wrapper(ClassToWrap):
    """
    Takes a class 'ClassToWrap' and returns a new class that inherits from
    both 'ClassToWrap' and 'Agent'. The new class defines an '__init__' that
    calls the initializers of both 'Agent' and 'ClassToWrap'.

    Args:
        ClassToWrap (type): The class to be wrapped and made to inherit from 'Agent'.

    Returns:
        type: The new class that inherits from both 'ClassToWrap' and 'Agent'.
    """

    class WrappedClass(ClassToWrap, Agent):
        def __init__(self, *args, **kwargs):
            try:
                Agent.__init__(self, *args, **kwargs)
                ClassToWrap.__init__(self, *args, **kwargs)
            except Exception as e:
                print(f"Error initializing WrappedClass: {e}")
                raise e

    return WrappedClass
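A short usage sketch for `agent_wrapper`; the wrapped class below is hypothetical, and it assumes `Agent.__init__` tolerates the arguments passed through (here, none):

```python
# Hypothetical class to wrap; only agent_wrapper and Agent come from this diff
class MyChatbot:
    def __init__(self, *args, **kwargs):
        self.greeting = "hello"


# Produce a class that inherits from both MyChatbot and Agent
WrappedChatbot = agent_wrapper(MyChatbot)

# Agent.__init__ runs first, then MyChatbot.__init__
bot = WrappedChatbot()
print(isinstance(bot, Agent))  # True
```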

@@ -51,6 +51,7 @@ class Worker:
        tools: List[Any] = None,
        embedding_size: int = 1536,
        search_kwargs: dict = {"k": 8},
        verbose: bool = False,
        *args,
        **kwargs,
    ):
@@ -64,6 +65,7 @@ class Worker:
        self.tools = tools
        self.embedding_size = embedding_size
        self.search_kwargs = search_kwargs
        self.verbose = verbose

        self.setup_tools(external_tools)
        self.setup_memory()

@@ -29,9 +29,7 @@ from langchain.schema.language_model import BaseLanguageModel
from langchain.schema.output import GenerationChunk
from langchain.schema.prompt import PromptValue
from langchain.utils import (
    check_package_version,
    get_from_dict_or_env,
    get_pydantic_field_names,
)
from packaging.version import parse
from requests import HTTPError, Response

@@ -233,6 +233,7 @@ class Task:
                if task.description is not None
                else ""
            )
            result = (
                task.result if task.result is not None else ""
            )
