parent 4097a9b703
commit 9ef126e8cf
@@ -1,67 +0,0 @@
from swarms.models import Anthropic
from swarms.structs import Agent
from swarms.tools.tool import tool

import asyncio

llm = Anthropic(
    anthropic_api_key="",
)


async def async_load_playwright(url: str) -> str:
    """Load the specified URLs using Playwright and parse using BeautifulSoup."""
    from bs4 import BeautifulSoup
    from playwright.async_api import async_playwright

    results = ""
    async with async_playwright() as p:
        browser = await p.chromium.launch(headless=True)
        try:
            page = await browser.new_page()
            await page.goto(url)

            page_source = await page.content()
            soup = BeautifulSoup(page_source, "html.parser")

            for script in soup(["script", "style"]):
                script.extract()

            text = soup.get_text()
            lines = (line.strip() for line in text.splitlines())
            chunks = (
                phrase.strip()
                for line in lines
                for phrase in line.split(" ")
            )
            results = "\n".join(chunk for chunk in chunks if chunk)
        except Exception as e:
            results = f"Error: {e}"
        await browser.close()
    return results


def run_async(coro):
    event_loop = asyncio.get_event_loop()
    return event_loop.run_until_complete(coro)


@tool
def browse_web_page(url: str) -> str:
    """Verbose way to scrape a whole webpage. Likely to cause issues parsing."""
    return run_async(async_load_playwright(url))


## Initialize the workflow
agent = Agent(
    llm=llm,
    max_loops=5,
    tools=[browse_web_page],
    dashboard=True,
)

out = agent.run(
    "Generate a 10,000 word blog on mental clarity and the benefits"
    " of meditation."
)
@@ -0,0 +1,19 @@
from swarms.tools.tool import tool
from swarms.tools.tool_func_doc_scraper import scrape_tool_func_docs


@tool
def search_api(query: str) -> str:
    """Search API

    Args:
        query (str): _description_

    Returns:
        str: _description_
    """
    print(f"Searching API for {query}")


tool_docs = scrape_tool_func_docs(search_api)
print(tool_docs)
@@ -0,0 +1,40 @@
import os

from swarms.models import OpenAIChat
from swarms.structs import Agent
from swarms.tools.tool import tool

from dotenv import load_dotenv

load_dotenv()

api_key = os.environ.get("OPENAI_API_KEY")


llm = OpenAIChat(
    api_key=api_key
)

# @tool
# def search_api(query: str) -> str:
#     """Search API
#
#     Args:
#         query (str): _description_
#
#     Returns:
#         str: _description_
#     """
#     print(f"Searching API for {query}")


## Initialize the workflow
agent = Agent(
    llm=llm,
    max_loops=5,
    # tools=[search_api],
    dashboard=True,
)

out = agent.run(
    "Use the search api to find the best restaurants in New York City."
)
print(out)
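For comparison, here is a minimal sketch of the same script with the commented-out tool switched on. It assumes the Agent accepts a tools list of @tool-decorated callables exactly as in the removed Anthropic example above (tools=[browse_web_page]); the search_api body below just echoes the query so the snippet stays self-contained, and is not a real search implementation.

import os

from dotenv import load_dotenv

from swarms.models import OpenAIChat
from swarms.structs import Agent
from swarms.tools.tool import tool

load_dotenv()


@tool
def search_api(query: str) -> str:
    """Placeholder search tool that simply echoes the query."""
    return f"Search results for: {query}"


llm = OpenAIChat(api_key=os.environ.get("OPENAI_API_KEY"))

agent = Agent(
    llm=llm,
    max_loops=5,
    tools=[search_api],  # tool list enabled instead of commented out
    dashboard=True,
)

out = agent.run("Use the search api to find the best restaurants in New York City.")
print(out)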
@@ -0,0 +1,47 @@
import re
import json


# These helpers are written as Agent methods (note the `self` parameter),
# even though they sit at module level in this file.
def extract_tool_commands(self, text: str):
    """
    Extract the tool commands from the text

    Example:
    ```json
    {
        "tool": "tool_name",
        "params": {
            "tool1": "inputs",
            "param2": "value2"
        }
    }
    ```

    """
    # Regex to find JSON-like strings fenced as ```json ... ```
    pattern = r"```json(.+?)```"
    matches = re.findall(pattern, text, re.DOTALL)
    json_commands = []
    for match in matches:
        try:
            # Parse each match and collect it in the list
            json_command = json.loads(match)
            json_commands.append(json_command)
        except Exception as error:
            print(f"Error parsing JSON command: {error}")
    return json_commands


def parse_and_execute_tools(self, response: str):
    """Parse and execute the tools"""
    json_commands = self.extract_tool_commands(response)
    for command in json_commands:
        tool_name = command.get("tool")
        params = command.get("params", {})
        self.execute_tools(tool_name, params)


def execute_tools(self, tool_name, params):
    """Execute the tool with the provided params"""
    tool = self.tool_find_by_name(tool_name)
    if tool:
        # Execute the tool with the provided parameters
        tool_result = tool.run(**params)
        print(tool_result)
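To make the parsing step concrete, here is a minimal sketch of exercising extract_tool_commands on a hand-written model response. The response text and the search_api tool name are made up for illustration, and self is passed as None only because the helper never touches it.

response = (
    "Sure, I'll call the tool now:\n"
    "```json\n"
    '{"tool": "search_api", "params": {"query": "best restaurants in NYC"}}\n'
    "```"
)

# `self` is unused inside extract_tool_commands, so None stands in for it here.
commands = extract_tool_commands(None, response)
print(commands)
# [{'tool': 'search_api', 'params': {'query': 'best restaurants in NYC'}}]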