feat: Initial tool flow integration

pull/85/head
Zack 2 years ago
parent cc3e48b800
commit 199c332b28

@@ -1,5 +1,6 @@
 from swarms.models import OpenAIChat
 from swarms.structs import Flow
+from swarms.tools.interpreter_tool import compile

 api_key = ""
@@ -11,21 +12,21 @@ llm = OpenAIChat(
 )

 # Initialize the flow
-flow = Flow(llm=llm, max_loops=5, dashboard=True,)
+flow = Flow(llm=llm, max_loops=5, dashboard=True, tools=[compile])
-flow = Flow(
-    llm=llm,
-    max_loops=5,
-    dashboard=True,
-    # stopping_condition=None,  # You can define a stopping condition as needed.
-    # loop_interval=1,
-    # retry_attempts=3,
-    # retry_interval=1,
-    # interactive=False,  # Set to 'True' for interactive mode.
-    # dynamic_temperature=False,  # Set to 'True' for dynamic temperature handling.
-)
+# flow = Flow(
+#     llm=llm,
+#     max_loops=5,
+#     dashboard=True,
+#     # stopping_condition=None,  # You can define a stopping condition as needed.
+#     # loop_interval=1,
+#     # retry_attempts=3,
+#     # retry_interval=1,
+#     # interactive=False,  # Set to 'True' for interactive mode.
+#     # dynamic_temperature=False,  # Set to 'True' for dynamic temperature handling.
+# )

-out = flow.run("Generate a 10,000 word blog on health and wellness.")
+out = flow.run("Use your open interpreter tool to print hello world to the terminal")
 print(out)
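Any callable decorated with the repo's @tool decorator should slot into tools=[...] the same way compile does. A rough usage sketch, reusing the imports above; the word_count tool here is hypothetical, not part of the commit:

from swarms.models import OpenAIChat
from swarms.structs import Flow
from swarms.tools import tool

@tool
def word_count(text: str):
    """Count the words in a piece of text."""
    return str(len(text.split()))

llm = OpenAIChat(openai_api_key=api_key)
flow = Flow(llm=llm, max_loops=1, dashboard=False, tools=[word_count])
out = flow.run("Use your word count tool on the sentence: hello world")
print(out)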

@@ -8,7 +8,6 @@ TODO:
 import json
 import logging
 import re
-import time
 from typing import Any, Callable, Dict, List, Optional, Tuple, Generator
 from termcolor import colored
@@ -16,8 +15,6 @@ import inspect
 import random
-from swarms.tools.tool import BaseTool
-from swarms.models.openai_models import OpenAIChat

 # Constants
 FLOW_SYSTEM_PROMPT = """
@@ -97,8 +94,8 @@ class Flow:
     def __init__(
         self,
-        llm: Any,
-        # template: str,
+        llm: Any,
         max_loops: int = 5,
         stopping_condition: Optional[Callable[[str], bool]] = None,
         loop_interval: int = 1,
@@ -111,11 +108,6 @@ class Flow:
         **kwargs: Any,
     ):
         # self.template = template
-        self.processors = {
-            'text': self.process_text,
-            'image': self.process_image,
-            'audio': self.process_audio,
-        }
         self.llm = llm
         self.max_loops = max_loops
         self.stopping_condition = stopping_condition
@@ -129,45 +121,22 @@ class Flow:
         self.interactive = interactive
         self.dashboard = dashboard
         self.dynamic_temperature = dynamic_temperature
-        self.tools = tools
+        self.tools = tools or []

-    def __call__(self, task, **kwargs):
-        """Invoke the flow by providing a template and its variables."""
-        subtasks = self.break_down_task(task)
-        responses = []
-        for subtask in subtasks:
-            mode = self.determine_mode(subtask)
-            processor = self.processors.get(mode)
-            if processor:
-                refined_prompt = self.text_model(f"Define the task '{subtask}' as it relates to the original task '{task}'.")
-                response = processor(refined_prompt, task)
-                responses.append(response)
-            else:
-                raise ValueError(f'Invalid mode: {mode}')
-        return responses
-
-    def break_down_task(self, task):
-        # Break down the task into subtasks
-        subtasks = re.split(r' with | and ', task)
-        return subtasks
-
-    def determine_mode(self, subtask):
-        result = self.classifier(subtask, candidate_labels=['text', 'image', 'audio', 'video'])
-        return result['labels'][0]
-
-    def process_image(self, image_description):
-        response = self.image_model(image_description)
-        return response
-
-    def process_audio(self, audio_description):
-        response = self.audio_model(audio_description)
-        return response
-
-    def process_video(self, video_description):
-        response = self.video_model(video_description)
-        return response
-        return "Video generated from description: " + video_description
+    def run(self, task: str, **kwargs):
+        response = ""  # history starts empty on the first loop
+        for i in range(self.max_loops):
+            for tool in self.tools:
+                tool_prompt = f"\n\nTool: {tool.__name__}\n{tool.__doc__}"
+                response = self.llm(
+                    f"""
+                    {FLOW_SYSTEM_PROMPT}
+                    {tool_prompt}
+
+                    Task: {task}
+                    History: {response}
+                    """, **kwargs
+                )
+        return response

     def provide_feedback(self, feedback: str) -> None:
         """Allow users to provide feedback on the responses."""
         self.feedback.append(feedback)
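Worth noting for reviewers: the new run loop exposes tools to the model purely as text, pasting each function's __name__ and __doc__ into the system prompt. A standalone sketch of that assembly (the helper names below are illustrative, not part of the commit):

def build_tool_prompt(tools) -> str:
    # Mirror the f-string in Flow.run: expose each tool as "name + docstring".
    return "".join(f"\n\nTool: {t.__name__}\n{t.__doc__}" for t in tools)

def greet(name: str):
    """Print a greeting to the terminal."""
    print(f"hello {name}")

print(build_tool_prompt([greet]))
# -> Tool: greet
#    Print a greeting to the terminal.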
@@ -179,6 +148,11 @@ class Flow:
             return self.stopping_condition(response)
         return False

+    def __call__(self, prompt, **kwargs) -> str:
+        """Invoke the flow by providing a template and its variables."""
+        response = self.llm(prompt, **kwargs)
+        return response
+
     def dynamic_temperature(self):
         """
         1. Check the self.llm object for the temperature

@@ -1,7 +1,9 @@
 import os
+from swarms.tools import tool
 import interpreter

+@tool
 def compile(task: str):
     """
     Open Interpreter lets LLMs run code (Python, JavaScript, Shell, and more) locally. You can chat with Open Interpreter through a ChatGPT-like interface in your terminal by running $ interpreter after installing.
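The rest of the compile body is cut off in this view. A minimal sketch of how such a wrapper might finish, assuming the open-interpreter 0.1.x module-level API (interpreter.chat, interpreter.auto_run, interpreter.reset); this body is a guess, not the committed code:

import interpreter

def compile(task: str):
    """Run a task through Open Interpreter and return the conversation."""
    interpreter.auto_run = True      # assumption: skip per-command confirmation
    result = interpreter.chat(task)  # chat() accepts a plain string message
    interpreter.reset()              # clear interpreter state between calls
    return result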
