commit 757b14839a
parent 2b07a5b875
Author: Kye (1 year ago)

    group-chat

    Former-commit-id: e9b4c953d4

@@ -0,0 +1,27 @@
+class AbsractAgent:
+    def __init__(
+        self,
+        llm,
+        temperature
+    ) -> None:
+        pass
+
+    # single query
+    def run(self, task: str):
+        pass
+
+    # # conversational back and forth
+    # def chat(self, message: str):
+    #     message_historys = []
+    #     message_historys.append(message)
+    #     reply = self.run(message)
+    #     message_historys.append(reply)
+    #     return message_historys
+
+    # def step(self, message):
+    #     pass
+
+    # def reset(self):
+    #     pass
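For illustration only, a concrete agent could fill in this interface as follows. This is a minimal sketch, assuming the AbsractAgent class above is in scope; the EchoAgent name and the idea that llm is a plain callable are assumptions, not part of the commit.

class EchoAgent(AbsractAgent):
    def __init__(self, llm, temperature=0.7) -> None:
        self.llm = llm                  # assumed: any callable mapping a prompt string to a reply string
        self.temperature = temperature

    def run(self, task: str):
        # single query: delegate straight to the underlying model
        return self.llm(task)

# usage sketch
agent = EchoAgent(llm=lambda prompt: prompt.upper())
print(agent.run("hello world"))   # -> "HELLO WORLD"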

@@ -103,6 +103,7 @@ class OmniModalAgent:
         self.chat_planner = load_chat_planner(llm)
         self.response_generator = load_response_generator(llm)
         # self.task_executor = TaskExecutor
+        self.history = []

     def run(
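The commit only initialises self.history; how it is filled is not shown in the diff. A purely hypothetical sketch of how such a list could record a back-and-forth exchange (the HistoryDemo class and remember method are made up):

class HistoryDemo:
    def __init__(self):
        self.history = []

    def remember(self, role: str, content: str):
        # store each turn as a role/content pair, chat-message style
        self.history.append({"role": role, "content": content})

demo = HistoryDemo()
demo.remember("user", "Describe this image")
demo.remember("assistant", "A cat sleeping on a sofa.")
print(demo.history)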

@@ -3,7 +3,7 @@ from abc import ABC, abstractmethod

 class AbstractModel(ABC):
     # abstract base class for language models
     @abstractmethod
-    def generate(self, prompt):
+    def run(self, prompt):
         # generate text using language model
         pass
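After the rename, subclasses implement run rather than generate. A minimal sketch, assuming the AbstractModel class above is importable; EchoModel is a made-up stand-in, not a real model wrapper:

class EchoModel(AbstractModel):
    def run(self, prompt):
        # trivial stand-in for a real language-model call
        return f"echo: {prompt}"

model = EchoModel()
print(model.run("hello"))   # -> "echo: hello"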

@@ -132,7 +132,9 @@ import interpreter

 @tool
 def compile(task: str):
     """
-    Open Interpreter lets LLMs run code (Python, Javascript, Shell, and more) locally. You can chat with Open Interpreter through a ChatGPT-like interface in your terminal by running $ interpreter after installing.
+    Open Interpreter lets LLMs run code (Python, Javascript, Shell, and more) locally.
+    You can chat with Open Interpreter through a ChatGPT-like interface in your terminal
+    by running $ interpreter after installing.

     This provides a natural-language interface to your computer's general-purpose capabilities:
@@ -142,7 +144,6 @@ def compile(task: str):
     ...etc.
     Note: You'll be asked to approve code before it's run.
     """
     task = interpreter.chat(task, return_messages=True)
     interpreter.chat()
     interpreter.reset(task)
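Because compile is wrapped by LangChain's @tool decorator, it becomes a Tool object rather than a plain function. A hedged usage sketch; the exact invocation surface depends on the installed LangChain version, and Open Interpreter will still ask for approval before running code:

# calling the tool directly; an agent would normally invoke it on your behalf
result = compile.run("print the first ten square numbers")
print(result)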
@@ -156,7 +157,6 @@ def compile(task: str):

 # mm model workers
 import torch
 from PIL import Image
 from transformers import (

@@ -39,7 +39,6 @@ class SpeechToText:
         subprocess.run(["pip", "install", "pydub"])

     def download_youtube_video(self):
         audio_file = f'video.{self.audio_format}'

@@ -142,7 +142,7 @@ class Worker:
             query_website_tool,
             HumanInputRun(),
             compile,
-            # VQAinference
+            # VQAinference,
         ]
         if external_tools is not None:
             self.tools.extend(external_tools)
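The external_tools branch is an extension point for caller-supplied LangChain tools. A sketch of building one such tool; word_count is hypothetical, and the only thing taken from the hunk above is that Worker extends self.tools with whatever list it is given:

from langchain.agents import tool

@tool
def word_count(text: str) -> str:
    """Count the words in a piece of text."""
    return str(len(text.split()))

external_tools = [word_count]   # passed to Worker, which calls self.tools.extend(external_tools)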
@@ -311,6 +311,3 @@ class Worker:
         """
         for token in response.split():
             yield token
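The generator above "streams" by splitting the finished response on whitespace. A standalone sketch of the same pattern and how a caller would consume it; stream_tokens is not part of the codebase:

def stream_tokens(response: str):
    # naive token stream: whitespace-split words yielded one at a time
    for token in response.split():
        yield token

for token in stream_tokens("The quick brown fox"):
    print(token, end=" ")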
