parent
08dff9e62b
commit
8e2b17e28a
@ -0,0 +1,25 @@
|
|||||||
|
import os
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
from swarms.models.revgpt import RevChatGPTModel
|
||||||
|
from swarms.workers.worker import Worker
|
||||||
|
|
||||||
|
# Load environment variables (REVGPT_* settings and ACCESS_TOKEN) from a
# local .env file into os.environ.
load_dotenv()

# RevChatGPT configuration assembled from the environment.
# Fix: the original wrapped os.getenv(...) directly in a list, which
# produced [None] when the variable was unset; the filtered form yields
# an empty list instead, so no None plugin id / domain is passed on.
config = {
    "model": os.getenv("REVGPT_MODEL"),
    "plugin_ids": [v for v in (os.getenv("REVGPT_PLUGIN_IDS"),) if v],
    "disable_history": os.getenv("REVGPT_DISABLE_HISTORY") == "True",
    "PUID": os.getenv("REVGPT_PUID"),
    "unverified_plugin_domains": [
        v for v in (os.getenv("REVGPT_UNVERIFIED_PLUGIN_DOMAINS"),) if v
    ],
}

# ACCESS_TOKEN authenticates the underlying RevChatGPT client; the rest of
# the config is forwarded as keyword options.
llm = RevChatGPTModel(access_token=os.getenv("ACCESS_TOKEN"), **config)

# Wire the model into a Worker agent.
worker = Worker(
    ai_name="Optimus Prime",
    llm=llm,
)

task = "What were the winning boston marathon times for the past 5 years (ending in 2022)? Generate a table of the year, name, country of origin, and times."

# Run the task through the worker and print the model's reply.
response = worker.run(task)
print(response)
|
@ -0,0 +1,24 @@
|
|||||||
|
import os
|
||||||
|
import revChatGPT
|
||||||
|
from revChatGPT.V1 import Chatbot as RevChatGPTV1
|
||||||
|
from revChatGPT.V3 import Chatbot as RevChatGPTV3
|
||||||
|
|
||||||
|
class RevChatGPTModel:
    """Thin wrapper around the unofficial revChatGPT clients.

    Chooses the V1 client (browser ``access_token``) or the V3 client
    (OpenAI ``api_key``) depending on which credential is supplied.
    """

    def __init__(self, access_token=None, api_key=None, **kwargs):
        """Create a chatbot backed by revChatGPT.

        Args:
            access_token: browser session token; selects the V1 client.
            api_key: OpenAI API key; selects the V3 client (used only
                when ``access_token`` is not given).
            **kwargs: extra options, retained on ``self.config``.
                NOTE(review): they are stored but never forwarded to the
                underlying client — confirm whether that is intended.

        Raises:
            ValueError: if neither credential is provided.
        """
        self.config = kwargs
        if access_token:
            self.chatbot = RevChatGPTV1(config={"access_token": access_token})
        elif api_key:
            self.chatbot = RevChatGPTV3(api_key=api_key)
        else:
            raise ValueError("Either access_token or api_key must be provided.")

    def run(self, task: str) -> str:
        """Send *task* to the chatbot and return the final reply text.

        ``chatbot.ask`` yields a stream of partial responses; each chunk's
        ``"message"`` overwrites the previous one, so the last chunk seen
        is returned as the complete answer.
        """
        response = ""
        for data in self.chatbot.ask(task):
            response = data["message"]
        return response

    def generate_summary(self, text: str) -> str:
        """Summarize *text* using RevChatGPT (not yet implemented).

        The original stub was ``pass``, silently returning ``None``
        despite the ``-> str`` annotation; fail loudly instead so callers
        cannot mistake the stub for a working implementation.
        """
        raise NotImplementedError(
            "generate_summary is not implemented yet."
        )
|
Loading…
Reference in new issue