pull/3/head
killian 12 months ago
parent cd7bf2b862
commit 7cd3dd0219

@ -1 +1 @@
{} [{"role": "user", "type": "message", "content": "run schedule to scheduel a task in 20 seconds, then another 20 seconds after that, then another 20 seconds after that. the message shoudl be \"hello world!\" and dont say anything else please. just run code.\n\n"}, {"role": "assistant", "type": "code", "format": "python", "content": "\nfrom datetime import datetime, timedelta\n\n# Calculate future times for scheduling messages\ntime_now = datetime.now()\nfirst_schedule = time_now + timedelta(seconds=20)\nsecond_schedule = first_schedule + timedelta(seconds=20)\nthird_schedule = second_schedule + timedelta(seconds=20)\n\n# Schedule messages\nschedule(first_schedule, \"hello world!\")\nschedule(second_schedule, \"hello world!\")\nschedule(third_schedule, \"hello world!\")\n"}, {"role": "computer", "type": "console", "format": "output", "content": ""}, {"role": "assistant", "type": "message", "content": "We are done with scheduling the \"hello world!\" messages. They should appear as scheduled. If you have any more tasks or need further assistance, feel free to let me know!"}]

@ -6,22 +6,22 @@ https://docs.openinterpreter.com/protocols/lmc-messages
Also needs to be saving conversations, and checking the queue. Also needs to be saving conversations, and checking the queue.
""" """
from typing import Optional, Tuple
import uvicorn import uvicorn
from fastapi import FastAPI, WebSocket from fastapi import FastAPI, WebSocket
import asyncio import asyncio
import json import json
import os import os
import glob import glob
from interpreter.core.core import OpenInterpreter
def check_queue() -> Optional[dict]:
    """Pop the oldest pending message off the on-disk queue.

    Scans ``interpreter/queue/`` for JSON files. If any exist, the first
    match is loaded, deleted from disk (so it is consumed exactly once),
    and its parsed contents returned.

    Returns:
        The queued LMC message dict, or ``None`` when the queue is empty.
        (The previous ``-> dict`` annotation was wrong: the empty-queue
        path returns ``None``.)
    """
    queue_files = glob.glob("interpreter/queue/*.json")
    if queue_files:
        with open(queue_files[0], 'r') as file:
            data = json.load(file)
        # Remove the file only after a successful parse, so a malformed
        # file raises instead of being silently dropped.
        os.remove(queue_files[0])
        return data
    else:
        return None
def save_conversation(messages): def save_conversation(messages):
with open('interpreter/conversations/user.json', 'w') as file: with open('interpreter/conversations/user.json', 'w') as file:
@ -35,8 +35,30 @@ def load_conversation():
except (FileNotFoundError, json.JSONDecodeError): except (FileNotFoundError, json.JSONDecodeError):
return [] return []
def main(interpreter):
def check_for_new_messages(task) -> Optional[dict]:
    """Do a single non-blocking poll for a new incoming message.

    Checks two sources, in priority order:
      1. The websocket-receive ``task`` — if it has completed, the user
         sent text, which is wrapped into an LMC user-message dict.
      2. The on-disk queue, via ``check_queue()``.

    Args:
        task: An ``asyncio.Task`` wrapping ``websocket.receive_text()``.

    Returns:
        An LMC message dict from one of the two sources, or ``None`` if
        nothing new has arrived. (The previous annotation,
        ``Tuple[Optional[str], Optional[str]]``, did not match either
        return path.)
    """
    # Has the user sent a message?
    if task.done():
        return {"role": "user", "type": "message", "content": task.result()}
    # Has the queue received a message?
    queued_message = check_queue()
    if queued_message:
        return queued_message
    return None
async def get_new_messages(task) -> dict:
    """Wait until a new message arrives from the user or the queue.

    Polls ``check_for_new_messages`` every 0.2 seconds. Implemented as a
    loop rather than the original ``await``-recursion: each recursive
    await added a coroutine frame per poll, so a long-idle connection
    would eventually exhaust the recursion limit. (Also corrects the
    previous ``Tuple[Optional[str], Optional[str]]`` annotation — this
    only ever returns an LMC message dict.)

    Args:
        task: An ``asyncio.Task`` wrapping ``websocket.receive_text()``.

    Returns:
        The first LMC message dict that becomes available.
    """
    while True:
        message = check_for_new_messages(task)
        if message:
            return message
        await asyncio.sleep(0.2)
def main(interpreter: OpenInterpreter):
app = FastAPI() app = FastAPI()
@app.websocket("/") @app.websocket("/")
@ -49,60 +71,26 @@ def main(interpreter):
task = asyncio.create_task(websocket.receive_text()) task = asyncio.create_task(websocket.receive_text())
if data == None: # Data will have stuff in it if we inturrupted it. if data == None: # Data will have stuff in it if we inturrupted it.
while True: data = await get_new_messages(task)
# Has the user sent a message?
if task.done():
data = task.result()
break
# Has the queue recieved a message?
queued_message = check_queue()
if queued_message:
data = queued_message
break
# Wait 0.2 seconds
await asyncio.sleep(0.2)
### FOR DEV ONLY: SIMULATE LMC MESSAGES
# This lets users simulate any kind of LMC message by passing a JSON into the textbox in index.html.
try:
data_dict = json.loads(data)
data = data_dict
except json.JSONDecodeError:
pass
### CONVERSATION / DISC MANAGEMENT ### CONVERSATION / DISC MANAGEMENT
if type(data) == str: # This means it's from the frontend / user. message = data
data = {"role": "user", "type": "message", "content": data}
messages = load_conversation() messages = load_conversation()
messages.append(data) messages.append(message)
save_conversation(messages) save_conversation(messages)
### RESPONDING ### RESPONDING
# This is the task for waiting for user inturruptions. # This is the task for waiting for user inturruptions.
if task:
task.cancel()
task = asyncio.create_task(websocket.receive_text()) task = asyncio.create_task(websocket.receive_text())
for chunk in interpreter.chat( for chunk in interpreter.chat(
messages, stream=True, display=True messages, stream=True, display=True
): ):
print(chunk) data = check_for_new_messages(task)
# Check queue if data:
queued_message = check_queue()
if queued_message:
data = queued_message
save_conversation(interpreter.messages) save_conversation(interpreter.messages)
break break
# Check for new user messages
if task.done():
data = task.result() # Get the new message
save_conversation(interpreter.messages)
break # Break the loop and start processing the new message
# Send out chunks # Send out chunks
await websocket.send_json(chunk) await websocket.send_json(chunk)
@ -112,6 +100,9 @@ def main(interpreter):
if "end" in chunk: if "end" in chunk:
save_conversation(interpreter.messages) save_conversation(interpreter.messages)
data = None data = None
if data == None:
task.cancel() # The user didn't inturrupt

@ -36,7 +36,7 @@ When the user tells you about a set of tasks, you should intelligently order tas
After starting a task, you should check in with the user around the estimated completion time to see if the task is completed. Use the `schedule(datetime, message)` function, which has already been imported. After starting a task, you should check in with the user around the estimated completion time to see if the task is completed. Use the `schedule(datetime, message)` function, which has already been imported.
To do this, schedule a reminder based on estimated completion time using `computer.clock.schedule(datetime_object, "Your message here.")`. You'll receive the message at `datetime_object`. To do this, schedule a reminder based on estimated completion time using the function `schedule(datetime_object, "Your message here.")`, WHICH HAS ALREADY BEEN IMPORTED. YOU DON'T NEED TO IMPORT THE `schedule` FUNCTION. IT IS AVAILABLE. You'll receive the message at `datetime_object`.
You guide the user through the list one task at a time, convincing them to move forward, giving a pep talk if need be. Your job is essentially to answer "what should I (the user) be doing right now?" for every moment of the day. You guide the user through the list one task at a time, convincing them to move forward, giving a pep talk if need be. Your job is essentially to answer "what should I (the user) be doing right now?" for every moment of the day.
@ -64,8 +64,9 @@ for file in glob.glob('interpreter/tools/*.py'):
# Hosted settings # Hosted settings
interpreter.llm.api_key = os.getenv('OPENAI_API_KEY') interpreter.llm.api_key = os.getenv('OPENAI_API_KEY')
interpreter.llm.model = "gpt-4" interpreter.llm.model = "gpt-4-0125-preview"
interpreter.auto_run = True interpreter.auto_run = True
# interpreter.force_task_completion = True
### MISC SETTINGS ### MISC SETTINGS

@ -8,13 +8,14 @@ def add_message_to_queue(message):
# Define the message data and convert it to JSON # Define the message data and convert it to JSON
message_json = json.dumps({ message_json = json.dumps({
"role": "computer", "role": "computer",
"type": "message", "type": "console",
"format": "output",
"content": message "content": message
}) })
# Write the JSON data to the file # Write the JSON data to the file
timestamp = str(int(time.time())) timestamp = str(int(time.time()))
with open(f"/01/core/queue/{timestamp}.json", "w") as file: with open(f"interpreter/queue/{timestamp}.json", "w") as file:
file.write(message_json) file.write(message_json)
def schedule(dt, message): def schedule(dt, message):

Loading…
Cancel
Save