pull/3/head
killian 12 months ago
parent cd7bf2b862
commit 7cd3dd0219

@ -1 +1 @@
{}
[{"role": "user", "type": "message", "content": "run schedule to scheduel a task in 20 seconds, then another 20 seconds after that, then another 20 seconds after that. the message shoudl be \"hello world!\" and dont say anything else please. just run code.\n\n"}, {"role": "assistant", "type": "code", "format": "python", "content": "\nfrom datetime import datetime, timedelta\n\n# Calculate future times for scheduling messages\ntime_now = datetime.now()\nfirst_schedule = time_now + timedelta(seconds=20)\nsecond_schedule = first_schedule + timedelta(seconds=20)\nthird_schedule = second_schedule + timedelta(seconds=20)\n\n# Schedule messages\nschedule(first_schedule, \"hello world!\")\nschedule(second_schedule, \"hello world!\")\nschedule(third_schedule, \"hello world!\")\n"}, {"role": "computer", "type": "console", "format": "output", "content": ""}, {"role": "assistant", "type": "message", "content": "We are done with scheduling the \"hello world!\" messages. They should appear as scheduled. If you have any more tasks or need further assistance, feel free to let me know!"}]

@ -6,22 +6,22 @@ https://docs.openinterpreter.com/protocols/lmc-messages
Also needs to be saving conversations, and checking the queue.
"""
from typing import Optional, Tuple
import uvicorn
from fastapi import FastAPI, WebSocket
import asyncio
import json
import os
import glob
from interpreter.core.core import OpenInterpreter
def check_queue() -> Optional[dict]:
    """Pop the oldest queued LMC message from disk, or return None.

    Scans interpreter/queue/ for JSON files, loads the first match,
    deletes the file (so each message is delivered exactly once), and
    returns the parsed message dict. Returns None when the queue is
    empty, so the annotation is Optional[dict], not dict.
    """
    queue_files = glob.glob("interpreter/queue/*.json")
    if not queue_files:
        return None
    # Read, then remove: deletion marks the message as consumed.
    with open(queue_files[0], 'r') as file:
        data = json.load(file)
    os.remove(queue_files[0])
    return data
def save_conversation(messages):
with open('interpreter/conversations/user.json', 'w') as file:
@ -35,8 +35,30 @@ def load_conversation():
except (FileNotFoundError, json.JSONDecodeError):
return []
def main(interpreter):
def check_for_new_messages(task) -> Optional[dict]:
    """Return a newly arrived LMC message dict, or None if there is none.

    Checks two sources in priority order:
    1. ``task`` — the pending websocket-receive task; if it has
       completed, its text result is wrapped as a user message.
    2. The on-disk queue, via ``check_queue()``.

    Note: the original annotation ``Tuple[Optional[str], Optional[str]]``
    was wrong — both branches return a message dict.
    """
    # Has the user sent a message?
    if task.done():
        return {"role": "user", "type": "message", "content": task.result()}
    # Has the queue received a message?
    queued_message = check_queue()
    if queued_message:
        return queued_message
    return None
async def get_new_messages(task) -> dict:
    """Wait until a new message is available and return it.

    Polls ``check_for_new_messages`` every 0.2 seconds. Implemented as
    a loop rather than the original tail recursion, so an arbitrarily
    long wait cannot grow the call stack without bound. (The original
    ``Tuple[Optional[str], Optional[str]]`` annotation was wrong — this
    only ever returns a message dict.)
    """
    while True:
        message = check_for_new_messages(task)
        if message:
            return message
        # Nothing yet — yield to the event loop before polling again.
        await asyncio.sleep(0.2)
def main(interpreter: OpenInterpreter):
app = FastAPI()
@app.websocket("/")
@ -49,61 +71,27 @@ def main(interpreter):
task = asyncio.create_task(websocket.receive_text())
if data == None: # Data will have stuff in it if we inturrupted it.
while True:
# Has the user sent a message?
if task.done():
data = task.result()
break
# Has the queue recieved a message?
queued_message = check_queue()
if queued_message:
data = queued_message
break
# Wait 0.2 seconds
await asyncio.sleep(0.2)
### FOR DEV ONLY: SIMULATE LMC MESSAGES
# This lets users simulate any kind of LMC message by passing a JSON into the textbox in index.html.
try:
data_dict = json.loads(data)
data = data_dict
except json.JSONDecodeError:
pass
data = await get_new_messages(task)
### CONVERSATION / DISC MANAGEMENT
if type(data) == str: # This means it's from the frontend / user.
data = {"role": "user", "type": "message", "content": data}
message = data
messages = load_conversation()
messages.append(data)
messages.append(message)
save_conversation(messages)
### RESPONDING
# This is the task for waiting for user inturruptions.
if task:
task.cancel()
task = asyncio.create_task(websocket.receive_text())
for chunk in interpreter.chat(
messages, stream=True, display=True
):
print(chunk)
# Check queue
queued_message = check_queue()
if queued_message:
data = queued_message
data = check_for_new_messages(task)
if data:
save_conversation(interpreter.messages)
break
# Check for new user messages
if task.done():
data = task.result() # Get the new message
save_conversation(interpreter.messages)
break # Break the loop and start processing the new message
# Send out chunks
await websocket.send_json(chunk)
await asyncio.sleep(0.01) # Add a small delay
@ -113,6 +101,9 @@ def main(interpreter):
save_conversation(interpreter.messages)
data = None
if data == None:
task.cancel() # The user didn't inturrupt
uvicorn.run(app, host="0.0.0.0", port=8000)

@ -36,7 +36,7 @@ When the user tells you about a set of tasks, you should intelligently order tas
After starting a task, you should check in with the user around the estimated completion time to see if the task is completed. Use the `schedule(datetime, message)` function, which has already been imported.
To do this, schedule a reminder based on estimated completion time using `computer.clock.schedule(datetime_object, "Your message here.")`. You'll receive the message at `datetime_object`.
To do this, schedule a reminder based on estimated completion time using the function `schedule(datetime_object, "Your message here.")`, WHICH HAS ALREADY BEEN IMPORTED. YOU DON'T NEED TO IMPORT THE `schedule` FUNCTION. IT IS AVAILABLE. You'll receive the message at `datetime_object`.
You guide the user through the list one task at a time, convincing them to move forward, giving a pep talk if need be. Your job is essentially to answer "what should I (the user) be doing right now?" for every moment of the day.
@ -64,8 +64,9 @@ for file in glob.glob('interpreter/tools/*.py'):
# Hosted settings
interpreter.llm.api_key = os.getenv('OPENAI_API_KEY')
# Pin a specific model snapshot for reproducible behavior.
# (A stale duplicate assignment to "gpt-4" was removed — it was
# immediately overwritten by this line and had no effect.)
interpreter.llm.model = "gpt-4-0125-preview"
# Execute generated code without asking the user for confirmation.
interpreter.auto_run = True
# interpreter.force_task_completion = True

### MISC SETTINGS

def add_message_to_queue(message):
    """Write *message* to the on-disk queue as an LMC console-output entry.

    The queue is a directory of timestamped JSON files; the server's
    ``check_queue()`` picks them up (and deletes them) on its next poll.
    Stale pre-rename lines from the diff (``"type": "message"`` and the
    ``/01/core/queue/`` path) are removed — only the current LMC
    console/output format and the ``interpreter/queue/`` path remain.
    """
    # Define the message data and convert it to JSON
    message_json = json.dumps({
        "role": "computer",
        "type": "console",
        "format": "output",
        "content": message,
    })
    # Timestamped filename keeps queued messages roughly ordered and unique.
    timestamp = str(int(time.time()))
    # Write the JSON data to the file
    with open(f"interpreter/queue/{timestamp}.json", "w") as file:
        file.write(message_json)
def schedule(dt, message):

Loading…
Cancel
Save