pull/1/head
killian 12 months ago
parent f07bed8954
commit 7512783b5c

@@ -2,13 +2,15 @@
 Responsible for taking an interpreter, then serving it at "/" as a POST SSE endpoint, accepting and streaming LMC Messages.
 https://docs.openinterpreter.com/protocols/lmc-messages
+Also needs to be saving conversations, and checking the queue.
 """
 from typing import Generator
 import uvicorn
 from fastapi import FastAPI, Request, Response
-def serve(interpreter):
+def main(interpreter):
     app = FastAPI()
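For readers skimming the diff, here is a minimal sketch of the kind of POST SSE endpoint that docstring describes. It assumes `interpreter.chat(...)` can be called as a generator that yields LMC-style message chunks, and it picks port 8000 arbitrarily; both details are assumptions for illustration, not taken from this commit.

```python
# Hypothetical sketch of the "/" endpoint described in the docstring above.
# Assumes interpreter.chat(message, stream=True, display=False) yields LMC-style dict chunks.
import json

import uvicorn
from fastapi import FastAPI, Request
from fastapi.responses import StreamingResponse


def main(interpreter):
    app = FastAPI()

    @app.post("/")
    async def chat(request: Request):
        body = await request.json()  # an incoming LMC message, e.g. {"role": "user", ...}

        def event_stream():
            # Re-emit each streamed chunk as a server-sent event.
            for chunk in interpreter.chat(body, stream=True, display=False):
                yield f"data: {json.dumps(chunk)}\n\n"

        return StreamingResponse(event_stream(), media_type="text/event-stream")

    uvicorn.run(app, host="0.0.0.0", port=8000)
```

Saving conversations and checking the queue, mentioned in the added docstring line, would hook into `event_stream` but are left out of this sketch.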

@ -1,8 +1,8 @@
""" """
Responsible for configuring an interpreter, then using server.py to serve it at "/". Responsible for configuring an interpreter, then using main.py to serve it at "/".
""" """
from .server import serve from .main import main
from interpreter import interpreter from interpreter import interpreter
@@ -58,6 +58,6 @@ interpreter.offline = True
 interpreter.id = 206 # Used to identify itself to other interpreters. This should be changed programmatically so it's unique.
-### START SERVER
-serve(interpreter)
+### SERVE INTERPRETER AT "/"
+main(interpreter)

@@ -2,6 +2,8 @@
 Responsible for setting up the language model, downloading it if necessary.
 Ideally should pick the best LLM for the hardware.
+Should this be a shell script?
 """
 import os
@@ -11,7 +13,7 @@ import subprocess
 ### LLM SETUP
 # Define the path to the models directory
-models_dir = "01/core/models/"
+models_dir = "models/"
 # Check and create the models directory if it doesn't exist
 if not os.path.exists(models_dir):
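As a rough sketch of where that directory check typically leads, the snippet below creates `models/` and fetches a model file into it only when it is missing. The model URL and filename are placeholders, not taken from this diff.

```python
# Hypothetical continuation of the setup above: ensure the models directory
# exists, then download a model file into it if it is not already present.
import os
import urllib.request

models_dir = "models/"
model_url = "https://example.com/path/to/model.gguf"  # placeholder URL
model_path = os.path.join(models_dir, "model.gguf")   # placeholder filename

# Check and create the models directory if it doesn't exist
if not os.path.exists(models_dir):
    os.makedirs(models_dir)

# Download the model only when it is missing.
if not os.path.exists(model_path):
    urllib.request.urlretrieve(model_url, model_path)
```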

@@ -0,0 +1,15 @@
+[tool.poetry]
+name = "01-core"
+version = "0.0.1"
+description = "The python at the heart of the 01."
+authors = ["Open Interpreter <killian@openinterpreter.com>"]
+license = "AGPL"
+readme = "README.md"
+
+[tool.poetry.dependencies]
+python = "^3.11"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
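Since the package targets Python 3.11, the standard-library `tomllib` module can read this file back; a small sanity-check sketch follows, checking only the fields visible above.

```python
# Quick check that the new pyproject.toml parses and carries the expected
# Poetry metadata. tomllib ships with Python 3.11+.
import tomllib

with open("pyproject.toml", "rb") as f:
    pyproject = tomllib.load(f)

poetry = pyproject["tool"]["poetry"]
print(poetry["name"], poetry["version"])           # 01-core 0.0.1
print(pyproject["build-system"]["build-backend"])  # poetry.core.masonry.api
```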

@@ -0,0 +1,7 @@
+### START THE LANGUAGE MODEL
+python llm/start.py
+
+### START THE INTERPRETER
+python interpreter/start.py
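The "Should this be a shell script?" note added to llm/start.py hints this orchestration may eventually live in Python. A rough Python sketch of the same two steps is below; stopping the LLM process when the interpreter exits is an assumption added for tidiness, not something the script above does.

```python
# Hypothetical Python version of start.sh: launch the language model,
# then the interpreter, and stop the LLM when the interpreter exits.
import subprocess

llm = subprocess.Popen(["python", "llm/start.py"])
core = subprocess.Popen(["python", "interpreter/start.py"])

try:
    core.wait()
finally:
    llm.terminate()
    llm.wait()
```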

@@ -1,5 +1,11 @@
+### APP
 # Display app/index.html on the second monitor in full-screen mode
+google-chrome --kiosk --app=file:///app/index.html
-# Setup the language model
-# Setup and serve the interpreter at "/"
+### CORE
+cd /core
+poetry install
+poetry run bash start.sh

@@ -14,6 +14,10 @@ This folder contains everything we want to change from the base Ubuntu. A folder
 I imagine we'll use something like Cubic to then press this + Ubuntu into an ISO image.
+
+# Setup & Usage
+
+Clone this repo, then run `OS/01/start.sh`.
 # Structure
 ### `start.sh`
