commit d374da01b5 (pull/49/head), parent 5be2b40e62
killian, 11 months ago

@@ -43,7 +43,7 @@ def configure_interpreter(interpreter: OpenInterpreter):
### SKILLS
try:
-interpreter.computer.skills.skills_dir = Path(__file__).parent / 'skills'
+interpreter.computer.skills.path = Path(os.getenv('OI_SKILLS_PATH'))
interpreter.computer.skills.import_skills()
except:
print("Temporarily skipping skills (OI 0.2.1, which is unreleased) so we can push to `pip`.")

@@ -0,0 +1,5 @@
+import sys
+def explore_functions():
+    """None"""
+    import sys

@@ -1,9 +0,0 @@
-def openSafari():
-    """open safari"""
-    import os
-    os.system('open -a Safari')
-    import os
-    os.system('osascript -e \'tell application "Safari" to open location "https://www.youtube.com"\'')

@@ -1,6 +0,0 @@
-import threading
-import time
-def print_message():
-    """None"""
-    time.sleep(30)
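The two deleted files above are recorded skills: small standalone Python files, each a function with a short docstring, kept in the skills directory and picked up by interpreter.computer.skills.search. A hypothetical skill of the same shape, with the name and behavior invented purely for illustration:

```python
def open_github():
    """open github in the default browser"""
    import webbrowser
    webbrowser.open("https://github.com")
```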

@@ -76,6 +76,7 @@ Try multiple methods before saying the task is impossible. **You can do it!**
import sys
import os
import json
original_stdout = sys.stdout
sys.stdout = open(os.devnull, 'w')
@@ -118,31 +119,29 @@ finally:
# SKILLS
-Prefer to use the following functions (assume they're imported) to complete your goals whenever possible:
+You may use the following functions (assume they're imported) to complete your goals whenever possible:
{{
import sys
import os
import json
original_stdout = sys.stdout
sys.stdout = open(os.devnull, 'w')
original_stderr = sys.stderr
sys.stderr = open(os.devnull, 'w')
from interpreter import interpreter
from pathlib import Path
try:
    from interpreter import interpreter
    from pathlib import Path
    combined_messages = "\\n".join(json.dumps(x) for x in messages[-5:])
    query_msg = interpreter.chat(f"This is the conversation so far: {combined_messages}. What is a <10 words query that could be used to find functions that would help answer the user's question?")
    query = query_msg[0]['content']
    skills_path = Path().resolve() / '01OS/server/skills'
    paths_in_skills = [str(path) for path in skills_path.glob('**/*.py')]
    skills = interpreter.computer.skills.search(query, paths=paths_in_skills)
    lowercase_skills = [skill[0].lower() + skill[1:] for skill in skills]
    output = "\\n".join(lowercase_skills)
finally:
    sys.stdout = original_stdout
    sys.stderr = original_stderr
interpreter.model = "gpt-3.5"
combined_messages = "\\n".join(json.dumps(x) for x in messages[-3:])
query_msg = interpreter.chat(f"This is the conversation so far: {combined_messages}. What is a <10 words query that could be used to find functions that would help answer the user's question?")
query = query_msg[0]['content']
skills_path = Path().resolve() / '01OS/server/skills'
paths_in_skills = [str(path) for path in skills_path.glob('**/*.py')]
skills = interpreter.computer.skills.search(query)
lowercase_skills = [skill[0].lower() + skill[1:] for skill in skills]
output = "\\n".join(lowercase_skills)
# VERY HACKY! We should fix this, we hard code it for noisy code^:
print("IGNORE_ALL_ABOVE_THIS_LINE")
print(output)
}}
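The block above silences stdout and stderr while the skills search runs and restores both streams in a finally clause. The same redirect-and-restore idiom can be packaged as a reusable context manager; a minimal sketch, not part of this commit:

```python
import os
import sys
from contextlib import contextmanager

@contextmanager
def silenced_output():
    """Temporarily route stdout/stderr to os.devnull, restoring them on exit."""
    original_stdout, original_stderr = sys.stdout, sys.stderr
    devnull = open(os.devnull, "w")
    sys.stdout, sys.stderr = devnull, devnull
    try:
        yield
    finally:
        sys.stdout, sys.stderr = original_stdout, original_stderr
        devnull.close()
```

The standard library's contextlib.redirect_stdout and contextlib.redirect_stderr cover the same ground for a single stream each; the hand-rolled version simply mirrors the inline code in the prompt above.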
@@ -168,37 +167,3 @@ For example:
ALWAYS REMEMBER: You are running on a device called the O1, where the interface is entirely speech-based. Make your responses to the user **VERY short.**
""".strip()
-test_system_message = """Just return the following to the user:
-{{
-import sys
-import os
-original_stdout = sys.stdout
-sys.stdout = open(os.devnull, 'w')
-original_stderr = sys.stderr
-sys.stderr = open(os.devnull, 'w')
-try:
-    from interpreter import interpreter
-    from pathlib import Path
-    interpreter.model = "gpt-3.5"
-    combined_messages = "\\n".join(json.dumps(x) for x in messages[-3:])
-    query_msg = interpreter.chat(f"This is the conversation so far: {combined_messages}. What is a <10 words query that could be used to find functions that would help answer the user's question?")
-    query = query_msg[0]['content']
-    skills_path = Path().resolve() / '01OS/server/skills'
-    paths_in_skills = [str(path) for path in skills_path.glob('**/*.py')]
-    skills = interpreter.computer.skills.search(query)
-    lowercase_skills = [skill[0].lower() + skill[1:] for skill in skills]
-    output = "\\n".join(lowercase_skills)
-finally:
-    sys.stdout = original_stdout
-    sys.stderr = original_stderr
-print(output)
-}}
-"""

01OS/poetry.lock (generated)

@@ -2,13 +2,13 @@
[[package]]
name = "aifs"
version = "0.0.8"
version = "0.0.9"
description = "Local semantic search. Stupidly simple."
optional = false
python-versions = ">=3.9,<4.0"
files = [
{file = "aifs-0.0.8-py3-none-any.whl", hash = "sha256:7f1d581faecacbcb1a33d3c155656379768ea30349d29ebf18822b9a6c49379b"},
{file = "aifs-0.0.8.tar.gz", hash = "sha256:47b779f2fa008f29fe1db17eaa238dee5ea5d0353707d5722b5b9cf8b8aeb83a"},
{file = "aifs-0.0.9-py3-none-any.whl", hash = "sha256:beada6d7a0bd45a1ff9c9699b6c3d19c289c8708297925b3500c08f504be41b7"},
{file = "aifs-0.0.9.tar.gz", hash = "sha256:dea45935fa2a89e6134b5f651980026b45dc5768798c8159b9f77755570e5ab1"},
]
[package.dependencies]
@@ -2702,7 +2702,7 @@ files = []
develop = false
[package.dependencies]
aifs = "^0.0.8"
aifs = "^0.0.9"
appdirs = "^1.4.4"
astor = "^0.8.1"
fastapi = "^0.109.0"
@@ -2735,7 +2735,7 @@ safe = ["semgrep (>=1.52.0,<2.0.0)", "yaspin (>=3.0.1,<4.0.0)"]
type = "git"
url = "https://github.com/KillianLucas/open-interpreter.git"
reference = "HEAD"
resolved_reference = "afe9860ae3abfbf4de3bc9311cbe3192f02bf864"
resolved_reference = "797bb4725d3ad1f2b493d8b3f11b7212fe7dfaf4"
[[package]]
name = "openai"

@@ -70,6 +70,10 @@ if [[ "$@" == *"--clear-local"* ]]; then
exit 0
fi
+### SKILLS PATH
+OI_SKILLS_PATH="$SCRIPT_DIR/01OS/server/skills"
### SETUP
if [[ "$ALL_LOCAL" == "True" ]]; then
