pull/336/head
parent
d454733dff
commit
36b022ed41
@ -1,26 +1,43 @@
|
||||
from swarms.agents.simple_agent import SimpleAgent
|
||||
from swarms.structs import Agent
|
||||
from swarms.models import OpenAIChat
|
||||
import os
|
||||
|
||||
api_key = ""
|
||||
from dotenv import load_dotenv
|
||||
|
||||
llm = OpenAIChat(
|
||||
openai_api_key=api_key,
|
||||
temperature=0.5,
|
||||
from swarms import (
|
||||
OpenAIChat,
|
||||
Conversation,
|
||||
# display_markdown_message,
|
||||
)
|
||||
|
||||
# Initialize the agent
|
||||
agent = Agent(
|
||||
llm=llm,
|
||||
max_loops=5,
|
||||
)
|
||||
conv = Conversation()
|
||||
|
||||
# Load the environment variables
|
||||
load_dotenv()
|
||||
|
||||
# Get the API key from the environment
|
||||
api_key = os.environ.get("OPENAI_API_KEY")
|
||||
|
||||
# Initialize the language model
|
||||
llm = OpenAIChat(openai_api_key=api_key, model_name="gpt-4")
|
||||
|
||||
# Run the language model in a loop
|
||||
def interactive_conversation(llm):
    """Run an interactive chat REPL against *llm*.

    Each user turn is appended to a Conversation; the full history is sent
    to the model so replies are context-aware. Typing "quit" (any case)
    ends the loop, after which the transcript is displayed and exported
    once to conversation.txt.

    Args:
        llm: A callable language model; invoked with the history string.
    """
    conv = Conversation()
    while True:
        user_input = input("User: ")
        conv.add("user", user_input)
        if user_input.lower() == "quit":
            break
        # Send the whole conversation history so the model sees prior turns.
        task = conv.return_history_as_string()
        out = llm(task)
        conv.add("assistant", out)
        print(f"Assistant: {out}")
    # Display and export once at the end instead of rewriting the file
    # on every turn.
    conv.display_conversation()
    conv.export_conversation("conversation.txt")
|
||||
|
||||
agent = SimpleAgent(
|
||||
name="Optimus Prime",
|
||||
agent=agent,
|
||||
# Memory
|
||||
)
|
||||
|
||||
out = agent.run("Generate a 10,000 word blog on health and wellness.")
|
||||
print(out)
|
||||
# Replace with your LLM instance
|
||||
interactive_conversation(llm)
|
||||
|
@ -0,0 +1,11 @@
|
||||
import subprocess
|
||||
from swarms.telemetry.check_update import check_for_update
|
||||
|
||||
|
||||
def auto_update():
    """Upgrade the installed ``swarms`` package when a newer release exists.

    Best-effort: any failure (network, pip, subprocess) is printed and
    swallowed so the update check never crashes the host application.
    """
    try:
        if check_for_update():
            import sys  # local import: only needed when an update is found

            # Use this interpreter's pip so the correct environment is
            # upgraded; check=True surfaces a failed install as an error
            # instead of silently succeeding.
            subprocess.run(
                [sys.executable, "-m", "pip", "install", "--upgrade", "swarms"],
                check=True,
            )
    except Exception as e:
        print(e)
|
@ -0,0 +1,46 @@
|
||||
import pkg_resources
|
||||
import requests
|
||||
from packaging import version
|
||||
|
||||
import importlib.util
|
||||
import sys
|
||||
|
||||
|
||||
# borrowed from: https://stackoverflow.com/a/1051266/656011
def check_for_package(package):
    """Return True when *package* is importable, importing it on demand.

    An already-imported module short-circuits to True. Otherwise the spec
    is located; if found, the module is loaded, executed, and registered
    in ``sys.modules`` so later imports are cheap.
    """
    if package in sys.modules:
        return True

    spec = importlib.util.find_spec(package)
    if spec is None:
        return False

    try:
        loaded = importlib.util.module_from_spec(spec)
        sys.modules[package] = loaded
        spec.loader.exec_module(loaded)
    except ImportError:
        return False
    return True
|
||||
|
||||
|
||||
def check_for_update():
    """Check whether a newer ``swarms`` release is available on PyPI.

    Returns:
        bool: True if PyPI's latest published version is newer than the
        locally installed one.
    """
    # Fetch the latest version from the PyPI JSON API. A timeout keeps the
    # caller from hanging forever if PyPI is unreachable, and
    # raise_for_status turns an HTTP error into a clear exception instead
    # of a confusing KeyError below.
    response = requests.get("https://pypi.org/pypi/swarms/json", timeout=10)
    response.raise_for_status()
    latest_version = response.json()["info"]["version"]

    # Get the currently installed version using pkg_resources
    current_version = pkg_resources.get_distribution("swarms").version

    return version.parse(latest_version) > version.parse(current_version)
|
||||
|
||||
|
||||
# out = check_for_update()
|
||||
# print(out)
|
@ -0,0 +1,158 @@
|
||||
import platform
|
||||
import subprocess
|
||||
|
||||
import pkg_resources
|
||||
import psutil
|
||||
import toml
|
||||
|
||||
|
||||
def get_python_version():
    """Return the running interpreter's version string, e.g. ``"3.11.4"``."""
    interpreter_version = platform.python_version()
    return interpreter_version
|
||||
|
||||
|
||||
def get_pip_version():
    """Return the installed pip version, or the error text if pip fails."""
    try:
        output = subprocess.check_output(["pip", "--version"]).decode()
        # Output looks like "pip X.Y.Z from ..."; the second token is the
        # version number.
        pip_version = output.split()[1]
    except Exception as e:
        pip_version = str(e)
    return pip_version
|
||||
|
||||
|
||||
def get_oi_version():
    """Return a ``(cli_version, package_version)`` pair for open-interpreter.

    Each element degrades to the error message when the CLI or the package
    is unavailable, so callers always receive two printable strings.
    """
    try:
        oi_version_cmd = (
            subprocess.check_output(["interpreter", "--version"])
            .decode()
            .split()[1]
        )
    except Exception as e:
        oi_version_cmd = str(e)
    try:
        oi_version_pkg = pkg_resources.get_distribution(
            "open-interpreter"
        ).version
    except Exception as e:
        # Previously this lookup sat outside any try: an uninstalled
        # package crashed the whole diagnostic report.
        oi_version_pkg = str(e)
    return oi_version_cmd, oi_version_pkg
|
||||
|
||||
|
||||
def get_os_version():
    """Return a human-readable OS/platform description string."""
    os_description = platform.platform()
    return os_description
|
||||
|
||||
|
||||
def get_cpu_info():
    """Return the processor name as reported by the platform (may be empty)."""
    processor_name = platform.processor()
    return processor_name
|
||||
|
||||
|
||||
def get_ram_info():
    """Return a one-line summary of total/used/free RAM in GiB."""
    memory = psutil.virtual_memory()
    gib = 1024**3
    used_ram_gb = memory.used / gib
    free_ram_gb = memory.free / gib
    total_ram_gb = memory.total / gib
    summary = (
        f"{total_ram_gb:.2f} GB, used: {used_ram_gb:.2f}, free:"
        f" {free_ram_gb:.2f}"
    )
    return summary
|
||||
|
||||
|
||||
def get_package_mismatches(file_path="pyproject.toml"):
    """Report poetry dependencies whose pins disagree with installed versions.

    Reads main and dev dependencies from *file_path*, then for every
    caret-pinned (``^``) package compares the pin against the pip-installed
    version. Packages that are not installed (or not caret-pinned) are
    reported as "Not found in pip list".

    Returns:
        str: A newline-joined report, prefixed with a newline.
    """
    with open(file_path, "r") as file:
        pyproject = toml.load(file)
    poetry_cfg = pyproject["tool"]["poetry"]
    dependencies = poetry_cfg["dependencies"]
    dependencies.update(poetry_cfg["group"]["dev"]["dependencies"])

    installed = {
        pkg.key: pkg.version for pkg in pkg_resources.working_set
    }

    mismatches = []
    for package, spec in dependencies.items():
        # Poetry specs may be plain strings or tables with a "version" key.
        if isinstance(spec, dict):
            spec = spec["version"]
        found_version = installed.get(package)
        if found_version and spec.startswith("^"):
            expected_version = spec[1:]
            if not found_version.startswith(expected_version):
                mismatches.append(
                    f"\t {package}: Mismatch,"
                    f" pyproject.toml={expected_version},"
                    f" pip={found_version}"
                )
        else:
            mismatches.append(f"\t {package}: Not found in pip list")

    return "\n" + "\n".join(mismatches)
|
||||
|
||||
|
||||
def interpreter_info(interpreter):
    """Build a markdown report of *interpreter*'s configuration and messages.

    Message contents longer than 600 characters are truncated to their head
    and tail for display. Returns an error string instead of raising so
    diagnostics never crash the caller.
    """
    try:
        if interpreter.offline and interpreter.llm.api_base:
            try:
                # Pass argv as a list (no shell): the previous single-string
                # form made the OS look for an executable literally named
                # "curl <url>", which always failed; a list also avoids any
                # shell-injection risk from the URL.
                curl = subprocess.check_output(
                    ["curl", interpreter.llm.api_base]
                )
            except Exception as e:
                curl = str(e)
        else:
            curl = "Not local"

        messages_to_display = []
        for message in interpreter.messages:
            message = message.copy()
            try:
                if len(message["content"]) > 600:
                    # Keep the head and tail of oversized contents.
                    message["content"] = (
                        message["content"][:300]
                        + "..."
                        + message["content"][-300:]
                    )
            except Exception as e:
                print(str(e), "for message:", message)
            messages_to_display.append(message)

        return f"""

# Interpreter Info

Vision: {interpreter.llm.supports_vision}
Model: {interpreter.llm.model}
Function calling: {interpreter.llm.supports_functions}
Context window: {interpreter.llm.context_window}
Max tokens: {interpreter.llm.max_tokens}

Auto run: {interpreter.auto_run}
API base: {interpreter.llm.api_base}
Offline: {interpreter.offline}

Curl output: {curl}

# Messages

System Message: {interpreter.system_message}

""" + "\n\n".join([str(m) for m in messages_to_display])
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate.
        return "Error, couldn't get interpreter info"
|
||||
|
||||
|
||||
def system_info(interpreter):
    """Print a diagnostic summary of the host environment and *interpreter*."""
    oi_version = get_oi_version()
    print(f"""
Python Version: {get_python_version()}
Pip Version: {get_pip_version()}
Open-interpreter Version: cmd:{oi_version[0]}, pkg: {oi_version[1]}
OS Version and Architecture: {get_os_version()}
CPU Info: {get_cpu_info()}
RAM Info: {get_ram_info()}
{interpreter_info(interpreter)}
""")


# Removed the following, as it causes `FileNotFoundError: [Errno 2] No such file or directory: 'pyproject.toml'`` on prod
# (i think it works on dev, but on prod the pyproject.toml will not be in the cwd. might not be accessible at all)
# Package Version Mismatches:
# {get_package_mismatches()}
|
@ -0,0 +1,24 @@
|
||||
import os
|
||||
import re
|
||||
|
||||
|
||||
def find_image_path(text):
    """Extract the longest existing image path mentioned in *text*.

    Scans for Windows-style (``C:\\...``) and POSIX-style (``/...``) paths
    ending in png/jpg/jpeg (either case), also trying each candidate with
    backslashes stripped, and returns the longest candidate that exists on
    disk — or None when nothing matches.
    """
    pattern = r"([A-Za-z]:\\[^:\n]*?\.(png|jpg|jpeg|PNG|JPG|JPEG))|(/[^:\n]*?\.(png|jpg|jpeg|PNG|JPG|JPEG))"
    candidates = [
        m.group() for m in re.finditer(pattern, text) if m.group()
    ]
    # Also consider each candidate with backslashes removed (handles
    # escaped Windows paths pasted into the text).
    candidates += [c.replace("\\", "") for c in candidates if c]
    existing = [c for c in candidates if os.path.exists(c)]
    if not existing:
        return None
    return max(existing, key=len)
|
@ -1,23 +1,27 @@
|
||||
from rich import print as rich_print
|
||||
from rich.console import Console
|
||||
from rich.markdown import Markdown
|
||||
from rich.rule import Rule
|
||||
|
||||
|
||||
def display_markdown_message(message: str, color: str = "cyan"):
    """
    Display markdown message. Works with multiline strings with lots of indentation.
    Will automatically make single line > tags beautiful.
    """
    console = Console()
    for raw_line in message.split("\n"):
        stripped = raw_line.strip()
        if stripped == "":
            console.print("")
        elif stripped == "---":
            console.print(Rule(style=color))
        else:
            console.print(Markdown(stripped, style=color))

    if "\n" not in message and message.startswith(">"):
        # Aesthetic choice. For these tags, they need a space below them
        console.print("")
|
||||
|
||||
|
||||
# display_markdown_message("I love you and you are beautiful.", "cyan")
|
||||
|
Loading…
Reference in new issue