diff --git a/pyproject.toml b/pyproject.toml
index 0fa7b2db..c09edb2b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
 name = "swarms"
-version = "3.2.8"
+version = "3.3.4"
 description = "Swarms - Pytorch"
 license = "MIT"
 authors = ["Kye Gomez "]
@@ -24,7 +24,7 @@ classifiers = [
 [tool.poetry.dependencies]
 python = "^3.6.1"
 torch = "2.1.1"
-transformers = "4.35.0"
+transformers = "4.36.2"
 openai = "0.28.0"
 langchain = "0.0.333"
 asyncio = "3.4.3"
@@ -37,7 +37,7 @@ opencv-python-headless = "4.8.1.78"
 faiss-cpu = "1.7.4"
 backoff = "2.2.1"
 marshmallow = "3.19.0"
-datasets = "2.10.1"
+datasets = "*"
 optimum = "1.15.0"
 diffusers = "*"
 PyPDF2 = "3.0.1"
@@ -53,7 +53,7 @@ ggl = "1.1.0"
 ratelimit = "2.2.1"
 beautifulsoup4 = "4.11.2"
 cohere = "4.24"
-huggingface-hub = "0.16.4"
+huggingface-hub = "*"
 pydantic = "1.10.12"
 tenacity = "8.2.2"
 Pillow = "9.4.0"
diff --git a/requirements.txt b/requirements.txt
index 9250997a..e8e2380e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,5 @@
 torch==2.1.1
-transformers>2.10==4.35.0
+transformers>2.10==4.36.2
 pandas==1.5.3
 langchain==0.0.333
 nest_asyncio==1.5.6
@@ -72,6 +72,5 @@ pre-commit==3.2.2
 sqlalchemy
 pgvector
 qdrant-client
-sentence-transformers
 peft
 modelscope==1.10.0
\ No newline at end of file
diff --git a/swarms/models/vllm.py b/swarms/models/vllm.py
index 58745a75..0caeb3c8 100644
--- a/swarms/models/vllm.py
+++ b/swarms/models/vllm.py
@@ -1,13 +1,11 @@
 import torch
 from swarms.models.base_llm import AbstractLLM
-import subprocess
 
 if torch.cuda.is_available() or torch.cuda.device_count() > 0:
     # Download vllm with pip
     try:
-        subprocess.run(["pip", "install", "vllm"])
         from vllm import LLM, SamplingParams
-    except Exception as error:
+    except ImportError as error:
         print(f"[ERROR] [vLLM] {error}")
         raise error
 else: