From 34765111092051415351db221e936b9587a12c69 Mon Sep 17 00:00:00 2001
From: Kye
Date: Wed, 13 Dec 2023 15:51:41 -0800
Subject: [PATCH] [EXACT DEPENDENCY VERSIONS]

---
 playground/models/gemini.py => gemini.py |  0
 pyproject.toml                           | 77 ++++++++++++------------
 swarms/models/gemini.py                  | 48 +++++++--------
 3 files changed, 63 insertions(+), 62 deletions(-)
 rename playground/models/gemini.py => gemini.py (100%)

diff --git a/playground/models/gemini.py b/gemini.py
similarity index 100%
rename from playground/models/gemini.py
rename to gemini.py
diff --git a/pyproject.toml b/pyproject.toml
index 387acff2..2ce94831 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -24,47 +24,48 @@ classifiers = [
 [tool.poetry.dependencies]
 python = "^3.6.1"
 torch = "2.1.1"
-transformers = "2.10"
+transformers = "4.35.0"
 openai = "0.28.0"
-langchain = "*"
-asyncio = "*"
-einops = "*"
-google-generativeai = "0.3.0"
-langchain-experimental = "*"
-playwright = "*"
-weaviate-client = "*"
-opencv-python-headless = "*"
-faiss-cpu = "*"
-backoff = "*"
-marshmallow = "*"
-datasets = "*"
+langchain = "0.0.333"
+asyncio = "3.4.3"
+einops = "0.7.0"
+google-generativeai = "0.3.1"
+langchain-experimental = "0.0.10"
+playwright = "1.34.0"
+weaviate-client = "3.25.3"
+opencv-python-headless = "4.8.1.78"
+faiss-cpu = "1.7.4"
+backoff = "2.2.1"
+marshmallow = "3.19.0"
+datasets = "2.10.1"
 optimum = "1.15.0"
-diffusers = "*"
-PyPDF2 = "*"
-accelerate = "*"
-sentencepiece = "*"
-wget = "*"
-tensorflow = "2.15.0"
-httpx = "*"
-tiktoken = "*"
-safetensors = "*"
-attrs = "*"
-ggl = "*"
-ratelimit = "*"
-beautifulsoup4 = "*"
-cohere = "*"
-huggingface-hub = "*"
+diffusers = "0.17.1"
+PyPDF2 = "3.0.1"
+accelerate = "0.22.0"
+sentencepiece = "0.1.98"
+wget = "3.2"
+tensorflow = "2.12.0"
+httpx = "0.24.1"
+tiktoken = "0.4.0"
+safetensors = "0.3.3"
+attrs = "22.2.0"
+ggl = "1.1.0"
+ratelimit = "2.2.1"
+beautifulsoup4 = "4.11.2"
+cohere = "4.24"
+huggingface-hub = "0.16.4"
 pydantic = "1.10.12"
-tenacity = "*"
-Pillow = "*"
-chromadb = "*"
-tabulate = "*"
-termcolor = "*"
-black = "*"
-open_clip_torch = "*"
-soundfile = "*"
-torchvision = "*"
-rich = "*"
+tenacity = "8.2.2"
+Pillow = "9.4.0"
+chromadb = "0.4.14"
+tabulate = "0.9.0"
+termcolor = "2.2.0"
+black = "23.3.0"
+open_clip_torch = "2.20.0"
+soundfile = "0.12.1"
+torchvision = "0.16.1"
+rich = "13.5.2"
+
 
 [tool.poetry.group.lint.dependencies]
 ruff = ">=0.0.249,<0.1.7"
diff --git a/swarms/models/gemini.py b/swarms/models/gemini.py
index 25696193..28fa7c85 100644
--- a/swarms/models/gemini.py
+++ b/swarms/models/gemini.py
@@ -151,30 +151,30 @@ class Gemini(BaseMultiModalModel):
             str: output from the model
         """
         try:
-            if img:
-                # process_img = self.process_img(img, *args, **kwargs)
-                process_img = self.process_img_pil(img)
-                response = self.model.generate_content(
-                    contents=[task, process_img],
-                    generation_config=self.generation_config,
-                    stream=self.stream,
-                    *args,
-                    **kwargs,
-                )
-
-                # if self.candidates:
-                #     return response.candidates
-                # elif self.safety:
-                #     return response.safety
-                # else:
-                #     return response.text
-
-                return response.text
-            else:
-                response = self.model.generate_content(
-                    task, *args, **kwargs
-                )
-                return response.text
+            # if img:
+            #     # process_img = self.process_img(img, *args, **kwargs)
+            #     process_img = self.process_img_pil(img)
+            #     response = self.model.generate_content(
+            #         contents=[task, process_img],
+            #         generation_config=self.generation_config,
+            #         stream=self.stream,
+            #         *args,
+            #         **kwargs,
+            #     )
+
+            #     # if self.candidates:
+            #     #     return response.candidates
+            #     # elif self.safety:
+            #     #     return response.safety
+            #     # else:
+            #     #     return response.text
+
+            #     return response.text
+            # else:
+            response = self.model.generate_content(
+                task, *args, **kwargs
+            )
+            return response.text
         except Exception as error:
             print(f"Error running Gemini model: {error}")
             print(f"Please check the task and image: {task}, {img}")