[EXACT DEPENDENCY VERSIONS]

pull/299/head
Kye 1 year ago
parent 5962d9f506
commit 3476511109

@ -24,47 +24,48 @@ classifiers = [
[tool.poetry.dependencies]
python = "^3.6.1"
torch = "2.1.1"
transformers = "2.10"
transformers = "4.35.0"
openai = "0.28.0"
langchain = "*"
asyncio = "*"
einops = "*"
google-generativeai = "0.3.0"
langchain-experimental = "*"
playwright = "*"
weaviate-client = "*"
opencv-python-headless = "*"
faiss-cpu = "*"
backoff = "*"
marshmallow = "*"
datasets = "*"
langchain = "0.0.333"
asyncio = "3.4.3"
einops = "0.7.0"
google-generativeai = "0.3.1"
langchain-experimental = "0.0.10"
playwright = "1.34.0"
weaviate-client = "3.25.3"
opencv-python-headless = "4.8.1.78"
faiss-cpu = "1.7.4"
backoff = "2.2.1"
marshmallow = "3.19.0"
datasets = "2.10.1"
optimum = "1.15.0"
diffusers = "*"
PyPDF2 = "*"
accelerate = "*"
sentencepiece = "*"
wget = "*"
tensorflow = "2.15.0"
httpx = "*"
tiktoken = "*"
safetensors = "*"
attrs = "*"
ggl = "*"
ratelimit = "*"
beautifulsoup4 = "*"
cohere = "*"
huggingface-hub = "*"
diffusers = "0.17.1"
PyPDF2 = "3.0.1"
accelerate = "0.22.0"
sentencepiece = "0.1.98"
wget = "3.2"
tensorflow = "2.12.0"
httpx = "0.24.1"
tiktoken = "0.4.0"
safetensors = "0.3.3"
attrs = "22.2.0"
ggl = "1.1.0"
ratelimit = "2.2.1"
beautifulsoup4 = "4.11.2"
cohere = "4.24"
huggingface-hub = "0.16.4"
pydantic = "1.10.12"
tenacity = "*"
Pillow = "*"
chromadb = "*"
tabulate = "*"
termcolor = "*"
black = "*"
open_clip_torch = "*"
soundfile = "*"
torchvision = "*"
rich = "*"
tenacity = "8.2.2"
Pillow = "9.4.0"
chromadb = "0.4.14"
tabulate = "0.9.0"
termcolor = "2.2.0"
black = "23.3.0"
open_clip_torch = "2.20.0"
soundfile = "0.12.1"
torchvision = "0.16.1"
rich = "13.5.2"
[tool.poetry.group.lint.dependencies]
ruff = ">=0.0.249,<0.1.7"

@ -151,30 +151,30 @@ class Gemini(BaseMultiModalModel):
str: output from the model
"""
try:
if img:
# process_img = self.process_img(img, *args, **kwargs)
process_img = self.process_img_pil(img)
response = self.model.generate_content(
contents=[task, process_img],
generation_config=self.generation_config,
stream=self.stream,
*args,
**kwargs,
)
# if self.candidates:
# return response.candidates
# elif self.safety:
# return response.safety
# else:
# return response.text
return response.text
else:
response = self.model.generate_content(
task, *args, **kwargs
)
return response.text
# if img:
# # process_img = self.process_img(img, *args, **kwargs)
# process_img = self.process_img_pil(img)
# response = self.model.generate_content(
# contents=[task, process_img],
# generation_config=self.generation_config,
# stream=self.stream,
# *args,
# **kwargs,
# )
# # if self.candidates:
# # return response.candidates
# # elif self.safety:
# # return response.safety
# # else:
# # return response.text
# return response.text
# else:
response = self.model.generate_content(
task, *args, **kwargs
)
return response.text
except Exception as error:
print(f"Error running Gemini model: {error}")
print(f"Please check the task and image: {task}, {img}")

Loading…
Cancel
Save