[EXACT DEPENDENCY VERSIONS]

pull/299/head
Kye 1 year ago
parent 5962d9f506
commit 3476511109

@ -24,47 +24,48 @@ classifiers = [
[tool.poetry.dependencies] [tool.poetry.dependencies]
python = "^3.6.1" python = "^3.6.1"
torch = "2.1.1" torch = "2.1.1"
transformers = "2.10" transformers = "4.35.0"
openai = "0.28.0" openai = "0.28.0"
langchain = "*" langchain = "0.0.333"
asyncio = "*" asyncio = "3.4.3"
einops = "*" einops = "0.7.0"
google-generativeai = "0.3.0" google-generativeai = "0.3.1"
langchain-experimental = "*" langchain-experimental = "0.0.10"
playwright = "*" playwright = "1.34.0"
weaviate-client = "*" weaviate-client = "3.25.3"
opencv-python-headless = "*" opencv-python-headless = "4.8.1.78"
faiss-cpu = "*" faiss-cpu = "1.7.4"
backoff = "*" backoff = "2.2.1"
marshmallow = "*" marshmallow = "3.19.0"
datasets = "*" datasets = "2.10.1"
optimum = "1.15.0" optimum = "1.15.0"
diffusers = "*" diffusers = "0.17.1"
PyPDF2 = "*" PyPDF2 = "3.0.1"
accelerate = "*" accelerate = "0.22.0"
sentencepiece = "*" sentencepiece = "0.1.98"
wget = "*" wget = "3.2"
tensorflow = "2.15.0" tensorflow = "2.12.0"
httpx = "*" httpx = "0.24.1"
tiktoken = "*" tiktoken = "0.4.0"
safetensors = "*" safetensors = "0.3.3"
attrs = "*" attrs = "22.2.0"
ggl = "*" ggl = "1.1.0"
ratelimit = "*" ratelimit = "2.2.1"
beautifulsoup4 = "*" beautifulsoup4 = "4.11.2"
cohere = "*" cohere = "4.24"
huggingface-hub = "*" huggingface-hub = "0.16.4"
pydantic = "1.10.12" pydantic = "1.10.12"
tenacity = "*" tenacity = "8.2.2"
Pillow = "*" Pillow = "9.4.0"
chromadb = "*" chromadb = "0.4.14"
tabulate = "*" tabulate = "0.9.0"
termcolor = "*" termcolor = "2.2.0"
black = "*" black = "23.3.0"
open_clip_torch = "*" open_clip_torch = "2.20.0"
soundfile = "*" soundfile = "0.12.1"
torchvision = "*" torchvision = "0.16.1"
rich = "*" rich = "13.5.2"
[tool.poetry.group.lint.dependencies] [tool.poetry.group.lint.dependencies]
ruff = ">=0.0.249,<0.1.7" ruff = ">=0.0.249,<0.1.7"

@ -151,30 +151,30 @@ class Gemini(BaseMultiModalModel):
str: output from the model str: output from the model
""" """
try: try:
if img: # if img:
# process_img = self.process_img(img, *args, **kwargs) # # process_img = self.process_img(img, *args, **kwargs)
process_img = self.process_img_pil(img) # process_img = self.process_img_pil(img)
response = self.model.generate_content( # response = self.model.generate_content(
contents=[task, process_img], # contents=[task, process_img],
generation_config=self.generation_config, # generation_config=self.generation_config,
stream=self.stream, # stream=self.stream,
*args, # *args,
**kwargs, # **kwargs,
) # )
# if self.candidates: # # if self.candidates:
# return response.candidates # # return response.candidates
# elif self.safety: # # elif self.safety:
# return response.safety # # return response.safety
# else: # # else:
# return response.text # # return response.text
return response.text # return response.text
else: # else:
response = self.model.generate_content( response = self.model.generate_content(
task, *args, **kwargs task, *args, **kwargs
) )
return response.text return response.text
except Exception as error: except Exception as error:
print(f"Error running Gemini model: {error}") print(f"Error running Gemini model: {error}")
print(f"Please check the task and image: {task}, {img}") print(f"Please check the task and image: {task}, {img}")

Loading…
Cancel
Save