[UNUSED DEPENDENCY]

pull/378/head
Kye 11 months ago
parent c7bf2274a0
commit 070cccac92

@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 [tool.poetry]
 name = "swarms"
-version = "3.9.1"
+version = "3.9.3"
 description = "Swarms - Pytorch"
 license = "MIT"
 authors = ["Kye Gomez <kye@apac.ai>"]
@@ -23,7 +23,7 @@ classifiers = [
 [tool.poetry.dependencies]
 python = "^3.6.1"
-torch = "2.1.1"
+torch = "2.1.1"p
 transformers = "4.37.1"
 openai = "0.28.0"
 langchain = "0.0.333"
@@ -56,10 +56,8 @@ pydantic = "1.10.12"
 tenacity = "8.2.2"
 Pillow = "9.4.0"
 chromadb = "0.4.14"
-tabulate = "0.9.0"
-termcolor = "2.2.0"
 black = "23.3.0"
 open_clip_torch = "2.20.0"
 soundfile = "0.12.1"
 torchvision = "0.16.1"
 rich = "13.5.2"
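Of the dependencies dropped from pyproject.toml here, tabulate and termcolor match the import and call sites removed from ModelParallelizer further down. Note also that the torch pin ending in a stray "p" is not valid TOML and would make the file unparseable. Running poetry remove tabulate termcolor keeps the lock file in sync with the manifest, and a rough sketch of the kind of check behind an [UNUSED DEPENDENCY] commit (an assumption, not project tooling; requires Python 3.11+ for tomllib and assumes the sources live in ./swarms) is:

# unused_deps.py: flag [tool.poetry.dependencies] entries never referenced in the sources.
import pathlib
import tomllib

pyproject = tomllib.loads(pathlib.Path("pyproject.toml").read_text())
declared = set(pyproject["tool"]["poetry"]["dependencies"]) - {"python"}

# Concatenate the package sources once so each check is a cheap substring test.
source = "\n".join(
    path.read_text(errors="ignore")
    for path in pathlib.Path("swarms").rglob("*.py")
)

for dep in sorted(declared):
    # Crude heuristic: distribution and import names can differ (Pillow vs PIL),
    # so misses are only candidates for removal, not proof.
    module = dep.replace("-", "_").lower()
    if module not in source:
        print(f"possibly unused: {dep}")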

@@ -11,7 +11,6 @@ ggl==1.1.0
 Pillow==9.4.0
 faiss-cpu==1.7.4
 openai==0.28.0
-attrs==22.2.0
 datasets==2.14.5
 pydantic==1.10.12
 bitsandbytes
@@ -25,13 +24,11 @@ chromadb==0.4.14
 tensorflow
 optimum
 tiktoken==0.4.0
-tabulate==0.9.0
 colored
 addict
 backoff==2.2.1
 ratelimit==2.2.1
-termcolor==2.2.0
 controlnet-aux
 diffusers
 einops==0.7.0
 imageio==2.25.1
@@ -40,14 +37,10 @@ imageio-ffmpeg==0.4.9
 safetensors==0.3.3
 numpy
 omegaconf==2.3.0
-open_clip_torch==2.20.0
-openai==0.28.0
 opencv-python==4.7.0.72
 prettytable==3.9.0
-safetensors==0.3.3
 timm
 torchmetrics
 webdataset
-marshmallow==3.19.0
 yapf
 autopep8
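Besides the unused packages, the old requirements.txt listed openai==0.28.0 and safetensors==0.3.3 twice each. A small sketch for catching such duplicate pins (a hypothetical helper, not part of the repository):

# find_duplicate_pins.py: report distributions listed more than once in requirements.txt.
import re
from collections import Counter
from pathlib import Path

names = []
for raw in Path("requirements.txt").read_text().splitlines():
    line = raw.strip()
    if not line or line.startswith("#"):
        continue  # skip blanks and comments
    # Keep only the distribution name, dropping extras and version specifiers.
    name = re.split(r"[\[=<>~!; ]", line, maxsplit=1)[0].lower().replace("_", "-")
    names.append(name)

for name, count in sorted(Counter(names).items()):
    if count > 1:
        print(f"{name}: listed {count} times")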

@@ -3,7 +3,6 @@ import logging
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from typing import Callable, List
-from tabulate import tabulate
 from termcolor import colored
 
 # Configure logging
@@ -77,23 +76,6 @@ class ModelParallelizer:
                 f"[ERROR][ModelParallelizer] [ROOT CAUSE] [{error}]"
             )
 
-    def print_responses(self, task):
-        """Prints the responses in a tabular format"""
-        responses = self.run_all(task)
-        table = []
-        for i, response in enumerate(responses):
-            table.append([f"LLM {i+1}", response])
-        print(
-            colored(
-                tabulate(
-                    table,
-                    headers=["LLM", "Response"],
-                    tablefmt="pretty",
-                ),
-                "cyan",
-            )
-        )
-
     def run_all(self, task):
         """Run the task on all LLMs"""
         responses = []
@@ -101,23 +83,7 @@ class ModelParallelizer:
             responses.append(llm(task))
         return responses
 
-    def print_arun_all(self, task):
-        """Prints the responses in a tabular format"""
-        responses = self.arun_all(task)
-        table = []
-        for i, response in enumerate(responses):
-            table.append([f"LLM {i+1}", response])
-        print(
-            colored(
-                tabulate(
-                    table,
-                    headers=["LLM", "Response"],
-                    tablefmt="pretty",
-                ),
-                "cyan",
-            )
-        )
 
     # New Features
     def save_responses_to_file(self, filename):
         """Save responses to file"""
@@ -126,7 +92,7 @@ class ModelParallelizer:
                 [f"LLM {i+1}", response]
                 for i, response in enumerate(self.last_responses)
             ]
-            file.write(tabulate(table, headers=["LLM", "Response"]))
+            file.write(table)
 
     @classmethod
     def load_llms_from_file(cls, filename):
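With tabulate gone, save_responses_to_file now passes the raw list of rows straight to file.write(), which expects a string and would raise a TypeError. A minimal dependency-free stand-in (format_table is a hypothetical helper, not part of swarms) could look like this:

def format_table(rows, headers):
    """Render rows as plain-text columns padded to the widest cell."""
    all_rows = [list(headers)] + [[str(cell) for cell in row] for row in rows]
    widths = [max(len(r[i]) for r in all_rows) for i in range(len(headers))]
    return "\n".join(
        "  ".join(cell.ljust(width) for cell, width in zip(r, widths))
        for r in all_rows
    )

# Usage inside save_responses_to_file, assuming table is the list built above:
# file.write(format_table(table, headers=["LLM", "Response"]))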
@@ -151,11 +117,7 @@ class ModelParallelizer:
         ]
         print(
             colored(
-                tabulate(
-                    table,
-                    headers=["LLM", "Response"],
-                    tablefmt="pretty",
-                ),
+                table,
                 "cyan",
             )
         )
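termcolor is dropped from both dependency lists above, yet colored() is still imported and called here, and without tabulate the cyan output would likely show the list's repr rather than an aligned table. If the aim is zero extra dependencies, a bare ANSI stand-in (an assumption, not the project's code) would be:

CYAN = "\033[36m"
RESET = "\033[0m"

def colored(text, color="cyan"):
    """Minimal substitute for termcolor.colored(); only handles cyan."""
    return f"{CYAN}{text}{RESET}" if color == "cyan" else str(text)

# e.g. print(colored(format_table(table, headers=["LLM", "Response"])))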
