FEAT: [BEAUTIFY in GPT4Vision][Disable logging in __init__ of swarms]

pull/197/head
Kye 1 year ago
parent 51c82cf1f2
commit f895497f88

@@ -1,18 +1,6 @@
import logging
import os
import warnings
from swarms.utils.disable_logging import disable_logging
warnings.filterwarnings("ignore", category=UserWarning)
# disable tensorflow warnings
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"
try:
    log = logging.getLogger("pytorch")
    log.propagate = False
    log.setLevel(logging.ERROR)
except Exception as error:
    print(f"Pytorch logging not disabled: {error}")
disable_logging()
from swarms.agents import * # noqa: E402, F403
from swarms.swarms import * # noqa: E402, F403

@@ -20,6 +20,7 @@ class BaseMultiModalModel:
        max_workers: Optional[int] = 10,
        top_p: Optional[int] = 1,
        top_k: Optional[int] = 50,
        beautify: Optional[bool] = False,
        device: Optional[str] = "cuda",
        max_new_tokens: Optional[int] = 500,
        retries: Optional[int] = 3,
@@ -30,6 +31,7 @@ class BaseMultiModalModel:
        self.max_workers = max_workers
        self.top_p = top_p
        self.top_k = top_k
        self.beautify = beautify
        self.device = device
        self.max_new_tokens = max_new_tokens
        self.retries = retries
@@ -206,4 +208,8 @@ class BaseMultiModalModel:
    def get_chat_history_tokens(self):
        """Get the chat history tokens"""
        return self._num_tokens()

    def print_beautiful(self, content: str, color: str = "cyan"):
        """Print Beautifully with termcolor"""
        content = colored(content, color)
        print(content)
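The added print_beautiful helper is a thin wrapper around termcolor, so models constructed with beautify=True can route their console output through it (the GPT4VisionAPI hunks below do the same inline). A minimal standalone sketch of the behavior, assuming only that termcolor is installed and that colored is imported in the module as the method requires:

from termcolor import colored


def print_beautiful(content: str, color: str = "cyan") -> None:
    # mirrors the method added above: colorize the text, then print it
    print(colored(content, color))


print_beautiful("GPT-4 Vision response goes here")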

@@ -1,3 +1,4 @@
import logging
import asyncio
import base64
import concurrent.futures
@@ -54,16 +55,27 @@ class GPT4VisionAPI:
        self,
        openai_api_key: str = openai_api_key,
        model_name: str = "gpt-4-vision-preview",
        logging_enabled: bool = False,
        max_workers: int = 10,
        max_tokens: str = 300,
        openai_proxy: str = "https://api.openai.com/v1/chat/completions",
        beautify: bool = False,
    ):
        super().__init__()
        self.openai_api_key = openai_api_key
        self.logging_enabled = logging_enabled
        self.model_name = model_name
        self.max_workers = max_workers
        self.max_tokens = max_tokens
        self.openai_proxy = openai_proxy
        self.beautify = beautify

        if self.logging_enabled:
            logging.basicConfig(level=logging.DEBUG)
        else:
            # Disable debug logs for requests and urllib3
            logging.getLogger("requests").setLevel(logging.WARNING)
            logging.getLogger("urllib3").setLevel(logging.WARNING)

    def encode_image(self, img: str):
        """Encode image to base64."""
@@ -83,7 +95,7 @@ class GPT4VisionAPI:
                "Authorization": f"Bearer {openai_api_key}",
            }
            payload = {
                "model": self.model_name,
                "model": "gpt-4-vision-preview",
                "messages": [
                    {
                        "role": "user",
@@ -103,14 +115,18 @@ class GPT4VisionAPI:
                "max_tokens": self.max_tokens,
            }
            response = requests.post(
                "https://api.openai.com/v1/chat/completions",
                self.openai_proxy,
                headers=headers,
                json=payload,
            )
            out = response.json()
            content = out["choices"][0]["message"]["content"]
            print(content)
            if self.beautify:
                content = colored(content, "cyan")
                print(content)
            else:
                print(content)
        except Exception as error:
            print(f"Error with the request: {error}")
            raise error
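For reference, the request body assembled in this hunk follows the OpenAI chat-completions vision format; the POST target is now self.openai_proxy instead of the hardcoded URL, so the same payload can go to a proxy or compatible self-hosted endpoint. A self-contained sketch of the payload shape (the entries inside "content" are not visible in this diff and are an assumption based on that API):

def build_vision_payload(task: str, base64_image: str, max_tokens: int = 300) -> dict:
    # illustrative helper, not part of the commit
    return {
        "model": "gpt-4-vision-preview",
        "messages": [
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": task},
                    {
                        "type": "image_url",
                        "image_url": {"url": f"data:image/jpeg;base64,{base64_image}"},
                    },
                ],
            }
        ],
        "max_tokens": max_tokens,
    }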
@@ -151,11 +167,14 @@ class GPT4VisionAPI:
            out = response.json()
            content = out["choices"][0]["message"]["content"]
            print(content)
            if self.beautify:
                content = colored(content, "cyan")
                print(content)
            else:
                print(content)
        except Exception as error:
            print(f"Error with the request: {error}")
            raise error

    # Function to handle vision tasks
    def run_many(
        self,
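Taken together, the GPT4VisionAPI changes expose logging, the endpoint URL, and output styling as constructor options. A rough usage sketch; the import path and the commented-out call are assumptions based on the surrounding file, while the parameter names come from the __init__ hunk above:

import os

from swarms.models.gpt4_vision_api import GPT4VisionAPI  # import path assumed

api = GPT4VisionAPI(
    openai_api_key=os.getenv("OPENAI_API_KEY", ""),
    logging_enabled=False,  # keeps the requests/urllib3 loggers at WARNING
    openai_proxy="https://api.openai.com/v1/chat/completions",  # now configurable per instance
    beautify=True,  # responses are colorized with termcolor before printing
)
# out = api.run("What is in this image?", "images/cat.jpg")  # assumed call signature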

@@ -0,0 +1,25 @@
import logging
import os
import warnings


def disable_logging():
    warnings.filterwarnings("ignore", category=UserWarning)

    # disable tensorflow warnings
    os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"

    # Set the logging level for the entire module
    logging.basicConfig(level=logging.WARNING)

    try:
        log = logging.getLogger("pytorch")
        log.propagate = False
        log.setLevel(logging.ERROR)
    except Exception as error:
        print(f"Pytorch logging not disabled: {error}")

    for logger_name in ["tensorflow", "h5py", "numexpr", "git", "wandb.docker.auth"]:
        logger = logging.getLogger(logger_name)
        logger.setLevel(logging.WARNING)  # Suppress DEBUG and INFO logs
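A small sketch of how the new helper is used; this mirrors the swarms/__init__.py change at the top of this commit, and the logger names checked are the ones listed in the loop above:

import logging

from swarms.utils.disable_logging import disable_logging

disable_logging()

# the third-party loggers named in the helper now emit WARNING and above only
for name in ("tensorflow", "h5py", "numexpr", "git", "wandb.docker.auth"):
    assert logging.getLogger(name).getEffectiveLevel() >= logging.WARNING

Calling it once at import time, as __init__.py now does, keeps the warning filters, environment tweaks, and logger levels in one place instead of inlined at the top of the package.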