[FUYU] Model Name + tests

pull/286/head
Kye 1 year ago
parent 336c4c47f1
commit 03d6f689ef

@ -18,7 +18,7 @@ class Fuyu(BaseMultiModalModel):
Args:
BaseMultiModalModel (BaseMultiModalModel): [description]
pretrained_path (str, optional): [description]. Defaults to "adept/fuyu-8b".
model_name (str, optional): [description]. Defaults to "adept/fuyu-8b".
device_map (str, optional): [description]. Defaults to "auto".
max_new_tokens (int, optional): [description]. Defaults to 500.
*args: [description]
@ -30,24 +30,23 @@ class Fuyu(BaseMultiModalModel):
>>> from swarms.models import Fuyu
>>> model = Fuyu()
>>> model.run("Hello, world!", "https://upload.wikimedia.org/wikipedia/commons/8/86/Id%C3%A9fix.JPG")
"""
def __init__(
self,
pretrained_path: str = "adept/fuyu-8b",
model_name: str = "adept/fuyu-8b",
device_map: str = "auto",
max_new_tokens: int = 500,
*args,
**kwargs,
):
super().__init__(*args, **kwargs)
self.pretrained_path = pretrained_path
self.model_name = model_name
self.device_map = device_map
self.max_new_tokens = max_new_tokens
self.tokenizer = AutoTokenizer.from_pretrained(
pretrained_path
model_name
)
self.image_processor = FuyuImageProcessor()
self.processor = FuyuProcessor(
@ -56,7 +55,7 @@ class Fuyu(BaseMultiModalModel):
**kwargs,
)
self.model = FuyuForCausalLM.from_pretrained(
pretrained_path,
model_name,
device_map=device_map,
**kwargs,
)

@ -1,9 +1,12 @@
# tests/test_fuyu.py
from unittest.mock import patch
import pytest
from swarms.models import Fuyu
from transformers import FuyuProcessor, FuyuImageProcessor
import torch
from PIL import Image
from transformers import FuyuImageProcessor, FuyuProcessor
from swarms.models.fuyu import Fuyu
# Basic test to ensure instantiation of class.
@ -122,3 +125,68 @@ def test_default_device_map():
# Testing if processor is correctly initialized
def test_processor_initialization(fuyu_instance):
    """The fixture's processor must be a HuggingFace FuyuProcessor."""
    processor = fuyu_instance.processor
    assert isinstance(processor, FuyuProcessor)
# Test `get_img` method with a valid image path
def test_get_img_valid_path(fuyu_instance):
    """`get_img` should return whatever PIL.Image.open yields for a valid path."""
    with patch("PIL.Image.open") as open_mock:
        open_mock.return_value = "Test image"
        loaded = fuyu_instance.get_img("valid/path/to/image.png")
        assert loaded == "Test image"
# Test `get_img` method with an invalid image path
def test_get_img_invalid_path(fuyu_instance):
    """A missing file should surface as FileNotFoundError from `get_img`."""
    with patch("PIL.Image.open") as open_mock:
        open_mock.side_effect = FileNotFoundError
        with pytest.raises(FileNotFoundError):
            fuyu_instance.get_img("invalid/path/to/image.png")
# Test `run` method with valid inputs
def test_run_valid_inputs(fuyu_instance):
    """`run` should decode the model output and return the generated text."""
    with patch.object(fuyu_instance, "get_img") as img_mock:
        with patch.object(fuyu_instance, "processor") as proc_mock:
            with patch.object(fuyu_instance, "model") as model_mock:
                img_mock.return_value = "Test image"
                proc_mock.return_value = {"input_ids": torch.tensor([1, 2, 3])}
                model_mock.generate.return_value = torch.tensor([1, 2, 3])
                proc_mock.batch_decode.return_value = ["Test text"]
                output = fuyu_instance.run(
                    "Hello, world!", "valid/path/to/image.png"
                )
    assert output == ["Test text"]
# Test `run` method with invalid text input
def test_run_invalid_text_input(fuyu_instance):
    """Passing None as the task must raise."""
    with pytest.raises(Exception):
        fuyu_instance.run(None, "valid/path/to/image.png")
# Test `run` method with empty text input
def test_run_empty_text_input(fuyu_instance):
    """An empty task string must raise."""
    with pytest.raises(Exception):
        fuyu_instance.run("", "valid/path/to/image.png")
# Test `run` method with very long text input
def test_run_very_long_text_input(fuyu_instance):
    """A 10k-character task is expected to exceed limits and raise."""
    oversized_task = "A" * 10000
    with pytest.raises(Exception):
        fuyu_instance.run(oversized_task, "valid/path/to/image.png")
# Test `run` method with invalid image path
def test_run_invalid_image_path(fuyu_instance):
    """`run` should propagate FileNotFoundError raised while loading the image."""
    with patch.object(fuyu_instance, "get_img") as img_mock:
        img_mock.side_effect = FileNotFoundError
        with pytest.raises(FileNotFoundError):
            fuyu_instance.run("Hello, world!", "invalid/path/to/image.png")
# Test `__init__` method with default parameters
def test_init_default_parameters():
    """Default construction should expose the published defaults.

    NOTE: the attribute was renamed from ``pretrained_path`` to
    ``model_name`` in this change set; asserting the old name would
    raise ``AttributeError`` rather than test anything.
    """
    fuyu_instance = Fuyu()
    assert fuyu_instance.model_name == "adept/fuyu-8b"
    assert fuyu_instance.device_map == "auto"
    assert fuyu_instance.max_new_tokens == 500
# Test `__init__` method with custom parameters
def test_init_custom_parameters():
    """Positional overrides (model_name, device_map, max_new_tokens) stick.

    NOTE: checks ``model_name`` — the ``pretrained_path`` attribute was
    renamed in this change set, so the old assertion would raise
    ``AttributeError``.
    """
    fuyu_instance = Fuyu("custom/path", "cpu", 1000)
    assert fuyu_instance.model_name == "custom/path"
    assert fuyu_instance.device_map == "cpu"
    assert fuyu_instance.max_new_tokens == 1000

@ -136,3 +136,66 @@ def test_run_with_empty_prompts(idefics_instance):
Exception
): # Replace Exception with the actual exception that may arise for an empty prompt.
idefics_instance.run([])
# Test `run` method with batched_mode set to False
def test_run_batched_mode_false(idefics_instance):
    """With batched_mode disabled, `run` still returns the decoded text."""
    prompt = "User: Test"
    with patch.object(idefics_instance, "processor") as proc_mock, patch.object(
        idefics_instance, "model"
    ) as model_mock:
        proc_mock.return_value = {"input_ids": torch.tensor([1, 2, 3])}
        model_mock.generate.return_value = torch.tensor([1, 2, 3])
        proc_mock.batch_decode.return_value = ["Test"]
        idefics_instance.batched_mode = False
        assert idefics_instance.run(prompt) == ["Test"]
# Test `run` method with an exception
def test_run_with_exception(idefics_instance):
    """Processor failures should propagate out of `run`."""
    with patch.object(idefics_instance, "processor") as proc_mock:
        proc_mock.side_effect = Exception("Test exception")
        with pytest.raises(Exception):
            idefics_instance.run("User: Test")
# Test `set_model_name` method
def test_set_model_name(idefics_instance):
    """`set_model_name` must update the attribute and reload the weights."""
    target = "new_model_name"
    with patch.object(
        IdeficsForVisionText2Text, "from_pretrained"
    ) as load_mock:
        with patch.object(AutoProcessor, "from_pretrained"):
            idefics_instance.set_model_name(target)
    assert idefics_instance.model_name == target
    # The model must be reloaded from the new checkpoint in bfloat16.
    load_mock.assert_called_with(target, torch_dtype=torch.bfloat16)
# Test `__init__` method with device set to None
def test_init_device_none():
    """Without CUDA available, device=None should fall back to "cpu"."""
    with patch("torch.cuda.is_available", return_value=False):
        instance = Idefics(device=None)
    assert instance.device == "cpu"
# Test `__init__` method with device set to "cuda"
def test_init_device_cuda():
    """When CUDA is reported available, an explicit "cuda" device is kept."""
    with patch("torch.cuda.is_available", return_value=True):
        instance = Idefics(device="cuda")
    assert instance.device == "cuda"
Loading…
Cancel
Save