@@ -49,7 +49,9 @@ def test_cohere_stream_api_error_handling(cohere_instance):
     cohere_instance.model = "base"
     cohere_instance.cohere_api_key = "invalid-api-key"
     with pytest.raises(Exception):
-        generator = cohere_instance.stream("Error handling with invalid API key.")
+        generator = cohere_instance.stream(
+            "Error handling with invalid API key."
+        )
         for token in generator:
             pass
 
@@ -94,13 +96,17 @@ def test_cohere_call_with_stop(cohere_instance):
 
 
 def test_cohere_stream_with_stop(cohere_instance):
-    generator = cohere_instance.stream("Write a story.", stop=["stop1", "stop2"])
+    generator = cohere_instance.stream(
+        "Write a story.", stop=["stop1", "stop2"]
+    )
     for token in generator:
         assert isinstance(token, str)
 
 
 def test_cohere_async_call_with_stop(cohere_instance):
-    response = cohere_instance.async_call("Tell me a joke.", stop=["stop1", "stop2"])
+    response = cohere_instance.async_call(
+        "Tell me a joke.", stop=["stop1", "stop2"]
+    )
     assert response == "Mocked Response from Cohere"
 
 
@@ -187,14 +193,22 @@ def test_cohere_generate_with_embed_english_v2(cohere_instance):
 
 def test_cohere_generate_with_embed_english_light_v2(cohere_instance):
     cohere_instance.model = "embed-english-light-v2.0"
-    response = cohere_instance("Generate embeddings with English Light v2.0 model.")
-    assert response.startswith("Generated embeddings with English Light v2.0 model")
+    response = cohere_instance(
+        "Generate embeddings with English Light v2.0 model."
+    )
+    assert response.startswith(
+        "Generated embeddings with English Light v2.0 model"
+    )
 
 
 def test_cohere_generate_with_embed_multilingual_v2(cohere_instance):
     cohere_instance.model = "embed-multilingual-v2.0"
-    response = cohere_instance("Generate embeddings with Multilingual v2.0 model.")
-    assert response.startswith("Generated embeddings with Multilingual v2.0 model")
+    response = cohere_instance(
+        "Generate embeddings with Multilingual v2.0 model."
+    )
+    assert response.startswith(
+        "Generated embeddings with Multilingual v2.0 model"
+    )
 
 
 def test_cohere_generate_with_embed_english_v3(cohere_instance):
@@ -205,14 +219,22 @@ def test_cohere_generate_with_embed_english_v3(cohere_instance):
 
 def test_cohere_generate_with_embed_english_light_v3(cohere_instance):
     cohere_instance.model = "embed-english-light-v3.0"
-    response = cohere_instance("Generate embeddings with English Light v3.0 model.")
-    assert response.startswith("Generated embeddings with English Light v3.0 model")
+    response = cohere_instance(
+        "Generate embeddings with English Light v3.0 model."
+    )
+    assert response.startswith(
+        "Generated embeddings with English Light v3.0 model"
+    )
 
 
 def test_cohere_generate_with_embed_multilingual_v3(cohere_instance):
     cohere_instance.model = "embed-multilingual-v3.0"
-    response = cohere_instance("Generate embeddings with Multilingual v3.0 model.")
-    assert response.startswith("Generated embeddings with Multilingual v3.0 model")
+    response = cohere_instance(
+        "Generate embeddings with Multilingual v3.0 model."
+    )
+    assert response.startswith(
+        "Generated embeddings with Multilingual v3.0 model"
+    )
 
 
 def test_cohere_generate_with_embed_multilingual_light_v3(cohere_instance):
@@ -423,7 +445,9 @@ def test_cohere_representation_model_classification(cohere_instance):
 def test_cohere_representation_model_language_detection(cohere_instance):
     # Test using the Representation model for language detection
     cohere_instance.model = "embed-english-v3.0"
-    language = cohere_instance.detect_language("Detect the language of this text.")
+    language = cohere_instance.detect_language(
+        "Detect the language of this text."
+    )
     assert isinstance(language, str)
 
 
@@ -447,7 +471,9 @@ def test_cohere_representation_model_multilingual_embedding(cohere_instance):
     assert len(embedding) > 0
 
 
-def test_cohere_representation_model_multilingual_classification(cohere_instance):
+def test_cohere_representation_model_multilingual_classification(
+    cohere_instance,
+):
     # Test using the Representation model for multilingual text classification
     cohere_instance.model = "embed-multilingual-v3.0"
     classification = cohere_instance.classify("Classify multilingual text.")
@@ -456,7 +482,9 @@ def test_cohere_representation_model_multilingual_classification(cohere_instance
     assert "score" in classification
 
 
-def test_cohere_representation_model_multilingual_language_detection(cohere_instance):
+def test_cohere_representation_model_multilingual_language_detection(
+    cohere_instance,
+):
     # Test using the Representation model for multilingual language detection
     cohere_instance.model = "embed-multilingual-v3.0"
     language = cohere_instance.detect_language(
@@ -471,12 +499,17 @@ def test_cohere_representation_model_multilingual_max_tokens_limit_exceeded(
     # Test handling max tokens limit exceeded error for multilingual model
     cohere_instance.model = "embed-multilingual-v3.0"
     cohere_instance.max_tokens = 10
-    prompt = "This is a test prompt that will exceed the max tokens limit for multilingual model."
+    prompt = (
+        "This is a test prompt that will exceed the max tokens limit for "
+        "multilingual model."
+    )
     with pytest.raises(ValueError):
         cohere_instance.embed(prompt)
 
 
-def test_cohere_representation_model_multilingual_light_embedding(cohere_instance):
+def test_cohere_representation_model_multilingual_light_embedding(
+    cohere_instance,
+):
     # Test using the Representation model for multilingual light text embedding
     cohere_instance.model = "embed-multilingual-light-v3.0"
     embedding = cohere_instance.embed("Generate multilingual light embeddings.")
@@ -484,10 +517,14 @@ def test_cohere_representation_model_multilingual_light_embedding(cohere_instanc
     assert len(embedding) > 0
 
 
-def test_cohere_representation_model_multilingual_light_classification(cohere_instance):
+def test_cohere_representation_model_multilingual_light_classification(
+    cohere_instance,
+):
     # Test using the Representation model for multilingual light text classification
     cohere_instance.model = "embed-multilingual-light-v3.0"
-    classification = cohere_instance.classify("Classify multilingual light text.")
+    classification = cohere_instance.classify(
+        "Classify multilingual light text."
+    )
     assert isinstance(classification, dict)
     assert "class" in classification
     assert "score" in classification
@@ -510,7 +547,10 @@ def test_cohere_representation_model_multilingual_light_max_tokens_limit_exceede
     # Test handling max tokens limit exceeded error for multilingual light model
     cohere_instance.model = "embed-multilingual-light-v3.0"
     cohere_instance.max_tokens = 10
-    prompt = "This is a test prompt that will exceed the max tokens limit for multilingual light model."
+    prompt = (
+        "This is a test prompt that will exceed the max tokens limit for "
+        "multilingual light model."
+    )
     with pytest.raises(ValueError):
         cohere_instance.embed(prompt)
 
@@ -553,19 +593,26 @@ def test_cohere_representation_model_english_classification(cohere_instance):
     assert "score" in classification
 
 
-def test_cohere_representation_model_english_language_detection(cohere_instance):
+def test_cohere_representation_model_english_language_detection(
+    cohere_instance,
+):
     # Test using the Representation model for English language detection
     cohere_instance.model = "embed-english-v3.0"
-    language = cohere_instance.detect_language("Detect the language of English text.")
+    language = cohere_instance.detect_language(
+        "Detect the language of English text."
+    )
     assert isinstance(language, str)
 
 
-def test_cohere_representation_model_english_max_tokens_limit_exceeded(cohere_instance):
+def test_cohere_representation_model_english_max_tokens_limit_exceeded(
+    cohere_instance,
+):
     # Test handling max tokens limit exceeded error for English model
     cohere_instance.model = "embed-english-v3.0"
     cohere_instance.max_tokens = 10
     prompt = (
-        "This is a test prompt that will exceed the max tokens limit for English model."
+        "This is a test prompt that will exceed the max tokens limit for "
+        "English model."
     )
     with pytest.raises(ValueError):
         cohere_instance.embed(prompt)
@@ -579,7 +626,9 @@ def test_cohere_representation_model_english_light_embedding(cohere_instance):
     assert len(embedding) > 0
 
 
-def test_cohere_representation_model_english_light_classification(cohere_instance):
+def test_cohere_representation_model_english_light_classification(
+    cohere_instance,
+):
     # Test using the Representation model for English light text classification
     cohere_instance.model = "embed-english-light-v3.0"
     classification = cohere_instance.classify("Classify English light text.")
@@ -588,7 +637,9 @@ def test_cohere_representation_model_english_light_classification(cohere_instanc
     assert "score" in classification
 
 
-def test_cohere_representation_model_english_light_language_detection(cohere_instance):
+def test_cohere_representation_model_english_light_language_detection(
+    cohere_instance,
+):
     # Test using the Representation model for English light language detection
     cohere_instance.model = "embed-english-light-v3.0"
     language = cohere_instance.detect_language(
@@ -603,7 +654,10 @@ def test_cohere_representation_model_english_light_max_tokens_limit_exceeded(
     # Test handling max tokens limit exceeded error for English light model
     cohere_instance.model = "embed-english-light-v3.0"
     cohere_instance.max_tokens = 10
-    prompt = "This is a test prompt that will exceed the max tokens limit for English light model."
+    prompt = (
+        "This is a test prompt that will exceed the max tokens limit for "
+        "English light model."
+    )
     with pytest.raises(ValueError):
         cohere_instance.embed(prompt)
 