diff --git a/swarms/tokenizers/anthropic_tokenizer.py b/swarms/tokenizers/anthropic_tokenizer.py
index 94bced96..4fc00858 100644
--- a/swarms/tokenizers/anthropic_tokenizer.py
+++ b/swarms/tokenizers/anthropic_tokenizer.py
@@ -61,10 +61,7 @@ class AnthropicTokenizer(BaseTokenizer):
         }
         self.model = self.model # or self.DEFAULT_MODEL
         self.max_tokens = self.max_tokens or self.default_max_tokens()
-        self.client = (
-            self.client
-            or import_optional_dependency("anthropic").Anthropic()
-        )
+        self.client = (self.client or import_optional_dependency("anthropic").Anthropic())
 
     def default_max_tokens(self) -> int:
         """
diff --git a/swarms/tokenizers/r_tokenizers.py b/swarms/tokenizers/r_tokenizers.py
index 85cdd3ba..2eac74f4 100644
--- a/swarms/tokenizers/r_tokenizers.py
+++ b/swarms/tokenizers/r_tokenizers.py
@@ -147,7 +147,7 @@ class HuggingFaceTokenizer:
         backend_tokenizer_file = osp.join(model_dir, "tokenizer.json")
         model_file_exists = osp.exists(model_file)
         self.logger = get_logger("lmdeploy")
-        if ( not osp.exists(backend_tokenizer_file) and model_file_exists ):
+        if (not osp.exists(backend_tokenizer_file) and model_file_exists):
             self.logger.warning(
                 "Can not find tokenizer.json. "
                 "It may take long time to initialize the tokenizer."