From 50215f229f43843944a4725079f80e7078dcf6ee Mon Sep 17 00:00:00 2001
From: Kye
Date: Tue, 19 Mar 2024 16:13:50 -0700
Subject: [PATCH] [CLEANUP]

---
 example.py                              |  4 +-
 playground/swarms/hierarchical_swarm.py | 23 +++++++++
 pyproject.toml                          |  2 +-
 swarms/models/__init__.py               | 63 ++++++++++++++-----------
 swarms/tools/tool.py                    |  6 +++
 swarms/tools/tool_type.py               | 14 ++++--
 swarms/utils/json_utils.py              | 20 +++++++-
 7 files changed, 95 insertions(+), 37 deletions(-)
 create mode 100644 playground/swarms/hierarchical_swarm.py

diff --git a/example.py b/example.py
index 05cbbcdc..c4691952 100644
--- a/example.py
+++ b/example.py
@@ -1,4 +1,4 @@
-from swarms import Agent, AnthropicChat
+from swarms import Agent, Anthropic
 from langchain.tools import tool
 
 
@@ -13,7 +13,7 @@ def search_api(query: str, max_results: int = 10):
 
 ## Initialize the workflow
 agent = Agent(
-    llm=AnthropicChat(),
+    llm=Anthropic(),
     max_loops="auto",
     autosave=True,
     dashboard=False,
diff --git a/playground/swarms/hierarchical_swarm.py b/playground/swarms/hierarchical_swarm.py
new file mode 100644
index 00000000..f785dfb0
--- /dev/null
+++ b/playground/swarms/hierarchical_swarm.py
@@ -0,0 +1,23 @@
+"""
+Boss selects what agent to use
+B -> W1, W2, W3
+"""
+from typing import List, Optional
+from pydantic import BaseModel, Field
+from swarms.utils.json_utils import str_to_json
+
+class HierarchicalSwarm(BaseModel):
+    class Config:
+        arbitrary_types_allowed = True
+
+    agents: Optional[List[str]] = Field(
+        None, title="List of agents in the hierarchical swarm"
+    )
+    task: Optional[str] = Field(None, title="Task to be done by the agents")
+
+
+all_agents = HierarchicalSwarm()
+
+agents_schema = HierarchicalSwarm.model_json_schema()
+agents_schema = str_to_json(agents_schema)
+print(agents_schema)
diff --git a/pyproject.toml b/pyproject.toml
index 2d91c34d..4f5fb67d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -47,7 +47,7 @@ tiktoken = "0.4.0"
 ratelimit = "2.2.1"
 loguru = "0.7.2"
 huggingface-hub = "*"
-pydantic = "*"
+pydantic = "2.6.4"
 tenacity = "8.2.2"
 Pillow = "9.4.0"
 chromadb = "*"
diff --git a/swarms/models/__init__.py b/swarms/models/__init__.py
index d68bb2e5..92b0e929 100644
--- a/swarms/models/__init__.py
+++ b/swarms/models/__init__.py
@@ -1,12 +1,3 @@
-from swarms.models.popular_llms import (
-    AnthropicChat,
-    CohereChat,
-    MosaicMLChat,
-    OpenAILLM,
-    ReplicateLLM,
-    AzureOpenAILLM,
-    OpenAIChatLLM,
-)
 from swarms.models.base_embedding_model import BaseEmbeddingModel
 from swarms.models.base_llm import AbstractLLM  # noqa: E402
 from swarms.models.base_multimodal_model import BaseMultiModalModel
@@ -15,7 +6,6 @@ from swarms.models.clipq import CLIPQ  # noqa: E402
 from swarms.models.fire_function import FireFunctionCaller
 from swarms.models.fuyu import Fuyu  # noqa: E402
 from swarms.models.gemini import Gemini  # noqa: E402
-from swarms.models.gigabind import Gigabind  # noqa: E402
 from swarms.models.gpt4_vision_api import GPT4VisionAPI  # noqa: E402
 from swarms.models.huggingface import HuggingfaceLLM  # noqa: E402
 from swarms.models.idefics import Idefics  # noqa: E402
@@ -28,8 +18,30 @@ from swarms.models.mpt import MPT7B  # noqa: E402
 from swarms.models.nougat import Nougat  # noqa: E402
 from swarms.models.openai_tts import OpenAITTS  # noqa: E402
 from swarms.models.petals import Petals  # noqa: E402
+from swarms.models.popular_llms import (
+    AnthropicChat as Anthropic,
+)
+from swarms.models.popular_llms import (
+    AzureOpenAILLM as AzureOpenAI,
+)
+from swarms.models.popular_llms import (
+    CohereChat as Cohere,
+)
+from swarms.models.popular_llms import (
+    MosaicMLChat as MosaicML,
+)
+from swarms.models.popular_llms import (
+    OpenAIChatLLM as OpenAIChat,
+)
+from swarms.models.popular_llms import (
+    OpenAILLM as OpenAI,
+)
+from swarms.models.popular_llms import (
+    ReplicateLLM as Replicate,
+)
 from swarms.models.qwen import QwenVLMultiModal  # noqa: E402
-from swarms.models.roboflow_model import RoboflowMultiModal
+
+# from swarms.models.roboflow_model import RoboflowMultiModal
 from swarms.models.sam_supervision import SegmentAnythingMarkGenerator
 from swarms.models.sampling_params import SamplingParams, SamplingType
 from swarms.models.timm import TimmModel  # noqa: E402
@@ -41,57 +53,54 @@ from swarms.models.types import (  # noqa: E402
     TextModality,
     VideoModality,
 )
-from swarms.models.ultralytics_model import UltralyticsModel
+
+# from swarms.models.ultralytics_model import UltralyticsModel
 from swarms.models.vilt import Vilt  # noqa: E402
 from swarms.models.wizard_storytelling import WizardLLMStoryTeller
 from swarms.models.zephyr import Zephyr  # noqa: E402
 from swarms.models.zeroscope import ZeroscopeTTV  # noqa: E402
-
 
 __all__ = [
     "AbstractLLM",
+    "Anthropic",
+    "AzureOpenAI",
     "BaseEmbeddingModel",
     "BaseMultiModalModel",
     "BioGPT",
     "CLIPQ",
+    "Cohere",
     "FireFunctionCaller",
     "Fuyu",
-    "Gigabind",
     "GPT4VisionAPI",
+    "Gemini",
     "HuggingfaceLLM",
     "Idefics",
     "Kosmos",
     "LayoutLMDocumentQA",
     "LavaMultiModal",
+    "Replicate",
+    "MPT7B",
     "Mistral",
     "Mixtral",
-    "MPT7B",
+    "MosaicML",
     "Nougat",
+    "OpenAI",
+    "OpenAIChat",
     "OpenAITTS",
     "Petals",
     "QwenVLMultiModal",
-    "RoboflowMultiModal",
     "SamplingParams",
     "SamplingType",
     "SegmentAnythingMarkGenerator",
+    "TextModality",
     "TimmModel",
     "TogetherLLM",
-    "UltralyticsModel",
     "Vilt",
+    "VideoModality",
     "WizardLLMStoryTeller",
     "Zephyr",
     "ZeroscopeTTV",
-    "AnthropicChat",
-    "CohereChat",
-    "MosaicMLChat",
-    "OpenAILLM",
-    "ReplicateLLM",
-    "AzureOpenAILLM",
-    "OpenAIChatLLM",
     "AudioModality",
     "ImageModality",
     "MultimodalData",
-    "TextModality",
-    "Gemini",
-    "VideoModality",
 ]
diff --git a/swarms/tools/tool.py b/swarms/tools/tool.py
index e69de29b..97cf9b84 100644
--- a/swarms/tools/tool.py
+++ b/swarms/tools/tool.py
@@ -0,0 +1,6 @@
+from langchain.tools import (
+    BaseTool,
+    Tool,
+    tool,
+    StructuredTool,
+)  # noqa
diff --git a/swarms/tools/tool_type.py b/swarms/tools/tool_type.py
index 05cd30bf..6f84b54e 100644
--- a/swarms/tools/tool_type.py
+++ b/swarms/tools/tool_type.py
@@ -45,7 +45,7 @@ class OmniTool(BaseModel):
             Tuple: A tuple containing the arguments and keyword arguments.
 
         """
-        try: 
+        try:
            self.transform_models_to_tools()
             logger.info(f"Number of tools: {len(self.tools)}")
             try:
@@ -53,9 +53,13 @@
                     logger.info(f"Running tool: {tool}")
                     tool(*args, **kwargs)
             except Exception as e:
-                logger.error(f"Error occurred while running tools: {e}")
+                logger.error(
+                    f"Error occurred while running tools: {e}"
+                )
                 return args, kwargs
-            
+
         except Exception as error:
-            logger.error(f"Error occurred while running tools: {error}")
-            return args, kwargs
\ No newline at end of file
+            logger.error(
+                f"Error occurred while running tools: {error}"
+            )
+            return args, kwargs
diff --git a/swarms/utils/json_utils.py b/swarms/utils/json_utils.py
index 62dc2323..1ce818bc 100644
--- a/swarms/utils/json_utils.py
+++ b/swarms/utils/json_utils.py
@@ -3,7 +3,7 @@ import json
 from pydantic import BaseModel
 
 
-def base_model_schema_to_json(model: BaseModel):
+def base_model_schema_to_json(model: BaseModel, indent: int = 3):
     """
     Converts the JSON schema of a base model to a formatted JSON string.
 
@@ -13,7 +13,7 @@
     Returns:
         str: The JSON schema of the base model as a formatted JSON string.
     """
-    return json.dumps(model.model_json_schema(), indent=2)
+    return json.dumps(model.model_json_schema(), indent=indent)
 
 
 def extract_json_from_str(response: str):
@@ -48,3 +48,19 @@ def base_model_to_json(base_model_instance: BaseModel) -> str:
     json_string = json.dumps(model_dict)
 
     return json_string
+
+
+
+def str_to_json(response: str, indent: int = 3):
+    """
+    Converts a string representation of JSON to a JSON object.
+
+    Args:
+        response (str): The string representation of JSON.
+        indent (int, optional): The number of spaces to use for indentation in the JSON output. Defaults to 3.
+
+    Returns:
+        str: The JSON object as a string.
+
+    """
+    return json.dumps(response, indent=indent)