diff --git a/example.py b/example.py
index 0e48169e..04a4c6c8 100644
--- a/example.py
+++ b/example.py
@@ -1,7 +1,14 @@
+import os
+
 from langchain.llms import OpenAIChat
 from swarms import Worker
 
-llm = OpenAIChat(model_name="gpt-4", openai_api_key="sk-<REDACTED>", temperature=0.5)
+# Read the API key from the OPENAI_API_KEY environment variable instead of hardcoding it
+llm = OpenAIChat(
+    model_name="gpt-4",
+    openai_api_key=os.getenv("OPENAI_API_KEY"),
+    temperature=0.5,
+)
 
 node = Worker(
     llm=llm,
diff --git a/swarms/swarms/base.py b/swarms/swarms/base.py
index 46181585..f8bc8100 100644
--- a/swarms/swarms/base.py
+++ b/swarms/swarms/base.py
@@ -7,6 +7,86 @@ class AbstractSwarm(ABC):
     """
     Abstract class for swarm simulation architectures
 
+
+    Methods:
+    ---------
+
+    communicate()
+        Communicate with the swarm through the orchestrator, protocols, and the universal communication layer
+
+    run()
+        Run the swarm
+
+    arun()
+        Run the swarm asynchronously
+
+    add_worker(worker: "AbstractWorker")
+        Add a worker to the swarm
+
+    remove_worker(worker: "AbstractWorker")
+        Remove a worker from the swarm
+
+    broadcast(message: str, sender: Optional["AbstractWorker"] = None)
+        Broadcast a message to all workers
+
+    reset()
+        Reset the swarm
+
+    plan(task: str)
+        Workers must individually plan using a workflow or pipeline
+
+    direct_message(message: str, sender: "AbstractWorker", recipient: "AbstractWorker")
+        Send a direct message to a worker
+
+    autoscaler(num_workers: int, worker: List["AbstractWorker"])
+        Autoscaler that acts like Kubernetes for autonomous agents
+
+    get_worker_by_id(id: str) -> "AbstractWorker"
+        Locate a worker by id
+
+    get_worker_by_name(name: str) -> "AbstractWorker"
+        Locate a worker by name
+
+    assign_task(worker: "AbstractWorker", task: Any) -> Dict
+        Assign a task to a worker
+
+    get_all_tasks(worker: "AbstractWorker", task: Any)
+        Get all tasks
+
+    get_finished_tasks() -> List[Dict]
+        Get all finished tasks
+
+    get_pending_tasks() -> List[Dict]
+        Get all pending tasks
+
+    pause_worker(worker: "AbstractWorker", worker_id: str)
+        Pause a worker
+
+    resume_worker(worker: "AbstractWorker", worker_id: str)
+        Resume a worker
+
+    stop_worker(worker: "AbstractWorker", worker_id: str)
+        Stop a worker
+
+    restart_worker(worker: "AbstractWorker")
+        Restart a worker
+
+    scale_up(num_worker: int)
+        Scale up the number of workers
+
+    scale_down(num_worker: int)
+        Scale down the number of workers
+
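+    Example
+    -------
+    Illustrative sketch only; assumes a concrete subclass (here called
+    MySwarm) that implements the abstract methods above:
+
+        swarm = MySwarm()
+        worker = swarm.get_worker_by_name("worker-1")
+        swarm.assign_task(worker, {"objective": "summarize the report"})
+        swarm.run()
+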
     """
 
     # TODO: Pass in abstract LLM class that can utilize Hf or Anthropic models, Move away from OPENAI