From a56d88904eb4dc0a899ad0d165190947050a5e4c Mon Sep 17 00:00:00 2001
From: Zack <zack@zackbradshaw.com>
Date: Fri, 20 Oct 2023 02:53:23 -0500
Subject: [PATCH] fix: Debugging integration

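Wire the BingChat model into the worker tooling while debugging the
integration: add a runnable bingchat.py example plus a sample cookies.json,
port the playground BingChat demo to the autogpt tool helpers, fold
EdgeGPTTool into swarms/tools/autogpt.py (removing swarms/tools/edge_gpt.py),
and temporarily comment out the structs exports and the Task artifacts field.
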
Former-commit-id: 7875286776f4b547824b5cc8a6ead02550945891
---
 bingchat.py                   | 19 +++++++++++++++++++
 cookies.json                  |  6 ++++++
 playground/models/bingchat.py | 26 ++++++++++++++++++++------
 swarms/models/bing_chat.py    |  1 +
 swarms/structs/__init__.py    |  4 ++--
 swarms/structs/task.py        |  5 +----
 swarms/tools/autogpt.py       | 13 +++++++++++++
 swarms/tools/edge_gpt.py      |  9 ---------
 8 files changed, 62 insertions(+), 21 deletions(-)
 create mode 100644 bingchat.py
 create mode 100644 cookies.json
 delete mode 100644 swarms/tools/edge_gpt.py

diff --git a/bingchat.py b/bingchat.py
new file mode 100644
index 00000000..3ba508d1
--- /dev/null
+++ b/bingchat.py
@@ -0,0 +1,19 @@
+from swarms.models.bing_chat import BingChat
+from swarms.workers.worker import Worker
+from swarms.tools.autogpt import EdgeGPTTool, tool
+
+
+# Initialize the language model.
+# It can be swapped out for other providers, e.g. Anthropic or Hugging Face models such as Mistral.
+llm = BingChat(cookies_path="./cookies.json")
+
+# Initialize the Worker with the custom tool
+worker = Worker(
+    llm=llm,
+    ai_name="EdgeGPT Worker",
+)
+
+# Use the worker to process a task
+task = "Hello, my name is ChatGPT"
+response = worker.run(task)
+print(response)
diff --git a/cookies.json b/cookies.json
new file mode 100644
index 00000000..bd49de75
--- /dev/null
+++ b/cookies.json
@@ -0,0 +1,6 @@
+[
+    {
+        "name": "cookie1",
+        "value": "1GJjj1-tM6Jlo4HFtnbocQ3r0QbQ9Aq_R65dqbcSWKzKxnN8oEMW1xa4RlsJ_nGyNjFlXQRzMWRR2GK11bve8-6n_bjF0zTczYcQQ8oDB8W66jgpIWSL7Hr4hneB0R9dIt-OQ4cVPs4eehL2lcRCObWQr0zkG14MHlH5EMwAKthv_NNIQSfThq4Ey2Hmzhq9sRuyS04JveHdLC9gfthJ8xk3J12yr7j4HsynpzmvFUcA"
+    }
+]
diff --git a/playground/models/bingchat.py b/playground/models/bingchat.py
index 746ac2e0..bd2589b8 100644
--- a/playground/models/bingchat.py
+++ b/playground/models/bingchat.py
@@ -1,17 +1,31 @@
-from swarms.models.bing_chat import EdgeGPTModel
+from swarms.models.bing_chat import BingChat
 from swarms.workers.worker import Worker
-from swarms.tools.tool import EdgeGPTTool
+from swarms.tools.autogpt import EdgeGPTTool, tool
+from swarms.models import OpenAIChat
+import os 
+
+api_key = os.getenv("OPENAI_API_KEY")
 
 # Initialize the EdgeGPTModel
-edgegpt = EdgeGPTModel(cookies_path="./cookies.txt")
+edgegpt_model = BingChat(cookies_path="./cookies.txt")
+
+@tool
+def edgegpt(task: str = None):
+    """A tool to run inference on the EdgeGPT (BingChat) model."""
+    return edgegpt_model(task)
 
-# Initialize the custom tool
-edgegpt_tool = EdgeGPTTool(edgegpt)
+# Initialize the language model.
+# It can be swapped out for other providers, e.g. Anthropic or Hugging Face models such as Mistral.
+llm = OpenAIChat(
+    openai_api_key=api_key,
+    temperature=0.5,
+)
 
 # Initialize the Worker with the custom tool
 worker = Worker(
+    llm=llm,
     ai_name="EdgeGPT Worker",
-    external_tools=[edgegpt_tool],
+    external_tools=[edgegpt]
 )
 
 # Use the worker to process a task
diff --git a/swarms/models/bing_chat.py b/swarms/models/bing_chat.py
index c91690e5..0672ef28 100644
--- a/swarms/models/bing_chat.py
+++ b/swarms/models/bing_chat.py
@@ -29,6 +29,7 @@ class BingChat:
         self.cookies = json.loads(open(cookies_path, encoding="utf-8").read())
         self.bot = asyncio.run(Chatbot.create(cookies=self.cookies))
 
+        print(self.bot.__dict__)
     def __call__(self, prompt: str, style: ConversationStyle = ConversationStyle.creative) -> str:
         """
         Get a text response using the EdgeGPT model based on the provided prompt.
diff --git a/swarms/structs/__init__.py b/swarms/structs/__init__.py
index c7454474..045f4c92 100644
--- a/swarms/structs/__init__.py
+++ b/swarms/structs/__init__.py
@@ -1,2 +1,2 @@
-from swarms.structs.workflow import Workflow
-from swarms.structs.task import Task
+# from swarms.structs.workflow import Workflow
+# from swarms.structs.task import Task
diff --git a/swarms/structs/task.py b/swarms/structs/task.py
index 66dced87..a62817b2 100644
--- a/swarms/structs/task.py
+++ b/swarms/structs/task.py
@@ -7,7 +7,7 @@ from abc import ABC, abstractmethod
 from enum import Enum
 from typing import Any, List, Optional, Union
 
-from pydantic import BaseModel, Field, StrictStr, conlist
+from pydantic import BaseModel, Field, StrictStr
 from swarms.artifacts.main import Artifact
 from swarms.artifacts.error_artifact import ErrorArtifact
 
@@ -137,9 +137,6 @@ class Task(BaseModel):
         None, description="Input parameters for the task. Any value is allowed"
     )
     task_id: StrictStr = Field(..., description="ID of the task")
-    artifacts: conlist(Artifact, min_items=1) = Field(
-        ..., description="A list of artifacts that the task has been produced"
-    )
 
     class Config:
         allow_population_by_field_name = True
diff --git a/swarms/tools/autogpt.py b/swarms/tools/autogpt.py
index ebd802d1..a0e26491 100644
--- a/swarms/tools/autogpt.py
+++ b/swarms/tools/autogpt.py
@@ -142,6 +142,19 @@ class WebpageQATool(BaseTool):
     async def _arun(self, url: str, question: str) -> str:
         raise NotImplementedError
 
+class EdgeGPTTool(BaseTool):
+    """Custom tool that wraps an EdgeGPT/BingChat model to generate responses."""
+    def __init__(
+        self,
+        model,
+        name="EdgeGPTTool",
+        description="Tool that uses EdgeGPTModel to generate responses",
+    ):
+        super().__init__(name=name, description=description)
+        self.model = model
+
+    def _run(self, prompt):
+        return self.model(prompt)
 
 @tool
 def VQAinference(self, inputs):
diff --git a/swarms/tools/edge_gpt.py b/swarms/tools/edge_gpt.py
deleted file mode 100644
index bef44cfb..00000000
--- a/swarms/tools/edge_gpt.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from swarms.tools.tool import BaseTool
-
-class EdgeGPTTool(BaseTool):
-    def __init__(self, model, name="EdgeGPTTool", description="Tool that uses EdgeGPTModel to generate responses"):
-        super().__init__(name=name, description=description)
-        self.model = model
-
-    def _run(self, prompt):
-        return self.model.__call__(prompt)
\ No newline at end of file