Merge pull request #65 from ZackBradshaw/master
sync with kye
Former-commit-id: f596c40fa9
@@ -0,0 +1,19 @@
from swarms.models.bing_chat import BingChat
from swarms.workers.worker import Worker
from swarms.tools.autogpt import EdgeGPTTool, tool  # tool helpers, unused in this minimal example

# Initialize the language model.
# This model can be swapped out for others, e.g. Anthropic models or Hugging Face models such as Mistral.
llm = BingChat(cookies_path="./cookies.json")

# Initialize the Worker with the language model
worker = Worker(
    llm=llm,
    ai_name="EdgeGPT Worker",
)

# Use the worker to process a task
task = "Hello, my name is ChatGPT"
response = worker.run(task)
print(response)
@@ -0,0 +1,6 @@
[
    {
        "name": "cookie1",
        "value": "1GJjj1-tM6Jlo4HFtnbocQ3r0QbQ9Aq_R65dqbcSWKzKxnN8oEMW1xa4RlsJ_nGyNjFlXQRzMWRR2GK11bve8-6n_bjF0zTczYcQQ8oDB8W66jgpIWSL7Hr4hneB0R9dIt-OQ4cVPs4eehL2lcRCObWQr0zkG14MHlH5EMwAKthv_NNIQSfThq4Ey2Hmzhq9sRuyS04JveHdLC9gfthJ8xk3J12yr7j4HsynpzmvFUcA"
    }
]
@@ -0,0 +1,70 @@
## BingChat User Guide

Welcome to the BingChat user guide! This document provides a step-by-step tutorial on how to leverage the BingChat class, an interface to Microsoft's Bing Chat built on the EdgeGPT library.

### Table of Contents
1. [Installation & Prerequisites](#installation)
2. [Setting Up BingChat](#setup)
3. [Interacting with BingChat](#interacting)
4. [Generating Images](#images)
5. [Managing Cookies](#cookies)

### Installation & Prerequisites <a name="installation"></a>

Before initializing the BingChat model, ensure you have the necessary dependencies installed:

```shell
pip install EdgeGPT
```

Additionally, you must have a `cookies.json` file, which is required for authenticating with EdgeGPT.
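This pull request ships a sample `cookies.json` containing a list of objects with `name` and `value` fields. As an optional sanity check, you can verify that your exported file parses before handing it to BingChat (a minimal sketch; the path is a placeholder):

```python
import json
from pathlib import Path

cookies_path = Path("./cookies.json")  # adjust to wherever you saved your cookies
cookies = json.loads(cookies_path.read_text())

# Expect a list of cookie records, each with "name" and "value" keys
assert isinstance(cookies, list)
assert all("name" in c and "value" in c for c in cookies)
print(f"Loaded {len(cookies)} cookie(s)")
```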
### Setting Up BingChat <a name="setup"></a>

To start, import the BingChat class:

```python
from bing_chat import BingChat
```

Initialize BingChat with the path to your `cookies.json`:

```python
chat = BingChat(cookies_path="./path/to/cookies.json")
```
### Interacting with BingChat <a name="interacting"></a>

You can obtain text responses from the EdgeGPT model by simply calling the instantiated object:

```python
response = chat("Hello, my name is ChatGPT")
print(response)
```

You can also specify the conversation style:

```python
from bing_chat import ConversationStyle

response = chat("Tell me a joke", style=ConversationStyle.creative)
print(response)
```
### Generating Images <a name="images"></a>

BingChat allows you to generate images based on text prompts:

```python
image_path = chat.create_img("Sunset over mountains", auth_cookie="YOUR_AUTH_COOKIE")
print(f"Image saved at: {image_path}")
```

Ensure you provide the required `auth_cookie` for image generation.

### Managing Cookies <a name="cookies"></a>

You can set a directory path for managing cookies using the `set_cookie_dir_path` method:

```python
BingChat.set_cookie_dir_path("./path/to/cookies_directory")
```
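Putting the pieces together, here is a minimal end-to-end sketch using only the calls shown above (the paths are placeholders):

```python
from bing_chat import BingChat, ConversationStyle

# Point the cookie manager at a directory of cookie files, then start a chat
BingChat.set_cookie_dir_path("./cookies")
chat = BingChat(cookies_path="./cookies/cookies.json")

response = chat("Summarize the plot of Hamlet in two sentences", style=ConversationStyle.creative)
print(response)
```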
@@ -0,0 +1,118 @@
## ChatGPT User Guide with Abstraction

Welcome to the ChatGPT user guide! This document will walk you through the Reverse Engineered ChatGPT API, its usage, and how to leverage the abstraction in `revgpt.py` for seamless integration.

### Table of Contents
1. [Installation](#installation)
2. [Initial Setup and Configuration](#initial-setup)
3. [Using the Abstract Class from `revgpt.py`](#using-abstract-class)
4. [V1 Standard ChatGPT](#v1-standard-chatgpt)
5. [V3 Official Chat API](#v3-official-chat-api)
6. [Credits & Disclaimers](#credits-disclaimers)

### Installation <a name="installation"></a>

To kickstart your journey with ChatGPT, first install the ChatGPT package:

```shell
python -m pip install --upgrade revChatGPT
```

**Supported Python Versions:**
- Minimum: Python 3.9
- Recommended: Python 3.11+

### Initial Setup and Configuration <a name="initial-setup"></a>

1. **Account Setup:** Register on [OpenAI's ChatGPT](https://chat.openai.com/).
2. **Authentication:** Obtain your access token from OpenAI's platform.
3. **Environment Variables:** Configure your environment with the necessary variables. An example of these variables can be found at the bottom of the guide, and a minimal sketch follows this list.
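For reference, the example scripts in this pull request assemble their configuration from environment variables as follows (the variable names come from those scripts; the values you set are your own). Your ChatGPT access token is expected in `ACCESS_TOKEN`, or in `OPENAI_API_KEY` for the official API path shown later:

```python
import os

from dotenv import load_dotenv

load_dotenv()  # reads a local .env file, if present

config = {
    "model": os.getenv("REVGPT_MODEL"),
    "plugin_ids": [os.getenv("REVGPT_PLUGIN_IDS")],
    "disable_history": os.getenv("REVGPT_DISABLE_HISTORY") == "True",
    "PUID": os.getenv("REVGPT_PUID"),
    "unverified_plugin_domains": [os.getenv("REVGPT_UNVERIFIED_PLUGIN_DOMAINS")],
}
```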
### Using the Abstract Class from `revgpt.py` <a name="using-abstract-class"></a>

The abstraction provided in `revgpt.py` is designed to simplify your interactions with ChatGPT.

1. **Import the Necessary Modules:**

```python
import os
from dotenv import load_dotenv
from revgpt import AbstractChatGPT
```

2. **Load Environment Variables:**

```python
load_dotenv()
```

3. **Initialize the ChatGPT Abstract Class** (here `config` is the settings dictionary described in the previous section):

```python
chat = AbstractChatGPT(api_key=os.getenv("ACCESS_TOKEN"), **config)
```

4. **Start Interacting with ChatGPT:**

```python
response = chat.ask("Hello, ChatGPT!")
print(response)
```

With the abstract class, you can seamlessly switch between different versions or models of ChatGPT without changing much of your code.
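To make the switch concrete, here is a minimal sketch (not part of `revgpt.py` itself) that chooses between the V1 and V4 wrappers shipped in this pull request behind one helper. It assumes both classes expose `run()`, as the examples below do, and the `use_v4` flag is purely illustrative:

```python
import os

from swarms.models.revgptV1 import RevChatGPTModelv1
from swarms.models.revgptV4 import RevChatGPTModelv4


def build_model(use_v4: bool, config: dict):
    """Return a V4 or V1 model behind the same .run() interface."""
    model_cls = RevChatGPTModelv4 if use_v4 else RevChatGPTModelv1
    return model_cls(access_token=os.getenv("ACCESS_TOKEN"), **config)


model = build_model(use_v4=True, config={})
print(model.run("Hello!"))
```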
### V1 Standard ChatGPT <a name="v1-standard-chatgpt"></a>

If you wish to use V1 specifically:

1. Import the model:

```python
from swarms.models.revgptV1 import RevChatGPTModelv1
```

2. Initialize:

```python
model = RevChatGPTModelv1(access_token=os.getenv("ACCESS_TOKEN"), **config)
```

3. Interact:

```python
response = model.run("What's the weather like?")
print(response)
```
### V3 Official Chat API <a name="v3-official-chat-api"></a>

For users looking to integrate the official V3 API:

1. Import the model:

```python
from swarms.models.revgptV4 import RevChatGPTModelv4
```

2. Initialize:

```python
model = RevChatGPTModelv4(access_token=os.getenv("OPENAI_API_KEY"), **config)
```

3. Interact:

```python
response = model.run("Tell me a fun fact!")
print(response)
```

### Credits & Disclaimers <a name="credits-disclaimers"></a>

- This project is not an official OpenAI product and is not affiliated with OpenAI. Use at your own discretion.
- Many thanks to all the contributors who have made this project possible.
- Special acknowledgment to [virtualharby](https://www.youtube.com/@virtualharby) for the motivating music!

---

By following this guide, you should now have a clear understanding of how to use the Reverse Engineered ChatGPT API and its abstraction. Happy coding!
File diff suppressed because one or more lines are too long
@@ -0,0 +1,25 @@
import os

from dotenv import load_dotenv

from swarms.models.revgptV4 import RevChatGPTModelv4
from swarms.workers.worker import Worker

load_dotenv()

# Configuration for the reverse-engineered ChatGPT model, read from environment variables
config = {
    "model": os.getenv("REVGPT_MODEL"),
    "plugin_ids": [os.getenv("REVGPT_PLUGIN_IDS")],
    "disable_history": os.getenv("REVGPT_DISABLE_HISTORY") == "True",
    "PUID": os.getenv("REVGPT_PUID"),
    "unverified_plugin_domains": [os.getenv("REVGPT_UNVERIFIED_PLUGIN_DOMAINS")]
}

llm = RevChatGPTModelv4(access_token=os.getenv("ACCESS_TOKEN"), **config)

worker = Worker(
    ai_name="Optimus Prime",
    llm=llm
)

task = "What were the winning boston marathon times for the past 5 years (ending in 2022)? Generate a table of the year, name, country of origin, and times."
response = worker.run(task)
print(response)
@@ -0,0 +1,29 @@
import os
import sys

from dotenv import load_dotenv

# Make the repository root importable before pulling in the swarms modules
root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(root_dir)

from swarms.models.revgptV4 import RevChatGPTModelv4
from swarms.models.revgptV1 import RevChatGPTModelv1

load_dotenv()

# Configuration for the reverse-engineered ChatGPT model, read from environment variables
config = {
    "model": os.getenv("REVGPT_MODEL"),
    "plugin_ids": [os.getenv("REVGPT_PLUGIN_IDS")],
    "disable_history": os.getenv("REVGPT_DISABLE_HISTORY") == "True",
    "PUID": os.getenv("REVGPT_PUID"),
    "unverified_plugin_domains": [os.getenv("REVGPT_UNVERIFIED_PLUGIN_DOMAINS")]
}

# For the v1 model
model = RevChatGPTModelv1(access_token=os.getenv("ACCESS_TOKEN"), **config)
# model = RevChatGPTModelv4(access_token=os.getenv("ACCESS_TOKEN"), **config)

# For the model backed by the official API, use the OpenAI API key instead:
# model = RevChatGPTModelv4(access_token=os.getenv("OPENAI_API_KEY"), **config)

task = "Write a cli snake game"
response = model.run(task)
print(response)
@@ -0,0 +1,198 @@
"""
A module that contains all the types used in this project
"""

import os
import platform
from enum import Enum
from typing import Union

python_version = list(platform.python_version_tuple())
SUPPORT_ADD_NOTES = int(python_version[0]) >= 3 and int(python_version[1]) >= 11


class ChatbotError(Exception):
    """
    Base class for all Chatbot errors in this project
    """

    def __init__(self, *args: object) -> None:
        if SUPPORT_ADD_NOTES:
            super().add_note(
                "Please check that the input is correct, or you can resolve this issue by filing an issue",
            )
            super().add_note("Project URL: https://github.com/acheong08/ChatGPT")
        super().__init__(*args)
class ActionError(ChatbotError):
    """
    Subclass of ChatbotError

    An error raised because the execution of an operation is blocked
    """

    def __init__(self, *args: object) -> None:
        if SUPPORT_ADD_NOTES:
            super().add_note(
                "The current operation is not allowed, which may be intentional",
            )
        super().__init__(*args)


class ActionNotAllowedError(ActionError):
    """
    Subclass of ActionError

    An error raised because the execution of a disallowed operation is blocked
    """


class ActionRefuseError(ActionError):
    """
    Subclass of ActionError

    An error raised because the execution of a refused operation is blocked.
    """


class CLIError(ChatbotError):
    """
    Subclass of ChatbotError

    The error caused by a CLI program error
    """
class ErrorType(Enum):
    """
    Enumeration class for different types of errors.
    """

    USER_ERROR = -1
    UNKNOWN_ERROR = 0
    SERVER_ERROR = 1
    RATE_LIMIT_ERROR = 2
    INVALID_REQUEST_ERROR = 3
    EXPIRED_ACCESS_TOKEN_ERROR = 4
    INVALID_ACCESS_TOKEN_ERROR = 5
    PROHIBITED_CONCURRENT_QUERY_ERROR = 6
    AUTHENTICATION_ERROR = 7
    CLOUDFLARE_ERROR = 8
class Error(ChatbotError):
    """
    Base class for exceptions in the V1 module.
    """

    def __init__(
        self,
        source: str,
        message: str,
        *args: object,
        code: Union[ErrorType, int] = ErrorType.UNKNOWN_ERROR,
    ) -> None:
        self.source: str = source
        self.message: str = message
        self.code: Union[ErrorType, int] = code
        super().__init__(*args)

    def __str__(self) -> str:
        return f"{self.source}: {self.message} (code: {self.code})"

    def __repr__(self) -> str:
        return f"{self.source}: {self.message} (code: {self.code})"
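# Usage sketch (illustrative, not part of the module): Error bundles a source,
# a human-readable message and an ErrorType code, e.g.
#     raise Error("V1", "token expired", code=ErrorType.EXPIRED_ACCESS_TOKEN_ERROR)
# and str()/repr() render them as "<source>: <message> (code: <code>)".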
class AuthenticationError(ChatbotError):
    """
    Subclass of ChatbotError

    An error raised by a validation failure or exception
    """

    def __init__(self, *args: object) -> None:
        if SUPPORT_ADD_NOTES:
            super().add_note(
                "Please check if your key is correct; it may not be valid",
            )
        super().__init__(*args)


class APIConnectionError(ChatbotError):
    """
    Subclass of ChatbotError

    An exception raised when an API connection fails, due to network or
    other miscellaneous reasons
    """

    def __init__(self, *args: object) -> None:
        if SUPPORT_ADD_NOTES:
            super().add_note(
                "Please check if there is a problem with your network connection",
            )
        super().__init__(*args)


class NotAllowRunning(ActionNotAllowedError):
    """
    Subclass of ActionNotAllowedError

    Direct startup is not allowed for some reason
    """


class ResponseError(APIConnectionError):
    """
    Subclass of APIConnectionError

    An error raised when an API request fails due to network or other miscellaneous reasons
    """


class OpenAIError(APIConnectionError):
    """
    Subclass of APIConnectionError

    An error raised by OpenAI's own server errors
    """


class RequestError(APIConnectionError):
    """
    Subclass of APIConnectionError

    An error raised when there is a problem with the API response, due to network or other
    miscellaneous reasons, or when no reply is received at all
    """
class Colors:
    """
    Colors for printing
    """

    HEADER = "\033[95m"
    OKBLUE = "\033[94m"
    OKCYAN = "\033[96m"
    OKGREEN = "\033[92m"
    WARNING = "\033[93m"
    FAIL = "\033[91m"
    ENDC = "\033[0m"
    BOLD = "\033[1m"
    UNDERLINE = "\033[4m"

    def __init__(self) -> None:
        # Respect the NO_COLOR convention by blanking all ANSI codes
        if os.getenv("NO_COLOR"):
            Colors.HEADER = ""
            Colors.OKBLUE = ""
            Colors.OKCYAN = ""
            Colors.OKGREEN = ""
            Colors.WARNING = ""
            Colors.FAIL = ""
            Colors.ENDC = ""
            Colors.BOLD = ""
            Colors.UNDERLINE = ""
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,2 +1,2 @@
-from swarms.structs.workflow import Workflow
-from swarms.structs.task import Task
+# from swarms.structs.workflow import Workflow
+# from swarms.structs.task import Task
@@ -0,0 +1,166 @@
from functools import wraps
import logging
import os
import re
import secrets
import time

from prompt_toolkit import prompt
from prompt_toolkit import PromptSession
from prompt_toolkit.auto_suggest import AutoSuggestFromHistory
from prompt_toolkit.completion import WordCompleter
from prompt_toolkit.history import InMemoryHistory
from prompt_toolkit.key_binding import KeyBindings
from schemas.typings import Colors

bindings = KeyBindings()

# BASE_URL = environ.get("CHATGPT_BASE_URL", "http://192.168.250.249:9898/api/")
BASE_URL = os.environ.get("CHATGPT_BASE_URL", "https://ai.fakeopen.com/api/")
# BASE_URL = environ.get("CHATGPT_BASE_URL", "https://bypass.churchless.tech/")
def create_keybindings(key: str = "c-@") -> KeyBindings:
    """
    Create keybindings for prompt_toolkit. Default key is ctrl+space.
    For possible keybindings, see: https://python-prompt-toolkit.readthedocs.io/en/stable/pages/advanced_topics/key_bindings.html#list-of-special-keys
    """

    @bindings.add(key)
    def _(event: dict) -> None:
        event.app.exit(result=event.app.current_buffer.text)

    return bindings


def create_session() -> PromptSession:
    return PromptSession(history=InMemoryHistory())


def create_completer(commands: list, pattern_str: str = "$") -> WordCompleter:
    return WordCompleter(words=commands, pattern=re.compile(pattern_str))
def get_input(
    session: PromptSession = None,
    completer: WordCompleter = None,
    key_bindings: KeyBindings = None,
) -> str:
    """
    Multiline input function.
    """
    return (
        session.prompt(
            completer=completer,
            multiline=True,
            auto_suggest=AutoSuggestFromHistory(),
            key_bindings=key_bindings,
        )
        if session
        else prompt(multiline=True)
    )


async def get_input_async(
    session: PromptSession = None,
    completer: WordCompleter = None,
) -> str:
    """
    Multiline input function.
    """
    return (
        await session.prompt_async(
            completer=completer,
            multiline=True,
            auto_suggest=AutoSuggestFromHistory(),
        )
        if session
        else prompt(multiline=True)
    )
def get_filtered_keys_from_object(obj: object, *keys: str) -> set:
    """
    Get a filtered set of object variable names.
    :param keys: Keys to include. If the first key is "not", the remaining keys will be removed from the class keys.
    :return: Set of class keys.
    """
    class_keys = obj.__dict__.keys()
    if not keys:
        return set(class_keys)

    # Remove the passed keys from the class keys.
    if keys[0] == "not":
        return {key for key in class_keys if key not in keys[1:]}
    # Check if all passed keys are valid
    if invalid_keys := set(keys) - class_keys:
        raise ValueError(
            f"Invalid keys: {invalid_keys}",
        )
    # Only return specified keys that are in class_keys
    return {key for key in keys if key in class_keys}
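# Usage sketch (illustrative): for an object whose __dict__ has keys a, b and c,
#     get_filtered_keys_from_object(obj)              -> {"a", "b", "c"}
#     get_filtered_keys_from_object(obj, "a")         -> {"a"}
#     get_filtered_keys_from_object(obj, "not", "a")  -> {"b", "c"}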
def generate_random_hex(length: int = 17) -> str:
    """Generate a random hex string

    Args:
        length (int, optional): Length of the hex string. Defaults to 17.

    Returns:
        str: Random hex string
    """
    return secrets.token_hex(length)


def random_int(min: int, max: int) -> int:
    """Generate a random integer

    Args:
        min (int): Minimum value
        max (int): Maximum value

    Returns:
        int: Random integer
    """
    return secrets.randbelow(max - min) + min
if __name__ == "__main__":
    logging.basicConfig(
        format="%(asctime)s - %(name)s - %(levelname)s - %(funcName)s - %(message)s",
    )

log = logging.getLogger(__name__)


def logger(is_timed: bool):
    """Logger decorator

    Args:
        is_timed (bool): Whether to include the function's running time in the exit log

    Returns:
        _type_: decorated function
    """

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            log.debug(
                "Entering %s with args %s and kwargs %s",
                func.__name__,
                args,
                kwargs,
            )
            start = time.time()
            out = func(*args, **kwargs)
            end = time.time()
            if is_timed:
                log.debug(
                    "Exiting %s with return value %s. Took %s seconds.",
                    func.__name__,
                    out,
                    end - start,
                )
            else:
                log.debug("Exiting %s with return value %s", func.__name__, out)
            return out

        return wrapper

    return decorator
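# Usage sketch (illustrative): decorate a function to log its entry, exit and timing.
#     @logger(is_timed=True)
#     def fetch_data(url):
#         ...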
@@ -0,0 +1,58 @@
import unittest
import json
import os

# Assuming the BingChat class is in a file named "bing_chat.py"
from bing_chat import BingChat, ConversationStyle


class TestBingChat(unittest.TestCase):
    def setUp(self):
        # Path to a mock cookies file for testing
        self.mock_cookies_path = "./mock_cookies.json"
        with open(self.mock_cookies_path, 'w') as file:
            json.dump({"mock_cookie": "mock_value"}, file)

        self.chat = BingChat(cookies_path=self.mock_cookies_path)

    def tearDown(self):
        os.remove(self.mock_cookies_path)

    def test_init(self):
        self.assertIsInstance(self.chat, BingChat)
        self.assertIsNotNone(self.chat.bot)

    def test_call(self):
        # Mocking the asynchronous behavior for the purpose of the test
        self.chat.bot.ask = lambda *args, **kwargs: {"text": "Hello, Test!"}
        response = self.chat("Test prompt")
        self.assertEqual(response, "Hello, Test!")

    def test_create_img(self):
        # Mocking the ImageGen behavior for the purpose of the test
        class MockImageGen:
            def __init__(self, *args, **kwargs):
                pass

            def get_images(self, *args, **kwargs):
                return [{"path": "mock_image.png"}]

            @staticmethod
            def save_images(*args, **kwargs):
                pass

        original_image_gen = BingChat.ImageGen
        BingChat.ImageGen = MockImageGen

        img_path = self.chat.create_img("Test prompt", auth_cookie="mock_auth_cookie")
        self.assertEqual(img_path, "./output/mock_image.png")

        BingChat.ImageGen = original_image_gen

    def test_set_cookie_dir_path(self):
        test_path = "./test_path"
        BingChat.set_cookie_dir_path(test_path)
        self.assertEqual(BingChat.Cookie.dir_path, test_path)


if __name__ == "__main__":
    unittest.main()
@@ -0,0 +1,82 @@
import unittest
from unittest.mock import patch

from swarms.models.revgptV1 import RevChatGPTModelv1

# Note: the patch targets below assume the underlying Chatbot class is imported
# in swarms.models.revgptV1.


class TestRevChatGPT(unittest.TestCase):
    def setUp(self):
        self.access_token = "<your_access_token>"
        self.model = RevChatGPTModelv1(access_token=self.access_token)

    def test_run(self):
        prompt = "What is the capital of France?"
        response = self.model.run(prompt)
        self.assertEqual(response, "The capital of France is Paris.")

    def test_run_time(self):
        prompt = "Generate a 300 word essay about technology."
        response = self.model.run(prompt)
        self.assertLess(self.model.end_time - self.model.start_time, 60)

    def test_generate_summary(self):
        text = "This is a sample text to summarize. It has multiple sentences and details. The summary should be concise."
        summary = self.model.generate_summary(text)
        self.assertLess(len(summary), len(text) / 2)

    def test_enable_plugin(self):
        plugin_id = "some_plugin_id"
        self.model.enable_plugin(plugin_id)
        self.assertIn(plugin_id, self.model.config["plugin_ids"])

    def test_list_plugins(self):
        plugins = self.model.list_plugins()
        self.assertGreater(len(plugins), 0)
        self.assertIsInstance(plugins[0], dict)
        self.assertIn("id", plugins[0])
        self.assertIn("name", plugins[0])

    def test_get_conversations(self):
        conversations = self.model.chatbot.get_conversations()
        self.assertIsInstance(conversations, list)

    @patch("swarms.models.revgptV1.Chatbot.get_msg_history")
    def test_get_msg_history(self, mock_get_msg_history):
        conversation_id = "convo_id"
        self.model.chatbot.get_msg_history(conversation_id)
        mock_get_msg_history.assert_called_with(conversation_id)

    @patch("swarms.models.revgptV1.Chatbot.share_conversation")
    def test_share_conversation(self, mock_share_conversation):
        self.model.chatbot.share_conversation()
        mock_share_conversation.assert_called()

    def test_gen_title(self):
        convo_id = "123"
        message_id = "456"
        title = self.model.chatbot.gen_title(convo_id, message_id)
        self.assertIsInstance(title, str)

    def test_change_title(self):
        convo_id = "123"
        title = "New Title"
        self.model.chatbot.change_title(convo_id, title)
        self.assertEqual(self.model.chatbot.get_msg_history(convo_id)["title"], title)

    def test_delete_conversation(self):
        convo_id = "123"
        self.model.chatbot.delete_conversation(convo_id)
        with self.assertRaises(Exception):
            self.model.chatbot.get_msg_history(convo_id)

    def test_clear_conversations(self):
        self.model.chatbot.clear_conversations()
        conversations = self.model.chatbot.get_conversations()
        self.assertEqual(len(conversations), 0)

    def test_rollback_conversation(self):
        original_convo_id = self.model.chatbot.conversation_id
        self.model.chatbot.rollback_conversation(1)
        self.assertNotEqual(original_convo_id, self.model.chatbot.conversation_id)


if __name__ == "__main__":
    unittest.main()
@@ -0,0 +1,92 @@
import unittest
from unittest.mock import patch

from swarms.models.revgptV4 import RevChatGPTModelv4

# Note: the patch targets below assume the underlying Chatbot class is imported
# in swarms.models.revgptV4.


class TestRevChatGPT(unittest.TestCase):
    def setUp(self):
        self.access_token = "123"
        self.model = RevChatGPTModelv4(access_token=self.access_token)

    def test_run(self):
        prompt = "What is the capital of France?"
        self.model.start_time = 10
        self.model.end_time = 20
        response = self.model.run(prompt)
        self.assertEqual(response, "The capital of France is Paris.")
        self.assertEqual(self.model.start_time, 10)
        self.assertEqual(self.model.end_time, 20)

    def test_generate_summary(self):
        text = "Hello world. This is some text. It has multiple sentences."
        summary = self.model.generate_summary(text)
        self.assertEqual(summary, "")

    @patch("swarms.models.revgptV4.Chatbot.install_plugin")
    def test_enable_plugin(self, mock_install_plugin):
        plugin_id = "plugin123"
        self.model.enable_plugin(plugin_id)
        mock_install_plugin.assert_called_with(plugin_id=plugin_id)

    @patch("swarms.models.revgptV4.Chatbot.get_plugins")
    def test_list_plugins(self, mock_get_plugins):
        mock_get_plugins.return_value = [{"id": "123", "name": "Test Plugin"}]
        plugins = self.model.list_plugins()
        self.assertEqual(len(plugins), 1)
        self.assertEqual(plugins[0]["id"], "123")
        self.assertEqual(plugins[0]["name"], "Test Plugin")

    @patch("swarms.models.revgptV4.Chatbot.get_conversations")
    def test_get_conversations(self, mock_get_conversations):
        self.model.chatbot.get_conversations()
        mock_get_conversations.assert_called()

    @patch("swarms.models.revgptV4.Chatbot.get_msg_history")
    def test_get_msg_history(self, mock_get_msg_history):
        convo_id = "123"
        self.model.chatbot.get_msg_history(convo_id)
        mock_get_msg_history.assert_called_with(convo_id)

    @patch("swarms.models.revgptV4.Chatbot.share_conversation")
    def test_share_conversation(self, mock_share_conversation):
        self.model.chatbot.share_conversation()
        mock_share_conversation.assert_called()

    @patch("swarms.models.revgptV4.Chatbot.gen_title")
    def test_gen_title(self, mock_gen_title):
        convo_id = "123"
        message_id = "456"
        self.model.chatbot.gen_title(convo_id, message_id)
        mock_gen_title.assert_called_with(convo_id, message_id)

    @patch("swarms.models.revgptV4.Chatbot.change_title")
    def test_change_title(self, mock_change_title):
        convo_id = "123"
        title = "New Title"
        self.model.chatbot.change_title(convo_id, title)
        mock_change_title.assert_called_with(convo_id, title)

    @patch("swarms.models.revgptV4.Chatbot.delete_conversation")
    def test_delete_conversation(self, mock_delete_conversation):
        convo_id = "123"
        self.model.chatbot.delete_conversation(convo_id)
        mock_delete_conversation.assert_called_with(convo_id)

    @patch("swarms.models.revgptV4.Chatbot.clear_conversations")
    def test_clear_conversations(self, mock_clear_conversations):
        self.model.chatbot.clear_conversations()
        mock_clear_conversations.assert_called()

    @patch("swarms.models.revgptV4.Chatbot.rollback_conversation")
    def test_rollback_conversation(self, mock_rollback_conversation):
        num = 2
        self.model.chatbot.rollback_conversation(num)
        mock_rollback_conversation.assert_called_with(num)

    @patch("swarms.models.revgptV4.Chatbot.reset_chat")
    def test_reset_chat(self, mock_reset_chat):
        self.model.chatbot.reset_chat()
        mock_reset_chat.assert_called()


if __name__ == "__main__":
    unittest.main()