[Gemini][System Prompt] [ShortTermMemory]

pull/317/head
Kye 1 year ago
parent abf2048f5b
commit a7ca5b79a5

@@ -0,0 +1,25 @@
import os
from dotenv import load_dotenv
from swarms.models.gemini import Gemini
from swarms.prompts.react import react_prompt
load_dotenv()
api_key = os.environ["GEMINI_API_KEY"]
# Establish the prompt and image
task = "What is your name"
img = "images/github-banner-swarms.png"
# Initialize the model
model = Gemini(
    gemini_api_key=api_key,
    model_name="gemini-pro",
    max_tokens=1000,
    system_prompt=react_prompt(task=task),
    temperature=0.5,
)
out = model.chat("Create the code for a react component that displays a name", img=img)
print(out)

@@ -0,0 +1,26 @@
import os
from dotenv import load_dotenv
from swarms.models.gemini import Gemini
from swarms.prompts.react import react_prompt
load_dotenv()
api_key = os.environ["GEMINI_API_KEY"]
# Establish the prompt and image
task = "What is your name"
img = "images/github-banner-swarms.png"
# Initialize the model
model = Gemini(
    gemini_api_key=api_key,
    model_name="gemini-pro",
    max_tokens=1000,
    system_prompt=react_prompt(task=task),
    temperature=0.5,
)
# Run the model
out = model.run("Create the code for a react component that displays a name")
print(out)
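Both snippets above pull GEMINI_API_KEY out of the environment after load_dotenv(), so they assume a .env file is present in the working directory (or a parent); a minimal .env for these examples, with a placeholder value rather than a real key:

GEMINI_API_KEY=your-gemini-api-key

They also assume the banner image referenced as images/github-banner-swarms.png exists relative to the working directory.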

@@ -16,7 +16,7 @@ llm = Gemini(
    gemini_api_key=api_key,
    temperature=0.5,
    max_tokens=1000,
    system_prompt=VISUAL_CHAIN_OF_THOUGHT
    system_prompt=VISUAL_CHAIN_OF_THOUGHT,
)
# Initialize the task

@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "swarms"
version = "2.9.6"
version = "2.9.8"
description = "Swarms - Pytorch"
license = "MIT"
authors = ["Kye Gomez <kye@apac.ai>"]

@@ -2,10 +2,29 @@ import logging
from swarms.structs.base import BaseStructure
import threading
import json
import os


class ShortTermMemory(BaseStructure):
    """Short term memory.

    Args:
        return_str (bool, optional): Whether to return the memory as a string. Defaults to True.
        autosave (bool, optional): Whether to automatically save the memory. Defaults to True.
        *args: Additional positional arguments.
        **kwargs: Additional keyword arguments.

    Example:
        >>> from swarms.memory.short_term_memory import ShortTermMemory
        >>> stm = ShortTermMemory()
        >>> stm.add(role="agent", message="Hello world!")
        >>> stm.add(role="agent", message="How are you?")
        >>> stm.add(role="agent", message="I am fine.")
        >>> stm.add(role="agent", message="How are you?")
        >>> stm.add(role="agent", message="I am fine.")
    """

    def __init__(
        self,
        return_str: bool = True,
@@ -68,6 +87,14 @@ class ShortTermMemory(BaseStructure):
    def update_short_term(
        self, index, role: str, message: str, *args, **kwargs
    ):
        """Update the short term memory.

        Args:
            index (int): Index of the entry to update.
            role (str): Role associated with the message.
            message (str): New message content.
        """
        self.short_term_memory[index] = {
            "role": role,
            "message": message,

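Read together with the docstring example above, the new update_short_term method extends the same workflow; a minimal sketch, assuming the memory is list-backed so that index 1 addresses the second entry that was added:

from swarms.memory.short_term_memory import ShortTermMemory

stm = ShortTermMemory()
stm.add(role="agent", message="Hello world!")
stm.add(role="user", message="How are you?")

# Overwrite the second entry in place with a new role/message pair
stm.update_short_term(index=1, role="user", message="How are you today?")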
@@ -32,6 +32,7 @@ from swarms.models.gpt4_vision_api import GPT4VisionAPI # noqa: E402
from swarms.models.openai_tts import OpenAITTS # noqa: E402
from swarms.models.gemini import Gemini # noqa: E402
from swarms.models.gigabind import Gigabind # noqa: E402
# from swarms.models.gpt4v import GPT4Vision
# from swarms.models.dalle3 import Dalle3
# from swarms.models.distilled_whisperx import DistilWhisperModel # noqa: E402
@@ -64,6 +65,5 @@ __all__ = [
    # "vLLM",
    "OpenAITTS",
    "Gemini",
    "Gigabind"
    "Gigabind",
]

@@ -98,7 +98,11 @@ class BaseMultiModalModel:
    @abstractmethod
    def run(
        self, task: Optional[str] = None, img: Optional[str] = None, *args, **kwargs
        self,
        task: Optional[str] = None,
        img: Optional[str] = None,
        *args,
        **kwargs,
    ):
        """Run the model"""
        pass
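The reflowed run signature above is the interface that concrete models fill in. A minimal sketch of a subclass under stated assumptions: the import path and the EchoModel class are illustrative only, and BaseMultiModalModel is assumed to require nothing beyond an overridden run:

from typing import Optional

from swarms.models.base_multimodal_model import BaseMultiModalModel  # import path assumed


class EchoModel(BaseMultiModalModel):
    """Toy model used only to illustrate the run() contract."""

    def run(
        self,
        task: Optional[str] = None,
        img: Optional[str] = None,
        *args,
        **kwargs,
    ):
        # Echo the inputs instead of calling a real backend
        return f"task={task!r}, img={img!r}"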

@@ -98,7 +98,7 @@ class Gemini(BaseMultiModalModel):
        self.max_tokens = max_tokens
        self.temperature = temperature
        self.system_prompt = system_prompt
        # Configure the API key
        genai.configure(api_key=gemini_api_key, transport=transport)
@@ -121,9 +121,8 @@ class Gemini(BaseMultiModalModel):
        if self.gemini_api_key is None:
            raise ValueError("Please provide a Gemini API key")

    def system_prompt(
    def system_prompt_prep(
        self,
        system_prompt: str = None,
        task: str = None,
        *args,
        **kwargs,
@@ -135,7 +134,9 @@ class Gemini(BaseMultiModalModel):
"""
PROMPT = f"""
{system_prompt}
{self.system_prompt}
######
{task}
@@ -159,28 +160,21 @@ class Gemini(BaseMultiModalModel):
            str: output from the model
        """
        try:
            prepare_prompt = self.system_prompt_prep(task)
            if img:
                # process_img = self.process_img(img, *args, **kwargs)
                process_img = self.process_img_pil(img)
                response = self.model.generate_content(
                    contents=[task, process_img],
                    contents=[prepare_prompt, process_img],
                    generation_config=self.generation_config,
                    stream=self.stream,
                    *args,
                    **kwargs,
                )
                # if self.candidates:
                #     return response.candidates
                # elif self.safety:
                #     return response.safety
                # else:
                #     return response.text
                return response.text
            else:
                response = self.model.generate_content(
                    task, stream=self.stream, *args, **kwargs
                    prepare_prompt, stream=self.stream, *args, **kwargs
                )
                return response.text
        except Exception as error:

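The substantive change in this file is that the old system_prompt method, which shadowed the self.system_prompt attribute set in __init__, is renamed to system_prompt_prep, and run() now sends the prepared prompt rather than the bare task to generate_content. A rough sketch of what the preparation step assembles, assuming the f-string shown above is the whole template (the surrounding whitespace is illustrative):

# Illustrative only: mirrors the PROMPT f-string built by system_prompt_prep
system_prompt = "Answer as a helpful agent."  # value passed as Gemini(..., system_prompt=...)
task = "What is your name"

prepared = f"""
{system_prompt}
######
{task}
"""

# run()/chat() then hand `prepared` (plus the processed image, if any) to model.generate_content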
@@ -2,7 +2,6 @@ import requests
from tenacity import retry, stop_after_attempt, wait_fixed


class Gigabind:
    """Gigabind API.
