swarms/perplexity_agent.py


"""
$ pip install swarms
- Add docs into the database
- Use better llm
- use better prompts [System and SOPs]
- Use a open source model like Command R
- Better SOPS ++ System Prompts
-
"""
import os

from swarms import Agent
from swarms.models.llama3_hosted import llama3Hosted
from playground.memory.chromadb_example import ChromaDB
from swarms.tools.prebuilt.bing_api import fetch_web_articles_bing_api
# System prompt for the research agent.
research_system_prompt = """
Research Agent LLM Prompt: Summarizing Sources and Content
Objective:
Your task is to summarize the provided sources and the content within those sources. The goal is to create concise, accurate, and informative summaries that capture the key points of the original content.
Instructions:
1. Identify Key Information:
- Extract the most important information from each source. Focus on key facts, main ideas, significant arguments, and critical data.
2. Summarize Clearly and Concisely:
- Use clear and straightforward language. Avoid unnecessary details and keep the summary concise.
- Ensure that the summary is coherent and easy to understand.
3. Preserve Original Meaning:
- While summarizing, maintain the original meaning and intent of the content. Do not omit essential information that changes the context or understanding.
4. Include Relevant Details:
- Mention the source title, author, publication date, and any other relevant details that provide context.
5. Structure:
- Begin with a brief introduction to the source.
- Follow with a summary of the main content.
- Conclude with any significant conclusions or implications presented in the source.
"""
# Initialize long-term memory (ChromaDB)
memory = ChromaDB(
    output_dir="research_base",
    n_results=2,
)
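# Hedged sketch for the "Add docs into the database" TODO above: assuming the
# playground ChromaDB wrapper exposes an `add()` method that stores a raw
# document string, reference material could be seeded into long-term memory
# before the agent runs. The method name and the documents are illustrative
# assumptions, not a verified API.
#
# for doc in ["GPU architecture notes ...", "Etched Sohu announcement ..."]:
#     memory.add(doc)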
llm = llama3Hosted(temperature=0.2, max_tokens=3500)
# Initialize the agent
agent = Agent(
    agent_name="Research Agent",
    system_prompt=research_system_prompt,
    llm=llm,
    max_loops="auto",
    autosave=True,
    dashboard=False,
    interactive=True,
    long_term_memory=memory,
    # tools=[fetch_web_articles_bing_api],
)
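# Hedged alternative: if the installed swarms Agent supports callable tools
# (as the commented-out `tools=` line above suggests), the Bing fetcher could
# be passed as `tools=[fetch_web_articles_bing_api]` so the agent decides when
# to search, instead of perplexity_agent() calling it manually below. This is
# an assumption about the tool-calling API, not a verified configuration.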
def perplexity_agent(task: str = None, *args, **kwargs):
    """
    Fetch web articles related to the task via the Bing API, combine the task
    and the fetched articles into a single prompt, and run that prompt through
    the research agent.

    Args:
        task (str): The task for which web articles need to be fetched.

    Returns:
        str: The response generated by the agent.
    """
    out = fetch_web_articles_bing_api(
        task,
        # Read the Bing API key from the environment instead of hardcoding it.
        subscription_key=os.getenv("BING_API_KEY"),
    )

    # Combine the task and the fetched articles into a single prompt
    sources = [task, str(out)]
    sources_prompts = "\n".join(sources)

    # Run the agent on the combined prompt
    agent_response = agent.run(sources_prompts)
    return agent_response
out = perplexity_agent(
    "What are the biggest GPU chip alternatives for transformer models? Look up Etched."
)
print(out)