"""
Flow,

A chain-like structure from LangChain that provides language models with the
autonomy to generate sequential responses.

Features:

* User-defined queries
* Dynamic generation: keeps generating until the agent outputs <DONE>
* Interactive mode: the AI generates a response, then waits for user input
* Message history and performance history are fed into the context
* Ability to save and load flows
* Ability to provide feedback on responses
* Ability to provide a stopping condition
* Ability to provide a retry mechanism
* Ability to provide a loop interval

----------------------------------

Example:

import os

from swarms.models import OpenAIChat
from swarms.structs import Flow

# Read the API key from the environment so the example is self-contained.
api_key = os.getenv("OPENAI_API_KEY")

# Initialize the language model.
# It can be swapped out for Anthropic, Hugging Face models such as Mistral, etc.
llm = OpenAIChat(
    openai_api_key=api_key,
    temperature=0.5,
)
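
# A hypothetical swap to another provider; the import path and wrapper name
# below are assumptions for illustration, not confirmed swarms APIs:
# from swarms.models import Anthropic
# llm = Anthropic(anthropic_api_key=api_key)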

# Initialize the flow
flow = Flow(
    llm=llm,
    max_loops=5,
    # system_prompt=SYSTEM_PROMPT,
    # retry_interval=1,
)

flow.run("Generate a 10,000 word blog")

# Now save the flow
flow.save("path/flow.yaml")
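
The calls below are a minimal sketch of the remaining features (stopping
condition, feedback, and loading a saved flow). The parameter and method names
used here (stopping_condition, provide_feedback, load) are assumptions for
illustration; check the Flow class definition below for the exact signatures.

# Stop as soon as the agent emits <DONE> (assumed stopping_condition kwarg)
flow_with_stop = Flow(
    llm=llm,
    max_loops=5,
    stopping_condition=lambda response: "<DONE>" in response,
)
flow_with_stop.run("Write a short post and finish with <DONE>")

# Record feedback on the last response (assumed provide_feedback method)
flow_with_stop.provide_feedback("Shorten the introduction")

# Restore the flow that was saved above (assumed load method)
flow.load("path/flow.yaml")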
"""

import json
import logging
import time