[EXAMPLE][CLEANUP]

pull/299/head
Kye 1 year ago
parent 7384e2401e
commit 005ef9bd67

@@ -60,8 +60,8 @@ llm = OpenAIChat(
 agent = Agent(llm=llm, max_loops=1, autosave=True, dashboard=True)
 # Run the workflow on a task
-out = agent.run("Generate a 10,000 word blog on health and wellness.")
-print(out)
+agent.run("Generate a 10,000 word blog on health and wellness.")
 ```
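
For reference, the cleaned-up README example reads roughly as follows; the import paths and the `openai_api_key` argument are assumptions that may differ across swarms versions:

```python
# Minimal sketch of the cleaned-up example (imports and key handling assumed).
from swarms import Agent, OpenAIChat

llm = OpenAIChat(openai_api_key="sk-...")  # placeholder credentials

agent = Agent(llm=llm, max_loops=1, autosave=True, dashboard=True)

# Run the workflow on a task. Agent.run returns (and logs) the result itself,
# so the old `out = ...` / `print(out)` pair is dropped by this cleanup.
agent.run("Generate a 10,000 word blog on health and wellness.")
```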
@@ -168,8 +168,7 @@ agent = Agent(
 )
 # Run the workflow on a task
-out = agent.run(task=task, img=img)
-print(out)
+agent.run(task=task, img=img)
 ```
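
The multimodal example follows the same pattern; a minimal sketch, assuming a vision-capable model wrapper such as swarms' `GPT4VisionAPI` (that class name, its import path, and the sample task/image values are assumptions, not part of the diff):

```python
# Minimal sketch of the task + image example (model wrapper and paths assumed).
from swarms import Agent, GPT4VisionAPI

llm = GPT4VisionAPI(openai_api_key="sk-...")  # placeholder credentials

agent = Agent(
    llm=llm,
    max_loops=1,
)

task = "Describe what is shown in this image."  # hypothetical task
img = "images/example.jpeg"                     # hypothetical image path

# Same cleanup as above: no intermediate variable, no explicit print.
agent.run(task=task, img=img)
```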
@@ -199,8 +198,7 @@ llm = OpenAIChat(
 agent = OmniModalAgent(llm)
-response = agent.run("Generate a video of a swarm of fish and then make an image out of the video")
-print(response)
+agent.run("Generate a video of a swarm of fish and then make an image out of the video")
 ```
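
Likewise for the OmniModalAgent example; a minimal sketch, assuming `OmniModalAgent` is importable from `swarms.agents` (the exact module path may differ by version):

```python
# Minimal sketch of the cleaned-up OmniModalAgent example (import paths assumed).
from swarms import OpenAIChat
from swarms.agents import OmniModalAgent

llm = OpenAIChat(openai_api_key="sk-...")  # placeholder credentials

agent = OmniModalAgent(llm)

# The agent call alone is kept; the `response = ...` / `print(response)` pair is removed.
agent.run("Generate a video of a swarm of fish and then make an image out of the video")
```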
---

@@ -29,5 +29,4 @@ agent = Agent(
 )
 # Run the workflow on a task
-out = agent.run("Generate a 10,000 word blog on health and wellness.")
-print(out)
+agent.run("Generate a 10,000 word blog on health and wellness.")

@@ -172,7 +172,7 @@ class Gemini(BaseMultiModalModel):
             # return response.text
             # else:
             response = self.model.generate_content(
-                task, *args, **kwargs
+                task, stream=self.stream, *args, **kwargs
             )
             return response.text
         except Exception as error:
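
The `stream=self.stream` change forwards the wrapper's streaming flag to the underlying client. A minimal sketch of how that flag behaves, assuming the wrapper delegates to `google-generativeai`'s `GenerativeModel.generate_content` (the model name and API key below are placeholders, not taken from the diff):

```python
# Sketch of streaming vs. non-streaming generate_content calls
# (assumes the google-generativeai client; names here are not from the diff).
import google.generativeai as genai

genai.configure(api_key="YOUR_API_KEY")  # placeholder
model = genai.GenerativeModel("gemini-pro")

# Non-streaming: the full reply is available as response.text.
response = model.generate_content("Summarize the benefits of daily exercise.")
print(response.text)

# Streaming: chunks arrive incrementally and are consumed by iteration;
# the accumulated .text is generally only safe to read once the stream has
# been fully iterated, so `return response.text` fits the non-streaming path.
streamed = model.generate_content(
    "Summarize the benefits of daily exercise.",
    stream=True,
)
for chunk in streamed:
    print(chunk.text, end="", flush=True)
```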
