>>>model.run("Generate a summary of this text","https://www.google.com/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png")
>>>model.run("Generate a summary of this text",link)
>>>model.run_batch(["Generate a summary of this text","Generate a summary of this text"])
>>>model.run_batch([("Generate a summary of this text","https://www.google.com/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png"),("Generate a summary of this text","https://www.google.com/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png")])
>>>model.run_batch_async(["Generate a summary of this text","Generate a summary of this text"])
>>>model.run_batch_async([("Generate a summary of this text","https://www.google.com/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png"),("Generate a summary of this text","https://www.google.com/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png")])
>>>model.run_batch_async_with_retries(["Generate a summary of this text","Generate a summary of this text"])
>>>model.run_batch_async_with_retries([("Generate a summary of this text","https://www.google.com/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png"),("Generate a summary of this text","https://www.google.com/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png")])
>>> model.run_batch([
...     ("Generate a summary of this text", link),
...     ("Generate a summary of this text", link)
... ])
>>> model.run_batch_async([
...     "Generate a summary of this text",
...     "Generate a summary of this text"
... ])
>>> model.run_batch_async([
...     ("Generate a summary of this text", link),
...     ("Generate a summary of this text", link)
... ])
>>> model.run_batch_async_with_retries([
...     "Generate a summary of this text",
...     "Generate a summary of this text"
... ])
>>> model.run_batch_async_with_retries([
...     ("Generate a summary of this text", link),
...     ("Generate a summary of this text", link)
... ])
>>> model.generate_summary("Generate a summary of this text")
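The examples above exercise a batch API that accepts either bare prompts or (prompt, image URL) tuples, with synchronous, asynchronous, and retrying variants. The sketch below is a minimal illustration under assumed names (SketchModel, a synchronous run(task, img=None) method), not the library's actual implementation; it only shows how the batch helpers could be layered on top with asyncio.

# Minimal sketch (assumed shape, not the library's code): batch helpers layered
# on a synchronous run(task, img=None) call, mirroring the calls shown above.
import asyncio


class SketchModel:
    def run(self, task, img=None):
        # Stand-in for a real text / text+image call.
        return f"summary of {task!r} (img={img})"

    def run_batch(self, tasks):
        # Each item is either a prompt string or a (prompt, image_url) tuple.
        return [
            self.run(*t) if isinstance(t, tuple) else self.run(t) for t in tasks
        ]

    async def run_batch_async(self, tasks):
        # Run the blocking calls concurrently in worker threads.
        coros = [
            asyncio.to_thread(self.run, *t)
            if isinstance(t, tuple)
            else asyncio.to_thread(self.run, t)
            for t in tasks
        ]
        return await asyncio.gather(*coros)

    async def run_batch_async_with_retries(self, tasks, retries=3):
        async def attempt(t):
            last_err = None
            for _ in range(retries):
                try:
                    args = t if isinstance(t, tuple) else (t,)
                    return await asyncio.to_thread(self.run, *args)
                except Exception as err:  # retry on any failure
                    last_err = err
            raise last_err

        return await asyncio.gather(*(attempt(t) for t in tasks))


# Usage, e.g.: asyncio.run(SketchModel().run_batch_async(["a", "b"]))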
@@ -24,7 +25,6 @@ advantage of BioGPT on biomedical literature to generate fluent descriptions for
number={6},
year={2022},
month={09},
abstract="{Pre-trained language models have attracted increasing attention in the biomedical domain, inspired by their great success in the general natural language domain. Among the two main branches of pre-trained language models in the general language domain, i.e. BERT (and its variants) and GPT (and its variants), the first one has been extensively studied in the biomedical domain, such as BioBERT and PubMedBERT. While they have achieved great success on a variety of discriminative downstream biomedical tasks, the lack of generation ability constrains their application scope. In this paper, we propose BioGPT, a domain-specific generative Transformer language model pre-trained on large-scale biomedical literature. We evaluate BioGPT on six biomedical natural language processing tasks and demonstrate that our model outperforms previous models on most tasks. Especially, we get 44.98\%, 38.42\% and 40.76\% F1 score on BC5CDR, KD-DTI and DDI end-to-end relation extraction tasks, respectively, and 78.2\% accuracy on PubMedQA, creating a new record. Our case study on text generation further demonstrates the advantage of BioGPT on biomedical literature to generate fluent descriptions for biomedical terms.}",
Defaultsto"A wrapper around Eleven Labs Text2Speech. Useful for when you need to convert text to speech. It supports multiple languages, including English, German, Polish, Spanish, Italian, French, Portuguese, and Hindi."
Defaultsto"A wrapper around Eleven Labs Text2Speech. Useful for when you need to convert text to speech.
# TODO: The 'openai.proxy' option isn't read in the client API. You will need to pass it when you instantiate the
# client, e.g. 'OpenAI(proxy={"http": self.openai_proxy, "https": self.openai_proxy})'
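The TODO above notes that the module-level proxy setting is ignored by the client API. The following is a hedged sketch of one common way to route requests through a proxy when constructing the v1 client; it assumes openai>=1.0 and an httpx version that accepts a single proxy= URL, and the proxy address is hypothetical, so it is not necessarily the exact form the TODO intends.

# Hedged sketch: supply the proxy at client construction time instead of via
# openai.proxy. Assumes openai>=1.0 and a recent httpx; proxy URL is hypothetical.
import httpx
from openai import OpenAI

client = OpenAI(
    http_client=httpx.Client(proxy="http://my-proxy.example:8080"),
)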
@@ -181,7 +181,16 @@ class BaseWorkflow(BaseStructure):
>>> workflow.add("Create a report on these metrics", llm)
>>> workflow.delete_task("What's the weather in miami")
>>> workflow.tasks
[Task(description='Create a report on these metrics', agent=Agent(llm=OpenAIChat(openai_api_key=''), max_loops=1, dashboard=False), args=[], kwargs={}, result=None, history=[])]
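The example shows tasks being added by description and removed by matching description, with the remaining Task objects visible on workflow.tasks. Below is a minimal sketch of that bookkeeping under assumed stand-in names (Task, SketchWorkflow); it is not the BaseWorkflow implementation itself.

# Sketch of the add/delete-by-description bookkeeping illustrated above.
# Task and SketchWorkflow are illustrative stand-ins, not the real classes.
from dataclasses import dataclass, field
from typing import Any, List


@dataclass
class Task:
    description: str
    agent: Any
    args: list = field(default_factory=list)
    kwargs: dict = field(default_factory=dict)
    result: Any = None
    history: list = field(default_factory=list)


class SketchWorkflow:
    def __init__(self):
        self.tasks: List[Task] = []

    def add(self, description: str, agent: Any) -> None:
        self.tasks.append(Task(description=description, agent=agent))

    def delete_task(self, description: str) -> None:
        # Drop every task whose description matches exactly.
        self.tasks = [t for t in self.tasks if t.description != description]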
str: A string containing the function's name, documentation string, and a list of its parameters.
    Each parameter is represented as a line containing the parameter's name, default value, and annotation.
"""
try:
# If the function is a tool, get the original function