parent 99997aad68
commit bf1510298b
@@ -0,0 +1,48 @@
# This is a Dockerfile for running unit tests

ARG POETRY_HOME=/opt/poetry

# Use the Python base image
FROM python:3.11.2-bullseye AS builder

# Define the version of Poetry to install (default is 1.4.2)
ARG POETRY_VERSION=1.4.2

# Define the directory to install Poetry to (default is /opt/poetry)
ARG POETRY_HOME

# Create a Python virtual environment for Poetry and install it
RUN python3 -m venv ${POETRY_HOME} && \
    $POETRY_HOME/bin/pip install --upgrade pip && \
    $POETRY_HOME/bin/pip install poetry==${POETRY_VERSION}

# Test if Poetry is installed in the expected path
RUN echo "Poetry version:" && $POETRY_HOME/bin/poetry --version

# Set the working directory for the app
WORKDIR /app

# Use a multi-stage build to install dependencies
FROM builder AS dependencies

ARG POETRY_HOME

# Copy only the dependency files for installation
COPY pyproject.toml poetry.lock poetry.toml ./

# Install the Poetry dependencies (this layer will be cached as long as the dependencies don't change)
RUN $POETRY_HOME/bin/poetry install --no-interaction --no-ansi --with test

# Use a multi-stage build to run tests
FROM dependencies AS tests

# Copy the rest of the app source code (this layer will be invalidated and rebuilt whenever the source code changes)
COPY . .

RUN /opt/poetry/bin/poetry install --no-interaction --no-ansi --with test

# Set the entrypoint to run tests using Poetry
ENTRYPOINT ["/opt/poetry/bin/poetry", "run", "pytest"]

# Set the default command to run all unit tests
CMD ["tests/"]
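
For context (not part of the commit): the ENTRYPOINT and CMD pair above means the container runs `poetry run pytest tests/` by default, so pytest collects any test module under a tests/ directory in the copied source. A minimal sketch of such a module, with a hypothetical file name and a trivial assertion purely for illustration:

# tests/test_smoke.py -- hypothetical module; pytest discovers any test_*.py
# file under the tests/ path passed as the image's default CMD.

def test_smoke():
    # Trivial check just to show what the container's default
    # `poetry run pytest tests/` invocation would collect and run.
    assert 1 + 1 == 2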
@@ -0,0 +1,4 @@
from swarms.models import OpenAIChat

llm = OpenAIChat(openai_api_key="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")
out = llm("Hello, I am a robot and I like to talk about robots.")
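
The example above passes the API key as a literal in source control. In practice the key would normally come from the environment instead. A minimal sketch using the same swarms.models.OpenAIChat import; the OPENAI_API_KEY variable name is an assumption, not part of the commit:

import os

from swarms.models import OpenAIChat

# Read the key from the environment instead of hard-coding it in the repo.
# OPENAI_API_KEY is an assumed variable name for this sketch.
llm = OpenAIChat(openai_api_key=os.environ["OPENAI_API_KEY"])
out = llm("Hello, I am a robot and I like to talk about robots.")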
@@ -1,10 +1,7 @@
-# from swarms.models.palm import GooglePalm
-# from swarms.models.openai import OpenAIChat
 #prompts
 from swarms.models.anthropic import Anthropic
 # from swarms.models.palm import GooglePalm
 from swarms.models.petals import Petals
-# from swarms.models.openai import OpenAIChat
-#prompts
+from swarms.models.chat_openai import OpenAIChat
 from swarms.models.prompts.debate import *
 from swarms.models.mistral import Mistral
@@ -0,0 +1,4 @@
from typing import List, Dict, Any, Union
from concurrent.futures import Executor, ThreadPoolExecutor, as_completed
from graphlib import TopologicalSorter

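
This new file only adds imports, but combining graphlib.TopologicalSorter with concurrent.futures is the standard-library pattern for running a dependency graph of tasks concurrently. A minimal sketch of that pattern using only the imported modules; the run_dag helper and the example graph are illustrative, not part of the commit:

from typing import Any, Dict, List
from concurrent.futures import ThreadPoolExecutor, as_completed
from graphlib import TopologicalSorter


def run_dag(graph: Dict[str, List[str]], work: Dict[str, Any]) -> Dict[str, Any]:
    # graph maps each node to its dependencies; TopologicalSorter hands back
    # only nodes whose dependencies have already been marked done.
    sorter = TopologicalSorter(graph)
    sorter.prepare()
    results: Dict[str, Any] = {}
    with ThreadPoolExecutor() as pool:
        while sorter.is_active():
            ready = sorter.get_ready()
            futures = {pool.submit(work[node]): node for node in ready}
            for future in as_completed(futures):
                node = futures[future]
                results[node] = future.result()
                sorter.done(node)
    return results


if __name__ == "__main__":
    # "b" and "c" depend on "a"; "d" depends on both "b" and "c".
    graph = {"a": [], "b": ["a"], "c": ["a"], "d": ["b", "c"]}
    work = {name: (lambda n=name: f"ran {n}") for name in graph}
    print(run_dag(graph, work))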