missed a spot

pull/601/head
zzstoatzz 6 months ago
parent 7bb43c7e1e
commit fb4e34d098

@@ -1,31 +0,0 @@
WORKSPACE_DIR="agent_workspace"
SWARMS_API_KEY=""
USE_TELEMETRY=True
OPENAI_API_KEY="sk-"
GOOGLE_API_KEY=""
ANTHROPIC_API_KEY=""
AI21_API_KEY="your_api_key_here"
COHERE_API_KEY="your_api_key_here"
ALEPHALPHA_API_KEY="your_api_key_here"
HUGGINGFACEHUB_API_KEY="your_api_key_here"
EVAL_PORT=8000
MODEL_NAME="gpt-4"
USE_GPU=True
PLAYGROUND_DIR="examples"
LOG_LEVEL="INFO"
BOT_NAME="Orca"
HF_API_KEY="your_huggingface_api_key_here"
AGENTOPS_API_KEY=""
FIREWORKS_API_KEY=""
OPENAI_API_KEY=your_openai_api_key
ANTHROPIC_API_KEY=your_anthropic_api_key
AZURE_OPENAI_ENDPOINT=your_azure_openai_endpoint
AZURE_OPENAI_DEPLOYMENT=your_azure_openai_deployment
OPENAI_API_VERSION=your_openai_api_version
AZURE_OPENAI_API_KEY=your_azure_openai_api_key
AZURE_OPENAI_AD_TOKEN=your_azure_openai_ad_token

@@ -1,13 +0,0 @@
---
# These are supported funding model platforms
github: [kyegomez]
# patreon: # Replace with a single Patreon username
# open_collective: # Replace with a single Open Collective username
# ko_fi: # Replace with a single Ko-fi username
# tidelift: # Replace with a single Tidelift platform-name/package-name
# community_bridge: # Replace with a single Community Bridge project-name
# liberapay: # Replace with a single Liberapay username
# issuehunt: # Replace with a single IssueHunt username
# otechie: # Replace with a single Otechie username
# lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name
# custom: #Nothing

@@ -1,27 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: "[BUG] "
labels: bug
assignees: kyegomez
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Additional context**
Add any other context about the problem here.

@@ -1,20 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: 'kyegomez'
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

@@ -1,29 +0,0 @@
Thank you for contributing to Swarms!
Replace this comment with:
- Description: a description of the change,
- Issue: the issue # it fixes (if applicable),
- Dependencies: any dependencies required for this change,
- Tag maintainer: for a quicker response, tag the relevant maintainer (see below),
- Twitter handle: we announce bigger features on Twitter. If your PR gets announced and you'd like a mention, we'll gladly shout you out!
Please make sure your PR is passing linting and testing before submitting. Run `make format`, `make lint` and `make test` to check this locally.
See contribution guidelines for more information on how to write/run tests, lint, etc:
https://github.com/kyegomez/swarms/blob/master/CONTRIBUTING.md
If you're adding a new integration, please include:
1. a test for the integration, preferably unit tests that do not rely on network access,
2. an example notebook showing its use.
Maintainer responsibilities:
- General / Misc / if you don't know who to tag: kye@apac.ai
- DataLoaders / VectorStores / Retrievers: kye@apac.ai
- swarms.models: kye@apac.ai
- swarms.memory: kye@apac.ai
- swarms.structures: kye@apac.ai
If no one reviews your PR within a few days, feel free to email Kye at kye@apac.ai
See contribution guidelines for more information on how to write/run tests, lint, etc: https://github.com/kyegomez/swarms

.github/action.yml

@@ -1,33 +0,0 @@
---
name: "Init Environment"
description: "Initialize environment for tests"
runs:
using: "composite"
steps:
- name: Checkout actions
uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Install and configure Poetry
uses: snok/install-poetry@v1
with:
virtualenvs-create: true
virtualenvs-in-project: true
installer-parallel: true
- name: Load cached venv
id: cached-poetry-dependencies
uses: actions/cache@v3
with:
path: .venv
key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{hashFiles('**/poetry.lock') }}
- name: Install dependencies
if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
run: poetry install --no-interaction --no-root --with test --with dev --all-extras
shell: bash
- name: Activate venv
run: |
source .venv/bin/activate
echo PATH=$PATH >> $GITHUB_ENV
shell: bash

@@ -1,12 +0,0 @@
---
# https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "weekly"

@@ -1,34 +0,0 @@
---
documentation:
- changed-files:
- any-glob-to-any-file: ["docs/**", "*.md"]
tests:
- changed-files:
- any-glob-to-any-file: "tests/**"
agents:
- changed-files:
- any-glob-to-any-file: "swarms/agents/**"
artifacts:
- changed-files:
- any-glob-to-any-file: "swarms/artifacts/**"
memory:
- changed-files:
- any-glob-to-any-file: "swarms/memory/**"
models:
- changed-files:
- any-glob-to-any-file: "swarms/models/**"
prompts:
- changed-files:
- any-glob-to-any-file: "swarms/prompts/**"
structs:
- changed-files:
- any-glob-to-any-file: "swarms/structs/**"
telemetry:
- changed-files:
- any-glob-to-any-file: "swarms/telemetry/**"
tools:
- changed-files:
- any-glob-to-any-file: "swarms/tools/**"
utils:
- changed-files:
- any-glob-to-any-file: "swarms/utils/**"

@@ -1,47 +0,0 @@
---
name: release
on:
pull_request:
types:
- closed
branches:
- master
paths:
- "pyproject.toml"
env:
POETRY_VERSION: "1.4.2"
jobs:
if_release:
if: |
  github.event.pull_request.merged == true
  && contains(github.event.pull_request.labels.*.name, 'release')
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install poetry
run: pipx install poetry==$POETRY_VERSION
- name: Set up Python 3.9
uses: actions/setup-python@v5
with:
python-version: "3.9"
cache: "poetry"
- name: Build project for distribution
run: poetry build
- name: Check Version
id: check-version
run: |
echo version=$(poetry version --short) >> $GITHUB_OUTPUT
- name: Create Release
uses: ncipollo/release-action@v1
with:
artifacts: "dist/*"
token: ${{ secrets.GITHUB_TOKEN }}
draft: false
generateReleaseNotes: true
tag: v${{ steps.check-version.outputs.version }}
commit: master
- name: Publish to PyPI
env:
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
run: |-
poetry publish

@@ -1,25 +0,0 @@
name: autofix.ci
on:
pull_request:
push:
branches: ["main"]
permissions:
contents: read
jobs:
autofix:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
- run: go install github.com/google/yamlfmt/cmd/yamlfmt@latest
- run: yamlfmt .
- uses: actions/setup-python@v5
- run: pip install ruff
- run: ruff format .
- run: ruff check --fix .
- uses: autofix-ci/action@dd55f44df8f7cdb7a6bf74c78677eb8acd40cd0a

@@ -1,45 +0,0 @@
---
name: Codacy
on:
push:
branches: ["master"]
pull_request:
branches: ["master"]
schedule:
- cron: "0 0 * * "
permissions:
contents: read
jobs:
codacy-security-scan:
permissions:
contents: read # for actions/checkout to fetch code
security-events: write # for github/codeql-action/upload-sarif to upload SARIF results
actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
name: Codacy Security Scan
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
# Execute Codacy Analysis CLI and generate a SARIF output with the security issues identified during the analysis
- name: Run Codacy Analysis CLI
uses: codacy/codacy-analysis-cli-action@97bf5df3c09e75f5bcd72695998f96ebd701846e
with:
# Check https://github.com/codacy/codacy-analysis-cli#project-token to
# get your project token from your Codacy repository
# You can also omit the token and run the tools that support default configurations
project-token: ${{ secrets.CODACY_PROJECT_TOKEN }}
verbose: true
output: results.sarif
format: sarif
# Adjust severity of non-security issues
gh-code-scanning-compat: true
# Force 0 exit code to allow SARIF file generation
# This will handover control about PR rejection to the GitHub side
max-allowed-issues: 2147483647
# Upload the SARIF file generated in the previous step
- name: Upload SARIF results file
uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: results.sarif

@@ -1,41 +0,0 @@
---
name: "CodeQL"
on:
push:
branches: ["master"]
pull_request:
# The branches below must be a subset of the branches above
branches: ["master"]
schedule:
- cron: "33 12 * * 5"
jobs:
analyze:
name: Analyze
# Runner size impacts CodeQL analysis time. To learn more, please see:
# - https://gh.io/recommended-hardware-resources-for-running-codeql
# - https://gh.io/supported-runners-and-hardware-resources
# - https://gh.io/using-larger-runners
# Consider using larger runners for possible analysis time improvements.
runs-on: ubuntu-latest
timeout-minutes: 360
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: ["python"]
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
- name: Autobuild
uses: github/codeql-action/autobuild@v3
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

@@ -1,16 +0,0 @@
name: Documentation Links
on:
pull_request_target:
types:
- opened
permissions:
pull-requests: write
jobs:
documentation-links:
runs-on: ubuntu-latest
steps:
- uses: readthedocs/actions/preview@v1
with:
project-slug: "swarms"

@@ -1,24 +0,0 @@
name: Documentation
on:
push:
branches:
- master
- main
- develop
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: 3.11
- run: pip install mkdocs-material
- run: pip install mkdocs-glightbox
- run: pip install "mkdocstrings[python]"
- run: pip3 install mkdocs-git-authors-plugin
- run: pip install mkdocs-jupyter==0.16.0
- run: pip install --upgrade lxml_html_clean
- run: pip install mkdocs-git-committers-plugin
- run: pip3 install mkdocs-git-revision-date-localized-plugin
- run: mkdocs gh-deploy --force -f docs/mkdocs.yml

@@ -1,20 +0,0 @@
---
# This workflow will triage pull requests and apply a label based on the
# paths that are modified in the pull request.
#
# To use this workflow, you will need to set up a .github/labeler.yml
# file with configuration. For more information, see:
# https://github.com/actions/labeler
name: Labeler
on: [pull_request_target]
jobs:
label:
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
steps:
- uses: actions/labeler@v5
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"

@@ -1,33 +0,0 @@
---
name: Lint
on: [push, pull_request] # yamllint disable-line rule:truthy
jobs:
yaml-lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- run: pip install yamllint
- run: yamllint .
flake8-lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- run: pip install flake8
- run: flake8 .
ruff-lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- run: pip install ruff
- run: ruff format .
- run: ruff check --fix .
pylint-lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- run: pip install pylint
- run: pylint swarms --recursive=y

@@ -1,40 +0,0 @@
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
name: Python package
on:
push:
branches: [ "master" ]
pull_request:
branches: [ "master" ]
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ["3.9", "3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install flake8 pytest
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Test with pytest
run: |
pytest

@@ -1,49 +0,0 @@
---
# This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
#
# You can adjust the behavior by modifying this file.
# For more information, see:
# https://github.com/actions/stale
name: Stale
on:
schedule:
# Scheduled to run at 00:00 UTC every day
- cron: "0 0 * * *"
jobs:
stale:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- uses: actions/stale@v9
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-issue-stale: 14
days-before-issue-close: 14
stale-issue-label: "status:stale"
close-issue-reason: not_planned
any-of-labels: "status:awaiting user response,status:more data needed"
stale-issue-message: >
Marking this issue as stale since it has been open for 14 days with no
activity. This issue will be closed if no further activity occurs.
close-issue-message: >
This issue was closed because it has been inactive for 28 days. Please
post a new issue if you need further assistance. Thanks!
days-before-pr-stale: 14
days-before-pr-close: 14
stale-pr-label: "status:stale"
stale-pr-message: >
Marking this pull request as stale since it has been open for 14 days
with no activity. This PR will be closed if no further activity occurs.
close-pr-message: >
This pull request was closed because it has been inactive for 28 days.
Please open a new pull request if you need further assistance. Thanks!
# Label that can be assigned to issues to exclude them from being marked as stale
exempt-issue-labels: "override-stale"
# Label that can be assigned to PRs to exclude them from being marked as stale
exempt-pr-labels: "override-stale"

@@ -1,60 +0,0 @@
name: Tests
on:
push:
schedule:
- cron: "0 0 * * *"
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install Python
uses: actions/setup-python@v5
- name: Install Poetry
uses: snok/install-poetry@v1
- name: Setup a local virtual environment
run: |
poetry config virtualenvs.create true --local
poetry config virtualenvs.in-project true --local
- uses: actions/cache@v4
name: Define a cache for the virtual environment file
with:
path: ./.venv
key: venv-${{ hashFiles('poetry.lock') }}
- name: Install the project dependencies
run: poetry install
- name: Install OpenCV
run: sudo apt-get install python3-opencv
- name: Enter the virtual environment
run: source $VENV
- name: Run the tests
run: poetry run pytest --verbose
run-examples:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install Python
uses: actions/setup-python@v5
- name: Install Poetry
uses: snok/install-poetry@v1
- name: Setup a local virtual environment
run: |
poetry config virtualenvs.create true --local
poetry config virtualenvs.in-project true --local
- uses: actions/cache@v4
name: Define a cache for the virtual environment file
with:
path: ./.venv
key: venv-${{ hashFiles('poetry.lock') }}
- name: Install the project dependencies
run: poetry install
- name: Install OpenCV
run: sudo apt-get install python3-opencv
- name: Enter the virtual environment
run: source $VENV
- name: Make Script Executable and Run
run: |-
chmod +x ./scripts/run_examples.sh
./scripts/run_examples.sh

@@ -1,22 +0,0 @@
---
name: Welcome
on:
issues:
types: [opened]
pull_request_target:
types: [opened]
jobs:
build:
name: 👋 Welcome
permissions: write-all
runs-on: ubuntu-latest
steps:
- uses: actions/first-interaction@v1.3.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
issue-message:
"Hello there, thank you for opening an issue! 🙏🏻 The team
has been notified and will get back to you as soon as possible."
pr-message:
"Hello there, thank you for opening a PR! 🙏🏻 The team
has been notified and will get back to you as soon as possible."

.gitignore

@@ -1,224 +0,0 @@
__pycache__/
.venv/
.env
image/
audio/
video/
artifacts_three
dataframe/
static/generated
runs
Financial-Analysis-Agent_state.json
artifacts_five
encryption
errors
chroma
agent_workspace
.pt
Accounting Assistant_state.json
Unit Testing Agent_state.json
sec_agent
Devin_state.json
poetry.lock
hire_researchers
agent_workspace
json_logs
Medical Image Diagnostic Agent_state.json
flight agent_state.json
D_state.json
artifacts_six
artifacts_seven
swarms/__pycache__
artifacts_once
transcript_generator.json
venv
.DS_Store
Cargo.lock
.DS_STORE
artifacts_logs
Cargo.lock
Medical Treatment Recommendation Agent_state.json
swarms/agents/.DS_Store
artifacts_two
logs
T_state.json
_build
conversation.txt
t1_state.json
stderr_log.txt
t2_state.json
.vscode
.DS_STORE
# Byte-compiled / optimized / DLL files
Transcript Generator_state.json
__pycache__/
*.py[cod]
*$py.class
.grit
swarm-worker-01_state.json
error.txt
Devin Worker 2_state.json
# C extensions
*.so
.ruff_cache
errors.txt
Autonomous-Agent-XYZ1B_state.json
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
.DS_Store
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
.vscode/settings.json

@@ -1,18 +0,0 @@
repos:
- repo: https://github.com/ambv/black
rev: 22.3.0
hooks:
- id: black
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: 'v0.0.255'
hooks:
- id: ruff
args: [--unsafe-fixes]
- repo: https://github.com/nbQA-dev/nbQA
rev: 1.6.3
hooks:
- id: nbqa-black
additional_dependencies: [ipython==8.12, black]
- id: nbqa-ruff
args: ["--ignore=I001"]
additional_dependencies: [ipython==8.12, ruff]