Anton Zhuravlev 1 year ago
commit 23e954b710

@@ -4,7 +4,7 @@ root = true
[*]
charset = utf-8
indent_style = space
-indent_size = 2
+indent_size = 4
insert_final_newline = true
trim_trailing_whitespace = true

.gitignore

@@ -38,4 +38,170 @@ testem.log
.DS_Store
Thumbs.db
-.nx/cache
+.nx/cache
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
.idea/
test.py
track-pay.json

@@ -0,0 +1,7 @@
POSTGRES_USER=admin
POSTGRES_PASSWORD=password
POSTGRES_HOST=db
POSTGRES_PORT=5432
#POSTGRES_HOST=localhost
#POSTGRES_PORT=5430
POSTGRES_DB=TrackPay

@@ -1,6 +1,6 @@
{
  "/api": {
-    "target": "http://localhost:3000",
+    "target": "http://localhost:8000",
    "secure": false
  }
}

@@ -0,0 +1,21 @@
version: '3.8'
services:
  db:
    image: postgres
    restart: on-failure
    env_file:
      - .env
    ports:
      - "5430:5432"
  http_server:
    build: ./py-conveyor-service/
    restart: on-failure
    env_file:
      - .env
    ports:
      - "8000:8000"
    depends_on:
      - db
    volumes:
      - ./py-conveyor-service/:/app
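Read together with the .env files in this commit, the port mappings mean containers reach Postgres at db:5432 while the host sees it on localhost:5430 (the commented-out .env values). A minimal host-side connectivity check, assuming the stack is up and asyncpg is installed locally, might look like:

import asyncio
import asyncpg  # already in requirements.txt; install it on the host for this check

async def check() -> None:
    # host port 5430 is mapped to the postgres container's 5432 in docker-compose.yml
    conn = await asyncpg.connect(user="admin", password="password",
                                 host="localhost", port=5430, database="TrackPay")
    print(await conn.fetchval("SELECT version()"))
    await conn.close()

asyncio.run(check())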

@@ -0,0 +1,7 @@
POSTGRES_USER=admin
POSTGRES_PASSWORD=password
POSTGRES_HOST=db
POSTGRES_PORT=5432
#POSTGRES_HOST=localhost
#POSTGRES_PORT=5430
POSTGRES_DB=TrackPay

@@ -0,0 +1,25 @@
FROM python:3.10 AS compile-image
## install OS-level dependencies (the apt-get install below currently lists no packages, so it is a no-op placeholder)
RUN apt-get update && \
    apt-get install -y --no-install-recommends
## add and install requirements
COPY requirements.txt .
RUN pip install --upgrade pip && \
pip install --no-cache-dir -r requirements.txt
FROM python:3.10 AS build-image
# copy installed packages from the compile stage; console scripts in /usr/local/bin are not
# copied, which is why the CMD below runs alembic and uvicorn via `python3 -m`
COPY --from=compile-image /usr/local/lib/python3.10/site-packages /usr/local/lib/python3.10/site-packages
COPY . /app
EXPOSE 8000
WORKDIR /app
CMD ["sh", "-c" ,"python3 -m alembic upgrade head && python3 -m uvicorn main:app --host 0.0.0.0 --port 8000 --reload"]

@@ -0,0 +1,102 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = db/migrations
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to db/migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:db/migrations/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

@@ -0,0 +1,11 @@
from typing import Optional
from api.request.base import RequestBase
from datetime import datetime
from pydantic import Field
class AnalyticsFilters(RequestBase):
    start_time: Optional[datetime] = Field(None)
    end_time: Optional[datetime] = Field(None)
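As a sketch of the request body this model accepts (both fields are optional ISO-8601 timestamps; the host/port and the httpx dependency are assumptions, not part of this commit):

import httpx  # hypothetical client library; httpx is not in requirements.txt

payload = {"start_time": "2023-10-01T00:00:00", "end_time": "2023-10-31T23:59:59"}
# POST /analytics/all is the endpoint defined in server/routers/analytics.py below
resp = httpx.post("http://localhost:8000/analytics/all", json=payload)
print(resp.json())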

@@ -0,0 +1,6 @@
from pydantic import BaseModel
class RequestBase(BaseModel):
    class Config:
        use_enum_values = True

@@ -0,0 +1,18 @@
from datetime import datetime
from api.response.base import ResponseBase
class AreaCharts(ResponseBase):
    name: str
    value: int
class TimelineCharts(AreaCharts):
    time: datetime
class RequestAnalytics(ResponseBase):
    timeline_charts: list[TimelineCharts]
    area_charts: list[AreaCharts]
    bar_charts: list[AreaCharts]

@@ -0,0 +1,6 @@
from pydantic import BaseModel
class ResponseBase(BaseModel):
    class Config:
        use_enum_values = True

@@ -0,0 +1,21 @@
import os
from dotenv import load_dotenv
load_dotenv()
DB_HOST = os.environ.get("POSTGRES_HOST")
DB_PORT = os.environ.get("POSTGRES_PORT")
DB_NAME = os.environ.get("POSTGRES_DB")
DB_USER = os.environ.get("POSTGRES_USER")
DB_PASS = os.environ.get("POSTGRES_PASSWORD")
db_url = f"postgresql+asyncpg://{DB_USER}:{DB_PASS}@{DB_HOST}:{DB_PORT}/{DB_NAME}?async_fallback=True"
secret = os.environ.get("secret_key")
encrypt_algorithm = os.environ.get('encrypt_algorithm')
SMTP_USER = os.environ.get("SMTP_USER")
SMTP_PORT = os.environ.get("SMTP_PORT")
SMTP_PASS = os.environ.get("SMTP_PASS")
SMTP_SERVER = os.environ.get("SMTP_SERVER")
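Presumably the `async_fallback=True` query parameter is what lets Alembic's synchronous `engine_from_config` (in db/migrations/env.py below) reuse this asyncpg URL. A sketch of what the URL resolves to, using only the values already committed in .env above:

# illustration only, with the committed .env values loaded
assert db_url == "postgresql+asyncpg://admin:password@db:5432/TrackPay?async_fallback=True"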

@@ -0,0 +1 @@
Generic single-database configuration.

@@ -0,0 +1,85 @@
import os
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
from db.models.base import Base
from configs.config import db_url
from db.models.conveer import DBConveer
from db.models.camera import DBCamera
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name) # type: ignore
# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata
config.set_main_option("sqlalchemy.url", db_url)
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.
    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.
    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.
    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )
        with context.begin_transaction():
            context.run_migrations()
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

@@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
    ${upgrades if upgrades else "pass"}
def downgrade():
    ${downgrades if downgrades else "pass"}

@@ -0,0 +1,52 @@
"""initial
Revision ID: 3c651a0d1fe0
Revises:
Create Date: 2023-10-27 18:19:47.402501
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '3c651a0d1fe0'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('camera',
        sa.Column('camera_type', sa.String(), nullable=False),
        sa.Column('order_numb', sa.Integer(), nullable=True),
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('updated_at', sa.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_camera')),
        sa.UniqueConstraint('id', name=op.f('uq_camera_id')),
        sa.UniqueConstraint('order_numb', name=op.f('uq_camera_order_numb'))
    )
    op.create_table('conveer',
        sa.Column('wood', sa.Integer(), nullable=True),
        sa.Column('metal', sa.Integer(), nullable=True),
        sa.Column('glass', sa.Integer(), nullable=True),
        sa.Column('plastic', sa.Integer(), nullable=True),
        sa.Column('camera_id', sa.Integer(), nullable=False),
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('updated_at', sa.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.ForeignKeyConstraint(['camera_id'], ['camera.id'], name=op.f('fk_conveer_camera_id_camera')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_conveer')),
        sa.UniqueConstraint('id', name=op.f('uq_conveer_id'))
    )
    # seed a default camera row ('По умолчанию' is Russian for 'Default')
    op.execute('''INSERT INTO camera(id, order_numb, camera_type) VALUES (1, 1, 'По умолчанию')''')
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('conveer')
    op.drop_table('camera')
    # ### end Alembic commands ###

@@ -0,0 +1,44 @@
import datetime
from typing import Optional
import sqlalchemy.types as types
from sqlalchemy import Column, text, MetaData
from sqlalchemy.orm import declarative_base
meta = MetaData(naming_convention={
    "ix": "ix_%(column_0_label)s",
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "ck": "ck_%(table_name)s_%(constraint_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s"
}, schema=None)
Base = declarative_base(metadata=meta)  # type: ignore
class BaseModel(Base):
    __abstract__ = True
    id = Column(types.Integer, nullable=False, unique=True, primary_key=True, autoincrement=True)
    created_at = Column(
        types.TIMESTAMP, nullable=False, server_default=text('CURRENT_TIMESTAMP')
    )
    updated_at = Column(
        types.TIMESTAMP, nullable=False, server_default=text('CURRENT_TIMESTAMP'), onupdate=text('CURRENT_TIMESTAMP')
    )
    __mapper_args__ = {"eager_defaults": True}
    @property
    def created_at_timestamp(self) -> int:
        return int(self.created_at.timestamp())
    @property
    def updated_at_timestamp(self) -> int:
        return int(self.updated_at.timestamp())
    def set_updated_at(self, date_time: Optional[datetime.datetime] = None) -> None:
        if date_time:
            self.updated_at = date_time
        else:
            self.updated_at = datetime.datetime.now()
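The naming_convention dict above is what produces the deterministic constraint names seen in the initial migration (pk_camera, uq_camera_order_numb, fk_conveer_camera_id_camera). A minimal, self-contained check of the 'uq' rule, assuming only SQLAlchemy is installed:

from sqlalchemy import Column, Integer, MetaData, Table, UniqueConstraint
from sqlalchemy.schema import CreateTable

meta = MetaData(naming_convention={"uq": "uq_%(table_name)s_%(column_0_name)s"})
t = Table("camera", meta,
          Column("id", Integer, primary_key=True),
          Column("order_numb", Integer),
          UniqueConstraint("order_numb"))
# the emitted DDL names the constraint uq_camera_order_numb, matching the migration
print(CreateTable(t))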

@@ -0,0 +1,12 @@
from db.models.base import BaseModel
from sqlalchemy import (
    Column,
    Integer,
    String
)
class DBCamera(BaseModel):
    __tablename__ = "camera"
    camera_type = Column(String, nullable=False)
    order_numb = Column(Integer, unique=True)

@@ -0,0 +1,22 @@
from db.models.base import BaseModel
from sqlalchemy import (
    Column,
    Integer,
    ForeignKey
)
from sqlalchemy.orm import relationship
from db.models.camera import DBCamera  # imported so the 'DBCamera' string below resolves
class DBConveer(BaseModel):
    __tablename__ = "conveer"
    wood = Column(Integer)
    metal = Column(Integer)
    glass = Column(Integer)
    plastic = Column(Integer)
    camera_id = Column(Integer, ForeignKey('camera.id'), nullable=False)
    camera = relationship('DBCamera', lazy="raise", uselist=False)

@@ -0,0 +1,31 @@
from datetime import datetime
from typing import Optional
from db.repository.base import BaseRepository
from db.models.conveer import DBConveer
from sqlalchemy import (
    select,
    func
)
class AnalyticsRepository(BaseRepository):
    async def get_analytic(self,
                           start_date: Optional[datetime],
                           end_date: Optional[datetime]):
        query = (
            select(DBConveer)
            .select_from(DBConveer)
        )
        if start_date is not None:
            query = query.filter(DBConveer.created_at >= func.timezone('UTC', start_date))
        if end_date is not None:
            query = query.filter(DBConveer.created_at <= func.timezone('UTC', end_date))
        return await self.all_ones(query)
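For reference, the created_at filters compile to Postgres's timezone() function; one way to inspect the generated SQL (illustrative only, not part of this commit):

from datetime import datetime
from sqlalchemy import select, func
from sqlalchemy.dialects import postgresql
from db.models.conveer import DBConveer

q = select(DBConveer).filter(DBConveer.created_at >= func.timezone('UTC', datetime(2023, 10, 1)))
# renders roughly: SELECT ... FROM conveer WHERE conveer.created_at >= timezone(%(timezone_1)s, %(timezone_2)s)
print(q.compile(dialect=postgresql.dialect()))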

@@ -0,0 +1,76 @@
from typing import Any, Union
from sqlalchemy import select, exists
from sqlalchemy.engine import ChunkedIteratorResult
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.sql import Select, Delete
from db.models.base import BaseModel
class BaseRepository:
    def __init__(self, session: AsyncSession) -> None:
        self._session = session
    async def execute(self, query: Select) -> ChunkedIteratorResult:
        return await self._session.execute(query)
    async def execute_parametrize(self, query: Union[str, Select], params: dict) -> ChunkedIteratorResult:
        return await self._session.execute(statement=query, params=params)
    async def one(self, query: Select) -> Any:
        result = await self.execute(query)
        return result.one()
    async def one_or_none(self, query: Select) -> Any:
        result = await self.execute(query)
        return result.one_or_none()
    async def one_val(self, query: Select) -> Any:
        result = await self.one(query)
        return result[0]
    async def one_or_none_val(self, query: Select) -> Any:
        result = await self.one_or_none(query)
        if not result:
            return None
        return result[0]
    async def add_model(self, model: BaseModel) -> None:
        self._session.add(model)
        await self._session.commit()
    async def refresh_model(self, model: BaseModel):
        await self._session.refresh(model)
    async def add_model_ignore_exceptions(self, model: BaseModel) -> None:
        try:
            async with self._session.begin_nested():
                self._session.add(model)
        except IntegrityError:
            pass
    async def add_models(self, models: list[BaseModel]) -> None:
        for model in models:
            await self.add_model(model)
        await self._session.commit()
    async def delete(self, model: BaseModel) -> None:
        await self._session.delete(model)
    async def delete_many(self, models: list[BaseModel]) -> None:
        for model in models:
            await self.delete(model)
    async def all(self, query: Select) -> list[Any]:
        result = await self.execute(query)
        return result.all()
    async def all_ones(self, query: Select) -> list[Any]:
        result = await self.execute(query)
        return [row[0] for row in result.all()]
    async def exists(self, query: Select) -> bool:
        query = select(exists(query))
        return await self.one_val(query)
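As a usage sketch (the counting query is illustrative, not part of this commit): subclasses receive an AsyncSession and funnel everything through these helpers, as AnalyticsRepository does above:

from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession
from db.models.conveer import DBConveer
from db.repository.base import BaseRepository

async def count_conveer_rows(session: AsyncSession) -> int:
    # session would come from server.depends.get_session in a request handler
    repo = BaseRepository(session)
    return await repo.one_val(select(func.count()).select_from(DBConveer))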

@@ -0,0 +1,14 @@
import uvicorn
from fastapi import FastAPI
from server.routers_init import all_routers
app = FastAPI(
    title="Conveyor"
)
for router in all_routers:
    app.include_router(router)
if __name__ == "__main__":
    uvicorn.run(app="main:app", reload=True)

@@ -0,0 +1,18 @@
from datetime import datetime
from typing import Optional
from sqlalchemy.ext.asyncio import AsyncSession
from db.repository.analytics import AnalyticsRepository
class AnalyticsManager:
    @classmethod
    async def get_by_filters(cls,
                             session: AsyncSession,
                             start_date: Optional[datetime],
                             end_date: Optional[datetime]):
        data = await AnalyticsRepository(session).get_analytic(start_date=start_date,
                                                               end_date=end_date)
        return data

@@ -0,0 +1,10 @@
fastapi
uvicorn
SQLAlchemy
psycopg2-binary
asyncpg
alembic
python-dotenv
python-jose
python-multipart
pytz

@@ -0,0 +1,26 @@
from datetime import datetime, timedelta
from typing import AsyncGenerator, Optional
from sqlalchemy.ext.asyncio import AsyncSession
from fastapi import Depends, Query
from configs.config import secret, encrypt_algorithm
from vendors.db import async_session
from fastapi.exceptions import HTTPException
async def get_session() -> AsyncGenerator[AsyncSession, None]:
    async with async_session() as session:
        yield session
class PagesPaginationParams:
    def __init__(
        self,
        limit: int = Query(50, ge=0, le=1_000),
        offset: int = Query(0, ge=0, alias='skip'),
    ) -> None:
        self.limit = limit
        self.offset = offset

@@ -0,0 +1,22 @@
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from api.request.analytics import AnalyticsFilters
from managers.analytics import AnalyticsManager
from server.depends import get_session, PagesPaginationParams
router = APIRouter(prefix="/analytics", tags=['Analytics'])
@router.post('/all')
async def get_all_analytics(
    filters: AnalyticsFilters,
    session: AsyncSession = Depends(get_session),
):
    data = await AnalyticsManager.get_by_filters(
        session=session,
        start_date=filters.start_time,
        end_date=filters.end_time
    )
    return data

@@ -0,0 +1,3 @@
from server.routers.analytics import router as analytics_router
all_routers = [analytics_router]

@@ -0,0 +1,22 @@
from typing import AsyncGenerator
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from configs.config import db_url
DATABASE_URL = db_url
Base = declarative_base()
engine = create_async_engine(DATABASE_URL)
async_session = sessionmaker(
    bind=engine,
    class_=AsyncSession,
    expire_on_commit=False,
)
async def get_async_session() -> AsyncGenerator[AsyncSession, None]:
    async with async_session() as session:
        yield session