From 876a9656569af92fd15728f04263ed37fcefef56 Mon Sep 17 00:00:00 2001 From: vladimirgubarik Date: Thu, 17 Jul 2025 14:41:11 +0300 Subject: [PATCH 01/15] init --- Dockerfile.jinja => Dockerfile | 15 +- LICENSE.md | 55 - README.md | 330 --- README.md.jinja | 46 - ...nced-deploy.md.jinja => advanced-deploy.md | 26 +- alembic.ini | 2 +- app/api/endpoints/healthcheck.py.jinja | 45 - app/api/routers.py | 11 - app/{api => application}/__init__.py | 0 .../repository}/__init__.py | 0 app/application/repository/exceptions.py | 22 + app/application/repository/interfaces.py | 67 + .../service}/__init__.py | 0 app/application/service/interfaces.py | 8 + .../use_cases}/__init__.py | 0 app/application/use_cases/healthcheck.py | 25 + app/application/use_cases/interfaces.py | 43 + app/application/use_cases/record_use_cases.py | 52 + .../commands/{% if CI %}test.py{% endif %} | 154 -- app/bot/middlewares/db_session.py | 15 - app/db/crud.py | 87 - app/db/record/repo.py | 56 - app/db/sqlalchemy.py | 62 - app/{bot/commands => domain}/__init__.py | 0 .../entities}/__init__.py | 0 .../entities/healthcheck.py} | 10 +- app/domain/entities/sample_record.py | 25 + .../exceptions}/__init__.py | 0 app/domain/exceptions/domain_exceptions.py | 16 + app/{caching => infrastructure}/__init__.py | 0 .../caching}/__init__.py | 0 .../caching/callback_redis_repo.py | 73 +- .../caching/exception_handlers.py | 0 .../caching/redis_repo.py | 0 app/infrastructure/containers.py | 110 + .../db}/__init__.py | 0 .../db/migrations}/__init__.py | 0 app/{ => infrastructure}/db/migrations/env.py | 6 +- .../db/migrations/script.py.mako | 0 .../migrations/versions/765dcfed2d16_init.py} | 19 +- .../db/migrations/versions}/__init__.py | 0 .../db/sample_record}/__init__.py | 0 .../db/sample_record}/models.py | 10 +- app/infrastructure/db/sqlalchemy.py | 99 + .../middlewares}/__init__.py | 0 .../middlewares/answer_error.py | 0 .../middlewares/smart_logger.py | 0 .../repositories}/__init__.py | 0 
.../repositories/sample_record.py | 139 + .../infrastructure/services}/__init__.py | 0 app/infrastructure/services/healthcheck.py | 48 + .../infrastructure/worker}/__init__.py | 0 .../worker/worker.py} | 6 +- app/main.py | 36 +- app/presentation/__init__.py | 0 app/presentation/api/__init__.py | 0 .../dependencies => presentation/api}/bot.py | 0 .../endpoints => presentation/api}/botx.py | 2 +- app/presentation/api/healthcheck.py | 41 + app/presentation/api/routers.py | 9 + app/presentation/api/schemas/__init__.py | 0 app/presentation/api/schemas/healthcheck.py | 24 + app/presentation/bot/__init__.py | 0 .../bot.py.jinja => presentation/bot/bot.py} | 18 +- app/presentation/bot/commands/__init__.py | 0 .../bot/commands/command_listing.py | 28 + app/{ => presentation}/bot/commands/common.py | 2 +- .../bot/commands/sample_record_fsm.py | 26 + .../bot/commands/sample_record_simple.py | 28 + .../bot/error_handlers/__init__.py | 0 app/presentation/bot/handlers/__init__.py | 0 app/presentation/bot/handlers/command.py | 54 + app/presentation/bot/handlers/error.py | 48 + .../bot/handlers}/internal_error.py | 5 +- .../bot/handlers/sample_record.py | 30 + app/presentation/bot/resources/__init__.py | 0 .../bot/resources/strings.py} | 15 +- .../templates/common}/chat_created.txt.mako | 0 .../resources/templates/common}/help.txt.mako | 0 .../common}/something_goes_wrong.txt.mako | 0 .../sample_record_created_answer.txt.mako | 2 + app/presentation/bot/schemas/__init__.py | 0 app/presentation/bot/schemas/sample_record.py | 40 + app/presentation/bot/validators/__init__.py | 0 app/presentation/bot/validators/base.py | 32 + app/presentation/bot/validators/exceptions.py | 2 + .../bot/validators/sample_record.py | 21 + app/presentation/dependencies/__init__.py | 0 .../dependencies/healthcheck.py} | 8 +- app/schemas/record.py | 14 - app/services/botx_user_search.py | 38 - app/services/healthcheck.py | 43 +- app/settings.py | 2 +- app/utils/__init__.py | 0 
app/utils/bot_exception_answer.py | 53 + app/utils/exceptions_mapper.py | 154 ++ copier.yaml | 53 - ...er-compose.yml.jinja => docker-compose.yml | 20 +- extensions/context.py | 18 - poetry.lock | 2346 ----------------- pyproject.toml.jinja => pyproject.toml | 49 +- ruff.toml | 33 + scripts/format | 15 + scripts/lint | 17 +- scripts/test | 85 + tests/application/__init__.py | 0 tests/application/integration/__init__.py | 0 tests/application/integration/conftest.py | 13 + .../test_sample_record_use_cases_int.py | 88 + tests/application/unit/__init__.py | 0 tests/application/unit/conftest.py | 12 + tests/application/unit/fake_repository.py | 44 + .../unit/test_sample_record_use_cases.py | 116 + .../{% if CI %}test_test.py{% endif %} | 425 --- tests/conftest.py | 104 +- tests/factories.py | 59 + tests/infrastructure/__init__.py | 0 tests/infrastructure/repository/__init__.py | 0 .../test_sample_record_repository.py | 120 + tests/presentation/__init__.py | 0 tests/presentation/commands/__init__.py | 0 .../commands/test_common.py} | 2 +- tests/presentation/endpoints/__init__.py | 0 .../{ => presentation}/endpoints/conftest.py | 2 +- .../{ => presentation}/endpoints/fixtures.py | 0 .../{ => presentation}/endpoints/test_botx.py | 3 +- tests/presentation/unit/__init__.py | 0 ..._ccsteam %}.gitlab-ci.yml{% endif %}.jinja | 219 -- {{_copier_conf.answers_file}}.jinja | 2 - 129 files changed, 2149 insertions(+), 4153 deletions(-) rename Dockerfile.jinja => Dockerfile (70%) delete mode 100644 LICENSE.md delete mode 100644 README.md delete mode 100644 README.md.jinja rename advanced-deploy.md.jinja => advanced-deploy.md (90%) delete mode 100644 app/api/endpoints/healthcheck.py.jinja delete mode 100644 app/api/routers.py rename app/{api => application}/__init__.py (100%) rename app/{api/dependencies => application/repository}/__init__.py (100%) create mode 100644 app/application/repository/exceptions.py create mode 100644 app/application/repository/interfaces.py rename 
app/{api/endpoints => application/service}/__init__.py (100%) create mode 100644 app/application/service/interfaces.py rename app/{bot => application/use_cases}/__init__.py (100%) create mode 100644 app/application/use_cases/healthcheck.py create mode 100644 app/application/use_cases/interfaces.py create mode 100644 app/application/use_cases/record_use_cases.py delete mode 100644 app/bot/commands/{% if CI %}test.py{% endif %} delete mode 100644 app/bot/middlewares/db_session.py delete mode 100644 app/db/crud.py delete mode 100644 app/db/record/repo.py delete mode 100644 app/db/sqlalchemy.py rename app/{bot/commands => domain}/__init__.py (100%) rename app/{bot/error_handlers => domain/entities}/__init__.py (100%) rename app/{schemas/enums.py => domain/entities/healthcheck.py} (52%) create mode 100644 app/domain/entities/sample_record.py rename app/{bot/middlewares => domain/exceptions}/__init__.py (100%) create mode 100644 app/domain/exceptions/domain_exceptions.py rename app/{caching => infrastructure}/__init__.py (100%) rename app/{db => infrastructure/caching}/__init__.py (100%) rename app/{ => infrastructure}/caching/callback_redis_repo.py (51%) rename app/{ => infrastructure}/caching/exception_handlers.py (100%) rename app/{ => infrastructure}/caching/redis_repo.py (100%) create mode 100644 app/infrastructure/containers.py rename app/{db/migrations => infrastructure/db}/__init__.py (100%) rename app/{db/migrations/versions => infrastructure/db/migrations}/__init__.py (100%) rename app/{ => infrastructure}/db/migrations/env.py (82%) rename app/{ => infrastructure}/db/migrations/script.py.mako (100%) rename app/{db/migrations/versions/d6e3a38b1fbd_.py => infrastructure/db/migrations/versions/765dcfed2d16_init.py} (65%) rename app/{db/record => infrastructure/db/migrations/versions}/__init__.py (100%) rename app/{resources => infrastructure/db/sample_record}/__init__.py (100%) rename app/{db/record => infrastructure/db/sample_record}/models.py (57%) create mode 
100644 app/infrastructure/db/sqlalchemy.py rename app/{schemas => infrastructure/middlewares}/__init__.py (100%) rename app/{bot => infrastructure}/middlewares/answer_error.py (100%) rename app/{bot => infrastructure}/middlewares/smart_logger.py (100%) rename app/{{% if add_worker %}worker{% endif %} => infrastructure/repositories}/__init__.py (100%) create mode 100644 app/infrastructure/repositories/sample_record.py rename {tests/commands => app/infrastructure/services}/__init__.py (100%) create mode 100644 app/infrastructure/services/healthcheck.py rename {tests/endpoints => app/infrastructure/worker}/__init__.py (100%) rename app/{{% if add_worker %}worker{% endif %}/worker.py.jinja => infrastructure/worker/worker.py} (81%) create mode 100644 app/presentation/__init__.py create mode 100644 app/presentation/api/__init__.py rename app/{api/dependencies => presentation/api}/bot.py (100%) rename app/{api/endpoints => presentation/api}/botx.py (98%) create mode 100644 app/presentation/api/healthcheck.py create mode 100644 app/presentation/api/routers.py create mode 100644 app/presentation/api/schemas/__init__.py create mode 100644 app/presentation/api/schemas/healthcheck.py create mode 100644 app/presentation/bot/__init__.py rename app/{bot/bot.py.jinja => presentation/bot/bot.py} (62%) create mode 100644 app/presentation/bot/commands/__init__.py create mode 100644 app/presentation/bot/commands/command_listing.py rename app/{ => presentation}/bot/commands/common.py (97%) create mode 100644 app/presentation/bot/commands/sample_record_fsm.py create mode 100644 app/presentation/bot/commands/sample_record_simple.py create mode 100644 app/presentation/bot/error_handlers/__init__.py create mode 100644 app/presentation/bot/handlers/__init__.py create mode 100644 app/presentation/bot/handlers/command.py create mode 100644 app/presentation/bot/handlers/error.py rename app/{bot/error_handlers => presentation/bot/handlers}/internal_error.py (85%) create mode 100644 
app/presentation/bot/handlers/sample_record.py create mode 100644 app/presentation/bot/resources/__init__.py rename app/{resources/strings.py.jinja => presentation/bot/resources/strings.py} (85%) rename app/{resources/templates => presentation/bot/resources/templates/common}/chat_created.txt.mako (100%) rename app/{resources/templates => presentation/bot/resources/templates/common}/help.txt.mako (100%) rename app/{resources/templates => presentation/bot/resources/templates/common}/something_goes_wrong.txt.mako (100%) create mode 100644 app/presentation/bot/resources/templates/sample_record/sample_record_created_answer.txt.mako create mode 100644 app/presentation/bot/schemas/__init__.py create mode 100644 app/presentation/bot/schemas/sample_record.py create mode 100644 app/presentation/bot/validators/__init__.py create mode 100644 app/presentation/bot/validators/base.py create mode 100644 app/presentation/bot/validators/exceptions.py create mode 100644 app/presentation/bot/validators/sample_record.py create mode 100644 app/presentation/dependencies/__init__.py rename app/{api/dependencies/healthcheck.py.jinja => presentation/dependencies/healthcheck.py} (90%) delete mode 100644 app/schemas/record.py delete mode 100644 app/services/botx_user_search.py create mode 100644 app/utils/__init__.py create mode 100644 app/utils/bot_exception_answer.py create mode 100644 app/utils/exceptions_mapper.py delete mode 100644 copier.yaml rename docker-compose.yml.jinja => docker-compose.yml (72%) delete mode 100644 extensions/context.py delete mode 100644 poetry.lock rename pyproject.toml.jinja => pyproject.toml (63%) create mode 100644 ruff.toml create mode 100644 tests/application/__init__.py create mode 100644 tests/application/integration/__init__.py create mode 100644 tests/application/integration/conftest.py create mode 100644 tests/application/integration/test_sample_record_use_cases_int.py create mode 100644 tests/application/unit/__init__.py create mode 100644 
tests/application/unit/conftest.py create mode 100644 tests/application/unit/fake_repository.py create mode 100644 tests/application/unit/test_sample_record_use_cases.py delete mode 100644 tests/commands/{% if CI %}test_test.py{% endif %} create mode 100644 tests/factories.py create mode 100644 tests/infrastructure/__init__.py create mode 100644 tests/infrastructure/repository/__init__.py create mode 100644 tests/infrastructure/repository/test_sample_record_repository.py create mode 100644 tests/presentation/__init__.py create mode 100644 tests/presentation/commands/__init__.py rename tests/{commands/test_common.py.jinja => presentation/commands/test_common.py} (97%) create mode 100644 tests/presentation/endpoints/__init__.py rename tests/{ => presentation}/endpoints/conftest.py (60%) rename tests/{ => presentation}/endpoints/fixtures.py (100%) rename tests/{ => presentation}/endpoints/test_botx.py (98%) create mode 100644 tests/presentation/unit/__init__.py delete mode 100644 {% if from_ccsteam %}.gitlab-ci.yml{% endif %}.jinja delete mode 100644 {{_copier_conf.answers_file}}.jinja diff --git a/Dockerfile.jinja b/Dockerfile similarity index 70% rename from Dockerfile.jinja rename to Dockerfile index 658ad58..1ab8444 100644 --- a/Dockerfile.jinja +++ b/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.11.5-alpine -{% if from_ccsteam %}LABEL Maintainer="eXpress Unlimited Production"{% endif %} + # Install system-wide dependencies RUN apk update && \ @@ -36,18 +36,9 @@ RUN pip install --user --no-cache-dir poetry==1.4.2 && \ COPY poetry.lock pyproject.toml ./ -{% if has_private_dependencies == "yes" %} -ARG CI_JOB_TOKEN="" -ARG GIT_HOST="" -ARG GIT_PASSWORD=${CI_JOB_TOKEN} -ARG GIT_LOGIN="gitlab-ci-token" -# Poetry can't read password to download private repos -RUN echo -e "machine ${GIT_HOST}\nlogin ${GIT_LOGIN}\npassword ${GIT_PASSWORD}" > ~/.netrc && \ - poetry install --only main && \ - rm -rf ~/.netrc -{% else %} + RUN poetry install --only main -{% endif %} + COPY 
alembic.ini . COPY app app diff --git a/LICENSE.md b/LICENSE.md deleted file mode 100644 index b86b6f8..0000000 --- a/LICENSE.md +++ /dev/null @@ -1,55 +0,0 @@ -Лицензия на программное обеспечение -Дата 10 июня 2021г. - -Настоящая лицензия регламентирует использование следующего Программного -обеспечения: -* Библиотека Pybotx - сборник подпрограмм или объектов, используемых для - разработки ботов, обеспечивающий интеграцию с ПО «Система коммуникаций - Express». -* Авторский программный модуль (АПМ) (Bot Template) - программная платформа для - создания ботов для Системы коммуникаций Express. -* Библиотека FSM – сборник подпрограмм или объектов, используемых для - разработки ботов, для представления и управления потоком выполнения команд - ботов. -* Модуль Proxier – модуль ПО, позволяющий перенаправлять запросы на локально - развернутого бота. -* ToDo Бот - индивидуальная разработка ООО «Анлимитед Продакшен» для - демонстрации процесса и результата разработки ботов на платформе. -* Библиотека Pybotx Widgets – библиотека переиспользуемых компонентов для - взаимодействия с пользователями. - -Далее по тексту «Программное обеспечение» Правообладателем Программного -обеспечения является ООО «Анлимитед Продакшен» (ИНН: 7707374451, ОГРН: -5167746251240) Данная неисключительная лицензия разрешает лицам, получившим -законным способом копию программного кода Программного обеспечения и -сопутствующей документации, безвозмездно использовать Программное обеспечение -без ограничений, включая неограниченное право на использование, копирование, -доработку, адаптацию, публикацию, распространение, сублицензирование, при -соблюдении следующих условий: - -Уведомление о правообладателе должны быть включены во все копии или значимые -части Программных продуктов, в том числе созданных с использованием -Программного обеспечения. 
Неисключительная лицензия ограничена «правом на -использование», ни одно из положений настоящей лицензии не означает передачи -кому бы то ни было исключительного права на Платформу, Библиотеки, Блоки и -любые элементы Программного обеспечения. Запрещается вносить изменения в -Программное обеспечение, переводить или создавать производные продукты, -основанные на Программном обеспечении и его элементах с целью регистрации -исключительных прав на программный код Программного обеспечения, а также -интегрировать Программное обеспечение или его элементы в другие результаты -интеллектуальной деятельности с последующей регистрацией исключительных прав на -Программное обеспечение в составе результатов интеллектуальной деятельности. - -Неисключительная лицензия предоставляется на срок действия авторских прав на -территории всего мира. ДАННОЕ ПРОГРАММНОЕ ОБЕСПЕЧЕНИЕ ПРЕДОСТАВЛЯЕТСЯ «КАК -ЕСТЬ», БЕЗ КАКИХ-ЛИБО ГАРАНТИЙ, ЯВНО ВЫРАЖЕННЫХ ИЛИ ПОДРАЗУМЕВАЕМЫХ, ВКЛЮЧАЯ -ГАРАНТИИ ТОВАРНОЙ ПРИГОДНОСТИ, СООТВЕТСТВИЯ ПО ЕГО КОНКРЕТНОМУ НАЗНАЧЕНИЮ И -ОТСУТСТВИЯ НАРУШЕНИЙ, НО НЕ ОГРАНИЧИВАЯСЬ ИМИ. НИ В КАКОМ СЛУЧАЕ АВТОРЫ ИЛИ -ПРАВООБЛАДАТЕЛИ НЕ НЕСУТ ОТВЕТСТВЕННОСТИ ПО КАКИМ-ЛИБО ИСКАМ, ЗА УЩЕРБ ИЛИ ПО -ИНЫМ ТРЕБОВАНИЯМ, В ТОМ ЧИСЛЕ, ПРИ ДЕЙСТВИИ КОНТРАКТА, ДЕЛИКТЕ ИЛИ ИНОЙ -СИТУАЦИИ, ВОЗНИКШИМ ИЗ-ЗА ИСПОЛЬЗОВАНИЯ ПРОГРАММНОГО ОБЕСПЕЧЕНИЯ ИЛИ ИНЫХ -ДЕЙСТВИЙ С ПРОГРАММНЫМ ОБЕСПЕЧЕНИЕМ. В ТОМ ЧИСЛЕ, НЕ ГАРАНТИРУЕТ (ВКЛЮЧАЯ, НО -НЕ ОГРАНИЧИВАЯСЬ): ПРИГОДНОСТЬ ДЛЯ КОНКРЕТНЫХ ЦЕЛЕЙ, ТОЧНОСТЬ, ПОЛНОТУ, -ПРОИЗВОДИТЕЛЬНОСТЬ, СИСТЕМНУЮ ИНТЕГРАЦИЮ, БЕСПЕРЕБОЙНОЕ ФУНКЦИОНИРОВАНИЕ, -ОТСУТСТВИЕ ОШИБОК, ИСПРАВЛЕНИЕ НЕПОЛАДОК, ЗАКОННОСТЬ ИСПОЛЬЗОВАНИЯ НА ЛЮБЫХ -ТЕРРИТОРИЯХ ЗА ПРЕДЕЛАМИ РОССИЙСКОЙ ФЕДЕРАЦИИ. 
diff --git a/README.md b/README.md deleted file mode 100644 index 92aa563..0000000 --- a/README.md +++ /dev/null @@ -1,330 +0,0 @@ -# Bot Template Tutorial - -## Введение - -> :information_source: Инфо -> Для взаимодействия с платформой **botx** используется библиотека **[pybotx](https://github.com/ExpressApp/pybotx)**. В **[документации](https://pypi.org/project/pybotx/)** можно посмотреть примеры её использования. Перед прочтением данного туториала следует с ней ознакомиться. - ----- - -Вне зависимости от решаемых ботами задач, во всех повторяется один и тот же код. - -Чтобы разработчикам не приходилось писать базовый однообразный код для каждого нового бота, существует шаблонный проект - Bot Template. Он задает структуру проекта и основной стек технологий. Это значительно упрощает разработку, позволяя сконцентрироваться на реализации бота. - ----- - - -## 1. Развертывание из шаблона и структура проекта - -Для развертывания проекта необходимо установить [copier](https://github.com/copier-org/copier) и выполнить команду: -```bash -$ copier bot-template bot-example -``` - -Структура шаблонного бота состоит из нескольких следующих пакетов и модулей: - -``` -. -├── app -│ ├── api - реализация http роутов для приложения, включая необходимые для бота -│ ├── bot - команды бота и вспомогательные функции для них -│ ├── caching - классы и функции для работы с in-memory БД -│ ├── db - модели, функции для работы с БД и миграции -│ ├── resources - текстовые или файловые ресурсы бота -│ ├── schemas - сериализаторы, енамы, доменные модели -│ ├── services - сервисы с логикой (бизнес-логика) -│ ├── logger.py - логгер -│ ├── main.py - запуск сервера с инициализацией необходимых сервисов -│ └── settings.py - настройки приложения -├── scripts - скрипты для запуска тестов, форматеров, линтеров -├── tests - тесты, структура которых соответствует структуре проекта, и хелперы для них -├── poetry.lock - конфигурация текущих зависимостей. 
используется для их установки -├── pyproject.toml - конфигурация зависимостей, мета информация проекта (название, версия, авторы и т.п.) -└── setup.cfg - конфигурация линтеров и тестов -``` - -## 2. Запуск проекта - -### Настройка окружения - -1. Устанавливаем зависимости проекта через [poetry](https://github.com/python-poetry/poetry#poetry-dependency-management-for-python): -```bash -$ poetry install -``` -2. Определяем переменные окружения в файле **`.env`**. Примеры переменных окружения находятся в файле **`example.env`**. -3. Запускаем `postges` и `redis` используя [docker-compose](https://docs.docker.com/compose/): -```bash -$ docker-compose -f docker-compose.dev.yml up -d -``` -4. Применяем все миграции для инициализации таблиц с помощью [alembic](https://alembic.sqlalchemy.org/en/latest/tutorial.html): -```bash -$ alembic upgrade head -``` -5. Запускаем бота как приложение [FastAPI](https://fastapi.tiangolo.com/tutorial/) через [gunicorn](https://fastapi.tiangolo.com/deployment/server-workers/?h=gunicorn#run-gunicorn-with-uvicorn-workers). -Флаг `--reload` используется только при разработке для автоматического перезапуска сервера при изменениях в коде: -```bash -$ gunicorn "app.main:get_application()" --worker-class uvicorn.workers.UvicornWorker -``` -По необходимости добавить флаг `--workers` и их колличество, в данном случае 4 рабочих процесса: -```bash -$ gunicorn "app.main:get_application()" --worker-class uvicorn.workers.UvicornWorker --workers 4 -``` - ----- - -## 3. Добавление нового функционала - -### 3.1. Команды бота - -#### Структура пакета команд -Команды бота находятся в пакете **`app.bot.commands`** и группируются в отдельные модули в зависимости от логики. Команды добавляются с помощью [коллекторов pybotx](https://expressapp.github.io/pybotx/development/collector/). - -Основные команды, такие как `/help` и системные команды, находятся в модуле **`common.py`**. Для команд, относящихся к определенной задаче, создается свой модуль. 
Например, для интеграции с Atlassian Jira будет создан модуль **`jira.py`**. В результате структура пакета **`app.bot.commands`** будет выглядеть так: - -``` -bot -├── commands -│ ├── common.py -│ ├── jira.py -``` - -Если в модуле становится слишком много команд, следует разбить его на новые модули и сложить в один пакет с названием старого модуля. Например, так: - -``` -bot -├── commands -│ ├── common.py -│ ├── jira -│ ├── projects.py -│ ├── issues.py -``` - - -#### Регистрация команд -Для добавления модуля с командами нужно импортировать `collector` в **`app/bot/bot.py`** и добавить его в инстанс бота: -```python3 -from app.bot.commands import common - -bot.include_handlers(common.collector) -``` - ----- - -### 3.2. Взаимодействие с БД - -#### Создание новых моделей - -Взаимодействовать с новыми таблицами можно через модели [sqlalchemy](https://www.sqlalchemy.org/). С примерами использования можно ознакомиться [тут](https://www.sqlalchemy.org/library.html#tutorials). Модели располагаются в пакете **`app.db.package_name`**. Там же хранятся `crud` функции и [репозитории](https://gist.github.com/maestrow/594fd9aee859c809b043). 
Структура пакета выглядит следующим образом: -``` -├── app -│ ├── db -│ ├── migrations -│ ├── exampleapp -│ ├── repo.py - репозиторий/crud функции -│ ├── models.py - модели таблиц -``` - -Пример модели: -``` python -from sqlalchemy import Column, Integer, String -from app.db.sqlalchemy import Base - -class ExampleModel(Base): - __tablename__ = "examples" - - id: int = Column(Integer, primary_key=True, autoincrement=True) - text: str = Column(String) -``` - -Пример репозитория: -``` python -from sqlalchemy import insert -from app.db.sqlalchemy import session -from app.db.example.models import ExampleModel - -class ExampleRepo: - async def create(self, text: str) -> None: - query = insert(ExampleModel).values(text=text) - async with session.begin(): - await session.execute(query) -``` - -#### Создание новых миграций - -Для генерации миграций используется [alembic](https://alembic.sqlalchemy.org/en/latest/). Все файлы миграции хранятся в директории **`app.db.migrations`**. Для генерации новой миграции необходимо создать модель `sqlalchemy` и выполнить команду: - -```bash -$ alembic revision --autogenerate -m "migration message" -``` - -Новый файл миграции будет создан в следующей директории: -``` -├── app -│ ├── db -│ ├── migrations -│ ├── versions -│ ├── 0123456789ab_migration_message.py -``` - -Чтобы применить все миграции, следует выполнить команду: -```bash -$ alembic upgrade head -``` -или: -```bash -$ alembic upgrade 1 -``` -для применения только одной миграции. - -Для отмены одной миграции необходимо выолнить: -```bash -$ alembic downgrade -1 -``` - ----- - -### 3.3. Сервисы и бизнес-логика - -Вся бизнес-логика проекта выносится в пакет **`app.services`**. Бизнес-логика - логика, характерная только для данного проекта. Туда же выносятся запросы, клиенты для использования API сторонних сервисов, обработка данных по заданным (в ТЗ) правилам. 
- -Структура следующая: -``` -├── app -│ ├── services -│ │ ├── errors.py - исключения, вызываемые в клиенте -│ │ ├── client.py - клиент для обращения к стороннему сервису -``` - ----- - -### 3.4. Конфиги и переменные среды - -Новые переменные среды можно добавить в класс `AppSettings` из файла `app/settings.py`. Если у переменной нет значения по умолчанию, то оно будет браться из файла `.env`. -Чтобы использовать эту переменную в боте, необходимо: -``` python -from app.settings import settings -... -settings.MY_VAR -``` - -> :information_source: Инфо -> Через переменные среды можно указывать окружения, в которых будет запускаться бот. `test`, `dev` или `prod`. Просто добавьте в файл `.env` переменную `APP_ENV=prod`. - ----- - -## 4. Линтеры и форматирование кода - -#### Запуск -Для запуска всех форматеров необходимо выполнить скрипт: -```bash -$ ./scripts/format -``` - -Для запуска всех линтеров необходимо выполнить скрипт: -```bash -$ ./scripts/lint -``` - -#### Описание -* [black](https://github.com/psf/black) - -Используется для форматирования кода к единому стилю: разбивает длинные строки, следит за отступами и импортами. - -> :warning: Примечание -> В некоторых моментах isort конфликтует с black. Конфликт решается настройкой файла конфигурации **`setup.cfg`**. - -* [isort](https://github.com/timothycrosley/isort) - -Используется для сортировки импортов. Сначала импорты из стандартных библиотек python, затем из внешних библиотек и в конце из модулей данного проекта. -Между собой импорты сортируются по алфавиту. - -* [autoflake](https://github.com/myint/autoflake) - -Используется для удаления неиспользуемых импортов и переменных. - -* [mypy](https://github.com/python/mypy) - -Используется для проверки типов. Помогает находить некоторые ошибки еще на стадии разработки. - -> :warning: Примечание -> К сожалению, не все библиотеки поддерживают типизацию. 
Чтобы подсказать это **mypy** необходимо добавить следующие строки в файл конфигурации **`setup.cfg`**: - -``` -[mypy] - -# ... - -[mypy-your_library_name.*] -ignore_missing_imports = True -``` - -Некоторые же наоборот имеют специальные плагины для **mypy**, например **pydantic**: - -``` -[mypy] -plugins = pydantic.mypy - -... - -[pydantic-mypy] -init_forbid_extra = True -init_typed = True -warn_required_dynamic_aliases = True -warn_untyped_fields = True -``` - -* [wemake-python-styleguide](https://github.com/wemake-services/wemake-python-styleguide) - -Используется для комплексной проверки. Анализирует допустимые имена перменных и их длину, сложность вложенных конструкций, правильную обработку исключений и многое другое. Для каждого типа ошибок есть свой уникальный номер, объяснение, почему так делать не стоит, и объяснение, как делать правильно. Список ошибок можно посмотреть [тут](https://wemake-python-stylegui.de/en/latest/pages/usage/violations/index.html). - -> :information_source: Инфо -> В некоторых редких случаях можно игнорировать правила линтера. Для этого необходимо либо прописать комментарий с меткой `noqa` на проблемной строке: -> ```python3 -> var = problem_function() # noqa: WPS999 -> ``` -> либо указать `ignore` ошибки в **`setup.cfg`**: -> ``` -> [flake8] -> # ... -> ignore = -> # f-strings are useful -> WPS305, -> ``` -> Также можно исключать модули и пакеты. - - ----- - - -## 5. Тестирование - - - -### 5.1. Запуск и добавление тестов - -Все тесты пишутся с помощью библиотеки [pytest](https://docs.pytest.org/en/latest/). Запустить тесты можно командой: - -```bash -$ pytest -``` - -Во время тестирования поднимается docker-контейнер с БД. Порт выбирается свободный, поэтому запущенная локально БД не будет мешать. 
Если вы хотите запускать тесты используя вашу локальную БД, необходимо добавить в **`.env`** переменную `DB=1`, либо выполнить команду: -```bash -$ DB=1 pytest -``` - - -> :information_source: Инфо -> Поскольку **pytest** не умеет в асинхронные тесты, для работы с ними ему необходим плагин **[pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio)**. - -### 5.2. Покрытие - -Покрытие показывает процент протестированного исходного кода, как всего, так и отдельных модулей. Покрытие помогает определить какие фрагменты кода не запускались в тестах. Для генерации отчетов покрытия используется плагин [pytest-cov](https://pytest-cov.readthedocs.io/en/latest/reporting.html). - -Чтобы не прописывать все флаги каждый раз, можно использовать эти скрипты: -```bash -$ ./scripts/test -$ ./scripts/html-cov-test -``` - -Первый выводит отчет в терминале, второй генерирует отчет в виде `html`страниц с подсветкой непокрытых участков кода. diff --git a/README.md.jinja b/README.md.jinja deleted file mode 100644 index 4fc8a1d..0000000 --- a/README.md.jinja +++ /dev/null @@ -1,46 +0,0 @@ -# {{bot_project_name}} - -Бот создан на базе шаблона [bot-template](https://github.com/ExpressApp/bot-template). - -## Описание - -{{bot_description}} - -## Инструкция по развёртыванию {{bot_project_name}} - -**NOTE**: *Для развёртывания нескольких ботов на сервере используйте -продвинутый вариант инструкции: [advanced-deploy.md](advanced-deploy.md).* - -1. Воспользуйтесь инструкцией [Руководство - администратора](https://express.ms/admin_guide.pdf) `-> Эксплуатация корпоративного - сервера -> Управление контактами -> Чат-боты`, чтобы создать бота в панели - администратора eXpress. -2. Получите `secret_key` и `bot_id`, нажав на имя созданного бота. -3. Получите `cts_host` в строке браузера в панели администратора. - - -4. Скачайте репозиторий на сервер: - -```bash -git clone /opt/express/bots/{{bot_project_name}} -cd /opt/express/bots/{{bot_project_name}} -``` - -5. 
Отредактируйте `docker-compose.yml`, заменив `cts_host`, `secret_key` и `bot_id` на реальные значения. - - -6. Запустите контейнеры командой: - -```bash -docker-compose up -d -``` - -7. Убедитесь, что в логах нет ошибок. - -```bash -docker-compose logs -``` - -8. Найдите бота через поиск корпоративных контактов (иконка человечка слева-сверху в - мессенджере). -9. Напишите боту для проверки. Например, вызовите справку командой `/help`. diff --git a/advanced-deploy.md.jinja b/advanced-deploy.md similarity index 90% rename from advanced-deploy.md.jinja rename to advanced-deploy.md index 191407b..8244097 100644 --- a/advanced-deploy.md.jinja +++ b/advanced-deploy.md @@ -15,7 +15,7 @@ * `SQL_DEBUG` [`false`]: Включает вывод запросов к БД PostgreSQL. -## Продвинутая инструкция по развертыванию {{bot_project_name}} +## Продвинутая инструкция по развертыванию bot_refactor **Примечание**: Чтобы легко добавлять новых ботов на сервер, хранилища находятся в отдельной docker-сети и используются несколькими ботами сразу (каждый обращается к своей @@ -103,43 +103,43 @@ docker exec -it storages_postgres_1 psql --user postgres ``` ```sql -CREATE USER {{bot_name_underscored}}_user PASSWORD ''; -CREATE DATABASE {{bot_name_underscored}}_db; -GRANT ALL PRIVILEGES ON DATABASE {{bot_name_underscored}}_db - TO {{bot_name_underscored}}_user; +CREATE USER bot_refactor_user PASSWORD ''; +CREATE DATABASE bot_refactor_db; +GRANT ALL PRIVILEGES ON DATABASE bot_refactor_db + TO bot_refactor_user; ``` 2. Скачайте репозиторий на сервер: ```bash -git clone /opt/express/bots/{{bot_project_name}} -cd /opt/express/bots/{{bot_project_name}} +git clone /opt/express/bots/bot_refactor +cd /opt/express/bots/bot_refactor ``` 3. Соберите образ: ```bash -docker build -t {{bot_project_name}} . +docker build -t bot_refactor . ``` При необходимости можно добавить [дополнительные параметры](https://www.uvicorn.org/#command-line-options). 
Например: ```bash -docker build -t {{bot_project_name}} +docker build -t bot_refactor --build-args UVICORN_CMD_ARGS="--ssl-ca-certs TEXT" . ``` -4. В директории `/opt/express/bots/{{bot_project_name}}` создайте файл +4. В директории `/opt/express/bots/bot_refactor` создайте файл `docker-compose.deploy.yml` со следующим содержимым: ```yaml version: "3.8" services: - {{bot_project_name}}: - image: {{bot_project_name}} - container_name: {{bot_project_name}} + bot_refactor: + image: bot_refactor + container_name: bot_refactor env_file: .env ports: - "8000:8000" # Отредактируйте порт хоста (первый), если он уже занят diff --git a/alembic.ini b/alembic.ini index ef1d150..df269c6 100644 --- a/alembic.ini +++ b/alembic.ini @@ -1,7 +1,7 @@ # https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file [alembic] -script_location = ./app/db/migrations +script_location = ./app/infrastructure/db/migrations [loggers] keys = root,sqlalchemy,alembic diff --git a/app/api/endpoints/healthcheck.py.jinja b/app/api/endpoints/healthcheck.py.jinja deleted file mode 100644 index 305e2d2..0000000 --- a/app/api/endpoints/healthcheck.py.jinja +++ /dev/null @@ -1,45 +0,0 @@ -"""Endpoint healthcheck.""" - -from typing import Optional - -from fastapi import APIRouter - -from app.api.dependencies.healthcheck import ( - check_db_connection_dependency, - check_redis_connection_dependency, - {% if add_worker -%} - check_worker_status_dependency, - {%- endif %} -) -from app.services.healthcheck import ( - HealthCheckResponse, - HealthCheckResponseBuilder, - HealthCheckServiceResult, -) - -router = APIRouter() - - -@router.get("/healthcheck") -async def healthcheck( - redis_connection_error: Optional[str] = check_redis_connection_dependency, - db_connection_error: Optional[str] = check_db_connection_dependency, - {% if add_worker -%} - worker_status_error: Optional[str] = check_worker_status_dependency, - {%- endif %} -) -> HealthCheckResponse: - """Check the health of the bot and 
services.""" - healthcheck_builder = HealthCheckResponseBuilder() - healthcheck_builder.add_healthcheck_result( - HealthCheckServiceResult(name="postgres", error=db_connection_error) - ) - healthcheck_builder.add_healthcheck_result( - HealthCheckServiceResult(name="redis", error=redis_connection_error) - ) - {% if add_worker -%} - healthcheck_builder.add_healthcheck_result( - HealthCheckServiceResult(name="worker", error=worker_status_error) - ) - {%- endif %} - - return healthcheck_builder.build() diff --git a/app/api/routers.py b/app/api/routers.py deleted file mode 100644 index 35a3140..0000000 --- a/app/api/routers.py +++ /dev/null @@ -1,11 +0,0 @@ -"""Configuration of routers for all endpoints.""" - -from fastapi import APIRouter - -from app.api.endpoints.botx import router as bot_router -from app.api.endpoints.healthcheck import router as healthcheck_router - -router = APIRouter() - -router.include_router(healthcheck_router) -router.include_router(bot_router) diff --git a/app/api/__init__.py b/app/application/__init__.py similarity index 100% rename from app/api/__init__.py rename to app/application/__init__.py diff --git a/app/api/dependencies/__init__.py b/app/application/repository/__init__.py similarity index 100% rename from app/api/dependencies/__init__.py rename to app/application/repository/__init__.py diff --git a/app/application/repository/exceptions.py b/app/application/repository/exceptions.py new file mode 100644 index 0000000..0043878 --- /dev/null +++ b/app/application/repository/exceptions.py @@ -0,0 +1,22 @@ +class BaseRepositoryError(Exception): + """Base exception for all repository-specific exceptions.""" + + +class RecordDoesNotExistError(BaseRepositoryError): + """Raised when an entity does not exist.""" + + +class RecordUpdateError(BaseRepositoryError): + """Raised when an update fails.""" + + +class RecordDeleteError(BaseRepositoryError): + """Raised when a delete fails.""" + + +class RecordCreateError(BaseRepositoryError): + """Raised 
when a creation fails.""" + + +class RecordRetreiveError(BaseRepositoryError): + """Raised when a get fails.""" diff --git a/app/application/repository/interfaces.py b/app/application/repository/interfaces.py new file mode 100644 index 0000000..cdf5242 --- /dev/null +++ b/app/application/repository/interfaces.py @@ -0,0 +1,67 @@ +"""Record repository interface.""" + +from abc import ABC, abstractmethod +from typing import List, Optional + +from app.domain.entities.sample_record import SampleRecord + + +class ISampleRecordRepository(ABC): + """Interface for record repository operations.""" + + @abstractmethod + async def create(self, record: SampleRecord) -> SampleRecord: + """ + Create a new record in database + Parameters: + record: The record to be created. + + Returns: + SampleRecord: The newly created record of type SampleRecord. + """ + pass + + @abstractmethod + async def update(self, record: SampleRecord) -> SampleRecord: + """ + Update an existing record in database + Parameters: + record: The record to be updated. + + Returns: + SampleRecord: The updated record of type SampleRecord. + """ + pass + + @abstractmethod + async def delete(self, record_id: int) -> int: + """ + Delete a record from the database by provided id + + Parameters: + record_id: The unique identifier of the record to be deleted. + + Returns: + int + + The id of the record that was deleted (per the return annotation). + + Raises: + NotImplementedError: If this method is not overridden in the implementing class. + """ + pass + + @abstractmethod + async def get_by_id(self, record_id: int) -> SampleRecord: + """Get the record from the database by provided id + Parameters: + record_id: The id of the record to fetch. 
+ + Returns: + SampleRecord: The record from database.""" + pass + + @abstractmethod + async def get_all(self) -> List[SampleRecord]: + """Get all records from the database""" + pass diff --git a/app/api/endpoints/__init__.py b/app/application/service/__init__.py similarity index 100% rename from app/api/endpoints/__init__.py rename to app/application/service/__init__.py diff --git a/app/application/service/interfaces.py b/app/application/service/interfaces.py new file mode 100644 index 0000000..1ea9ad1 --- /dev/null +++ b/app/application/service/interfaces.py @@ -0,0 +1,8 @@ +from abc import ABC, abstractmethod +from typing import Optional + + +class HealthCheckService(ABC): + @abstractmethod + async def check(self) -> Optional[str]: # Return error or None + ... diff --git a/app/bot/__init__.py b/app/application/use_cases/__init__.py similarity index 100% rename from app/bot/__init__.py rename to app/application/use_cases/__init__.py diff --git a/app/application/use_cases/healthcheck.py b/app/application/use_cases/healthcheck.py new file mode 100644 index 0000000..2d105e4 --- /dev/null +++ b/app/application/use_cases/healthcheck.py @@ -0,0 +1,25 @@ +from app.application.service.interfaces import HealthCheckService +from app.domain.entities.healthcheck import ( + HealthCheckStatuses, + HealthCheckServiceResult, +) + + +class HealthCheckUseCase: + def __init__(self, services: list[tuple[str, HealthCheckService]]): + self.services = services + + async def execute( + self, + ) -> tuple[HealthCheckStatuses, list[HealthCheckServiceResult]]: + results = [] + healthy = True + + for name, service in self.services: + error = await service.check() + results.append(HealthCheckServiceResult(name=name, error=error)) + if error: + healthy = False + + status = HealthCheckStatuses.OK if healthy else HealthCheckStatuses.ERROR + return status, results diff --git a/app/application/use_cases/interfaces.py b/app/application/use_cases/interfaces.py new file mode 100644 index 
0000000..d3aff69 --- /dev/null +++ b/app/application/use_cases/interfaces.py @@ -0,0 +1,43 @@ +"""Interfaces for application use cases.""" + +from abc import ABC, abstractmethod + +from app.presentation.bot.schemas.sample_record import ( + SampleRecordCreateRequestSchema, + SampleRecordResponseSchema, + SampleRecordUpdateRequestSchema, + SampleRecordResponseListSchema, +) + + +class ISampleRecordUseCases(ABC): + """Interface for record use cases.""" + + @abstractmethod + async def create_record( + self, create_request: SampleRecordCreateRequestSchema + ) -> SampleRecordResponseSchema: + """Create a new record.""" + pass + + @abstractmethod + async def update_record( + self, update_request: SampleRecordUpdateRequestSchema + ) -> SampleRecordResponseSchema: + """Update an existing record.""" + pass + + @abstractmethod + async def delete_record(self, record_id: int) -> int: + """Delete a record and return the deleted record's id.""" + pass + + @abstractmethod + async def get_record(self, record_id: int) -> SampleRecordResponseSchema: + """Get a record by ID.""" + pass + + @abstractmethod + async def get_all_records(self) -> SampleRecordResponseListSchema: + """Get all records.""" + pass diff --git a/app/application/use_cases/record_use_cases.py b/app/application/use_cases/record_use_cases.py new file mode 100644 index 0000000..2b6bb90 --- /dev/null +++ b/app/application/use_cases/record_use_cases.py @@ -0,0 +1,52 @@ +"""Implementation of record use cases.""" + +from app.application.repository.interfaces import ISampleRecordRepository +from app.application.use_cases.interfaces import ISampleRecordUseCases +from app.domain.entities.sample_record import SampleRecord +from app.presentation.bot.schemas.sample_record import ( + SampleRecordCreateRequestSchema, + SampleRecordResponseSchema, + SampleRecordUpdateRequestSchema, + SampleRecordResponseListSchema, +) + + +class SampleRecordUseCases(ISampleRecordUseCases): + """Implementation of record use cases.""" + + def __init__(self, record_repo: 
ISampleRecordRepository): + self._repo = record_repo + + async def create_record( + self, request_object: SampleRecordCreateRequestSchema + ) -> SampleRecordResponseSchema: + domain_object = SampleRecord(record_data=request_object.record_data) + created_record = await self._repo.create(domain_object) + return SampleRecordResponseSchema.from_orm(created_record) + + async def update_record( + self, update_request: SampleRecordUpdateRequestSchema + ) -> SampleRecordResponseSchema: + """Update an existing record.""" + domain_object = SampleRecord( + record_data=update_request.record_data, id=update_request.id + ) + updated_record = await self._repo.update(domain_object) + return SampleRecordResponseSchema.from_orm(updated_record) + + async def delete_record(self, record_id: int) -> int: + """Delete a record.""" + return await self._repo.delete(record_id) + + async def get_record(self, record_id: int) -> SampleRecordResponseSchema: + """Get a record by ID.""" + fetched_record = await self._repo.get_by_id(record_id) + return SampleRecordResponseSchema.from_orm(fetched_record) + + async def get_all_records(self) -> SampleRecordResponseListSchema: + """Get all records.""" + fetched_records = await self._repo.get_all() + response_records = [ + SampleRecordResponseSchema.from_orm(record) for record in fetched_records + ] + return SampleRecordResponseListSchema(data=response_records) diff --git a/app/bot/commands/{% if CI %}test.py{% endif %} b/app/bot/commands/{% if CI %}test.py{% endif %} deleted file mode 100644 index 288f03d..0000000 --- a/app/bot/commands/{% if CI %}test.py{% endif %} +++ /dev/null @@ -1,154 +0,0 @@ -"""Handlers for tests.""" - -from unittest.mock import AsyncMock - -from pybotx import ( # noqa: WPS235 - AttachmentTypes, - Bot, - BotShuttingDownError, - BubbleMarkup, - HandlerCollector, - IncomingMessage, - IncomingMessageHandlerFunc, - KeyboardMarkup, - OutgoingMessage, -) -from pybotx.models.attachments import AttachmentVideo -from pybotx_smart_logger 
import smart_log - -from app.bot.middlewares.db_session import db_session_middleware -from app.db.record.repo import RecordRepo -from app.services.answer_error import AnswerError, AnswerMessageError - - -async def fsm_middleware( - message: IncomingMessage, bot: Bot, call_next: IncomingMessageHandlerFunc -) -> None: - message.state.fsm = AsyncMock() - await call_next(message, bot) - - -collector = HandlerCollector(middlewares=[fsm_middleware]) - - -@collector.command("/_test-redis-callback-repo", visible=False) -async def test_redis_callback_repo(message: IncomingMessage, bot: Bot) -> None: - """Testing redis callback.""" - await bot.answer_message("Hello!", callback_timeout=0.5) - - -@collector.command("/_test-redis-callback-repo-wait", visible=False) -async def test_redis_callback_repo_wait(message: IncomingMessage, bot: Bot) -> None: - """Testing redis wait callback.""" - sync_id = await bot.answer_message( - "Hello!", callback_timeout=0.5, wait_callback=False - ) - await bot.wait_botx_method_callback(sync_id) - - -@collector.command("/_test-redis-callback-repo-no-wait", visible=False) -async def test_redis_callback_repo_not_wait(message: IncomingMessage, bot: Bot) -> None: - """Testing redis repo callback not wait.""" - await bot.answer_message("Hello!", callback_timeout=0, wait_callback=False) - - -@collector.command("/_test-answer-message-error", visible=False) -async def test_answer_message_error(message: IncomingMessage, bot: Bot) -> None: - """Testing AnswerMessageError error exception.""" - raise AnswerMessageError( - "test", - metadata={"test": 1}, - bubbles=BubbleMarkup([[]]), - keyboard=KeyboardMarkup([[]]), - file=AttachmentVideo( - type=AttachmentTypes.VIDEO, - filename="test_file.mp4", - size=len(b"Hello, world!\n"), - is_async_file=False, - content=b"Hello, world!\n", - duration=10, - ), - recipients=[message.sender.huid], - silent_response=False, - markup_auto_adjust=False, - stealth_mode=False, - send_push=False, - ignore_mute=False, - 
wait_callback=True, - callback_timeout=1, - ) - - -@collector.command("/_test-answer-error", visible=False) -async def test_answer_error(message: IncomingMessage, bot: Bot) -> None: - """Testing AnswerError exception.""" - raise AnswerError( - message=OutgoingMessage( - bot_id=message.bot.id, - chat_id=message.chat.id, - body="test", - metadata={"test": 1}, - bubbles=BubbleMarkup([[]]), - keyboard=KeyboardMarkup([[]]), - file=AttachmentVideo( - type=AttachmentTypes.VIDEO, - filename="test_file.mp4", - size=len(b"Hello, world!\n"), - is_async_file=False, - content=b"Hello, world!\n", - duration=10, - ), - recipients=[message.sender.huid], - silent_response=False, - markup_auto_adjust=False, - stealth_mode=False, - send_push=False, - ignore_mute=False, - ), - wait_callback=True, - callback_timeout=1, - ) - - -@collector.command("/_test-fail-shutting-down", visible=False) -async def test_fail_shutting_down(message: IncomingMessage, bot: Bot) -> None: - """Testing fail while shutting down.""" - raise BotShuttingDownError("test") - - -@collector.command("/_test-fail", visible=False) -async def test_fail(message: IncomingMessage, bot: Bot) -> None: - """Testing internal error.""" - smart_log("Test smart_log output") - raise ValueError - - -@collector.command("/_test-redis", visible=False) -async def test_redis(message: IncomingMessage, bot: Bot) -> None: - """Testing redis.""" - # This test just for coverage - # Better to assert bot answers instead of using direct DB/Redis access - - redis_repo = bot.state.redis_repo - - await redis_repo.set("test_key", "test_value") - - -@collector.command("/_test-db", visible=False, middlewares=[db_session_middleware]) -async def test_db(message: IncomingMessage, bot: Bot) -> None: - """Testing db session.""" - # This test just for coverage - # Better to assert bot answers instead of using direct DB/Redis access - - # add text to history - # example of using database - record_repo = RecordRepo(message.state.db_session) - - await 
record_repo.create(record_data="test 1") - await record_repo.update(record_id=1, record_data="test 1 (updated)") - - await record_repo.create(record_data="test 2") - await record_repo.delete(record_id=2) - - await record_repo.create(record_data="test not unique data") - await record_repo.create(record_data="test not unique data") diff --git a/app/bot/middlewares/db_session.py b/app/bot/middlewares/db_session.py deleted file mode 100644 index 1592ccf..0000000 --- a/app/bot/middlewares/db_session.py +++ /dev/null @@ -1,15 +0,0 @@ -"""Middleware for creating db_session per-request.""" - -from pybotx import Bot, IncomingMessage, IncomingMessageHandlerFunc - - -async def db_session_middleware( - message: IncomingMessage, bot: Bot, call_next: IncomingMessageHandlerFunc -) -> None: - session_factory = bot.state.db_session_factory - - async with session_factory() as db_session: - message.state.db_session = db_session - - await call_next(message, bot) - await db_session.commit() diff --git a/app/db/crud.py b/app/db/crud.py deleted file mode 100644 index 8e95bdc..0000000 --- a/app/db/crud.py +++ /dev/null @@ -1,87 +0,0 @@ -"""CRUD implementation.""" - -from typing import Any, Dict, TypeVar - -from sqlalchemy import delete, insert, select, update -from sqlalchemy.inspection import inspect - -from app.db.sqlalchemy import AsyncSession - -T = TypeVar("T") # noqa: WPS111 - - -class CRUD: - """CRUD operations for models.""" - - def __init__(self, session: AsyncSession, cls_model: Any): - self._session = session - self._cls_model = cls_model - - async def create(self, *, model_data: Dict[str, Any]) -> Any: - """Create object.""" - query = insert(self._cls_model).values(**model_data) - - res = await self._session.execute(query) # type: ignore - return res.inserted_primary_key # type: ignore - - async def update( - self, - *, - pkey_val: Any, - model_data: Dict[str, Any], - ) -> None: - """Update object by primary key.""" - primary_key = inspect(self._cls_model).primary_key[0] - 
query = ( - update(self._cls_model) # type: ignore - .where(primary_key == pkey_val) - .values(**model_data) - .execution_options(synchronize_session="fetch") - ) - - await self._session.execute(query) - - async def delete(self, *, pkey_val: Any) -> None: - """Delete object by primary key value.""" - primary_key = inspect(self._cls_model).primary_key[0].name - query = ( - delete(self._cls_model) # type: ignore - .where(getattr(self._cls_model, primary_key) == pkey_val) - .execution_options(synchronize_session="fetch") - ) - - await self._session.execute(query) - - async def get(self, *, pkey_val: Any) -> Any: - """Get object by primary key.""" - primary_key = inspect(self._cls_model).primary_key[0] - query = select(self._cls_model).where(primary_key == pkey_val) - - rows = await self._session.execute(query) # type: ignore - return rows.scalars().one() - - async def get_or_none(self, *, pkey_val: Any) -> Any: - """Get object by primary key or none.""" - primary_key = inspect(self._cls_model).primary_key[0] - query = select(self._cls_model).where(primary_key == pkey_val) - - rows = await self._session.execute(query) # type: ignore - return rows.scalar() - - async def all( - self, - ) -> Any: - """Get all objects by db model.""" - query = select(self._cls_model) - - rows = await self._session.execute(query) - return rows.scalars().all() - - async def get_by_field(self, *, field: str, field_value: Any) -> Any: - """Return objects from db with condition field=val.""" - query = select(self._cls_model).where( - getattr(self._cls_model, field) == field_value - ) - - rows = await self._session.execute(query) # type: ignore - return rows.scalars().all() diff --git a/app/db/record/repo.py b/app/db/record/repo.py deleted file mode 100644 index aa32887..0000000 --- a/app/db/record/repo.py +++ /dev/null @@ -1,56 +0,0 @@ -"""Record repo.""" - -from typing import List, Optional - -from app.db.crud import CRUD -from app.db.record.models import RecordModel -from app.db.sqlalchemy 
import AsyncSession -from app.schemas.record import Record - - -class RecordRepo: - def __init__(self, session: AsyncSession): - """Initialize repo with CRUD.""" - self._crud = CRUD(session=session, cls_model=RecordModel) - - async def create(self, record_data: str) -> Record: - """Create record row in db.""" - row = await self._crud.create(model_data={"record_data": record_data}) - record_in_db = await self._crud.get(pkey_val=row.id) - return Record.from_orm(record_in_db) - - async def update(self, record_id: int, record_data: str) -> Record: - """Update record row in db.""" - await self._crud.update( - pkey_val=record_id, - model_data={"record_data": record_data}, - ) - record_in_db = await self._crud.get(pkey_val=record_id) - return Record.from_orm(record_in_db) - - async def delete(self, record_id: int) -> None: - await self._crud.delete(pkey_val=record_id) - - async def get(self, record_id: int) -> Record: - record = await self._crud.get(pkey_val=record_id) - return Record.from_orm(record) - - async def get_or_none(self, record_id: int) -> Optional[Record]: - record = await self._crud.get_or_none(pkey_val=record_id) - if record: - return Record.from_orm(record) - - return None - - async def get_all(self) -> List[Record]: - """Get all objects.""" - records_in_db = await self._crud.all() - return [Record.from_orm(record) for record in records_in_db] - - async def filter_by_record_data(self, record_data: str) -> List[Record]: - """Get all objects.""" - records_in_db = await self._crud.get_by_field( - field="record_data", - field_value=record_data, - ) - return [Record.from_orm(record) for record in records_in_db] diff --git a/app/db/sqlalchemy.py b/app/db/sqlalchemy.py deleted file mode 100644 index 8eaaa61..0000000 --- a/app/db/sqlalchemy.py +++ /dev/null @@ -1,62 +0,0 @@ -"""SQLAlchemy helpers.""" - -from asyncio import current_task -from typing import Callable - -from sqlalchemy import MetaData -from sqlalchemy.ext.asyncio import ( - AsyncEngine, - 
AsyncSession, - async_scoped_session, - async_sessionmaker, - create_async_engine, -) -from sqlalchemy.orm import declarative_base -from sqlalchemy.pool.impl import AsyncAdaptedQueuePool - -from app.settings import settings - -AsyncSessionFactory = Callable[..., AsyncSession] - - -def make_url_async(url: str) -> str: - """Add +asyncpg to url scheme.""" - return "postgresql+asyncpg" + url[url.find(":") :] # noqa: WPS336 - - -def make_url_sync(url: str) -> str: - """Remove +asyncpg from url scheme.""" - return "postgresql" + url[url.find(":") :] # noqa: WPS336 - - -convention = { - "ix": "ix_%(column_0_label)s", - "uq": "uq_%(table_name)s_%(column_0_name)s", - "ck": "ck_%(table_name)s_%(constraint_name)s", - "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", - "pk": "pk_%(table_name)s", -} - -Base = declarative_base(metadata=MetaData(naming_convention=convention)) - -engine: AsyncEngine = create_async_engine( - make_url_async(settings.POSTGRES_DSN), poolclass=AsyncAdaptedQueuePool -) - - -async def build_db_session_factory() -> AsyncSessionFactory: - await verify_db_connection(engine) - - return async_scoped_session( - async_sessionmaker(bind=engine, expire_on_commit=False), - scopefunc=current_task, - ) - - -async def verify_db_connection(engine: AsyncEngine) -> None: - connection = await engine.connect() - await connection.close() - - -async def close_db_connections() -> None: - await engine.dispose() diff --git a/app/bot/commands/__init__.py b/app/domain/__init__.py similarity index 100% rename from app/bot/commands/__init__.py rename to app/domain/__init__.py diff --git a/app/bot/error_handlers/__init__.py b/app/domain/entities/__init__.py similarity index 100% rename from app/bot/error_handlers/__init__.py rename to app/domain/entities/__init__.py diff --git a/app/schemas/enums.py b/app/domain/entities/healthcheck.py similarity index 52% rename from app/schemas/enums.py rename to app/domain/entities/healthcheck.py index 4fef0c2..47d1931 100644 
--- a/app/schemas/enums.py +++ b/app/domain/entities/healthcheck.py @@ -1,6 +1,12 @@ -"""Module for enums.""" - +from dataclasses import dataclass from enum import Enum +from typing import Optional + + +@dataclass +class HealthCheckServiceResult: + name: str + error: Optional[str] class StrEnum(str, Enum): # noqa: WPS600 diff --git a/app/domain/entities/sample_record.py b/app/domain/entities/sample_record.py new file mode 100644 index 0000000..026b6a2 --- /dev/null +++ b/app/domain/entities/sample_record.py @@ -0,0 +1,25 @@ +"""Record entity for the domain layer.""" + +from dataclasses import dataclass +from typing import Optional + +from app.domain.exceptions.domain_exceptions import WrongRecordData + + +@dataclass +class SampleRecord: + """Record entity representing a simple record in the system.""" + + record_data: str + id: int | None = None + + def __str__(self) -> str: + """Return string representation of the record.""" + return self.record_data + + def __post_init__(self) -> None: + """Validate business rules after dataclass init. + Example rule: record data must not start with A123. + """ + if self.record_data.startswith("A123"): + raise WrongRecordData("Record data shouldn't start with A123") diff --git a/app/bot/middlewares/__init__.py b/app/domain/exceptions/__init__.py similarity index 100% rename from app/bot/middlewares/__init__.py rename to app/domain/exceptions/__init__.py diff --git a/app/domain/exceptions/domain_exceptions.py b/app/domain/exceptions/domain_exceptions.py new file mode 100644 index 0000000..c732f51 --- /dev/null +++ b/app/domain/exceptions/domain_exceptions.py @@ -0,0 +1,16 @@ +"""Domain-specific exceptions.""" + + +class DomainException(Exception): + """Base exception for all domain-specific exceptions.""" + + def __init__(self, message: str = "Domain error occurred"): + self.message = message + super().__init__(self.message) + + +class WrongRecordData(DomainException): + """Raised when record data is not valid.""" + + def 
__init__(self, message: str = "Wrong record data"): + super().__init__(message) diff --git a/app/caching/__init__.py b/app/infrastructure/__init__.py similarity index 100% rename from app/caching/__init__.py rename to app/infrastructure/__init__.py diff --git a/app/db/__init__.py b/app/infrastructure/caching/__init__.py similarity index 100% rename from app/db/__init__.py rename to app/infrastructure/caching/__init__.py diff --git a/app/caching/callback_redis_repo.py b/app/infrastructure/caching/callback_redis_repo.py similarity index 51% rename from app/caching/callback_redis_repo.py rename to app/infrastructure/caching/callback_redis_repo.py index 2871a2d..0cfb6ed 100644 --- a/app/caching/callback_redis_repo.py +++ b/app/infrastructure/caching/callback_redis_repo.py @@ -12,11 +12,23 @@ class CallbackRedisRepo(CallbackRepoProto): + """Repository for storing and retrieving BotX method callbacks using Redis Pub/Sub. + + This class allows asynchronous waiting for callbacks by using Redis as a transport + mechanism and associating each callback with a unique sync_id. It supports setting, + waiting, and cleaning up callbacks, and handles proper shutdown behavior. + """ + def __init__( self, redis: Redis, prefix: Optional[str] = None, ): + """Initialize the callback repository with a Redis client. + Args: + redis: The Redis client instance. + prefix: Optional prefix to use for Redis keys. + """ self._redis = redis self._prefix = prefix or "" self._futures: Dict[UUID, asyncio.Future[BotXMethodCallback]] = {} @@ -27,18 +39,31 @@ async def create_botx_method_callback( self, sync_id: UUID, ) -> None: + """Prepare to receive a callback by subscribing to a Redis channel. + + Args: + sync_id: Unique identifier for the callback. 
+ """ self._futures[sync_id] = asyncio.Future() await self.pubsub.subscribe( - **{f"{self._prefix}:{sync_id}": self._message_handler} + **{self._get_channel_name(sync_id): self._message_handler} ) async def set_botx_method_callback_result( self, callback: BotXMethodCallback, ) -> None: + """Publish the callback result to Redis. + + Args: + callback: The callback data to publish. + + Raises: + BotXMethodCallbackNotFoundError: If no subscriber is listening on the channel. + """ dump = pickle.dumps(callback) status_code = await self._redis.publish( - f"{self._prefix}:{callback.sync_id}", dump + self._get_channel_name(callback.sync_id), dump ) if status_code != 1: raise BotXMethodCallbackNotFoundError(sync_id=callback.sync_id) @@ -48,6 +73,18 @@ async def wait_botx_method_callback( sync_id: UUID, timeout: float, ) -> BotXMethodCallback: + """Wait for the callback to be received within a timeout. + + Args: + sync_id: Unique identifier of the callback. + timeout: Time to wait for the callback in seconds. + + Returns: + The received BotXMethodCallback. + + Raises: + CallbackNotReceivedError: If the callback is not received in time. + """ try: callback = await asyncio.wait_for(self._futures[sync_id], timeout=timeout) except asyncio.TimeoutError: @@ -61,10 +98,22 @@ async def pop_botx_method_callback( self, sync_id: UUID, ) -> "asyncio.Future[BotXMethodCallback]": - await self.pubsub.unsubscribe(f"{self._prefix}:{sync_id}") + """Remove the callback from tracking and unsubscribe from Redis. + + Args: + sync_id: Unique identifier of the callback. + + Returns: + The future that was associated with the callback. + """ + await self.pubsub.unsubscribe(self._get_channel_name(sync_id)) return self._futures.pop(sync_id) async def stop_callbacks_waiting(self) -> None: + """Cancel all pending callbacks due to bot shutdown. + + Sets an exception on all incomplete futures and unsubscribes from Redis. 
+ """ await self.pubsub.unsubscribe() for sync_id, future in self._futures.items(): @@ -76,6 +125,13 @@ async def stop_callbacks_waiting(self) -> None: ) async def _message_handler(self, message: Any) -> None: + """Handle incoming Redis Pub/Sub messages. + + Deserializes and delivers the callback result to the appropriate future. + + Args: + message: The message from Redis. + """ if message["type"] == "message": callback: BotXMethodCallback = pickle.loads(message["data"]) # noqa: S301 @@ -85,3 +141,14 @@ async def _message_handler(self, message: Any) -> None: future.result() else: future.set_result(callback) + + def _get_channel_name(self, sync_id: UUID) -> str: + """Construct the Redis channel name for a given sync_id. + + Args: + sync_id: Unique identifier used to correlate the callback. + + Returns: + A string representing the full Redis channel name. + """ + return f"{self._prefix}:{sync_id}" diff --git a/app/caching/exception_handlers.py b/app/infrastructure/caching/exception_handlers.py similarity index 100% rename from app/caching/exception_handlers.py rename to app/infrastructure/caching/exception_handlers.py diff --git a/app/caching/redis_repo.py b/app/infrastructure/caching/redis_repo.py similarity index 100% rename from app/caching/redis_repo.py rename to app/infrastructure/caching/redis_repo.py diff --git a/app/infrastructure/containers.py b/app/infrastructure/containers.py new file mode 100644 index 0000000..c6086f0 --- /dev/null +++ b/app/infrastructure/containers.py @@ -0,0 +1,110 @@ +from dependency_injector import containers +from dependency_injector.providers import Factory, Callable, Singleton +from redis import asyncio as aioredis + +from app.application.use_cases.record_use_cases import SampleRecordUseCases +from app.infrastructure.caching.redis_repo import RedisRepo +from app.infrastructure.repositories.sample_record import SampleRecordRepository +from app.infrastructure.db.sqlalchemy import build_db_session_factory +from 
app.presentation.bot.resources import strings +from app.settings import settings + + +class BotSampleRecordCommandContainer(containers.DeclarativeContainer): + wiring_config = containers.WiringConfiguration( + modules=["app.presentation.bot.commands.sample_records"] + ) + + # Session factory provider - returns a factory that creates AsyncSession instances + session_factory = Factory(build_db_session_factory) + + record_use_cases_factory = Callable( + lambda session: SampleRecordUseCases( + record_repo=SampleRecordRepository(session=session) + ) + ) + + +class HealthCheckContainer(containers.DeclarativeContainer): + wiring_config = containers.WiringConfiguration( + modules=["app.presentation.api.healthcheck"] + ) + + # Session factory provider - returns a factory that creates AsyncSession instances + session_factory = Factory(build_db_session_factory) + + record_use_cases_factory = Callable( + lambda session: SampleRecordUseCases( + record_repo=SampleRecordRepository(session=session) + ) + ) + + +# class StorageContainer(containers.DeclarativeContainer): +# wiring_config = containers.WiringConfiguration( +# modules=["app.presentation.bot.commands.sample_records"] +# ) +# +# # Provider that returns a factory to create sessions +# session_factory = Factory(build_db_session_factory) +# +# # Provider that creates a session (e.g., AsyncSession instance) +# session = Resource(session_factory) +# +# # Provider that creates the SampleRecordUseCases, injecting the session +# record_use_cases = Factory( +# SampleRecordUseCases, +# record_repo=Factory( +# SampleRecordRepository, +# session=session +# ) +# ) + + +class ApplicationStartupContainer(containers.DeclarativeContainer): + """Container for application startup dependencies.""" + + wiring_config = containers.WiringConfiguration(modules=["app.main"]) + + # Database + # db_session_factory = Factory(build_db_session_factory) + + # Redis client + redis_client = Singleton( + aioredis.from_url, + settings.REDIS_DSN, + ) + + 
redis_repo = Factory( + RedisRepo, + redis=redis_client, + prefix=strings.BOT_PROJECT_NAME, + ) + + ## Configure connection pool for Redis + # redis_connection_pool = Callable( + # lambda: aioredis.BlockingConnectionPool( + # max_connections=settings.REDIS_CONNECTION_POOL_SIZE, + # **(redis_client.provided.connection_pool.connection_kwargs), + # ) + # ) + # + # # Set connection pool for Redis client + # redis_client_with_pool = Callable( + # lambda: redis_client.provided.__setattr__( + # "connection_pool", redis_connection_pool() + # ) or redis_client.provided + # ) + # + # # Redis repo + # redis_repo = Factory( + # RedisRepo, + # redis=redis_client_with_pool, + # prefix=strings.BOT_PROJECT_NAME, + # ) + # + # # Callback repo + # callback_repo = Factory( + # CallbackRedisRepo, + # redis=redis_client_with_pool, + # ) diff --git a/app/db/migrations/__init__.py b/app/infrastructure/db/__init__.py similarity index 100% rename from app/db/migrations/__init__.py rename to app/infrastructure/db/__init__.py diff --git a/app/db/migrations/versions/__init__.py b/app/infrastructure/db/migrations/__init__.py similarity index 100% rename from app/db/migrations/versions/__init__.py rename to app/infrastructure/db/migrations/__init__.py diff --git a/app/db/migrations/env.py b/app/infrastructure/db/migrations/env.py similarity index 82% rename from app/db/migrations/env.py rename to app/infrastructure/db/migrations/env.py index 9490a59..9812b7b 100644 --- a/app/db/migrations/env.py +++ b/app/infrastructure/db/migrations/env.py @@ -6,14 +6,14 @@ from sqlalchemy import engine_from_config, pool # init config -sys.path.append(str(pathlib.Path(__file__).resolve().parents[3])) +sys.path.append(str(pathlib.Path(__file__).resolve().parents[4])) # Imports from `app` should go after `path` patch from app.settings import settings # isort:skip -from app.db.sqlalchemy import Base, make_url_sync # isort:skip +from app.infrastructure.db.sqlalchemy import Base, make_url_sync # isort:skip # Import 
models to make them visible by alembic -import app.db.record.models # isort:skip +import app.infrastructure.db.sample_record.models # isort:skip postgres_dsn = make_url_sync(settings.POSTGRES_DSN) context_config = context.config diff --git a/app/db/migrations/script.py.mako b/app/infrastructure/db/migrations/script.py.mako similarity index 100% rename from app/db/migrations/script.py.mako rename to app/infrastructure/db/migrations/script.py.mako diff --git a/app/db/migrations/versions/d6e3a38b1fbd_.py b/app/infrastructure/db/migrations/versions/765dcfed2d16_init.py similarity index 65% rename from app/db/migrations/versions/d6e3a38b1fbd_.py rename to app/infrastructure/db/migrations/versions/765dcfed2d16_init.py index a3af451..50deaef 100644 --- a/app/db/migrations/versions/d6e3a38b1fbd_.py +++ b/app/infrastructure/db/migrations/versions/765dcfed2d16_init.py @@ -1,16 +1,17 @@ -"""empty message +"""init -Revision ID: d6e3a38b1fbd +Revision ID: 765dcfed2d16 Revises: -Create Date: 2021-07-09 13:40:29.058513 +Create Date: 2025-06-11 16:30:02.133576 Doc: https://alembic.sqlalchemy.org/en/latest/tutorial.html#create-a-migration-script """ -import sqlalchemy as sa from alembic import op +import sqlalchemy as sa + -revision = "d6e3a38b1fbd" +revision = "765dcfed2d16" down_revision = None branch_labels = None depends_on = None @@ -19,15 +20,15 @@ def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table( - "records", + "sample_record", sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), - sa.Column("record_data", sa.String(), nullable=True), - sa.PrimaryKeyConstraint("id"), + sa.Column("record_data", sa.String(), nullable=False), + sa.PrimaryKeyConstraint("id", name=op.f("pk_sample_record")), ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table("records") + op.drop_table("sample_record") # ### end Alembic commands ### diff --git a/app/db/record/__init__.py b/app/infrastructure/db/migrations/versions/__init__.py similarity index 100% rename from app/db/record/__init__.py rename to app/infrastructure/db/migrations/versions/__init__.py diff --git a/app/resources/__init__.py b/app/infrastructure/db/sample_record/__init__.py similarity index 100% rename from app/resources/__init__.py rename to app/infrastructure/db/sample_record/__init__.py diff --git a/app/db/record/models.py b/app/infrastructure/db/sample_record/models.py similarity index 57% rename from app/db/record/models.py rename to app/infrastructure/db/sample_record/models.py index f531421..a71e741 100644 --- a/app/db/record/models.py +++ b/app/infrastructure/db/sample_record/models.py @@ -2,17 +2,15 @@ from sqlalchemy.orm import Mapped, mapped_column -from app.db.sqlalchemy import Base +from app.infrastructure.db.sqlalchemy import Base -class RecordModel(Base): +class SampleRecordModel(Base): """Simple database model for example.""" - __tablename__ = "records" + __tablename__ = "sample_record" - id: Mapped[int] = mapped_column( - primary_key=True, autoincrement=True - ) # noqa: WPS125 + id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True) # noqa: WPS125 record_data: Mapped[str] def __repr__(self) -> str: diff --git a/app/infrastructure/db/sqlalchemy.py b/app/infrastructure/db/sqlalchemy.py new file mode 100644 index 0000000..4de8c1d --- /dev/null +++ b/app/infrastructure/db/sqlalchemy.py @@ -0,0 +1,99 @@ +"""SQLAlchemy helpers.""" + +from asyncio import current_task +from contextlib import asynccontextmanager +from functools import wraps, lru_cache +from typing import Callable, Any + +from sqlalchemy import MetaData +from sqlalchemy.ext.asyncio import ( + AsyncEngine, + AsyncSession, + async_scoped_session, + async_sessionmaker, + create_async_engine, +) +from sqlalchemy.orm import declarative_base +from 
sqlalchemy.pool.impl import AsyncAdaptedQueuePool + +from app.settings import settings + +AsyncSessionFactory = Callable[..., AsyncSession] + + +def make_url_async(url: str) -> str: + """Add +asyncpg to url scheme.""" + return "postgresql+asyncpg" + url[url.find(":") :] # noqa: WPS336 + + +def make_url_sync(url: str) -> str: + """Remove +asyncpg from url scheme.""" + return "postgresql" + url[url.find(":") :] # noqa: WPS336 + + +convention = { + "ix": "ix_%(column_0_label)s", + "uq": "uq_%(table_name)s_%(column_0_name)s", + "ck": "ck_%(table_name)s_%(constraint_name)s", + "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", + "pk": "pk_%(table_name)s", +} + +Base = declarative_base(metadata=MetaData(naming_convention=convention)) + + +@lru_cache(maxsize=1) +def get_engine() -> AsyncEngine: + """Lazily initialize and cache a single SQLAlchemy async engine.""" + return create_async_engine( + make_url_async(settings.POSTGRES_DSN), + poolclass=AsyncAdaptedQueuePool, + ) + + +async def build_db_session_factory() -> AsyncSessionFactory: + await verify_db_connection(get_engine()) + + return async_scoped_session( + async_sessionmaker(bind=get_engine(), expire_on_commit=False), + scopefunc=current_task, + ) + + +@asynccontextmanager +async def session_resource(): + factory = await build_db_session_factory() + session: AsyncSession = factory() + try: + yield session + finally: + await session.close() + + +async def verify_db_connection(engine: AsyncEngine) -> None: + connection = await engine.connect() + await connection.close() + + +async def close_db_connections() -> None: + await get_engine().dispose() + + +def provide_session(func: Callable) -> Callable: + """ + Provides a database session to an async function if one is not already passed. + + :param func: The asynchronous function to wrap. It must accept a `session` + keyword argument. 
+ :return: The wrapped function with automatic session provisioning.""" + + @wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + if kwargs.get("session"): + return await func(*args, **kwargs) + + session_factory = await build_db_session_factory() + async with session_factory() as session: + return await func(*args, **kwargs, session=session) + + return wrapper diff --git a/app/schemas/__init__.py b/app/infrastructure/middlewares/__init__.py similarity index 100% rename from app/schemas/__init__.py rename to app/infrastructure/middlewares/__init__.py diff --git a/app/bot/middlewares/answer_error.py b/app/infrastructure/middlewares/answer_error.py similarity index 100% rename from app/bot/middlewares/answer_error.py rename to app/infrastructure/middlewares/answer_error.py diff --git a/app/bot/middlewares/smart_logger.py b/app/infrastructure/middlewares/smart_logger.py similarity index 100% rename from app/bot/middlewares/smart_logger.py rename to app/infrastructure/middlewares/smart_logger.py diff --git a/app/{% if add_worker %}worker{% endif %}/__init__.py b/app/infrastructure/repositories/__init__.py similarity index 100% rename from app/{% if add_worker %}worker{% endif %}/__init__.py rename to app/infrastructure/repositories/__init__.py diff --git a/app/infrastructure/repositories/sample_record.py b/app/infrastructure/repositories/sample_record.py new file mode 100644 index 0000000..fed23a2 --- /dev/null +++ b/app/infrastructure/repositories/sample_record.py @@ -0,0 +1,139 @@ +"""Record repository implementation.""" + +from typing import List, Optional + +from sqlalchemy import delete, insert, select, update +from sqlalchemy.exc import NoResultFound, SQLAlchemyError + +from app.application.repository.exceptions import ( + RecordDoesNotExistError, + RecordUpdateError, + RecordCreateError, + RecordDeleteError, + RecordRetreiveError, +) +from app.application.repository.interfaces import ISampleRecordRepository +from app.domain.entities.sample_record 
import SampleRecord +from app.infrastructure.db.sample_record.models import SampleRecordModel +from app.infrastructure.db.sqlalchemy import AsyncSession +from app.utils.exceptions_mapper import exception_mapper + + +class SampleRecordRepository(ISampleRecordRepository): + """Implementation of the record repository interface.""" + + def __init__(self, session: AsyncSession): + """Initialize the repository with a database session. + + Args: + session: The database session. + """ + self._session = session + + @exception_mapper( + catch_exceptions=SQLAlchemyError, raise_exception=RecordCreateError + ) + async def create(self, record: SampleRecord) -> SampleRecord: + query = ( + insert(SampleRecordModel) + .values(record_data=record.record_data) + .returning(SampleRecordModel) + ) + result = await self._session.execute(query) + await self._session.flush() + record_model = result.scalar_one() + return self._to_domain_object(record_model) + + @exception_mapper( + catch_exceptions=SQLAlchemyError, raise_exception=RecordUpdateError + ) + async def update(self, record: SampleRecord) -> SampleRecord: + query = ( + update(SampleRecordModel) + .where(SampleRecordModel.id == record.id) + .values(record_data=record.record_data) + .returning(SampleRecordModel) + ) + execute_result = (await self._session.execute(query)).scalar_one_or_none() + await self._session.flush() + if execute_result is None: + raise RecordDoesNotExistError( + f"Sample record with id={record.id} does not exist." + ) + + return self._to_domain_object(execute_result) + + @exception_mapper( + catch_exceptions=SQLAlchemyError, raise_exception=RecordDeleteError + ) + async def delete(self, record_id: int) -> int: + """Delete a record. + + Args: + record_id: The ID of the record to delete. 
+ """ + query = ( + delete(SampleRecordModel) + .where(SampleRecordModel.id == record_id) + .execution_options(synchronize_session="fetch") + .returning(SampleRecordModel.id) + ) + + deletion_result = (await self._session.execute(query)).scalar_one_or_none() + + if deletion_result is None: + raise RecordDoesNotExistError( + f"Sample record with id={record_id} does not exist." + ) + + await self._session.flush() + + return deletion_result + + @exception_mapper( + catch_exceptions=NoResultFound, raise_exception=RecordDoesNotExistError + ) + async def get_by_id(self, record_id: int) -> SampleRecord: + """Get a record by ID. + + Args: + record_id: The ID of the record to get. + + Returns: + The record as a domain entity. + + Raises: + RecordNotFoundError: If the record is not found. + """ + query = select(SampleRecordModel).where(SampleRecordModel.id == record_id) + result = await self._session.execute(query) + return self._to_domain_object(result.scalar_one()) + + @exception_mapper( + catch_exceptions=SQLAlchemyError, raise_exception=RecordRetreiveError + ) + async def get_all(self) -> List[SampleRecord]: + """Get all records. + + Returns: + A list of all records as domain entities. + """ + query = select(SampleRecordModel) + result = await self._session.execute(query) + record_models = result.scalars().all() + + return [self._to_domain_object(record) for record in record_models] + + def _to_domain_object(self, record_model: SampleRecordModel) -> SampleRecord: + """Convert a database model to a domain entity. + + Args: + record_model: The database model to convert. + + Returns: + The corresponding domain entity. 
+ """ + return SampleRecord( + id=record_model.id, + record_data=record_model.record_data, + ) diff --git a/tests/commands/__init__.py b/app/infrastructure/services/__init__.py similarity index 100% rename from tests/commands/__init__.py rename to app/infrastructure/services/__init__.py diff --git a/app/infrastructure/services/healthcheck.py b/app/infrastructure/services/healthcheck.py new file mode 100644 index 0000000..e7162dc --- /dev/null +++ b/app/infrastructure/services/healthcheck.py @@ -0,0 +1,48 @@ +from fastapi import Request +from sqlalchemy.sql import text +from asyncio.exceptions import TimeoutError +from pybotx import Bot + +from app.application.service.interfaces import HealthCheckService +from app.infrastructure.worker.worker import queue +from app.settings import settings + + +class PostgresHealthCheck(HealthCheckService): + def __init__(self, request: Request): + self._request = request + + async def check(self): + bot: Bot = self._request.app.state.bot + session_factory = bot.state.db_session_factory + + async with session_factory() as db_session: + try: + await db_session.execute(text("SELECT 1")) + except Exception as exc: + return str(exc) + return None + + +class RedisHealthCheck(HealthCheckService): + def __init__(self, request: Request): + self._request = request + + async def check(self): + bot: Bot = self._request.app.state.bot + return await bot.state.redis_repo.ping() + + +class WorkerHealthCheck(HealthCheckService): + async def check(self): + job = await queue.enqueue("healthcheck") + if not job: + return None + + try: + await job.refresh(settings.WORKER_TIMEOUT_SEC) + except TimeoutError: + return "Worker is overloaded or not launched" + except Exception as exc: + return str(exc) + return None diff --git a/tests/endpoints/__init__.py b/app/infrastructure/worker/__init__.py similarity index 100% rename from tests/endpoints/__init__.py rename to app/infrastructure/worker/__init__.py diff --git a/app/{% if add_worker %}worker{% endif 
%}/worker.py.jinja b/app/infrastructure/worker/worker.py similarity index 81% rename from app/{% if add_worker %}worker{% endif %}/worker.py.jinja rename to app/infrastructure/worker/worker.py index ac26aa8..2941733 100644 --- a/app/{% if add_worker %}worker{% endif %}/worker.py.jinja +++ b/app/infrastructure/worker/worker.py @@ -6,7 +6,7 @@ from redis import asyncio as aioredis from saq import Queue -from app.caching.callback_redis_repo import CallbackRedisRepo +from app.infrastructure.caching.callback_redis_repo import CallbackRedisRepo from app.logger import logger # `saq` import its own settings and hides our module @@ -16,7 +16,7 @@ async def startup(ctx: SaqCtx) -> None: - from app.bot.bot import get_bot # noqa: WPS433 + from app.presentation.bot.bot import get_bot # noqa: WPS433 callback_repo = CallbackRedisRepo(aioredis.from_url(app_settings.REDIS_DSN)) bot = get_bot(callback_repo, raise_exceptions=False) @@ -39,7 +39,7 @@ async def healthcheck(_: SaqCtx) -> Literal[True]: return True -queue = Queue(aioredis.from_url(app_settings.REDIS_DSN), name="{{bot_project_name}}") +queue = Queue(aioredis.from_url(app_settings.REDIS_DSN), name="bot_refactor") settings = { "queue": queue, diff --git a/app/main.py b/app/main.py index 4c9be06..0b298a9 100644 --- a/app/main.py +++ b/app/main.py @@ -3,32 +3,32 @@ import asyncio from functools import partial +from dependency_injector.wiring import Provide from fastapi import FastAPI from pybotx import Bot from redis import asyncio as aioredis -from app.api.routers import router -from app.bot.bot import get_bot -from app.caching.callback_redis_repo import CallbackRedisRepo -from app.caching.exception_handlers import PubsubExceptionHandler -from app.caching.redis_repo import RedisRepo -from app.db.sqlalchemy import build_db_session_factory, close_db_connections -from app.resources import strings +from app.presentation.api.routers import router +from app.presentation.bot.bot import get_bot +from 
app.infrastructure.caching.callback_redis_repo import CallbackRedisRepo +from app.infrastructure.caching.exception_handlers import PubsubExceptionHandler +from app.infrastructure.containers import ApplicationStartupContainer +from app.infrastructure.db.sqlalchemy import close_db_connections +from app.presentation.bot.resources import strings from app.settings import settings -async def startup(application: FastAPI, raise_bot_exceptions: bool) -> None: - # -- Database -- - db_session_factory = await build_db_session_factory() - - # -- Redis -- - redis_client = aioredis.from_url(settings.REDIS_DSN) +async def startup( + application: FastAPI, + raise_bot_exceptions: bool, + redis_client=Provide[ApplicationStartupContainer.redis_client], + redis_repo=Provide[ApplicationStartupContainer.redis_repo], +) -> None: pool = aioredis.BlockingConnectionPool( - max_connections=settings.CONNECTION_POOL_SIZE, + max_connections=settings.REDIS_CONNECTION_POOL_SIZE, **redis_client.connection_pool.connection_kwargs, ) redis_client.connection_pool = pool - redis_repo = RedisRepo(redis=redis_client, prefix=strings.BOT_PROJECT_NAME) # -- Bot -- callback_repo = CallbackRedisRepo(redis_client) @@ -39,7 +39,7 @@ async def startup(application: FastAPI, raise_bot_exceptions: bool) -> None: await bot.startup() - bot.state.db_session_factory = db_session_factory + # bot.state.db_session_factory = db_session_factory bot.state.redis_repo = redis_repo application.state.bot = bot @@ -66,6 +66,10 @@ async def shutdown(application: FastAPI) -> None: def get_application(raise_bot_exceptions: bool = False) -> FastAPI: """Create configured server application instance.""" + # Initialize the container + container = ApplicationStartupContainer() + container.wire(modules=["app.main"]) + application = FastAPI(title=strings.BOT_PROJECT_NAME, openapi_url=None) application.add_event_handler( diff --git a/app/presentation/__init__.py b/app/presentation/__init__.py new file mode 100644 index 0000000..e69de29 diff 
--git a/app/presentation/api/__init__.py b/app/presentation/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/api/dependencies/bot.py b/app/presentation/api/bot.py similarity index 100% rename from app/api/dependencies/bot.py rename to app/presentation/api/bot.py diff --git a/app/api/endpoints/botx.py b/app/presentation/api/botx.py similarity index 98% rename from app/api/endpoints/botx.py rename to app/presentation/api/botx.py index 60f2eb5..b39bade 100644 --- a/app/api/endpoints/botx.py +++ b/app/presentation/api/botx.py @@ -16,7 +16,7 @@ ) from pybotx.constants import BOT_API_VERSION -from app.api.dependencies.bot import bot_dependency +from app.presentation.api.bot import bot_dependency from app.logger import logger from app.settings import settings diff --git a/app/presentation/api/healthcheck.py b/app/presentation/api/healthcheck.py new file mode 100644 index 0000000..687c5da --- /dev/null +++ b/app/presentation/api/healthcheck.py @@ -0,0 +1,41 @@ +from fastapi import APIRouter, Request + +from app.application.use_cases.healthcheck import HealthCheckUseCase +from app.infrastructure.services.healthcheck import ( + PostgresHealthCheck, + RedisHealthCheck, + WorkerHealthCheck, +) +from app.presentation.api.schemas.healthcheck import ( + HealthCheckResponse, + HealthCheckFailed, + HealthCheckSucceed, +) + +router = APIRouter() + + +@router.get("/healthcheck", response_model=HealthCheckResponse) +async def healthcheck(request: Request): + services = [ + ("postgres", PostgresHealthCheck(request)), + ("redis", RedisHealthCheck(request)), + ("worker", WorkerHealthCheck()), + ] + + use_case = HealthCheckUseCase(services) + status, raw_results = await use_case.execute() + + response_models = [] + for r in raw_results: + if r.error: + response_models.append( + HealthCheckFailed(name=r.name, error=r.error, status="error") + ) + else: + response_models.append(HealthCheckSucceed(name=r.name, status="ok")) + + return HealthCheckResponse( + 
status=status, + services=response_models, + ) diff --git a/app/presentation/api/routers.py b/app/presentation/api/routers.py new file mode 100644 index 0000000..53b715e --- /dev/null +++ b/app/presentation/api/routers.py @@ -0,0 +1,9 @@ +"""Configuration of routers for all endpoints.""" + +from fastapi import APIRouter + +from app.presentation.api.botx import router as bot_router + +router = APIRouter() + +router.include_router(bot_router) diff --git a/app/presentation/api/schemas/__init__.py b/app/presentation/api/schemas/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/presentation/api/schemas/healthcheck.py b/app/presentation/api/schemas/healthcheck.py new file mode 100644 index 0000000..9c298a0 --- /dev/null +++ b/app/presentation/api/schemas/healthcheck.py @@ -0,0 +1,24 @@ +from typing import Literal, Union, Optional, List + +from pydantic import BaseModel + +from app.domain.entities.healthcheck import HealthCheckStatuses + + +class HealthCheckSucceed(BaseModel): + name: str + status: Literal[HealthCheckStatuses.OK] = HealthCheckStatuses.OK + + +class HealthCheckFailed(BaseModel): + name: str + error: str + status: Literal[HealthCheckStatuses.ERROR] = HealthCheckStatuses.ERROR + + +HealthCheckResult = Union[HealthCheckSucceed, HealthCheckFailed] + + +class HealthCheckResponse(BaseModel): + status: Optional[HealthCheckStatuses] + services: List[HealthCheckResult] diff --git a/app/presentation/bot/__init__.py b/app/presentation/bot/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/bot/bot.py.jinja b/app/presentation/bot/bot.py similarity index 62% rename from app/bot/bot.py.jinja rename to app/presentation/bot/bot.py index e4a0df3..0018e85 100644 --- a/app/bot/bot.py.jinja +++ b/app/presentation/bot/bot.py @@ -1,13 +1,13 @@ """Configuration for bot instance.""" from httpx import AsyncClient, Limits -from pybotx import Bot, CallbackRepoProto{% if add_fsm %} -from pybotx_fsm import FSMMiddleware{% endif %} +from 
pybotx import Bot, CallbackRepoProto +from pybotx_fsm import FSMMiddleware -from app.bot.commands import common{% if CI %}, test{% endif %} -from app.bot.error_handlers.internal_error import internal_error_handler -from app.bot.middlewares.answer_error import answer_error_middleware -from app.bot.middlewares.smart_logger import smart_logger_middleware +from app.presentation.bot.commands import common, sample_record_simple +from app.presentation.bot.handlers.internal_error import internal_error_handler +from app.infrastructure.middlewares.answer_error import answer_error_middleware +from app.infrastructure.middlewares.smart_logger import smart_logger_middleware from app.settings import settings BOTX_CALLBACK_TIMEOUT = 30 @@ -19,7 +19,7 @@ def get_bot(callback_repo: CallbackRepoProto, raise_exceptions: bool) -> Bot: exception_handlers[Exception] = internal_error_handler return Bot( - collectors=[common.collector{% if CI %}, test.collector{% endif %}], + collectors=[common.collector, sample_record_simple.collector], bot_accounts=settings.BOT_CREDENTIALS, exception_handlers=exception_handlers, # type: ignore default_callback_timeout=BOTX_CALLBACK_TIMEOUT, @@ -29,11 +29,11 @@ def get_bot(callback_repo: CallbackRepoProto, raise_exceptions: bool) -> Bot: ), middlewares=[ smart_logger_middleware, - answer_error_middleware,{% if add_fsm %} + answer_error_middleware, FSMMiddleware( [], state_repo_key="redis_repo", - ),{% endif %} + ), ], callback_repo=callback_repo, ) diff --git a/app/presentation/bot/commands/__init__.py b/app/presentation/bot/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/presentation/bot/commands/command_listing.py b/app/presentation/bot/commands/command_listing.py new file mode 100644 index 0000000..3f94f40 --- /dev/null +++ b/app/presentation/bot/commands/command_listing.py @@ -0,0 +1,28 @@ +from pydantic import BaseModel + + +class BotCommand(BaseModel): + command_name: str + description: str | None + visible: bool = 
True + + def command_data(self) -> dict: + return { + "command_name": self.command_name, + "description": self.description, + "visible": self.visible, + } + + +class SampleRecordCommands: + CREATE_RECORD = BotCommand( + command_name="/create_record", + description="Создать запись", + ) + + +class SampleRecordFSMCommands: + CREATE_RECORD = BotCommand( + command_name="/create_record_fsm", + description="Создать запись, используя fsm", + ) diff --git a/app/bot/commands/common.py b/app/presentation/bot/commands/common.py similarity index 97% rename from app/bot/commands/common.py rename to app/presentation/bot/commands/common.py index 0c7d634..35ccb0e 100644 --- a/app/bot/commands/common.py +++ b/app/presentation/bot/commands/common.py @@ -12,7 +12,7 @@ StatusRecipient, ) -from app.resources import strings +from app.presentation.bot.resources import strings collector = HandlerCollector() diff --git a/app/presentation/bot/commands/sample_record_fsm.py b/app/presentation/bot/commands/sample_record_fsm.py new file mode 100644 index 0000000..5c3a4e0 --- /dev/null +++ b/app/presentation/bot/commands/sample_record_fsm.py @@ -0,0 +1,26 @@ +from dependency_injector.wiring import inject, Provider +from pybotx import HandlerCollector, Bot, IncomingMessage +from sqlalchemy.ext.asyncio import AsyncSession + +from app.infrastructure.containers import BotSampleRecordCommandContainer +from app.infrastructure.db.sqlalchemy import provide_session +from app.presentation.bot.commands.command_listing import SampleRecordCommands +from app.presentation.bot.handlers import CreateSampleRecordHandler + +collector = HandlerCollector() + + +@collector.command(**SampleRecordCommands.CREATE_RECORD.command_data()) +@provide_session +@inject +async def create_sample_record_with_fsm( + message: IncomingMessage, + bot: Bot, + session: AsyncSession, + record_use_cases_factory=Provider[ + BotSampleRecordCommandContainer.record_use_cases_factory + ], +): + await CreateSampleRecordHandler( + bot=bot, 
message=message, use_cases=record_use_cases_factory.provider(session) + ).execute() diff --git a/app/presentation/bot/commands/sample_record_simple.py b/app/presentation/bot/commands/sample_record_simple.py new file mode 100644 index 0000000..7f44173 --- /dev/null +++ b/app/presentation/bot/commands/sample_record_simple.py @@ -0,0 +1,28 @@ +from dependency_injector.providers import Factory +from dependency_injector.wiring import inject, Provider +from pybotx import HandlerCollector, Bot, IncomingMessage +from sqlalchemy.ext.asyncio import AsyncSession + +from app.application.use_cases.interfaces import ISampleRecordUseCases +from app.infrastructure.containers import BotSampleRecordCommandContainer +from app.infrastructure.db.sqlalchemy import provide_session +from app.presentation.bot.commands.command_listing import SampleRecordCommands +from app.presentation.bot.handlers.sample_record import CreateSampleRecordHandler + +collector = HandlerCollector() + + +@collector.command(**SampleRecordCommands.CREATE_RECORD.command_data()) +@provide_session +@inject +async def create_sample_record( + message: IncomingMessage, + bot: Bot, + session: AsyncSession, + record_use_cases_factory: Factory[ISampleRecordUseCases] = Provider[ + BotSampleRecordCommandContainer.record_use_cases_factory + ], +): + await CreateSampleRecordHandler( + bot=bot, message=message, use_cases=record_use_cases_factory.provider(session) + ).execute() diff --git a/app/presentation/bot/error_handlers/__init__.py b/app/presentation/bot/error_handlers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/presentation/bot/handlers/__init__.py b/app/presentation/bot/handlers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/presentation/bot/handlers/command.py b/app/presentation/bot/handlers/command.py new file mode 100644 index 0000000..57735aa --- /dev/null +++ b/app/presentation/bot/handlers/command.py @@ -0,0 +1,54 @@ +import abc +from abc import ABC + +from 
pybotx import Bot, IncomingMessage +from pydantic import BaseModel + +from app.presentation.bot.handlers.error import BaseExceptionHandler +from app.presentation.bot.validators.base import IBotRequestParser + + +class BaseCommandHandler(ABC): + def __init__( + self, + bot: Bot, + message: IncomingMessage, + exception_handler: BaseExceptionHandler | None = None, + ): + self._bot = bot + self._message = message + self._exception_handler = exception_handler or BaseExceptionHandler() + + @property + @abc.abstractmethod + def incoming_argument_parser( + self, + ) -> IBotRequestParser | None: + pass + + @abc.abstractmethod + async def handle_logic( + self, + request_parameter: BaseModel | str, + ) -> None: + pass + + def get_request_parameter( + self, + ) -> BaseModel | str | None: + return ( + self.incoming_argument_parser.parse(self._message) + if self.incoming_argument_parser + else self._message.argument + ) + + async def execute( + self, + ): + try: + parameter = self.get_request_parameter() + await self.handle_logic(parameter) + except Exception as exc: + await self._exception_handler.handle_exception( + exc, self._bot, self._message + ) diff --git a/app/presentation/bot/handlers/error.py b/app/presentation/bot/handlers/error.py new file mode 100644 index 0000000..5c5c35e --- /dev/null +++ b/app/presentation/bot/handlers/error.py @@ -0,0 +1,48 @@ +from typing import Callable +from uuid import uuid4 + +from pybotx import Bot, IncomingMessage, BotShuttingDownError + +from app.logger import logger +from app.presentation.bot.validators.exceptions import MessageValidationError +from app.presentation.bot.resources import strings + + +class BaseExceptionHandler: + def __init__( + self, + exception_explain_mapping: dict[type[Exception], str | Callable] | None = None, + ): + self.exception_explain_mapping = exception_explain_mapping or {} + + async def handle_exception( + self, exc: Exception, bot: Bot, message: IncomingMessage + ) -> None: + if fsm_manager := 
getattr(message.state, "fsm", None): + await fsm_manager.drop_state() + + user_answer = await self._get_exception_message_for_user( + exc, + ) + + logger.error(f"Error: {user_answer}", exc_info=exc) + + await bot.answer_message( + user_answer, + wait_callback=not isinstance(exc, BotShuttingDownError), + ) + + async def _get_exception_message_for_user(self, exc: Exception) -> str: + error_uuid = uuid4() + + if explanation := self.exception_explain_mapping.get(type(exc)): + if isinstance(explanation, str): + raw_message = explanation + else: + raw_message = explanation(exc) + + return f"{raw_message}. Идентификатор ошибки:{error_uuid}" + elif isinstance(exc, MessageValidationError): + return f"Ошибка валидации запроса: {exc}. Идентификатор ошибки:{error_uuid}" + else: + return strings.SOMETHING_GOES_WRONG.format(error_uuid=error_uuid) diff --git a/app/bot/error_handlers/internal_error.py b/app/presentation/bot/handlers/internal_error.py similarity index 85% rename from app/bot/error_handlers/internal_error.py rename to app/presentation/bot/handlers/internal_error.py index b631126..5c63cd5 100644 --- a/app/bot/error_handlers/internal_error.py +++ b/app/presentation/bot/handlers/internal_error.py @@ -5,7 +5,7 @@ from pybotx import Bot, BotShuttingDownError, IncomingMessage from app.logger import logger -from app.resources import strings +from app.presentation.bot.resources import strings async def internal_error_handler( @@ -14,8 +14,7 @@ async def internal_error_handler( error_uuid = uuid4() logger.exception(f"Internal error {error_uuid}:") - fsm_manager = getattr(message.state, "fsm", None) - if fsm_manager: + if fsm_manager := getattr(message.state, "fsm", None): await fsm_manager.drop_state() is_bot_active = not isinstance(exc, BotShuttingDownError) diff --git a/app/presentation/bot/handlers/sample_record.py b/app/presentation/bot/handlers/sample_record.py new file mode 100644 index 0000000..fa9ef23 --- /dev/null +++ b/app/presentation/bot/handlers/sample_record.py 
@@ -0,0 +1,30 @@ +from pybotx import Bot, IncomingMessage + +from app.application.use_cases.interfaces import ISampleRecordUseCases +from app.presentation.bot.handlers.command import BaseCommandHandler +from app.presentation.bot.validators.base import BotXJsonRequestParser +from app.presentation.bot.resources.strings import SAMPLE_RECORD_CREATED_ANSWER +from app.presentation.bot.schemas.sample_record import SampleRecordCreateRequestSchema + + +class CreateSampleRecordHandler(BaseCommandHandler): + def __init__( + self, + bot: Bot, + message: IncomingMessage, + use_cases: ISampleRecordUseCases, + ): + self._use_cases = use_cases + super().__init__(bot, message) + + exception_explain_mapping = {} + incoming_argument_parser = BotXJsonRequestParser(SampleRecordCreateRequestSchema) + + async def handle_logic( + self, + request_parameter: SampleRecordCreateRequestSchema, + ) -> None: + created_record = await self._use_cases.create_record(request_parameter) + await self._bot.answer_message( + SAMPLE_RECORD_CREATED_ANSWER.format(**created_record.dict()) + ) diff --git a/app/presentation/bot/resources/__init__.py b/app/presentation/bot/resources/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/resources/strings.py.jinja b/app/presentation/bot/resources/strings.py similarity index 85% rename from app/resources/strings.py.jinja rename to app/presentation/bot/resources/strings.py index 50360af..87ec35e 100644 --- a/app/resources/strings.py.jinja +++ b/app/presentation/bot/resources/strings.py @@ -1,4 +1,5 @@ """Text and templates for messages and api responses.""" + from typing import Any, Protocol, cast from mako.lookup import TemplateLookup @@ -31,13 +32,16 @@ def _format(**kwargs: Any) -> str: # noqa: WPS430 lookup = TemplateFormatterLookup( - directories=["app/resources/templates"], + directories=[ + "app/presentation/bot/resources/templates/common", + "app/presentation/bot/resources/templates/sample_record", + ], input_encoding="utf-8", 
strict_undefined=True, ) -BOT_PROJECT_NAME = "{{bot_project_name}}" -BOT_DISPLAY_NAME = "{{bot_display_name}}" +BOT_PROJECT_NAME = "bot_refactor" +BOT_DISPLAY_NAME = "gubarik_bot_refactor" CHAT_CREATED_TEMPLATE = lookup.get_template("chat_created.txt.mako") HELP_COMMAND_MESSAGE_TEMPLATE = lookup.get_template("help.txt.mako") @@ -52,7 +56,6 @@ def _format(**kwargs: Any) -> str: # noqa: WPS430 ] ) - OTHER_CTS_WITH_BOT_MENTION_WARNING = "\n".join( [ "Данный бот зарегистрирован на другом CTS.", @@ -61,3 +64,7 @@ def _format(**kwargs: Any) -> str: # noqa: WPS430 ) SOMETHING_GOES_WRONG = lookup.get_template("something_goes_wrong.txt.mako") + +SAMPLE_RECORD_CREATED_ANSWER = lookup.get_template( + "sample_record_created_answer.txt.mako" +) diff --git a/app/resources/templates/chat_created.txt.mako b/app/presentation/bot/resources/templates/common/chat_created.txt.mako similarity index 100% rename from app/resources/templates/chat_created.txt.mako rename to app/presentation/bot/resources/templates/common/chat_created.txt.mako diff --git a/app/resources/templates/help.txt.mako b/app/presentation/bot/resources/templates/common/help.txt.mako similarity index 100% rename from app/resources/templates/help.txt.mako rename to app/presentation/bot/resources/templates/common/help.txt.mako diff --git a/app/resources/templates/something_goes_wrong.txt.mako b/app/presentation/bot/resources/templates/common/something_goes_wrong.txt.mako similarity index 100% rename from app/resources/templates/something_goes_wrong.txt.mako rename to app/presentation/bot/resources/templates/common/something_goes_wrong.txt.mako diff --git a/app/presentation/bot/resources/templates/sample_record/sample_record_created_answer.txt.mako b/app/presentation/bot/resources/templates/sample_record/sample_record_created_answer.txt.mako new file mode 100644 index 0000000..ca77ab0 --- /dev/null +++ b/app/presentation/bot/resources/templates/sample_record/sample_record_created_answer.txt.mako @@ -0,0 +1,2 @@ +Запись 
успешно создана: +**id**: ${ id } **record_data**: ${ record_data }. diff --git a/app/presentation/bot/schemas/__init__.py b/app/presentation/bot/schemas/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/presentation/bot/schemas/sample_record.py b/app/presentation/bot/schemas/sample_record.py new file mode 100644 index 0000000..068666b --- /dev/null +++ b/app/presentation/bot/schemas/sample_record.py @@ -0,0 +1,40 @@ +"""Domains.""" + +from pydantic import BaseModel, Field + + +class SampleRecordResponseSchema(BaseModel): + """Base schema for sample record presentation.""" + + id: int + record_data: str + + class Config: + orm_mode = True + + +class SampleRecordResponseListSchema(BaseModel): + data: list[SampleRecordResponseSchema] + + class Config: + orm_mode = True + + +class SampleRecordCreateRequestSchema( + BaseModel, +): + record_data: str = Field(..., min_length=1) + + +class SampleRecordDeleteRequestSchema(BaseModel): + id: int + + +class SampleRecordUpdateRequestSchema(BaseModel): + id: int + record_data: str = Field(..., min_length=1) + + @classmethod + def _from_plain_message_data(cls, message_data: str): + record_id, record_data = message_data.split(" ") + return cls(id=record_id, record_data=record_data) diff --git a/app/presentation/bot/validators/__init__.py b/app/presentation/bot/validators/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/presentation/bot/validators/base.py b/app/presentation/bot/validators/base.py new file mode 100644 index 0000000..0bb217a --- /dev/null +++ b/app/presentation/bot/validators/base.py @@ -0,0 +1,32 @@ +from abc import ABC, abstractmethod +from typing import Generic, TypeVar, Any + +from orjson import orjson, JSONDecodeError +from pybotx import IncomingMessage +from pydantic import BaseModel, ValidationError + +from app.presentation.bot.validators.exceptions import MessageValidationError + +T = TypeVar("T", bound=BaseModel) + + +class IBotRequestParser(ABC, Generic[T]): + 
@abstractmethod + def parse(self, raw_input: Any) -> T: + """Parse raw input to model.""" + + +class BotXJsonRequestParser(IBotRequestParser[T]): + def __init__(self, model: type[T]): + self.model = model + + def parse(self, raw_input: IncomingMessage) -> T: + try: + message_json = orjson.loads(raw_input.argument) + return self.model.parse_obj(message_json) + except JSONDecodeError as ex: + raise MessageValidationError(str(ex)) from ex + except ValidationError as ex: + raise MessageValidationError( + ",".join(error["msg"] for error in ex.errors()) + ) from ex diff --git a/app/presentation/bot/validators/exceptions.py b/app/presentation/bot/validators/exceptions.py new file mode 100644 index 0000000..f2b48fa --- /dev/null +++ b/app/presentation/bot/validators/exceptions.py @@ -0,0 +1,2 @@ +class MessageValidationError(Exception): + """Base class for message validation errors.""" diff --git a/app/presentation/bot/validators/sample_record.py b/app/presentation/bot/validators/sample_record.py new file mode 100644 index 0000000..eeddd84 --- /dev/null +++ b/app/presentation/bot/validators/sample_record.py @@ -0,0 +1,21 @@ +from orjson import orjson, JSONDecodeError +from pybotx import IncomingMessage +from pydantic import ValidationError + +from app.presentation.bot.schemas.sample_record import SampleRecordCreateRequestSchema +from app.presentation.bot.validators.base import IBotRequestParser +from app.presentation.bot.validators.exceptions import MessageValidationError +from app.utils.exceptions_mapper import exception_mapper + + +class SampleRecordJsonCreateRequestValidator( + IBotRequestParser[SampleRecordCreateRequestSchema] +): + @exception_mapper( + catch_exceptions=(JSONDecodeError, ValidationError), + raise_exception=MessageValidationError, + ) + def parse(self, raw_input: IncomingMessage) -> SampleRecordCreateRequestSchema: + message_json = orjson.loads(raw_input.argument) + # TODO replace to model_validate during migration to pydantic 2.0 + return
SampleRecordCreateRequestSchema.parse_obj(message_json) diff --git a/app/presentation/dependencies/__init__.py b/app/presentation/dependencies/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/api/dependencies/healthcheck.py.jinja b/app/presentation/dependencies/healthcheck.py similarity index 90% rename from app/api/dependencies/healthcheck.py.jinja rename to app/presentation/dependencies/healthcheck.py index 1d4bd71..fa6d3e5 100644 --- a/app/api/dependencies/healthcheck.py.jinja +++ b/app/presentation/dependencies/healthcheck.py @@ -1,18 +1,14 @@ """Bot dependency for healthcheck.""" -{% if add_worker -%} from asyncio.exceptions import TimeoutError -{%- endif %} from typing import Optional from fastapi import Depends, Request from pybotx import Bot from sqlalchemy.sql import text -{% if add_worker -%} from app.settings import settings -from app.worker.worker import queue -{%- endif %} +from app.infrastructure.worker.worker import queue async def check_db_connection(request: Request) -> Optional[str]: @@ -41,7 +37,6 @@ async def check_redis_connection(request: Request) -> Optional[str]: check_redis_connection_dependency = Depends(check_redis_connection) -{%- if add_worker %} async def check_worker_status() -> Optional[str]: @@ -61,4 +56,3 @@ async def check_worker_status() -> Optional[str]: check_worker_status_dependency = Depends(check_worker_status) -{%- endif %} diff --git a/app/schemas/record.py b/app/schemas/record.py deleted file mode 100644 index 5f36c73..0000000 --- a/app/schemas/record.py +++ /dev/null @@ -1,14 +0,0 @@ -"""Domains.""" - -from pydantic import BaseModel - -from app.db.record.models import RecordModel - - -class Record(BaseModel): - id: int - record_data: str - - @classmethod - def from_orm(cls, record: RecordModel) -> "Record": - return cls(id=record.id, record_data=record.record_data) diff --git a/app/services/botx_user_search.py b/app/services/botx_user_search.py deleted file mode 100644 index 178c00d..0000000 --- 
a/app/services/botx_user_search.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Module for user searching on cts.""" - -from typing import Optional, Tuple -from uuid import UUID - -from pybotx import ( - Bot, - BotAccountWithSecret, - UserFromSearch, - UserKinds, - UserNotFoundError, -) - - -class UserIsBotError(Exception): - """Error for raising when found user is bot.""" - - -async def search_user_on_each_cts( - bot: Bot, huid: UUID -) -> Optional[Tuple[UserFromSearch, BotAccountWithSecret]]: - """Search user by huid on all cts on which bot is registered. - - return type: tuple of UserFromSearch instance and host. - """ - - for bot_account in bot.bot_accounts: - try: - user = await bot.search_user_by_huid(bot_id=bot_account.id, huid=huid) - except UserNotFoundError: - continue - - if user.user_kind == UserKinds.BOT: - raise UserIsBotError - - return user, bot_account - - return None diff --git a/app/services/healthcheck.py b/app/services/healthcheck.py index 2d5ac82..7608273 100644 --- a/app/services/healthcheck.py +++ b/app/services/healthcheck.py @@ -1,36 +1,17 @@ """Healthcheck service bot.""" -from dataclasses import dataclass -from typing import List, Literal, Optional, Union - -from pydantic import BaseModel - -from app.schemas.enums import HealthCheckStatuses - - -@dataclass -class HealthCheckServiceResult: - name: str - error: Optional[str] - - -class HealthCheckSucceed(BaseModel): - name: str - status: Literal[HealthCheckStatuses.OK] = HealthCheckStatuses.OK - - -class HealthCheckFailed(BaseModel): - name: str - error: str - status: Literal[HealthCheckStatuses.ERROR] = HealthCheckStatuses.ERROR - - -HealthCheckResult = Union[HealthCheckSucceed, HealthCheckFailed] - - -class HealthCheckResponse(BaseModel): - status: Optional[HealthCheckStatuses] - services: List[HealthCheckResult] +from typing import List + +from app.domain.entities.healthcheck import ( + HealthCheckServiceResult, + HealthCheckStatuses, +) +from app.presentation.api.schemas.healthcheck import ( + 
HealthCheckSucceed, + HealthCheckFailed, + HealthCheckResult, + HealthCheckResponse, +) class HealthCheckResponseBuilder: diff --git a/app/settings.py b/app/settings.py index 028c748..feebabe 100644 --- a/app/settings.py +++ b/app/settings.py @@ -66,7 +66,7 @@ def _build_credentials_from_string( # redis REDIS_DSN: str - CONNECTION_POOL_SIZE: int = 10 + REDIS_CONNECTION_POOL_SIZE: int = 10 # healthcheck WORKER_TIMEOUT_SEC: float = 4 diff --git a/app/utils/__init__.py b/app/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/utils/bot_exception_answer.py b/app/utils/bot_exception_answer.py new file mode 100644 index 0000000..dc0e56d --- /dev/null +++ b/app/utils/bot_exception_answer.py @@ -0,0 +1,53 @@ +"""Module for translation Exception errors to user friendly messages.""" + +import functools +import logging +from typing import Any, Callable + +from pybotx import Bot + +logger = logging.getLogger(__name__) + + +def _get_user_message( + exception_map: dict[type[Exception], str | Callable], exc: Exception +) -> str | None: + """Extract user message from exception mapping.""" + exception_message_to_user = exception_map.get(type(exc)) + if callable(exception_message_to_user): + return exception_message_to_user(exc) + return exception_message_to_user + + +def explain_exception_to_user( # noqa: WPS231 + mapping: dict[type[Exception], str | Callable[[Exception], str]], # noqa: WPS221 +) -> Callable: + """ + Decorate a function to catch specified exceptions and send a response to the user. + + For each caught exception, it responds using either a string message or a callable + response provided in the `exception_map`. + + :param mapping: A dictionary mapping exception types to either string messages + or callables that construct a response when invoked with the exception as an + argument. The keys must be subclasses of `Exception`, and the values must be + either strings or callables. 
+ """ + + def decorator(func: Callable) -> Callable: # type: ignore + @functools.wraps(func) + async def wrapper(bot: Bot, *args, **kwargs) -> Any: # type: ignore + try: + return await func( + bot, + *args, + **kwargs, + ) + except tuple(mapping.keys()) as exc: # noqa: WPS455 + if (message := _get_user_message(mapping, exc)) is not None: + await bot.answer_message(message) + raise # noqa: WPS220 + + return wrapper + + return decorator diff --git a/app/utils/exceptions_mapper.py b/app/utils/exceptions_mapper.py new file mode 100644 index 0000000..7f40e34 --- /dev/null +++ b/app/utils/exceptions_mapper.py @@ -0,0 +1,154 @@ +"""Decorators to rethrow and log exceptions.""" + +import asyncio +import inspect +from functools import wraps +from typing import Any, Callable, Tuple, Type, TypeVar, Union, cast + +from app.logger import logger + +FunctionType = TypeVar("FunctionType", bound=Callable[..., Any]) + +CatchExceptionClass = Union[Type[Exception], Tuple[Type[Exception], ...]] # noqa: WPS221 +T = TypeVar("T") # noqa:WPS111 +Decorator = Callable[[Callable[..., T]], Callable[..., T]] # noqa: WPS221 + + +def _get_error_message( + ex: Exception, + func: Callable[..., Any], + args: tuple[Any, ...] | None = None, + kwargs: dict[str, Any] | None = None, + use_short_error_message: bool = True, +) -> str: + """ + Generate an error message string based on the given exception and function context. + + :param ex: The exception that occurred. + :param func: The function in which the exception occurred. + :param args: Optional tuple of positional arguments passed to the function. + :param kwargs: Optional dictionary of keyword arguments passed to the function. + :param use_short_error_message: Flag to indicate whether to generate a brief + error message (True) or a detailed one (False). + :return: A formatted error message string representing the exception and its + context. 
+ """ + + if use_short_error_message: + return str(ex) + + error_context = [ + f"Error in function '{func.__module__}.{func.__qualname__}'", + f"Original exception: {ex.__class__.__name__}: {str(ex)}", # noqa: WPS237 + ] + + filtered_args = args[1:] if args and inspect.ismethod(func) else args + + if filtered_args: + args_str = ", ".join(str(arg)[:100] for arg in filtered_args) + error_context.append(f"Args: [{args_str}]") + + if kwargs: + kwargs_str = ", ".join( + f"{k}={str(v)[:100]}" # noqa: WPS237, WPS221 + for k, v in kwargs.items() # noqa: WPS111 + ) + error_context.append(f"Kwargs: {kwargs_str}") + + return "\n".join(error_context) + + +def _create_sync_wrapper( + func: Callable[..., Any], + catch_exceptions: CatchExceptionClass, + raise_exception: Type[Exception], + use_short_erroro_message: bool, + log_exception: bool, +) -> Callable[..., Any]: + """Create a synchronous wrapper function for exception mapping.""" + + @wraps(func) + def sync_wrapper(*args: Any, **kwargs: Any) -> Any: # noqa: WPS430 + try: + return func(*args, **kwargs) + except catch_exceptions as ex: + if log_exception: + logger.error(f"Error in {func.__name__}", exc_info=True) + error_message = _get_error_message( + ex, func, args, kwargs, use_short_erroro_message + ) + raise raise_exception(error_message) from ex + + return sync_wrapper + + +def _create_async_wrapper( + func: Callable[..., Any], + catch_exceptions: CatchExceptionClass, + raise_exception_class: Type[Exception], + use_short_error_message: bool, + log_exception: bool, +) -> Callable[..., Any]: + """Create an asynchronous wrapper function for exception mapping.""" + + @wraps(func) + async def async_wrapper(*args: Any, **kwargs: Any) -> Any: # noqa: WPS430 + try: + return await func(*args, **kwargs) + except catch_exceptions as ex: + if log_exception: + logger.error(f"Error in {func.__name__}", exc_info=True) + error_message = _get_error_message( + ex, func, args, kwargs, use_short_error_message + ) + raise 
raise_exception_class(error_message) from ex + + return async_wrapper + + +def exception_mapper( + raise_exception: Type[Exception], + catch_exceptions: CatchExceptionClass = Exception, + use_short_error_message: bool = False, + log_exception: bool = False, +) -> Decorator: + """ + Map exceptions from one to another, with optional logging and message adjustments. + + This function creates a decorator to wrap a function or coroutine and modify its + exception handling behavior. Specifically, it catches specified exceptions and + raises them as another exception type, with options to log the exception and + adjust whether a short error message is used. + + :param raise_exception: The exception type to raise instead of the caught exception. + :param catch_exceptions: The exception type or types to catch within the function. + :param use_short_error_message: Whether to use a shortened error message when + raising the new exception. + :param log_exception: Whether to log the exception when it is caught. + :return: A decorator for handling exceptions as per the specified parameters. + """ + + def decorator(func: Callable[..., Any]) -> Callable[..., Any]: + if asyncio.iscoroutinefunction(func): + return cast( + Callable[..., Any], + _create_async_wrapper( + func, + catch_exceptions, + raise_exception, + use_short_error_message, + log_exception, + ), + ) + return cast( + Callable[..., Any], + _create_sync_wrapper( + func, + catch_exceptions, + raise_exception, + use_short_error_message, + log_exception, + ), + ) + + return decorator diff --git a/copier.yaml b/copier.yaml deleted file mode 100644 index abfd6a1..0000000 --- a/copier.yaml +++ /dev/null @@ -1,53 +0,0 @@ -bot_project_name: - help: Git project name - default: bot-example - -bot_display_name: - help: Bot name in messenger - default: Bot Example - -bot_description: - multiline: true - help: Description for README.md. 
First line will be added pyproject.toml - default: TODO - -add_fsm: - help: Add pybotx-fsm to project? - type: bool - default: no - -add_worker: - help: Include tasks worker in `docker-compose.yml` - type: bool - default: yes - -from_ccsteam: - help: Are you from CCS team? - type: bool - default: yes - - -bot_name_underscored: - default: "{{bot_project_name|replace('-', '_')}}" - when: false - -bot_short_description: - default: "{{bot_description.split('\n')|first}}" - when: false - -has_private_dependencies: - type: bool - default: no - when: false - -_jinja_extensions: - - copier_templates_extensions.TemplateExtensionLoader - - extensions/context.py:ContextUpdater - -_exclude: - - ".git" - - ".github" - - "LICENSE.md" - - "copier.yml" - - "extensions" - - "README.md" diff --git a/docker-compose.yml.jinja b/docker-compose.yml similarity index 72% rename from docker-compose.yml.jinja rename to docker-compose.yml index abf033a..0b1698f 100644 --- a/docker-compose.yml.jinja +++ b/docker-compose.yml @@ -1,13 +1,13 @@ version: "3.8" services: - {{bot_project_name}}: + bot_refactor: build: . - container_name: {{bot_project_name}} + container_name: bot_refactor environment: &environment - BOT_CREDENTIALS=$BOT_CREDENTIALS # cts_host@secret_key@bot_id - - POSTGRES_DSN=postgres://postgres:postgres@{{bot_project_name}}-postgres/{{bot_name_underscored}}_db - - REDIS_DSN=redis://{{bot_project_name}}-redis/0 + - POSTGRES_DSN=postgres://postgres:postgres@bot_refactor-postgres/bot_refactor_db + - REDIS_DSN=redis://bot_refactor-redis/0 - DEBUG=true - SMARTLOG_DEBUG_HUIDS=$SMARTLOG_DEBUG_HUIDS ports: @@ -27,10 +27,9 @@ services: soft: 20000 hard: 40000 - {% if add_worker -%} - {{bot_project_name}}-worker: + bot_refactor-worker: build: . 
- container_name: {{bot_project_name}}-worker + container_name: bot_refactor-worker # '$$' prevents docker-compose from interpolating a value command: /bin/sh -c 'PYTHONPATH="$$PYTHONPATH:$$PWD" saq app.worker.worker.settings' environment: *environment @@ -39,14 +38,13 @@ services: logging: *logging ulimits: *ulimits - {% endif -%} postgres: image: postgres:15.3-alpine - container_name: {{bot_project_name}}-postgres + container_name: bot_refactor-postgres environment: - POSTGRES_USER=postgres - POSTGRES_PASSWORD=postgres - - POSTGRES_DB={{bot_name_underscored}}_db + - POSTGRES_DB=bot_refactor_db restart: always volumes: - ./.storages/postgresdata:/var/lib/postgresql/data @@ -54,7 +52,7 @@ services: redis: image: redis:7.0-alpine - container_name: {{bot_project_name}}-redis + container_name: bot_refactor-redis restart: always volumes: - ./.storages/redisdata:/data diff --git a/extensions/context.py b/extensions/context.py deleted file mode 100644 index e8e104b..0000000 --- a/extensions/context.py +++ /dev/null @@ -1,18 +0,0 @@ -import os - -from copier_templates_extensions import ContextHook -from copier.errors import UserMessageError - - -class ContextUpdater(ContextHook): - def hook(self, context): - context["CI"] = os.environ.get("CI", False) - - if context.get("from_ccsteam", False): - try: - context["PROD_SERVER_HOST"] = os.environ["PROD_SERVER_HOST"] - context["DEV_SERVER_HOST"] = os.environ["DEV_SERVER_HOST"] - except KeyError as exc: - raise UserMessageError(f"{exc.args[0]} is not provided in environment") - - return context diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index 709dbad..0000000 --- a/poetry.lock +++ /dev/null @@ -1,2346 +0,0 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. 
- -[[package]] -name = "add-trailing-comma" -version = "2.2.1" -description = "Automatically add trailing commas to calls and literals" -optional = false -python-versions = ">=3.6.1" -groups = ["dev"] -files = [ - {file = "add_trailing_comma-2.2.1-py2.py3-none-any.whl", hash = "sha256:981c18282b38ec5bceab80ef11485440334d2a274fcf3fce1f91692374b6d818"}, - {file = "add_trailing_comma-2.2.1.tar.gz", hash = "sha256:1640e97c4e85132633a6cb19b29e392dbaf9516292388afa685f7ef1012468e0"}, -] - -[package.dependencies] -tokenize-rt = ">=3.0.1" - -[[package]] -name = "aiocsv" -version = "1.2.5" -description = "Asynchronous CSV reading/writing" -optional = false -python-versions = ">=3.6, <4" -groups = ["main"] -files = [ - {file = "aiocsv-1.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8221a24220c3dfed5df80c87bb1e15d4863816954b5f1fca1dcfc14328c0131"}, - {file = "aiocsv-1.2.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:274df72bc8d0d11060c148523203f93cfa830dc9901a053d27032e4be0acb50e"}, - {file = "aiocsv-1.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c99bc418c869e23bbae52dc7c9f05c97b351460c848cb033d7b09b472d8d3e46"}, - {file = "aiocsv-1.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:f78600e29e9dd7c35711bc1a07176fc76921fc42a65da0135e1c213abb5dc6d5"}, - {file = "aiocsv-1.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a499b9b2edf618142e107a54f91536e9b9457f182d721e993c9ed14a8c7353b5"}, - {file = "aiocsv-1.2.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4681ae9b7f57423fa09718369e5ecd234889c58ba4f4c203f825e682afc240a"}, - {file = "aiocsv-1.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ceff6f3ea9e09b7c48736823261de7b5e7b9b9daec18862c25033ed5ef426590"}, - {file = "aiocsv-1.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:7064193c3d3145d763315118df1abe93a57c2b879ab62c73fc6da77009652e50"}, - {file = "aiocsv-1.2.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:f1768d44e15ab2f8789e1fd0fbe2feaf9168642cf65fcf760bf61fbc1a100ada"}, - {file = "aiocsv-1.2.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:733c820e8138b96b6110a44f7eebe3e4e9d9e1261ed67c6a93e8b964807c4346"}, - {file = "aiocsv-1.2.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:030d8f1bc33b7746ced9ff328ce881406e1c78a52f83ad26832c220d2dec8777"}, - {file = "aiocsv-1.2.5-cp36-cp36m-win_amd64.whl", hash = "sha256:790393c322db1be0353045b2db255e3147a0cab1ee78ecc1b14a1df7d8651460"}, - {file = "aiocsv-1.2.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a23c450fe3f6f3b96b826348a4b30cd884edfdc46a3c11ac30802e720cabafc2"}, - {file = "aiocsv-1.2.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1663f81741a2660d0669acb71e2f8c820c997f8761531018800d74cc669889a"}, - {file = "aiocsv-1.2.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:cf867084f3ef09075a605847fec54715eeccaf1510f0f49d0cd68bfd83535f99"}, - {file = "aiocsv-1.2.5-cp37-cp37m-win_amd64.whl", hash = "sha256:b8db0fc1269e9b432616eb5af90f896188b93ea6b971524a1216145070c43e4b"}, - {file = "aiocsv-1.2.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2a6b99770d044a59de244ad2f0c5de8461a7bbf689277a46f9251e95e0909c88"}, - {file = "aiocsv-1.2.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f84f92ed95f4fb0b59e377f6b400a2b252b613c8319c1005add1b33ab052fd77"}, - {file = "aiocsv-1.2.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:72a302a7f6f80fabfc3c36273d8cee2782245bd1cceb6cc8bbd1ea611905498b"}, - {file = "aiocsv-1.2.5-cp38-cp38-win_amd64.whl", hash = "sha256:9fa70db03e3c4dc053c2fc469c2d7cfaedfcf254bd9ad90bdf08ba021fc0909e"}, - {file = "aiocsv-1.2.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4c285f01e18fdd47381cb72a8e18f2ce02ecfe36f02976f038ef9c5e0fbc770"}, - {file = "aiocsv-1.2.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3c9064f0ed61b63af0ca26d32b78c790e8a9f5b2e2907b04b6021b2c7de6e1e3"}, - {file = "aiocsv-1.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c7a959f40131bf8cc598e93e542fc341a56062e39254ca35f4f61fc3d17d01aa"}, - {file = "aiocsv-1.2.5-cp39-cp39-win_amd64.whl", hash = "sha256:53544ba47224d67f4123bae894de77fd89a4472b98873da86d5814d44f7c4a41"}, - {file = "aiocsv-1.2.5.tar.gz", hash = "sha256:807a61335ff3b461e84abcdb68445207d1dbd518d046f570a0048f4fcb0bb8ec"}, -] - -[[package]] -name = "aiofiles" -version = "23.2.1" -description = "File support for asyncio." -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "aiofiles-23.2.1-py3-none-any.whl", hash = "sha256:19297512c647d4b27a2cf7c34caa7e405c0d60b5560618a29a9fe027b18b0107"}, - {file = "aiofiles-23.2.1.tar.gz", hash = "sha256:84ec2218d8419404abcb9f0c02df3f34c6e0a68ed41072acfb1cef5cbc29051a"}, -] - -[[package]] -name = "alembic" -version = "1.13.1" -description = "A database migration tool for SQLAlchemy." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, - {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, -] - -[package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.9\""} -importlib-resources = {version = "*", markers = "python_version < \"3.9\""} -Mako = "*" -SQLAlchemy = ">=1.3.0" -typing-extensions = ">=4" - -[package.extras] -tz = ["backports.zoneinfo ; python_version < \"3.9\""] - -[[package]] -name = "anyio" -version = "4.3.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.8" -groups = ["main", "dev"] -files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} - -[package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\""] -trio = ["trio (>=0.23)"] - -[[package]] -name = "asgi-lifespan" -version = "1.0.1" -description = "Programmatic startup/shutdown of ASGI apps." 
-optional = false -python-versions = ">=3.6" -groups = ["dev"] -files = [ - {file = "asgi-lifespan-1.0.1.tar.gz", hash = "sha256:9a33e7da2073c4764bc79bd6136501d6c42f60e3d2168ba71235e84122eadb7f"}, - {file = "asgi_lifespan-1.0.1-py3-none-any.whl", hash = "sha256:9ea969dc5eb5cf08e52c08dce6f61afcadd28112e72d81c972b1d8eb8691ab53"}, -] - -[package.dependencies] -sniffio = "*" - -[[package]] -name = "astor" -version = "0.8.1" -description = "Read/rewrite/write Python ASTs" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -groups = ["dev"] -files = [ - {file = "astor-0.8.1-py2.py3-none-any.whl", hash = "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5"}, - {file = "astor-0.8.1.tar.gz", hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e"}, -] - -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - -[[package]] -name = "asyncpg" -version = "0.29.0" -description = "An asyncio PostgreSQL driver" -optional = false -python-versions = ">=3.8.0" -groups = ["main"] -files = [ - {file = "asyncpg-0.29.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72fd0ef9f00aeed37179c62282a3d14262dbbafb74ec0ba16e1b1864d8a12169"}, - {file = "asyncpg-0.29.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52e8f8f9ff6e21f9b39ca9f8e3e33a5fcdceaf5667a8c5c32bee158e313be385"}, - {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e6823a7012be8b68301342ba33b4740e5a166f6bbda0aee32bc01638491a22"}, - {file = 
"asyncpg-0.29.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:746e80d83ad5d5464cfbf94315eb6744222ab00aa4e522b704322fb182b83610"}, - {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ff8e8109cd6a46ff852a5e6bab8b0a047d7ea42fcb7ca5ae6eaae97d8eacf397"}, - {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97eb024685b1d7e72b1972863de527c11ff87960837919dac6e34754768098eb"}, - {file = "asyncpg-0.29.0-cp310-cp310-win32.whl", hash = "sha256:5bbb7f2cafd8d1fa3e65431833de2642f4b2124be61a449fa064e1a08d27e449"}, - {file = "asyncpg-0.29.0-cp310-cp310-win_amd64.whl", hash = "sha256:76c3ac6530904838a4b650b2880f8e7af938ee049e769ec2fba7cd66469d7772"}, - {file = "asyncpg-0.29.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4900ee08e85af01adb207519bb4e14b1cae8fd21e0ccf80fac6aa60b6da37b4"}, - {file = "asyncpg-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a65c1dcd820d5aea7c7d82a3fdcb70e096f8f70d1a8bf93eb458e49bfad036ac"}, - {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b52e46f165585fd6af4863f268566668407c76b2c72d366bb8b522fa66f1870"}, - {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc600ee8ef3dd38b8d67421359779f8ccec30b463e7aec7ed481c8346decf99f"}, - {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:039a261af4f38f949095e1e780bae84a25ffe3e370175193174eb08d3cecab23"}, - {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6feaf2d8f9138d190e5ec4390c1715c3e87b37715cd69b2c3dfca616134efd2b"}, - {file = "asyncpg-0.29.0-cp311-cp311-win32.whl", hash = "sha256:1e186427c88225ef730555f5fdda6c1812daa884064bfe6bc462fd3a71c4b675"}, - {file = "asyncpg-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfe73ffae35f518cfd6e4e5f5abb2618ceb5ef02a2365ce64f132601000587d3"}, - {file = "asyncpg-0.29.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:6011b0dc29886ab424dc042bf9eeb507670a3b40aece3439944006aafe023178"}, - {file = "asyncpg-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b544ffc66b039d5ec5a7454667f855f7fec08e0dfaf5a5490dfafbb7abbd2cfb"}, - {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d84156d5fb530b06c493f9e7635aa18f518fa1d1395ef240d211cb563c4e2364"}, - {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54858bc25b49d1114178d65a88e48ad50cb2b6f3e475caa0f0c092d5f527c106"}, - {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bde17a1861cf10d5afce80a36fca736a86769ab3579532c03e45f83ba8a09c59"}, - {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:37a2ec1b9ff88d8773d3eb6d3784dc7e3fee7756a5317b67f923172a4748a175"}, - {file = "asyncpg-0.29.0-cp312-cp312-win32.whl", hash = "sha256:bb1292d9fad43112a85e98ecdc2e051602bce97c199920586be83254d9dafc02"}, - {file = "asyncpg-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:2245be8ec5047a605e0b454c894e54bf2ec787ac04b1cb7e0d3c67aa1e32f0fe"}, - {file = "asyncpg-0.29.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0009a300cae37b8c525e5b449233d59cd9868fd35431abc470a3e364d2b85cb9"}, - {file = "asyncpg-0.29.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cad1324dbb33f3ca0cd2074d5114354ed3be2b94d48ddfd88af75ebda7c43cc"}, - {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:012d01df61e009015944ac7543d6ee30c2dc1eb2f6b10b62a3f598beb6531548"}, - {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000c996c53c04770798053e1730d34e30cb645ad95a63265aec82da9093d88e7"}, - {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e0bfe9c4d3429706cf70d3249089de14d6a01192d617e9093a8e941fea8ee775"}, - {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:642a36eb41b6313ffa328e8a5c5c2b5bea6ee138546c9c3cf1bffaad8ee36dd9"}, - {file = "asyncpg-0.29.0-cp38-cp38-win32.whl", hash = "sha256:a921372bbd0aa3a5822dd0409da61b4cd50df89ae85150149f8c119f23e8c408"}, - {file = "asyncpg-0.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:103aad2b92d1506700cbf51cd8bb5441e7e72e87a7b3a2ca4e32c840f051a6a3"}, - {file = "asyncpg-0.29.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5340dd515d7e52f4c11ada32171d87c05570479dc01dc66d03ee3e150fb695da"}, - {file = "asyncpg-0.29.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e17b52c6cf83e170d3d865571ba574577ab8e533e7361a2b8ce6157d02c665d3"}, - {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f100d23f273555f4b19b74a96840aa27b85e99ba4b1f18d4ebff0734e78dc090"}, - {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48e7c58b516057126b363cec8ca02b804644fd012ef8e6c7e23386b7d5e6ce83"}, - {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f9ea3f24eb4c49a615573724d88a48bd1b7821c890c2effe04f05382ed9e8810"}, - {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8d36c7f14a22ec9e928f15f92a48207546ffe68bc412f3be718eedccdf10dc5c"}, - {file = "asyncpg-0.29.0-cp39-cp39-win32.whl", hash = "sha256:797ab8123ebaed304a1fad4d7576d5376c3a006a4100380fb9d517f0b59c1ab2"}, - {file = "asyncpg-0.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:cce08a178858b426ae1aa8409b5cc171def45d4293626e7aa6510696d46decd8"}, - {file = "asyncpg-0.29.0.tar.gz", hash = "sha256:d1c49e1f44fffafd9a55e1a9b101590859d881d639ea2922516f5d9c512d354e"}, -] - -[package.dependencies] -async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.12.0\""} - -[package.extras] -docs = ["Sphinx (>=5.3.0,<5.4.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["flake8 (>=6.1,<7.0)", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.12.0\""] 
- -[[package]] -name = "attrs" -version = "23.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6) ; platform_python_implementation == \"CPython\" and python_version >= \"3.8\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.8\""] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] - -[[package]] -name = "autoflake" -version = "1.4" -description = "Removes unused imports and unused variables" -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "autoflake-1.4.tar.gz", hash = "sha256:61a353012cff6ab94ca062823d1fb2f692c4acda51c76ff83a8d77915fba51ea"}, -] - -[package.dependencies] -pyflakes = ">=1.1.0" - -[[package]] -name = "bandit" -version = "1.7.2" -description = "Security oriented static analyser for python code." 
-optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "bandit-1.7.2-py3-none-any.whl", hash = "sha256:e20402cadfd126d85b68ed4c8862959663c8c372dbbb1fca8f8e2c9f55a067ec"}, - {file = "bandit-1.7.2.tar.gz", hash = "sha256:6d11adea0214a43813887bfe71a377b5a9955e4c826c8ffd341b494e3ab25260"}, -] - -[package.dependencies] -colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} -GitPython = ">=1.0.1" -PyYAML = ">=5.3.1" -stevedore = ">=1.20.0" - -[package.extras] -test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] -toml = ["toml"] -yaml = ["PyYAML"] - -[[package]] -name = "black" -version = "22.3.0" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.6.2" -groups = ["dev"] -files = [ - {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"}, - {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"}, - {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"}, - {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"}, - {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"}, - {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"}, - {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"}, - {file = 
"black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"}, - {file = "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"}, - {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"}, - {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"}, - {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"}, - {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"}, - {file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"}, - {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"}, - {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"}, - {file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"}, - {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"}, - {file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"}, - {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"}, - {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"}, - {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"}, - {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "certifi" -version = "2024.2.2" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -groups = ["main", "dev"] -files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-optional = false -python-versions = ">=3.7.0" -groups = ["dev"] -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", 
hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -groups = ["main", "dev"] -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main", "dev"] -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] -markers = {main = "sys_platform == \"win32\" or platform_system == \"Windows\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\""} - -[[package]] -name = "coverage" -version = "7.4.4" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, - {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, - {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, - {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, - {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, - {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash 
= "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, - {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, - {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, - {file = 
"coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, - {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, - {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, - {file = 
"coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, - {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, - {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, - {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, - {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, -] - -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} - -[package.extras] -toml = ["tomli ; python_full_version <= \"3.11.0a6\""] - -[[package]] -name = "croniter" -version = "2.0.3" -description = "croniter provides iteration for datetime object with cron like format" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["main"] -files = [ - {file = "croniter-2.0.3-py2.py3-none-any.whl", hash = "sha256:84dc95b2eb6760144cc01eca65a6b9cc1619c93b2dc37d8a27f4319b3eb740de"}, - {file = "croniter-2.0.3.tar.gz", hash = "sha256:28763ad39c404e159140874f08010cfd8a18f4c2a7cea1ce73e9506a4380cfc1"}, -] - -[package.dependencies] -python-dateutil = "*" -pytz = ">2021.1" - -[[package]] -name = "darglint" -version = "1.8.1" -description = "A utility for ensuring Google-style docstrings stay up to date with the source code." 
-optional = false -python-versions = ">=3.6,<4.0" -groups = ["dev"] -files = [ - {file = "darglint-1.8.1-py3-none-any.whl", hash = "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d"}, - {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"}, -] - -[[package]] -name = "docutils" -version = "0.20.1" -description = "Docutils -- Python Documentation Utilities" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, - {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, -] - -[[package]] -name = "eradicate" -version = "2.3.0" -description = "Removes commented-out code." -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "eradicate-2.3.0-py3-none-any.whl", hash = "sha256:2b29b3dd27171f209e4ddd8204b70c02f0682ae95eecb353f10e8d72b149c63e"}, - {file = "eradicate-2.3.0.tar.gz", hash = "sha256:06df115be3b87d0fc1c483db22a2ebb12bcf40585722810d809cc770f5031c37"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.0" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -groups = ["main", "dev"] -markers = "python_version < \"3.11\"" -files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "fastapi" -version = "0.110.1" -description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = 
"fastapi-0.110.1-py3-none-any.whl", hash = "sha256:5df913203c482f820d31f48e635e022f8cbfe7350e4830ef05a3163925b1addc"}, - {file = "fastapi-0.110.1.tar.gz", hash = "sha256:6feac43ec359dfe4f45b2c18ec8c94edb8dc2dfc461d417d9e626590c071baad"}, -] - -[package.dependencies] -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.37.2,<0.38.0" -typing-extensions = ">=4.8.0" - -[package.extras] -all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] - -[[package]] -name = "flake8" -version = "4.0.1" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.6" -groups = ["dev"] -files = [ - {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, - {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, -] - -[package.dependencies] -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.8.0,<2.9.0" -pyflakes = ">=2.4.0,<2.5.0" - -[[package]] -name = "flake8-bandit" -version = "2.1.2" -description = "Automated security testing with bandit and flake8." 
-optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "flake8_bandit-2.1.2.tar.gz", hash = "sha256:687fc8da2e4a239b206af2e54a90093572a60d0954f3054e23690739b0b0de3b"}, -] - -[package.dependencies] -bandit = "*" -flake8 = "*" -flake8-polyfill = "*" -pycodestyle = "*" - -[[package]] -name = "flake8-broken-line" -version = "0.4.0" -description = "Flake8 plugin to forbid backslashes for line breaks" -optional = false -python-versions = ">=3.6,<4.0" -groups = ["dev"] -files = [ - {file = "flake8-broken-line-0.4.0.tar.gz", hash = "sha256:771aab5aa0997666796fed249d0e48e6c01cdfeca8c95521eea28a38b7ced4c7"}, - {file = "flake8_broken_line-0.4.0-py3-none-any.whl", hash = "sha256:e9c522856862239a2c7ef2c1de0276fa598572aa864bd4e9c7efc2a827538515"}, -] - -[package.dependencies] -flake8 = ">=3.5,<5" - -[[package]] -name = "flake8-bugbear" -version = "21.11.29" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." -optional = false -python-versions = ">=3.6" -groups = ["dev"] -files = [ - {file = "flake8-bugbear-21.11.29.tar.gz", hash = "sha256:8b04cb2fafc6a78e1a9d873bd3988e4282f7959bb6b0d7c1ae648ec09b937a7b"}, - {file = "flake8_bugbear-21.11.29-py36.py37.py38-none-any.whl", hash = "sha256:179e41ddae5de5e3c20d1f61736feeb234e70958fbb56ab3c28a67739c8e9a82"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -flake8 = ">=3.0.0" - -[package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"] - -[[package]] -name = "flake8-commas" -version = "2.1.0" -description = "Flake8 lint for trailing commas." 
-optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "flake8-commas-2.1.0.tar.gz", hash = "sha256:940441ab8ee544df564ae3b3f49f20462d75d5c7cac2463e0b27436e2050f263"}, - {file = "flake8_commas-2.1.0-py2.py3-none-any.whl", hash = "sha256:ebb96c31e01d0ef1d0685a21f3f0e2f8153a0381430e748bf0bbbb5d5b453d54"}, -] - -[package.dependencies] -flake8 = ">=2" - -[[package]] -name = "flake8-comprehensions" -version = "3.14.0" -description = "A flake8 plugin to help you write better list/set/dict comprehensions." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "flake8_comprehensions-3.14.0-py3-none-any.whl", hash = "sha256:7b9d07d94aa88e62099a6d1931ddf16c344d4157deedf90fe0d8ee2846f30e97"}, - {file = "flake8_comprehensions-3.14.0.tar.gz", hash = "sha256:81768c61bfc064e1a06222df08a2580d97de10cb388694becaf987c331c6c0cf"}, -] - -[package.dependencies] -flake8 = ">=3.0,<3.2.0 || >3.2.0" - -[[package]] -name = "flake8-debugger" -version = "4.1.2" -description = "ipdb/pdb statement checker plugin for flake8" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "flake8-debugger-4.1.2.tar.gz", hash = "sha256:52b002560941e36d9bf806fca2523dc7fb8560a295d5f1a6e15ac2ded7a73840"}, - {file = "flake8_debugger-4.1.2-py3-none-any.whl", hash = "sha256:0a5e55aeddcc81da631ad9c8c366e7318998f83ff00985a49e6b3ecf61e571bf"}, -] - -[package.dependencies] -flake8 = ">=3.0" -pycodestyle = "*" - -[[package]] -name = "flake8-docstrings" -version = "1.7.0" -description = "Extension for flake8 which uses pydocstyle to check docstrings" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, - {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, -] - -[package.dependencies] -flake8 = ">=3" 
-pydocstyle = ">=2.1" - -[[package]] -name = "flake8-eradicate" -version = "1.4.0" -description = "Flake8 plugin to find commented out code" -optional = false -python-versions = ">=3.7,<4.0" -groups = ["dev"] -files = [ - {file = "flake8-eradicate-1.4.0.tar.gz", hash = "sha256:3088cfd6717d1c9c6c3ac45ef2e5f5b6c7267f7504d5a74b781500e95cb9c7e1"}, - {file = "flake8_eradicate-1.4.0-py3-none-any.whl", hash = "sha256:e3bbd0871be358e908053c1ab728903c114f062ba596b4d40c852fd18f473d56"}, -] - -[package.dependencies] -attrs = "*" -eradicate = ">=2.0,<3.0" -flake8 = ">=3.5,<6" - -[[package]] -name = "flake8-isort" -version = "4.2.0" -description = "flake8 plugin that integrates isort ." -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "flake8-isort-4.2.0.tar.gz", hash = "sha256:26571500cd54976bbc0cf1006ffbcd1a68dd102f816b7a1051b219616ba9fee0"}, - {file = "flake8_isort-4.2.0-py3-none-any.whl", hash = "sha256:5b87630fb3719bf4c1833fd11e0d9534f43efdeba524863e15d8f14a7ef6adbf"}, -] - -[package.dependencies] -flake8 = ">=3.2.1,<6" -isort = ">=4.3.5,<6" - -[package.extras] -test = ["pytest-cov"] - -[[package]] -name = "flake8-polyfill" -version = "1.0.2" -description = "Polyfill package for Flake8 plugins" -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, - {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"}, -] - -[package.dependencies] -flake8 = "*" - -[[package]] -name = "flake8-quotes" -version = "3.4.0" -description = "Flake8 lint for quotes." 
-optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "flake8-quotes-3.4.0.tar.gz", hash = "sha256:aad8492fb710a2d3eabe68c5f86a1428de650c8484127e14c43d0504ba30276c"}, -] - -[package.dependencies] -flake8 = "*" -setuptools = "*" - -[[package]] -name = "flake8-rst-docstrings" -version = "0.2.7" -description = "Python docstring reStructuredText (RST) validator" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "flake8-rst-docstrings-0.2.7.tar.gz", hash = "sha256:2740067ab9237559dd45a3434d8c987792c7b259ca563621a3b95efe201f5382"}, - {file = "flake8_rst_docstrings-0.2.7-py3-none-any.whl", hash = "sha256:5d56075dce360bcc9c6775bfe7cb431aa395de600ca7e8d40580a28d50b2a803"}, -] - -[package.dependencies] -flake8 = ">=3.0.0" -pygments = "*" -restructuredtext-lint = "*" - -[[package]] -name = "flake8-string-format" -version = "0.3.0" -description = "string format checker, plugin for flake8" -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"}, - {file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"}, -] - -[package.dependencies] -flake8 = "*" - -[[package]] -name = "gitdb" -version = "4.0.11" -description = "Git Object Database" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, - {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, -] - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.43" -description = "GitPython is a Python library used to interact with Git repositories" -optional = false -python-versions = ">=3.7" -groups = ["dev"] 
-files = [ - {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"}, - {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"}, -] - -[package.dependencies] -gitdb = ">=4.0.1,<5" - -[package.extras] -doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] -test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock ; python_version < \"3.8\"", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions ; python_version < \"3.11\""] - -[[package]] -name = "greenlet" -version = "3.0.3" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -groups = ["main"] -markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = 
"greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = 
"greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = 
"greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "gunicorn" -version = "21.2.0" -description = "WSGI HTTP Server for UNIX" -optional = false -python-versions = ">=3.5" -groups = ["main"] -files = [ - {file = "gunicorn-21.2.0-py3-none-any.whl", hash = "sha256:3213aa5e8c24949e792bcacfc176fef362e7aac80b76c56f6b5122bf350722f0"}, - {file = "gunicorn-21.2.0.tar.gz", hash = "sha256:88ec8bff1d634f98e61b9f65bc4bf3cd918a90806c6f5c48bc5603849ec81033"}, -] - -[package.dependencies] -packaging = "*" - -[package.extras] -eventlet = ["eventlet (>=0.24.1)"] -gevent = ["gevent (>=1.4.0)"] -setproctitle = ["setproctitle"] -tornado = ["tornado (>=0.2)"] - -[[package]] -name = "h11" -version = "0.16.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.8" -groups = ["main", "dev"] -files = [ - {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, - {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, -] - -[[package]] -name = "hiredis" -version = "2.3.2" -description = "Python wrapper for hiredis" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "hiredis-2.3.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:742093f33d374098aa21c1696ac6e4874b52658c870513a297a89265a4d08fe5"}, - {file = 
"hiredis-2.3.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:9e14fb70ca4f7efa924f508975199353bf653f452e4ef0a1e47549e208f943d7"}, - {file = "hiredis-2.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d7302b4b17fcc1cc727ce84ded7f6be4655701e8d58744f73b09cb9ed2b13df"}, - {file = "hiredis-2.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed63e8b75c193c5e5a8288d9d7b011da076cc314fafc3bfd59ec1d8a750d48c8"}, - {file = "hiredis-2.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b4edee59dc089bc3948f4f6fba309f51aa2ccce63902364900aa0a553a85e97"}, - {file = "hiredis-2.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6481c3b7673a86276220140456c2a6fbfe8d1fb5c613b4728293c8634134824"}, - {file = "hiredis-2.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684840b014ce83541a087fcf2d48227196576f56ae3e944d4dfe14c0a3e0ccb7"}, - {file = "hiredis-2.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c4c0bcf786f0eac9593367b6279e9b89534e008edbf116dcd0de956524702c8"}, - {file = "hiredis-2.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66ab949424ac6504d823cba45c4c4854af5c59306a1531edb43b4dd22e17c102"}, - {file = "hiredis-2.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:322c668ee1c12d6c5750a4b1057e6b4feee2a75b3d25d630922a463cfe5e7478"}, - {file = "hiredis-2.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bfa73e3f163c6e8b2ec26f22285d717a5f77ab2120c97a2605d8f48b26950dac"}, - {file = "hiredis-2.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:7f39f28ffc65de577c3bc0c7615f149e35bc927802a0f56e612db9b530f316f9"}, - {file = "hiredis-2.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:55ce31bf4711da879b96d511208efb65a6165da4ba91cb3a96d86d5a8d9d23e6"}, - {file = "hiredis-2.3.2-cp310-cp310-win32.whl", hash = 
"sha256:3dd63d0bbbe75797b743f35d37a4cca7ca7ba35423a0de742ae2985752f20c6d"}, - {file = "hiredis-2.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:ea002656a8d974daaf6089863ab0a306962c8b715db6b10879f98b781a2a5bf5"}, - {file = "hiredis-2.3.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:adfbf2e9c38b77d0db2fb32c3bdaea638fa76b4e75847283cd707521ad2475ef"}, - {file = "hiredis-2.3.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:80b02d27864ebaf9b153d4b99015342382eeaed651f5591ce6f07e840307c56d"}, - {file = "hiredis-2.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd40d2e2f82a483de0d0a6dfd8c3895a02e55e5c9949610ecbded18188fd0a56"}, - {file = "hiredis-2.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfa904045d7cebfb0f01dad51352551cce1d873d7c3f80c7ded7d42f8cac8f89"}, - {file = "hiredis-2.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:28bd184b33e0dd6d65816c16521a4ba1ffbe9ff07d66873c42ea4049a62fed83"}, - {file = "hiredis-2.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f70481213373d44614148f0f2e38e7905be3f021902ae5167289413196de4ba4"}, - {file = "hiredis-2.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb8797b528c1ff81eef06713623562b36db3dafa106b59f83a6468df788ff0d1"}, - {file = "hiredis-2.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02fc71c8333586871602db4774d3a3e403b4ccf6446dc4603ec12df563127cee"}, - {file = "hiredis-2.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0da56915bda1e0a49157191b54d3e27689b70960f0685fdd5c415dacdee2fbed"}, - {file = "hiredis-2.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e2674a5a3168349435b08fa0b82998ed2536eb9acccf7087efe26e4cd088a525"}, - {file = "hiredis-2.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:dc1c3fd49930494a67dcec37d0558d99d84eca8eb3f03b17198424538f2608d7"}, - {file = 
"hiredis-2.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:14c7b43205e515f538a9defb4e411e0f0576caaeeda76bb9993ed505486f7562"}, - {file = "hiredis-2.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7bac7e02915b970c3723a7a7c5df4ba7a11a3426d2a3f181e041aa506a1ff028"}, - {file = "hiredis-2.3.2-cp311-cp311-win32.whl", hash = "sha256:63a090761ddc3c1f7db5e67aa4e247b4b3bb9890080bdcdadd1b5200b8b89ac4"}, - {file = "hiredis-2.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:70d226ab0306a5b8d408235cabe51d4bf3554c9e8a72d53ce0b3c5c84cf78881"}, - {file = "hiredis-2.3.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:5c614552c6bd1d0d907f448f75550f6b24fb56cbfce80c094908b7990cad9702"}, - {file = "hiredis-2.3.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9c431431abf55b64347ddc8df68b3ef840269cb0aa5bc2d26ad9506eb4b1b866"}, - {file = "hiredis-2.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a45857e87e9d2b005e81ddac9d815a33efd26ec67032c366629f023fe64fb415"}, - {file = "hiredis-2.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e138d141ec5a6ec800b6d01ddc3e5561ce1c940215e0eb9960876bfde7186aae"}, - {file = "hiredis-2.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:387f655444d912a963ab68abf64bf6e178a13c8e4aa945cb27388fd01a02e6f1"}, - {file = "hiredis-2.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4852f4bf88f0e2d9bdf91279892f5740ed22ae368335a37a52b92a5c88691140"}, - {file = "hiredis-2.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d711c107e83117129b7f8bd08e9820c43ceec6204fff072a001fd82f6d13db9f"}, - {file = "hiredis-2.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92830c16885f29163e1c2da1f3c1edb226df1210ec7e8711aaabba3dd0d5470a"}, - {file = "hiredis-2.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:16b01d9ceae265d4ab9547be0cd628ecaff14b3360357a9d30c029e5ae8b7e7f"}, - {file = "hiredis-2.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5986fb5f380169270a0293bebebd95466a1c85010b4f1afc2727e4d17c452512"}, - {file = "hiredis-2.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:49532d7939cc51f8e99efc326090c54acf5437ed88b9c904cc8015b3c4eda9c9"}, - {file = "hiredis-2.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8f34801b251ca43ad70691fb08b606a2e55f06b9c9fb1fc18fd9402b19d70f7b"}, - {file = "hiredis-2.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7298562a49d95570ab1c7fc4051e72824c6a80e907993a21a41ba204223e7334"}, - {file = "hiredis-2.3.2-cp312-cp312-win32.whl", hash = "sha256:e1d86b75de787481b04d112067a4033e1ecfda2a060e50318a74e4e1c9b2948c"}, - {file = "hiredis-2.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:6dbfe1887ffa5cf3030451a56a8f965a9da2fa82b7149357752b67a335a05fc6"}, - {file = "hiredis-2.3.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:4fc242e9da4af48714199216eb535b61e8f8d66552c8819e33fc7806bd465a09"}, - {file = "hiredis-2.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e81aa4e9a1fcf604c8c4b51aa5d258e195a6ba81efe1da82dea3204443eba01c"}, - {file = "hiredis-2.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419780f8583ddb544ffa86f9d44a7fcc183cd826101af4e5ffe535b6765f5f6b"}, - {file = "hiredis-2.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6871306d8b98a15e53a5f289ec1106a3a1d43e7ab6f4d785f95fcef9a7bd9504"}, - {file = "hiredis-2.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb0b35b63717ef1e41d62f4f8717166f7c6245064957907cfe177cc144357c"}, - {file = "hiredis-2.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c490191fa1218851f8a80c5a21a05a6f680ac5aebc2e688b71cbfe592f8fec6"}, - {file = 
"hiredis-2.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4baf4b579b108062e91bd2a991dc98b9dc3dc06e6288db2d98895eea8acbac22"}, - {file = "hiredis-2.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e627d8ef5e100556e09fb44c9571a432b10e11596d3c4043500080ca9944a91a"}, - {file = "hiredis-2.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:ba3dc0af0def8c21ce7d903c59ea1e8ec4cb073f25ece9edaec7f92a286cd219"}, - {file = "hiredis-2.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:56e9b7d6051688ca94e68c0c8a54a243f8db841911b683cedf89a29d4de91509"}, - {file = "hiredis-2.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:380e029bb4b1d34cf560fcc8950bf6b57c2ef0c9c8b7c7ac20b7c524a730fadd"}, - {file = "hiredis-2.3.2-cp37-cp37m-win32.whl", hash = "sha256:948d9f2ca7841794dd9b204644963a4bcd69ced4e959b0d4ecf1b8ce994a6daa"}, - {file = "hiredis-2.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:cfa67afe2269b2d203cd1389c00c5bc35a287cd57860441fb0e53b371ea6a029"}, - {file = "hiredis-2.3.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:bcbe47da0aebc00a7cfe3ebdcff0373b86ce2b1856251c003e3d69c9db44b5a7"}, - {file = "hiredis-2.3.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f2c9c0d910dd3f7df92f0638e7f65d8edd7f442203caf89c62fc79f11b0b73f8"}, - {file = "hiredis-2.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:01b6c24c0840ac7afafbc4db236fd55f56a9a0919a215c25a238f051781f4772"}, - {file = "hiredis-2.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1f567489f422d40c21e53212a73bef4638d9f21043848150f8544ef1f3a6ad1"}, - {file = "hiredis-2.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:28adecb308293e705e44087a1c2d557a816f032430d8a2a9bb7873902a1c6d48"}, - {file = "hiredis-2.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27e9619847e9dc70b14b1ad2d0fb4889e7ca18996585c3463cff6c951fd6b10b"}, - {file = 
"hiredis-2.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a0026cfbf29f07649b0e34509091a2a6016ff8844b127de150efce1c3aff60b"}, - {file = "hiredis-2.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9de7586522e5da6bee83c9cf0dcccac0857a43249cb4d721a2e312d98a684d1"}, - {file = "hiredis-2.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e58494f282215fc461b06709e9a195a24c12ba09570f25bdf9efb036acc05101"}, - {file = "hiredis-2.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3a32b4b76d46f1eb42b24a918d51d8ca52411a381748196241d59a895f7c5c"}, - {file = "hiredis-2.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1979334ccab21a49c544cd1b8d784ffb2747f99a51cb0bd0976eebb517628382"}, - {file = "hiredis-2.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0c0773266e1c38a06e7593bd08870ac1503f5f0ce0f5c63f2b4134b090b5d6a4"}, - {file = "hiredis-2.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bd1cee053416183adcc8e6134704c46c60c3f66b8faaf9e65bf76191ca59a2f7"}, - {file = "hiredis-2.3.2-cp38-cp38-win32.whl", hash = "sha256:5341ce3d01ef3c7418a72e370bf028c7aeb16895e79e115fe4c954fff990489e"}, - {file = "hiredis-2.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:8fc7197ff33047ce43a67851ccf190acb5b05c52fd4a001bb55766358f04da68"}, - {file = "hiredis-2.3.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:f47775e27388b58ce52f4f972f80e45b13c65113e9e6b6bf60148f893871dc9b"}, - {file = "hiredis-2.3.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:9412a06b8a8e09abd6313d96864b6d7713c6003a365995a5c70cfb9209df1570"}, - {file = "hiredis-2.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3020b60e3fc96d08c2a9b011f1c2e2a6bdcc09cb55df93c509b88be5cb791df"}, - {file = "hiredis-2.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53d0f2c59bce399b8010a21bc779b4f8c32d0f582b2284ac8c98dc7578b27bc4"}, - {file = 
"hiredis-2.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:57c0d0c7e308ed5280a4900d4468bbfec51f0e1b4cde1deae7d4e639bc6b7766"}, - {file = "hiredis-2.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d63318ca189fddc7e75f6a4af8eae9c0545863619fb38cfba5f43e81280b286"}, - {file = "hiredis-2.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e741ffe4e2db78a1b9dd6e5d29678ce37fbaaf65dfe132e5b82a794413302ef1"}, - {file = "hiredis-2.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb98038ccd368e0d88bd92ee575c58cfaf33e77f788c36b2a89a84ee1936dc6b"}, - {file = "hiredis-2.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:eae62ed60d53b3561148bcd8c2383e430af38c0deab9f2dd15f8874888ffd26f"}, - {file = "hiredis-2.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ca33c175c1cf60222d9c6d01c38fc17ec3a484f32294af781de30226b003e00f"}, - {file = "hiredis-2.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c5f6972d2bdee3cd301d5c5438e31195cf1cabf6fd9274491674d4ceb46914d"}, - {file = "hiredis-2.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a6b54dabfaa5dbaa92f796f0c32819b4636e66aa8e9106c3d421624bd2a2d676"}, - {file = "hiredis-2.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e96cd35df012a17c87ae276196ea8f215e77d6eeca90709eb03999e2d5e3fd8a"}, - {file = "hiredis-2.3.2-cp39-cp39-win32.whl", hash = "sha256:63b99b5ea9fe4f21469fb06a16ca5244307678636f11917359e3223aaeca0b67"}, - {file = "hiredis-2.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:a50c8af811b35b8a43b1590cf890b61ff2233225257a3cad32f43b3ec7ff1b9f"}, - {file = "hiredis-2.3.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e8bf4444b09419b77ce671088db9f875b26720b5872d97778e2545cd87dba4a"}, - {file = "hiredis-2.3.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5bd42d0d45ea47a2f96babd82a659fbc60612ab9423a68e4a8191e538b85542a"}, - {file = "hiredis-2.3.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80441b55edbef868e2563842f5030982b04349408396e5ac2b32025fb06b5212"}, - {file = "hiredis-2.3.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec444ab8f27562a363672d6a7372bc0700a1bdc9764563c57c5f9efa0e592b5f"}, - {file = "hiredis-2.3.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f9f606e810858207d4b4287b4ef0dc622c2aa469548bf02b59dcc616f134f811"}, - {file = "hiredis-2.3.2-pp37-pypy37_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c3dde4ca00fe9eee3b76209711f1941bb86db42b8a75d7f2249ff9dfc026ab0e"}, - {file = "hiredis-2.3.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4dd676107a1d3c724a56a9d9db38166ad4cf44f924ee701414751bd18a784a0"}, - {file = "hiredis-2.3.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce42649e2676ad783186264d5ffc788a7612ecd7f9effb62d51c30d413a3eefe"}, - {file = "hiredis-2.3.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e3f8b1733078ac663dad57e20060e16389a60ab542f18a97931f3a2a2dd64a4"}, - {file = "hiredis-2.3.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:532a84a82156a82529ec401d1c25d677c6543c791e54a263aa139541c363995f"}, - {file = "hiredis-2.3.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4d59f88c4daa36b8c38e59ac7bffed6f5d7f68eaccad471484bf587b28ccc478"}, - {file = "hiredis-2.3.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a91a14dd95e24dc078204b18b0199226ee44644974c645dc54ee7b00c3157330"}, - {file = "hiredis-2.3.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb777a38797c8c7df0444533119570be18d1a4ce5478dffc00c875684df7bfcb"}, - {file = 
"hiredis-2.3.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d47c915897a99d0d34a39fad4be97b4b709ab3d0d3b779ebccf2b6024a8c681e"}, - {file = "hiredis-2.3.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:333b5e04866758b11bda5f5315b4e671d15755fc6ed3b7969721bc6311d0ee36"}, - {file = "hiredis-2.3.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c8937f1100435698c18e4da086968c4b5d70e86ea718376f833475ab3277c9aa"}, - {file = "hiredis-2.3.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa45f7d771094b8145af10db74704ab0f698adb682fbf3721d8090f90e42cc49"}, - {file = "hiredis-2.3.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33d5ebc93c39aed4b5bc769f8ce0819bc50e74bb95d57a35f838f1c4378978e0"}, - {file = "hiredis-2.3.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a797d8c7df9944314d309b0d9e1b354e2fa4430a05bb7604da13b6ad291bf959"}, - {file = "hiredis-2.3.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e15a408f71a6c8c87b364f1f15a6cd9c1baca12bbc47a326ac8ab99ec7ad3c64"}, - {file = "hiredis-2.3.2.tar.gz", hash = "sha256:733e2456b68f3f126ddaf2cd500a33b25146c3676b97ea843665717bda0c5d43"}, -] - -[[package]] -name = "httpcore" -version = "1.0.9" -description = "A minimal low-level HTTP client." 
-optional = false -python-versions = ">=3.8" -groups = ["main", "dev"] -files = [ - {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, - {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.16" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<1.0)"] - -[[package]] -name = "httpx" -version = "0.25.2" -description = "The next generation HTTP client." -optional = false -python-versions = ">=3.8" -groups = ["main", "dev"] -files = [ - {file = "httpx-0.25.2-py3-none-any.whl", hash = "sha256:a05d3d052d9b2dfce0e3896636467f8a5342fb2b902c819428e1ac65413ca118"}, - {file = "httpx-0.25.2.tar.gz", hash = "sha256:8b8fcaa0c8ea7b05edd69a094e63a2094c4efcb48129fb757361bc423c0ad9e8"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" -sniffio = "*" - -[package.extras] -brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] - -[[package]] -name = "idna" -version = "3.6" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -groups = ["main", "dev"] -files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, -] - -[[package]] -name = "importlib-metadata" -version = "4.11.4" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.7" -groups = ["main", "dev"] -files = [ - {file = 
"importlib_metadata-4.11.4-py3-none-any.whl", hash = "sha256:c58c8eb8a762858f49e18436ff552e83914778e50e9d2f1660535ffb364552ec"}, - {file = "importlib_metadata-4.11.4.tar.gz", hash = "sha256:5d26852efe48c0a32b0509ffbc583fda1a2266545a78d104a6f4aff3db17d700"}, -] -markers = {main = "python_version == \"3.8\"", dev = "python_version < \"3.10\""} - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\"", "pytest-perf (>=0.9.2)"] - -[[package]] -name = "importlib-resources" -version = "5.4.0" -description = "Read resources from Python packages" -optional = false -python-versions = ">=3.6" -groups = ["main"] -markers = "python_version == \"3.8\"" -files = [ - {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, - {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, -] - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy ; platform_python_implementation != \"PyPy\""] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = 
"iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "isort" -version = "5.10.1" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.6.1,<4.0" -groups = ["dev"] -files = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, -] - -[package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile-deprecated-finder = ["pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] - -[[package]] -name = "loguru" -version = "0.6.0" -description = "Python logging made (stupidly) simple" -optional = false -python-versions = ">=3.5" -groups = ["main"] -files = [ - {file = "loguru-0.6.0-py3-none-any.whl", hash = "sha256:4e2414d534a2ab57573365b3e6d0234dfb1d84b68b7f3b948e6fb743860a77c3"}, - {file = "loguru-0.6.0.tar.gz", hash = "sha256:066bd06758d0a513e9836fd9c6b5a75bfb3fd36841f4b996bc60b547a309d41c"}, -] - -[package.dependencies] -colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} -win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} - -[package.extras] -dev = ["Sphinx (>=4.1.1) ; python_version >= \"3.6\"", "black (>=19.10b0) ; python_version >= \"3.6\"", "colorama (>=0.3.4)", "docutils (==0.16)", "flake8 (>=3.7.7)", "isort (>=5.1.1) ; python_version >= \"3.6\"", "pytest (>=4.6.2)", "pytest-cov (>=2.7.1)", "sphinx-autobuild (>=0.7.1) ; python_version >= \"3.6\"", "sphinx-rtd-theme (>=0.4.3) ; python_version >= \"3.6\"", "tox (>=3.9.0)"] - -[[package]] -name = "mako" -version = "1.2.4" -description = "A super-fast templating 
language that borrows the best ideas from the existing templating languages." -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"}, - {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"}, -] - -[package.dependencies] -MarkupSafe = ">=0.9.2" - -[package.extras] -babel = ["Babel"] -lingua = ["lingua"] -testing = ["pytest"] - -[[package]] -name = "markdown" -version = "3.3.6" -description = "Python implementation of Markdown." -optional = false -python-versions = ">=3.6" -groups = ["dev"] -files = [ - {file = "Markdown-3.3.6-py3-none-any.whl", hash = "sha256:9923332318f843411e9932237530df53162e29dc7a4e2b91e35764583c46c9a3"}, - {file = "Markdown-3.3.6.tar.gz", hash = "sha256:76df8ae32294ec39dcf89340382882dfa12975f87f45c3ed1ecdb1e8cefc7006"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} - -[package.extras] -testing = ["coverage", "pyyaml"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "mccabe" -version = "0.6.1" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = 
"sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, -] - -[[package]] -name = "mypy" -version = "1.0.1" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "mypy-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:71a808334d3f41ef011faa5a5cd8153606df5fc0b56de5b2e89566c8093a0c9a"}, - {file = "mypy-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:920169f0184215eef19294fa86ea49ffd4635dedfdea2b57e45cb4ee85d5ccaf"}, - {file = "mypy-1.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a0f74a298769d9fdc8498fcb4f2beb86f0564bcdb1a37b58cbbe78e55cf8c0"}, - {file = "mypy-1.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:65b122a993d9c81ea0bfde7689b3365318a88bde952e4dfa1b3a8b4ac05d168b"}, - {file = "mypy-1.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:5deb252fd42a77add936b463033a59b8e48eb2eaec2976d76b6878d031933fe4"}, - {file = "mypy-1.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2013226d17f20468f34feddd6aae4635a55f79626549099354ce641bc7d40262"}, - {file = "mypy-1.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:48525aec92b47baed9b3380371ab8ab6e63a5aab317347dfe9e55e02aaad22e8"}, - {file = "mypy-1.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c96b8a0c019fe29040d520d9257d8c8f122a7343a8307bf8d6d4a43f5c5bfcc8"}, - {file = "mypy-1.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:448de661536d270ce04f2d7dddaa49b2fdba6e3bd8a83212164d4174ff43aa65"}, - {file = "mypy-1.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:d42a98e76070a365a1d1c220fcac8aa4ada12ae0db679cb4d910fabefc88b994"}, - {file = "mypy-1.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64f48c6176e243ad015e995de05af7f22bbe370dbb5b32bd6988438ec873919"}, - {file = "mypy-1.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5fdd63e4f50e3538617887e9aee91855368d9fc1dea30da743837b0df7373bc4"}, - {file = "mypy-1.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dbeb24514c4acbc78d205f85dd0e800f34062efcc1f4a4857c57e4b4b8712bff"}, - {file = "mypy-1.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a2948c40a7dd46c1c33765718936669dc1f628f134013b02ff5ac6c7ef6942bf"}, - {file = "mypy-1.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bc8d6bd3b274dd3846597855d96d38d947aedba18776aa998a8d46fabdaed76"}, - {file = "mypy-1.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:17455cda53eeee0a4adb6371a21dd3dbf465897de82843751cf822605d152c8c"}, - {file = "mypy-1.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e831662208055b006eef68392a768ff83596035ffd6d846786578ba1714ba8f6"}, - {file = "mypy-1.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e60d0b09f62ae97a94605c3f73fd952395286cf3e3b9e7b97f60b01ddfbbda88"}, - {file = "mypy-1.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:0af4f0e20706aadf4e6f8f8dc5ab739089146b83fd53cb4a7e0e850ef3de0bb6"}, - {file = "mypy-1.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24189f23dc66f83b839bd1cce2dfc356020dfc9a8bae03978477b15be61b062e"}, - {file = "mypy-1.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93a85495fb13dc484251b4c1fd7a5ac370cd0d812bbfc3b39c1bafefe95275d5"}, - {file = "mypy-1.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f546ac34093c6ce33f6278f7c88f0f147a4849386d3bf3ae193702f4fe31407"}, - {file = "mypy-1.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c6c2ccb7af7154673c591189c3687b013122c5a891bb5651eca3db8e6c6c55bd"}, - {file = "mypy-1.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:15b5a824b58c7c822c51bc66308e759243c32631896743f030daf449fe3677f3"}, - {file = "mypy-1.0.1-py3-none-any.whl", hash = "sha256:eda5c8b9949ed411ff752b9a01adda31afe7eae1e53e946dbdf9db23865e66c4"}, - {file = "mypy-1.0.1.tar.gz", hash = 
"sha256:28cea5a6392bb43d266782983b5a4216c25544cd7d80be681a155ddcdafd152d"}, -] - -[package.dependencies] -mypy-extensions = ">=0.4.3" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=3.10" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "0.4.4" -description = "Experimental type system extensions for programs checked with the mypy typechecker." -optional = false -python-versions = ">=2.7" -groups = ["main", "dev"] -files = [ - {file = "mypy_extensions-0.4.4.tar.gz", hash = "sha256:c8b707883a96efe9b4bb3aaf0dcc07e7e217d7d8368eec4db4049ee9e142f4fd"}, -] - -[[package]] -name = "packaging" -version = "24.0" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -groups = ["main", "dev"] -files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, -] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." 
-optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "pbr" -version = "6.0.0" -description = "Python Build Reasonableness" -optional = false -python-versions = ">=2.6" -groups = ["dev"] -files = [ - {file = "pbr-6.0.0-py2.py3-none-any.whl", hash = "sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda"}, - {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, -] - -[[package]] -name = "pep8-naming" -version = "0.12.1" -description = "Check PEP-8 naming conventions, plugin for flake8" -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "pep8-naming-0.12.1.tar.gz", hash = "sha256:bb2455947757d162aa4cad55dba4ce029005cd1692f2899a21d51d8630ca7841"}, - {file = "pep8_naming-0.12.1-py2.py3-none-any.whl", hash = "sha256:4a8daeaeb33cfcde779309fc0c9c0a68a3bbe2ad8a8308b763c5068f86eb9f37"}, -] - -[package.dependencies] -flake8 = ">=3.9.1" -flake8-polyfill = ">=1.0.2,<2" - -[[package]] -name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] - -[[package]] -name = "pluggy" -version = "1.4.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "psycopg2-binary" -version = "2.9.9" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, - {file = 
"psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, 
- {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, - {file = 
"psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, - {file = 
"psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, -] - -[[package]] -name = "pybotx" -version = "0.73.4" -description = "A python library for interacting with eXpress BotX API" -optional = false -python-versions = "<3.13,>=3.8" -groups = ["main"] -files = [ - {file = "pybotx-0.73.4-py3-none-any.whl", hash = "sha256:2795f9e4a605c92fc39b2c6054f5e92c6dbc372e510b8891a10dd35fb43eab83"}, - {file = "pybotx-0.73.4.tar.gz", hash = "sha256:7b0c20c9aae3ffd77a91d8456b94aa363ed337a045811b0da57613073a4b39d4"}, -] - -[package.dependencies] -aiocsv = ">=1.2.3,<1.3.0" -aiofiles = ">=0.7.0,<24.0.0" -httpcore = "1.0.9" -httpx = ">=0.25.0,<0.26.0" -loguru = ">=0.6.0,<0.7.0" -mypy-extensions = ">=0.2.0,<0.5.0" -pydantic = ">=1.6.0,<1.11.0" -pyjwt = ">=2.0.0,<3.0.0" - -[[package]] -name = "pybotx-smart-logger" -version = "0.10.1" -description = "Shows logs when you need it" -optional = false -python-versions = ">=3.8,<3.12" -groups = ["main"] -files = [ - {file = "pybotx_smart_logger-0.10.1-py3-none-any.whl", hash = "sha256:dea25a90d3bc857a997e0598d2f224520dfe54b89cabc2f2f7a4411d471c4ba5"}, - {file = "pybotx_smart_logger-0.10.1.tar.gz", hash = 
"sha256:025018701e206cda84453a5379a0b890ac51588bd1555561b307c432ce6636be"}, -] - -[package.dependencies] -loguru = ">=0.6.0,<0.7.0" -pydantic = ">=1.10.5,<2.0.0" - -[[package]] -name = "pycodestyle" -version = "2.8.0" -description = "Python style guide checker" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -groups = ["dev"] -files = [ - {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, - {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, -] - -[[package]] -name = "pydantic" -version = "1.10.15" -description = "Data validation and settings management using python type hints" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, - {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, - {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, - {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, - {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, - {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, - {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, - {file = 
"pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, - {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, - {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, - {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, - {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, - {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, - {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, - {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, - {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, - {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, - {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, - {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, - {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = 
"sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, - {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, - {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, - {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, - {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, - {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, - {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, - {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, - {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, - {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, - {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, - {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, - {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, - {file = 
"pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, - {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, - {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, - {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, -] - -[package.dependencies] -python-dotenv = {version = ">=0.10.4", optional = true, markers = "extra == \"dotenv\""} -typing-extensions = ">=4.2.0" - -[package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] - -[[package]] -name = "pydocstyle" -version = "6.3.0" -description = "Python docstring style checker" -optional = false -python-versions = ">=3.6" -groups = ["dev"] -files = [ - {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, - {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, -] - -[package.dependencies] -snowballstemmer = ">=2.2.0" - -[package.extras] -toml = ["tomli (>=1.2.3) ; python_version < \"3.11\""] - -[[package]] -name = "pyflakes" -version = "2.4.0" -description = "passive checker of Python programs" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["dev"] -files = [ - {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, - {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, -] - -[[package]] -name = "pygments" -version = "2.17.2" -description = "Pygments is a syntax highlighting package written in Python." 
-optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, -] - -[package.extras] -plugins = ["importlib-metadata ; python_version < \"3.8\""] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pyjwt" -version = "2.8.0" -description = "JSON Web Token implementation in Python" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, -] - -[package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] - -[[package]] -name = "pytest" -version = "7.2.2" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"}, - {file = "pytest-7.2.2.tar.gz", hash = "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - 
-[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] - -[[package]] -name = "pytest-asyncio" -version = "0.18.3" -description = "Pytest support for asyncio" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "pytest-asyncio-0.18.3.tar.gz", hash = "sha256:7659bdb0a9eb9c6e3ef992eef11a2b3e69697800ad02fb06374a210d85b29f91"}, - {file = "pytest_asyncio-0.18.3-1-py3-none-any.whl", hash = "sha256:16cf40bdf2b4fb7fc8e4b82bd05ce3fbcd454cbf7b92afc445fe299dabb88213"}, - {file = "pytest_asyncio-0.18.3-py3-none-any.whl", hash = "sha256:8fafa6c52161addfd41ee7ab35f11836c5a16ec208f93ee388f752bea3493a84"}, -] - -[package.dependencies] -pytest = ">=6.1.0" - -[package.extras] -testing = ["coverage (==6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (==0.931)", "pytest-trio (>=0.7.0)"] - -[[package]] -name = "pytest-cov" -version = "3.0.0" -description = "Pytest plugin for measuring coverage." 
-optional = false -python-versions = ">=3.6" -groups = ["dev"] -files = [ - {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, - {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, -] - -[package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main"] -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-dotenv" -version = "1.0.1" -description = "Read key-value pairs from a .env file and set them as environment variables" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, -] - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = 
"sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -groups = ["dev"] -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "redis" -version = 
"5.0.3" -description = "Python client for Redis database and key-value store" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "redis-5.0.3-py3-none-any.whl", hash = "sha256:5da9b8fe9e1254293756c16c008e8620b3d15fcc6dde6babde9541850e72a32d"}, - {file = "redis-5.0.3.tar.gz", hash = "sha256:4973bae7444c0fbed64a06b87446f79361cb7e4ec1538c022d696ed7a5015580"}, -] - -[package.dependencies] -async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} -hiredis = {version = ">=1.0.0", optional = true, markers = "extra == \"hiredis\""} - -[package.extras] -hiredis = ["hiredis (>=1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "respx" -version = "0.20.2" -description = "A utility for mocking out the Python HTTPX and HTTP Core libraries." 
-optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "respx-0.20.2-py2.py3-none-any.whl", hash = "sha256:ab8e1cf6da28a5b2dd883ea617f8130f77f676736e6e9e4a25817ad116a172c9"}, - {file = "respx-0.20.2.tar.gz", hash = "sha256:07cf4108b1c88b82010f67d3c831dae33a375c7b436e54d87737c7f9f99be643"}, -] - -[package.dependencies] -httpx = ">=0.21.0" - -[[package]] -name = "restructuredtext-lint" -version = "1.4.0" -description = "reStructuredText linter" -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"}, -] - -[package.dependencies] -docutils = ">=0.11,<1.0" - -[[package]] -name = "saq" -version = "0.12.4" -description = "Distributed Python job queue with asyncio and redis" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "saq-0.12.4-py3-none-any.whl", hash = "sha256:3373b1f246b0fe79a307cfc89349f8b92f30c82b6be4797ab41e41ad1b9632cc"}, - {file = "saq-0.12.4.tar.gz", hash = "sha256:2bec857c08535287a7320cdaaace7ed400a622ade2218bf28fac08795a255c37"}, -] - -[package.dependencies] -croniter = ">=0.3.18" -redis = [ - {version = ">=4.2,<6.0"}, - {version = ">=4.2.0", extras = ["hiredis"], optional = true, markers = "extra == \"hiredis\""}, -] - -[package.extras] -dev = ["black", "coverage", "httpx", "mypy", "pylint", "starlette", "types-croniter", "types-redis", "types-setuptools"] -hiredis = ["redis[hiredis] (>=4.2.0)"] -web = ["aiohttp", "aiohttp-basicauth"] - -[[package]] -name = "setuptools" -version = "69.2.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, - {file = "setuptools-69.2.0.tar.gz", hash = 
"sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov ; platform_python_implementation != \"PyPy\"", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\"", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -groups = ["main"] -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "smmap" -version = "5.0.1" -description = "A pure Python implementation of a sliding window memory map manager" -optional = false -python-versions = ">=3.7" -groups = 
["dev"] -files = [ - {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, - {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -groups = ["main", "dev"] -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - -[[package]] -name = "sqlalchemy" -version = "2.0.29" -description = "Database Abstraction Library" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", 
hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, - {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, - {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} -typing-extensions = ">=4.6.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 
(>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "starlette" -version = "0.37.2" -description = "The little ASGI library that shines." -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, - {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, -] - -[package.dependencies] -anyio = ">=3.4.0,<5" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} - -[package.extras] -full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] - -[[package]] -name = "stevedore" -version = "5.2.0" -description = "Manage dynamic plugins for Python applications" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "stevedore-5.2.0-py3-none-any.whl", hash = "sha256:1c15d95766ca0569cad14cb6272d4d31dae66b011a929d7c18219c176ea1b5c9"}, - {file = "stevedore-5.2.0.tar.gz", hash = "sha256:46b93ca40e1114cea93d738a6c1e365396981bb6bb78c27045b7587c9473544d"}, -] - -[package.dependencies] -pbr = ">=2.0.0,<2.1.0 || >2.1.0" - -[[package]] -name = "tokenize-rt" -version = "5.2.0" -description = "A wrapper around the stdlib `tokenize` which roundtrips." 
-optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "tokenize_rt-5.2.0-py2.py3-none-any.whl", hash = "sha256:b79d41a65cfec71285433511b50271b05da3584a1da144a0752e9c621a285289"}, - {file = "tokenize_rt-5.2.0.tar.gz", hash = "sha256:9fe80f8a5c1edad2d3ede0f37481cc0cc1538a2f442c9c2f9e4feacd2792d054"}, -] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -markers = "python_full_version <= \"3.11.0a6\"" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "typing-extensions" -version = "4.10.0" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -groups = ["main", "dev"] -files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, -] - -[[package]] -name = "urllib3" -version = "2.2.1" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "uvicorn" -version = "0.29.0" -description = "The lightning-fast ASGI server." -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"}, - {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"}, -] - -[package.dependencies] -click = ">=7.0" -h11 = ">=0.8" -typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} - -[package.extras] -standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] - -[[package]] -name = "wemake-python-styleguide" -version = "0.16.0" -description = "The strictest and most opinionated python linter ever" -optional = false -python-versions = ">=3.6,<4.0" -groups = ["dev"] -files = [ - {file = "wemake-python-styleguide-0.16.0.tar.gz", hash = "sha256:3bf0a4962404e6fd6fa479e72e2ba3fb75d5920ea6c44b72b45240c9e519543c"}, - {file = "wemake_python_styleguide-0.16.0-py3-none-any.whl", hash = "sha256:8caa92b4aa77b08a505d718553238812d1b612b1036bc171ca3aa18345efe0b4"}, -] - 
-[package.dependencies] -astor = ">=0.8,<0.9" -attrs = "*" -darglint = ">=1.2,<2.0" -flake8 = ">=3.7,<5" -flake8-bandit = ">=2.1,<3.0" -flake8-broken-line = ">=0.3,<0.5" -flake8-bugbear = ">=20.1,<22.0" -flake8-commas = ">=2.0,<3.0" -flake8-comprehensions = ">=3.1,<4.0" -flake8-debugger = ">=4.0,<5.0" -flake8-docstrings = ">=1.3,<2.0" -flake8-eradicate = ">=1.0,<2.0" -flake8-isort = ">=4.0,<5.0" -flake8-quotes = ">=3.0,<4.0" -flake8-rst-docstrings = ">=0.2.3,<0.3.0" -flake8-string-format = ">=0.3,<0.4" -pep8-naming = ">=0.11,<0.13" -pygments = ">=2.4,<3.0" -typing_extensions = ">=3.6,<5.0" - -[[package]] -name = "win32-setctime" -version = "1.1.0" -description = "A small Python utility to set file creation time on Windows" -optional = false -python-versions = ">=3.5" -groups = ["main"] -markers = "sys_platform == \"win32\"" -files = [ - {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, - {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, -] - -[package.extras] -dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] - -[[package]] -name = "zipp" -version = "3.7.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.7" -groups = ["main", "dev"] -files = [ - {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, - {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, -] -markers = {main = "python_version == \"3.8\"", dev = "python_version < \"3.10\""} - -[package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", 
"pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy ; platform_python_implementation != \"PyPy\""] - -[metadata] -lock-version = "2.1" -python-versions = ">=3.8,<3.12" -content-hash = "b0630bcbbe2a44d8e00b86d0ab87f719db2035f1b8258f5dca15affdf5cfcae5" diff --git a/pyproject.toml.jinja b/pyproject.toml similarity index 63% rename from pyproject.toml.jinja rename to pyproject.toml index 2de5d1d..ee84cc7 100644 --- a/pyproject.toml.jinja +++ b/pyproject.toml @@ -1,20 +1,20 @@ # https://python-poetry.org/docs/ [tool.poetry] -name = "{{bot_project_name}}" +name = "bot_refactor" version = "0.1.0" -description = "{{bot_short_description}}" +description = "TODO" authors = [] [tool.poetry.dependencies] -python = ">=3.8,<3.12" +python = ">=3.9,<3.12" -pybotx = "~0.73.4" +pybotx = "~0.75.1" pybotx-smart-logger = "~0.10.1" -{% if add_fsm %} -pybotx-fsm = "~0.4.12" -{% endif %} + +pybotx-fsm = "~0.6.1" + fastapi = "~0.110.1" gunicorn = "~21.2.0" @@ -22,7 +22,8 @@ uvicorn = { version = "~0.29.0", extras = ["standart"] } loguru = ">=0.6.0,<0.7.0" mako = "~1.2.2" -pydantic = { version = "~1.10.4", extras = ["dotenv"] } + +pydantic = { version = ">=1.10.5,<2.0.0", extras = ["dotenv"] } alembic = "~1.13.1" SQLAlchemy = "~2.0.0" @@ -35,31 +36,43 @@ saq = { version = "~0.12.4", extras = ["hiredis"] } importlib-resources = { version = "~5.4.0", python = "<3.9" } zipp = { version = "~3.7.0", python = "<3.9" } importlib-metadata = { version = "~4.11.0", python = "<3.9" } +dependency-injector = "4.41.0" +orjson = "^3.10.18" +factory-boy = "^3.3.3" +async-factory-boy = "^1.0.1" [tool.poetry.dev-dependencies] -add-trailing-comma = "2.2.1" -autoflake = "1.4.0" -black = "22.3.0" -isort = "5.10.1" -mypy = "1.0.1" -wemake-python-styleguide = "0.16.0" - -flake8-bandit = "2.1.2" # https://github.com/PyCQA/bandit/issues/837 +mypy = "1.16.1" +ruff = "0.12.0" + bandit = "1.7.2" # https://github.com/PyCQA/bandit/issues/837 -pytest = "~7.2.0" +pytest = "~8.4.1" pytest-asyncio = 
"~0.18.2" pytest-cov = "~3.0.0" asgi-lifespan = "~1.0.1" requests = "~2.31.0" respx = "~0.20.0" -httpx = "~0.25.0" +httpx = ">=0.28.0,<0.29.0" markdown = "3.3.6" # https://github.com/python-poetry/poetry/issues/4777 +testcontainers = { extras = ["postgresql"], version = "^4.10.0" } + +deepdiff = "^8.5.0" + [build-system] requires = ["poetry>=1.1.12"] build-backend = "poetry.masonry.api" + + +[tool.pytest.ini_options] +addopts = "-ra" +asyncio_mode = "auto" +markers = [ + "unit: fast, offline tests", + "integ: slow integration tests; run only with --run-integ", +] diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 0000000..3210f84 --- /dev/null +++ b/ruff.toml @@ -0,0 +1,33 @@ +target-version = "py310" +line-length = 88 +exclude = ["app/db/migrations"] # папки/файлы, которые Ruff вообще не читает + +[lint] +# группы правил +select = ["E", "F", "B", "S", "W", "I", "N", "Q"] +extend-select = ["B006", "B007"] + +ignore = [ + # Bugbear + "B008", + # pydocstyle + "D101", "D102", "D103", "D106", "D107", "D202", + # whitespace-совместимость c Black + "E203", + # Bandit + "S101", "S702", +] + +[lint.per-file-ignores] +"*/__init__.py" = ["D104"] +"app/bot/commands/*.py" = ["D104"] +"app/resources/strings.py" = ["E501"] +"tests/*" = ["D100"] + +[lint.flake8-quotes] +inline-quotes = "double" +avoid-escape = true + +[lint.isort] +known-first-party = ["app"] +combine-as-imports = true # from x import y as z, w as q \ No newline at end of file diff --git a/scripts/format b/scripts/format index 0a5e18e..b60ee9d 100755 --- a/scripts/format +++ b/scripts/format @@ -1,10 +1,25 @@ + #!/usr/bin/env bash +set -euo pipefail set -ex autoflake --recursive --in-place \ --remove-all-unused-imports \ --ignore-init-module-imports \ +# ------------------------------------------------------------ +# 1. Удаляем неиспользуемые импорты (F401/F841) и +# сортируем оставшиеся (I###) — «исправляем» только эти коды. 
+# ------------------------------------------------------------ +ruff check \ + --select F401,F841,I \ + --fix \ app tests isort --profile black app tests black app tests + +# ------------------------------------------------------------ +# 2. Применяем Black-совместимое форматирование +# (эквивалент: `black app tests`) +# ------------------------------------------------------------ +ruff format app tests \ No newline at end of file diff --git a/scripts/lint b/scripts/lint index 930eb08..8ca2743 100755 --- a/scripts/lint +++ b/scripts/lint @@ -1,9 +1,24 @@ #!/usr/bin/env bash +set -euo pipefail set -ex +# --------------------------------------------------------------- +# 1. Проверяем форматирование (equivalent to: black --check --diff) +# --diff → вывод изменений +# --check → не править файлы, вернуть ненулевой код, если найдены проблемы +# --------------------------------------------------------------- +ruff format --check --diff app tests black --check --diff app tests isort --profile black --check-only app tests +# --------------------------------------------------------------- +# 2. Запускаем все lint-правила (B, E, F, S, B*, W*, N*, Q*, ...) +# Конфигурация берётся из pyproject.toml / ruff.toml +# --------------------------------------------------------------- +ruff check app tests +# --------------------------------------------------------------- +# 3. Type-checking +# --------------------------------------------------------------- mypy app tests -flake8 app tests +flake8 app tests \ No newline at end of file diff --git a/scripts/test b/scripts/test index 743b0a6..0c82f14 100755 --- a/scripts/test +++ b/scripts/test @@ -1,5 +1,90 @@ #!/usr/bin/env bash +# +# scripts/test – удобный раннер для pytest. 
+# +# ┌───────────────────────────────┐ +# │ ПРИМЕРЫ │ +# ├───────────────────────────────┤ +# │ ./scripts/test │ → все тесты │ +# │ ./scripts/test --unit -q │ → только unit, тихо │ +# │ ./scripts/test --integ -k api │ → только integ, по кею │ +# │ ./scripts/test --unit --integ │ → unit + integ │ +# └───────────────────────────────┘ +# +# Если скрипт запустили не через bash (например, `sh scripts/test`), +# он перезапустит себя под bash, чтобы работали массивы. +# --------------------------------------------------------------------- set -ex +# --- re-exec в bash, если нужно -------------------------------------- +if [ -z "${BASH_VERSION:-}" ]; then + exec bash "$0" "$@" +fi pytest ${@} +set -euo pipefail + +show_help() { + cat < ISampleRecordUseCases: + """Return sample record use cases with real repository""" + return SampleRecordUseCases(SampleRecordRepository(isolated_session)) diff --git a/tests/application/integration/test_sample_record_use_cases_int.py b/tests/application/integration/test_sample_record_use_cases_int.py new file mode 100644 index 0000000..39a85e3 --- /dev/null +++ b/tests/application/integration/test_sample_record_use_cases_int.py @@ -0,0 +1,88 @@ +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.application.use_cases.interfaces import ISampleRecordUseCases +from app.infrastructure.db.sample_record.models import SampleRecordModel +from app.presentation.bot.schemas import SampleRecordResponseSchema +from tests.factories import SampleRecordCreateSchemaFactory, SampleRecordModelFactory + + +def assert_database_object_equal_to_retrieved_object( + database_object: SampleRecordModel, retrieved_object: SampleRecordResponseSchema +): + assert isinstance(retrieved_object, SampleRecordResponseSchema) + assert database_object.id == retrieved_object.id + assert database_object.record_data == retrieved_object.record_data + + +async def test_sample_record_use_case_add_record_in_database( + 
sample_record_use_cases_with_real_repo: ISampleRecordUseCases,
+    isolated_session: AsyncSession,
+):
+    """Test adding a new record."""
+
+    sample_record_create_request = SampleRecordCreateSchemaFactory()
+    response = await sample_record_use_cases_with_real_repo.create_record(
+        sample_record_create_request
+    )
+
+    query = select(SampleRecordModel).where(SampleRecordModel.id == response.id)
+    result = (await isolated_session.execute(query)).scalar_one()
+
+    assert_database_object_equal_to_retrieved_object(result, response)
+
+
+async def test_sample_record_use_case_get_record_from_database(
+    sample_record_use_cases_with_real_repo: ISampleRecordUseCases,
+    isolated_session: AsyncSession,
+):
+    """Test get a record."""
+
+    existing_record = SampleRecordModel(record_data="existing_record")
+    isolated_session.add(existing_record)
+    await isolated_session.flush()
+
+    response = await sample_record_use_cases_with_real_repo.get_record(
+        existing_record.id
+    )
+
+    assert isinstance(response, SampleRecordResponseSchema)
+    assert response.record_data == existing_record.record_data
+    assert response.id == existing_record.id
+
+
+async def test_sample_record_use_case_remove_record_from_database(
+    sample_record_use_cases_with_real_repo: ISampleRecordUseCases,
+    isolated_session: AsyncSession,
+):
+    """Test removing an existing record."""
+
+    existing_record = SampleRecordModel(record_data="existing_record")
+    isolated_session.add(existing_record)
+    await isolated_session.flush()
+
+    response = await sample_record_use_cases_with_real_repo.delete_record(
+        existing_record.id
+    )
+
+    assert response == existing_record.id
+
+    assert await isolated_session.get(SampleRecordModel, existing_record.id) is None
+
+
+async def test_sample_record_use_case_get_all_records_from_database(
+    sample_record_use_cases_with_real_repo: ISampleRecordUseCases,
+    isolated_session: AsyncSession,
+    sample_record_factory: SampleRecordModelFactory,
+):
+    """Test get all records from database"""
+
existing_records = { + record.id: record for record in await sample_record_factory.create_batch(3) + } + + response = await sample_record_use_cases_with_real_repo.get_all_records() + + for response_record in response.data: + assert_database_object_equal_to_retrieved_object( + existing_records[response_record.id], response_record + ) diff --git a/tests/application/unit/__init__.py b/tests/application/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/application/unit/conftest.py b/tests/application/unit/conftest.py new file mode 100644 index 0000000..6de516e --- /dev/null +++ b/tests/application/unit/conftest.py @@ -0,0 +1,12 @@ +import pytest + +from app.application.use_cases.interfaces import ISampleRecordUseCases +from app.application.use_cases.record_use_cases import SampleRecordUseCases +from app.domain.entities.sample_record import SampleRecord +from tests.application.unit.fake_repository import FakeSampleRecordRepository + + +@pytest.fixture +def sample_record_use_cases_with_fake_repo() -> ISampleRecordUseCases: + """Return sample record use cases with fake repository""" + return SampleRecordUseCases(FakeSampleRecordRepository()) diff --git a/tests/application/unit/fake_repository.py b/tests/application/unit/fake_repository.py new file mode 100644 index 0000000..3b95f75 --- /dev/null +++ b/tests/application/unit/fake_repository.py @@ -0,0 +1,44 @@ +from typing import List + +from app.application.repository.exceptions import RecordDoesNotExistError +from app.application.repository.interfaces import ISampleRecordRepository +from app.domain.entities.sample_record import SampleRecord + + +class FakeSampleRecordRepository(ISampleRecordRepository): + def __init__(self, records: List[SampleRecord] = None): + self._records = {} + if records: + for id, record in enumerate(records): + record.id = id + self._records[record.id] = record + + async def create(self, record: SampleRecord) -> SampleRecord: + if record.id is None: + record.id = 
len(self._records) + 1 + + self._records[record.id] = record + return record + + async def update(self, record: SampleRecord) -> SampleRecord: + if record.id not in self._records: + raise RecordDoesNotExistError(f"Record with id={record.id} does not exist.") + + self._records[record.id] = record + return record + + async def delete(self, record_id: int) -> int: + if record_id not in self._records: + raise RecordDoesNotExistError(f"Record with id={record_id} does not exist.") + + del self._records[record_id] + return record_id + + async def get_by_id(self, record_id: int) -> SampleRecord: + if record_id not in self._records: + raise RecordDoesNotExistError(f"Record with id={record_id} does not exist.") + + return self._records[record_id] + + async def get_all(self) -> List[SampleRecord]: + return list(self._records.values()) diff --git a/tests/application/unit/test_sample_record_use_cases.py b/tests/application/unit/test_sample_record_use_cases.py new file mode 100644 index 0000000..3601345 --- /dev/null +++ b/tests/application/unit/test_sample_record_use_cases.py @@ -0,0 +1,116 @@ +import pytest + +from app.application.repository.exceptions import RecordDoesNotExistError +from app.application.use_cases.interfaces import ISampleRecordUseCases +from app.presentation.bot.schemas import ( + SampleRecordResponseSchema, + SampleRecordResponseListSchema, +) +from tests.factories import ( + SampleRecordCreateSchemaFactory, + SampleRecordUpdateSchemaFactory, +) + + +async def test_sample_record_use_case_add_record( + sample_record_use_cases_with_fake_repo: ISampleRecordUseCases, +): + sample_record_create_request = SampleRecordCreateSchemaFactory() + response = await sample_record_use_cases_with_fake_repo.create_record( + sample_record_create_request + ) + + assert isinstance(response, SampleRecordResponseSchema) + assert response.record_data == sample_record_create_request.record_data + + +async def test_sample_record_use_case_update_record( + 
sample_record_use_cases_with_fake_repo: ISampleRecordUseCases, +): + existing_record = await sample_record_use_cases_with_fake_repo.create_record( + SampleRecordCreateSchemaFactory() + ) + + update_request = SampleRecordUpdateSchemaFactory(id=existing_record.id) + + response = await sample_record_use_cases_with_fake_repo.update_record( + update_request + ) + + assert isinstance(response, SampleRecordResponseSchema) + assert response.record_data == update_request.record_data + + +async def test_sample_record_use_case_delete_record( + sample_record_use_cases_with_fake_repo: ISampleRecordUseCases, +): + existing_record = await sample_record_use_cases_with_fake_repo.create_record( + SampleRecordCreateSchemaFactory() + ) + + result = await sample_record_use_cases_with_fake_repo.delete_record( + existing_record.id + ) + assert result == existing_record.id + + +async def test_sample_record_use_case_get_record( + sample_record_use_cases_with_fake_repo: ISampleRecordUseCases, +): + existing_record = await sample_record_use_cases_with_fake_repo.create_record( + SampleRecordCreateSchemaFactory() + ) + + response = await sample_record_use_cases_with_fake_repo.get_record( + existing_record.id + ) + assert isinstance(response, SampleRecordResponseSchema) + assert response.record_data == existing_record.record_data + assert response.id == existing_record.id + + +async def test_sample_record_use_case_get_all_records( + sample_record_use_cases_with_fake_repo: ISampleRecordUseCases, +): + existing_records = [ + await sample_record_use_cases_with_fake_repo.create_record(request) + for request in SampleRecordCreateSchemaFactory.create_batch(3) + ] + + response = await sample_record_use_cases_with_fake_repo.get_all_records() + + assert isinstance(response, SampleRecordResponseListSchema) + assert len(response.data) == len(existing_records) + + for record, response_record in zip(existing_records, response.data): + assert record.id == response_record.id + assert record.record_data == 
response_record.record_data
+
+
+async def test_delete_non_existing_record_raises_error(
+    sample_record_use_cases_with_fake_repo: ISampleRecordUseCases,
+):
+    """Test deleting a not existing record re raises the error from repository."""
+
+    with pytest.raises(RecordDoesNotExistError):
+        await sample_record_use_cases_with_fake_repo.delete_record(42)
+
+
+async def test_update_non_existing_record_raises_error(
+    sample_record_use_cases_with_fake_repo: ISampleRecordUseCases,
+):
+    """Test updating a not existing record re raises the error from repository."""
+
+    with pytest.raises(RecordDoesNotExistError):
+        await sample_record_use_cases_with_fake_repo.update_record(
+            SampleRecordUpdateSchemaFactory(id=42)
+        )
+
+
+async def test_get_non_existing_record_raises_error(
+    sample_record_use_cases_with_fake_repo: ISampleRecordUseCases,
+):
+    """Test getting a not existing record re raises the error from repository."""
+
+    with pytest.raises(RecordDoesNotExistError):
+        await sample_record_use_cases_with_fake_repo.get_record(42)
diff --git a/tests/commands/{% if CI %}test_test.py{% endif %} b/tests/commands/{% if CI %}test_test.py{% endif %}
deleted file mode 100644
index a4023ba..0000000
--- a/tests/commands/{% if CI %}test_test.py{% endif %}
+++ /dev/null
@@ -1,425 +0,0 @@
-import asyncio
-import os
-import re
-from http import HTTPStatus
-from typing import AsyncGenerator, Callable
-from unittest.mock import AsyncMock
-from uuid import UUID, uuid4
-
-import httpx
-import pytest
-from asgi_lifespan import LifespanManager
-from pybotx import (
-    AttachmentTypes,
-    Bot,
-    BotXMethodFailedCallbackReceivedError,
-    BubbleMarkup,
-    CallbackNotReceivedError,
-    IncomingMessage,
-    KeyboardMarkup,
-    OutgoingMessage,
-)
-from pybotx.models.attachments import AttachmentVideo
-from respx import MockRouter
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from app.caching.redis_repo import RedisRepo
-from app.db.record.repo import RecordRepo
-from app.main import get_application
-from app.schemas.record import Record -from tests.conftest import mock_authorization - -pytestmark = pytest.mark.xfail( - os.getenv("CI") == "true", reason="CI is too slow for this tests" -) - - -@pytest.fixture -async def bot() -> AsyncGenerator[Bot, None]: - fastapi_app = get_application(raise_bot_exceptions=True) - - mock_authorization() - - async with LifespanManager(fastapi_app): - built_bot = fastapi_app.state.bot - - yield built_bot - - -async def test_callback_redis_repo_successful_callback( - respx_mock: MockRouter, - host: str, - bot: Bot, - incoming_message_factory: Callable[..., IncomingMessage], -) -> None: - # - Arrange - - message = incoming_message_factory(body="/_test-redis-callback-repo") - respx_mock.post( - f"https://{host}/api/v4/botx/notifications/direct", - json={ - "group_chat_id": str(message.chat.id), - "notification": {"status": "ok", "body": "Hello!"}, - }, - ).mock( - return_value=httpx.Response( - HTTPStatus.ACCEPTED, - json={ - "status": "ok", - "result": {"sync_id": "21a9ec9e-f21f-4406-ac44-1a78d2ccf9e3"}, - }, - ), - ) - - # - Act - - task = bot.async_execute_bot_command(message) - await asyncio.sleep(0) - - await bot.set_raw_botx_method_result( - { - "status": "ok", - "sync_id": "21a9ec9e-f21f-4406-ac44-1a78d2ccf9e3", - "result": {}, - } - ) - await asyncio.sleep(0) - - await task - - # - Assert - - assert task.exception() is None - - -async def test_callback_redis_repo_unsuccessful_callback( - respx_mock: MockRouter, - host: str, - bot: Bot, - incoming_message_factory: Callable[..., IncomingMessage], -) -> None: - # - Arrange - - message = incoming_message_factory(body="/_test-redis-callback-repo") - respx_mock.post( - f"https://{host}/api/v4/botx/notifications/direct", - json={ - "group_chat_id": str(message.chat.id), - "notification": {"status": "ok", "body": "Hello!"}, - }, - ).mock( - return_value=httpx.Response( - HTTPStatus.ACCEPTED, - json={ - "status": "ok", - "result": {"sync_id": "21a9ec9e-f21f-4406-ac44-1a78d2ccf9e3"}, 
- }, - ), - ) - - # - Act - - task = bot.async_execute_bot_command(message) - await asyncio.sleep(0) - - await bot.set_raw_botx_method_result( - { - "status": "error", - "sync_id": "21a9ec9e-f21f-4406-ac44-1a78d2ccf9e3", - "reason": "test_reason", - "errors": [], - "error_data": {}, - } - ) - with pytest.raises(BotXMethodFailedCallbackReceivedError) as exc: - await task - - # - Assert - - assert "test_reason" in str(exc.value) # noqa: WPS441 - - -async def test_callback_redis_repo_no_callback( - respx_mock: MockRouter, - host: str, - bot: Bot, - incoming_message_factory: Callable[..., IncomingMessage], -) -> None: - # - Arrange - - message = incoming_message_factory(body="/_test-redis-callback-repo") - respx_mock.post( - f"https://{host}/api/v4/botx/notifications/direct", - json={ - "group_chat_id": str(message.chat.id), - "notification": {"status": "ok", "body": "Hello!"}, - }, - ).mock( - return_value=httpx.Response( - HTTPStatus.ACCEPTED, - json={ - "status": "ok", - "result": {"sync_id": "21a9ec9e-f21f-4406-ac44-1a78d2ccf9e3"}, - }, - ), - ) - - # - Act - - task = bot.async_execute_bot_command(message) - - with pytest.raises(CallbackNotReceivedError) as exc: - await task - - # - Assert - - assert "hasn't been received" in str(exc.value) # noqa: WPS441 - - -async def test_callback_redis_repo_wait_callback( - respx_mock: MockRouter, - host: str, - bot: Bot, - incoming_message_factory: Callable[..., IncomingMessage], -) -> None: - # - Arrange - - message = incoming_message_factory(body="/_test-redis-callback-repo-wait") - respx_mock.post( - f"https://{host}/api/v4/botx/notifications/direct", - json={ - "group_chat_id": str(message.chat.id), - "notification": {"status": "ok", "body": "Hello!"}, - }, - ).mock( - return_value=httpx.Response( - HTTPStatus.ACCEPTED, - json={ - "status": "ok", - "result": {"sync_id": "21a9ec9e-f21f-4406-ac44-1a78d2ccf9e3"}, - }, - ), - ) - - # - Act - - task = bot.async_execute_bot_command(message) - await asyncio.sleep(0.1) - - await 
bot.set_raw_botx_method_result( - { - "status": "error", - "sync_id": "21a9ec9e-f21f-4406-ac44-1a78d2ccf9e3", - "reason": "test_reason", - "errors": [], - "error_data": {}, - } - ) - await asyncio.sleep(0.1) - - await task - - # - Assert - - assert task.exception() is None - - -async def test_callback_redis_repo_no_wait_callback( - respx_mock: MockRouter, - host: str, - bot: Bot, - incoming_message_factory: Callable[..., IncomingMessage], - loguru_caplog: pytest.LogCaptureFixture, -) -> None: - # - Arrange - - message = incoming_message_factory(body="/_test-redis-callback-repo-no-wait") - respx_mock.post( - f"https://{host}/api/v4/botx/notifications/direct", - json={ - "group_chat_id": str(message.chat.id), - "notification": {"status": "ok", "body": "Hello!"}, - }, - ).mock( - return_value=httpx.Response( - HTTPStatus.ACCEPTED, - json={ - "status": "ok", - "result": {"sync_id": "21a9ec9e-f21f-4406-ac44-1a78d2ccf9e3"}, - }, - ), - ) - - # - Act - - await bot.async_execute_bot_command(message) - await asyncio.sleep(0) - - # - Assert - - assert ( - "Callback `21a9ec9e-f21f-4406-ac44-1a78d2ccf9e3` wasn't waited" - in loguru_caplog.text - ) - - -async def test_answer_error_exception_middleware( - bot: Bot, - user_huid: UUID, - incoming_message_factory: Callable[..., IncomingMessage], -) -> None: - # - Arrange - - message = incoming_message_factory(body="/_test-answer-error") - bot.send = AsyncMock(return_value=uuid4()) # type: ignore - - # - Act - - await bot.async_execute_bot_command(message) - - # - Assert - - bot.send.assert_awaited_once_with( - message=OutgoingMessage( - bot_id=message.bot.id, - chat_id=message.chat.id, - body="test", - metadata={"test": 1}, - bubbles=BubbleMarkup([[]]), - keyboard=KeyboardMarkup([[]]), - file=AttachmentVideo( - type=AttachmentTypes.VIDEO, - filename="test_file.mp4", - size=len(b"Hello, world!\n"), - is_async_file=False, - content=b"Hello, world!\n", - duration=10, - ), - recipients=[user_huid], - silent_response=False, - 
markup_auto_adjust=False, - stealth_mode=False, - send_push=False, - ignore_mute=False, - ), - wait_callback=True, - callback_timeout=1, - ) - - -async def test_answer_message_error_exception_middleware( - bot: Bot, - user_huid: UUID, - incoming_message_factory: Callable[..., IncomingMessage], -) -> None: - # - Arrange - - message = incoming_message_factory(body="/_test-answer-message-error") - bot.answer_message = AsyncMock(return_value=uuid4()) # type: ignore - - # - Act - - await bot.async_execute_bot_command(message) - - # - Assert - - bot.answer_message.assert_awaited_once_with( # type: ignore - body="test", - metadata={"test": 1}, - bubbles=BubbleMarkup([[]]), - keyboard=KeyboardMarkup([[]]), - file=AttachmentVideo( - type=AttachmentTypes.VIDEO, - filename="test_file.mp4", - size=len(b"Hello, world!\n"), - is_async_file=False, - content=b"Hello, world!\n", - duration=10, - ), - recipients=[user_huid], - silent_response=False, - markup_auto_adjust=False, - stealth_mode=False, - send_push=False, - ignore_mute=False, - wait_callback=True, - callback_timeout=1, - ) - - -async def test_fail_handler_while_shutting_down( - bot: Bot, - incoming_message_factory: Callable[..., IncomingMessage], - loguru_caplog: pytest.LogCaptureFixture, -) -> None: - # - Arrange - - message = incoming_message_factory(body="/_test-fail-shutting-down") - bot.answer_message = AsyncMock(return_value=uuid4()) # type: ignore - - # - Act - - await bot.async_execute_bot_command(message) - - # - Assert - - message.state.fsm.drop_state.assert_awaited_once() - assert re.search(r"Internal error ([a-z\d\-]*)", loguru_caplog.text) - assert re.search( - ( - "При обработке сообщения или нажатия на кнопку произошла " - "непредвиденная ошибка.\n" - "Пожалуйста, сообщите об этом вашему администратору бота.\n" - r"\*\*Идентификатор ошибки\*\*: `([a-z\d\-]*)`." 
- ), - bot.answer_message.call_args[0][0], # type: ignore - ) - - -async def test_fail_handler( - bot: Bot, - incoming_message_factory: Callable[..., IncomingMessage], - loguru_caplog: pytest.LogCaptureFixture, -) -> None: - # - Arrange - - message = incoming_message_factory(body="/_test-fail") - bot.answer_message = AsyncMock(return_value=uuid4()) # type: ignore - - # - Act - - await bot.async_execute_bot_command(message) - - # - Assert - - message.state.fsm.drop_state.assert_awaited_once() - assert re.search(r"Internal error ([a-z\d\-]*)", loguru_caplog.text) - assert re.search( - ( - "При обработке сообщения или нажатия на кнопку произошла " - "непредвиденная ошибка.\n" - "Пожалуйста, сообщите об этом вашему администратору бота.\n" - r"\*\*Идентификатор ошибки\*\*: `([a-z\d\-]*)`." - ), - bot.answer_message.call_args[0][0], # type: ignore - ) - assert "Test smart_log output" in loguru_caplog.text - - -async def test_redis_handler( - bot: Bot, - incoming_message_factory: Callable[..., IncomingMessage], - redis_repo: RedisRepo, -) -> None: - # - Arrange - - message = incoming_message_factory(body="/_test-redis") - - # - Act - - await bot.async_execute_bot_command(message) - - # - Assert - - assert await redis_repo.rget("test_key") == "test_value" - assert await redis_repo.get("test_key") is None - - -async def test_db_handler( - bot: Bot, - incoming_message_factory: Callable[..., IncomingMessage], - db_session: AsyncSession, -) -> None: - # - Arrange - - message = incoming_message_factory(body="/_test-db") - record_repo = RecordRepo(db_session) - - # - Act - - await bot.async_execute_bot_command(message) - - # - Assert - - assert await record_repo.get(record_id=1) == Record( - id=1, record_data="test 1 (updated)" - ) - assert await record_repo.get_or_none(record_id=2) is None - assert await record_repo.filter_by_record_data( - record_data="test not unique data" - ) == [ - Record(id=3, record_data="test not unique data"), - Record(id=4, record_data="test not unique 
data"), - ] - assert await record_repo.get_all() == [ - Record(id=1, record_data="test 1 (updated)"), - Record(id=3, record_data="test not unique data"), - Record(id=4, record_data="test not unique data"), - ] diff --git a/tests/conftest.py b/tests/conftest.py index 73f313a..e2d6ee2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,14 +1,20 @@ +import asyncio +from asyncio import current_task from datetime import datetime from http import HTTPStatus +from pathlib import Path from typing import Any, AsyncGenerator, Callable, Dict, Generator, List, Optional -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch from uuid import UUID, uuid4 +import os import httpx import jwt import pytest import respx -from alembic import config as alembic_config +import sqlalchemy +from alembic import config as alembic_config, command +from alembic.config import Config from asgi_lifespan import LifespanManager from pybotx import ( Bot, @@ -20,26 +26,88 @@ UserSender, ) from pybotx.logger import logger -from sqlalchemy.ext.asyncio import AsyncSession - -from app.caching.redis_repo import RedisRepo +from sqlalchemy import NullPool, event +from sqlalchemy.exc import SQLAlchemyError +from sqlalchemy.ext.asyncio import ( + AsyncSession, + AsyncEngine, + create_async_engine, + async_scoped_session, + async_sessionmaker, +) +from sqlalchemy.orm import sessionmaker, Session, SessionTransaction +from testcontainers.postgres import PostgresContainer + +from app.infrastructure.caching.redis_repo import RedisRepo +from app.infrastructure.db.sqlalchemy import ( + build_db_session_factory, + AsyncSessionFactory, + make_url_async, +) +from app.infrastructure.repositories.sample_record import SampleRecordRepository from app.main import get_application -from app.settings import settings +from app.settings import settings, AppSettings +from tests.factories import SampleRecordModelFactory + + +@pytest.fixture(scope="session") +def postgres_container() -> 
Generator[PostgresContainer, None, None]: + """Starts a temporary PostgreSQL container for the test session.""" + container_name = f"bot_testing_container" + + with PostgresContainer("postgres:15").with_name(container_name) as postgres: + container_url = postgres.get_connection_url() + with patch.object(settings, "POSTGRES_DSN", container_url): + yield postgres + + +@pytest.fixture(scope="session") +def event_loop(): + """Create a session-scoped event loop for async session-scoped fixtures.""" + loop = asyncio.new_event_loop() + yield loop + loop.close() + + +@pytest.fixture(scope="session") +async def db_session_factory(postgres_container) -> AsyncSessionFactory: + engine: AsyncEngine = create_async_engine( + make_url_async(settings.POSTGRES_DSN), poolclass=NullPool + ) + + factory = async_scoped_session( + sessionmaker( + bind=engine, + expire_on_commit=False, + class_=AsyncSession, # type:ignore + ), + scopefunc=current_task, + ) + return factory + + +@pytest.fixture +async def isolated_session(db_session_factory): + """Isolated session with proper rollback to prevent test data leaks.""" + alembic_cfg = Config(str(Path(__file__).parent.parent / "alembic.ini")) + command.upgrade(alembic_cfg, "head") + async with db_session_factory() as session: + yield session + command.downgrade(alembic_cfg, "base") + + +@pytest.fixture +async def sample_record_repository(isolated_session) -> SampleRecordRepository: + return SampleRecordRepository(isolated_session) @pytest.fixture -def db_migrations() -> Generator: - alembic_config.main(argv=["upgrade", "head"]) - yield - alembic_config.main(argv=["downgrade", "base"]) - - -@pytest.hookimpl(trylast=True) -def pytest_collection_modifyitems(items: List[pytest.Function]) -> None: - # We can't use autouse, because it appends fixture to the end - # but session from db_session fixture must be closed before migrations downgrade - for item in items: - item.fixturenames = ["db_migrations"] + item.fixturenames +def sample_record_factory( 
+ isolated_session, +) -> Generator[type[SampleRecordModelFactory], None, None]: + SampleRecordModelFactory._meta.sqlalchemy_session = isolated_session + yield SampleRecordModelFactory + SampleRecordModelFactory._meta.sqlalchemy_session = None @pytest.fixture diff --git a/tests/factories.py b/tests/factories.py new file mode 100644 index 0000000..f786090 --- /dev/null +++ b/tests/factories.py @@ -0,0 +1,59 @@ +from async_factory_boy.factory.sqlalchemy import AsyncSQLAlchemyFactory +from factory import Factory, DictFactory +import factory + +from app.domain.entities.sample_record import SampleRecord +from app.infrastructure.db.sample_record.models import SampleRecordModel +from app.presentation.bot.schemas.sample_record import ( + SampleRecordCreateRequestSchema, + SampleRecordUpdateRequestSchema, + SampleRecordDeleteRequestSchema, +) + + +class SampleRecordModelFactory(AsyncSQLAlchemyFactory): + """Factory for sample record model objects in the database.""" + + class Meta: + model = SampleRecordModel + + record_data = factory.Faker("text") + + +class SampleRecordFactory(Factory): + """Factory for sample record domain objects""" + + class Meta: + model = SampleRecord + + record_data = factory.Faker("text") + + +class SampleRecordCreateSchemaFactory(Factory): + """Factory for sample record create schema objects. 
+
+    Dict factory used to break dependency from inner schema object"""
+
+    class Meta:
+        model = SampleRecordCreateRequestSchema
+
+    record_data = factory.Faker("text")
+
+
+class SampleRecordUpdateSchemaFactory(DictFactory):
+    """Factory for sample record update schema objects."""
+
+    class Meta:
+        model = SampleRecordUpdateRequestSchema
+
+    id = factory.Faker("random_int")
+    record_data = factory.Faker("text")
+
+
+class SampleRecordDeleteSchemaFactory(DictFactory):
+    """Factory for sample record delete schema objects."""
+
+    class Meta:
+        model = SampleRecordDeleteRequestSchema
+
+    id = factory.Faker("random_int")
diff --git a/tests/infrastructure/__init__.py b/tests/infrastructure/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/infrastructure/repository/__init__.py b/tests/infrastructure/repository/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/infrastructure/repository/test_sample_record_repository.py b/tests/infrastructure/repository/test_sample_record_repository.py
new file mode 100644
index 0000000..22e601b
--- /dev/null
+++ b/tests/infrastructure/repository/test_sample_record_repository.py
@@ -0,0 +1,120 @@
+import pytest
+from deepdiff import DeepDiff
+from sqlalchemy import select, func
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from app.application.repository.exceptions import RecordDoesNotExistError
+from app.domain.entities.sample_record import SampleRecord
+from app.infrastructure.db.sample_record.models import SampleRecordModel
+from app.infrastructure.repositories.sample_record import SampleRecordRepository
+from tests.factories import SampleRecordModelFactory
+
+
+def assert_database_object_equal_domain(
+    db_object: SampleRecordModel, domain_object: SampleRecord
+):
+    assert db_object.id == domain_object.id
+    assert db_object.record_data == domain_object.record_data
+
+
+async def test_add_record(
+    sample_record_repository: SampleRecordRepository,
+    sample_record_factory: 
SampleRecordModelFactory, + isolated_session: AsyncSession, +): + """Test adding a new record.""" + + new_record = SampleRecord(record_data="test_add") + created_record = await sample_record_repository.create(new_record) + + count = await isolated_session.scalar( + select(func.count()).select_from(SampleRecordModel) + ) + assert count == 1 + + diff = DeepDiff(new_record, created_record, exclude_paths={"id"}) + assert not diff, diff + + +async def test_update_record( + sample_record_repository: SampleRecordRepository, + isolated_session: AsyncSession, + sample_record_factory: SampleRecordModelFactory, +): + """Test updating an existing record.""" + + existing_record = await sample_record_factory(record_data="test_update") + + updated_record = SampleRecord( + id=existing_record.id, record_data="test_update_new_value" + ) + updated_record_in_db = await sample_record_repository.update(updated_record) + + count = await isolated_session.scalar( + select(func.count()).select_from(SampleRecordModel) + ) + assert count == 1 + assert updated_record_in_db.id == existing_record.id + assert updated_record_in_db.record_data == "test_update_new_value" + + +async def test_delete_record( + sample_record_repository: SampleRecordRepository, + isolated_session: AsyncSession, + sample_record_factory: SampleRecordModelFactory, +): + """Test deleting a record.""" + print(isolated_session) + existing_record = await sample_record_factory(record_data="test_delete") + deleted_record_id = await sample_record_repository.delete(existing_record.id) + + assert deleted_record_id == existing_record.id + count = await isolated_session.scalar( + select(func.count()).select_from(SampleRecordModel) + ) + assert count == 0 + + assert existing_record + + +async def test_delete_non_exist_record( + sample_record_repository: SampleRecordRepository, +): + """Test deleting a not existing record raises the error.""" + + with pytest.raises(RecordDoesNotExistError): + await sample_record_repository.delete(42) + 
+ +async def test_get_record( + sample_record_repository: SampleRecordRepository, sample_record_factory +): + existing_record = await sample_record_factory() + record_from_db = await sample_record_repository.get_by_id(existing_record.id) + + assert_database_object_equal_domain(existing_record, record_from_db) + + +async def test_get_non_existing_record( + sample_record_repository: SampleRecordRepository, + isolated_session: AsyncSession, +): + """Test deleting a not existing record raises the error.""" + + with pytest.raises(RecordDoesNotExistError): + await sample_record_repository.get_by_id(42) + + +async def test_get_all_records( + sample_record_repository: SampleRecordRepository, + sample_record_factory: SampleRecordModelFactory, +): + existing_records_map = { + record.id: record for record in await sample_record_factory.create_batch(4) + } + records = await sample_record_repository.get_all() + + assert len(existing_records_map) == len(records) + + for record in records: + assert_database_object_equal_domain(existing_records_map[record.id], record) diff --git a/tests/presentation/__init__.py b/tests/presentation/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/presentation/commands/__init__.py b/tests/presentation/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/commands/test_common.py.jinja b/tests/presentation/commands/test_common.py similarity index 97% rename from tests/commands/test_common.py.jinja rename to tests/presentation/commands/test_common.py index acb08d7..8bf7a45 100644 --- a/tests/commands/test_common.py.jinja +++ b/tests/presentation/commands/test_common.py @@ -70,7 +70,7 @@ async def test_chat_created_handler( # - Assert - bot.answer_message.assert_awaited_once_with( # type: ignore ( - "Вас приветствует {{bot_display_name}}!\n\n" + "Вас приветствует gubarik_bot_refactor!\n\n" "Для более подробной информации нажмите кнопку `/help`" ), bubbles=BubbleMarkup([[Button(command="/help", 
label="/help")]]), diff --git a/tests/presentation/endpoints/__init__.py b/tests/presentation/endpoints/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/endpoints/conftest.py b/tests/presentation/endpoints/conftest.py similarity index 60% rename from tests/endpoints/conftest.py rename to tests/presentation/endpoints/conftest.py index ddf4cad..8c184b8 100644 --- a/tests/endpoints/conftest.py +++ b/tests/presentation/endpoints/conftest.py @@ -1,4 +1,4 @@ -from tests.endpoints.fixtures import ( # noqa: F401 +from tests.presentation.endpoints.fixtures import ( # noqa: F401 base_command_payload, command_payload_v3, command_payload_v4, diff --git a/tests/endpoints/fixtures.py b/tests/presentation/endpoints/fixtures.py similarity index 100% rename from tests/endpoints/fixtures.py rename to tests/presentation/endpoints/fixtures.py diff --git a/tests/endpoints/test_botx.py b/tests/presentation/endpoints/test_botx.py similarity index 98% rename from tests/endpoints/test_botx.py rename to tests/presentation/endpoints/test_botx.py index 52f1602..759d0a6 100644 --- a/tests/endpoints/test_botx.py +++ b/tests/presentation/endpoints/test_botx.py @@ -225,6 +225,5 @@ def test__web_app__unsupported_bot_api_version_service_unavailable( status_message = response.json()["error_data"]["status_message"] assert status_message == ( - "Unsupported Bot API version: `3`. " - "Set protocol version to `4` in Admin panel." + "Unsupported Bot API version: `3`. Set protocol version to `4` in Admin panel." 
) diff --git a/tests/presentation/unit/__init__.py b/tests/presentation/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/{% if from_ccsteam %}.gitlab-ci.yml{% endif %}.jinja b/{% if from_ccsteam %}.gitlab-ci.yml{% endif %}.jinja deleted file mode 100644 index 71525d4..0000000 --- a/{% if from_ccsteam %}.gitlab-ci.yml{% endif %}.jinja +++ /dev/null @@ -1,219 +0,0 @@ -# YAML objects named with dot is anchors, which are not recognized as jobs -.run_bot: &run_bot - - docker rm -f ${CONTAINER_NAME} || true - - docker pull ${CONTAINER_RELEASE_IMAGE} - # Add envs for app here. Don't forget to add them in example.env and docker-compose files. - - docker run - -d - $MEMORY_LIMIT - --name ${CONTAINER_NAME} - --restart always - --label traefik.http.routers.${BOT_PROJECT_NAME}.rule="Host(\`${BOT_URL}\`)" - --label traefik.enable=true - --label traefik.http.services.${BOT_PROJECT_NAME}.loadbalancer.server.port="8000" - --log-opt max-size=10m - --log-opt max-file=5 - -e POSTGRES_DSN="${POSTGRES_DSN}" - -e REDIS_DSN="${REDIS_DSN}" - -e BOT_CREDENTIALS="${BOT_CREDENTIALS}" - -e DEBUG="${DEBUG:-false}" - ${CONTAINER_RELEASE_IMAGE} - {% if add_worker -%} - - docker rm -f ${CONTAINER_NAME}-worker || true - # Add envs for worker here - - docker run - -d - $MEMORY_LIMIT_WORKER - --name ${CONTAINER_NAME}-worker - --restart always - --log-opt max-size=10m - --log-opt max-file=5 - -e POSTGRES_DSN="${POSTGRES_DSN}" - -e REDIS_DSN="${REDIS_DSN}" - -e BOT_CREDENTIALS="${BOT_CREDENTIALS}" - -e DEBUG="${DEBUG:-false}" - ${CONTAINER_RELEASE_IMAGE} - bash -c 'PYTHONPATH="$PYTHONPATH:$PWD" saq app.worker.worker.settings' - {%- endif %} - -.create_db: &create_db - - psql -c "create user \"${POSTGRES_USER}\"" postgres || true - - psql -c "alter user \"${POSTGRES_USER}\" with password '${POSTGRES_PASSWORD}'" postgres - - psql -c "create database \"${POSTGRES_DB}\" with owner \"${POSTGRES_USER}\"" postgres || true - -.install_dependencies: &install_dependencies - - echo -e 
"machine ${GIT_HOST}\nlogin gitlab-ci-token\npassword ${CI_JOB_TOKEN}" > ~/.netrc - - pip install -q poetry - - poetry config virtualenvs.in-project true - - poetry install - -.cache_dependencies: &cache_dependencies - key: - files: - - poetry.lock - prefix: "venv" - paths: - - .cache/pip - - .venv - -.postgres_envs: &postgres_envs - - POSTGRES_USER=${CONTAINER_NAME} - - POSTGRES_DB=${CONTAINER_NAME} - - POSTGRES_PASSWORD=$(openssl rand -hex 16) - - POSTGRES_HOST=${PROD_POSTGRES_HOST} - - POSTGRES_DSN=postgres://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST/$POSTGRES_DB - -# Jobs -variables: - GIT_DEPTH: 1 # Fetch only latest commit - PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip" - -stages: - - check - - build - - security - - deploy - -default: - interruptible: true - -lint: - image: python:3.10 - stage: check - tags: - - docker - cache: *cache_dependencies - before_script: - - *install_dependencies - script: - - poetry run ./scripts/lint - -test: - image: python:3.10 - stage: check - tags: - - docker - services: - - postgres:15.3-alpine - - redis:7.0-alpine - cache: *cache_dependencies - variables: - BOT_CREDENTIALS: cts.example.com@secret@123e4567-e89b-12d3-a456-426655440000 - POSTGRES_DSN: postgres://postgres:postgres@postgres/postgres - REDIS_DSN: redis://redis/0 - before_script: - - *install_dependencies - script: - - poetry run pytest --cov-config=setup.cfg - coverage: '/Total coverage: \d\d\d.\d\d%/' - -security: - stage: security - allow_failure: true - trigger: - include: - - project: devsecops/pipelines - file: integration_templates/python.yml - - -build: - image: docker:latest - stage: build - tags: - - docker - before_script: - - docker info - - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY - - CONTAINER_RELEASE_IMAGE="$CI_REGISTRY_IMAGE:${CI_COMMIT_TAG:-$CI_COMMIT_REF_SLUG}" - script: - - docker pull $CONTAINER_RELEASE_IMAGE || true - - docker build - --cache-from $CONTAINER_RELEASE_IMAGE - --build-arg GIT_HOST=$GIT_HOST - 
--build-arg CI_JOB_TOKEN=$CI_JOB_TOKEN - --build-arg CI_COMMIT_SHA=$CI_COMMIT_SHA - --force-rm - -t $CONTAINER_RELEASE_IMAGE . - - docker push $CONTAINER_RELEASE_IMAGE - - docker rmi $CONTAINER_RELEASE_IMAGE - -deploy.botstest: - image: docker:latest - stage: deploy - tags: - - bots-test - only: - - branches - when: manual - environment: - name: test - on_stop: deploy.botstest.stop - variables: - # https://docs.gitlab.com/ee/ci/runners/configure_runners.html#git-strategy - GIT_STRATEGY: none - before_script: - - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY - - if [ -z ${BOT_PROJECT_NAME:-} ]; then BOT_PROJECT_NAME=${CI_PROJECT_PATH_SLUG#"$CI_PROJECT_NAMESPACE-"}; fi - - CONTAINER_NAME=$BOT_PROJECT_NAME - - CONTAINER_RELEASE_IMAGE="$CI_REGISTRY_IMAGE:${CI_COMMIT_TAG:-$CI_COMMIT_REF_SLUG}" - - BOT_URL="${BOT_PROJECT_NAME}.${DEV_SERVER_HOST}" - - BOT_CREDENTIALS=$DEV_BOT_CREDENTIALS - - *postgres_envs - - REDIS_DSN=redis://${DOCKER_NETWORK_IP}/1 - script: - - echo "Use URL 'https://${BOT_URL}/' in your cts admin site" - - echo "Using credentials ${BOT_CREDENTIALS}" - - echo "Deploing Docker container ${CONTAINER_NAME}" - - *create_db - - *run_bot - -deploy.botstest.stop: - when: manual - environment: - name: test - action: stop - extends: deploy.botstest - script: - - docker rm -f ${CONTAINER_NAME} || true - {% if add_worker -%} - - docker rm -f ${CONTAINER_NAME} ${CONTAINER_NAME}-worker || true - {%- endif %} - - psql -c "select pg_terminate_backend(pid) from pg_stat_activity \ - where datname = '${POSTGRES_DB}';" postgres || true - - psql -c "drop database \"${POSTGRES_DB}\"" postgres || true - - psql -c "drop user \"${POSTGRES_USER}\"" postgres || true - -deploy.botsprod: - stage: deploy - image: docker:latest - tags: - - bots-prod - only: - # Note the bots-prod worker requires branch to be protected - - master - when: manual - environment: - name: production - variables: - # 
https://docs.gitlab.com/ee/ci/runners/configure_runners.html#git-strategy - GIT_STRATEGY: none - before_script: - - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY - - if [ -z ${BOT_PROJECT_NAME:-} ]; then BOT_PROJECT_NAME=${CI_PROJECT_PATH_SLUG#"$CI_PROJECT_NAMESPACE-"}; fi - - CONTAINER_NAME=$BOT_PROJECT_NAME - - CONTAINER_RELEASE_IMAGE="$CI_REGISTRY_IMAGE:${CI_COMMIT_TAG:-$CI_COMMIT_REF_SLUG}" - - BOT_URL="${BOT_PROJECT_NAME}.${PROD_SERVER_HOST}" - - *postgres_envs - - REDIS_DSN=redis://${DOCKER_NETWORK_IP}/1 - - MEMORY_LIMIT=--memory=100m - {% if add_worker -%} - - MEMORY_LIMIT_WORKER=-memory=130m - {%- endif %} - script: - - echo "Use URL 'https://${BOT_URL}/' in your cts admin site" - - echo "Using credentials ${BOT_CREDENTIALS}" - - echo "Deploing Docker container ${CONTAINER_NAME}" - - *create_db - - *run_bot - needs: - - job: security diff --git a/{{_copier_conf.answers_file}}.jinja b/{{_copier_conf.answers_file}}.jinja deleted file mode 100644 index ee70a21..0000000 --- a/{{_copier_conf.answers_file}}.jinja +++ /dev/null @@ -1,2 +0,0 @@ -# Changes here will be overwritten by Copier -{{_copier_answers|to_nice_yaml}} From 50cd541e0d7894ba0583058a2123ea0baab0fcdc Mon Sep 17 00:00:00 2001 From: vladimirgubarik Date: Fri, 18 Jul 2025 12:24:48 +0300 Subject: [PATCH 02/15] tests passed --- app/application/repository/interfaces.py | 11 +- app/application/service/interfaces.py | 8 - app/application/use_cases/healthcheck.py | 25 -- app/application/use_cases/interfaces.py | 2 +- app/application/use_cases/record_use_cases.py | 6 +- .../service => decorators}/__init__.py | 0 .../bot_exception_answer.py | 8 +- app/decorators/exception_mapper.py | 194 ++++++++++++++ .../exceptions_mapper.py | 17 +- app/domain/entities/healthcheck.py | 18 -- app/domain/entities/sample_record.py | 5 +- app/domain/exceptions/domain_exceptions.py | 4 +- .../caching/callback_redis_repo.py | 2 +- app/infrastructure/caching/redis_repo.py | 2 +- 
app/infrastructure/containers.py | 4 +- app/infrastructure/db/migrations/env.py | 1 - .../migrations/versions/765dcfed2d16_init.py | 3 +- app/infrastructure/db/sample_record/models.py | 2 +- app/infrastructure/db/sqlalchemy.py | 10 +- .../repositories/sample_record.py | 12 +- app/infrastructure/services/healthcheck.py | 48 ---- app/infrastructure/worker/worker.py | 2 +- app/logger.py | 2 +- app/main.py | 10 +- app/presentation/api/botx.py | 4 +- app/presentation/api/healthcheck.py | 41 --- app/presentation/api/schemas/healthcheck.py | 24 -- app/presentation/bot/bot.py | 9 +- .../bot/commands/command_listing.py | 7 - app/presentation/bot/commands/common.py | 18 -- .../bot/commands/sample_record_fsm.py | 26 -- .../bot/commands/sample_record_simple.py | 6 +- app/presentation/bot/handlers/command.py | 4 +- app/presentation/bot/handlers/error.py | 4 +- .../bot/handlers/sample_record.py | 9 +- app/presentation/bot/resources/strings.py | 12 +- app/presentation/bot/schemas/sample_record.py | 6 +- app/presentation/bot/validators/base.py | 5 +- .../bot/validators/sample_record.py | 9 +- app/presentation/dependencies/healthcheck.py | 2 +- app/services/healthcheck.py | 39 --- app/settings.py | 8 +- pyproject.toml | 4 +- ruff.toml | 5 + scripts/format | 8 - scripts/lint | 6 +- scripts/test | 4 +- setup.cfg | 166 +++--------- tests/conftest.py | 48 ++-- tests/factories.py | 6 +- .../integration}/__init__.py | 0 .../integration/endpoints}/__init__.py | 0 .../endpoints/conftest.py | 2 +- .../endpoints/fixtures.py | 0 .../endpoints/test_botx.py | 91 ------- .../integration/repository}/__init__.py | 0 .../test_sample_record_repository.py | 28 +- .../sample_record_use_cases}/__init__.py | 0 .../sample_record_use_cases}/conftest.py | 2 - .../test_sample_record_use_cases_int.py | 10 +- tests/presentation/endpoints/__init__.py | 0 tests/presentation/unit/__init__.py | 0 tests/services/__init__.py | 0 tests/services/test_botx_user_search.py | 80 ------ .../integration => 
unit}/__init__.py | 0 .../unit => unit/bot_commands}/__init__.py | 0 .../bot_commands}/test_common.py | 31 +-- .../decorators}/__init__.py | 0 tests/unit/decorators/test_classes.py | 41 +++ tests/unit/decorators/test_errors_mapper.py | 253 ++++++++++++++++++ .../repository => unit/use_cases}/__init__.py | 0 .../unit => unit/use_cases}/conftest.py | 3 +- .../use_cases}/fake_repository.py | 5 +- .../test_sample_record_use_cases.py | 30 ++- 74 files changed, 693 insertions(+), 759 deletions(-) delete mode 100644 app/application/service/interfaces.py delete mode 100644 app/application/use_cases/healthcheck.py rename app/{application/service => decorators}/__init__.py (100%) rename app/{utils => decorators}/bot_exception_answer.py (90%) create mode 100644 app/decorators/exception_mapper.py rename app/{utils => decorators}/exceptions_mapper.py (91%) delete mode 100644 app/domain/entities/healthcheck.py delete mode 100644 app/infrastructure/services/healthcheck.py delete mode 100644 app/presentation/api/healthcheck.py delete mode 100644 app/presentation/api/schemas/healthcheck.py delete mode 100644 app/presentation/bot/commands/sample_record_fsm.py delete mode 100644 app/services/healthcheck.py rename {app/infrastructure/services => tests/integration}/__init__.py (100%) rename {app/presentation/api/schemas => tests/integration/endpoints}/__init__.py (100%) rename tests/{presentation => integration}/endpoints/conftest.py (60%) rename tests/{presentation => integration}/endpoints/fixtures.py (100%) rename tests/{presentation => integration}/endpoints/test_botx.py (62%) rename {app/utils => tests/integration/repository}/__init__.py (100%) rename tests/{infrastructure => integration}/repository/test_sample_record_repository.py (82%) rename tests/{application => integration/sample_record_use_cases}/__init__.py (100%) rename tests/{application/integration => integration/sample_record_use_cases}/conftest.py (77%) rename tests/{application/integration => 
integration/sample_record_use_cases}/test_sample_record_use_cases_int.py (91%) delete mode 100644 tests/presentation/endpoints/__init__.py delete mode 100644 tests/presentation/unit/__init__.py delete mode 100644 tests/services/__init__.py delete mode 100644 tests/services/test_botx_user_search.py rename tests/{application/integration => unit}/__init__.py (100%) rename tests/{application/unit => unit/bot_commands}/__init__.py (100%) rename tests/{presentation/commands => unit/bot_commands}/test_common.py (69%) rename tests/{infrastructure => unit/decorators}/__init__.py (100%) create mode 100644 tests/unit/decorators/test_classes.py create mode 100644 tests/unit/decorators/test_errors_mapper.py rename tests/{infrastructure/repository => unit/use_cases}/__init__.py (100%) rename tests/{application/unit => unit/use_cases}/conftest.py (73%) rename tests/{application/unit => unit/use_cases}/fake_repository.py (91%) rename tests/{application/unit => unit/use_cases}/test_sample_record_use_cases.py (82%) diff --git a/app/application/repository/interfaces.py b/app/application/repository/interfaces.py index cdf5242..f3a486e 100644 --- a/app/application/repository/interfaces.py +++ b/app/application/repository/interfaces.py @@ -1,7 +1,7 @@ """Record repository interface.""" from abc import ABC, abstractmethod -from typing import List, Optional +from typing import List from app.domain.entities.sample_record import SampleRecord @@ -34,7 +34,7 @@ async def update(self, record: SampleRecord) -> SampleRecord: pass @abstractmethod - async def delete(self, record_id: int) -> int: + async def delete(self, record_id: int) -> None: """ Delete a record from the database by provided id @@ -42,12 +42,7 @@ async def delete(self, record_id: int) -> int: record_id: The unique identifier of the record to be deleted. Returns: - None - - This method does not return any value. - - Raises: - NotImplementedError: If this method is not overridden in the implementing class. 
+ An id of the deleted record """ pass diff --git a/app/application/service/interfaces.py b/app/application/service/interfaces.py deleted file mode 100644 index 1ea9ad1..0000000 --- a/app/application/service/interfaces.py +++ /dev/null @@ -1,8 +0,0 @@ -from abc import ABC, abstractmethod -from typing import Optional - - -class HealthCheckService(ABC): - @abstractmethod - async def check(self) -> Optional[str]: # Return error or None - ... diff --git a/app/application/use_cases/healthcheck.py b/app/application/use_cases/healthcheck.py deleted file mode 100644 index 2d105e4..0000000 --- a/app/application/use_cases/healthcheck.py +++ /dev/null @@ -1,25 +0,0 @@ -from app.application.service.interfaces import HealthCheckService -from app.domain.entities.healthcheck import ( - HealthCheckStatuses, - HealthCheckServiceResult, -) - - -class HealthCheckUseCase: - def __init__(self, services: list[tuple[str, HealthCheckService]]): - self.services = services - - async def execute( - self, - ) -> tuple[HealthCheckStatuses, list[HealthCheckServiceResult]]: - results = [] - healthy = True - - for name, service in self.services: - error = await service.check() - results.append(HealthCheckServiceResult(name=name, error=error)) - if error: - healthy = False - - status = HealthCheckStatuses.OK if healthy else HealthCheckStatuses.ERROR - return status, results diff --git a/app/application/use_cases/interfaces.py b/app/application/use_cases/interfaces.py index d3aff69..f09db34 100644 --- a/app/application/use_cases/interfaces.py +++ b/app/application/use_cases/interfaces.py @@ -4,9 +4,9 @@ from app.presentation.bot.schemas.sample_record import ( SampleRecordCreateRequestSchema, + SampleRecordResponseListSchema, SampleRecordResponseSchema, SampleRecordUpdateRequestSchema, - SampleRecordResponseListSchema, ) diff --git a/app/application/use_cases/record_use_cases.py b/app/application/use_cases/record_use_cases.py index 2b6bb90..ea18e38 100644 --- 
a/app/application/use_cases/record_use_cases.py +++ b/app/application/use_cases/record_use_cases.py @@ -5,9 +5,9 @@ from app.domain.entities.sample_record import SampleRecord from app.presentation.bot.schemas.sample_record import ( SampleRecordCreateRequestSchema, + SampleRecordResponseListSchema, SampleRecordResponseSchema, SampleRecordUpdateRequestSchema, - SampleRecordResponseListSchema, ) @@ -34,9 +34,9 @@ async def update_record( updated_record = await self._repo.update(domain_object) return SampleRecordResponseSchema.from_orm(updated_record) - async def delete_record(self, record_id: int) -> int: + async def delete_record(self, record_id: int) -> None: """Delete a record.""" - return await self._repo.delete(record_id) + await self._repo.delete(record_id) async def get_record(self, record_id: int) -> SampleRecordResponseSchema: """Get a record by ID.""" diff --git a/app/application/service/__init__.py b/app/decorators/__init__.py similarity index 100% rename from app/application/service/__init__.py rename to app/decorators/__init__.py diff --git a/app/utils/bot_exception_answer.py b/app/decorators/bot_exception_answer.py similarity index 90% rename from app/utils/bot_exception_answer.py rename to app/decorators/bot_exception_answer.py index dc0e56d..db71832 100644 --- a/app/utils/bot_exception_answer.py +++ b/app/decorators/bot_exception_answer.py @@ -19,8 +19,8 @@ def _get_user_message( return exception_message_to_user -def explain_exception_to_user( # noqa: WPS231 - mapping: dict[type[Exception], str | Callable[[Exception], str]], # noqa: WPS221 +def explain_exception_to_user( + mapping: dict[type[Exception], str | Callable[[Exception], str]], ) -> Callable: """ Decorate a function to catch specified exceptions and send a response to the user. 
@@ -43,10 +43,10 @@ async def wrapper(bot: Bot, *args, **kwargs) -> Any: # type: ignore *args, **kwargs, ) - except tuple(mapping.keys()) as exc: # noqa: WPS455 + except tuple(mapping.keys()) as exc: if (message := _get_user_message(mapping, exc)) is not None: await bot.answer_message(message) - raise # noqa: WPS220 + raise return wrapper diff --git a/app/decorators/exception_mapper.py b/app/decorators/exception_mapper.py new file mode 100644 index 0000000..946216d --- /dev/null +++ b/app/decorators/exception_mapper.py @@ -0,0 +1,194 @@ +"""Decorators to rethrow and log exceptions.""" + +from abc import ABC, abstractmethod +from functools import cached_property, wraps +from inspect import iscoroutinefunction +from typing import Any, Callable, Type + +from cachetools import LRUCache # type:ignore + +from app.logger import logger + + +class ExceptionContext: + SENSITIVE_KEYS: frozenset[str] = frozenset( + ("password", "token", "key", "secret", "auth", "credential", "passwd") + ) + + def __init__( + self, + original_exception: Exception, + func: Callable, + args: tuple[Any, ...], + kwargs: dict[str, Any], + ): + self.original_exception = original_exception + self.func = func + self.args = args + self.kwargs = kwargs + + @cached_property + def formatted_context(self) -> str: + error_context = [ + f"Error in function '{self.func.__module__}.{self.func.__qualname__}'" + ] + + if self.args: + args_str = ", ".join(self._sanitised_value(arg) for arg in self.args) + error_context.append(f"Args: [{args_str}]") + + if self.kwargs: + kwargs_str = ", ".join( + f"{k}={self._sanitised_value(v, k)}" for k, v in self.kwargs.items() + ) + error_context.append(f"Kwargs: {kwargs_str}") + + return "\n".join(error_context).replace("{", "{{").replace("}", "}}") + + def _sanitised_value( + self, + value: Any, + key: str | None = None, + ) -> str: + if key is not None and key.lower() in self.SENSITIVE_KEYS: + return "****HIDDEN****" + + try: + str_value = str(value) + return 
f"{str_value[:100]}..." if len(str_value) > 100 else str_value + except Exception: + return f"<{type(value).__name__} object - str() failed>" + + +class ExceptionFactory(ABC): + """ + Create and describe a factory for exceptions. + + This class is an abstract base class meant to define the interface for an + exception factory. + + """ + + @abstractmethod + def make_exception(self, context: ExceptionContext) -> Exception: + """Make an exception based on the given context.""" + + +class EnrichedExceptionFactory(ExceptionFactory): + """ + Create and manage enriched exceptions based on a given exception type. + + This class provides a mechanism to create exceptions dynamically, + enriching them with a formatted context. It extends the behavior of + the base ExceptionFactory class by incorporating the concept of a + generated error type and formatted context. + + :ivar generated_error: The type of exception to generate when creating + an enriched exception. + :type generated_error: type[Exception] + """ + + def __init__(self, generated_error: type[Exception]): + self.generated_error = generated_error + + def make_exception(self, context: ExceptionContext) -> Exception: + return self.generated_error(context.formatted_context) + + +ExceptionOrTupleOfExceptions = Type[Exception] | tuple[Type[Exception], ...] 
+ + +class ExceptionMapper: + """Exception-mapping decorator with bounded LRU caching and dynamic MRO lookup.""" + + def __init__( + self, + exception_map: dict[ExceptionOrTupleOfExceptions, ExceptionFactory], + max_cache_size: int = 512, + log_error: bool = True, + is_bound_method: bool = False, + ): + self.mapping = self._get_flat_map(exception_map) + self.exception_catchall_factory = self.mapping.pop(Exception, None) + self._lru_cache: LRUCache = LRUCache(maxsize=max_cache_size) + self.log_error = log_error + self.is_bound_method = is_bound_method + + def __call__(self, func: Callable) -> Callable: + return ( + self._async_wrapper(func) + if iscoroutinefunction(func) + else self._sync_wrapper(func) + ) + + def _get_flat_map( + self, + exception_map: dict[ExceptionOrTupleOfExceptions, ExceptionFactory], + ) -> dict[Type[Exception], ExceptionFactory]: + flat_map: dict[Type[Exception], ExceptionFactory] = {} + for exception_class, factory in exception_map.items(): + if isinstance(exception_class, tuple): + for exc_type in exception_class: + flat_map[exc_type] = factory + else: + flat_map[exception_class] = factory + return flat_map + + def _async_wrapper(self, func: Callable) -> Callable: + @wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + try: + return await func(*args, **kwargs) + except Exception as exc: + self._handle_exception_logic(exc, func, args, kwargs) + + return wrapper + + def _sync_wrapper(self, func: Callable) -> Callable: + @wraps(func) + def wrapper(*args: Any, **kwargs: Any) -> Any: + try: + return func(*args, **kwargs) + except Exception as exc: + self._handle_exception_logic(exc, func, args, kwargs) + + return wrapper + + def _filtered_args(self, args: tuple[Any, ...]) -> tuple[Any, ...]: + return args[1:] if args and self.is_bound_method else args + + def _handle_exception_logic( + self, + exc: Exception, + func: Callable, + args: tuple[Any, ...], + kwargs: dict[str, Any], + ) -> None: + context = ExceptionContext(exc, func, 
self._filtered_args(args), kwargs) + if self.log_error: + logger.error(context.formatted_context, exc_info=True) + + if exception_factory := self._get_exception_factory(type(exc)): + raise exception_factory.make_exception(context) from exc + + raise exc + + def _get_exception_factory( + self, exc_type: Type[Exception] + ) -> ExceptionFactory | None: + # Try to get from_cache + if cached_factory := self._lru_cache.get(exc_type): + return cached_factory + + # Try to find exception parents in base mapping and put to cache if found + for exc_class in exc_type.mro(): + if target_exception_factory := self.mapping.get(exc_class): # type:ignore + self._lru_cache[exc_type] = target_exception_factory + return target_exception_factory + + # exception is not presented in base mapping, but Exception in base mapping + if self.exception_catchall_factory: + self._lru_cache[exc_type] = self.exception_catchall_factory + return self.exception_catchall_factory + + return None diff --git a/app/utils/exceptions_mapper.py b/app/decorators/exceptions_mapper.py similarity index 91% rename from app/utils/exceptions_mapper.py rename to app/decorators/exceptions_mapper.py index 7f40e34..7c8d3c1 100644 --- a/app/utils/exceptions_mapper.py +++ b/app/decorators/exceptions_mapper.py @@ -9,9 +9,9 @@ FunctionType = TypeVar("FunctionType", bound=Callable[..., Any]) -CatchExceptionClass = Union[Type[Exception], Tuple[Type[Exception], ...]] # noqa: WPS221 -T = TypeVar("T") # noqa:WPS111 -Decorator = Callable[[Callable[..., T]], Callable[..., T]] # noqa: WPS221 +CatchExceptionClass = Union[Type[Exception], Tuple[Type[Exception], ...]] +T = TypeVar("T") +Decorator = Callable[[Callable[..., T]], Callable[..., T]] def _get_error_message( @@ -39,7 +39,7 @@ def _get_error_message( error_context = [ f"Error in function '{func.__module__}.{func.__qualname__}'", - f"Original exception: {ex.__class__.__name__}: {str(ex)}", # noqa: WPS237 + f"Original exception: {ex.__class__.__name__}: {str(ex)}", ] 
filtered_args = args[1:] if args and inspect.ismethod(func) else args @@ -49,10 +49,7 @@ def _get_error_message( error_context.append(f"Args: [{args_str}]") if kwargs: - kwargs_str = ", ".join( - f"{k}={str(v)[:100]}" # noqa: WPS237, WPS221 - for k, v in kwargs.items() # noqa: WPS111 - ) + kwargs_str = ", ".join(f"{k}={str(v)[:100]}" for k, v in kwargs.items()) error_context.append(f"Kwargs: {kwargs_str}") return "\n".join(error_context) @@ -68,7 +65,7 @@ def _create_sync_wrapper( """Create a synchronous wrapper function for exception mapping.""" @wraps(func) - def sync_wrapper(*args: Any, **kwargs: Any) -> Any: # noqa: WPS430 + def sync_wrapper(*args: Any, **kwargs: Any) -> Any: try: return func(*args, **kwargs) except catch_exceptions as ex: @@ -92,7 +89,7 @@ def _create_async_wrapper( """Create an asynchronous wrapper function for exception mapping.""" @wraps(func) - async def async_wrapper(*args: Any, **kwargs: Any) -> Any: # noqa: WPS430 + async def async_wrapper(*args: Any, **kwargs: Any) -> Any: try: return await func(*args, **kwargs) except catch_exceptions as ex: diff --git a/app/domain/entities/healthcheck.py b/app/domain/entities/healthcheck.py deleted file mode 100644 index 47d1931..0000000 --- a/app/domain/entities/healthcheck.py +++ /dev/null @@ -1,18 +0,0 @@ -from dataclasses import dataclass -from enum import Enum -from typing import Optional - - -@dataclass -class HealthCheckServiceResult: - name: str - error: Optional[str] - - -class StrEnum(str, Enum): # noqa: WPS600 - """Base enum.""" - - -class HealthCheckStatuses(StrEnum): - OK = "ok" - ERROR = "error" diff --git a/app/domain/entities/sample_record.py b/app/domain/entities/sample_record.py index 026b6a2..52a3c6e 100644 --- a/app/domain/entities/sample_record.py +++ b/app/domain/entities/sample_record.py @@ -1,9 +1,8 @@ """Record entity for the domain layer.""" from dataclasses import dataclass -from typing import Optional -from app.domain.exceptions.domain_exceptions import WrongRecordData 
+from app.domain.exceptions.domain_exceptions import WrongRecordDataError @dataclass @@ -22,4 +21,4 @@ def __post_init__(self) -> None: For example for some reason record data shouldn't start with A123 """ if self.record_data.startswith("A123"): - raise WrongRecordData("Record data shouldn't start with A") + raise WrongRecordDataError("Record data shouldn't start with A") diff --git a/app/domain/exceptions/domain_exceptions.py b/app/domain/exceptions/domain_exceptions.py index c732f51..ba734d4 100644 --- a/app/domain/exceptions/domain_exceptions.py +++ b/app/domain/exceptions/domain_exceptions.py @@ -1,7 +1,7 @@ """Domain-specific exceptions.""" -class DomainException(Exception): +class DomainError(Exception): """Base exception for all domain-specific exceptions.""" def __init__(self, message: str = "Domain error occurred"): @@ -9,7 +9,7 @@ def __init__(self, message: str = "Domain error occurred"): super().__init__(self.message) -class WrongRecordData(DomainException): +class WrongRecordDataError(DomainError): """Raised when record data is not valid.""" def __init__(self, message: str = "Wrong record data"): diff --git a/app/infrastructure/caching/callback_redis_repo.py b/app/infrastructure/caching/callback_redis_repo.py index 0cfb6ed..c15cddc 100644 --- a/app/infrastructure/caching/callback_redis_repo.py +++ b/app/infrastructure/caching/callback_redis_repo.py @@ -59,7 +59,7 @@ async def set_botx_method_callback_result( callback: The callback data to publish. Raises: - BotXMethodCallbackNotFoundError: If no subscriber is listening on the channel. + BotXMethodCallbackNotFoundError: If no subscriber is listening. 
""" dump = pickle.dumps(callback) status_code = await self._redis.publish( diff --git a/app/infrastructure/caching/redis_repo.py b/app/infrastructure/caching/redis_repo.py index 4bc3008..7c3ea2c 100644 --- a/app/infrastructure/caching/redis_repo.py +++ b/app/infrastructure/caching/redis_repo.py @@ -57,4 +57,4 @@ def _key(self, arg: Hashable) -> str: else: prefix = "" - return prefix + hashlib.md5(pickle.dumps(arg)).hexdigest() # noqa: S303 + return prefix + hashlib.md5(pickle.dumps(arg)).hexdigest() # noqa: S324 diff --git a/app/infrastructure/containers.py b/app/infrastructure/containers.py index c6086f0..5ec979c 100644 --- a/app/infrastructure/containers.py +++ b/app/infrastructure/containers.py @@ -1,11 +1,11 @@ from dependency_injector import containers -from dependency_injector.providers import Factory, Callable, Singleton +from dependency_injector.providers import Callable, Factory, Singleton from redis import asyncio as aioredis from app.application.use_cases.record_use_cases import SampleRecordUseCases from app.infrastructure.caching.redis_repo import RedisRepo -from app.infrastructure.repositories.sample_record import SampleRecordRepository from app.infrastructure.db.sqlalchemy import build_db_session_factory +from app.infrastructure.repositories.sample_record import SampleRecordRepository from app.presentation.bot.resources import strings from app.settings import settings diff --git a/app/infrastructure/db/migrations/env.py b/app/infrastructure/db/migrations/env.py index 9812b7b..1980855 100644 --- a/app/infrastructure/db/migrations/env.py +++ b/app/infrastructure/db/migrations/env.py @@ -13,7 +13,6 @@ from app.infrastructure.db.sqlalchemy import Base, make_url_sync # isort:skip # Import models to make them visible by alembic -import app.infrastructure.db.sample_record.models # isort:skip postgres_dsn = make_url_sync(settings.POSTGRES_DSN) context_config = context.config diff --git a/app/infrastructure/db/migrations/versions/765dcfed2d16_init.py 
b/app/infrastructure/db/migrations/versions/765dcfed2d16_init.py index 50deaef..4c7c2dc 100644 --- a/app/infrastructure/db/migrations/versions/765dcfed2d16_init.py +++ b/app/infrastructure/db/migrations/versions/765dcfed2d16_init.py @@ -7,9 +7,8 @@ Doc: https://alembic.sqlalchemy.org/en/latest/tutorial.html#create-a-migration-script """ -from alembic import op import sqlalchemy as sa - +from alembic import op revision = "765dcfed2d16" down_revision = None diff --git a/app/infrastructure/db/sample_record/models.py b/app/infrastructure/db/sample_record/models.py index a71e741..669bcae 100644 --- a/app/infrastructure/db/sample_record/models.py +++ b/app/infrastructure/db/sample_record/models.py @@ -10,7 +10,7 @@ class SampleRecordModel(Base): __tablename__ = "sample_record" - id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True) # noqa: WPS125 + id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True) record_data: Mapped[str] def __repr__(self) -> str: diff --git a/app/infrastructure/db/sqlalchemy.py b/app/infrastructure/db/sqlalchemy.py index 4de8c1d..5e6605c 100644 --- a/app/infrastructure/db/sqlalchemy.py +++ b/app/infrastructure/db/sqlalchemy.py @@ -2,8 +2,8 @@ from asyncio import current_task from contextlib import asynccontextmanager -from functools import wraps, lru_cache -from typing import Callable, Any +from functools import lru_cache, wraps +from typing import Any, Callable from sqlalchemy import MetaData from sqlalchemy.ext.asyncio import ( @@ -23,12 +23,12 @@ def make_url_async(url: str) -> str: """Add +asyncpg to url scheme.""" - return "postgresql+asyncpg" + url[url.find(":") :] # noqa: WPS336 + return "postgresql+asyncpg" + url[url.find(":") :] def make_url_sync(url: str) -> str: """Remove +asyncpg from url scheme.""" - return "postgresql" + url[url.find(":") :] # noqa: WPS336 + return "postgresql" + url[url.find(":") :] convention = { @@ -61,7 +61,7 @@ async def build_db_session_factory() -> AsyncSessionFactory: 
@asynccontextmanager -async def session_resource(): +async def session_resource() -> AsyncSession: factory = await build_db_session_factory() session: AsyncSession = factory() try: diff --git a/app/infrastructure/repositories/sample_record.py b/app/infrastructure/repositories/sample_record.py index fed23a2..f829643 100644 --- a/app/infrastructure/repositories/sample_record.py +++ b/app/infrastructure/repositories/sample_record.py @@ -1,22 +1,22 @@ """Record repository implementation.""" -from typing import List, Optional +from typing import List from sqlalchemy import delete, insert, select, update from sqlalchemy.exc import NoResultFound, SQLAlchemyError from app.application.repository.exceptions import ( - RecordDoesNotExistError, - RecordUpdateError, RecordCreateError, RecordDeleteError, + RecordDoesNotExistError, RecordRetreiveError, + RecordUpdateError, ) from app.application.repository.interfaces import ISampleRecordRepository +from app.decorators.exceptions_mapper import exception_mapper from app.domain.entities.sample_record import SampleRecord from app.infrastructure.db.sample_record.models import SampleRecordModel from app.infrastructure.db.sqlalchemy import AsyncSession -from app.utils.exceptions_mapper import exception_mapper class SampleRecordRepository(ISampleRecordRepository): @@ -66,7 +66,7 @@ async def update(self, record: SampleRecord) -> SampleRecord: @exception_mapper( catch_exceptions=SQLAlchemyError, raise_exception=RecordDeleteError ) - async def delete(self, record_id: int) -> int: + async def delete(self, record_id: int) -> None: """Delete a record. 
Args: @@ -88,8 +88,6 @@ async def delete(self, record_id: int) -> int: await self._session.flush() - return deletion_result - @exception_mapper( catch_exceptions=NoResultFound, raise_exception=RecordDoesNotExistError ) diff --git a/app/infrastructure/services/healthcheck.py b/app/infrastructure/services/healthcheck.py deleted file mode 100644 index e7162dc..0000000 --- a/app/infrastructure/services/healthcheck.py +++ /dev/null @@ -1,48 +0,0 @@ -from fastapi import Request -from sqlalchemy.sql import text -from asyncio.exceptions import TimeoutError -from pybotx import Bot - -from app.application.service.interfaces import HealthCheckService -from app.infrastructure.worker.worker import queue -from app.settings import settings - - -class PostgresHealthCheck(HealthCheckService): - def __init__(self, request: Request): - self._request = request - - async def check(self): - bot: Bot = self._request.app.state.bot - session_factory = bot.state.db_session_factory - - async with session_factory() as db_session: - try: - await db_session.execute(text("SELECT 1")) - except Exception as exc: - return str(exc) - return None - - -class RedisHealthCheck(HealthCheckService): - def __init__(self, request: Request): - self._request = request - - async def check(self): - bot: Bot = self._request.app.state.bot - return await bot.state.redis_repo.ping() - - -class WorkerHealthCheck(HealthCheckService): - async def check(self): - job = await queue.enqueue("healthcheck") - if not job: - return None - - try: - await job.refresh(settings.WORKER_TIMEOUT_SEC) - except TimeoutError: - return "Worker is overloaded or not launched" - except Exception as exc: - return str(exc) - return None diff --git a/app/infrastructure/worker/worker.py b/app/infrastructure/worker/worker.py index 2941733..dc61452 100644 --- a/app/infrastructure/worker/worker.py +++ b/app/infrastructure/worker/worker.py @@ -16,7 +16,7 @@ async def startup(ctx: SaqCtx) -> None: - from app.presentation.bot.bot import get_bot # 
noqa: WPS433 + from app.presentation.bot.bot import get_bot callback_repo = CallbackRedisRepo(aioredis.from_url(app_settings.REDIS_DSN)) bot = get_bot(callback_repo, raise_exceptions=False) diff --git a/app/logger.py b/app/logger.py index a45a185..2b1da90 100644 --- a/app/logger.py +++ b/app/logger.py @@ -24,7 +24,7 @@ def emit(self, record): # type: ignore # Find caller from where originated the logged message frame, depth = logging.currentframe(), 2 - while frame.f_code.co_filename == logging.__file__: # noqa: WPS352, WPS609 + while frame.f_code.co_filename == logging.__file__: frame = frame.f_back # type: ignore [assignment] depth += 1 diff --git a/app/main.py b/app/main.py index 0b298a9..33e308d 100644 --- a/app/main.py +++ b/app/main.py @@ -7,13 +7,15 @@ from fastapi import FastAPI from pybotx import Bot from redis import asyncio as aioredis +from redis.asyncio import Redis -from app.presentation.api.routers import router -from app.presentation.bot.bot import get_bot from app.infrastructure.caching.callback_redis_repo import CallbackRedisRepo from app.infrastructure.caching.exception_handlers import PubsubExceptionHandler +from app.infrastructure.caching.redis_repo import RedisRepo from app.infrastructure.containers import ApplicationStartupContainer from app.infrastructure.db.sqlalchemy import close_db_connections +from app.presentation.api.routers import router +from app.presentation.bot.bot import get_bot from app.presentation.bot.resources import strings from app.settings import settings @@ -21,8 +23,8 @@ async def startup( application: FastAPI, raise_bot_exceptions: bool, - redis_client=Provide[ApplicationStartupContainer.redis_client], - redis_repo=Provide[ApplicationStartupContainer.redis_repo], + redis_client: Redis = Provide[ApplicationStartupContainer.redis_client], + redis_repo: RedisRepo = Provide[ApplicationStartupContainer.redis_repo], ) -> None: pool = aioredis.BlockingConnectionPool( max_connections=settings.REDIS_CONNECTION_POOL_SIZE, diff 
--git a/app/presentation/api/botx.py b/app/presentation/api/botx.py index b39bade..66d43fc 100644 --- a/app/presentation/api/botx.py +++ b/app/presentation/api/botx.py @@ -16,8 +16,8 @@ ) from pybotx.constants import BOT_API_VERSION -from app.presentation.api.bot import bot_dependency from app.logger import logger +from app.presentation.api.bot import bot_dependency from app.settings import settings router = APIRouter() @@ -27,7 +27,7 @@ async def command_handler(request: Request, bot: Bot = bot_dependency) -> JSONResponse: """Receive commands from users. Max timeout - 5 seconds.""" - try: # noqa: WPS225 + try: bot.async_execute_raw_bot_command( await request.json(), request_headers=request.headers, diff --git a/app/presentation/api/healthcheck.py b/app/presentation/api/healthcheck.py deleted file mode 100644 index 687c5da..0000000 --- a/app/presentation/api/healthcheck.py +++ /dev/null @@ -1,41 +0,0 @@ -from fastapi import APIRouter, Request - -from app.application.use_cases.healthcheck import HealthCheckUseCase -from app.infrastructure.services.healthcheck import ( - PostgresHealthCheck, - RedisHealthCheck, - WorkerHealthCheck, -) -from app.presentation.api.schemas.healthcheck import ( - HealthCheckResponse, - HealthCheckFailed, - HealthCheckSucceed, -) - -router = APIRouter() - - -@router.get("/healthcheck", response_model=HealthCheckResponse) -async def healthcheck(request: Request): - services = [ - ("postgres", PostgresHealthCheck(request)), - ("redis", RedisHealthCheck(request)), - ("worker", WorkerHealthCheck()), - ] - - use_case = HealthCheckUseCase(services) - status, raw_results = await use_case.execute() - - response_models = [] - for r in raw_results: - if r.error: - response_models.append( - HealthCheckFailed(name=r.name, error=r.error, status="error") - ) - else: - response_models.append(HealthCheckSucceed(name=r.name, status="ok")) - - return HealthCheckResponse( - status=status, - services=response_models, - ) diff --git 
a/app/presentation/api/schemas/healthcheck.py b/app/presentation/api/schemas/healthcheck.py deleted file mode 100644 index 9c298a0..0000000 --- a/app/presentation/api/schemas/healthcheck.py +++ /dev/null @@ -1,24 +0,0 @@ -from typing import Literal, Union, Optional, List - -from pydantic import BaseModel - -from app.domain.entities.healthcheck import HealthCheckStatuses - - -class HealthCheckSucceed(BaseModel): - name: str - status: Literal[HealthCheckStatuses.OK] = HealthCheckStatuses.OK - - -class HealthCheckFailed(BaseModel): - name: str - error: str - status: Literal[HealthCheckStatuses.ERROR] = HealthCheckStatuses.ERROR - - -HealthCheckResult = Union[HealthCheckSucceed, HealthCheckFailed] - - -class HealthCheckResponse(BaseModel): - status: Optional[HealthCheckStatuses] - services: List[HealthCheckResult] diff --git a/app/presentation/bot/bot.py b/app/presentation/bot/bot.py index 0018e85..c75f38e 100644 --- a/app/presentation/bot/bot.py +++ b/app/presentation/bot/bot.py @@ -2,12 +2,11 @@ from httpx import AsyncClient, Limits from pybotx import Bot, CallbackRepoProto -from pybotx_fsm import FSMMiddleware -from app.presentation.bot.commands import common, sample_record_simple -from app.presentation.bot.handlers.internal_error import internal_error_handler from app.infrastructure.middlewares.answer_error import answer_error_middleware from app.infrastructure.middlewares.smart_logger import smart_logger_middleware +from app.presentation.bot.commands import common, sample_record_simple +from app.presentation.bot.handlers.internal_error import internal_error_handler from app.settings import settings BOTX_CALLBACK_TIMEOUT = 30 @@ -30,10 +29,6 @@ def get_bot(callback_repo: CallbackRepoProto, raise_exceptions: bool) -> Bot: middlewares=[ smart_logger_middleware, answer_error_middleware, - FSMMiddleware( - [], - state_repo_key="redis_repo", - ), ], callback_repo=callback_repo, ) diff --git a/app/presentation/bot/commands/command_listing.py 
b/app/presentation/bot/commands/command_listing.py index 3f94f40..91fa871 100644 --- a/app/presentation/bot/commands/command_listing.py +++ b/app/presentation/bot/commands/command_listing.py @@ -19,10 +19,3 @@ class SampleRecordCommands: command_name="/create_record", description="Создать запись", ) - - -class SampleRecordFSMCommands: - CREATE_RECORD = BotCommand( - command_name="/create_record_fsm", - description="Создать запись, используя fsm", - ) diff --git a/app/presentation/bot/commands/common.py b/app/presentation/bot/commands/common.py index 35ccb0e..f09c050 100644 --- a/app/presentation/bot/commands/common.py +++ b/app/presentation/bot/commands/common.py @@ -1,8 +1,5 @@ """Handlers for default bot commands and system events.""" -from os import environ -from subprocess import PIPE, STDOUT, run - from pybotx import ( Bot, BubbleMarkup, @@ -54,18 +51,3 @@ async def help_handler(message: IncomingMessage, bot: Bot) -> None: ) await bot.answer_message(answer_body) - - -@collector.command("/_debug:git-commit-sha", visible=False) -async def git_commit_sha(message: IncomingMessage, bot: Bot) -> None: - """Show git commit SHA.""" - - await bot.answer_message(environ.get("GIT_COMMIT_SHA", "")) - - -@collector.command("/_debug:version", visible=False) -async def build_version(message: IncomingMessage, bot: Bot) -> None: - """Show app version.""" - cmd = "poetry version --short" - output = run(cmd.split(), stdout=PIPE, stderr=STDOUT, text=True).stdout - await bot.answer_message(output.strip("\n")) diff --git a/app/presentation/bot/commands/sample_record_fsm.py b/app/presentation/bot/commands/sample_record_fsm.py deleted file mode 100644 index 5c3a4e0..0000000 --- a/app/presentation/bot/commands/sample_record_fsm.py +++ /dev/null @@ -1,26 +0,0 @@ -from dependency_injector.wiring import inject, Provider -from pybotx import HandlerCollector, Bot, IncomingMessage -from sqlalchemy.ext.asyncio import AsyncSession - -from app.infrastructure.containers import 
BotSampleRecordCommandContainer -from app.infrastructure.db.sqlalchemy import provide_session -from app.presentation.bot.commands.command_listing import SampleRecordCommands -from app.presentation.bot.handlers import CreateSampleRecordHandler - -collector = HandlerCollector() - - -@collector.command(**SampleRecordCommands.CREATE_RECORD.command_data()) -@provide_session -@inject -async def create_sample_record_with_fsm( - message: IncomingMessage, - bot: Bot, - session: AsyncSession, - record_use_cases_factory=Provider[ - BotSampleRecordCommandContainer.record_use_cases_factory - ], -): - await CreateSampleRecordHandler( - bot=bot, message=message, use_cases=record_use_cases_factory.provider(session) - ).execute() diff --git a/app/presentation/bot/commands/sample_record_simple.py b/app/presentation/bot/commands/sample_record_simple.py index 7f44173..4dece5d 100644 --- a/app/presentation/bot/commands/sample_record_simple.py +++ b/app/presentation/bot/commands/sample_record_simple.py @@ -1,6 +1,6 @@ from dependency_injector.providers import Factory -from dependency_injector.wiring import inject, Provider -from pybotx import HandlerCollector, Bot, IncomingMessage +from dependency_injector.wiring import Provider, inject +from pybotx import Bot, HandlerCollector, IncomingMessage from sqlalchemy.ext.asyncio import AsyncSession from app.application.use_cases.interfaces import ISampleRecordUseCases @@ -22,7 +22,7 @@ async def create_sample_record( record_use_cases_factory: Factory[ISampleRecordUseCases] = Provider[ BotSampleRecordCommandContainer.record_use_cases_factory ], -): +) -> None: await CreateSampleRecordHandler( bot=bot, message=message, use_cases=record_use_cases_factory.provider(session) ).execute() diff --git a/app/presentation/bot/handlers/command.py b/app/presentation/bot/handlers/command.py index 57735aa..f257bff 100644 --- a/app/presentation/bot/handlers/command.py +++ b/app/presentation/bot/handlers/command.py @@ -29,7 +29,7 @@ def 
incoming_argument_parser( @abc.abstractmethod async def handle_logic( self, - request_parameter: BaseModel | str, + request_parameter: BaseModel | str | None, ) -> None: pass @@ -44,7 +44,7 @@ def get_request_parameter( async def execute( self, - ): + ) -> None: try: parameter = self.get_request_parameter() await self.handle_logic(parameter) diff --git a/app/presentation/bot/handlers/error.py b/app/presentation/bot/handlers/error.py index 5c5c35e..43be1b3 100644 --- a/app/presentation/bot/handlers/error.py +++ b/app/presentation/bot/handlers/error.py @@ -1,11 +1,11 @@ from typing import Callable from uuid import uuid4 -from pybotx import Bot, IncomingMessage, BotShuttingDownError +from pybotx import Bot, BotShuttingDownError, IncomingMessage from app.logger import logger -from app.presentation.bot.validators.exceptions import MessageValidationError from app.presentation.bot.resources import strings +from app.presentation.bot.validators.exceptions import MessageValidationError class BaseExceptionHandler: diff --git a/app/presentation/bot/handlers/sample_record.py b/app/presentation/bot/handlers/sample_record.py index fa9ef23..947631a 100644 --- a/app/presentation/bot/handlers/sample_record.py +++ b/app/presentation/bot/handlers/sample_record.py @@ -2,12 +2,14 @@ from app.application.use_cases.interfaces import ISampleRecordUseCases from app.presentation.bot.handlers.command import BaseCommandHandler -from app.presentation.bot.validators.base import BotXJsonRequestParser from app.presentation.bot.resources.strings import SAMPLE_RECORD_CREATED_ANSWER from app.presentation.bot.schemas.sample_record import SampleRecordCreateRequestSchema +from app.presentation.bot.validators.base import BotXJsonRequestParser class CreateSampleRecordHandler(BaseCommandHandler): + incoming_argument_parser = BotXJsonRequestParser(SampleRecordCreateRequestSchema) + def __init__( self, bot: Bot, @@ -17,12 +19,9 @@ def __init__( self._use_cases = use_cases super().__init__(bot, message) - 
exception_explain_mapping = {} - incoming_argument_parser = BotXJsonRequestParser(SampleRecordCreateRequestSchema) - async def handle_logic( self, - request_parameter: SampleRecordCreateRequestSchema, + request_parameter: SampleRecordCreateRequestSchema, # type: ignore ) -> None: created_record = await self._use_cases.create_record(request_parameter) await self._bot.answer_message( diff --git a/app/presentation/bot/resources/strings.py b/app/presentation/bot/resources/strings.py index 87ec35e..e30f8e4 100644 --- a/app/presentation/bot/resources/strings.py +++ b/app/presentation/bot/resources/strings.py @@ -2,7 +2,7 @@ from typing import Any, Protocol, cast -from mako.lookup import TemplateLookup +from mako.lookup import TemplateLookup # type: ignore[import-untyped] class FormatTemplate(Protocol): @@ -13,7 +13,7 @@ class FormatTemplate(Protocol): with regular string formatting. """ - def format(self, **kwargs: Any) -> str: # noqa: WPS125 A003 + def format(self, **kwargs: Any) -> str: """Render template.""" @@ -23,11 +23,11 @@ class TemplateFormatterLookup(TemplateLookup): def get_template(self, uri: str) -> FormatTemplate: """Cast default mako template to FormatTemplate.""" - def _format(**kwargs: Any) -> str: # noqa: WPS430 + def _format(**kwargs: Any) -> str: return template.render(**kwargs).rstrip() template = super().get_template(uri) - template.format = _format # noqa: WPS125 + template.format = _format return cast(FormatTemplate, template) @@ -40,8 +40,8 @@ def _format(**kwargs: Any) -> str: # noqa: WPS430 strict_undefined=True, ) -BOT_PROJECT_NAME = "bot_refactor" -BOT_DISPLAY_NAME = "gubarik_bot_refactor" +BOT_PROJECT_NAME = "template_bot" +BOT_DISPLAY_NAME = "template_bot" CHAT_CREATED_TEMPLATE = lookup.get_template("chat_created.txt.mako") HELP_COMMAND_MESSAGE_TEMPLATE = lookup.get_template("help.txt.mako") diff --git a/app/presentation/bot/schemas/sample_record.py b/app/presentation/bot/schemas/sample_record.py index 068666b..d18a629 100644 --- 
a/app/presentation/bot/schemas/sample_record.py +++ b/app/presentation/bot/schemas/sample_record.py @@ -1,5 +1,7 @@ """Domains.""" +from typing import Self + from pydantic import BaseModel, Field @@ -35,6 +37,6 @@ class SampleRecordUpdateRequestSchema(BaseModel): record_data: str = Field(..., min_length=1) @classmethod - def _from_plain_message_data(cls, message_data: str): + def _from_plain_message_data(cls, message_data: str) -> Self: record_id, record_data = message_data.split(" ") - return cls(id=record_id, record_data=record_data) + return cls(id=record_id, record_data=record_data) # type: ignore[arg-type] diff --git a/app/presentation/bot/validators/base.py b/app/presentation/bot/validators/base.py index 0bb217a..7a143e3 100644 --- a/app/presentation/bot/validators/base.py +++ b/app/presentation/bot/validators/base.py @@ -1,7 +1,8 @@ from abc import ABC, abstractmethod -from typing import Generic, TypeVar, Any +from typing import Any, Generic, TypeVar -from orjson import orjson, JSONDecodeError +import orjson +from orjson import JSONDecodeError from pybotx import IncomingMessage from pydantic import BaseModel, ValidationError diff --git a/app/presentation/bot/validators/sample_record.py b/app/presentation/bot/validators/sample_record.py index eeddd84..e38ca12 100644 --- a/app/presentation/bot/validators/sample_record.py +++ b/app/presentation/bot/validators/sample_record.py @@ -1,11 +1,12 @@ -from orjson import orjson, JSONDecodeError +import orjson +from orjson import JSONDecodeError from pybotx import IncomingMessage from pydantic import ValidationError +from app.decorators.exceptions_mapper import exception_mapper from app.presentation.bot.schemas.sample_record import SampleRecordCreateRequestSchema -from app.presentation.validators.base import IBotRequestParser -from app.presentation.validators.exceptions import MessageValidationError -from app.utils.exceptions_mapper import exception_mapper +from app.presentation.bot.validators.base import 
IBotRequestParser +from app.presentation.bot.validators.exceptions import MessageValidationError class SampleRecordJsonCreateRequestValidator( diff --git a/app/presentation/dependencies/healthcheck.py b/app/presentation/dependencies/healthcheck.py index fa6d3e5..9fe9adc 100644 --- a/app/presentation/dependencies/healthcheck.py +++ b/app/presentation/dependencies/healthcheck.py @@ -7,8 +7,8 @@ from pybotx import Bot from sqlalchemy.sql import text -from app.settings import settings from app.infrastructure.worker.worker import queue +from app.settings import settings async def check_db_connection(request: Request) -> Optional[str]: diff --git a/app/services/healthcheck.py b/app/services/healthcheck.py deleted file mode 100644 index 7608273..0000000 --- a/app/services/healthcheck.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Healthcheck service bot.""" - -from typing import List - -from app.domain.entities.healthcheck import ( - HealthCheckServiceResult, - HealthCheckStatuses, -) -from app.presentation.api.schemas.healthcheck import ( - HealthCheckSucceed, - HealthCheckFailed, - HealthCheckResult, - HealthCheckResponse, -) - - -class HealthCheckResponseBuilder: - def __init__(self) -> None: - self._healthcheck_results: List[HealthCheckServiceResult] = [] - - def add_healthcheck_result(self, service: HealthCheckServiceResult) -> None: - self._healthcheck_results.append(service) - - def build(self) -> HealthCheckResponse: - healthcheck: HealthCheckResult - healthchecks = [] - healthy = True - for healthcheck_result in self._healthcheck_results: - if healthcheck_result.error is None: - healthcheck = HealthCheckSucceed(name=healthcheck_result.name) - else: - healthy = False - healthcheck = HealthCheckFailed( - name=healthcheck_result.name, error=healthcheck_result.error - ) - healthchecks.append(healthcheck) - - result_status = HealthCheckStatuses.OK if healthy else HealthCheckStatuses.ERROR - return HealthCheckResponse(status=result_status, services=healthchecks) diff --git 
a/app/settings.py b/app/settings.py index feebabe..fb7d2d3 100644 --- a/app/settings.py +++ b/app/settings.py @@ -8,7 +8,7 @@ class AppSettings(BaseSettings): - class Config: # noqa: WPS431 + class Config: env_file = ".env" @classmethod @@ -49,7 +49,9 @@ def _build_credentials_from_string( cts_url = f"https://{cts_url}" return BotAccountWithSecret( - id=UUID(bot_id), cts_url=cts_url, secret_key=secret_key + id=UUID(bot_id), + cts_url=cts_url, # type: ignore[arg-type] + secret_key=secret_key, ) BOT_CREDENTIALS: List[BotAccountWithSecret] @@ -72,4 +74,4 @@ def _build_credentials_from_string( WORKER_TIMEOUT_SEC: float = 4 -settings = AppSettings() +settings = AppSettings() # type: ignore[call-arg] diff --git a/pyproject.toml b/pyproject.toml index ee84cc7..ad82758 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,6 @@ python = ">=3.9,<3.12" pybotx = "~0.75.1" pybotx-smart-logger = "~0.10.1" -pybotx-fsm = "~0.6.1" fastapi = "~0.110.1" @@ -40,6 +39,7 @@ dependency-injector = "4.41.0" orjson = "^3.10.18" factory-boy = "^3.3.3" async-factory-boy = "^1.0.1" +cachetools = "^6.1.0" [tool.poetry.dev-dependencies] @@ -54,7 +54,7 @@ pytest-cov = "~3.0.0" asgi-lifespan = "~1.0.1" requests = "~2.31.0" -respx = "~0.20.0" +respx = "~0.22.0" httpx = ">=0.28.0,<0.29.0" markdown = "3.3.6" # https://github.com/python-poetry/poetry/issues/4777 diff --git a/ruff.toml b/ruff.toml index 3210f84..39816b5 100644 --- a/ruff.toml +++ b/ruff.toml @@ -23,6 +23,11 @@ ignore = [ "app/bot/commands/*.py" = ["D104"] "app/resources/strings.py" = ["E501"] "tests/*" = ["D100"] +# alembic migrations +"*/migrations/*" = ["ALL"] +"*/env.py" = ["ALL"] +# ignore test functons missing return annotations +"*/test_*.py" = ["ANN201", "ANN202"] [lint.flake8-quotes] inline-quotes = "double" diff --git a/scripts/format b/scripts/format index b60ee9d..675c857 100755 --- a/scripts/format +++ b/scripts/format @@ -1,12 +1,6 @@ - #!/usr/bin/env bash set -euo pipefail -set -ex - -autoflake --recursive 
--in-place \ - --remove-all-unused-imports \ - --ignore-init-module-imports \ # ------------------------------------------------------------ # 1. Удаляем неиспользуемые импорты (F401/F841) и # сортируем оставшиеся (I###) — «исправляем» только эти коды. @@ -15,8 +9,6 @@ ruff check \ --select F401,F841,I \ --fix \ app tests -isort --profile black app tests -black app tests # ------------------------------------------------------------ # 2. Применяем Black-совместимое форматирование diff --git a/scripts/lint b/scripts/lint index 8ca2743..d815961 100755 --- a/scripts/lint +++ b/scripts/lint @@ -1,7 +1,6 @@ #!/usr/bin/env bash set -euo pipefail -set -ex # --------------------------------------------------------------- # 1. Проверяем форматирование (equivalent to: black --check --diff) # --diff → вывод изменений @@ -9,8 +8,6 @@ set -ex # --------------------------------------------------------------- ruff format --check --diff app tests -black --check --diff app tests -isort --profile black --check-only app tests # --------------------------------------------------------------- # 2. Запускаем все lint-правила (B, E, F, S, B*, W*, N*, Q*, ...) # Конфигурация берётся из pyproject.toml / ruff.toml @@ -20,5 +17,4 @@ ruff check app tests # --------------------------------------------------------------- # 3. Type-checking # --------------------------------------------------------------- -mypy app tests -flake8 app tests \ No newline at end of file +mypy --config-file=setup.cfg app tests diff --git a/scripts/test b/scripts/test index 0c82f14..3093c7f 100755 --- a/scripts/test +++ b/scripts/test @@ -15,13 +15,11 @@ # он перезапустит себя под bash, чтобы работали массивы. 
# --------------------------------------------------------------------- -set -ex # --- re-exec в bash, если нужно -------------------------------------- if [ -z "${BASH_VERSION:-}" ]; then exec bash "$0" "$@" fi -pytest ${@} set -euo pipefail show_help() { @@ -87,4 +85,4 @@ if [[ -n $MARK_EXPR ]]; then pytest -m "$MARK_EXPR" "${POSITIONAL[@]:-}" else pytest "${POSITIONAL[@]:-}" -fi \ No newline at end of file +fi diff --git a/setup.cfg b/setup.cfg index 33db506..ad62e6b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -22,131 +22,49 @@ exclude_lines = disallow_untyped_defs = True strict_optional = True -[mypy-redis.*] -ignore_missing_imports = True - -[mypy-alembic.*] -ignore_missing_imports = True - -[mypy-asyncpg.*] -ignore_missing_imports = True - -[mypy-loguru.*] -ignore_missing_imports = True - -[mypy-mako.*] -ignore_missing_imports = True +[mypy-tests.*] +disallow_untyped_defs = False -[mypy-saq.*] -ignore_missing_imports = True - -[mypy-sqlalchemy.*] -ignore_missing_imports = True - -[mypy-app.db.migrations.*] +[mypy-app.infrastructure.db.*] ignore_errors = True +check_untyped_defs = False +disallow_untyped_defs = False +disallow_incomplete_defs = False +[mypy-async_factory_boy.*] +ignore_missing_imports = True -[isort] -profile = black -multi_line_output = 3 -include_trailing_comma = True -line_length = 88 -force_grid_wrap = 0 -combine_as_imports = True -# move imports to the top of the file -float_to_top=true - -[flake8] -# See https://flake8.pycqa.org/en/latest/user/configuration.html#project-configuration -max-line-length = 88 -max-awaits = 10 -max-local-variables = 10 -max-module-members = 10 -max-arguments = 10 -nested-classes-whitelist = Config, Meta, Params -exclude = app/db/migrations -per-file-ignores = -# docstings for module - */__init__.py:D104 -# too many imports - app/bot/commands/*.py:WPS201,D104 - app/services/botx_user_search.py:WPS232 - app/main.py:WPS201 -# line too long - app/resources/strings.py:E501 - 
tests/*:D100,WPS110,WPS116,WPS118,WPS201,WPS204,WPS235,WPS430,WPS442,WPS432 -# too many public attrs -# too many args -# wrong var name - app/services/answer_error.py:WPS110,WPS211,WPS230 -# too many imported names, subprocess usage - app/bot/commands/common.py:WPS235,S404,S603 -# names shadowing -# `%` string formatting - app/db/sqlalchemy.py:WPS442,WPS323 - -no-accept-encodings = True -inline-quotes = double - -# See https://wemake-python-stylegui.de/en/latest/pages/usage/violations/index.html -ignore = - # Bxxx - # Function calls in arguments definition is part of fastapi and botx di system - B008, - - # Cxxx - # Handled by black - C8, - - # Dxxx - # Docstrings for public classes - D101, - # Docstrings for public methods - D102, - # Docstrings for public functions - D103, - # Docstrings for public nested classes like Meta or Config not necessary - D106, - # Docstrings for __init__ methods - D107, - # Allow empty line after docstings - D202, - - # Exxx - # Black handles whitespace before ':' - E203, - - # Sxxx - # Asserts are useful. 
- S101, - # Mako templates are used as messages, so no xss attacks - S702, - - # WPSxxx - # Allow upper-case constants - WPS115, - # Too many module members - WPS202, - # Too many methods in class - WPS214, - # Does not play well with forward type references - WPS226, - # f-strings are useful - WPS305, - # Required base in class definition is strange - WPS306, - # Context manager with too many assignments - WPS316, - # Forbids to use implicit string concatenation - WPS326, - # Allow walrus operator - WPS332, - # Allow lines that starts with a dot - WPS348, - # Objects that are returned from fastapi and botx as di system parts should be available - WPS404, - -[darglint] -# See https://github.com/terrencepreilly/darglint#strictness-configuration -strictness = long +;[mypy-mako.*] +;ignore_missing_imports = True +; +;[mypy-redis.*] +;ignore_missing_imports = True +; +;[mypy-alembic.*] +;ignore_missing_imports = True +; +;[mypy-asyncpg.*] +;ignore_missing_imports = True +; +;[mypy-loguru.*] +;ignore_missing_imports = True +; +; +;[mypy-saq.*] +;ignore_missing_imports = True +; +;[mypy-sqlalchemy.*] +;ignore_missing_imports = True +; + + +;[isort] +;profile = black +;multi_line_output = 3 +;include_trailing_comma = True +;line_length = 88 +;force_grid_wrap = 0 +;combine_as_imports = True +;# move imports to the top of the file +;float_to_top=true +; diff --git a/tests/conftest.py b/tests/conftest.py index e2d6ee2..7676021 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,19 +1,18 @@ import asyncio -from asyncio import current_task +import re +from asyncio import AbstractEventLoop, current_task from datetime import datetime from http import HTTPStatus from pathlib import Path -from typing import Any, AsyncGenerator, Callable, Dict, Generator, List, Optional +from typing import Any, AsyncGenerator, Callable, Dict, Generator, Optional from unittest.mock import AsyncMock, patch from uuid import UUID, uuid4 -import os import httpx import jwt import pytest import 
respx -import sqlalchemy -from alembic import config as alembic_config, command +from alembic import command from alembic.config import Config from asgi_lifespan import LifespanManager from pybotx import ( @@ -26,34 +25,31 @@ UserSender, ) from pybotx.logger import logger -from sqlalchemy import NullPool, event -from sqlalchemy.exc import SQLAlchemyError +from sqlalchemy import NullPool from sqlalchemy.ext.asyncio import ( - AsyncSession, AsyncEngine, - create_async_engine, + AsyncSession, async_scoped_session, - async_sessionmaker, + create_async_engine, ) -from sqlalchemy.orm import sessionmaker, Session, SessionTransaction -from testcontainers.postgres import PostgresContainer +from sqlalchemy.orm import sessionmaker +from testcontainers.postgres import PostgresContainer # type: ignore from app.infrastructure.caching.redis_repo import RedisRepo from app.infrastructure.db.sqlalchemy import ( - build_db_session_factory, AsyncSessionFactory, make_url_async, ) from app.infrastructure.repositories.sample_record import SampleRecordRepository from app.main import get_application -from app.settings import settings, AppSettings +from app.settings import settings from tests.factories import SampleRecordModelFactory @pytest.fixture(scope="session") def postgres_container() -> Generator[PostgresContainer, None, None]: """Starts a temporary PostgreSQL container for the test session.""" - container_name = f"bot_testing_container" + container_name = "bot_testing_container" with PostgresContainer("postgres:15").with_name(container_name) as postgres: container_url = postgres.get_connection_url() @@ -62,7 +58,7 @@ def postgres_container() -> Generator[PostgresContainer, None, None]: @pytest.fixture(scope="session") -def event_loop(): +def event_loop() -> Generator[AbstractEventLoop, None, None]: """Create a session-scoped event loop for async session-scoped fixtures.""" loop = asyncio.new_event_loop() yield loop @@ -121,8 +117,24 @@ async def redis_repo(bot: Bot) -> RedisRepo: 
return bot.state.redis_repo +# def mock_authorization() -> None: +# respx.route(method="GET", path__regex="/api/v2/botx/bots/.*/token").mock( +# return_value=httpx.Response( +# HTTPStatus.OK, +# json={ +# "status": "ok", +# "result": "token", +# }, +# ), +# ) + + def mock_authorization() -> None: - respx.route(method="GET", path__regex="/api/v2/botx/bots/.*/token").mock( + respx.get( + # url__regex=re.compile(r"^https://.*?/api/v2/botx/bots/[^/]+/token") + url__regex=re.compile(r"^https://[^/]+/api/v2/botx/bots/[^/]+/token(\?.*)?$") + # url__regex=re.compile(r"^https://.*?/api/v2/botx/bots/[^/]+/token(?:\?.*)?$") + ).mock( return_value=httpx.Response( HTTPStatus.OK, json={ @@ -143,9 +155,7 @@ async def bot( async with LifespanManager(fastapi_app): built_bot = fastapi_app.state.bot - built_bot.answer_message = AsyncMock(return_value=uuid4()) - yield built_bot diff --git a/tests/factories.py b/tests/factories.py index f786090..a2c90d1 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -1,13 +1,13 @@ -from async_factory_boy.factory.sqlalchemy import AsyncSQLAlchemyFactory -from factory import Factory, DictFactory import factory +from async_factory_boy.factory.sqlalchemy import AsyncSQLAlchemyFactory # type ignore +from factory import DictFactory, Factory from app.domain.entities.sample_record import SampleRecord from app.infrastructure.db.sample_record.models import SampleRecordModel from app.presentation.bot.schemas.sample_record import ( SampleRecordCreateRequestSchema, - SampleRecordUpdateRequestSchema, SampleRecordDeleteRequestSchema, + SampleRecordUpdateRequestSchema, ) diff --git a/app/infrastructure/services/__init__.py b/tests/integration/__init__.py similarity index 100% rename from app/infrastructure/services/__init__.py rename to tests/integration/__init__.py diff --git a/app/presentation/api/schemas/__init__.py b/tests/integration/endpoints/__init__.py similarity index 100% rename from app/presentation/api/schemas/__init__.py rename to 
tests/integration/endpoints/__init__.py diff --git a/tests/presentation/endpoints/conftest.py b/tests/integration/endpoints/conftest.py similarity index 60% rename from tests/presentation/endpoints/conftest.py rename to tests/integration/endpoints/conftest.py index 8c184b8..68afcb8 100644 --- a/tests/presentation/endpoints/conftest.py +++ b/tests/integration/endpoints/conftest.py @@ -1,4 +1,4 @@ -from tests.presentation.endpoints.fixtures import ( # noqa: F401 +from tests.integration.endpoints.fixtures import ( # noqa: F401 base_command_payload, command_payload_v3, command_payload_v4, diff --git a/tests/presentation/endpoints/fixtures.py b/tests/integration/endpoints/fixtures.py similarity index 100% rename from tests/presentation/endpoints/fixtures.py rename to tests/integration/endpoints/fixtures.py diff --git a/tests/presentation/endpoints/test_botx.py b/tests/integration/endpoints/test_botx.py similarity index 62% rename from tests/presentation/endpoints/test_botx.py rename to tests/integration/endpoints/test_botx.py index 759d0a6..902ab96 100644 --- a/tests/presentation/endpoints/test_botx.py +++ b/tests/integration/endpoints/test_botx.py @@ -11,97 +11,6 @@ from app.main import get_application -@respx.mock -def test__web_app__bot_status_response_ok( - bot_id: UUID, - bot: Bot, - authorization_header: Dict[str, str], -) -> None: - # - Arrange - - query_params = { - "bot_id": str(bot_id), - "chat_type": "chat", - "user_huid": "f16cdc5f-6366-5552-9ecd-c36290ab3d11", - } - - # - Act - - with TestClient(get_application()) as test_client: - response = test_client.get( - "/status", - params=query_params, - headers=authorization_header, - ) - - # - Assert - - assert response.status_code == HTTPStatus.OK - assert response.json() == { - "result": { - "commands": [ - { - "body": "/help", - "description": "Get available commands", - "name": "/help", - } - ], - "enabled": True, - "status_message": "Bot is working", - }, - "status": "ok", - } - - -@respx.mock -def 
test__web_app__bot_status_unknown_bot_response_service_unavailable( - bot_id: UUID, - bot: Bot, - authorization_header: Dict[str, str], -) -> None: - # - Arrange - - query_params = { - "bot_id": "f3e176d5-ff46-4b18-b260-25008338c06e", - "chat_type": "chat", - "user_huid": "f16cdc5f-6366-5552-9ecd-c36290ab3d11", - } - - # - Act - - with TestClient(get_application()) as test_client: - response = test_client.get( - "/status", - params=query_params, - headers=authorization_header, - ) - - # - Assert - - assert response.status_code == HTTPStatus.SERVICE_UNAVAILABLE - - status_message = response.json()["error_data"]["status_message"] - assert status_message == "Unknown bot_id: f3e176d5-ff46-4b18-b260-25008338c06e" - - -@respx.mock -def test__web_app__bot_status_without_parameters_response_bad_request( - bot_id: UUID, - bot: Bot, - authorization_header: Dict[str, str], -) -> None: - # - Arrange - - query_params: Dict[str, str] = {} - - # - Act - - with TestClient(get_application()) as test_client: - response = test_client.get( - "/status", - params=query_params, - headers=authorization_header, - ) - - # - Assert - - assert response.status_code == HTTPStatus.BAD_REQUEST - - status_message = response.json()["error_data"]["status_message"] - assert status_message == "Invalid params" - - @respx.mock async def test__web_app__bot_command_response_accepted( bot_id: UUID, diff --git a/app/utils/__init__.py b/tests/integration/repository/__init__.py similarity index 100% rename from app/utils/__init__.py rename to tests/integration/repository/__init__.py diff --git a/tests/infrastructure/repository/test_sample_record_repository.py b/tests/integration/repository/test_sample_record_repository.py similarity index 82% rename from tests/infrastructure/repository/test_sample_record_repository.py rename to tests/integration/repository/test_sample_record_repository.py index 22e601b..cc93d0c 100644 --- a/tests/infrastructure/repository/test_sample_record_repository.py +++ 
b/tests/integration/repository/test_sample_record_repository.py @@ -1,6 +1,6 @@ import pytest from deepdiff import DeepDiff -from sqlalchemy import select, func +from sqlalchemy import func, select from sqlalchemy.ext.asyncio import AsyncSession from app.application.repository.exceptions import RecordDoesNotExistError @@ -12,14 +12,14 @@ def assert_database_object_equal_domain( db_object: SampleRecordModel, domain_object: SampleRecord -): +) -> None: assert db_object.id == domain_object.id assert db_object.record_data == domain_object.record_data async def test_add_record( sample_record_repository: SampleRecordRepository, - sample_record_factory: SampleRecordModelFactory, + sample_record_factory: type[SampleRecordModelFactory], isolated_session: AsyncSession, ): """Test adding a new record.""" @@ -39,11 +39,11 @@ async def test_add_record( async def test_update_record( sample_record_repository: SampleRecordRepository, isolated_session: AsyncSession, - sample_record_factory: SampleRecordModelFactory, + sample_record_factory: type[SampleRecordModelFactory], ): """Test updating an existing record.""" - existing_record = await sample_record_factory(record_data="test_update") + existing_record = await sample_record_factory.create(record_data="test_update") updated_record = SampleRecord( id=existing_record.id, record_data="test_update_new_value" @@ -61,20 +61,16 @@ async def test_update_record( async def test_delete_record( sample_record_repository: SampleRecordRepository, isolated_session: AsyncSession, - sample_record_factory: SampleRecordModelFactory, + sample_record_factory: type[SampleRecordModelFactory], ): """Test deleting a record.""" - print(isolated_session) - existing_record = await sample_record_factory(record_data="test_delete") - deleted_record_id = await sample_record_repository.delete(existing_record.id) + existing_record = await sample_record_factory.create(record_data="test_delete") + await sample_record_repository.delete(existing_record.id) - assert 
deleted_record_id == existing_record.id - count = await isolated_session.scalar( + db_records_count = await isolated_session.scalar( select(func.count()).select_from(SampleRecordModel) ) - assert count == 0 - - assert existing_record + assert db_records_count == 0 async def test_delete_non_exist_record( @@ -89,7 +85,7 @@ async def test_delete_non_exist_record( async def test_get_record( sample_record_repository: SampleRecordRepository, sample_record_factory ): - existing_record = await sample_record_factory() + existing_record = await sample_record_factory.create() record_from_db = await sample_record_repository.get_by_id(existing_record.id) assert_database_object_equal_domain(existing_record, record_from_db) @@ -107,7 +103,7 @@ async def test_get_non_existing_record( async def test_get_all_records( sample_record_repository: SampleRecordRepository, - sample_record_factory: SampleRecordModelFactory, + sample_record_factory: type[SampleRecordModelFactory], ): existing_records_map = { record.id: record for record in await sample_record_factory.create_batch(4) diff --git a/tests/application/__init__.py b/tests/integration/sample_record_use_cases/__init__.py similarity index 100% rename from tests/application/__init__.py rename to tests/integration/sample_record_use_cases/__init__.py diff --git a/tests/application/integration/conftest.py b/tests/integration/sample_record_use_cases/conftest.py similarity index 77% rename from tests/application/integration/conftest.py rename to tests/integration/sample_record_use_cases/conftest.py index 0260f1e..26209ad 100644 --- a/tests/application/integration/conftest.py +++ b/tests/integration/sample_record_use_cases/conftest.py @@ -2,9 +2,7 @@ from app.application.use_cases.interfaces import ISampleRecordUseCases from app.application.use_cases.record_use_cases import SampleRecordUseCases -from app.domain.entities.sample_record import SampleRecord from app.infrastructure.repositories.sample_record import SampleRecordRepository -from 
tests.application.unit.fake_repository import FakeSampleRecordRepository @pytest.fixture diff --git a/tests/application/integration/test_sample_record_use_cases_int.py b/tests/integration/sample_record_use_cases/test_sample_record_use_cases_int.py similarity index 91% rename from tests/application/integration/test_sample_record_use_cases_int.py rename to tests/integration/sample_record_use_cases/test_sample_record_use_cases_int.py index 39a85e3..2bd606f 100644 --- a/tests/application/integration/test_sample_record_use_cases_int.py +++ b/tests/integration/sample_record_use_cases/test_sample_record_use_cases_int.py @@ -3,7 +3,7 @@ from app.application.use_cases.interfaces import ISampleRecordUseCases from app.infrastructure.db.sample_record.models import SampleRecordModel -from app.presentation.bot.schemas import SampleRecordResponseSchema +from app.presentation.bot.schemas.sample_record import SampleRecordResponseSchema from tests.factories import SampleRecordCreateSchemaFactory, SampleRecordModelFactory @@ -23,7 +23,7 @@ async def test_sample_record_use_case_add_record_in_database( sample_record_create_request = SampleRecordCreateSchemaFactory() response = await sample_record_use_cases_with_real_repo.create_record( - sample_record_create_request + sample_record_create_request # type: ignore ) query = select(SampleRecordModel).where(SampleRecordModel.id == response.id) @@ -61,11 +61,7 @@ async def test_sample_record_use_case_remove_record_from_database( isolated_session.add(existing_record) await isolated_session.flush() - response = await sample_record_use_cases_with_real_repo.delete_record( - existing_record.id - ) - - assert response == existing_record.id + await sample_record_use_cases_with_real_repo.delete_record(existing_record.id) assert await isolated_session.get(SampleRecordModel, existing_record.id) is None diff --git a/tests/presentation/endpoints/__init__.py b/tests/presentation/endpoints/__init__.py deleted file mode 100644 index e69de29..0000000 diff 
--git a/tests/presentation/unit/__init__.py b/tests/presentation/unit/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/services/__init__.py b/tests/services/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/services/test_botx_user_search.py b/tests/services/test_botx_user_search.py deleted file mode 100644 index 2aabd7c..0000000 --- a/tests/services/test_botx_user_search.py +++ /dev/null @@ -1,80 +0,0 @@ -from unittest.mock import AsyncMock -from uuid import UUID - -import pytest -from pybotx import Bot, UserFromSearch, UserKinds, UserNotFoundError - -from app.services.botx_user_search import UserIsBotError, search_user_on_each_cts - - -async def test_search_user_on_each_cts_user_is_bot_error_raised( - bot: Bot, -) -> None: - # - Arrange - - bot_user = UserFromSearch( - huid=UUID("86c4814b-feee-4ff0-b04d-4b3226318078"), - ad_login=None, - ad_domain=None, - username="Test Bot", - company=None, - company_position=None, - department=None, - emails=[], - other_id=None, - user_kind=UserKinds.BOT, - ) - - bot.search_user_by_huid = AsyncMock(return_value=bot_user) # type: ignore - - # - Act - - with pytest.raises(UserIsBotError): - await search_user_on_each_cts(bot, UUID("86c4814b-feee-4ff0-b04d-4b3226318078")) - - -async def test_search_user_on_each_cts_not_found( - bot: Bot, -) -> None: - # - Arrange - - bot.search_user_by_huid = AsyncMock( # type: ignore - side_effect=UserNotFoundError("not found") - ) - - # - Act - - search_result = await search_user_on_each_cts( - bot, UUID("86c4814b-feee-4ff0-b04d-4b3226318078") - ) - - # - Assert - - assert search_result is None - - -async def test_search_user_on_each_cts_suceed( - bot: Bot, -) -> None: - # - Arrange - - user = UserFromSearch( - huid=UUID("86c4814b-feee-4ff0-b04d-4b3226318078"), - ad_login=None, - ad_domain=None, - username="Test User", - company=None, - company_position=None, - department=None, - emails=[], - other_id=None, - user_kind=UserKinds.CTS_USER, 
- ) - - bot.search_user_by_huid = AsyncMock(return_value=user) # type: ignore - - # - Act - - search_result = await search_user_on_each_cts( - bot, UUID("86c4814b-feee-4ff0-b04d-4b3226318078") - ) - - # - Assert - - assert search_result - - found_user, bot_account = search_result - assert found_user is user - assert bot_account is list(bot.bot_accounts)[0] diff --git a/tests/application/integration/__init__.py b/tests/unit/__init__.py similarity index 100% rename from tests/application/integration/__init__.py rename to tests/unit/__init__.py diff --git a/tests/application/unit/__init__.py b/tests/unit/bot_commands/__init__.py similarity index 100% rename from tests/application/unit/__init__.py rename to tests/unit/bot_commands/__init__.py diff --git a/tests/presentation/commands/test_common.py b/tests/unit/bot_commands/test_common.py similarity index 69% rename from tests/presentation/commands/test_common.py rename to tests/unit/bot_commands/test_common.py index 8bf7a45..62db381 100644 --- a/tests/presentation/commands/test_common.py +++ b/tests/unit/bot_commands/test_common.py @@ -70,38 +70,9 @@ async def test_chat_created_handler( # - Assert - bot.answer_message.assert_awaited_once_with( # type: ignore ( - "Вас приветствует gubarik_bot_refactor!\n\n" + "Вас приветствует template_bot!\n\n" "Для более подробной информации нажмите кнопку `/help`" ), bubbles=BubbleMarkup([[Button(command="/help", label="/help")]]), ) - -async def test_help_handler( - bot: Bot, - incoming_message_factory: Callable[..., IncomingMessage], -) -> None: - # - Arrange - - message = incoming_message_factory(body="/help") - - # - Act - - await bot.async_execute_bot_command(message) - - # - Assert - - bot.answer_message.assert_awaited_once_with( # type: ignore - "`/help` -- Get available commands" - ) - - -async def test_git_commit_sha_handler( - bot: Bot, - incoming_message_factory: Callable[..., IncomingMessage], -) -> None: - # - Arrange - - message = 
incoming_message_factory(body="/_debug:git-commit-sha") - - # - Act - - await bot.async_execute_bot_command(message) - - # - Assert - - bot.answer_message.assert_awaited_once_with("") # type: ignore diff --git a/tests/infrastructure/__init__.py b/tests/unit/decorators/__init__.py similarity index 100% rename from tests/infrastructure/__init__.py rename to tests/unit/decorators/__init__.py diff --git a/tests/unit/decorators/test_classes.py b/tests/unit/decorators/test_classes.py new file mode 100644 index 0000000..4534d08 --- /dev/null +++ b/tests/unit/decorators/test_classes.py @@ -0,0 +1,41 @@ +from app.decorators.exception_mapper import ExceptionContext, ExceptionFactory + + +class ParentError(Exception): + """Parent error class.""" + + +class ChildError(ParentError): + """Child error class.""" + + +class UnmappedError(Exception): + """Unmapped error class.""" + + +class GeneratedError(Exception): + """Generated error class.""" + + +class DummyFactory(ExceptionFactory): + """Dummy factory class.""" + + def __init__( + self, + tag: str, + generated_exception: type[Exception] = GeneratedError, + detailed: bool = False, + ) -> None: + self.tag = tag + self.generated_exception = generated_exception + self.detailed = detailed + + def make_exception(self, context: ExceptionContext) -> Exception: + if self.detailed: + return self.generated_exception( + f"[{self.tag}] {str(context.formatted_context)}" + ) + + return self.generated_exception( + f"[{self.tag}] {str(context.original_exception)}" + ) diff --git a/tests/unit/decorators/test_errors_mapper.py b/tests/unit/decorators/test_errors_mapper.py new file mode 100644 index 0000000..ca93a88 --- /dev/null +++ b/tests/unit/decorators/test_errors_mapper.py @@ -0,0 +1,253 @@ +from unittest.mock import patch + +import pytest + +from app.decorators.exception_mapper import ExceptionMapper +from app.logger import logger +from tests.unit.decorators.test_classes import ( + ChildError, + DummyFactory, + GeneratedError, + 
ParentError, + UnmappedError, +) + + +def test_sync_exception_mapping() -> None: + """Test that the exception mapper works fine with sync functions.""" + + def sync_function() -> None: + raise ChildError("sync error") + + mapper = ExceptionMapper( + { + ParentError: DummyFactory("mapped"), + } + ) + + wrapped = mapper(sync_function) + + with pytest.raises(GeneratedError) as exc_info: + wrapped() + + assert str(exc_info.value).startswith("[mapped] sync error") + assert isinstance(exc_info.value.__cause__, ChildError) + + +def test_sync_exception_mapping_works_fine_with_mutlticatch() -> None: + """Test that the exception mapper works fine with sync functions.""" + + def sync_function(error_type: type[Exception]) -> None: + raise error_type("test_error") + + mapper = ExceptionMapper( + { + (ValueError, TypeError): DummyFactory("multicatch"), + ZeroDivisionError: DummyFactory("singlecatch"), + } + ) + + wrapped = mapper(sync_function) + + # 1. Check for first exception in multicatch + with pytest.raises(GeneratedError) as exc_info: + wrapped(ValueError) + + assert str(exc_info.value).startswith("[multicatch] test_error") + + # 2. Check for last exception in multicatch + with pytest.raises(GeneratedError) as exc_info: + wrapped(TypeError) + + assert str(exc_info.value).startswith("[multicatch] test_error") + + # 3. 
Check for single exception + with pytest.raises(GeneratedError) as exc_info: + wrapped(ZeroDivisionError) + + assert str(exc_info.value).startswith("[singlecatch] test_error") + + +@pytest.mark.asyncio +async def test_async_exception_mapping() -> None: + """Test that the exception mapper works fine with async functions.""" + + def async_function() -> None: + raise ChildError("async error") + + mapper = ExceptionMapper( + { + ParentError: DummyFactory("mapped"), + } + ) + + wrapped = mapper(async_function) + + with pytest.raises(GeneratedError) as exc_info: + await wrapped() + + assert str(exc_info.value).startswith("[mapped] async error") + assert isinstance(exc_info.value.__cause__, ChildError) + + +@pytest.mark.asyncio +async def test_catchall_mapping() -> None: + """Test that the exception mapper works fine with Exception in mapping.""" + mapper = ExceptionMapper( + { + ChildError: DummyFactory("mapped"), + Exception: DummyFactory("catchall"), + } + ) + + def raise_unmapped() -> None: + raise UnmappedError("unmapped!") + + wrapped = mapper(raise_unmapped) + + with pytest.raises(GeneratedError) as exc_info: + await wrapped() + + assert str(exc_info.value) == "[catchall] unmapped!" 
+ assert isinstance(exc_info.value.__cause__, UnmappedError) + + +@pytest.mark.asyncio +async def test_no_mapping_no_catchall() -> None: + """Test that the exception mapper works fine then non mapped exception is raised.""" + mapper = ExceptionMapper({}) + + async def raise_unknown() -> None: + raise UnmappedError("unmapped!") + + wrapped = mapper(raise_unknown) + + with pytest.raises(UnmappedError): + await wrapped() + + +def test_sync_function_no_exception() -> None: + """Test that the decorator passes through when no exception is raised.""" + + mapper = ExceptionMapper( + { + ParentError: DummyFactory("mapped"), + } + ) + + @mapper + def test_func() -> str: + return "success" + + assert test_func() == "success" + + +def test_mapper_works_fine_then_child_and_parent_in_map() -> None: + mapper = ExceptionMapper( + exception_map={ + ChildError: DummyFactory("child"), + ParentError: DummyFactory("parent"), + TypeError: DummyFactory("other"), + }, + max_cache_size=1, + ) + + @mapper + def function(exception: type[Exception]) -> None: + raise exception("Error") + + # 1. DummyChild raised and put in cache + with pytest.raises(GeneratedError) as exc_info: + function(ChildError) + + assert str(exc_info.value).startswith("[child]") + assert isinstance(exc_info.value.__cause__, ChildError) + + # 2. TypeError raised and put in cache + with pytest.raises(GeneratedError) as exc_info: + function(TypeError) + + assert isinstance(exc_info.value.__cause__, TypeError) + assert str(exc_info.value).startswith("[other]") + + # 3. 
DummyException raised and put in cache + with pytest.raises(GeneratedError) as exc_info: + function(ParentError) + assert isinstance(exc_info.value.__cause__, ParentError) + assert str(exc_info.value).startswith("[parent]") + + +def test_nested_exception_mappers() -> None: + """Test that nested exception mappers preserve the exception chain.""" + + child_mapper = ExceptionMapper( + exception_map={ + ChildError: DummyFactory("child"), + } + ) + + parent_mapper = ExceptionMapper( + exception_map={ + ParentError: DummyFactory("parent"), + } + ) + + @parent_mapper + @child_mapper + def function(exception: type[Exception]) -> None: + raise exception("Error") + + with pytest.raises(GeneratedError) as exc_info: + function(ChildError) + + assert str(exc_info.value).startswith("[child]") + assert isinstance(exc_info.value.__cause__, ChildError) + + with pytest.raises(GeneratedError) as exc_info: + function(ParentError) + + assert str(exc_info.value).startswith("[parent]") + assert isinstance(exc_info.value.__cause__, ParentError) + + +@pytest.mark.asyncio +async def test_work_with_logging() -> None: + """Test that the decorator logs exceptions for functions when configured.""" + + mapper = ExceptionMapper( + exception_map={ + ChildError: DummyFactory("child"), + } + ) + + @mapper + def test_func() -> None: + raise ChildError("[child]") + + with patch.object(logger, "error") as mock_logger: + with pytest.raises(GeneratedError): + test_func() + + mock_logger.assert_called_once() + assert "test_func" in mock_logger.call_args[0][0] + assert mock_logger.call_args[1]["exc_info"] is True + + +def test_sync_function_detailed_error_message() -> None: + """Test that the decorator provides detailed error messages when configured.""" + + mapper = ExceptionMapper( + exception_map={ChildError: DummyFactory("child", detailed=True)}, log_error=True + ) + + @mapper + def test_func(arg1: str, arg2: str) -> None: + raise ChildError("Original error") + + with pytest.raises(GeneratedError) as 
exc_info: + test_func("value1", "value2") + + error_message = str(exc_info.value) + assert "[child]" in error_message + assert "test_func" in error_message + assert "Args: [value1, value2]" in error_message diff --git a/tests/infrastructure/repository/__init__.py b/tests/unit/use_cases/__init__.py similarity index 100% rename from tests/infrastructure/repository/__init__.py rename to tests/unit/use_cases/__init__.py diff --git a/tests/application/unit/conftest.py b/tests/unit/use_cases/conftest.py similarity index 73% rename from tests/application/unit/conftest.py rename to tests/unit/use_cases/conftest.py index 6de516e..36b83c5 100644 --- a/tests/application/unit/conftest.py +++ b/tests/unit/use_cases/conftest.py @@ -2,8 +2,7 @@ from app.application.use_cases.interfaces import ISampleRecordUseCases from app.application.use_cases.record_use_cases import SampleRecordUseCases -from app.domain.entities.sample_record import SampleRecord -from tests.application.unit.fake_repository import FakeSampleRecordRepository +from tests.unit.use_cases.fake_repository import FakeSampleRecordRepository @pytest.fixture diff --git a/tests/application/unit/fake_repository.py b/tests/unit/use_cases/fake_repository.py similarity index 91% rename from tests/application/unit/fake_repository.py rename to tests/unit/use_cases/fake_repository.py index 3b95f75..0e8cd9a 100644 --- a/tests/application/unit/fake_repository.py +++ b/tests/unit/use_cases/fake_repository.py @@ -6,7 +6,7 @@ class FakeSampleRecordRepository(ISampleRecordRepository): - def __init__(self, records: List[SampleRecord] = None): + def __init__(self, records: List[SampleRecord] | None = None): self._records = {} if records: for id, record in enumerate(records): @@ -27,12 +27,11 @@ async def update(self, record: SampleRecord) -> SampleRecord: self._records[record.id] = record return record - async def delete(self, record_id: int) -> int: + async def delete(self, record_id: int) -> None: if record_id not in self._records: 
raise RecordDoesNotExistError(f"Record with id={record_id} does not exist.") del self._records[record_id] - return record_id async def get_by_id(self, record_id: int) -> SampleRecord: if record_id not in self._records: diff --git a/tests/application/unit/test_sample_record_use_cases.py b/tests/unit/use_cases/test_sample_record_use_cases.py similarity index 82% rename from tests/application/unit/test_sample_record_use_cases.py rename to tests/unit/use_cases/test_sample_record_use_cases.py index 3601345..b13f951 100644 --- a/tests/application/unit/test_sample_record_use_cases.py +++ b/tests/unit/use_cases/test_sample_record_use_cases.py @@ -2,9 +2,9 @@ from app.application.repository.exceptions import RecordDoesNotExistError from app.application.use_cases.interfaces import ISampleRecordUseCases -from app.presentation.bot.schemas import ( - SampleRecordResponseSchema, +from app.presentation.bot.schemas.sample_record import ( SampleRecordResponseListSchema, + SampleRecordResponseSchema, ) from tests.factories import ( SampleRecordCreateSchemaFactory, @@ -17,7 +17,7 @@ async def test_sample_record_use_case_add_record( ): sample_record_create_request = SampleRecordCreateSchemaFactory() response = await sample_record_use_cases_with_fake_repo.create_record( - sample_record_create_request + sample_record_create_request # type: ignore ) assert isinstance(response, SampleRecordResponseSchema) @@ -28,13 +28,13 @@ async def test_sample_record_use_case_update_record( sample_record_use_cases_with_fake_repo: ISampleRecordUseCases, ): existing_record = await sample_record_use_cases_with_fake_repo.create_record( - SampleRecordCreateSchemaFactory() + SampleRecordCreateSchemaFactory() # type: ignore ) update_request = SampleRecordUpdateSchemaFactory(id=existing_record.id) response = await sample_record_use_cases_with_fake_repo.update_record( - update_request + update_request # type: ignore ) assert isinstance(response, SampleRecordResponseSchema) @@ -45,20 +45,24 @@ async def 
test_sample_record_use_case_delete_record( sample_record_use_cases_with_fake_repo: ISampleRecordUseCases, ): existing_record = await sample_record_use_cases_with_fake_repo.create_record( - SampleRecordCreateSchemaFactory() + SampleRecordCreateSchemaFactory() # type: ignore ) - result = await sample_record_use_cases_with_fake_repo.delete_record( - existing_record.id - ) - assert result == existing_record.id + record = await sample_record_use_cases_with_fake_repo.get_record(existing_record.id) + + assert record == existing_record + + await sample_record_use_cases_with_fake_repo.delete_record(existing_record.id) + + with pytest.raises(RecordDoesNotExistError): + await sample_record_use_cases_with_fake_repo.get_record(existing_record.id) async def test_sample_record_use_case_get_record( sample_record_use_cases_with_fake_repo: ISampleRecordUseCases, ): existing_record = await sample_record_use_cases_with_fake_repo.create_record( - SampleRecordCreateSchemaFactory() + SampleRecordCreateSchemaFactory() # type: ignore ) response = await sample_record_use_cases_with_fake_repo.get_record( @@ -82,7 +86,7 @@ async def test_sample_record_use_case_get_all_records( assert isinstance(response, SampleRecordResponseListSchema) assert len(response.data) == len(existing_records) - for record, response_record in zip(existing_records, response.data): + for record, response_record in zip(existing_records, response.data, strict=True): assert record.id == response_record.id assert record.record_data == response_record.record_data @@ -103,7 +107,7 @@ async def test_update_non_existing_record_raises_error( with pytest.raises(RecordDoesNotExistError): await sample_record_use_cases_with_fake_repo.update_record( - SampleRecordUpdateSchemaFactory(id=42) + SampleRecordUpdateSchemaFactory(id=42) # type: ignore ) From 16bce98ce9571f70a13c863a120063458e7dfba5 Mon Sep 17 00:00:00 2001 From: vladimirgubarik Date: Fri, 1 Aug 2025 17:28:29 +0300 Subject: [PATCH 03/15] switch to dependency injector --- 
app/application/repository/interfaces.py | 2 +- app/decorators/exceptions_mapper.py | 151 ----------------- app/infrastructure/containers.py | 158 ++++++++++------- app/infrastructure/db/sample_record/models.py | 2 +- app/infrastructure/db/sqlalchemy.py | 25 +-- .../middlewares/answer_error.py | 34 ---- .../{ => repositories}/caching/__init__.py | 0 .../caching/callback_redis_repo.py | 0 .../caching/exception_handlers.py | 0 .../{ => repositories}/caching/redis_repo.py | 0 .../repositories/sample_record.py | 27 +-- app/infrastructure/worker/worker.py | 4 +- app/main.py | 94 ++++++----- app/presentation/api/bot.py | 7 +- app/presentation/bot/bot.py | 16 +- ...mple_record_simple.py => sample_record.py} | 5 +- .../bot/decorators}/__init__.py | 0 .../bot}/decorators/bot_exception_answer.py | 0 .../bot/middlewares}/__init__.py | 0 .../bot/middlewares}/answer_error.py | 43 ++++- .../bot}/middlewares/smart_logger.py | 15 +- .../bot/validators/sample_record.py | 13 +- app/presentation/dependencies/healthcheck.py | 9 +- app/services/log_formatters.py | 17 -- app/settings.py | 11 +- tests/conftest.py | 159 ++---------------- tests/factories.py | 11 -- .../bot_commands/__init__.py | 0 .../bot_commands/test_common.py | 0 tests/integration/conftest.py | 141 ++++++++++++++++ tests/integration/endpoints/test_botx.py | 11 +- tests/integration/factories.py | 13 ++ .../test_sample_record_repository.py | 2 +- .../test_sample_record_use_cases_int.py | 3 +- ...y => test_sample_record_use_cases_unit.py} | 0 35 files changed, 442 insertions(+), 531 deletions(-) delete mode 100644 app/decorators/exceptions_mapper.py delete mode 100644 app/infrastructure/middlewares/answer_error.py rename app/infrastructure/{ => repositories}/caching/__init__.py (100%) rename app/infrastructure/{ => repositories}/caching/callback_redis_repo.py (100%) rename app/infrastructure/{ => repositories}/caching/exception_handlers.py (100%) rename app/infrastructure/{ => repositories}/caching/redis_repo.py (100%) 
rename app/presentation/bot/commands/{sample_record_simple.py => sample_record.py} (87%) rename app/{infrastructure/middlewares => presentation/bot/decorators}/__init__.py (100%) rename app/{ => presentation/bot}/decorators/bot_exception_answer.py (100%) rename app/{services => presentation/bot/middlewares}/__init__.py (100%) rename app/{services => presentation/bot/middlewares}/answer_error.py (58%) rename app/{infrastructure => presentation/bot}/middlewares/smart_logger.py (60%) delete mode 100644 app/services/log_formatters.py rename tests/{unit => integration}/bot_commands/__init__.py (100%) rename tests/{unit => integration}/bot_commands/test_common.py (100%) create mode 100644 tests/integration/conftest.py create mode 100644 tests/integration/factories.py rename tests/unit/use_cases/{test_sample_record_use_cases.py => test_sample_record_use_cases_unit.py} (100%) diff --git a/app/application/repository/interfaces.py b/app/application/repository/interfaces.py index f3a486e..2d3439a 100644 --- a/app/application/repository/interfaces.py +++ b/app/application/repository/interfaces.py @@ -53,7 +53,7 @@ async def get_by_id(self, record_id: int) -> SampleRecord: record_id: The record id to be created. 
Returns: - SampleRecord: The record from database.""" + SampleRecord: The record from a database.""" pass @abstractmethod diff --git a/app/decorators/exceptions_mapper.py b/app/decorators/exceptions_mapper.py deleted file mode 100644 index 7c8d3c1..0000000 --- a/app/decorators/exceptions_mapper.py +++ /dev/null @@ -1,151 +0,0 @@ -"""Decorators to rethrow and log exceptions.""" - -import asyncio -import inspect -from functools import wraps -from typing import Any, Callable, Tuple, Type, TypeVar, Union, cast - -from app.logger import logger - -FunctionType = TypeVar("FunctionType", bound=Callable[..., Any]) - -CatchExceptionClass = Union[Type[Exception], Tuple[Type[Exception], ...]] -T = TypeVar("T") -Decorator = Callable[[Callable[..., T]], Callable[..., T]] - - -def _get_error_message( - ex: Exception, - func: Callable[..., Any], - args: tuple[Any, ...] | None = None, - kwargs: dict[str, Any] | None = None, - use_short_error_message: bool = True, -) -> str: - """ - Generate an error message string based on the given exception and function context. - - :param ex: The exception that occurred. - :param func: The function in which the exception occurred. - :param args: Optional tuple of positional arguments passed to the function. - :param kwargs: Optional dictionary of keyword arguments passed to the function. - :param use_short_error_message: Flag to indicate whether to generate a brief - error message (True) or a detailed one (False). - :return: A formatted error message string representing the exception and its - context. 
- """ - - if use_short_error_message: - return str(ex) - - error_context = [ - f"Error in function '{func.__module__}.{func.__qualname__}'", - f"Original exception: {ex.__class__.__name__}: {str(ex)}", - ] - - filtered_args = args[1:] if args and inspect.ismethod(func) else args - - if filtered_args: - args_str = ", ".join(str(arg)[:100] for arg in filtered_args) - error_context.append(f"Args: [{args_str}]") - - if kwargs: - kwargs_str = ", ".join(f"{k}={str(v)[:100]}" for k, v in kwargs.items()) - error_context.append(f"Kwargs: {kwargs_str}") - - return "\n".join(error_context) - - -def _create_sync_wrapper( - func: Callable[..., Any], - catch_exceptions: CatchExceptionClass, - raise_exception: Type[Exception], - use_short_erroro_message: bool, - log_exception: bool, -) -> Callable[..., Any]: - """Create a synchronous wrapper function for exception mapping.""" - - @wraps(func) - def sync_wrapper(*args: Any, **kwargs: Any) -> Any: - try: - return func(*args, **kwargs) - except catch_exceptions as ex: - if log_exception: - logger.error(f"Error in {func.__name__}", exc_info=True) - error_message = _get_error_message( - ex, func, args, kwargs, use_short_erroro_message - ) - raise raise_exception(error_message) from ex - - return sync_wrapper - - -def _create_async_wrapper( - func: Callable[..., Any], - catch_exceptions: CatchExceptionClass, - raise_exception_class: Type[Exception], - use_short_error_message: bool, - log_exception: bool, -) -> Callable[..., Any]: - """Create an asynchronous wrapper function for exception mapping.""" - - @wraps(func) - async def async_wrapper(*args: Any, **kwargs: Any) -> Any: - try: - return await func(*args, **kwargs) - except catch_exceptions as ex: - if log_exception: - logger.error(f"Error in {func.__name__}", exc_info=True) - error_message = _get_error_message( - ex, func, args, kwargs, use_short_error_message - ) - raise raise_exception_class(error_message) from ex - - return async_wrapper - - -def exception_mapper( - 
raise_exception: Type[Exception], - catch_exceptions: CatchExceptionClass = Exception, - use_short_error_message: bool = False, - log_exception: bool = False, -) -> Decorator: - """ - Map exceptions from one to another, with optional logging and message adjustments. - - This function creates a decorator to wrap a function or coroutine and modify its - exception handling behavior. Specifically, it catches specified exceptions and - raises them as another exception type, with options to log the exception and - adjust whether a short error message is used. - - :param raise_exception: The exception type to raise instead of the caught exception. - :param catch_exceptions: The exception type or types to catch within the function. - :param use_short_error_message: Whether to use a shortened error message when - raising the new exception. - :param log_exception: Whether to log the exception when it is caught. - :return: A decorator for handling exceptions as per the specified parameters. - """ - - def decorator(func: Callable[..., Any]) -> Callable[..., Any]: - if asyncio.iscoroutinefunction(func): - return cast( - Callable[..., Any], - _create_async_wrapper( - func, - catch_exceptions, - raise_exception, - use_short_error_message, - log_exception, - ), - ) - return cast( - Callable[..., Any], - _create_sync_wrapper( - func, - catch_exceptions, - raise_exception, - use_short_error_message, - log_exception, - ), - ) - - return decorator diff --git a/app/infrastructure/containers.py b/app/infrastructure/containers.py index 5ec979c..5fd98ba 100644 --- a/app/infrastructure/containers.py +++ b/app/infrastructure/containers.py @@ -1,38 +1,30 @@ -from dependency_injector import containers +import asyncio + +from dependency_injector import containers, providers from dependency_injector.providers import Callable, Factory, Singleton +from httpx import AsyncClient, Limits +from pybotx import Bot from redis import asyncio as aioredis from app.application.use_cases.record_use_cases 
import SampleRecordUseCases -from app.infrastructure.caching.redis_repo import RedisRepo -from app.infrastructure.db.sqlalchemy import build_db_session_factory +from app.infrastructure.repositories.caching.callback_redis_repo import ( + CallbackRedisRepo, +) +from app.infrastructure.repositories.caching.exception_handlers import ( + PubsubExceptionHandler, +) +from app.infrastructure.repositories.caching.redis_repo import RedisRepo from app.infrastructure.repositories.sample_record import SampleRecordRepository +from app.logger import logger + +from app.presentation.bot.handlers.internal_error import internal_error_handler +from app.presentation.bot.middlewares.answer_error import answer_error_middleware +from app.presentation.bot.middlewares.smart_logger import smart_logger_middleware from app.presentation.bot.resources import strings from app.settings import settings class BotSampleRecordCommandContainer(containers.DeclarativeContainer): - wiring_config = containers.WiringConfiguration( - modules=["app.presentation.bot.commands.sample_records"] - ) - - # Session factory provider - returns a factory that creates AsyncSession instances - session_factory = Factory(build_db_session_factory) - - record_use_cases_factory = Callable( - lambda session: SampleRecordUseCases( - record_repo=SampleRecordRepository(session=session) - ) - ) - - -class HealthCheckContainer(containers.DeclarativeContainer): - wiring_config = containers.WiringConfiguration( - modules=["app.presentation.api.healthcheck"] - ) - - # Session factory provider - returns a factory that creates AsyncSession instances - session_factory = Factory(build_db_session_factory) - record_use_cases_factory = Callable( lambda session: SampleRecordUseCases( record_repo=SampleRecordRepository(session=session) @@ -61,19 +53,46 @@ class HealthCheckContainer(containers.DeclarativeContainer): # ) -class ApplicationStartupContainer(containers.DeclarativeContainer): - """Container for application startup dependencies.""" 
+class CallbackTaskManager: + """Менеджер для управления задачей обработки callbacks""" - wiring_config = containers.WiringConfiguration(modules=["app.main"]) + def __init__(self, callback_repo: CallbackRedisRepo): + self.callback_repo = callback_repo + self._task: asyncio.Task | None = None - # Database - # db_session_factory = Factory(build_db_session_factory) + def _get_task(self) -> asyncio.Task: + """Получает или создает задачу в текущем цикле событий""" + if self._task is None or self._task.done(): + self._task = asyncio.create_task( + self.callback_repo.pubsub.run( + exception_handler=PubsubExceptionHandler() + ) + ) + return self._task - # Redis client - redis_client = Singleton( - aioredis.from_url, - settings.REDIS_DSN, - ) + def _cancel_task(self) -> None: + """Отменяет задачу если она существует""" + if self._task and not self._task.done(): + self._task.cancel() + + async def shutdown(self) -> None: + """Корректное завершение работы менеджера""" + if self._task: + self._cancel_task() + try: + await asyncio.gather(self._task, return_exceptions=True) + except RuntimeError as e: + logger.warning(f"Error at gather CallbackTaskManager tasks: {e}") + + def __call__(self) -> asyncio.Task: + """Позволяет использовать как callable для провайдера""" + return self._get_task() + + +class ApplicationStartupContainer(containers.DeclarativeContainer): + """Container for application startup dependencies.""" + + redis_client = Singleton(lambda: aioredis.from_url(settings.REDIS_DSN)) redis_repo = Factory( RedisRepo, @@ -81,30 +100,45 @@ class ApplicationStartupContainer(containers.DeclarativeContainer): prefix=strings.BOT_PROJECT_NAME, ) - ## Configure connection pool for Redis - # redis_connection_pool = Callable( - # lambda: aioredis.BlockingConnectionPool( - # max_connections=settings.REDIS_CONNECTION_POOL_SIZE, - # **(redis_client.provided.connection_pool.connection_kwargs), - # ) - # ) - # - # # Set connection pool for Redis client - # redis_client_with_pool = 
Callable( - # lambda: redis_client.provided.__setattr__( - # "connection_pool", redis_connection_pool() - # ) or redis_client.provided - # ) - # - # # Redis repo - # redis_repo = Factory( - # RedisRepo, - # redis=redis_client_with_pool, - # prefix=strings.BOT_PROJECT_NAME, - # ) - # - # # Callback repo - # callback_repo = Factory( - # CallbackRedisRepo, - # redis=redis_client_with_pool, - # ) + async_client = providers.Singleton( + AsyncClient, + timeout=60, + limits=Limits(max_keepalive_connections=None, max_connections=None), + ) + + callback_repo = providers.Singleton( + CallbackRedisRepo, + redis=redis_client, + ) + + exception_handlers = ( + {} if not settings.RAISE_BOT_EXCEPTIONS else {Exception: internal_error_handler} + ) + + from app.presentation.bot.commands import common, sample_record + + bot = providers.Singleton( + Bot, + collectors=[common.collector, sample_record.collector], + bot_accounts=settings.BOT_CREDENTIALS, + exception_handlers=exception_handlers, # type: ignore + default_callback_timeout=settings.BOTX_CALLBACK_TIMEOUT_IN_SECONDS, + httpx_client=async_client, + middlewares=[ + smart_logger_middleware, + answer_error_middleware, + ], + callback_repo=callback_repo, + ) + + # Используем менеджер задач для ленивой инициализации + callback_task_manager = providers.Singleton( + CallbackTaskManager, + callback_repo, + ) + + # Провайдер который возвращает задачу через менеджер + process_callbacks_task = providers.Callable( + lambda manager: manager(), + callback_task_manager, + ) diff --git a/app/infrastructure/db/sample_record/models.py b/app/infrastructure/db/sample_record/models.py index 669bcae..ed75483 100644 --- a/app/infrastructure/db/sample_record/models.py +++ b/app/infrastructure/db/sample_record/models.py @@ -14,5 +14,5 @@ class SampleRecordModel(Base): record_data: Mapped[str] def __repr__(self) -> str: - """Show string representation of record.""" + """Show string representation of a record.""" return self.record_data diff --git 
a/app/infrastructure/db/sqlalchemy.py b/app/infrastructure/db/sqlalchemy.py index 5e6605c..e92d8ca 100644 --- a/app/infrastructure/db/sqlalchemy.py +++ b/app/infrastructure/db/sqlalchemy.py @@ -48,26 +48,13 @@ def get_engine() -> AsyncEngine: return create_async_engine( make_url_async(settings.POSTGRES_DSN), poolclass=AsyncAdaptedQueuePool, + pool_size=settings.DB_ENGINE_POOL_SIZE, + max_overflow=settings.DB_ENGINE_MAX_OVERFLOW, + pool_recycle=settings.DB_ENGINE_POOL_RECYCLE, ) -async def build_db_session_factory() -> AsyncSessionFactory: - await verify_db_connection(get_engine()) - - return async_scoped_session( - async_sessionmaker(bind=get_engine(), expire_on_commit=False), - scopefunc=current_task, - ) - - -@asynccontextmanager -async def session_resource() -> AsyncSession: - factory = await build_db_session_factory() - session: AsyncSession = factory() - try: - yield session - finally: - await session.close() +session_factory = async_sessionmaker(bind=get_engine(), expire_on_commit=False) async def verify_db_connection(engine: AsyncEngine) -> None: @@ -79,7 +66,7 @@ async def close_db_connections() -> None: await get_engine().dispose() -def provide_session(func: Callable) -> Callable: +def provide_transaction_session(func: Callable) -> Callable: """ Provides a database session to an async function if one is not already passed. 
@@ -91,8 +78,6 @@ def provide_session(func: Callable) -> Callable: async def wrapper(*args: Any, **kwargs: Any) -> Any: if kwargs.get("session"): return await func(*args, **kwargs) - - session_factory = await build_db_session_factory() async with session_factory() as session: return await func(*args, **kwargs, session=session) diff --git a/app/infrastructure/middlewares/answer_error.py b/app/infrastructure/middlewares/answer_error.py deleted file mode 100644 index 8d0a850..0000000 --- a/app/infrastructure/middlewares/answer_error.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Middleware to handle AnswerError and AnswerMessageError exceptions.""" - -from pybotx import Bot, IncomingMessage, IncomingMessageHandlerFunc - -from app.services.answer_error import AnswerError, AnswerMessageError - - -async def answer_error_middleware( - message: IncomingMessage, bot: Bot, call_next: IncomingMessageHandlerFunc -) -> None: - try: - await call_next(message, bot) - except AnswerError as exc: - await bot.send( - message=exc.message, - wait_callback=exc.wait_callback, - callback_timeout=exc.callback_timeout, - ) - except AnswerMessageError as exc: - await bot.answer_message( - body=exc.body, - metadata=exc.metadata, - bubbles=exc.bubbles, - keyboard=exc.keyboard, - file=exc.file, - recipients=exc.recipients, - silent_response=exc.silent_response, - markup_auto_adjust=exc.markup_auto_adjust, - stealth_mode=exc.stealth_mode, - send_push=exc.send_push, - ignore_mute=exc.ignore_mute, - wait_callback=exc.wait_callback, - callback_timeout=exc.callback_timeout, - ) diff --git a/app/infrastructure/caching/__init__.py b/app/infrastructure/repositories/caching/__init__.py similarity index 100% rename from app/infrastructure/caching/__init__.py rename to app/infrastructure/repositories/caching/__init__.py diff --git a/app/infrastructure/caching/callback_redis_repo.py b/app/infrastructure/repositories/caching/callback_redis_repo.py similarity index 100% rename from 
app/infrastructure/caching/callback_redis_repo.py rename to app/infrastructure/repositories/caching/callback_redis_repo.py diff --git a/app/infrastructure/caching/exception_handlers.py b/app/infrastructure/repositories/caching/exception_handlers.py similarity index 100% rename from app/infrastructure/caching/exception_handlers.py rename to app/infrastructure/repositories/caching/exception_handlers.py diff --git a/app/infrastructure/caching/redis_repo.py b/app/infrastructure/repositories/caching/redis_repo.py similarity index 100% rename from app/infrastructure/caching/redis_repo.py rename to app/infrastructure/repositories/caching/redis_repo.py diff --git a/app/infrastructure/repositories/sample_record.py b/app/infrastructure/repositories/sample_record.py index f829643..6c73b5c 100644 --- a/app/infrastructure/repositories/sample_record.py +++ b/app/infrastructure/repositories/sample_record.py @@ -13,7 +13,7 @@ RecordUpdateError, ) from app.application.repository.interfaces import ISampleRecordRepository -from app.decorators.exceptions_mapper import exception_mapper +from app.decorators.exception_mapper import ExceptionMapper, EnrichedExceptionFactory from app.domain.entities.sample_record import SampleRecord from app.infrastructure.db.sample_record.models import SampleRecordModel from app.infrastructure.db.sqlalchemy import AsyncSession @@ -30,8 +30,9 @@ def __init__(self, session: AsyncSession): """ self._session = session - @exception_mapper( - catch_exceptions=SQLAlchemyError, raise_exception=RecordCreateError + @ExceptionMapper( + {SQLAlchemyError: EnrichedExceptionFactory(RecordCreateError)}, + is_bound_method=True, ) async def create(self, record: SampleRecord) -> SampleRecord: query = ( @@ -44,8 +45,9 @@ async def create(self, record: SampleRecord) -> SampleRecord: record_model = result.scalar_one() return self._to_domain_object(record_model) - @exception_mapper( - catch_exceptions=SQLAlchemyError, raise_exception=RecordUpdateError + @ExceptionMapper( + 
{SQLAlchemyError: EnrichedExceptionFactory(RecordUpdateError)}, + is_bound_method=True, ) async def update(self, record: SampleRecord) -> SampleRecord: query = ( @@ -63,8 +65,9 @@ async def update(self, record: SampleRecord) -> SampleRecord: return self._to_domain_object(execute_result) - @exception_mapper( - catch_exceptions=SQLAlchemyError, raise_exception=RecordDeleteError + @ExceptionMapper( + {SQLAlchemyError: EnrichedExceptionFactory(RecordDeleteError)}, + is_bound_method=True, ) async def delete(self, record_id: int) -> None: """Delete a record. @@ -88,8 +91,9 @@ async def delete(self, record_id: int) -> None: await self._session.flush() - @exception_mapper( - catch_exceptions=NoResultFound, raise_exception=RecordDoesNotExistError + @ExceptionMapper( + {SQLAlchemyError: EnrichedExceptionFactory(RecordDoesNotExistError)}, + is_bound_method=True, ) async def get_by_id(self, record_id: int) -> SampleRecord: """Get a record by ID. @@ -107,8 +111,9 @@ async def get_by_id(self, record_id: int) -> SampleRecord: result = await self._session.execute(query) return self._to_domain_object(result.scalar_one()) - @exception_mapper( - catch_exceptions=SQLAlchemyError, raise_exception=RecordRetreiveError + @ExceptionMapper( + {SQLAlchemyError: EnrichedExceptionFactory(RecordRetreiveError)}, + is_bound_method=True, ) async def get_all(self) -> List[SampleRecord]: """Get all records. 
diff --git a/app/infrastructure/worker/worker.py b/app/infrastructure/worker/worker.py index dc61452..329e160 100644 --- a/app/infrastructure/worker/worker.py +++ b/app/infrastructure/worker/worker.py @@ -6,7 +6,9 @@ from redis import asyncio as aioredis from saq import Queue -from app.infrastructure.caching.callback_redis_repo import CallbackRedisRepo +from app.infrastructure.repositories.caching.callback_redis_repo import ( + CallbackRedisRepo, +) from app.logger import logger # `saq` import its own settings and hides our module diff --git a/app/main.py b/app/main.py index 33e308d..207db40 100644 --- a/app/main.py +++ b/app/main.py @@ -3,17 +3,27 @@ import asyncio from functools import partial -from dependency_injector.wiring import Provide +from dependency_injector.wiring import Provide, inject from fastapi import FastAPI from pybotx import Bot from redis import asyncio as aioredis from redis.asyncio import Redis -from app.infrastructure.caching.callback_redis_repo import CallbackRedisRepo -from app.infrastructure.caching.exception_handlers import PubsubExceptionHandler -from app.infrastructure.caching.redis_repo import RedisRepo -from app.infrastructure.containers import ApplicationStartupContainer +import tests.integration.conftest +from app.infrastructure.repositories.caching.callback_redis_repo import ( + CallbackRedisRepo, +) +from app.infrastructure.repositories.caching.exception_handlers import ( + PubsubExceptionHandler, +) +from app.infrastructure.repositories.caching.redis_repo import RedisRepo +from app.infrastructure.containers import ( + ApplicationStartupContainer, + BotSampleRecordCommandContainer, + CallbackTaskManager, +) from app.infrastructure.db.sqlalchemy import close_db_connections +from app.logger import logger from app.presentation.api.routers import router from app.presentation.bot.bot import get_bot from app.presentation.bot.resources import strings @@ -21,63 +31,57 @@ async def startup( - application: FastAPI, - raise_bot_exceptions: 
bool, - redis_client: Redis = Provide[ApplicationStartupContainer.redis_client], - redis_repo: RedisRepo = Provide[ApplicationStartupContainer.redis_repo], + bot: Bot = Provide[ApplicationStartupContainer.bot], ) -> None: - pool = aioredis.BlockingConnectionPool( - max_connections=settings.REDIS_CONNECTION_POOL_SIZE, - **redis_client.connection_pool.connection_kwargs, - ) - redis_client.connection_pool = pool - - # -- Bot -- - callback_repo = CallbackRedisRepo(redis_client) - process_callbacks_task = asyncio.create_task( - callback_repo.pubsub.run(exception_handler=PubsubExceptionHandler()) - ) - bot = get_bot(callback_repo, raise_exceptions=raise_bot_exceptions) - await bot.startup() - # bot.state.db_session_factory = db_session_factory - bot.state.redis_repo = redis_repo - - application.state.bot = bot - application.state.redis = redis_client - application.state.process_callbacks_task = process_callbacks_task - -async def shutdown(application: FastAPI) -> None: - # -- Bot -- - bot: Bot = application.state.bot +async def shutdown( + callback_task_manager: CallbackTaskManager = Provide[ + ApplicationStartupContainer.callback_task_manager + ], + bot: Bot = Provide[ApplicationStartupContainer.bot], + redis_client: Redis = Provide[ApplicationStartupContainer.redis_client], +) -> None: await bot.shutdown() - process_callbacks_task: asyncio.Task = application.state.process_callbacks_task - process_callbacks_task.cancel() - await asyncio.gather(process_callbacks_task, return_exceptions=True) - # -- Redis -- - redis_client: aioredis.Redis = application.state.redis - await redis_client.close() + await callback_task_manager.shutdown() - # -- Database -- + await redis_client.aclose() await close_db_connections() -def get_application(raise_bot_exceptions: bool = False) -> FastAPI: +def get_application() -> FastAPI: """Create configured server application instance.""" - # Initialize the container - container = ApplicationStartupContainer() - 
container.wire(modules=["app.main"]) + # Initialize the main application container + main_container = ApplicationStartupContainer() + main_container.wire(modules=["app.main"]) + + # Initialize the SampleRecord commands container + sample_record_commands_container = BotSampleRecordCommandContainer() + sample_record_commands_container.wire( + modules=["app.presentation.bot.commands.sample_record"] + ) application = FastAPI(title=strings.BOT_PROJECT_NAME, openapi_url=None) + # put bot to state for tests + application.state.bot = main_container.bot() + + application.add_event_handler( + "startup", + partial(startup, bot=main_container.bot()), + ) application.add_event_handler( - "startup", partial(startup, application, raise_bot_exceptions) + "shutdown", + partial( + shutdown, + callback_task_manager=main_container.callback_task_manager(), + bot=main_container.bot(), + redis_client=main_container.redis_client(), + ), ) - application.add_event_handler("shutdown", partial(shutdown, application)) application.include_router(router) diff --git a/app/presentation/api/bot.py b/app/presentation/api/bot.py index cba1eb8..62cf07d 100644 --- a/app/presentation/api/bot.py +++ b/app/presentation/api/bot.py @@ -3,11 +3,14 @@ from fastapi import Depends, Request from pybotx import Bot +import tests.integration.conftest + def get_bot(request: Request) -> Bot: - assert isinstance(request.app.state.bot, Bot) + if not isinstance(bot := request.app.state.bot, Bot): + raise RuntimeError(f"request.app.state.bot should be Bot instance. 
") - return request.app.state.bot + return bot bot_dependency = Depends(get_bot) diff --git a/app/presentation/bot/bot.py b/app/presentation/bot/bot.py index c75f38e..dc515d6 100644 --- a/app/presentation/bot/bot.py +++ b/app/presentation/bot/bot.py @@ -3,25 +3,23 @@ from httpx import AsyncClient, Limits from pybotx import Bot, CallbackRepoProto -from app.infrastructure.middlewares.answer_error import answer_error_middleware -from app.infrastructure.middlewares.smart_logger import smart_logger_middleware -from app.presentation.bot.commands import common, sample_record_simple +from app.presentation.bot.middlewares.answer_error import answer_error_middleware +from app.presentation.bot.middlewares.smart_logger import smart_logger_middleware +from app.presentation.bot.commands import common, sample_record from app.presentation.bot.handlers.internal_error import internal_error_handler from app.settings import settings -BOTX_CALLBACK_TIMEOUT = 30 - -def get_bot(callback_repo: CallbackRepoProto, raise_exceptions: bool) -> Bot: +def get_bot(callback_repo: CallbackRepoProto) -> Bot: exception_handlers = {} - if not raise_exceptions: + if not settings.RAISE_BOT_EXCEPTIONS: exception_handlers[Exception] = internal_error_handler return Bot( - collectors=[common.collector, sample_record_simple.collector], + collectors=[common.collector, sample_record.collector], bot_accounts=settings.BOT_CREDENTIALS, exception_handlers=exception_handlers, # type: ignore - default_callback_timeout=BOTX_CALLBACK_TIMEOUT, + default_callback_timeout=settings.BOTX_CALLBACK_TIMEOUT_IN_SECONDS, httpx_client=AsyncClient( timeout=60, limits=Limits(max_keepalive_connections=None, max_connections=None), diff --git a/app/presentation/bot/commands/sample_record_simple.py b/app/presentation/bot/commands/sample_record.py similarity index 87% rename from app/presentation/bot/commands/sample_record_simple.py rename to app/presentation/bot/commands/sample_record.py index 4dece5d..22f3f95 100644 --- 
a/app/presentation/bot/commands/sample_record_simple.py +++ b/app/presentation/bot/commands/sample_record.py @@ -5,7 +5,7 @@ from app.application.use_cases.interfaces import ISampleRecordUseCases from app.infrastructure.containers import BotSampleRecordCommandContainer -from app.infrastructure.db.sqlalchemy import provide_session +from app.infrastructure.db.sqlalchemy import provide_transaction_session from app.presentation.bot.commands.command_listing import SampleRecordCommands from app.presentation.bot.handlers.sample_record import CreateSampleRecordHandler @@ -13,7 +13,7 @@ @collector.command(**SampleRecordCommands.CREATE_RECORD.command_data()) -@provide_session +@provide_transaction_session @inject async def create_sample_record( message: IncomingMessage, @@ -23,6 +23,7 @@ async def create_sample_record( BotSampleRecordCommandContainer.record_use_cases_factory ], ) -> None: + """Creates a sample record in the database.""" await CreateSampleRecordHandler( bot=bot, message=message, use_cases=record_use_cases_factory.provider(session) ).execute() diff --git a/app/infrastructure/middlewares/__init__.py b/app/presentation/bot/decorators/__init__.py similarity index 100% rename from app/infrastructure/middlewares/__init__.py rename to app/presentation/bot/decorators/__init__.py diff --git a/app/decorators/bot_exception_answer.py b/app/presentation/bot/decorators/bot_exception_answer.py similarity index 100% rename from app/decorators/bot_exception_answer.py rename to app/presentation/bot/decorators/bot_exception_answer.py diff --git a/app/services/__init__.py b/app/presentation/bot/middlewares/__init__.py similarity index 100% rename from app/services/__init__.py rename to app/presentation/bot/middlewares/__init__.py diff --git a/app/services/answer_error.py b/app/presentation/bot/middlewares/answer_error.py similarity index 58% rename from app/services/answer_error.py rename to app/presentation/bot/middlewares/answer_error.py index 8e62bab..d94eb6c 100644 --- 
a/app/services/answer_error.py +++ b/app/presentation/bot/middlewares/answer_error.py @@ -1,13 +1,50 @@ -"""Exceptions to break command handling and answer message.""" +"""Middleware to handle AnswerError and AnswerMessageError exceptions.""" -from typing import Any, Dict, List, Optional, Union +from typing import Dict, Any, Union, List, Optional from uuid import UUID -from pybotx import BubbleMarkup, KeyboardMarkup, OutgoingAttachment, OutgoingMessage +from pybotx import ( + Bot, + IncomingMessage, + IncomingMessageHandlerFunc, + BubbleMarkup, + KeyboardMarkup, + OutgoingAttachment, + OutgoingMessage, +) from pybotx.missing import Missing, Undefined from pybotx.models.attachments import IncomingFileAttachment +async def answer_error_middleware( + message: IncomingMessage, bot: Bot, call_next: IncomingMessageHandlerFunc +) -> None: + try: + await call_next(message, bot) + except AnswerError as exc: + await bot.send( + message=exc.message, + wait_callback=exc.wait_callback, + callback_timeout=exc.callback_timeout, + ) + except AnswerMessageError as exc: + await bot.answer_message( + body=exc.body, + metadata=exc.metadata, + bubbles=exc.bubbles, + keyboard=exc.keyboard, + file=exc.file, + recipients=exc.recipients, + silent_response=exc.silent_response, + markup_auto_adjust=exc.markup_auto_adjust, + stealth_mode=exc.stealth_mode, + send_push=exc.send_push, + ignore_mute=exc.ignore_mute, + wait_callback=exc.wait_callback, + callback_timeout=exc.callback_timeout, + ) + + class AnswerMessageError(Exception): def __init__( self, diff --git a/app/infrastructure/middlewares/smart_logger.py b/app/presentation/bot/middlewares/smart_logger.py similarity index 60% rename from app/infrastructure/middlewares/smart_logger.py rename to app/presentation/bot/middlewares/smart_logger.py index 13eeebd..bdc6dec 100644 --- a/app/infrastructure/middlewares/smart_logger.py +++ b/app/presentation/bot/middlewares/smart_logger.py @@ -1,9 +1,13 @@ """Middlewares to log all requests using 
smart logger wrapper.""" +from pprint import pformat +from typing import Optional, Dict, Any + from pybotx import Bot, IncomingMessage, IncomingMessageHandlerFunc +from pybotx.logger import trim_file_data_in_incoming_json from pybotx_smart_logger.wrapper import wrap_smart_logger -from app.services.log_formatters import format_raw_command +from app.logger import logger from app.settings import settings @@ -20,3 +24,12 @@ async def smart_logger_middleware( debug=is_enabled_debug(message), ): await call_next(message, bot) + + +def format_raw_command(raw_command: Optional[Dict[str, Any]]) -> str: + if raw_command is None: + logger.warning("Empty `raw_command`") + return "" + + trimmed_raw_command = trim_file_data_in_incoming_json(raw_command) + return pformat(trimmed_raw_command) diff --git a/app/presentation/bot/validators/sample_record.py b/app/presentation/bot/validators/sample_record.py index e38ca12..2c5345a 100644 --- a/app/presentation/bot/validators/sample_record.py +++ b/app/presentation/bot/validators/sample_record.py @@ -2,8 +2,9 @@ from orjson import JSONDecodeError from pybotx import IncomingMessage from pydantic import ValidationError +from sqlalchemy.exc import SQLAlchemyError -from app.decorators.exceptions_mapper import exception_mapper +from app.decorators.exception_mapper import ExceptionMapper, EnrichedExceptionFactory from app.presentation.bot.schemas.sample_record import SampleRecordCreateRequestSchema from app.presentation.bot.validators.base import IBotRequestParser from app.presentation.bot.validators.exceptions import MessageValidationError @@ -12,9 +13,13 @@ class SampleRecordJsonCreateRequestValidator( IBotRequestParser[SampleRecordCreateRequestSchema] ): - @exception_mapper( - catch_exceptions=(JSONDecodeError, ValidationError), - raise_exception=MessageValidationError, + @ExceptionMapper( + { + (JSONDecodeError, ValidationError): EnrichedExceptionFactory( + MessageValidationError + ) + }, + is_bound_method=True, ) def parse(self, 
raw_input: IncomingMessage) -> SampleRecordCreateRequestSchema: message_json = orjson.loads(raw_input.argument) diff --git a/app/presentation/dependencies/healthcheck.py b/app/presentation/dependencies/healthcheck.py index 9fe9adc..38e3b2c 100644 --- a/app/presentation/dependencies/healthcheck.py +++ b/app/presentation/dependencies/healthcheck.py @@ -7,14 +7,15 @@ from pybotx import Bot from sqlalchemy.sql import text +import tests.integration.conftest from app.infrastructure.worker.worker import queue from app.settings import settings async def check_db_connection(request: Request) -> Optional[str]: - assert isinstance(request.app.state.bot, Bot) + assert isinstance(tests.integration.conftest.bot, Bot) - bot = request.app.state.bot + bot = tests.integration.conftest.bot session_factory = bot.state.db_session_factory async with session_factory() as db_session: @@ -30,9 +31,9 @@ async def check_db_connection(request: Request) -> Optional[str]: async def check_redis_connection(request: Request) -> Optional[str]: - assert isinstance(request.app.state.bot, Bot) + assert isinstance(tests.integration.conftest.bot, Bot) - bot = request.app.state.bot + bot = tests.integration.conftest.bot return await bot.state.redis_repo.ping() diff --git a/app/services/log_formatters.py b/app/services/log_formatters.py deleted file mode 100644 index d6b1018..0000000 --- a/app/services/log_formatters.py +++ /dev/null @@ -1,17 +0,0 @@ -"""Helpers to format log messages in smart logger wrapper.""" - -from pprint import pformat -from typing import Any, Dict, Optional - -from pybotx.logger import trim_file_data_in_incoming_json - -from app.logger import logger - - -def format_raw_command(raw_command: Optional[Dict[str, Any]]) -> str: - if raw_command is None: - logger.warning("Empty `raw_command`") - return "" - - trimmed_raw_command = trim_file_data_in_incoming_json(raw_command) - return pformat(trimmed_raw_command) diff --git a/app/settings.py b/app/settings.py index fb7d2d3..1f8101e 100644 
--- a/app/settings.py +++ b/app/settings.py @@ -4,7 +4,7 @@ from uuid import UUID from pybotx import BotAccountWithSecret -from pydantic import BaseSettings +from pydantic import BaseSettings, PositiveInt, Field class AppSettings(BaseSettings): @@ -59,11 +59,18 @@ def _build_credentials_from_string( # base kwargs DEBUG: bool = False + # hide original exceptions from bot user + RAISE_BOT_EXCEPTIONS: bool = False + # User huids for debug SMARTLOG_DEBUG_HUIDS: List[UUID] # database POSTGRES_DSN: str + DB_ENGINE_POOL_SIZE: PositiveInt = 4 + DB_ENGINE_MAX_OVERFLOW: int = Field(ge=-1, default=10) + DB_ENGINE_POOL_RECYCLE: int = Field(ge=-1, default=60 * 60) # 1 hour + SQL_DEBUG: bool = False # redis @@ -73,5 +80,7 @@ def _build_credentials_from_string( # healthcheck WORKER_TIMEOUT_SEC: float = 4 + BOTX_CALLBACK_TIMEOUT_IN_SECONDS = 30 + settings = AppSettings() # type: ignore[call-arg] diff --git a/tests/conftest.py b/tests/conftest.py index 7676021..4d9732c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,22 +1,11 @@ import asyncio -import re -from asyncio import AbstractEventLoop, current_task from datetime import datetime -from http import HTTPStatus -from pathlib import Path -from typing import Any, AsyncGenerator, Callable, Dict, Generator, Optional -from unittest.mock import AsyncMock, patch +from typing import Any, Callable, Dict, Optional from uuid import UUID, uuid4 -import httpx import jwt import pytest -import respx -from alembic import command -from alembic.config import Config -from asgi_lifespan import LifespanManager from pybotx import ( - Bot, BotAccount, Chat, ChatTypes, @@ -24,141 +13,21 @@ UserDevice, UserSender, ) -from pybotx.logger import logger -from sqlalchemy import NullPool -from sqlalchemy.ext.asyncio import ( - AsyncEngine, - AsyncSession, - async_scoped_session, - create_async_engine, -) -from sqlalchemy.orm import sessionmaker from testcontainers.postgres import PostgresContainer # type: ignore -from 
app.infrastructure.caching.redis_repo import RedisRepo -from app.infrastructure.db.sqlalchemy import ( - AsyncSessionFactory, - make_url_async, -) -from app.infrastructure.repositories.sample_record import SampleRecordRepository -from app.main import get_application from app.settings import settings -from tests.factories import SampleRecordModelFactory - - -@pytest.fixture(scope="session") -def postgres_container() -> Generator[PostgresContainer, None, None]: - """Starts a temporary PostgreSQL container for the test session.""" - container_name = "bot_testing_container" - - with PostgresContainer("postgres:15").with_name(container_name) as postgres: - container_url = postgres.get_connection_url() - with patch.object(settings, "POSTGRES_DSN", container_url): - yield postgres @pytest.fixture(scope="session") -def event_loop() -> Generator[AbstractEventLoop, None, None]: - """Create a session-scoped event loop for async session-scoped fixtures.""" +def event_loop(): + """Create a session-scoped event loop for async session-scoped fixtures. + Don't touch this fixture. 
Its internally used by pytest-asyncio.""" loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) yield loop loop.close() -@pytest.fixture(scope="session") -async def db_session_factory(postgres_container) -> AsyncSessionFactory: - engine: AsyncEngine = create_async_engine( - make_url_async(settings.POSTGRES_DSN), poolclass=NullPool - ) - - factory = async_scoped_session( - sessionmaker( - bind=engine, - expire_on_commit=False, - class_=AsyncSession, # type:ignore - ), - scopefunc=current_task, - ) - return factory - - -@pytest.fixture -async def isolated_session(db_session_factory): - """Isolated session with proper rollback to prevent test data leaks.""" - alembic_cfg = Config(str(Path(__file__).parent.parent / "alembic.ini")) - command.upgrade(alembic_cfg, "head") - async with db_session_factory() as session: - yield session - command.downgrade(alembic_cfg, "base") - - -@pytest.fixture -async def sample_record_repository(isolated_session) -> SampleRecordRepository: - return SampleRecordRepository(isolated_session) - - -@pytest.fixture -def sample_record_factory( - isolated_session, -) -> Generator[type[SampleRecordModelFactory], None, None]: - SampleRecordModelFactory._meta.sqlalchemy_session = isolated_session - yield SampleRecordModelFactory - SampleRecordModelFactory._meta.sqlalchemy_session = None - - -@pytest.fixture -async def db_session(bot: Bot) -> AsyncGenerator[AsyncSession, None]: - async with bot.state.db_session_factory() as session: - yield session - - -@pytest.fixture -async def redis_repo(bot: Bot) -> RedisRepo: - return bot.state.redis_repo - - -# def mock_authorization() -> None: -# respx.route(method="GET", path__regex="/api/v2/botx/bots/.*/token").mock( -# return_value=httpx.Response( -# HTTPStatus.OK, -# json={ -# "status": "ok", -# "result": "token", -# }, -# ), -# ) - - -def mock_authorization() -> None: - respx.get( - # url__regex=re.compile(r"^https://.*?/api/v2/botx/bots/[^/]+/token") - 
url__regex=re.compile(r"^https://[^/]+/api/v2/botx/bots/[^/]+/token(\?.*)?$") - # url__regex=re.compile(r"^https://.*?/api/v2/botx/bots/[^/]+/token(?:\?.*)?$") - ).mock( - return_value=httpx.Response( - HTTPStatus.OK, - json={ - "status": "ok", - "result": "token", - }, - ), - ) - - -@pytest.fixture -async def bot( - respx_mock: Callable[..., Any], # We can't apply pytest mark to fixture -) -> AsyncGenerator[Bot, None]: - fastapi_app = get_application() - - mock_authorization() - - async with LifespanManager(fastapi_app): - built_bot = fastapi_app.state.bot - built_bot.answer_message = AsyncMock(return_value=uuid4()) - yield built_bot - - @pytest.fixture def bot_id() -> UUID: return settings.BOT_CREDENTIALS[0].id @@ -256,12 +125,12 @@ def factory( return factory -@pytest.fixture -def loguru_caplog( - caplog: pytest.LogCaptureFixture, -) -> Generator[pytest.LogCaptureFixture, None, None]: - # https://github.com/Delgan/loguru/issues/59 - - handler_id = logger.add(caplog.handler, format="{message}") - yield caplog - logger.remove(handler_id) +# @pytest.fixture +# def loguru_caplog( +# caplog: pytest.LogCaptureFixture, +# ) -> Generator[pytest.LogCaptureFixture, None, None]: +# # https://github.com/Delgan/loguru/issues/59 +# +# handler_id = logger.add(caplog.handler, format="{message}") +# yield caplog +# logger.remove(handler_id) diff --git a/tests/factories.py b/tests/factories.py index a2c90d1..f8de85f 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -1,9 +1,7 @@ import factory -from async_factory_boy.factory.sqlalchemy import AsyncSQLAlchemyFactory # type ignore from factory import DictFactory, Factory from app.domain.entities.sample_record import SampleRecord -from app.infrastructure.db.sample_record.models import SampleRecordModel from app.presentation.bot.schemas.sample_record import ( SampleRecordCreateRequestSchema, SampleRecordDeleteRequestSchema, @@ -11,15 +9,6 @@ ) -class SampleRecordModelFactory(AsyncSQLAlchemyFactory): - """Factory for sample 
record model objects in the database.""" - - class Meta: - model = SampleRecordModel - - record_data = factory.Faker("text") - - class SampleRecordFactory(Factory): """Factory for sample record domain objects""" diff --git a/tests/unit/bot_commands/__init__.py b/tests/integration/bot_commands/__init__.py similarity index 100% rename from tests/unit/bot_commands/__init__.py rename to tests/integration/bot_commands/__init__.py diff --git a/tests/unit/bot_commands/test_common.py b/tests/integration/bot_commands/test_common.py similarity index 100% rename from tests/unit/bot_commands/test_common.py rename to tests/integration/bot_commands/test_common.py diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py new file mode 100644 index 0000000..875fc11 --- /dev/null +++ b/tests/integration/conftest.py @@ -0,0 +1,141 @@ +import re +from asyncio import current_task +from http import HTTPStatus +from pathlib import Path +from typing import Generator, Callable, Any, AsyncGenerator +from unittest.mock import patch, AsyncMock +from uuid import uuid4 + +import httpx +import pytest +import respx +from alembic import command +from alembic.config import Config +from asgi_lifespan import LifespanManager +from pybotx import Bot +from sqlalchemy import NullPool +from sqlalchemy.ext.asyncio import ( + AsyncEngine, + create_async_engine, + async_scoped_session, + AsyncSession, +) +from sqlalchemy.orm import sessionmaker +from testcontainers.postgres import PostgresContainer +from testcontainers.redis import RedisContainer + +from app.infrastructure.db.sqlalchemy import AsyncSessionFactory, make_url_async +from app.infrastructure.repositories.sample_record import SampleRecordRepository +from app.main import get_application +from app.settings import settings +from tests.integration.factories import SampleRecordModelFactory + + +@pytest.fixture +def sample_record_factory( + isolated_session, +) -> Generator[type[SampleRecordModelFactory], None, None]: + 
SampleRecordModelFactory._meta.sqlalchemy_session = isolated_session + yield SampleRecordModelFactory + SampleRecordModelFactory._meta.sqlalchemy_session = None + + +@pytest.fixture(scope="session") +def postgres_container() -> Generator[PostgresContainer, None, None]: + """Starts a temporary PostgreSQL container for the test session.""" + container_name = "bot_testing_container" + + with PostgresContainer("postgres:15").with_name(container_name) as postgres: + container_url = postgres.get_connection_url() + with patch.object(settings, "POSTGRES_DSN", container_url): + yield postgres + + +@pytest.fixture(scope="session") +def redis_container() -> Generator[RedisContainer, None, None]: + """Starts a temporary Redis container for the test session.""" + container_name = "bot_testing_redis_container" + + with RedisContainer("redis").with_name(container_name) as redis: + container_url = ( + f"redis://{redis.get_container_host_ip()}:{redis.get_exposed_port(6379)}/0" + ) + with patch.object(settings, "REDIS_DSN", container_url): + yield redis + + +@pytest.fixture(scope="session") +async def db_session_factory(postgres_container) -> AsyncSessionFactory: + engine: AsyncEngine = create_async_engine( + make_url_async(settings.POSTGRES_DSN), poolclass=NullPool + ) + + factory = async_scoped_session( + sessionmaker( + bind=engine, + expire_on_commit=False, + class_=AsyncSession, # type:ignore + ), + scopefunc=current_task, + ) + return factory + + +@pytest.fixture +def alembic_configuration() -> Config: + return Config(str(Path(__file__).parent.parent.parent / "alembic.ini")) + + +@pytest.fixture +async def isolated_session( + db_session_factory: AsyncSessionFactory, alembic_configuration: Config +): + """Isolated session with proper rollback to prevent test data leaks.""" + + command.upgrade(alembic_configuration, "head") + async with db_session_factory() as session: + yield session + command.downgrade(alembic_configuration, "base") + + +@pytest.fixture +async def 
sample_record_repository(isolated_session) -> SampleRecordRepository: + return SampleRecordRepository(isolated_session) + + +def mock_authorization() -> None: + respx.get( + url__regex=re.compile(r"^https://[^/]+/api/v2/botx/bots/[^/]+/token(\?.*)?$") + ).mock( + return_value=httpx.Response( + HTTPStatus.OK, + json={ + "status": "ok", + "result": "token", + }, + ), + ) + + +@pytest.fixture +async def fastapi_app( + respx_mock: Callable[..., Any], # We can't apply pytest mark to fixture + redis_container, + postgres_container, +): + fastapi_app = get_application() + mock_authorization() + async with LifespanManager(fastapi_app): + yield fastapi_app + + +@pytest.fixture +async def bot( + respx_mock: Callable[..., Any], # We can't apply pytest mark to fixture + fastapi_app, +) -> AsyncGenerator[Bot, None]: + with patch.object( + Bot, "answer_message", new_callable=AsyncMock + ) as mocked_answer_message: + mocked_answer_message.return_value = uuid4() + yield fastapi_app.state.bot diff --git a/tests/integration/endpoints/test_botx.py b/tests/integration/endpoints/test_botx.py index 902ab96..b15e1a2 100644 --- a/tests/integration/endpoints/test_botx.py +++ b/tests/integration/endpoints/test_botx.py @@ -5,6 +5,7 @@ import httpx import respx +from fastapi import FastAPI from fastapi.testclient import TestClient from pybotx import Bot @@ -15,7 +16,7 @@ async def test__web_app__bot_command_response_accepted( bot_id: UUID, host: str, - bot: Bot, + fastapi_app: FastAPI, authorization_header: Dict[str, str], command_payload_v4: Dict[str, Any], ) -> None: @@ -69,7 +70,7 @@ async def test__web_app__bot_command_response_accepted( def test__web_app__bot_command_response_service_unavailable( bot_id: UUID, host: str, - bot: Bot, + fastapi_app: FastAPI, ) -> None: # - Arrange - callback_payload = { @@ -96,7 +97,9 @@ def test__web_app__bot_command_response_service_unavailable( @respx.mock def test__web_app__unknown_bot_response_service_unavailable( - bot: Bot, authorization_header: 
Dict[str, str], unknown_bot_payload: Dict[str, Any] + fastapi_app: FastAPI, + authorization_header: Dict[str, str], + unknown_bot_payload: Dict[str, Any], ) -> None: # - Act - with TestClient(get_application()) as test_client: @@ -117,7 +120,7 @@ def test__web_app__unknown_bot_response_service_unavailable( @respx.mock def test__web_app__unsupported_bot_api_version_service_unavailable( - bot: Bot, + fastapi_app: FastAPI, authorization_header: Dict[str, str], command_payload_v3: Dict[str, Any], ) -> None: diff --git a/tests/integration/factories.py b/tests/integration/factories.py new file mode 100644 index 0000000..12da024 --- /dev/null +++ b/tests/integration/factories.py @@ -0,0 +1,13 @@ +import factory +from async_factory_boy.factory.sqlalchemy import AsyncSQLAlchemyFactory + +from app.infrastructure.db.sample_record.models import SampleRecordModel + + +class SampleRecordModelFactory(AsyncSQLAlchemyFactory): + """Factory for sample record model objects in the database.""" + + class Meta: + model = SampleRecordModel + + record_data = factory.Faker("text") diff --git a/tests/integration/repository/test_sample_record_repository.py b/tests/integration/repository/test_sample_record_repository.py index cc93d0c..bacf73a 100644 --- a/tests/integration/repository/test_sample_record_repository.py +++ b/tests/integration/repository/test_sample_record_repository.py @@ -7,7 +7,7 @@ from app.domain.entities.sample_record import SampleRecord from app.infrastructure.db.sample_record.models import SampleRecordModel from app.infrastructure.repositories.sample_record import SampleRecordRepository -from tests.factories import SampleRecordModelFactory +from tests.integration.factories import SampleRecordModelFactory def assert_database_object_equal_domain( diff --git a/tests/integration/sample_record_use_cases/test_sample_record_use_cases_int.py b/tests/integration/sample_record_use_cases/test_sample_record_use_cases_int.py index 2bd606f..6b95fb6 100644 --- 
a/tests/integration/sample_record_use_cases/test_sample_record_use_cases_int.py +++ b/tests/integration/sample_record_use_cases/test_sample_record_use_cases_int.py @@ -4,7 +4,8 @@ from app.application.use_cases.interfaces import ISampleRecordUseCases from app.infrastructure.db.sample_record.models import SampleRecordModel from app.presentation.bot.schemas.sample_record import SampleRecordResponseSchema -from tests.factories import SampleRecordCreateSchemaFactory, SampleRecordModelFactory +from tests.factories import SampleRecordCreateSchemaFactory +from tests.integration.factories import SampleRecordModelFactory def assert_database_object_equal_to_retrieved_object( diff --git a/tests/unit/use_cases/test_sample_record_use_cases.py b/tests/unit/use_cases/test_sample_record_use_cases_unit.py similarity index 100% rename from tests/unit/use_cases/test_sample_record_use_cases.py rename to tests/unit/use_cases/test_sample_record_use_cases_unit.py From 3056a09d0491235b8f8c6bd0a3b216e84c3259fa Mon Sep 17 00:00:00 2001 From: vladimirgubarik Date: Fri, 1 Aug 2025 17:44:57 +0300 Subject: [PATCH 04/15] add di to the routers --- app/main.py | 2 +- app/presentation/api/bot.py | 16 ------ app/presentation/api/botx.py | 24 ++++++-- app/presentation/dependencies/__init__.py | 0 app/presentation/dependencies/healthcheck.py | 59 -------------------- 5 files changed, 19 insertions(+), 82 deletions(-) delete mode 100644 app/presentation/api/bot.py delete mode 100644 app/presentation/dependencies/__init__.py delete mode 100644 app/presentation/dependencies/healthcheck.py diff --git a/app/main.py b/app/main.py index 207db40..be9f371 100644 --- a/app/main.py +++ b/app/main.py @@ -56,7 +56,7 @@ def get_application() -> FastAPI: # Initialize the main application container main_container = ApplicationStartupContainer() - main_container.wire(modules=["app.main"]) + main_container.wire(modules=["app.main", "app.presentation.api.botx"]) # Initialize the SampleRecord commands container 
sample_record_commands_container = BotSampleRecordCommandContainer() diff --git a/app/presentation/api/bot.py b/app/presentation/api/bot.py deleted file mode 100644 index 62cf07d..0000000 --- a/app/presentation/api/bot.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Bot dependency for FastAPI.""" - -from fastapi import Depends, Request -from pybotx import Bot - -import tests.integration.conftest - - -def get_bot(request: Request) -> Bot: - if not isinstance(bot := request.app.state.bot, Bot): - raise RuntimeError(f"request.app.state.bot should be Bot instance. ") - - return bot - - -bot_dependency = Depends(get_bot) diff --git a/app/presentation/api/botx.py b/app/presentation/api/botx.py index 66d43fc..dd0badc 100644 --- a/app/presentation/api/botx.py +++ b/app/presentation/api/botx.py @@ -1,8 +1,8 @@ """Endpoints for communication with botx.""" from http import HTTPStatus - -from fastapi import APIRouter, Request +from dependency_injector.wiring import inject, Provide +from fastapi import APIRouter, Request, Depends from fastapi.responses import JSONResponse from pybotx import ( Bot, @@ -16,15 +16,19 @@ ) from pybotx.constants import BOT_API_VERSION +from app.infrastructure.containers import ApplicationStartupContainer from app.logger import logger -from app.presentation.api.bot import bot_dependency from app.settings import settings router = APIRouter() @router.post("/command") -async def command_handler(request: Request, bot: Bot = bot_dependency) -> JSONResponse: +@inject +async def command_handler( + request: Request, + bot: Bot = Depends(Provide[ApplicationStartupContainer.bot]), +) -> JSONResponse: """Receive commands from users. 
Max timeout - 5 seconds.""" try: @@ -78,7 +82,11 @@ async def command_handler(request: Request, bot: Bot = bot_dependency) -> JSONRe @router.get("/status") -async def status_handler(request: Request, bot: Bot = bot_dependency) -> JSONResponse: +@inject +async def status_handler( + request: Request, + bot: Bot = Depends(Provide[ApplicationStartupContainer.bot]), +) -> JSONResponse: """Show bot status and commands list.""" try: @@ -112,7 +120,11 @@ async def status_handler(request: Request, bot: Bot = bot_dependency) -> JSONRes @router.post("/notification/callback") -async def callback_handler(request: Request, bot: Bot = bot_dependency) -> JSONResponse: +@inject +async def callback_handler( + request: Request, + bot: Bot = Depends(Provide[ApplicationStartupContainer.bot]), +) -> JSONResponse: """Process BotX methods callbacks.""" try: diff --git a/app/presentation/dependencies/__init__.py b/app/presentation/dependencies/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/app/presentation/dependencies/healthcheck.py b/app/presentation/dependencies/healthcheck.py deleted file mode 100644 index 38e3b2c..0000000 --- a/app/presentation/dependencies/healthcheck.py +++ /dev/null @@ -1,59 +0,0 @@ -"""Bot dependency for healthcheck.""" - -from asyncio.exceptions import TimeoutError -from typing import Optional - -from fastapi import Depends, Request -from pybotx import Bot -from sqlalchemy.sql import text - -import tests.integration.conftest -from app.infrastructure.worker.worker import queue -from app.settings import settings - - -async def check_db_connection(request: Request) -> Optional[str]: - assert isinstance(tests.integration.conftest.bot, Bot) - - bot = tests.integration.conftest.bot - session_factory = bot.state.db_session_factory - - async with session_factory() as db_session: - try: - await db_session.execute(text("SELECT 1")) - except Exception as exc: - return str(exc) - - return None - - -check_db_connection_dependency = 
Depends(check_db_connection) - - -async def check_redis_connection(request: Request) -> Optional[str]: - assert isinstance(tests.integration.conftest.bot, Bot) - - bot = tests.integration.conftest.bot - return await bot.state.redis_repo.ping() - - -check_redis_connection_dependency = Depends(check_redis_connection) - - -async def check_worker_status() -> Optional[str]: - job = await queue.enqueue("healthcheck") - - if not job: - return None - - try: - await job.refresh(settings.WORKER_TIMEOUT_SEC) - except TimeoutError: - return "Worker is overloaded or not launched" - except Exception as exc: - return str(exc) - - return None - - -check_worker_status_dependency = Depends(check_worker_status) From caeb0b268caada9e3608aabb7f496192a349b91d Mon Sep 17 00:00:00 2001 From: vladimirgubarik Date: Wed, 6 Aug 2025 00:03:47 +0300 Subject: [PATCH 05/15] fix tests add di to worker --- app/application/repository/exceptions.py | 19 ++- app/application/use_cases/record_use_cases.py | 8 +- .../mapper}/__init__.py | 0 app/decorators/mapper/context.py | 52 +++++++ .../{ => mapper}/exception_mapper.py | 102 +------------- app/decorators/mapper/factories.py | 60 ++++++++ app/domain/entities/sample_record.py | 11 +- app/domain/exceptions/domain_exceptions.py | 16 --- app/infrastructure/containers.py | 46 +++++- app/infrastructure/db/migrations/env.py | 1 + .../migrations/versions/765dcfed2d16_init.py | 33 ----- app/infrastructure/db/sample_record/models.py | 11 +- .../repositories/sample_record.py | 62 ++++++-- .../worker/tasks}/__init__.py | 0 .../worker/tasks/simple_task.py | 18 +++ app/infrastructure/worker/worker.py | 59 +++++--- app/main.py | 15 +- app/presentation/bot/bot.py | 2 +- .../bot/command_handlers}/__init__.py | 0 .../base_handler.py} | 10 +- .../bot/command_handlers/sample_record.py | 50 +++++++ .../bot/commands/command_listing.py | 1 + app/presentation/bot/commands/common.py | 1 - .../bot/commands/sample_record.py | 7 +- .../bot/error_handlers/base_handlers.py | 133 
++++++++++++++++++ .../exceptions_chain_executor.py | 104 ++++++++++++++ .../internal_error_handler.py} | 0 app/presentation/bot/handlers/error.py | 48 ------- .../bot/handlers/sample_record.py | 29 ---- app/presentation/bot/schemas/sample_record.py | 11 +- app/presentation/bot/validators/exceptions.py | 5 +- .../bot/validators/sample_record.py | 4 +- app/settings.py | 2 + docker-compose.dev.yml | 15 ++ docker-compose.yml | 2 +- tests/factories.py | 17 +-- tests/integration/factories.py | 3 +- .../test_sample_record_repository.py | 99 +++++++++++-- .../test_sample_record_use_cases_int.py | 13 +- tests/presentation/commands/__init__.py | 0 tests/unit/decorators/test_classes.py | 3 +- tests/unit/decorators/test_errors_mapper.py | 29 +--- 42 files changed, 754 insertions(+), 347 deletions(-) rename app/{domain/exceptions => decorators/mapper}/__init__.py (100%) create mode 100644 app/decorators/mapper/context.py rename app/decorators/{ => mapper}/exception_mapper.py (52%) create mode 100644 app/decorators/mapper/factories.py delete mode 100644 app/domain/exceptions/domain_exceptions.py delete mode 100644 app/infrastructure/db/migrations/versions/765dcfed2d16_init.py rename app/{presentation/bot/handlers => infrastructure/worker/tasks}/__init__.py (100%) create mode 100644 app/infrastructure/worker/tasks/simple_task.py rename {tests/presentation => app/presentation/bot/command_handlers}/__init__.py (100%) rename app/presentation/bot/{handlers/command.py => command_handlers/base_handler.py} (77%) create mode 100644 app/presentation/bot/command_handlers/sample_record.py create mode 100644 app/presentation/bot/error_handlers/base_handlers.py create mode 100644 app/presentation/bot/error_handlers/exceptions_chain_executor.py rename app/presentation/bot/{handlers/internal_error.py => error_handlers/internal_error_handler.py} (100%) delete mode 100644 app/presentation/bot/handlers/error.py delete mode 100644 app/presentation/bot/handlers/sample_record.py delete mode 100644 
tests/presentation/commands/__init__.py diff --git a/app/application/repository/exceptions.py b/app/application/repository/exceptions.py index 0043878..c30f8a8 100644 --- a/app/application/repository/exceptions.py +++ b/app/application/repository/exceptions.py @@ -1,4 +1,7 @@ -class BaseRepositoryError(Exception): +from app.decorators.mapper.factories import ContextAwareError + + +class BaseRepositoryError(ContextAwareError): """Base exception for all repository-specific exceptions.""" @@ -18,5 +21,17 @@ class RecordCreateError(BaseRepositoryError): """Raised when a creation fails.""" -class RecordRetreiveError(BaseRepositoryError): +class RecordRetrieveError(BaseRepositoryError): """Raised when a get fails.""" + + +class RecordAlreadyExistsError(BaseRepositoryError): + """Raised when a record already exists.""" + + +class ForeignKeyError(BaseRepositoryError): + """Raised when a foreign key constraint is violated.""" + + +class ValidationError(BaseRepositoryError): + """Raised when a validation error occurs.""" diff --git a/app/application/use_cases/record_use_cases.py b/app/application/use_cases/record_use_cases.py index ea18e38..6b4ee76 100644 --- a/app/application/use_cases/record_use_cases.py +++ b/app/application/use_cases/record_use_cases.py @@ -20,7 +20,9 @@ def __init__(self, record_repo: ISampleRecordRepository): async def create_record( self, request_object: SampleRecordCreateRequestSchema ) -> SampleRecordResponseSchema: - domain_object = SampleRecord(record_data=request_object.record_data) + domain_object = SampleRecord( + record_data=request_object.record_data, name=request_object.name + ) created_record = await self._repo.create(domain_object) return SampleRecordResponseSchema.from_orm(created_record) @@ -29,7 +31,9 @@ async def update_record( ) -> SampleRecordResponseSchema: """Update an existing record.""" domain_object = SampleRecord( - record_data=update_request.record_data, id=update_request.id + record_data=update_request.record_data, + 
id=update_request.id, + name=update_request.name ) updated_record = await self._repo.update(domain_object) return SampleRecordResponseSchema.from_orm(updated_record) diff --git a/app/domain/exceptions/__init__.py b/app/decorators/mapper/__init__.py similarity index 100% rename from app/domain/exceptions/__init__.py rename to app/decorators/mapper/__init__.py diff --git a/app/decorators/mapper/context.py b/app/decorators/mapper/context.py new file mode 100644 index 0000000..f4e34c3 --- /dev/null +++ b/app/decorators/mapper/context.py @@ -0,0 +1,52 @@ +from functools import cached_property +from typing import Callable, Any + + +class ExceptionContext: + SENSITIVE_KEYS: frozenset[str] = frozenset( + ("password", "token", "key", "secret", "auth", "credential", "passwd") + ) + + def __init__( + self, + original_exception: Exception, + func: Callable, + args: tuple[Any, ...], + kwargs: dict[str, Any], + ): + self.original_exception = original_exception + self.func = func + self.args = args + self.kwargs = kwargs + + @cached_property + def formatted_context(self) -> str: + error_context = [ + f"Error in function '{self.func.__module__}.{self.func.__qualname__}'" + ] + + if self.args: + args_str = ", ".join(self._sanitised_value(arg) for arg in self.args) + error_context.append(f"Args: [{args_str}]") + + if self.kwargs: + kwargs_str = ", ".join( + f"{k}={self._sanitised_value(v, k)}" for k, v in self.kwargs.items() + ) + error_context.append(f"Kwargs: {kwargs_str}") + + return "\n".join(error_context).replace("{", "{{").replace("}", "}}") + + def _sanitised_value( + self, + value: Any, + key: str | None = None, + ) -> str: + if key is not None and key.lower() in self.SENSITIVE_KEYS: + return "****HIDDEN****" + + try: + str_value = str(value) + return f"{str_value[:100]}..." 
if len(str_value) > 100 else str_value + except Exception: + return f"<{type(value).__name__} object - str() failed>" diff --git a/app/decorators/exception_mapper.py b/app/decorators/mapper/exception_mapper.py similarity index 52% rename from app/decorators/exception_mapper.py rename to app/decorators/mapper/exception_mapper.py index 946216d..c1771bc 100644 --- a/app/decorators/exception_mapper.py +++ b/app/decorators/mapper/exception_mapper.py @@ -1,100 +1,15 @@ """Decorators to rethrow and log exceptions.""" -from abc import ABC, abstractmethod -from functools import cached_property, wraps +from functools import wraps from inspect import iscoroutinefunction from typing import Any, Callable, Type from cachetools import LRUCache # type:ignore +from app.decorators.mapper.context import ExceptionContext +from app.decorators.mapper.factories import ExceptionFactory from app.logger import logger - -class ExceptionContext: - SENSITIVE_KEYS: frozenset[str] = frozenset( - ("password", "token", "key", "secret", "auth", "credential", "passwd") - ) - - def __init__( - self, - original_exception: Exception, - func: Callable, - args: tuple[Any, ...], - kwargs: dict[str, Any], - ): - self.original_exception = original_exception - self.func = func - self.args = args - self.kwargs = kwargs - - @cached_property - def formatted_context(self) -> str: - error_context = [ - f"Error in function '{self.func.__module__}.{self.func.__qualname__}'" - ] - - if self.args: - args_str = ", ".join(self._sanitised_value(arg) for arg in self.args) - error_context.append(f"Args: [{args_str}]") - - if self.kwargs: - kwargs_str = ", ".join( - f"{k}={self._sanitised_value(v, k)}" for k, v in self.kwargs.items() - ) - error_context.append(f"Kwargs: {kwargs_str}") - - return "\n".join(error_context).replace("{", "{{").replace("}", "}}") - - def _sanitised_value( - self, - value: Any, - key: str | None = None, - ) -> str: - if key is not None and key.lower() in self.SENSITIVE_KEYS: - return 
"****HIDDEN****" - - try: - str_value = str(value) - return f"{str_value[:100]}..." if len(str_value) > 100 else str_value - except Exception: - return f"<{type(value).__name__} object - str() failed>" - - -class ExceptionFactory(ABC): - """ - Create and describe a factory for exceptions. - - This class is an abstract base class meant to define the interface for an - exception factory. - - """ - - @abstractmethod - def make_exception(self, context: ExceptionContext) -> Exception: - """Make an exception based on the given context.""" - - -class EnrichedExceptionFactory(ExceptionFactory): - """ - Create and manage enriched exceptions based on a given exception type. - - This class provides a mechanism to create exceptions dynamically, - enriching them with a formatted context. It extends the behavior of - the base ExceptionFactory class by incorporating the concept of a - generated error type and formatted context. - - :ivar generated_error: The type of exception to generate when creating - an enriched exception. - :type generated_error: type[Exception] - """ - - def __init__(self, generated_error: type[Exception]): - self.generated_error = generated_error - - def make_exception(self, context: ExceptionContext) -> Exception: - return self.generated_error(context.formatted_context) - - ExceptionOrTupleOfExceptions = Type[Exception] | tuple[Type[Exception], ...] 
@@ -105,13 +20,11 @@ def __init__( self, exception_map: dict[ExceptionOrTupleOfExceptions, ExceptionFactory], max_cache_size: int = 512, - log_error: bool = True, is_bound_method: bool = False, ): - self.mapping = self._get_flat_map(exception_map) + self.mapping = self._get_exceptions_flat_map(exception_map) self.exception_catchall_factory = self.mapping.pop(Exception, None) self._lru_cache: LRUCache = LRUCache(maxsize=max_cache_size) - self.log_error = log_error self.is_bound_method = is_bound_method def __call__(self, func: Callable) -> Callable: @@ -121,7 +34,7 @@ def __call__(self, func: Callable) -> Callable: else self._sync_wrapper(func) ) - def _get_flat_map( + def _get_exceptions_flat_map( self, exception_map: dict[ExceptionOrTupleOfExceptions, ExceptionFactory], ) -> dict[Type[Exception], ExceptionFactory]: @@ -164,11 +77,8 @@ def _handle_exception_logic( args: tuple[Any, ...], kwargs: dict[str, Any], ) -> None: - context = ExceptionContext(exc, func, self._filtered_args(args), kwargs) - if self.log_error: - logger.error(context.formatted_context, exc_info=True) - if exception_factory := self._get_exception_factory(type(exc)): + context = ExceptionContext(exc, func, self._filtered_args(args), kwargs) raise exception_factory.make_exception(context) from exc raise exc diff --git a/app/decorators/mapper/factories.py b/app/decorators/mapper/factories.py new file mode 100644 index 0000000..99bf827 --- /dev/null +++ b/app/decorators/mapper/factories.py @@ -0,0 +1,60 @@ +from abc import ABC, abstractmethod + +from app.decorators.mapper.context import ExceptionContext + + +class ContextAwareError(Exception): + def __init__(self, message: str, context: ExceptionContext = None, *args): + super().__init__(message, *args) + self.context = context + + def __str__(self) -> str: + base = super().__str__() + if self.context: + return f"{base} | context={self.context.formatted_context}" + return base + + +class ExceptionFactory(ABC): + """ + Create and describe a factory 
for exceptions. + + This class is an abstract base class meant to define the interface for an + exception factory. + + """ + + @abstractmethod + def make_exception(self, context: ExceptionContext) -> Exception: + """Make an exception based on the given context.""" + + +class PassThroughExceptionFactory(ExceptionFactory): + """Factory for exceptions that should be passed through without creating a new one. + + Useful for cases when a broad Exception is caught and wrapped into a common Exception type. + """ + + def make_exception(self, context: ExceptionContext) -> Exception: + return context.original_exception + + +class EnrichedExceptionFactory(ExceptionFactory): + """ + Create and manage enriched exceptions based on a given exception type. + + This class provides a mechanism to create exceptions dynamically, + enriching them with a formatted context. It extends the behavior of + the base ExceptionFactory class by incorporating the concept of a + generated error type and formatted context. + + :ivar generated_error: The type of exception to generate when creating + an enriched exception. 
+ :type generated_error: type[Exception] + """ + + def __init__(self, generated_error: type[ContextAwareError]): + self.generated_error = generated_error + + def make_exception(self, context: ExceptionContext) -> ContextAwareError: + return self.generated_error(str(context.original_exception), context=context) diff --git a/app/domain/entities/sample_record.py b/app/domain/entities/sample_record.py index 52a3c6e..05eceb2 100644 --- a/app/domain/entities/sample_record.py +++ b/app/domain/entities/sample_record.py @@ -2,7 +2,6 @@ from dataclasses import dataclass -from app.domain.exceptions.domain_exceptions import WrongRecordDataError @dataclass @@ -10,15 +9,9 @@ class SampleRecord: """Record entity representing a simple record in the system.""" record_data: str + name: str id: int | None = None def __str__(self) -> str: """Return string representation of the record.""" - return self.record_data - - def __post_init__(self) -> None: - """Insert business validation here - For example for some reason record data shouldn't start with A123 - """ - if self.record_data.startswith("A123"): - raise WrongRecordDataError("Record data shouldn't start with A") + return f"id={self.id}, record_data={self.record_data}, name={self.name}" diff --git a/app/domain/exceptions/domain_exceptions.py b/app/domain/exceptions/domain_exceptions.py deleted file mode 100644 index ba734d4..0000000 --- a/app/domain/exceptions/domain_exceptions.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Domain-specific exceptions.""" - - -class DomainError(Exception): - """Base exception for all domain-specific exceptions.""" - - def __init__(self, message: str = "Domain error occurred"): - self.message = message - super().__init__(self.message) - - -class WrongRecordDataError(DomainError): - """Raised when record data is not valid.""" - - def __init__(self, message: str = "Wrong record data"): - super().__init__(message) diff --git a/app/infrastructure/containers.py b/app/infrastructure/containers.py index 
5fd98ba..caf3d6e 100644 --- a/app/infrastructure/containers.py +++ b/app/infrastructure/containers.py @@ -17,7 +17,9 @@ from app.infrastructure.repositories.sample_record import SampleRecordRepository from app.logger import logger -from app.presentation.bot.handlers.internal_error import internal_error_handler +from app.presentation.bot.error_handlers.internal_error_handler import ( + internal_error_handler, +) from app.presentation.bot.middlewares.answer_error import answer_error_middleware from app.presentation.bot.middlewares.smart_logger import smart_logger_middleware from app.presentation.bot.resources import strings @@ -102,7 +104,7 @@ class ApplicationStartupContainer(containers.DeclarativeContainer): async_client = providers.Singleton( AsyncClient, - timeout=60, + timeout=settings.BOT_ASYNC_CLIENT_TIMEOUT_IN_SECONDS, limits=Limits(max_keepalive_connections=None, max_connections=None), ) @@ -142,3 +144,43 @@ class ApplicationStartupContainer(containers.DeclarativeContainer): lambda manager: manager(), callback_task_manager, ) + + +class WorkerStartupContainer(containers.DeclarativeContainer): + redis_client = Singleton(lambda: aioredis.from_url(settings.REDIS_DSN)) + + redis_repo = Factory( + RedisRepo, + redis=redis_client, + prefix=strings.BOT_PROJECT_NAME, + ) + + async_client = providers.Singleton( + AsyncClient, + timeout=settings.BOT_ASYNC_CLIENT_TIMEOUT_IN_SECONDS, + limits=Limits(max_keepalive_connections=None, max_connections=None), + ) + + callback_repo = providers.Singleton( + CallbackRedisRepo, + redis=redis_client, + ) + from app.presentation.bot.commands import common, sample_record + + exception_handlers = ( + {} if not settings.RAISE_BOT_EXCEPTIONS else {Exception: internal_error_handler} + ) + + bot = providers.Singleton( + Bot, + collectors=[common.collector, sample_record.collector], + bot_accounts=settings.BOT_CREDENTIALS, + exception_handlers=exception_handlers, # type: ignore + 
default_callback_timeout=settings.BOTX_CALLBACK_TIMEOUT_IN_SECONDS, + httpx_client=async_client, + middlewares=[ + smart_logger_middleware, + answer_error_middleware, + ], + callback_repo=callback_repo, + ) diff --git a/app/infrastructure/db/migrations/env.py b/app/infrastructure/db/migrations/env.py index 1980855..f275412 100644 --- a/app/infrastructure/db/migrations/env.py +++ b/app/infrastructure/db/migrations/env.py @@ -13,6 +13,7 @@ from app.infrastructure.db.sqlalchemy import Base, make_url_sync # isort:skip # Import models to make them visible by alembic +from app.infrastructure.db.sample_record.models import SampleRecordModel # isort:skip postgres_dsn = make_url_sync(settings.POSTGRES_DSN) context_config = context.config diff --git a/app/infrastructure/db/migrations/versions/765dcfed2d16_init.py b/app/infrastructure/db/migrations/versions/765dcfed2d16_init.py deleted file mode 100644 index 4c7c2dc..0000000 --- a/app/infrastructure/db/migrations/versions/765dcfed2d16_init.py +++ /dev/null @@ -1,33 +0,0 @@ -"""init - -Revision ID: 765dcfed2d16 -Revises: -Create Date: 2025-06-11 16:30:02.133576 - -Doc: https://alembic.sqlalchemy.org/en/latest/tutorial.html#create-a-migration-script -""" - -import sqlalchemy as sa -from alembic import op - -revision = "765dcfed2d16" -down_revision = None -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "sample_record", - sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), - sa.Column("record_data", sa.String(), nullable=False), - sa.PrimaryKeyConstraint("id", name=op.f("pk_sample_record")), - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table("sample_record") - # ### end Alembic commands ### diff --git a/app/infrastructure/db/sample_record/models.py b/app/infrastructure/db/sample_record/models.py index ed75483..52ab7aa 100644 --- a/app/infrastructure/db/sample_record/models.py +++ b/app/infrastructure/db/sample_record/models.py @@ -1,5 +1,6 @@ """Database models declarations.""" +from sqlalchemy import String, CheckConstraint from sqlalchemy.orm import Mapped, mapped_column from app.infrastructure.db.sqlalchemy import Base @@ -11,8 +12,14 @@ class SampleRecordModel(Base): __tablename__ = "sample_record" id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True) - record_data: Mapped[str] + record_data: Mapped[str] = mapped_column(String(128), nullable=False) + name: Mapped[str] = mapped_column(String(64), nullable=False, unique=True) + + __table_args__ = ( + CheckConstraint("length(record_data) >= 1", name="record_data_min_length"), + CheckConstraint("length(name) >= 1", name="name_min_length"), + ) def __repr__(self) -> str: """Show string representation of a record.""" - return self.record_data + return f"{self.name}:{self.record_data}" diff --git a/app/infrastructure/repositories/sample_record.py b/app/infrastructure/repositories/sample_record.py index 6c73b5c..285e93f 100644 --- a/app/infrastructure/repositories/sample_record.py +++ b/app/infrastructure/repositories/sample_record.py @@ -3,20 +3,48 @@ from typing import List from sqlalchemy import delete, insert, select, update -from sqlalchemy.exc import NoResultFound, SQLAlchemyError +from sqlalchemy.exc import SQLAlchemyError, IntegrityError, NoResultFound from app.application.repository.exceptions import ( RecordCreateError, RecordDeleteError, RecordDoesNotExistError, - RecordRetreiveError, + RecordRetrieveError, RecordUpdateError, + RecordAlreadyExistsError, + ForeignKeyError, + ValidationError, ) from app.application.repository.interfaces import ISampleRecordRepository -from app.decorators.exception_mapper import 
ExceptionMapper, EnrichedExceptionFactory +from app.decorators.mapper.exception_mapper import ( + ExceptionMapper, +) +from app.decorators.mapper.factories import EnrichedExceptionFactory +from app.decorators.mapper.context import ExceptionContext from app.domain.entities.sample_record import SampleRecord from app.infrastructure.db.sample_record.models import SampleRecordModel from app.infrastructure.db.sqlalchemy import AsyncSession +from psycopg2 import errorcodes + + +class IntegrityErrorFactory(EnrichedExceptionFactory): + def make_exception(self, context: ExceptionContext) -> Exception: + if not (orig := getattr(context.original_exception, "orig", None)): + return self.generated_error(context.formatted_context) + + if not (sqlstate := getattr(orig, "sqlstate", None)): + return self.generated_error(context.formatted_context) + + if sqlstate == errorcodes.UNIQUE_VIOLATION: + return RecordAlreadyExistsError(context.formatted_context) + + if sqlstate == errorcodes.FOREIGN_KEY_VIOLATION: + return ForeignKeyError(context.formatted_context) + + if sqlstate == errorcodes.NOT_NULL_VIOLATION: + return ValidationError(context.formatted_context) + + return self.generated_error(context.formatted_context) class SampleRecordRepository(ISampleRecordRepository): @@ -31,13 +59,16 @@ def __init__(self, session: AsyncSession): self._session = session @ExceptionMapper( - {SQLAlchemyError: EnrichedExceptionFactory(RecordCreateError)}, + { + IntegrityError: IntegrityErrorFactory(RecordCreateError), + Exception: EnrichedExceptionFactory(RecordCreateError), + }, is_bound_method=True, ) async def create(self, record: SampleRecord) -> SampleRecord: query = ( insert(SampleRecordModel) - .values(record_data=record.record_data) + .values(record_data=record.record_data, name=record.name) .returning(SampleRecordModel) ) result = await self._session.execute(query) @@ -46,14 +77,17 @@ async def create(self, record: SampleRecord) -> SampleRecord: return self._to_domain_object(record_model) 
@ExceptionMapper( - {SQLAlchemyError: EnrichedExceptionFactory(RecordUpdateError)}, + { + IntegrityError: IntegrityErrorFactory(RecordUpdateError), + Exception: EnrichedExceptionFactory(RecordUpdateError), + }, is_bound_method=True, ) async def update(self, record: SampleRecord) -> SampleRecord: query = ( update(SampleRecordModel) .where(SampleRecordModel.id == record.id) - .values(record_data=record.record_data) + .values(record_data=record.record_data, name=record.name) .returning(SampleRecordModel) ) execute_result = (await self._session.execute(query)).scalar_one_or_none() @@ -66,7 +100,9 @@ async def update(self, record: SampleRecord) -> SampleRecord: return self._to_domain_object(execute_result) @ExceptionMapper( - {SQLAlchemyError: EnrichedExceptionFactory(RecordDeleteError)}, + { + SQLAlchemyError: EnrichedExceptionFactory(RecordDeleteError), + }, is_bound_method=True, ) async def delete(self, record_id: int) -> None: @@ -92,7 +128,10 @@ async def delete(self, record_id: int) -> None: await self._session.flush() @ExceptionMapper( - {SQLAlchemyError: EnrichedExceptionFactory(RecordDoesNotExistError)}, + { + NoResultFound: EnrichedExceptionFactory(RecordDoesNotExistError), + Exception: EnrichedExceptionFactory(RecordRetrieveError), + }, is_bound_method=True, ) async def get_by_id(self, record_id: int) -> SampleRecord: @@ -112,7 +151,9 @@ async def get_by_id(self, record_id: int) -> SampleRecord: return self._to_domain_object(result.scalar_one()) @ExceptionMapper( - {SQLAlchemyError: EnrichedExceptionFactory(RecordRetreiveError)}, + { + Exception: EnrichedExceptionFactory(RecordRetrieveError), + }, is_bound_method=True, ) async def get_all(self) -> List[SampleRecord]: @@ -139,4 +180,5 @@ def _to_domain_object(self, record_model: SampleRecordModel) -> SampleRecord: return SampleRecord( id=record_model.id, record_data=record_model.record_data, + name=record_model.name, ) diff --git a/app/presentation/bot/handlers/__init__.py 
b/app/infrastructure/worker/tasks/__init__.py similarity index 100% rename from app/presentation/bot/handlers/__init__.py rename to app/infrastructure/worker/tasks/__init__.py diff --git a/app/infrastructure/worker/tasks/simple_task.py b/app/infrastructure/worker/tasks/simple_task.py new file mode 100644 index 0000000..a1a9921 --- /dev/null +++ b/app/infrastructure/worker/tasks/simple_task.py @@ -0,0 +1,18 @@ +from typing import Any + +from dependency_injector.wiring import inject, Provide +from pybotx import Bot + +from app.infrastructure.containers import WorkerStartupContainer + +from app.logger import logger + + +@inject +async def heartbeat_task( + ctx: dict[str, Any], + bot: Bot = Provide[WorkerStartupContainer.bot], +): + # logger.info("Heartbeat task started") + + logger.info(f"Heartbeat task executed {[account.id for account in bot.bot_accounts]}") diff --git a/app/infrastructure/worker/worker.py b/app/infrastructure/worker/worker.py index 329e160..564538b 100644 --- a/app/infrastructure/worker/worker.py +++ b/app/infrastructure/worker/worker.py @@ -2,13 +2,17 @@ from typing import Any, Dict, Literal +from dependency_injector.wiring import inject, Provide from pybotx import Bot from redis import asyncio as aioredis -from saq import Queue +from redis.asyncio import Redis +from saq import Queue, CronJob +from app.infrastructure.containers import WorkerStartupContainer from app.infrastructure.repositories.caching.callback_redis_repo import ( CallbackRedisRepo, ) +from app.infrastructure.worker.tasks.simple_task import heartbeat_task from app.logger import logger # `saq` import its own settings and hides our module @@ -17,36 +21,59 @@ SaqCtx = Dict[str, Any] -async def startup(ctx: SaqCtx) -> None: - from app.presentation.bot.bot import get_bot - callback_repo = CallbackRedisRepo(aioredis.from_url(app_settings.REDIS_DSN)) - bot = get_bot(callback_repo, raise_exceptions=False) +queue = Queue(aioredis.from_url(app_settings.REDIS_DSN), name="bot_refactor") 
+@inject +async def _startup_with_injection( + bot: Bot = Provide[WorkerStartupContainer.bot], +) -> None: + """Helper function for starting bot with dependencies injection.""" await bot.startup(fetch_tokens=False) - ctx["bot"] = bot - logger.info("Worker started") +@inject +async def _shutdown_with_injection( + bot: Bot = Provide[WorkerStartupContainer.bot], + redis_client: Redis = Provide[WorkerStartupContainer.redis_client], +) -> None: + """Helper function for shutting down bot with dependencies injection.""" + await bot.shutdown() + await redis_client.aclose() -async def shutdown(ctx: SaqCtx) -> None: - bot: Bot = ctx["bot"] - await bot.shutdown() +async def startup(ctx: SaqCtx) -> None: + worker_startup_container = WorkerStartupContainer() - logger.info("Worker stopped") + queue.add_cron_job( + CronJob(function=heartbeat_task, cron="*/5 * * * * *", unique=True) + ) + worker_startup_container.wire(modules=[__name__, "app.infrastructure.worker.tasks"]) + await _startup_with_injection() -async def healthcheck(_: SaqCtx) -> Literal[True]: - return True + logger.info("Worker started") -queue = Queue(aioredis.from_url(app_settings.REDIS_DSN), name="bot_refactor") +async def shutdown(ctx: SaqCtx) -> None: + await _shutdown_with_injection() + logger.info("Worker stopped") + settings = { "queue": queue, - "functions": [healthcheck], - "cron_jobs": [], + "functions": [], + # "cron_jobs": [ + # CronJob( + # function=heartbeat_task, + # cron="*/5 * * * * *", + # unique=True, + # # timeout=app_settings.PERIODIC_TASKS_DEFAULT_TIMEOUT, + # # heartbeat=app_settings.PERIODIC_TASKS_DEFAULT_HEARTBEAT, + # # retries=app_settings.PERIODIC_TASKS_DEFAULT_RETRIES, + # # ttl=app_settings.PERIODIC_TASKS_DEFAULT_TTL, + # ), + # ], "concurrency": 8, "startup": startup, "shutdown": shutdown, diff --git a/app/main.py b/app/main.py index be9f371..0830d6d 100644 --- a/app/main.py +++ b/app/main.py @@ -1,33 +1,20 @@ """Application with configuration for events, routers and middleware.""" 
-import asyncio from functools import partial -from dependency_injector.wiring import Provide, inject +from dependency_injector.wiring import Provide from fastapi import FastAPI from pybotx import Bot -from redis import asyncio as aioredis from redis.asyncio import Redis -import tests.integration.conftest -from app.infrastructure.repositories.caching.callback_redis_repo import ( - CallbackRedisRepo, -) -from app.infrastructure.repositories.caching.exception_handlers import ( - PubsubExceptionHandler, -) -from app.infrastructure.repositories.caching.redis_repo import RedisRepo from app.infrastructure.containers import ( ApplicationStartupContainer, BotSampleRecordCommandContainer, CallbackTaskManager, ) from app.infrastructure.db.sqlalchemy import close_db_connections -from app.logger import logger from app.presentation.api.routers import router -from app.presentation.bot.bot import get_bot from app.presentation.bot.resources import strings -from app.settings import settings async def startup( diff --git a/app/presentation/bot/bot.py b/app/presentation/bot/bot.py index dc515d6..7929224 100644 --- a/app/presentation/bot/bot.py +++ b/app/presentation/bot/bot.py @@ -6,7 +6,7 @@ from app.presentation.bot.middlewares.answer_error import answer_error_middleware from app.presentation.bot.middlewares.smart_logger import smart_logger_middleware from app.presentation.bot.commands import common, sample_record -from app.presentation.bot.handlers.internal_error import internal_error_handler +from app.presentation.bot.error_handlers.internal_error_handler import internal_error_handler from app.settings import settings diff --git a/tests/presentation/__init__.py b/app/presentation/bot/command_handlers/__init__.py similarity index 100% rename from tests/presentation/__init__.py rename to app/presentation/bot/command_handlers/__init__.py diff --git a/app/presentation/bot/handlers/command.py b/app/presentation/bot/command_handlers/base_handler.py similarity index 77% rename from 
app/presentation/bot/handlers/command.py rename to app/presentation/bot/command_handlers/base_handler.py index f257bff..73b8eef 100644 --- a/app/presentation/bot/handlers/command.py +++ b/app/presentation/bot/command_handlers/base_handler.py @@ -4,7 +4,9 @@ from pybotx import Bot, IncomingMessage from pydantic import BaseModel -from app.presentation.bot.handlers.error import BaseExceptionHandler +from app.presentation.bot.error_handlers.exceptions_chain_executor import ( + ExceptionHandlersChainExecutor, +) from app.presentation.bot.validators.base import IBotRequestParser @@ -13,11 +15,11 @@ def __init__( self, bot: Bot, message: IncomingMessage, - exception_handler: BaseExceptionHandler | None = None, + exception_handler_executor: ExceptionHandlersChainExecutor | None, ): self._bot = bot self._message = message - self._exception_handler = exception_handler or BaseExceptionHandler() + self._exception_handler_executor = exception_handler_executor @property @abc.abstractmethod @@ -49,6 +51,6 @@ async def execute( parameter = self.get_request_parameter() await self.handle_logic(parameter) except Exception as exc: - await self._exception_handler.handle_exception( + await self._exception_handler_executor.execute_chain( exc, self._bot, self._message ) diff --git a/app/presentation/bot/command_handlers/sample_record.py b/app/presentation/bot/command_handlers/sample_record.py new file mode 100644 index 0000000..7c02667 --- /dev/null +++ b/app/presentation/bot/command_handlers/sample_record.py @@ -0,0 +1,50 @@ +from pybotx import Bot, IncomingMessage + +from app.application.repository.exceptions import RecordCreateError, \ + RecordAlreadyExistsError, ValidationError +from app.application.use_cases.interfaces import ISampleRecordUseCases +from app.presentation.bot.command_handlers.base_handler import BaseCommandHandler +from app.presentation.bot.error_handlers.exceptions_chain_executor import ( + ExceptionHandlersChainExecutor, DEFAULT_HANDLERS, +) +from 
app.presentation.bot.error_handlers.base_handlers import \ + SendErrorExplainToUserHandler +from app.presentation.bot.resources.strings import SAMPLE_RECORD_CREATED_ANSWER +from app.presentation.bot.schemas.sample_record import SampleRecordCreateRequestSchema +from app.presentation.bot.validators.base import BotXJsonRequestParser + + +class CreateSampleRecordHandler(BaseCommandHandler): + incoming_argument_parser = BotXJsonRequestParser(SampleRecordCreateRequestSchema) + + _EXCEPTIONS_HANDLERS = DEFAULT_HANDLERS + [ + SendErrorExplainToUserHandler( + exception_explain_mapping={ + RecordAlreadyExistsError: "Запись с такими параметрами уже существует", + RecordCreateError: "Внутренняя ошибка создания записи", + ValidationError: "Неправильный формат данных" + } + ) + ] + exception_handler_chain_executor = ExceptionHandlersChainExecutor( + _EXCEPTIONS_HANDLERS + ) + + def __init__( + self, + bot: Bot, + message: IncomingMessage, + use_cases: ISampleRecordUseCases, + ): + self._use_cases = use_cases + + super().__init__(bot, message, self.exception_handler_chain_executor) + + async def handle_logic( + self, + request_parameter: SampleRecordCreateRequestSchema, # type: ignore + ) -> None: + created_record = await self._use_cases.create_record(request_parameter) + await self._bot.answer_message( + SAMPLE_RECORD_CREATED_ANSWER.format(**created_record.dict()) + ) diff --git a/app/presentation/bot/commands/command_listing.py b/app/presentation/bot/commands/command_listing.py index 91fa871..7c4afd7 100644 --- a/app/presentation/bot/commands/command_listing.py +++ b/app/presentation/bot/commands/command_listing.py @@ -19,3 +19,4 @@ class SampleRecordCommands: command_name="/create_record", description="Создать запись", ) + diff --git a/app/presentation/bot/commands/common.py b/app/presentation/bot/commands/common.py index f09c050..5a71501 100644 --- a/app/presentation/bot/commands/common.py +++ b/app/presentation/bot/commands/common.py @@ -20,7 +20,6 @@ async def 
default_handler( bot: Bot, ) -> None: """Run if command handler not found.""" - await bot.answer_message("Hello!") diff --git a/app/presentation/bot/commands/sample_record.py b/app/presentation/bot/commands/sample_record.py index 22f3f95..d62f60b 100644 --- a/app/presentation/bot/commands/sample_record.py +++ b/app/presentation/bot/commands/sample_record.py @@ -7,7 +7,7 @@ from app.infrastructure.containers import BotSampleRecordCommandContainer from app.infrastructure.db.sqlalchemy import provide_transaction_session from app.presentation.bot.commands.command_listing import SampleRecordCommands -from app.presentation.bot.handlers.sample_record import CreateSampleRecordHandler +from app.presentation.bot.command_handlers.sample_record import CreateSampleRecordHandler collector = HandlerCollector() @@ -25,5 +25,8 @@ async def create_sample_record( ) -> None: """Creates a sample record in the database.""" await CreateSampleRecordHandler( - bot=bot, message=message, use_cases=record_use_cases_factory.provider(session) + bot=bot, + message=message, + use_cases=record_use_cases_factory.provider(session), + ).execute() diff --git a/app/presentation/bot/error_handlers/base_handlers.py b/app/presentation/bot/error_handlers/base_handlers.py new file mode 100644 index 0000000..1fa6fbc --- /dev/null +++ b/app/presentation/bot/error_handlers/base_handlers.py @@ -0,0 +1,133 @@ +from abc import ABC, abstractmethod +from typing import Self, Callable +from uuid import UUID + +from pybotx import Bot, IncomingMessage, BotShuttingDownError + +from app.logger import logger +from app.presentation.bot.resources import strings + + +class AbstractExceptionHandler(ABC): + def __init__( + self, + next_handler: Self | None = None, + stop_on_failure: bool = False, + break_the_chain: bool = False, + ): + self._next_handler = next_handler + self._stop_on_failure = stop_on_failure + self._break_the_chain = break_the_chain + + @abstractmethod + def should_process_exception( + self, exc: Exception, 
bot: Bot, message: IncomingMessage + ) -> bool: + pass + + @abstractmethod + async def process_exception( + self, + exc: Exception, + bot: Bot, + message: IncomingMessage, + exception_id: UUID | None, + ): + pass + + async def handle_exception( + self, + exc: Exception, + bot: Bot, + message: IncomingMessage, + exception_id: UUID | None = None, + ) -> None: + if self.should_process_exception(exc, bot, message): + try: + await self.process_exception(exc, bot, message, exception_id) + if self._next_handler and not self._break_the_chain: + await self._next_handler.handle_exception( + exc, bot, message, exception_id + ) + except Exception as handler_error: + logger.error( + f"Error handling exception {exception_id}: {handler_error}", exc_info=True + ) + if self._stop_on_failure: + return + if self._next_handler: + await self._next_handler.handle_exception(exc, bot, message, exception_id) + + +class LoggingExceptionHandler(AbstractExceptionHandler): + def should_process_exception( + self, exc: Exception, bot: Bot, message: IncomingMessage + ) -> bool: + return True + + async def process_exception( + self, + exc: Exception, + bot: Bot, + message: IncomingMessage, + exception_id: UUID | None, + ) -> None: + logger.error(f"Error {exception_id}:{exc}", exc_info=exc) + + +class DropFSMOnErrorHandler(AbstractExceptionHandler): + def should_process_exception( + self, exc: Exception, bot: Bot, message: IncomingMessage + ) -> bool: + return True + + async def process_exception( + self, exc: Exception, bot: Bot, message: IncomingMessage, exception_id: UUID | None + ) -> None: + if fsm_manager := getattr(message.state, "fsm", None): + await fsm_manager.drop_state() + + +class SendErrorExplainToUserHandler(AbstractExceptionHandler): + def __init__( + self, + next_handler: Self | None = None, + exception_explain_mapping: dict[type[Exception], str | Callable] | None = None, + ): + super().__init__(next_handler) + self.exception_explain_mapping = exception_explain_mapping or {} + + def should_process_exception( + self, exc: 
Exception, bot: Bot, message: IncomingMessage + ) -> bool: + return True + + async def _get_exception_message_for_user( + self, + exc: Exception, + bot: Bot, + message: IncomingMessage, + exception_id: UUID | None = None, + ) -> str: + if (explanation := self.exception_explain_mapping.get(type(exc))) is not None: + if isinstance(explanation, str): + raw_explanation = explanation + else: + raw_explanation = explanation(exc, bot, message, exception_id) + + return f"{raw_explanation}. Идентификатор ошибки:{exception_id}" + + return strings.SOMETHING_GOES_WRONG.format(error_uuid=exception_id) + + async def process_exception( + self, + exc: Exception, + bot: Bot, + message: IncomingMessage, + exception_id: UUID | None, + ) -> None: + message_text = await self._get_exception_message_for_user( + exc, bot, message, exception_id + ) + await bot.answer_message( + message_text, + wait_callback=not isinstance(exc, BotShuttingDownError), + ) diff --git a/app/presentation/bot/error_handlers/exceptions_chain_executor.py b/app/presentation/bot/error_handlers/exceptions_chain_executor.py new file mode 100644 index 0000000..d896038 --- /dev/null +++ b/app/presentation/bot/error_handlers/exceptions_chain_executor.py @@ -0,0 +1,104 @@ +from uuid import uuid4 + +from pybotx import Bot, IncomingMessage + +from app.presentation.bot.error_handlers.base_handlers import ( + AbstractExceptionHandler, + SendErrorExplainToUserHandler, + LoggingExceptionHandler, + DropFSMOnErrorHandler, +) + + +class ExceptionHandlersChainExecutor: + """ + Executes a chain of exception handlers in sequence. + + This class manages the execution chain of exception handlers, allowing for the + construction, extension, and execution of a linked chain. The purpose is to + process exceptions by passing them through a series of handlers where each + handler may handle or propagate the exception down the chain. Handlers can be + defined as either instances or types of `AbstractExceptionHandler`. 
+ + + """ + + def __init__( + self, handlers: list[type[AbstractExceptionHandler] | AbstractExceptionHandler] + ): + self._chain_head, self._chain_tail = self._create_chain(handlers) + + async def execute_chain( + self, exc: Exception, bot: Bot, message: IncomingMessage + ) -> None: + """ + Handles the execution of an exception handling chain. + + This method initiates a chain of exception handling by starting with + the head of the chain if it exists. Each node in the chain processes + the exception and potentially passes it along for further handling. + The chain makes use of a unique exception identifier for tracking. + + Parameters: + exc (Exception): The exception instance to be handled. + bot (Bot): The bot context required for processing the exception. + message (IncomingMessage): The incoming message context related + to the exception. + """ + if self._chain_head is None: + return + + exception_id = uuid4() + await self._chain_head.handle_exception(exc, bot, message, exception_id) + + def _get_handler( + self, handler: AbstractExceptionHandler | type[AbstractExceptionHandler] + ) -> AbstractExceptionHandler: + return handler if isinstance(handler, AbstractExceptionHandler) else handler() + + def _create_chain( + self, handlers: list[type[AbstractExceptionHandler] | AbstractExceptionHandler] + ) -> tuple[AbstractExceptionHandler | None, AbstractExceptionHandler | None]: + if not handlers: + return None, None + + first_handler = self._get_handler(handlers[0]) + + last_handler = first_handler + for handler in handlers[1:]: + next_handler = self._get_handler(handler) + last_handler.next_handler = next_handler + last_handler = next_handler + return first_handler, last_handler + + def extend( + self, handlers: list[AbstractExceptionHandler | type[AbstractExceptionHandler]] + ): + """Append handlers to the chain""" + new_head, new_tail = self._create_chain(handlers) + if self._chain_head is None: + self._chain_head = new_head + else: + 
self._chain_tail.next_handler = new_head + + self._chain_tail = new_tail + + def append( + self, handler: AbstractExceptionHandler | type[AbstractExceptionHandler] + ): + """Append handler to the end of chain""" + new_tail = self._get_handler(handler) + self._chain_tail.next_handler = new_tail + + +DEFAULT_HANDLERS = [ + LoggingExceptionHandler, + DropFSMOnErrorHandler, +] +DEFAULT_EXCEPTION_HANDLER_EXECUTOR = ExceptionHandlersChainExecutor(DEFAULT_HANDLERS) + +DEFAULT_HANDLERS_WITH_EXPLAIN = DEFAULT_HANDLERS + [SendErrorExplainToUserHandler] + +DEFAULT_EXCEPTION_HANDLER_EXECUTOR_WITH_EXPLAIN = ExceptionHandlersChainExecutor( + handlers=DEFAULT_HANDLERS_WITH_EXPLAIN +) diff --git a/app/presentation/bot/handlers/internal_error.py b/app/presentation/bot/error_handlers/internal_error_handler.py similarity index 100% rename from app/presentation/bot/handlers/internal_error.py rename to app/presentation/bot/error_handlers/internal_error_handler.py diff --git a/app/presentation/bot/handlers/error.py b/app/presentation/bot/handlers/error.py deleted file mode 100644 index 43be1b3..0000000 --- a/app/presentation/bot/handlers/error.py +++ /dev/null @@ -1,48 +0,0 @@ -from typing import Callable -from uuid import uuid4 - -from pybotx import Bot, BotShuttingDownError, IncomingMessage - -from app.logger import logger -from app.presentation.bot.resources import strings -from app.presentation.bot.validators.exceptions import MessageValidationError - - -class BaseExceptionHandler: - def __init__( - self, - exception_explain_mapping: dict[type[Exception], str | Callable] | None = None, - ): - self.exception_explain_mapping = exception_explain_mapping or {} - - async def handle_exception( - self, exc: Exception, bot: Bot, message: IncomingMessage - ) -> None: - if fsm_manager := getattr(message.state, "fsm", None): - await fsm_manager.drop_state() - - user_answer = await self._get_exception_message_for_user( - exc, - ) - - logger.error(f"Error: {user_answer}", exc_info=exc) - - await 
bot.answer_message( - user_answer, - wait_callback=not isinstance(exc, BotShuttingDownError), - ) - - async def _get_exception_message_for_user(self, exc: Exception) -> str: - error_uuid = uuid4() - - if explanation := self.exception_explain_mapping.get(type(exc)): - if isinstance(explanation, str): - raw_message = explanation - else: - raw_message = explanation(exc) - - return f"{raw_message}. Идентификатор ошибки:{error_uuid}" - elif isinstance(exc, MessageValidationError): - return f"Ошибка валидации запроса: {exc}. Идентификатор ошибки:{error_uuid}" - else: - return strings.SOMETHING_GOES_WRONG.format(error_uuid=error_uuid) diff --git a/app/presentation/bot/handlers/sample_record.py b/app/presentation/bot/handlers/sample_record.py deleted file mode 100644 index 947631a..0000000 --- a/app/presentation/bot/handlers/sample_record.py +++ /dev/null @@ -1,29 +0,0 @@ -from pybotx import Bot, IncomingMessage - -from app.application.use_cases.interfaces import ISampleRecordUseCases -from app.presentation.bot.handlers.command import BaseCommandHandler -from app.presentation.bot.resources.strings import SAMPLE_RECORD_CREATED_ANSWER -from app.presentation.bot.schemas.sample_record import SampleRecordCreateRequestSchema -from app.presentation.bot.validators.base import BotXJsonRequestParser - - -class CreateSampleRecordHandler(BaseCommandHandler): - incoming_argument_parser = BotXJsonRequestParser(SampleRecordCreateRequestSchema) - - def __init__( - self, - bot: Bot, - message: IncomingMessage, - use_cases: ISampleRecordUseCases, - ): - self._use_cases = use_cases - super().__init__(bot, message) - - async def handle_logic( - self, - request_parameter: SampleRecordCreateRequestSchema, # type: ignore - ) -> None: - created_record = await self._use_cases.create_record(request_parameter) - await self._bot.answer_message( - SAMPLE_RECORD_CREATED_ANSWER.format(**created_record.dict()) - ) diff --git a/app/presentation/bot/schemas/sample_record.py 
b/app/presentation/bot/schemas/sample_record.py index d18a629..eda4e59 100644 --- a/app/presentation/bot/schemas/sample_record.py +++ b/app/presentation/bot/schemas/sample_record.py @@ -10,6 +10,7 @@ class SampleRecordResponseSchema(BaseModel): id: int record_data: str + name: str class Config: orm_mode = True @@ -25,7 +26,8 @@ class Config: class SampleRecordCreateRequestSchema( BaseModel, ): - record_data: str = Field(..., min_length=1) + record_data: str = Field(..., min_length=1, max_length=128) + name: str = Field(..., min_length=1, max_length=32) class SampleRecordDeleteRequestSchema(BaseModel): @@ -34,9 +36,10 @@ class SampleRecordDeleteRequestSchema(BaseModel): class SampleRecordUpdateRequestSchema(BaseModel): id: int - record_data: str = Field(..., min_length=1) + record_data: str | None = Field(..., min_length=1, max_length=128) + name: str | None = Field(..., min_length=1, max_length=32) @classmethod def _from_plain_message_data(cls, message_data: str) -> Self: - record_id, record_data = message_data.split(" ") - return cls(id=record_id, record_data=record_data) # type: ignore[arg-type] + record_id, record_name, record_data = message_data.split(" ") + return cls(id=record_id, record_name=record_name, record_data=record_data) # type: ignore[arg-type] diff --git a/app/presentation/bot/validators/exceptions.py b/app/presentation/bot/validators/exceptions.py index f2b48fa..9111ea2 100644 --- a/app/presentation/bot/validators/exceptions.py +++ b/app/presentation/bot/validators/exceptions.py @@ -1,2 +1,5 @@ -class MessageValidationError(Exception): +from app.decorators.mapper.factories import ContextAwareError + + +class MessageValidationError(ContextAwareError): """Base class for message validation errors.""" diff --git a/app/presentation/bot/validators/sample_record.py b/app/presentation/bot/validators/sample_record.py index 2c5345a..592aa10 100644 --- a/app/presentation/bot/validators/sample_record.py +++ b/app/presentation/bot/validators/sample_record.py 
@@ -2,9 +2,9 @@ from orjson import JSONDecodeError from pybotx import IncomingMessage from pydantic import ValidationError -from sqlalchemy.exc import SQLAlchemyError -from app.decorators.exception_mapper import ExceptionMapper, EnrichedExceptionFactory +from app.decorators.mapper.exception_mapper import ExceptionMapper +from app.decorators.mapper.factories import EnrichedExceptionFactory from app.presentation.bot.schemas.sample_record import SampleRecordCreateRequestSchema from app.presentation.bot.validators.base import IBotRequestParser from app.presentation.bot.validators.exceptions import MessageValidationError diff --git a/app/settings.py b/app/settings.py index 1f8101e..af0c3aa 100644 --- a/app/settings.py +++ b/app/settings.py @@ -82,5 +82,7 @@ def _build_credentials_from_string( BOTX_CALLBACK_TIMEOUT_IN_SECONDS = 30 + BOT_ASYNC_CLIENT_TIMEOUT_IN_SECONDS = 60 + settings = AppSettings() # type: ignore[call-arg] diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index d9e32a6..3918f74 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -12,3 +12,18 @@ services: image: redis:7.0-alpine ports: - "6379:6379" + + worker: + build: . + environment: + - BOT_CREDENTIALS=$BOT_CREDENTIALS # cts_host@secret_key@bot_id + - POSTGRES_DSN=postgres://postgres:postgres@postgres/bot_refactor_db + - REDIS_DSN=redis://redis/0 + - DEBUG=true + # '$$' prevents docker-compose from interpolating a value + command: /bin/sh -c 'PYTHONPATH="$$PYTHONPATH:$$PWD" saq app.infrastructure.worker.worker.settings' + env_file: + - .env + restart: always + depends_on: [db] + diff --git a/docker-compose.yml b/docker-compose.yml index 0b1698f..94d279c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -31,7 +31,7 @@ services: build: . 
container_name: bot_refactor-worker # '$$' prevents docker-compose from interpolating a value - command: /bin/sh -c 'PYTHONPATH="$$PYTHONPATH:$$PWD" saq app.worker.worker.settings' + command: /bin/sh -c 'PYTHONPATH="$$PYTHONPATH:$$PWD" saq app.infrastructure.worker.settings' environment: *environment restart: always depends_on: *depends_on diff --git a/tests/factories.py b/tests/factories.py index f8de85f..38132dd 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -15,31 +15,32 @@ class SampleRecordFactory(Factory): class Meta: model = SampleRecord - record_data = factory.Faker("text") + record_data = factory.Faker("text", max_nb_chars=128) + name = factory.Faker("text", max_nb_chars=32) class SampleRecordCreateSchemaFactory(Factory): - """Factory for sample record create schema objects. - - Dict factory used to break dependency from inner schema object""" + """Factory for sample record create schema objects.""" class Meta: model = SampleRecordCreateRequestSchema - record_data = factory.Faker("text") + record_data = factory.Faker("text", max_nb_chars=128) + name = factory.Faker("text", max_nb_chars=32) -class SampleRecordUpdateSchemaFactory(DictFactory): +class SampleRecordUpdateSchemaFactory(Factory): """Factory for sample record update schema objects.""" class Meta: model = SampleRecordUpdateRequestSchema id = factory.Faker("integer") - record_data = factory.Faker("text") + record_data = factory.Faker("text", max_nb_chars=128) + name = factory.Faker("text", max_nb_chars=32) -class SampleRecordDeleteSchemaFactory(DictFactory): +class SampleRecordDeleteSchemaFactory(Factory): """Factory for sample record delete schema objects.""" class Meta: diff --git a/tests/integration/factories.py b/tests/integration/factories.py index 12da024..60d8402 100644 --- a/tests/integration/factories.py +++ b/tests/integration/factories.py @@ -10,4 +10,5 @@ class SampleRecordModelFactory(AsyncSQLAlchemyFactory): class Meta: model = SampleRecordModel - record_data = 
factory.Faker("text") + record_data = factory.Faker("text", max_nb_chars=100) + name = factory.Faker("text", max_nb_chars=32) diff --git a/tests/integration/repository/test_sample_record_repository.py b/tests/integration/repository/test_sample_record_repository.py index bacf73a..89e29ee 100644 --- a/tests/integration/repository/test_sample_record_repository.py +++ b/tests/integration/repository/test_sample_record_repository.py @@ -1,9 +1,15 @@ import pytest from deepdiff import DeepDiff from sqlalchemy import func, select +from sqlalchemy.exc import DataError from sqlalchemy.ext.asyncio import AsyncSession -from app.application.repository.exceptions import RecordDoesNotExistError +from app.application.repository.exceptions import ( + RecordDoesNotExistError, + RecordAlreadyExistsError, + ValidationError, + RecordCreateError, +) from app.domain.entities.sample_record import SampleRecord from app.infrastructure.db.sample_record.models import SampleRecordModel from app.infrastructure.repositories.sample_record import SampleRecordRepository @@ -15,16 +21,16 @@ def assert_database_object_equal_domain( ) -> None: assert db_object.id == domain_object.id assert db_object.record_data == domain_object.record_data + assert db_object.name == domain_object.name async def test_add_record( sample_record_repository: SampleRecordRepository, - sample_record_factory: type[SampleRecordModelFactory], isolated_session: AsyncSession, ): """Test adding a new record.""" - new_record = SampleRecord(record_data="test_add") + new_record = SampleRecord(record_data="test_add", name="test_name") created_record = await sample_record_repository.create(new_record) count = await isolated_session.scalar( @@ -35,6 +41,55 @@ async def test_add_record( diff = DeepDiff(new_record, created_record, exclude_paths={"id"}) assert not diff, diff + db_object = await isolated_session.scalar( + select(SampleRecordModel).where(SampleRecordModel.id == created_record.id) + ) + 
assert_database_object_equal_domain(db_object, created_record) + + +async def test_add_record_with_non_unique_name( + sample_record_factory: SampleRecordModelFactory, + sample_record_repository: SampleRecordRepository, + isolated_session: AsyncSession, +): + existing_record = await sample_record_factory.create( + record_data="test_add", name="test_name" + ) + new_record = SampleRecord(record_data="new_data", name="test_name") + + with pytest.raises(RecordAlreadyExistsError): + await sample_record_repository.create(new_record) + + +async def test_create_record_with_null_required_field( + sample_record_repository: SampleRecordRepository, +): + """Test creating a record with null required field raises ValidationError.""" + invalid_record = SampleRecord(record_data="test_add", name="test_name") # type: ignore + invalid_record.record_data = None + + with pytest.raises(ValidationError): + await sample_record_repository.create(invalid_record) + + +async def test_repository_handles_unexpected_database_error( + sample_record_repository: SampleRecordRepository, + monkeypatch, +): + """Test that unexpected database errors are re-raised as default exceptions.""" + + async def mock_add_and_commit(*args, **kwargs): + raise DataError("Unexpected database error", None, Exception()) + + monkeypatch.setattr( + sample_record_repository._session, "execute", mock_add_and_commit + ) + + record = SampleRecord(record_data="test_data", name="test_name") + + with pytest.raises(RecordCreateError): + await sample_record_repository.create(record) + async def test_update_record( sample_record_repository: SampleRecordRepository, @@ -43,19 +98,43 @@ async def test_update_record( ): """Test updating an existing record.""" - existing_record = await sample_record_factory.create(record_data="test_update") + existing_record = await sample_record_factory() updated_record = SampleRecord( - id=existing_record.id, record_data="test_update_new_value" + id=existing_record.id, record_data="updated_data", 
name="updated_name" ) - updated_record_in_db = await sample_record_repository.update(updated_record) + updated_record_from_repo = await sample_record_repository.update(updated_record) count = await isolated_session.scalar( select(func.count()).select_from(SampleRecordModel) ) assert count == 1 - assert updated_record_in_db.id == existing_record.id - assert updated_record_in_db.record_data == "test_update_new_value" + assert updated_record_from_repo.id == existing_record.id + assert updated_record_from_repo.record_data == updated_record.record_data + + record_from_db = await isolated_session.scalar( + select(SampleRecordModel).where(SampleRecordModel.id == existing_record.id) + ) + + assert_database_object_equal_domain(record_from_db, updated_record_from_repo) + + +async def test_update_record_with_non_unique_name( + sample_record_factory: type[SampleRecordModelFactory], + sample_record_repository: SampleRecordRepository, + isolated_session: AsyncSession, +): + existing_record_1 = await sample_record_factory.create() + existing_record_2 = await sample_record_factory.create() + + updated_record_2 = SampleRecord( + id=existing_record_2.id, + record_data=existing_record_2.record_data, + name=existing_record_1.name, + ) + + with pytest.raises(RecordAlreadyExistsError): + await sample_record_repository.update(updated_record_2) async def test_delete_record( @@ -64,7 +143,7 @@ async def test_delete_record( sample_record_factory: type[SampleRecordModelFactory], ): """Test deleting a record.""" - existing_record = await sample_record_factory.create(record_data="test_delete") + existing_record = await sample_record_factory.create() await sample_record_repository.delete(existing_record.id) db_records_count = await isolated_session.scalar( @@ -95,7 +174,7 @@ async def test_get_non_existing_record( sample_record_repository: SampleRecordRepository, isolated_session: AsyncSession, ): - """Test deleting a not existing record raises the error.""" + """Test get a not existing record 
raises the error.""" with pytest.raises(RecordDoesNotExistError): await sample_record_repository.get_by_id(42) diff --git a/tests/integration/sample_record_use_cases/test_sample_record_use_cases_int.py b/tests/integration/sample_record_use_cases/test_sample_record_use_cases_int.py index 6b95fb6..1aae251 100644 --- a/tests/integration/sample_record_use_cases/test_sample_record_use_cases_int.py +++ b/tests/integration/sample_record_use_cases/test_sample_record_use_cases_int.py @@ -14,6 +14,7 @@ def assert_database_object_equal_to_retrieved_object( assert isinstance(retrieved_object, SampleRecordResponseSchema) assert database_object.id == retrieved_object.id assert database_object.record_data == retrieved_object.record_data + assert database_object.name == retrieved_object.name async def test_sample_record_use_case_add_record_in_database( @@ -36,12 +37,13 @@ async def test_sample_record_use_case_add_record_in_database( async def test_sample_record_use_case_get_record_from_database( sample_record_use_cases_with_real_repo: ISampleRecordUseCases, isolated_session: AsyncSession, + sample_record_factory:type[SampleRecordModelFactory] ): """Test get a record.""" - existing_record = SampleRecordModel(record_data="existing_record") - isolated_session.add(existing_record) - await isolated_session.flush() + + + existing_record = await sample_record_factory.create() response = await sample_record_use_cases_with_real_repo.get_record( existing_record.id @@ -55,12 +57,11 @@ async def test_sample_record_use_case_get_record_from_database( async def test_sample_record_use_case_remove_record_from_database( sample_record_use_cases_with_real_repo: ISampleRecordUseCases, isolated_session: AsyncSession, + sample_record_factory:type[SampleRecordModelFactory] ): """Test adding a new record.""" - existing_record = SampleRecordModel(record_data="existing_record") - isolated_session.add(existing_record) - await isolated_session.flush() + existing_record = await sample_record_factory.create() 
await sample_record_use_cases_with_real_repo.delete_record(existing_record.id) diff --git a/tests/presentation/commands/__init__.py b/tests/presentation/commands/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/unit/decorators/test_classes.py b/tests/unit/decorators/test_classes.py index 4534d08..25b83b3 100644 --- a/tests/unit/decorators/test_classes.py +++ b/tests/unit/decorators/test_classes.py @@ -1,4 +1,5 @@ -from app.decorators.exception_mapper import ExceptionContext, ExceptionFactory +from app.decorators.mapper.factories import ExceptionFactory +from app.decorators.mapper.context import ExceptionContext class ParentError(Exception): diff --git a/tests/unit/decorators/test_errors_mapper.py b/tests/unit/decorators/test_errors_mapper.py index ca93a88..bfb6dad 100644 --- a/tests/unit/decorators/test_errors_mapper.py +++ b/tests/unit/decorators/test_errors_mapper.py @@ -2,7 +2,7 @@ import pytest -from app.decorators.exception_mapper import ExceptionMapper +from app.decorators.mapper.exception_mapper import ExceptionMapper from app.logger import logger from tests.unit.decorators.test_classes import ( ChildError, @@ -210,34 +210,11 @@ def function(exception: type[Exception]) -> None: assert isinstance(exc_info.value.__cause__, ParentError) -@pytest.mark.asyncio -async def test_work_with_logging() -> None: - """Test that the decorator logs exceptions for functions when configured.""" - - mapper = ExceptionMapper( - exception_map={ - ChildError: DummyFactory("child"), - } - ) - - @mapper - def test_func() -> None: - raise ChildError("[child]") - - with patch.object(logger, "error") as mock_logger: - with pytest.raises(GeneratedError): - test_func() - - mock_logger.assert_called_once() - assert "test_func" in mock_logger.call_args[0][0] - assert mock_logger.call_args[1]["exc_info"] is True - - def test_sync_function_detailed_error_message() -> None: - """Test that the decorator provides detailed error messages when configured.""" + 
"""Test that the decorator pass right context to the exception factory""" mapper = ExceptionMapper( - exception_map={ChildError: DummyFactory("child", detailed=True)}, log_error=True + exception_map={ChildError: DummyFactory("child", detailed=True)} ) @mapper From f765535e3d88b41157e6a5f27919e9734ffa4cf0 Mon Sep 17 00:00:00 2001 From: vladimirgubarik Date: Wed, 6 Aug 2025 15:48:52 +0300 Subject: [PATCH 06/15] fix worker for di --- app/infrastructure/containers.py | 1 + app/infrastructure/db/sqlalchemy.py | 16 +++--- .../worker/tasks/simple_task.py | 8 +-- app/infrastructure/worker/worker.py | 33 +++++------- app/main.py | 2 +- .../bot/commands/sample_record.py | 5 +- app/presentation/bot/decorators/__init__.py | 0 .../bot/decorators/bot_exception_answer.py | 53 ------------------- app/settings.py | 6 +-- docker-compose.dev.yml | 4 +- pyproject.toml | 2 +- tests/conftest.py | 11 ---- 12 files changed, 38 insertions(+), 103 deletions(-) delete mode 100644 app/presentation/bot/decorators/__init__.py delete mode 100644 app/presentation/bot/decorators/bot_exception_answer.py diff --git a/app/infrastructure/containers.py b/app/infrastructure/containers.py index caf3d6e..9803f9a 100644 --- a/app/infrastructure/containers.py +++ b/app/infrastructure/containers.py @@ -146,6 +146,7 @@ class ApplicationStartupContainer(containers.DeclarativeContainer): ) + class WorkerStartupContainer(containers.DeclarativeContainer): redis_client = Singleton(lambda: aioredis.from_url(settings.REDIS_DSN)) diff --git a/app/infrastructure/db/sqlalchemy.py b/app/infrastructure/db/sqlalchemy.py index e92d8ca..f7d8d73 100644 --- a/app/infrastructure/db/sqlalchemy.py +++ b/app/infrastructure/db/sqlalchemy.py @@ -57,16 +57,11 @@ def get_engine() -> AsyncEngine: session_factory = async_sessionmaker(bind=get_engine(), expire_on_commit=False) -async def verify_db_connection(engine: AsyncEngine) -> None: - connection = await engine.connect() - await connection.close() - - async def 
close_db_connections() -> None: await get_engine().dispose() -def provide_transaction_session(func: Callable) -> Callable: +def provide_session(func: Callable) -> Callable: """ Provides a database session to an async function if one is not already passed. @@ -78,7 +73,14 @@ def provide_transaction_session(func: Callable) -> Callable: async def wrapper(*args: Any, **kwargs: Any) -> Any: if kwargs.get("session"): return await func(*args, **kwargs) + async with session_factory() as session: - return await func(*args, **kwargs, session=session) + try: + return await func(*args, **kwargs, session=session) + except Exception: + await session.rollback() + raise + finally: + await session.close() return wrapper diff --git a/app/infrastructure/worker/tasks/simple_task.py b/app/infrastructure/worker/tasks/simple_task.py index a1a9921..2681e91 100644 --- a/app/infrastructure/worker/tasks/simple_task.py +++ b/app/infrastructure/worker/tasks/simple_task.py @@ -1,3 +1,4 @@ +import asyncio from typing import Any from dependency_injector.wiring import inject, Provide @@ -13,6 +14,7 @@ async def heartbeat_task( ctx: dict[str, Any], bot: Bot = Provide[WorkerStartupContainer.bot], ): - # logger.info("Heartbeat task started") - - logger.info(f"Heartbeat task executed {[account.id for account in bot.bot_accounts]}") + task_name = asyncio.current_task().get_name() + logger.info(f"Task {task_name} Heartbeat task executed start bot id {id(bot)}") + await asyncio.sleep(10) + logger.info(f"Task {task_name} Heartbeat task executed end bot id {id(bot)}") diff --git a/app/infrastructure/worker/worker.py b/app/infrastructure/worker/worker.py index 564538b..e63a030 100644 --- a/app/infrastructure/worker/worker.py +++ b/app/infrastructure/worker/worker.py @@ -15,14 +15,14 @@ from app.infrastructure.worker.tasks.simple_task import heartbeat_task from app.logger import logger -# `saq` import its own settings and hides our module -from app.settings import settings as app_settings +from app.settings 
import settings SaqCtx = Dict[str, Any] +# queue = Queue(aioredis.from_url(settings.REDIS_DSN), name="bot_template") +queue = Queue.from_url(settings.REDIS_DSN, name="bot_template_worker") -queue = Queue(aioredis.from_url(app_settings.REDIS_DSN), name="bot_refactor") @inject async def _startup_with_injection( @@ -45,11 +45,7 @@ async def _shutdown_with_injection( async def startup(ctx: SaqCtx) -> None: worker_startup_container = WorkerStartupContainer() - queue.add_cron_job( - CronJob(function=heartbeat_task, cron="*/5 * * * * *", unique=True) - ) worker_startup_container.wire(modules=[__name__, "app.infrastructure.worker.tasks"]) - await _startup_with_injection() logger.info("Worker started") @@ -60,21 +56,18 @@ async def shutdown(ctx: SaqCtx) -> None: logger.info("Worker stopped") -settings = { +saq_settings = { "queue": queue, "functions": [], - # "cron_jobs": [ - # CronJob( - # function=heartbeat_task, - # cron="*/5 * * * * *", - # unique=True, - # # timeout=app_settings.PERIODIC_TASKS_DEFAULT_TIMEOUT, - # # heartbeat=app_settings.PERIODIC_TASKS_DEFAULT_HEARTBEAT, - # # retries=app_settings.PERIODIC_TASKS_DEFAULT_RETRIES, - # # ttl=app_settings.PERIODIC_TASKS_DEFAULT_TTL, - # ), - # ], - "concurrency": 8, + "cron_jobs": [ + CronJob( + function=heartbeat_task, + cron="* * * * * */5", + unique=False, + timeout=15, + ), + ], + "concurrency": settings.WORKER_CONCURRENCY, "startup": startup, "shutdown": shutdown, } diff --git a/app/main.py b/app/main.py index 0830d6d..ab155bf 100644 --- a/app/main.py +++ b/app/main.py @@ -53,7 +53,7 @@ def get_application() -> FastAPI: application = FastAPI(title=strings.BOT_PROJECT_NAME, openapi_url=None) - # put bot to state for tests + # put bot to state only for tests application.state.bot = main_container.bot() application.add_event_handler( diff --git a/app/presentation/bot/commands/sample_record.py b/app/presentation/bot/commands/sample_record.py index d62f60b..8b67cfb 100644 --- 
a/app/presentation/bot/commands/sample_record.py +++ b/app/presentation/bot/commands/sample_record.py @@ -5,15 +5,14 @@ from app.application.use_cases.interfaces import ISampleRecordUseCases from app.infrastructure.containers import BotSampleRecordCommandContainer -from app.infrastructure.db.sqlalchemy import provide_transaction_session +from app.infrastructure.db.sqlalchemy import provide_session from app.presentation.bot.commands.command_listing import SampleRecordCommands from app.presentation.bot.command_handlers.sample_record import CreateSampleRecordHandler collector = HandlerCollector() - @collector.command(**SampleRecordCommands.CREATE_RECORD.command_data()) -@provide_transaction_session +@provide_session @inject async def create_sample_record( message: IncomingMessage, diff --git a/app/presentation/bot/decorators/__init__.py b/app/presentation/bot/decorators/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/app/presentation/bot/decorators/bot_exception_answer.py b/app/presentation/bot/decorators/bot_exception_answer.py deleted file mode 100644 index db71832..0000000 --- a/app/presentation/bot/decorators/bot_exception_answer.py +++ /dev/null @@ -1,53 +0,0 @@ -"""Module for translation Exception errors to user friendly messages.""" - -import functools -import logging -from typing import Any, Callable - -from pybotx import Bot - -logger = logging.getLogger(__name__) - - -def _get_user_message( - exception_map: dict[type[Exception], str | Callable], exc: Exception -) -> str | None: - """Extract user message from exception mapping.""" - exception_message_to_user = exception_map.get(type(exc)) - if callable(exception_message_to_user): - return exception_message_to_user(exc) - return exception_message_to_user - - -def explain_exception_to_user( - mapping: dict[type[Exception], str | Callable[[Exception], str]], -) -> Callable: - """ - Decorate a function to catch specified exceptions and send a response to the user. 
- - For each caught exception, it responds using either a string message or a callable - response provided in the `exception_map`. - - :param mapping: A dictionary mapping exception types to either string messages - or callables that construct a response when invoked with the exception as an - argument. The keys must be subclasses of `Exception`, and the values must be - either strings or callables. - """ - - def decorator(func: Callable) -> Callable: # type: ignore - @functools.wraps(func) - async def wrapper(bot: Bot, *args, **kwargs) -> Any: # type: ignore - try: - return await func( - bot, - *args, - **kwargs, - ) - except tuple(mapping.keys()) as exc: - if (message := _get_user_message(mapping, exc)) is not None: - await bot.answer_message(message) - raise - - return wrapper - - return decorator diff --git a/app/settings.py b/app/settings.py index af0c3aa..e50b15d 100644 --- a/app/settings.py +++ b/app/settings.py @@ -77,11 +77,11 @@ def _build_credentials_from_string( REDIS_DSN: str REDIS_CONNECTION_POOL_SIZE: int = 10 - # healthcheck - WORKER_TIMEOUT_SEC: float = 4 + # worker + WORKER_CONCURRENCY: int = 2 + WORKERS_COUNT: int = 1 BOTX_CALLBACK_TIMEOUT_IN_SECONDS = 30 - BOT_ASYNC_CLIENT_TIMEOUT_IN_SECONDS = 60 diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 3918f74..cc09328 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -21,7 +21,9 @@ services: - REDIS_DSN=redis://redis/0 - DEBUG=true # '$$' prevents docker-compose from interpolating a value - command: /bin/sh -c 'PYTHONPATH="$$PYTHONPATH:$$PWD" saq app.infrastructure.worker.worker.settings' + command: /bin/sh -c 'PYTHONPATH="$$PYTHONPATH:$$PWD" saq app.infrastructure.worker.worker.saq_settings --web --workers 2' + ports: + - "8081:8080" env_file: - .env restart: always diff --git a/pyproject.toml b/pyproject.toml index ad82758..de817c7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,6 @@ asyncpg = "~0.29.0" # DB backend for application psycopg2-binary = 
"~2.9.9" # DB backend for alembic (migration tool) redis = "~5.0.3" -saq = { version = "~0.12.4", extras = ["hiredis"] } importlib-resources = { version = "~5.4.0", python = "<3.9" } zipp = { version = "~3.7.0", python = "<3.9" } @@ -40,6 +39,7 @@ orjson = "^3.10.18" factory-boy = "^3.3.3" async-factory-boy = "^1.0.1" cachetools = "^6.1.0" +saq = {extras = ["hiredis", "web"], version = "^0.25.2"} [tool.poetry.dev-dependencies] diff --git a/tests/conftest.py b/tests/conftest.py index 4d9732c..ede2009 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -123,14 +123,3 @@ def factory( ) return factory - - -# @pytest.fixture -# def loguru_caplog( -# caplog: pytest.LogCaptureFixture, -# ) -> Generator[pytest.LogCaptureFixture, None, None]: -# # https://github.com/Delgan/loguru/issues/59 -# -# handler_id = logger.add(caplog.handler, format="{message}") -# yield caplog -# logger.remove(handler_id) From e117948d606223f3f8a0dabd2d6bf613ac31aa4a Mon Sep 17 00:00:00 2001 From: vladimirgubarik Date: Thu, 7 Aug 2025 16:25:59 +0300 Subject: [PATCH 07/15] uof with di for handlers --- app/application/repository/interfaces.py | 25 ++++++ app/application/use_cases/record_use_cases.py | 15 ++-- app/infrastructure/containers.py | 50 +++++------ app/infrastructure/db/sqlalchemy.py | 59 ++++++------ .../repositories/sample_record.py | 11 ++- .../repositories/unit_of_work.py | 51 +++++++++++ app/main.py | 40 +++++---- .../bot/command_handlers/sample_record.py | 89 ++++++++++++++++--- .../bot/commands/command_listing.py | 4 + .../bot/commands/sample_record.py | 49 ++++++++-- .../bot/error_handlers/base_handlers.py | 33 ++++--- .../exceptions_chain_executor.py | 12 +-- app/presentation/bot/resources/strings.py | 8 ++ .../sample_record_bad_data_format.txt.mako | 2 + .../sample_record_created_answer.txt.mako | 2 +- .../sample_record_deleted_answer.txt.mako | 1 + app/presentation/bot/schemas/sample_record.py | 6 ++ app/presentation/bot/validators/base.py | 44 +++++++-- 
.../bot/validators/sample_record.py | 27 ------ tests/unit/error_handlers/__init__.py | 0 .../error_handlers/test_chain_executor.py | 89 +++++++++++++++++++ tests/unit/error_handlers/test_classes.py | 27 ++++++ 22 files changed, 489 insertions(+), 155 deletions(-) create mode 100644 app/infrastructure/repositories/unit_of_work.py create mode 100644 app/presentation/bot/resources/templates/sample_record/sample_record_bad_data_format.txt.mako create mode 100644 app/presentation/bot/resources/templates/sample_record/sample_record_deleted_answer.txt.mako delete mode 100644 app/presentation/bot/validators/sample_record.py create mode 100644 tests/unit/error_handlers/__init__.py create mode 100644 tests/unit/error_handlers/test_chain_executor.py create mode 100644 tests/unit/error_handlers/test_classes.py diff --git a/app/application/repository/interfaces.py b/app/application/repository/interfaces.py index 2d3439a..4ddadfc 100644 --- a/app/application/repository/interfaces.py +++ b/app/application/repository/interfaces.py @@ -6,6 +6,26 @@ from app.domain.entities.sample_record import SampleRecord +class IUnitOfWork(ABC): + """Interface for unit of work operations.""" + + @abstractmethod + async def __aenter__(self): + return self + + @abstractmethod + async def __aexit__(self, exc_type, exc_val, exc_tb): + pass + + +class ISampleRecordUnitOfWork(IUnitOfWork, ABC): + """Interface for record unit of work operations.""" + + @abstractmethod + def get_sample_record_repository(self) -> "ISampleRecordRepository": + pass + + class ISampleRecordRepository(ABC): """Interface for record repository operations.""" @@ -60,3 +80,8 @@ async def get_by_id(self, record_id: int) -> SampleRecord: async def get_all(self) -> List[SampleRecord]: """Get all records from the database""" pass + + @abstractmethod + async def commit(self) -> None: + """Commit changes to the database""" + pass diff --git a/app/application/use_cases/record_use_cases.py b/app/application/use_cases/record_use_cases.py 
index 6b4ee76..e650d05 100644 --- a/app/application/use_cases/record_use_cases.py +++ b/app/application/use_cases/record_use_cases.py @@ -23,8 +23,11 @@ async def create_record( domain_object = SampleRecord( record_data=request_object.record_data, name=request_object.name ) - created_record = await self._repo.create(domain_object) - return SampleRecordResponseSchema.from_orm(created_record) + created_record = SampleRecordResponseSchema.from_orm( + await self._repo.create(domain_object) + ) + + return created_record async def update_record( self, update_request: SampleRecordUpdateRequestSchema @@ -33,10 +36,12 @@ async def update_record( domain_object = SampleRecord( record_data=update_request.record_data, id=update_request.id, - name=update_request.name + name=update_request.name, + ) + updated_record = SampleRecordResponseSchema.from_orm( + await self._repo.update(domain_object) ) - updated_record = await self._repo.update(domain_object) - return SampleRecordResponseSchema.from_orm(updated_record) + return updated_record async def delete_record(self, record_id: int) -> None: """Delete a record.""" diff --git a/app/infrastructure/containers.py b/app/infrastructure/containers.py index 9803f9a..701ad6f 100644 --- a/app/infrastructure/containers.py +++ b/app/infrastructure/containers.py @@ -1,4 +1,5 @@ import asyncio +from importlib import import_module from dependency_injector import containers, providers from dependency_injector.providers import Callable, Factory, Singleton @@ -14,7 +15,10 @@ PubsubExceptionHandler, ) from app.infrastructure.repositories.caching.redis_repo import RedisRepo -from app.infrastructure.repositories.sample_record import SampleRecordRepository +from app.infrastructure.repositories.unit_of_work import ( + ReadOnlySampleRecordUnitOfWork, + WriteSampleRecordUnitOfWork, +) from app.logger import logger from app.presentation.bot.error_handlers.internal_error_handler import ( @@ -27,32 +31,18 @@ class 
BotSampleRecordCommandContainer(containers.DeclarativeContainer): - record_use_cases_factory = Callable( - lambda session: SampleRecordUseCases( - record_repo=SampleRecordRepository(session=session) - ) - ) + session_factory = providers.Dependency() + ro_unit_of_work: Factory[ReadOnlySampleRecordUnitOfWork] = Factory( + ReadOnlySampleRecordUnitOfWork, session_factory + ) + rw_unit_of_work: Factory[WriteSampleRecordUnitOfWork] = Factory( + WriteSampleRecordUnitOfWork, session_factory + ) -# class StorageContainer(containers.DeclarativeContainer): -# wiring_config = containers.WiringConfiguration( -# modules=["app.presentation.bot.commands.sample_records"] -# ) -# -# # Provider that returns a factory to create sessions -# session_factory = Factory(build_db_session_factory) -# -# # Provider that creates a session (e.g., AsyncSession instance) -# session = Resource(session_factory) -# -# # Provider that creates the SampleRecordUseCases, injecting the session -# record_use_cases = Factory( -# SampleRecordUseCases, -# record_repo=Factory( -# SampleRecordRepository, -# session=session -# ) -# ) + record_use_cases_factory = Callable( + lambda repository: SampleRecordUseCases(record_repo=repository) + ) class CallbackTaskManager: @@ -117,11 +107,16 @@ class ApplicationStartupContainer(containers.DeclarativeContainer): {} if not settings.RAISE_BOT_EXCEPTIONS else {Exception: internal_error_handler} ) - from app.presentation.bot.commands import common, sample_record + # Ленивая загрузка коллекторов + @staticmethod + def get_collectors(): + common = import_module("app.presentation.bot.commands.common") + sample_record = import_module("app.presentation.bot.commands.sample_record") + return [common.collector, sample_record.collector] bot = providers.Singleton( Bot, - collectors=[common.collector, sample_record.collector], + collectors=Callable(get_collectors), bot_accounts=settings.BOT_CREDENTIALS, exception_handlers=exception_handlers, # type: ignore 
default_callback_timeout=settings.BOTX_CALLBACK_TIMEOUT_IN_SECONDS, @@ -146,7 +141,6 @@ class ApplicationStartupContainer(containers.DeclarativeContainer): ) - class WorkerStartupContainer(containers.DeclarativeContainer): redis_client = Singleton(lambda: aioredis.from_url(settings.REDIS_DSN)) diff --git a/app/infrastructure/db/sqlalchemy.py b/app/infrastructure/db/sqlalchemy.py index f7d8d73..1887712 100644 --- a/app/infrastructure/db/sqlalchemy.py +++ b/app/infrastructure/db/sqlalchemy.py @@ -41,7 +41,6 @@ def make_url_sync(url: str) -> str: Base = declarative_base(metadata=MetaData(naming_convention=convention)) - @lru_cache(maxsize=1) def get_engine() -> AsyncEngine: """Lazily initialize and cache a single SQLAlchemy async engine.""" @@ -54,33 +53,31 @@ def get_engine() -> AsyncEngine: ) -session_factory = async_sessionmaker(bind=get_engine(), expire_on_commit=False) - - -async def close_db_connections() -> None: - await get_engine().dispose() - - -def provide_session(func: Callable) -> Callable: - """ - Provides a database session to an async function if one is not already passed. - - :param func: The asynchronous function to wrap. It must accept a `session` - keyword argument. - :return: The wrapped function with automatic session provisioning.""" - - @wraps(func) - async def wrapper(*args: Any, **kwargs: Any) -> Any: - if kwargs.get("session"): - return await func(*args, **kwargs) - - async with session_factory() as session: - try: - return await func(*args, **kwargs, session=session) - except Exception: - await session.rollback() - raise - finally: - await session.close() - - return wrapper +def get_session_factory() -> async_sessionmaker: + engine = get_engine() + return async_sessionmaker(bind=engine, expire_on_commit=False) + +# +# def provide_session(func: Callable) -> Callable: +# """ +# Provides a database session to an async function if one is not already passed. +# +# :param func: The asynchronous function to wrap. 
It must accept a `session` +# keyword argument. +# :return: The wrapped function with automatic session provisioning.""" +# +# @wraps(func) +# async def wrapper(*args: Any, **kwargs: Any) -> Any: +# if kwargs.get("session"): +# return await func(*args, **kwargs) +# +# async with session_factory() as session: +# try: +# return await func(*args, **kwargs, session=session) +# except Exception: +# await session.rollback() +# raise +# finally: +# await session.close() +# +# return wrapper diff --git a/app/infrastructure/repositories/sample_record.py b/app/infrastructure/repositories/sample_record.py index 285e93f..68e7cca 100644 --- a/app/infrastructure/repositories/sample_record.py +++ b/app/infrastructure/repositories/sample_record.py @@ -13,7 +13,7 @@ RecordUpdateError, RecordAlreadyExistsError, ForeignKeyError, - ValidationError, + ValidationError, BaseRepositoryError, ) from app.application.repository.interfaces import ISampleRecordRepository from app.decorators.mapper.exception_mapper import ( @@ -58,6 +58,15 @@ def __init__(self, session: AsyncSession): """ self._session = session + @ExceptionMapper( + { + Exception: EnrichedExceptionFactory(BaseRepositoryError), + }, + is_bound_method=True, + ) + async def commit(self) -> None: + await self._session.commit() + @ExceptionMapper( { IntegrityError: IntegrityErrorFactory(RecordCreateError), diff --git a/app/infrastructure/repositories/unit_of_work.py b/app/infrastructure/repositories/unit_of_work.py new file mode 100644 index 0000000..a313086 --- /dev/null +++ b/app/infrastructure/repositories/unit_of_work.py @@ -0,0 +1,51 @@ +import asyncio + +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker + +from app.application.repository.interfaces import ( + ISampleRecordUnitOfWork, + ISampleRecordRepository, +) +from app.infrastructure.repositories.sample_record import SampleRecordRepository + + +class ReadOnlySampleRecordUnitOfWork(ISampleRecordUnitOfWork): + def get_sample_record_repository(self) -> 
ISampleRecordRepository: + if not self._session: + raise RuntimeError("Session is not initialized") + + return SampleRecordRepository(self._session) + + def __init__(self, session_factory: async_sessionmaker): + super().__init__() + self.session_factory = session_factory + self._session: AsyncSession | None = None + + async def __aenter__(self): + self._session = self.session_factory() + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + try: + # Recommended for implicit resources cleanup + await self._session.rollback() + finally: + await self._session.close() + + +class WriteSampleRecordUnitOfWork(ReadOnlySampleRecordUnitOfWork): + """Unit of Work for write operations with full transaction management.""" + + async def __aenter__(self): + self._session = self.session_factory() + await asyncio.wait_for(self._session.begin(), timeout=5) + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + try: + if exc_type: + await self._session.rollback() + else: + await self._session.commit() + finally: + await self._session.close() diff --git a/app/main.py b/app/main.py index ab155bf..b1d61df 100644 --- a/app/main.py +++ b/app/main.py @@ -12,30 +12,29 @@ BotSampleRecordCommandContainer, CallbackTaskManager, ) -from app.infrastructure.db.sqlalchemy import close_db_connections +from app.infrastructure.db.sqlalchemy import ( + get_session_factory, get_engine, +) from app.presentation.api.routers import router from app.presentation.bot.resources import strings async def startup( - bot: Bot = Provide[ApplicationStartupContainer.bot], + bot: Bot, ) -> None: await bot.startup() async def shutdown( - callback_task_manager: CallbackTaskManager = Provide[ - ApplicationStartupContainer.callback_task_manager - ], - bot: Bot = Provide[ApplicationStartupContainer.bot], - redis_client: Redis = Provide[ApplicationStartupContainer.redis_client], + container: ApplicationStartupContainer = Provide[ApplicationStartupContainer], ) -> None: - await 
bot.shutdown() + await container.bot().shutdown() - await callback_task_manager.shutdown() + await container.callback_task_manager().shutdown() - await redis_client.aclose() - await close_db_connections() + await container.redis_client().aclose() + await container.shutdown_resources() + await get_engine().dispose() def get_application() -> FastAPI: @@ -43,10 +42,18 @@ def get_application() -> FastAPI: # Initialize the main application container main_container = ApplicationStartupContainer() - main_container.wire(modules=["app.main", "app.presentation.api.botx"]) + main_container.wire( + modules=[ + "app.main", + "app.presentation.api.botx", + "app.presentation.bot.commands.sample_record", + ] + ) # Initialize the SampleRecord commands container - sample_record_commands_container = BotSampleRecordCommandContainer() + sample_record_commands_container = BotSampleRecordCommandContainer( + session_factory=get_session_factory() + ) sample_record_commands_container.wire( modules=["app.presentation.bot.commands.sample_record"] ) @@ -64,9 +71,10 @@ def get_application() -> FastAPI: "shutdown", partial( shutdown, - callback_task_manager=main_container.callback_task_manager(), - bot=main_container.bot(), - redis_client=main_container.redis_client(), + # callback_task_manager=main_container.callback_task_manager(), + # bot=main_container.bot(), + # redis_client=main_container.redis_client(), + container=main_container, ), ) diff --git a/app/presentation/bot/command_handlers/sample_record.py b/app/presentation/bot/command_handlers/sample_record.py index 7c02667..4850d1e 100644 --- a/app/presentation/bot/command_handlers/sample_record.py +++ b/app/presentation/bot/command_handlers/sample_record.py @@ -1,17 +1,36 @@ +from typing import Callable + from pybotx import Bot, IncomingMessage -from app.application.repository.exceptions import RecordCreateError, \ - RecordAlreadyExistsError, ValidationError +from app.application.repository.exceptions import ( + RecordCreateError, + 
RecordAlreadyExistsError, + RecordDoesNotExistError, +) +from app.application.repository.interfaces import ISampleRecordUnitOfWork, \ + ISampleRecordRepository from app.application.use_cases.interfaces import ISampleRecordUseCases from app.presentation.bot.command_handlers.base_handler import BaseCommandHandler from app.presentation.bot.error_handlers.exceptions_chain_executor import ( - ExceptionHandlersChainExecutor, DEFAULT_HANDLERS, + ExceptionHandlersChainExecutor, + DEFAULT_HANDLERS, +) +from app.presentation.bot.error_handlers.base_handlers import ( + SendErrorExplainToUserHandler, +) +from app.presentation.bot.resources.strings import ( + SAMPLE_RECORD_CREATED_ANSWER, + SAMPLE_RECORD_DELETED_ANSWER, +) +from app.presentation.bot.schemas.sample_record import ( + SampleRecordCreateRequestSchema, + SampleRecordDeleteRequestSchema, ) -from app.presentation.bot.error_handlers.base_handlers import \ - SendErrorExplainToUserHandler -from app.presentation.bot.resources.strings import SAMPLE_RECORD_CREATED_ANSWER -from app.presentation.bot.schemas.sample_record import SampleRecordCreateRequestSchema -from app.presentation.bot.validators.base import BotXJsonRequestParser +from app.presentation.bot.validators.base import ( + BotXJsonRequestParser, + BotXPlainRequestParser, +) +from app.presentation.bot.validators.exceptions import MessageValidationError class CreateSampleRecordHandler(BaseCommandHandler): @@ -22,7 +41,51 @@ class CreateSampleRecordHandler(BaseCommandHandler): exception_explain_mapping={ RecordAlreadyExistsError: "Запись с такими параметрами уже существует", RecordCreateError: "Внутренняя ошибка создания записи", - ValidationError: "Неправильный формат данных" + MessageValidationError: "Неправильный формат данных", + } + ) + ] + exception_handler_chain_executor = ExceptionHandlersChainExecutor( + _EXCEPTIONS_HANDLERS + ) + + def __init__( + self, + bot: Bot, + message: IncomingMessage, + unit_of_work: ISampleRecordUnitOfWork, + use_case_factory: 
Callable[[ISampleRecordRepository], ISampleRecordUseCases] + ): + self._use_cases = use_case_factory + self.unit_of_work = unit_of_work + + super().__init__(bot, message, self.exception_handler_chain_executor) + + async def handle_logic( + self, + request_parameter: SampleRecordCreateRequestSchema, # type: ignore + ) -> None: + async with self.unit_of_work as uof: + use_case = self._use_cases(uof.get_sample_record_repository()) + created_record = await use_case.create_record(request_parameter) + + await self._bot.answer_message( + SAMPLE_RECORD_CREATED_ANSWER.format( + id=created_record.id, + record_data=created_record.record_data, + name=created_record.name, + ) + ) + + +class DeleteSampleRecordHandler(BaseCommandHandler): + incoming_argument_parser = BotXPlainRequestParser(SampleRecordDeleteRequestSchema) + + _EXCEPTIONS_HANDLERS = DEFAULT_HANDLERS + [ + SendErrorExplainToUserHandler( + exception_explain_mapping={ + RecordDoesNotExistError: "Запиcь с указанным id не найдена", + MessageValidationError: "Неправильный формат данных", } ) ] @@ -42,9 +105,11 @@ def __init__( async def handle_logic( self, - request_parameter: SampleRecordCreateRequestSchema, # type: ignore + request_parameter: SampleRecordDeleteRequestSchema, # type: ignore ) -> None: - created_record = await self._use_cases.create_record(request_parameter) + await self._use_cases.delete_record(request_parameter.id) await self._bot.answer_message( - SAMPLE_RECORD_CREATED_ANSWER.format(**created_record.dict()) + SAMPLE_RECORD_DELETED_ANSWER.format( + id=request_parameter.id, + ) ) diff --git a/app/presentation/bot/commands/command_listing.py b/app/presentation/bot/commands/command_listing.py index 7c4afd7..2881bed 100644 --- a/app/presentation/bot/commands/command_listing.py +++ b/app/presentation/bot/commands/command_listing.py @@ -19,4 +19,8 @@ class SampleRecordCommands: command_name="/create_record", description="Создать запись", ) + DELETE_RECORD = BotCommand( + command_name="/delete_record", + 
description="Удалить запись", + ) diff --git a/app/presentation/bot/commands/sample_record.py b/app/presentation/bot/commands/sample_record.py index 8b67cfb..aaac9e1 100644 --- a/app/presentation/bot/commands/sample_record.py +++ b/app/presentation/bot/commands/sample_record.py @@ -1,31 +1,64 @@ +from typing import Callable + from dependency_injector.providers import Factory -from dependency_injector.wiring import Provider, inject +from dependency_injector.wiring import Provider, inject, Provide from pybotx import Bot, HandlerCollector, IncomingMessage from sqlalchemy.ext.asyncio import AsyncSession +from app.application.repository.interfaces import ISampleRecordRepository from app.application.use_cases.interfaces import ISampleRecordUseCases -from app.infrastructure.containers import BotSampleRecordCommandContainer +from app.infrastructure.containers import ( + BotSampleRecordCommandContainer, + ApplicationStartupContainer, +) from app.infrastructure.db.sqlalchemy import provide_session +from app.infrastructure.repositories.unit_of_work import WriteSampleRecordUnitOfWork from app.presentation.bot.commands.command_listing import SampleRecordCommands -from app.presentation.bot.command_handlers.sample_record import CreateSampleRecordHandler +from app.presentation.bot.command_handlers.sample_record import ( + CreateSampleRecordHandler, + DeleteSampleRecordHandler, +) collector = HandlerCollector() + @collector.command(**SampleRecordCommands.CREATE_RECORD.command_data()) -@provide_session +# @provide_session @inject async def create_sample_record( message: IncomingMessage, bot: Bot, - session: AsyncSession, - record_use_cases_factory: Factory[ISampleRecordUseCases] = Provider[ + # session: AsyncSession, + unit_of_work: WriteSampleRecordUnitOfWork=Provide[BotSampleRecordCommandContainer.rw_unit_of_work], + record_use_cases_factory: Callable[[ISampleRecordRepository], ISampleRecordUseCases] = Provider[ BotSampleRecordCommandContainer.record_use_cases_factory ], ) -> 
None: """Creates a sample record in the database.""" - await CreateSampleRecordHandler( + handler = CreateSampleRecordHandler( bot=bot, message=message, - use_cases=record_use_cases_factory.provider(session), + use_case_factory=record_use_cases_factory, + unit_of_work=unit_of_work, + ) + await handler.execute() + + +@collector.command(**SampleRecordCommands.DELETE_RECORD.command_data()) +# @provide_session +@inject +async def delete_sample_record( + message: IncomingMessage, + bot: Bot, + session: AsyncSession, + record_use_cases_factory: Factory[ISampleRecordUseCases] = Provider[ + BotSampleRecordCommandContainer.record_use_cases_factory + ], +) -> None: + """Delete a sample record in the database.""" + await DeleteSampleRecordHandler( + bot=bot, + message=message, + use_cases=record_use_cases_factory(session), ).execute() diff --git a/app/presentation/bot/error_handlers/base_handlers.py b/app/presentation/bot/error_handlers/base_handlers.py index 1fa6fbc..08f0f16 100644 --- a/app/presentation/bot/error_handlers/base_handlers.py +++ b/app/presentation/bot/error_handlers/base_handlers.py @@ -15,7 +15,7 @@ def __init__( stop_on_failure: bool = False, break_the_chain: bool = False, ): - self._next_handler = next_handler + self.next_handler = next_handler self._stop_on_failure = stop_on_failure self._break_the_chain = break_the_chain @@ -45,18 +45,25 @@ async def handle_exception( if self.should_process_exception(exc, bot, message): try: await self.process_exception(exc, bot, message, exception_id) - if self._next_handler and not self._break_the_chain: - await self._next_handler.handle_exception( + if self.next_handler and not self._break_the_chain: + await self.next_handler.handle_exception( exc, bot, message, exception_id ) except Exception as exc: - logger.error( - f"Error handling exception {exception_id}: {exc}", exc_info=True + logger.opt(exception=exc).error( + f"Error handling exception {exception_id}" ) if self._stop_on_failure: return - if 
self._next_handler: - await self._next_handler.process_exception(exc, bot, message) + if self.next_handler: + await self.next_handler.process_exception( + exc, bot, message, exception_id + ) + else: + if self.next_handler: + await self.next_handler.handle_exception( + exc, bot, message, exception_id + ) class LoggingExceptionHandler(AbstractExceptionHandler): @@ -65,7 +72,7 @@ def should_process_exception( ) -> bool: return True - def process_exception( + async def process_exception( self, exc: Exception, bot: Bot, @@ -109,13 +116,15 @@ async def _get_exception_message_for_user( message: IncomingMessage, exception_id: UUID | None = None, ) -> str: - if explanation := self.exception_explain_mapping.get(type(exc)) is not None: + if (explanation := self.exception_explain_mapping.get(type(exc))) is not None: if isinstance(explanation, str): raw_explanation = explanation else: raw_explanation = explanation(exc, bot, message, exception_id) - return f"{raw_explanation}. Идентификатор ошибки:{exception_id}" + return strings.SAMPLE_RECORD_BAD_DATA_FORMAT.format( + explanation=raw_explanation, exception_id=exception_id + ) return strings.SOMETHING_GOES_WRONG.format(error_uuid=exception_id) @@ -126,7 +135,9 @@ async def process_exception( message: IncomingMessage, exception_id: UUID | None, ) -> None: - message_text = await self._get_exception_message_for_user(exc, bot, message) + message_text = await self._get_exception_message_for_user( + exc, bot, message, exception_id + ) await bot.answer_message( message_text, wait_callback=not isinstance(exc, BotShuttingDownError), diff --git a/app/presentation/bot/error_handlers/exceptions_chain_executor.py b/app/presentation/bot/error_handlers/exceptions_chain_executor.py index d896038..8b0ad1f 100644 --- a/app/presentation/bot/error_handlers/exceptions_chain_executor.py +++ b/app/presentation/bot/error_handlers/exceptions_chain_executor.py @@ -62,14 +62,14 @@ def _create_chain( if not handlers: return None, None - first_handler = 
self._get_handler(handlers[0]) + head_handler = self._get_handler(handlers[0]) - last_handler = first_handler + tail_handler = head_handler for handler in handlers[1:]: - next_handler = self._get_handler(handler) - last_handler.next_handler = next_handler - last_handler = next_handler - return first_handler, last_handler + new_tail_handler = self._get_handler(handler) + tail_handler.next_handler = new_tail_handler + tail_handler = new_tail_handler + return head_handler, tail_handler def extend( self, handlers: list[AbstractExceptionHandler | type[AbstractExceptionHandler]] diff --git a/app/presentation/bot/resources/strings.py b/app/presentation/bot/resources/strings.py index e30f8e4..e8b82a7 100644 --- a/app/presentation/bot/resources/strings.py +++ b/app/presentation/bot/resources/strings.py @@ -68,3 +68,11 @@ def _format(**kwargs: Any) -> str: SAMPLE_RECORD_CREATED_ANSWER = lookup.get_template( "sample_record_created_answer.txt.mako" ) + +SAMPLE_RECORD_DELETED_ANSWER = lookup.get_template( + "sample_record_deleted_answer.txt.mako" +) + +SAMPLE_RECORD_BAD_DATA_FORMAT = lookup.get_template( + "sample_record_bad_data_format.txt.mako" +) diff --git a/app/presentation/bot/resources/templates/sample_record/sample_record_bad_data_format.txt.mako b/app/presentation/bot/resources/templates/sample_record/sample_record_bad_data_format.txt.mako new file mode 100644 index 0000000..d494ee2 --- /dev/null +++ b/app/presentation/bot/resources/templates/sample_record/sample_record_bad_data_format.txt.mako @@ -0,0 +1,2 @@ +${explanation}. +**Идентификатор ошибки**: ${exception_id }. 
diff --git a/app/presentation/bot/resources/templates/sample_record/sample_record_created_answer.txt.mako b/app/presentation/bot/resources/templates/sample_record/sample_record_created_answer.txt.mako index ca77ab0..250629d 100644 --- a/app/presentation/bot/resources/templates/sample_record/sample_record_created_answer.txt.mako +++ b/app/presentation/bot/resources/templates/sample_record/sample_record_created_answer.txt.mako @@ -1,2 +1,2 @@ Запись успешно создана: -**id**: ${ id } **record_data**: ${ record_data }. +**id**: ${ id } **name**: ${ name } **record_data**: ${ record_data }. diff --git a/app/presentation/bot/resources/templates/sample_record/sample_record_deleted_answer.txt.mako b/app/presentation/bot/resources/templates/sample_record/sample_record_deleted_answer.txt.mako new file mode 100644 index 0000000..2e6b655 --- /dev/null +++ b/app/presentation/bot/resources/templates/sample_record/sample_record_deleted_answer.txt.mako @@ -0,0 +1 @@ +Запись c **id**: ${ id } успешно удалена. 
diff --git a/app/presentation/bot/schemas/sample_record.py b/app/presentation/bot/schemas/sample_record.py index eda4e59..520c2d7 100644 --- a/app/presentation/bot/schemas/sample_record.py +++ b/app/presentation/bot/schemas/sample_record.py @@ -29,6 +29,9 @@ class SampleRecordCreateRequestSchema( record_data: str = Field(..., min_length=1, max_length=128) name: str = Field(..., min_length=1, max_length=32) + class Config: + orm_mode = True + class SampleRecordDeleteRequestSchema(BaseModel): id: int @@ -43,3 +46,6 @@ class SampleRecordUpdateRequestSchema(BaseModel): def _from_plain_message_data(cls, message_data: str) -> Self: record_id, record_name, record_data = message_data.split(" ") return cls(id=record_id, record_name=record_name, record_data=record_data) # type: ignore[arg-type] + + class Config: + orm_mode = True diff --git a/app/presentation/bot/validators/base.py b/app/presentation/bot/validators/base.py index 7a143e3..e8f5d37 100644 --- a/app/presentation/bot/validators/base.py +++ b/app/presentation/bot/validators/base.py @@ -6,6 +6,8 @@ from pybotx import IncomingMessage from pydantic import BaseModel, ValidationError +from app.decorators.mapper.exception_mapper import ExceptionMapper +from app.decorators.mapper.factories import EnrichedExceptionFactory from app.presentation.bot.validators.exceptions import MessageValidationError T = TypeVar("T", bound=BaseModel) @@ -21,13 +23,37 @@ class BotXJsonRequestParser(IBotRequestParser[T]): def __init__(self, model: type[T]): self.model = model + @ExceptionMapper( + { + (JSONDecodeError, ValidationError): EnrichedExceptionFactory( + MessageValidationError + ) + }, + is_bound_method=True, + ) def parse(self, raw_input: IncomingMessage) -> T: - try: - message_json = orjson.loads(raw_input.argument) - return self.model.parse_obj(message_json) - except JSONDecodeError as ex: - raise MessageValidationError(str(ex)) from ex - except ValidationError as ex: - raise MessageValidationError( - ",".join(error["msg"] for 
error in ex.errors()) - ) from ex + message_json = orjson.loads(raw_input.argument) + return self.model.parse_obj(message_json) + + +class BotXPlainRequestParser(IBotRequestParser[T]): + """Base parser which tries to create a schema from positional arguments. + + Recommended to use strict model object creation with manual kwargs + """ + + def __init__(self, model: type[T]): + self.model = model + + @ExceptionMapper( + {ValidationError: EnrichedExceptionFactory(MessageValidationError)}, + is_bound_method=True, + ) + def parse(self, raw_input: IncomingMessage) -> T: + if not (message_args := raw_input.argument.strip().split(" ")): + raise ValidationError("Message is empty") + + fields = self.model.__fields__.keys() + message_kwargs = dict(zip(fields, message_args)) + + return self.model.parse_obj(message_kwargs) diff --git a/app/presentation/bot/validators/sample_record.py b/app/presentation/bot/validators/sample_record.py deleted file mode 100644 index 592aa10..0000000 --- a/app/presentation/bot/validators/sample_record.py +++ /dev/null @@ -1,27 +0,0 @@ -import orjson -from orjson import JSONDecodeError -from pybotx import IncomingMessage -from pydantic import ValidationError - -from app.decorators.mapper.exception_mapper import ExceptionMapper -from app.decorators.mapper.factories import EnrichedExceptionFactory -from app.presentation.bot.schemas.sample_record import SampleRecordCreateRequestSchema -from app.presentation.bot.validators.base import IBotRequestParser -from app.presentation.bot.validators.exceptions import MessageValidationError - - -class SampleRecordJsonCreateRequestValidator( - IBotRequestParser[SampleRecordCreateRequestSchema] -): - @ExceptionMapper( - { - (JSONDecodeError, ValidationError): EnrichedExceptionFactory( - MessageValidationError - ) - }, - is_bound_method=True, - ) - def parse(self, raw_input: IncomingMessage) -> SampleRecordCreateRequestSchema: - message_json = orjson.loads(raw_input.argument) - # TODO replace to model_validate during
migration to pydantic 2.0 - return SampleRecordCreateRequestSchema.parse_obj(message_json) diff --git a/tests/unit/error_handlers/__init__.py b/tests/unit/error_handlers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/error_handlers/test_chain_executor.py b/tests/unit/error_handlers/test_chain_executor.py new file mode 100644 index 0000000..565f313 --- /dev/null +++ b/tests/unit/error_handlers/test_chain_executor.py @@ -0,0 +1,89 @@ +from unittest.mock import MagicMock + +from app.presentation.bot.error_handlers.base_handlers import AbstractExceptionHandler +from app.presentation.bot.error_handlers.exceptions_chain_executor import ( + ExceptionHandlersChainExecutor, +) +from tests.unit.error_handlers.test_classes import TestExceptionHandler + + +async def test_chain_usual_case(): + execution_history = [] + handlers = [ + TestExceptionHandler(index=index, run_history=execution_history) + for index in range(3) + ] + + chain_executor = ExceptionHandlersChainExecutor(handlers) + + await chain_executor.execute_chain(MagicMock(), MagicMock(), MagicMock()) + + assert execution_history == [0, 1, 2] + + +async def test_executor_call_only_right_handlers(): + execution_history = [] + handlers = [ + TestExceptionHandler( + index=index, + run_history=execution_history, + should_process_exception=bool(index % 2), + ) + for index in range(4) + ] + + chain_executor = ExceptionHandlersChainExecutor(handlers) + + await chain_executor.execute_chain(MagicMock(), MagicMock(), MagicMock()) + + assert execution_history == [1, 3] + + +async def test_executor_deal_with_only_one_handler(): + execution_history = [] + handlers = [ + TestExceptionHandler( + index=1, + run_history=execution_history, + ) + ] + + chain_executor = ExceptionHandlersChainExecutor(handlers) + + await chain_executor.execute_chain(MagicMock(), MagicMock(), MagicMock()) + + assert execution_history == [1] + + +async def test_executor_deal_with_only_one_non_executed_handler(): + history = 
[] + handlers = [ + TestExceptionHandler( + index=1, run_history=history, should_process_exception=False + ) + ] + + chain_executor = ExceptionHandlersChainExecutor(handlers) + + await chain_executor.execute_chain(MagicMock(), MagicMock(), MagicMock()) + + assert history == [] + + +async def test_executor_break_the_chain_if_needed(): + execution_history = [] + handlers = [ + TestExceptionHandler( + index=index, run_history=execution_history, break_the_chain=index >= 1 + ) + for index in range(4) + ] + + chain_executor = ExceptionHandlersChainExecutor(handlers) + + await chain_executor.execute_chain(MagicMock(), MagicMock(), MagicMock()) + + assert execution_history == [ + 0, + 1, + ] diff --git a/tests/unit/error_handlers/test_classes.py b/tests/unit/error_handlers/test_classes.py new file mode 100644 index 0000000..f3f59f5 --- /dev/null +++ b/tests/unit/error_handlers/test_classes.py @@ -0,0 +1,27 @@ +from pybotx import Bot, IncomingMessage + +from app.presentation.bot.error_handlers.base_handlers import AbstractExceptionHandler + + +class TestExceptionHandler(AbstractExceptionHandler): + def __init__( + self, + index: int, + run_history: list, + should_process_exception: bool = True, + stop_on_failure: bool = False, + break_the_chain: bool = False, + ): + self.index = index + self.should_process_exception_flag = should_process_exception + self.run_history = run_history + + super().__init__(None,stop_on_failure,break_the_chain) + + def should_process_exception( + self, exc: Exception, bot: Bot, message: IncomingMessage + ) -> bool: + return self.should_process_exception_flag + + async def process_exception(self, *args, **kwargs): + self.run_history.append(self.index) From a8433643f6353e338adb9b7c9533d44edfc4e032 Mon Sep 17 00:00:00 2001 From: vladimirgubarik Date: Thu, 7 Aug 2025 23:57:20 +0300 Subject: [PATCH 08/15] add integration tests for bot commands --- app/application/repository/interfaces.py | 7 +- app/application/use_cases/record_use_cases.py | 2 +- 
app/decorators/mapper/context.py | 7 ++ app/decorators/mapper/exception_mapper.py | 8 +- .../repositories/sample_record.py | 12 +-- app/main.py | 1 - .../bot/command_handlers/sample_record.py | 20 +++-- .../bot/commands/sample_record.py | 26 ++++--- .../bot/middlewares/answer_error.py | 1 + tests/conftest.py | 63 +-------------- .../integration/bot_commands/bot_factories.py | 77 +++++++++++++++++++ tests/integration/bot_commands/conftest.py | 47 +++++++++++ .../bot_commands/sample_record_factories.py | 32 ++++++++ tests/integration/bot_commands/test_common.py | 8 +- .../bot_commands/test_sample_record.py | 64 +++++++++++++++ tests/integration/conftest.py | 13 +++- tests/integration/factories.py | 2 + 17 files changed, 284 insertions(+), 106 deletions(-) create mode 100644 tests/integration/bot_commands/bot_factories.py create mode 100644 tests/integration/bot_commands/conftest.py create mode 100644 tests/integration/bot_commands/sample_record_factories.py create mode 100644 tests/integration/bot_commands/test_sample_record.py diff --git a/app/application/repository/interfaces.py b/app/application/repository/interfaces.py index 4ddadfc..d57ebc0 100644 --- a/app/application/repository/interfaces.py +++ b/app/application/repository/interfaces.py @@ -1,7 +1,7 @@ """Record repository interface.""" from abc import ABC, abstractmethod -from typing import List +from typing import List, Self from app.domain.entities.sample_record import SampleRecord @@ -23,6 +23,7 @@ class ISampleRecordUnitOfWork(IUnitOfWork, ABC): @abstractmethod def get_sample_record_repository(self) -> "ISampleRecordRepository": + """Return an initialized ISampleRecordRepository object implementation.""" pass @@ -81,7 +82,3 @@ async def get_all(self) -> List[SampleRecord]: """Get all records from the database""" pass - @abstractmethod - async def commit(self) -> None: - """Commit changes to the database""" - pass diff --git a/app/application/use_cases/record_use_cases.py 
b/app/application/use_cases/record_use_cases.py index e650d05..0f1de9f 100644 --- a/app/application/use_cases/record_use_cases.py +++ b/app/application/use_cases/record_use_cases.py @@ -12,7 +12,7 @@ class SampleRecordUseCases(ISampleRecordUseCases): - """Implementation of record use cases.""" + """Implementation of samplr record use cases.""" def __init__(self, record_repo: ISampleRecordRepository): self._repo = record_repo diff --git a/app/decorators/mapper/context.py b/app/decorators/mapper/context.py index f4e34c3..085bf5f 100644 --- a/app/decorators/mapper/context.py +++ b/app/decorators/mapper/context.py @@ -3,6 +3,7 @@ class ExceptionContext: + """Class to store exception rising context.""" SENSITIVE_KEYS: frozenset[str] = frozenset( ("password", "token", "key", "secret", "auth", "credential", "passwd") ) @@ -21,6 +22,8 @@ def __init__( @cached_property def formatted_context(self) -> str: + """Format exception context for logging. + """ error_context = [ f"Error in function '{self.func.__module__}.{self.func.__qualname__}'" ] @@ -42,6 +45,10 @@ def _sanitised_value( value: Any, key: str | None = None, ) -> str: + """Exclude sensitive data from logging + + TODO: add deeper sanitation for nested structures + """ if key is not None and key.lower() in self.SENSITIVE_KEYS: return "****HIDDEN****" diff --git a/app/decorators/mapper/exception_mapper.py b/app/decorators/mapper/exception_mapper.py index c1771bc..9f4717f 100644 --- a/app/decorators/mapper/exception_mapper.py +++ b/app/decorators/mapper/exception_mapper.py @@ -14,7 +14,10 @@ class ExceptionMapper: - """Exception-mapping decorator with bounded LRU caching and dynamic MRO lookup.""" + """Exception-mapping decorator with bounded LRU caching and dynamic MRO lookup. + + The main decorator purpose is map exception between application layers and enrich exceptions by context. 
+ """ def __init__( self, @@ -38,6 +41,7 @@ def _get_exceptions_flat_map( self, exception_map: dict[ExceptionOrTupleOfExceptions, ExceptionFactory], ) -> dict[Type[Exception], ExceptionFactory]: + """Do a flat map from given exception map.""" flat_map: dict[Type[Exception], ExceptionFactory] = {} for exception_class, factory in exception_map.items(): if isinstance(exception_class, tuple): @@ -96,7 +100,7 @@ def _get_exception_factory( self._lru_cache[exc_type] = target_exception_factory return target_exception_factory - # exception is not presented in base mapping, but Exception in base mapping + # exception is not presented in base mapping, but catchall presented in mapping dict if self.exception_catchall_factory: self._lru_cache[exc_type] = self.exception_catchall_factory return self.exception_catchall_factory diff --git a/app/infrastructure/repositories/sample_record.py b/app/infrastructure/repositories/sample_record.py index 68e7cca..9525017 100644 --- a/app/infrastructure/repositories/sample_record.py +++ b/app/infrastructure/repositories/sample_record.py @@ -13,7 +13,8 @@ RecordUpdateError, RecordAlreadyExistsError, ForeignKeyError, - ValidationError, BaseRepositoryError, + ValidationError, + BaseRepositoryError, ) from app.application.repository.interfaces import ISampleRecordRepository from app.decorators.mapper.exception_mapper import ( @@ -58,15 +59,6 @@ def __init__(self, session: AsyncSession): """ self._session = session - @ExceptionMapper( - { - Exception: EnrichedExceptionFactory(BaseRepositoryError), - }, - is_bound_method=True, - ) - async def commit(self) -> None: - await self._session.commit() - @ExceptionMapper( { IntegrityError: IntegrityErrorFactory(RecordCreateError), diff --git a/app/main.py b/app/main.py index b1d61df..49f9dc3 100644 --- a/app/main.py +++ b/app/main.py @@ -33,7 +33,6 @@ async def shutdown( await container.callback_task_manager().shutdown() await container.redis_client().aclose() - await container.shutdown_resources() await 
get_engine().dispose() diff --git a/app/presentation/bot/command_handlers/sample_record.py b/app/presentation/bot/command_handlers/sample_record.py index 4850d1e..d0f2cd7 100644 --- a/app/presentation/bot/command_handlers/sample_record.py +++ b/app/presentation/bot/command_handlers/sample_record.py @@ -7,8 +7,10 @@ RecordAlreadyExistsError, RecordDoesNotExistError, ) -from app.application.repository.interfaces import ISampleRecordUnitOfWork, \ - ISampleRecordRepository +from app.application.repository.interfaces import ( + ISampleRecordUnitOfWork, + ISampleRecordRepository, +) from app.application.use_cases.interfaces import ISampleRecordUseCases from app.presentation.bot.command_handlers.base_handler import BaseCommandHandler from app.presentation.bot.error_handlers.exceptions_chain_executor import ( @@ -54,7 +56,7 @@ def __init__( bot: Bot, message: IncomingMessage, unit_of_work: ISampleRecordUnitOfWork, - use_case_factory: Callable[[ISampleRecordRepository], ISampleRecordUseCases] + use_case_factory: Callable[[ISampleRecordRepository], ISampleRecordUseCases], ): self._use_cases = use_case_factory self.unit_of_work = unit_of_work @@ -97,9 +99,11 @@ def __init__( self, bot: Bot, message: IncomingMessage, - use_cases: ISampleRecordUseCases, + unit_of_work: ISampleRecordUnitOfWork, + use_case_factory: Callable[[ISampleRecordRepository], ISampleRecordUseCases], ): - self._use_cases = use_cases + self._use_cases = use_case_factory + self.unit_of_work = unit_of_work super().__init__(bot, message, self.exception_handler_chain_executor) @@ -107,7 +111,11 @@ async def handle_logic( self, request_parameter: SampleRecordDeleteRequestSchema, # type: ignore ) -> None: - await self._use_cases.delete_record(request_parameter.id) + async with self.unit_of_work as uof: + await self._use_cases(uof.get_sample_record_repository()).delete_record( + request_parameter.id + ) + await self._bot.answer_message( SAMPLE_RECORD_DELETED_ANSWER.format( id=request_parameter.id, diff --git 
a/app/presentation/bot/commands/sample_record.py b/app/presentation/bot/commands/sample_record.py index aaac9e1..beb505d 100644 --- a/app/presentation/bot/commands/sample_record.py +++ b/app/presentation/bot/commands/sample_record.py @@ -11,7 +11,7 @@ BotSampleRecordCommandContainer, ApplicationStartupContainer, ) -from app.infrastructure.db.sqlalchemy import provide_session + from app.infrastructure.repositories.unit_of_work import WriteSampleRecordUnitOfWork from app.presentation.bot.commands.command_listing import SampleRecordCommands from app.presentation.bot.command_handlers.sample_record import ( @@ -23,16 +23,16 @@ @collector.command(**SampleRecordCommands.CREATE_RECORD.command_data()) -# @provide_session @inject async def create_sample_record( message: IncomingMessage, bot: Bot, - # session: AsyncSession, - unit_of_work: WriteSampleRecordUnitOfWork=Provide[BotSampleRecordCommandContainer.rw_unit_of_work], - record_use_cases_factory: Callable[[ISampleRecordRepository], ISampleRecordUseCases] = Provider[ - BotSampleRecordCommandContainer.record_use_cases_factory + unit_of_work: WriteSampleRecordUnitOfWork = Provide[ + BotSampleRecordCommandContainer.rw_unit_of_work ], + record_use_cases_factory: Callable[ + [ISampleRecordRepository], ISampleRecordUseCases + ] = Provider[BotSampleRecordCommandContainer.record_use_cases_factory], ) -> None: """Creates a sample record in the database.""" handler = CreateSampleRecordHandler( @@ -46,19 +46,21 @@ async def create_sample_record( @collector.command(**SampleRecordCommands.DELETE_RECORD.command_data()) -# @provide_session @inject async def delete_sample_record( message: IncomingMessage, bot: Bot, - session: AsyncSession, - record_use_cases_factory: Factory[ISampleRecordUseCases] = Provider[ - BotSampleRecordCommandContainer.record_use_cases_factory - ], + unit_of_work: WriteSampleRecordUnitOfWork = Provide[ + BotSampleRecordCommandContainer.rw_unit_of_work + ], + record_use_cases_factory: Callable[ + 
[ISampleRecordRepository], ISampleRecordUseCases + ] = Provider[BotSampleRecordCommandContainer.record_use_cases_factory], ) -> None: """Delete a sample record in the database.""" await DeleteSampleRecordHandler( bot=bot, message=message, - use_cases=record_use_cases_factory(session), + use_case_factory=record_use_cases_factory, + unit_of_work=unit_of_work, ).execute() diff --git a/app/presentation/bot/middlewares/answer_error.py b/app/presentation/bot/middlewares/answer_error.py index d94eb6c..7a1041f 100644 --- a/app/presentation/bot/middlewares/answer_error.py +++ b/app/presentation/bot/middlewares/answer_error.py @@ -19,6 +19,7 @@ async def answer_error_middleware( message: IncomingMessage, bot: Bot, call_next: IncomingMessageHandlerFunc ) -> None: + """Middleware, used for catching and logging unhandled AnswerError and AnswerMessageError.""" try: await call_next(message, bot) except AnswerError as exc: diff --git a/tests/conftest.py b/tests/conftest.py index ede2009..d9f7981 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,18 +1,10 @@ import asyncio from datetime import datetime -from typing import Any, Callable, Dict, Optional -from uuid import UUID, uuid4 +from typing import Any, Dict +from uuid import UUID import jwt import pytest -from pybotx import ( - BotAccount, - Chat, - ChatTypes, - IncomingMessage, - UserDevice, - UserSender, -) from testcontainers.postgres import PostgresContainer # type: ignore from app.settings import settings @@ -72,54 +64,3 @@ def authorization_header( return {"authorization": f"Bearer {token}"} -@pytest.fixture -def incoming_message_factory( - bot_id: UUID, - user_huid: UUID, - host: str, -) -> Callable[..., IncomingMessage]: - def factory( - *, - body: str = "", - ad_login: Optional[str] = None, - ad_domain: Optional[str] = None, - ) -> IncomingMessage: - return IncomingMessage( - bot=BotAccount( - id=bot_id, - host=host, - ), - sync_id=uuid4(), - source_sync_id=None, - body=body, - data={}, - metadata={}, - 
sender=UserSender( - huid=user_huid, - udid=None, - ad_login=ad_login, - ad_domain=ad_domain, - username=None, - is_chat_admin=True, - is_chat_creator=True, - device=UserDevice( - manufacturer=None, - device_name=None, - os=None, - pushes=None, - timezone=None, - permissions=None, - platform=None, - platform_package_id=None, - app_version=None, - locale=None, - ), - ), - chat=Chat( - id=uuid4(), - type=ChatTypes.PERSONAL_CHAT, - ), - raw_command=None, - ) - - return factory diff --git a/tests/integration/bot_commands/bot_factories.py b/tests/integration/bot_commands/bot_factories.py new file mode 100644 index 0000000..a403fa0 --- /dev/null +++ b/tests/integration/bot_commands/bot_factories.py @@ -0,0 +1,77 @@ +from typing import Callable, Optional, Any +from uuid import UUID, uuid4 + +import factory +import pytest +from factory import Factory +from pybotx import IncomingMessage, BotAccount, UserSender, UserDevice, Chat, ChatTypes, \ + ClientPlatforms + + + +class BotAccountFactory(Factory): + """Factory for bot accounts.""" + id:UUID=factory.Faker("uuid4") + host:str|None=None + class Meta: + model = BotAccount + + +class ChatFactory(Factory): + """Factory for chats.""" + + id: UUID = factory.Faker("uuid4") + type: ChatTypes = ChatTypes.PERSONAL_CHAT + + class Meta: + model = Chat + + +class UserDeviceFactory(Factory): + """Factory for user devices.""" + manufacturer: str | None = (None,) + device_name: str | None = (None,) + os: str | None = (None,) + pushes: str | None = (None,) + timezone: str | None = (None,) + permissions: str | None = (None,) + platform: ClientPlatforms | None = (None,) + platform_package_id: str | None = (None,) + app_version: str | None = (None,) + locale: str | None = (None,) + + class Meta: + model = UserDevice + + +class UserSenderFactory(Factory): + """Factory for user senders.""" + + huid: UUID = factory.Faker("uuid4") + udid = None + ad_login: Optional[str] = None + ad_domain: Optional[str] = None + username: Optional[str] = None + 
is_chat_admin: bool = True + is_chat_creator: bool = True + device: UserDevice = factory.SubFactory(UserDeviceFactory) + + class Meta: + model = UserSender + + +class IncomingMessageFactory(Factory): + """Factory for incoming messages.""" + + bot: BotAccount = factory.SubFactory(BotAccountFactory) + sync_id: UUID = factory.Faker("uuid4") + source_sync_id: Optional[UUID] = None + body: str = factory.Faker("text", max_nb_chars=100) + data: dict[str,Any] = {} + metadata: dict = {} + sender: UserSender = factory.SubFactory(UserSenderFactory) + chat: Chat = factory.SubFactory(ChatFactory) + raw_command: Optional[str] = None + + class Meta: + model = IncomingMessage diff --git a/tests/integration/bot_commands/conftest.py b/tests/integration/bot_commands/conftest.py new file mode 100644 index 0000000..0281e44 --- /dev/null +++ b/tests/integration/bot_commands/conftest.py @@ -0,0 +1,47 @@ +from typing import Callable, Optional +from uuid import UUID, uuid4 + +import pytest +from pybotx import IncomingMessage, BotAccount, UserSender, UserDevice, Chat, ChatTypes + +from tests.integration.bot_commands.bot_factories import IncomingMessageFactory + + +@pytest.fixture +def message_from_user( + bot_id: UUID, +): + return IncomingMessageFactory.create( + bot__id=bot_id, + ) + + +@pytest.fixture +def command_message_from_user( + bot_id: UUID, +) -> Callable[[str, str], IncomingMessage]: + def factory(command: str, args: str) -> IncomingMessage: + body = f"{command} {args}" + return IncomingMessageFactory.create(bot__id=bot_id, body=body) + + return factory + + +@pytest.fixture +def create_sample_record_command_message_from_user_factory( + command_message_from_user, +) -> Callable[[str], IncomingMessage]: + def factory(args: str) -> IncomingMessage: + return command_message_from_user("/create_record", args) + + return factory + + +@pytest.fixture +def delete_sample_record_command_message_from_user_factory( + command_message_from_user, +) -> Callable[[int], IncomingMessage]: + def 
factory(object_id: int) -> IncomingMessage: + return command_message_from_user("/delete_record", str(object_id)) + + return factory diff --git a/tests/integration/bot_commands/sample_record_factories.py b/tests/integration/bot_commands/sample_record_factories.py new file mode 100644 index 0000000..ff13a0a --- /dev/null +++ b/tests/integration/bot_commands/sample_record_factories.py @@ -0,0 +1,32 @@ +import json + +import factory +from factory import DictFactory + + +class JsonDict(dict): + def json(self, **kwargs): + return json.dumps(self, **kwargs) + + +class JsonableFactory(DictFactory): + @classmethod + def build(cls, **kwargs): + return JsonDict(super().build(**kwargs)) + + @classmethod + def _generate(cls, strategy, params): + """Override the core method used by `__call__()`""" + dict_obj = super()._generate(strategy, params) + return JsonDict(dict_obj) + + +class CreateSampleRecordRequestFactory(JsonableFactory): + record_data: str = factory.Faker("text", max_nb_chars=8) + name = factory.Faker("text", max_nb_chars=8) + + +class UpdateSampleRecordRequestFactory(JsonableFactory): + id: int = factory.Faker("integer") + record_data: str | None = factory.Faker("text", max_nb_chars=8) + name: str | None = factory.Faker("text", max_nb_chars=8) diff --git a/tests/integration/bot_commands/test_common.py b/tests/integration/bot_commands/test_common.py index 62db381..086940f 100644 --- a/tests/integration/bot_commands/test_common.py +++ b/tests/integration/bot_commands/test_common.py @@ -17,13 +17,10 @@ async def test_default_message_handler( bot: Bot, - incoming_message_factory: Callable[..., IncomingMessage], + message_from_user: IncomingMessage, ) -> None: - # - Arrange - - message = incoming_message_factory() - # - Act - - await bot.async_execute_bot_command(message) + await bot.async_execute_bot_command(message_from_user) # - Assert - bot.answer_message.assert_awaited_once_with("Hello!") # type: ignore @@ -75,4 +72,3 @@ async def test_chat_created_handler( ), 
bubbles=BubbleMarkup([[Button(command="/help", label="/help")]]), ) - diff --git a/tests/integration/bot_commands/test_sample_record.py b/tests/integration/bot_commands/test_sample_record.py new file mode 100644 index 0000000..2d25fc6 --- /dev/null +++ b/tests/integration/bot_commands/test_sample_record.py @@ -0,0 +1,64 @@ +import json +from typing import Callable +from unittest.mock import MagicMock + +from pybotx import Bot, IncomingMessage +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.infrastructure.db.sample_record.models import SampleRecordModel +from app.presentation.bot.resources import strings +from tests.factories import SampleRecordFactory +from tests.integration.bot_commands.sample_record_factories import ( + CreateSampleRecordRequestFactory, + UpdateSampleRecordRequestFactory, +) +from tests.integration.factories import SampleRecordModelFactory + + +async def test_sample_record_created( + bot: Bot, + create_sample_record_command_message_from_user_factory, + isolated_session: AsyncSession, +): + """Test creating a record usual way.""" + request_data = CreateSampleRecordRequestFactory.create() + message = create_sample_record_command_message_from_user_factory( + request_data.json() + ) + await bot.async_execute_bot_command(message) + + # Check db object existing + db_object: SampleRecordModel = await isolated_session.scalar( + select(SampleRecordModel).where(SampleRecordModel.name == request_data["name"]) + ) + + assert db_object.record_data == request_data["record_data"] + assert db_object.name == request_data["name"] + + # Check bot answer + assert bot.answer_message.call_args[0][0] == ( + f"Запись успешно создана:\n**id**: {db_object.id} " + f"**name**: {db_object.name} " + f"**record_data**: {db_object.record_data}." 
+ ) + + +async def test_sample_record_delete( + bot: Bot, + isolated_session: AsyncSession, + sample_record_factory, + delete_sample_record_command_message_from_user_factory, +): + """Test creating a record usual way.""" + existing_record: SampleRecordModel = await sample_record_factory.create() + + message = delete_sample_record_command_message_from_user_factory(existing_record.id) + await bot.async_execute_bot_command(message) + + # Check db object non existing + db_object: SampleRecordModel = await isolated_session.scalar( + select(SampleRecordModel).where(SampleRecordModel.id == existing_record.id) + ) + + assert db_object is None diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 875fc11..26bb3a0 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -24,6 +24,7 @@ from testcontainers.postgres import PostgresContainer from testcontainers.redis import RedisContainer +import app.infrastructure.db.sqlalchemy from app.infrastructure.db.sqlalchemy import AsyncSessionFactory, make_url_async from app.infrastructure.repositories.sample_record import SampleRecordRepository from app.main import get_application @@ -90,14 +91,21 @@ def alembic_configuration() -> Config: async def isolated_session( db_session_factory: AsyncSessionFactory, alembic_configuration: Config ): - """Isolated session with proper rollback to prevent test data leaks.""" - command.upgrade(alembic_configuration, "head") async with db_session_factory() as session: yield session command.downgrade(alembic_configuration, "base") +@pytest.fixture +def override_session_factory(isolated_session): + with patch( + "app.infrastructure.db.sqlalchemy.get_session_factory", + return_value=AsyncMock(return_value=isolated_session), + ): + yield + + @pytest.fixture async def sample_record_repository(isolated_session) -> SampleRecordRepository: return SampleRecordRepository(isolated_session) @@ -122,6 +130,7 @@ async def fastapi_app( respx_mock: Callable[..., 
Any], # We can't apply pytest mark to fixture redis_container, postgres_container, + override_session_factory, ): fastapi_app = get_application() mock_authorization() diff --git a/tests/integration/factories.py b/tests/integration/factories.py index 60d8402..1506737 100644 --- a/tests/integration/factories.py +++ b/tests/integration/factories.py @@ -12,3 +12,5 @@ class Meta: record_data = factory.Faker("text", max_nb_chars=100) name = factory.Faker("text", max_nb_chars=32) + + From aa6ba1c55f35fd5b986b90f73d8d2d767a3b0a1f Mon Sep 17 00:00:00 2001 From: vladimirgubarik Date: Fri, 8 Aug 2025 01:20:05 +0300 Subject: [PATCH 09/15] lint+refactor containers --- app/application/repository/interfaces.py | 12 ++- app/application/use_cases/interfaces.py | 2 +- app/application/use_cases/record_use_cases.py | 2 +- app/decorators/mapper/context.py | 8 +- app/decorators/mapper/exception_mapper.py | 6 +- app/decorators/mapper/factories.py | 11 +-- app/domain/entities/sample_record.py | 1 - app/infrastructure/containers.py | 78 +++++++------------ app/infrastructure/db/sample_record/models.py | 2 +- app/infrastructure/db/sqlalchemy.py | 9 +-- .../repositories/sample_record.py | 11 ++- .../repositories/unit_of_work.py | 4 +- .../worker/tasks/simple_task.py | 8 +- app/infrastructure/worker/worker.py | 11 +-- app/main.py | 5 +- app/presentation/api/botx.py | 5 +- app/presentation/bot/bot.py | 6 +- .../bot/command_handlers/sample_record.py | 72 ++++++++++++++--- .../bot/commands/command_listing.py | 5 +- .../bot/commands/sample_record.py | 59 +++++++++----- .../bot/error_handlers/base_handlers.py | 43 ++++++++-- .../exceptions_chain_executor.py | 9 +-- .../bot/middlewares/answer_error.py | 6 +- .../bot/middlewares/smart_logger.py | 2 +- app/presentation/bot/resources/strings.py | 2 + .../show_sample_record_answer.txt.mako | 2 + app/presentation/bot/schemas/sample_record.py | 23 +++--- app/presentation/bot/validators/base.py | 4 +- app/settings.py | 2 +- tests/conftest.py | 10 +-- 
tests/factories.py | 18 +---- .../integration/bot_commands/bot_factories.py | 52 +++++++------ tests/integration/bot_commands/conftest.py | 20 +++-- .../bot_commands/sample_record_factories.py | 10 +-- tests/integration/bot_commands/test_common.py | 2 - .../bot_commands/test_sample_record.py | 44 +++++++---- tests/integration/conftest.py | 13 ++-- tests/integration/endpoints/test_botx.py | 1 - tests/integration/factories.py | 2 - .../test_sample_record_repository.py | 11 ++- .../test_sample_record_use_cases_int.py | 6 +- tests/unit/decorators/test_classes.py | 2 +- tests/unit/decorators/test_errors_mapper.py | 3 - .../error_handlers/test_chain_executor.py | 1 - tests/unit/error_handlers/test_classes.py | 2 +- 45 files changed, 343 insertions(+), 264 deletions(-) create mode 100644 app/presentation/bot/resources/templates/sample_record/show_sample_record_answer.txt.mako diff --git a/app/application/repository/interfaces.py b/app/application/repository/interfaces.py index d57ebc0..bc138f8 100644 --- a/app/application/repository/interfaces.py +++ b/app/application/repository/interfaces.py @@ -1,6 +1,7 @@ """Record repository interface.""" from abc import ABC, abstractmethod +from types import TracebackType from typing import List, Self from app.domain.entities.sample_record import SampleRecord @@ -10,11 +11,17 @@ class IUnitOfWork(ABC): """Interface for unit of work operations.""" @abstractmethod - async def __aenter__(self): + async def __aenter__(self) -> Self: return self @abstractmethod - async def __aexit__(self, exc_type, exc_val, exc_tb): + @abstractmethod + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: pass @@ -81,4 +88,3 @@ async def get_by_id(self, record_id: int) -> SampleRecord: async def get_all(self) -> List[SampleRecord]: """Get all records from the database""" pass - diff --git a/app/application/use_cases/interfaces.py 
b/app/application/use_cases/interfaces.py index f09db34..c49e014 100644 --- a/app/application/use_cases/interfaces.py +++ b/app/application/use_cases/interfaces.py @@ -11,7 +11,7 @@ class ISampleRecordUseCases(ABC): - """Interface for record use cases.""" + """Interface for the sample record use cases.""" @abstractmethod async def create_record( diff --git a/app/application/use_cases/record_use_cases.py b/app/application/use_cases/record_use_cases.py index 0f1de9f..924a436 100644 --- a/app/application/use_cases/record_use_cases.py +++ b/app/application/use_cases/record_use_cases.py @@ -12,7 +12,7 @@ class SampleRecordUseCases(ISampleRecordUseCases): - """Implementation of samplr record use cases.""" + """Implementation of sample record use cases.""" def __init__(self, record_repo: ISampleRecordRepository): self._repo = record_repo diff --git a/app/decorators/mapper/context.py b/app/decorators/mapper/context.py index 085bf5f..366b73b 100644 --- a/app/decorators/mapper/context.py +++ b/app/decorators/mapper/context.py @@ -1,9 +1,10 @@ from functools import cached_property -from typing import Callable, Any +from typing import Any, Callable class ExceptionContext: - """Class to store exception rising context.""" + """Class to get exception rising context.""" + SENSITIVE_KEYS: frozenset[str] = frozenset( ("password", "token", "key", "secret", "auth", "credential", "passwd") ) @@ -22,8 +23,7 @@ def __init__( @cached_property def formatted_context(self) -> str: - """Format exception context for logging. 
- """ + """Format exception context for logging.""" error_context = [ f"Error in function '{self.func.__module__}.{self.func.__qualname__}'" ] diff --git a/app/decorators/mapper/exception_mapper.py b/app/decorators/mapper/exception_mapper.py index 9f4717f..f1a90f3 100644 --- a/app/decorators/mapper/exception_mapper.py +++ b/app/decorators/mapper/exception_mapper.py @@ -8,7 +8,6 @@ from app.decorators.mapper.context import ExceptionContext from app.decorators.mapper.factories import ExceptionFactory -from app.logger import logger ExceptionOrTupleOfExceptions = Type[Exception] | tuple[Type[Exception], ...] @@ -16,7 +15,8 @@ class ExceptionMapper: """Exception-mapping decorator with bounded LRU caching and dynamic MRO lookup. - The main decorator purpose is map exception between application layers and enrich exceptions by context. + The main decorator purpose is map exception between application layers and enrich + exceptions by context. """ def __init__( @@ -100,7 +100,7 @@ def _get_exception_factory( self._lru_cache[exc_type] = target_exception_factory return target_exception_factory - # exception is not presented in base mapping, but catchall presented in mapping dict + # exception is not presented in base mapping, but catchall is presented if self.exception_catchall_factory: self._lru_cache[exc_type] = self.exception_catchall_factory return self.exception_catchall_factory diff --git a/app/decorators/mapper/factories.py b/app/decorators/mapper/factories.py index 99bf827..a4c2580 100644 --- a/app/decorators/mapper/factories.py +++ b/app/decorators/mapper/factories.py @@ -1,10 +1,13 @@ from abc import ABC, abstractmethod +from typing import Any from app.decorators.mapper.context import ExceptionContext class ContextAwareError(Exception): - def __init__(self, message: str, context: ExceptionContext = None, *args): + def __init__( + self, message: str, context: ExceptionContext | None = None, *args: Any + ): super().__init__(message, *args) self.context = context @@ 
-44,13 +47,11 @@ class EnrichedExceptionFactory(ExceptionFactory): Create and manage enriched exceptions based on a given exception type. This class provides a mechanism to create exceptions dynamically, - enriching them with a formatted context. It extends the behavior of - the base ExceptionFactory class by incorporating the concept of a - generated error type and formatted context. + enriching them with a formatted context. :ivar generated_error: The type of exception to generate when creating an enriched exception. - :type generated_error: type[Exception] + :type generated_error: type[ContextAwareError] """ def __init__(self, generated_error: type[ContextAwareError]): diff --git a/app/domain/entities/sample_record.py b/app/domain/entities/sample_record.py index 05eceb2..5644693 100644 --- a/app/domain/entities/sample_record.py +++ b/app/domain/entities/sample_record.py @@ -3,7 +3,6 @@ from dataclasses import dataclass - @dataclass class SampleRecord: """Record entity representing a simple record in the system.""" diff --git a/app/infrastructure/containers.py b/app/infrastructure/containers.py index 701ad6f..9c9f97d 100644 --- a/app/infrastructure/containers.py +++ b/app/infrastructure/containers.py @@ -1,10 +1,9 @@ import asyncio -from importlib import import_module from dependency_injector import containers, providers from dependency_injector.providers import Callable, Factory, Singleton from httpx import AsyncClient, Limits -from pybotx import Bot +from pybotx import Bot, HandlerCollector from redis import asyncio as aioredis from app.application.use_cases.record_use_cases import SampleRecordUseCases @@ -20,7 +19,6 @@ WriteSampleRecordUnitOfWork, ) from app.logger import logger - from app.presentation.bot.error_handlers.internal_error_handler import ( internal_error_handler, ) @@ -81,12 +79,12 @@ def __call__(self) -> asyncio.Task: return self._get_task() -class ApplicationStartupContainer(containers.DeclarativeContainer): - """Container for application 
startup dependencies.""" +class BaseStartupContainer(containers.DeclarativeContainer): + """Общий контейнер для старта бота.""" - redis_client = Singleton(lambda: aioredis.from_url(settings.REDIS_DSN)) + redis_client = providers.Singleton(lambda: aioredis.from_url(settings.REDIS_DSN)) - redis_repo = Factory( + redis_repo = providers.Factory( RedisRepo, redis=redis_client, prefix=strings.BOT_PROJECT_NAME, @@ -107,16 +105,8 @@ class ApplicationStartupContainer(containers.DeclarativeContainer): {} if not settings.RAISE_BOT_EXCEPTIONS else {Exception: internal_error_handler} ) - # Ленивая загрузка коллекторов - @staticmethod - def get_collectors(): - common = import_module("app.presentation.bot.commands.common") - sample_record = import_module("app.presentation.bot.commands.sample_record") - return [common.collector, sample_record.collector] - bot = providers.Singleton( Bot, - collectors=Callable(get_collectors), bot_accounts=settings.BOT_CREDENTIALS, exception_handlers=exception_handlers, # type: ignore default_callback_timeout=settings.BOTX_CALLBACK_TIMEOUT_IN_SECONDS, @@ -128,54 +118,40 @@ def get_collectors(): callback_repo=callback_repo, ) - # Используем менеджер задач для ленивой инициализации + +class ApplicationStartupContainer(BaseStartupContainer): + """Контейнер приложения с ленивой загрузкой collectors.""" + + @staticmethod + def get_collectors() -> list[HandlerCollector]: + from app.presentation.bot.commands.common import collector as common_collector + from app.presentation.bot.commands.sample_record import collector as sample_record_collector + return [common_collector, sample_record_collector] + + bot = providers.Singleton( + Bot, + collectors=providers.Callable(get_collectors), + **BaseStartupContainer.bot.kwargs, + ) + callback_task_manager = providers.Singleton( CallbackTaskManager, - callback_repo, + BaseStartupContainer.callback_repo, ) - # Провайдер который возвращает задачу через менеджер process_callbacks_task = providers.Callable( lambda 
manager: manager(), callback_task_manager, ) -class WorkerStartupContainer(containers.DeclarativeContainer): - redis_client = Singleton(lambda: aioredis.from_url(settings.REDIS_DSN)) - - redis_repo = Factory( - RedisRepo, - redis=redis_client, - prefix=strings.BOT_PROJECT_NAME, - ) - - async_client = providers.Singleton( - AsyncClient, - timeout=settings.BOT_ASYNC_CLIENT_TIMEOUT_IN_SECONDS, - limits=Limits(max_keepalive_connections=None, max_connections=None), - ) +class WorkerStartupContainer(BaseStartupContainer): + """Контейнер воркера с прямым импортом collectors.""" - callback_repo = providers.Singleton( - CallbackRedisRepo, - redis=redis_client, - ) from app.presentation.bot.commands import common, sample_record - exception_handlers = ( - {} if not settings.RAISE_BOT_EXCEPTIONS else {Exception: internal_error_handler} - ) - bot = providers.Singleton( Bot, - collectors=[common.collector, sample_record.collector], - bot_accounts=settings.BOT_CREDENTIALS, - exception_handlers=exception_handlers, # type: ignore - default_callback_timeout=settings.BOTX_CALLBACK_TIMEOUT_IN_SECONDS, - httpx_client=async_client, - middlewares=[ - smart_logger_middleware, - answer_error_middleware, - ], - callback_repo=callback_repo, - ) + collectors=[common.collector, sample_record.collector], # type:ignore + **BaseStartupContainer.bot.kwargs, + ) \ No newline at end of file diff --git a/app/infrastructure/db/sample_record/models.py b/app/infrastructure/db/sample_record/models.py index 52ab7aa..456da21 100644 --- a/app/infrastructure/db/sample_record/models.py +++ b/app/infrastructure/db/sample_record/models.py @@ -1,6 +1,6 @@ """Database models declarations.""" -from sqlalchemy import String, CheckConstraint +from sqlalchemy import CheckConstraint, String from sqlalchemy.orm import Mapped, mapped_column from app.infrastructure.db.sqlalchemy import Base diff --git a/app/infrastructure/db/sqlalchemy.py b/app/infrastructure/db/sqlalchemy.py index 1887712..09ce14a 100644 --- 
a/app/infrastructure/db/sqlalchemy.py +++ b/app/infrastructure/db/sqlalchemy.py @@ -1,15 +1,12 @@ """SQLAlchemy helpers.""" -from asyncio import current_task -from contextlib import asynccontextmanager -from functools import lru_cache, wraps -from typing import Any, Callable +from functools import lru_cache +from typing import Callable from sqlalchemy import MetaData from sqlalchemy.ext.asyncio import ( AsyncEngine, AsyncSession, - async_scoped_session, async_sessionmaker, create_async_engine, ) @@ -41,6 +38,7 @@ def make_url_sync(url: str) -> str: Base = declarative_base(metadata=MetaData(naming_convention=convention)) + @lru_cache(maxsize=1) def get_engine() -> AsyncEngine: """Lazily initialize and cache a single SQLAlchemy async engine.""" @@ -57,6 +55,7 @@ def get_session_factory() -> async_sessionmaker: engine = get_engine() return async_sessionmaker(bind=engine, expire_on_commit=False) + # # def provide_session(func: Callable) -> Callable: # """ diff --git a/app/infrastructure/repositories/sample_record.py b/app/infrastructure/repositories/sample_record.py index 9525017..bf4ce46 100644 --- a/app/infrastructure/repositories/sample_record.py +++ b/app/infrastructure/repositories/sample_record.py @@ -2,30 +2,29 @@ from typing import List +from psycopg2 import errorcodes from sqlalchemy import delete, insert, select, update -from sqlalchemy.exc import SQLAlchemyError, IntegrityError, NoResultFound +from sqlalchemy.exc import IntegrityError, NoResultFound, SQLAlchemyError from app.application.repository.exceptions import ( + ForeignKeyError, + RecordAlreadyExistsError, RecordCreateError, RecordDeleteError, RecordDoesNotExistError, RecordRetrieveError, RecordUpdateError, - RecordAlreadyExistsError, - ForeignKeyError, ValidationError, - BaseRepositoryError, ) from app.application.repository.interfaces import ISampleRecordRepository +from app.decorators.mapper.context import ExceptionContext from app.decorators.mapper.exception_mapper import ( ExceptionMapper, ) from 
app.decorators.mapper.factories import EnrichedExceptionFactory -from app.decorators.mapper.context import ExceptionContext from app.domain.entities.sample_record import SampleRecord from app.infrastructure.db.sample_record.models import SampleRecordModel from app.infrastructure.db.sqlalchemy import AsyncSession -from psycopg2 import errorcodes class IntegrityErrorFactory(EnrichedExceptionFactory): diff --git a/app/infrastructure/repositories/unit_of_work.py b/app/infrastructure/repositories/unit_of_work.py index a313086..ca7d2dd 100644 --- a/app/infrastructure/repositories/unit_of_work.py +++ b/app/infrastructure/repositories/unit_of_work.py @@ -3,8 +3,8 @@ from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker from app.application.repository.interfaces import ( - ISampleRecordUnitOfWork, ISampleRecordRepository, + ISampleRecordUnitOfWork, ) from app.infrastructure.repositories.sample_record import SampleRecordRepository @@ -27,7 +27,7 @@ async def __aenter__(self): async def __aexit__(self, exc_type, exc_val, exc_tb): try: - # Recommended for implicit resources cleanup + # Recommended for explicit resources cleanup await self._session.rollback() finally: await self._session.close() diff --git a/app/infrastructure/worker/tasks/simple_task.py b/app/infrastructure/worker/tasks/simple_task.py index 2681e91..25d928a 100644 --- a/app/infrastructure/worker/tasks/simple_task.py +++ b/app/infrastructure/worker/tasks/simple_task.py @@ -1,11 +1,10 @@ import asyncio from typing import Any -from dependency_injector.wiring import inject, Provide +from dependency_injector.wiring import Provide, inject from pybotx import Bot from app.infrastructure.containers import WorkerStartupContainer - from app.logger import logger @@ -13,8 +12,9 @@ async def heartbeat_task( ctx: dict[str, Any], bot: Bot = Provide[WorkerStartupContainer.bot], -): - task_name = asyncio.current_task().get_name() +) -> None: + """Simple example of a periodic task""" + task_name = 
asyncio.current_task().get_name() # type:ignore logger.info(f"Task {task_name} Heartbeat task executed start bot id {id(bot)}") await asyncio.sleep(10) logger.info(f"Task {task_name} Heartbeat task executed end bot id {id(bot)}") diff --git a/app/infrastructure/worker/worker.py b/app/infrastructure/worker/worker.py index e63a030..f643b87 100644 --- a/app/infrastructure/worker/worker.py +++ b/app/infrastructure/worker/worker.py @@ -1,20 +1,15 @@ """Tasks worker configuration.""" -from typing import Any, Dict, Literal +from typing import Any, Dict -from dependency_injector.wiring import inject, Provide +from dependency_injector.wiring import Provide, inject from pybotx import Bot -from redis import asyncio as aioredis from redis.asyncio import Redis -from saq import Queue, CronJob +from saq import CronJob, Queue from app.infrastructure.containers import WorkerStartupContainer -from app.infrastructure.repositories.caching.callback_redis_repo import ( - CallbackRedisRepo, -) from app.infrastructure.worker.tasks.simple_task import heartbeat_task from app.logger import logger - from app.settings import settings SaqCtx = Dict[str, Any] diff --git a/app/main.py b/app/main.py index 49f9dc3..3d68cfe 100644 --- a/app/main.py +++ b/app/main.py @@ -5,15 +5,14 @@ from dependency_injector.wiring import Provide from fastapi import FastAPI from pybotx import Bot -from redis.asyncio import Redis from app.infrastructure.containers import ( ApplicationStartupContainer, BotSampleRecordCommandContainer, - CallbackTaskManager, ) from app.infrastructure.db.sqlalchemy import ( - get_session_factory, get_engine, + get_engine, + get_session_factory, ) from app.presentation.api.routers import router from app.presentation.bot.resources import strings diff --git a/app/presentation/api/botx.py b/app/presentation/api/botx.py index dd0badc..294a7de 100644 --- a/app/presentation/api/botx.py +++ b/app/presentation/api/botx.py @@ -1,8 +1,9 @@ """Endpoints for communication with botx.""" from http 
import HTTPStatus -from dependency_injector.wiring import inject, Provide -from fastapi import APIRouter, Request, Depends + +from dependency_injector.wiring import Provide, inject +from fastapi import APIRouter, Depends, Request from fastapi.responses import JSONResponse from pybotx import ( Bot, diff --git a/app/presentation/bot/bot.py b/app/presentation/bot/bot.py index 7929224..38a66a9 100644 --- a/app/presentation/bot/bot.py +++ b/app/presentation/bot/bot.py @@ -3,10 +3,12 @@ from httpx import AsyncClient, Limits from pybotx import Bot, CallbackRepoProto +from app.presentation.bot.commands import common, sample_record +from app.presentation.bot.error_handlers.internal_error_handler import ( + internal_error_handler, +) from app.presentation.bot.middlewares.answer_error import answer_error_middleware from app.presentation.bot.middlewares.smart_logger import smart_logger_middleware -from app.presentation.bot.commands import common, sample_record -from app.presentation.bot.error_handlers.internal_error_handler import internal_error_handler from app.settings import settings diff --git a/app/presentation/bot/command_handlers/sample_record.py b/app/presentation/bot/command_handlers/sample_record.py index d0f2cd7..cef95f9 100644 --- a/app/presentation/bot/command_handlers/sample_record.py +++ b/app/presentation/bot/command_handlers/sample_record.py @@ -3,30 +3,31 @@ from pybotx import Bot, IncomingMessage from app.application.repository.exceptions import ( - RecordCreateError, RecordAlreadyExistsError, + RecordCreateError, RecordDoesNotExistError, ) from app.application.repository.interfaces import ( - ISampleRecordUnitOfWork, ISampleRecordRepository, + ISampleRecordUnitOfWork, ) from app.application.use_cases.interfaces import ISampleRecordUseCases from app.presentation.bot.command_handlers.base_handler import BaseCommandHandler -from app.presentation.bot.error_handlers.exceptions_chain_executor import ( - ExceptionHandlersChainExecutor, - DEFAULT_HANDLERS, -) from 
app.presentation.bot.error_handlers.base_handlers import ( SendErrorExplainToUserHandler, ) +from app.presentation.bot.error_handlers.exceptions_chain_executor import ( + DEFAULT_HANDLERS, + ExceptionHandlersChainExecutor, DEFAULT_HANDLERS_WITH_EXPLAIN, +) from app.presentation.bot.resources.strings import ( SAMPLE_RECORD_CREATED_ANSWER, SAMPLE_RECORD_DELETED_ANSWER, + SHOW_SAMPLE_RECORD_ANSWER, ) from app.presentation.bot.schemas.sample_record import ( SampleRecordCreateRequestSchema, - SampleRecordDeleteRequestSchema, + SampleRecordGetOrDeleteRequestSchema, ) from app.presentation.bot.validators.base import ( BotXJsonRequestParser, @@ -38,7 +39,7 @@ class CreateSampleRecordHandler(BaseCommandHandler): incoming_argument_parser = BotXJsonRequestParser(SampleRecordCreateRequestSchema) - _EXCEPTIONS_HANDLERS = DEFAULT_HANDLERS + [ + _EXCEPTIONS_HANDLERS = DEFAULT_HANDLERS_WITH_EXPLAIN + [ SendErrorExplainToUserHandler( exception_explain_mapping={ RecordAlreadyExistsError: "Запись с такими параметрами уже существует", @@ -81,9 +82,11 @@ async def handle_logic( class DeleteSampleRecordHandler(BaseCommandHandler): - incoming_argument_parser = BotXPlainRequestParser(SampleRecordDeleteRequestSchema) + incoming_argument_parser = BotXPlainRequestParser( + SampleRecordGetOrDeleteRequestSchema + ) - _EXCEPTIONS_HANDLERS = DEFAULT_HANDLERS + [ + _EXCEPTIONS_HANDLERS = DEFAULT_HANDLERS_WITH_EXPLAIN + [ SendErrorExplainToUserHandler( exception_explain_mapping={ RecordDoesNotExistError: "Запиcь с указанным id не найдена", @@ -109,7 +112,7 @@ def __init__( async def handle_logic( self, - request_parameter: SampleRecordDeleteRequestSchema, # type: ignore + request_parameter: SampleRecordGetOrDeleteRequestSchema, # type: ignore ) -> None: async with self.unit_of_work as uof: await self._use_cases(uof.get_sample_record_repository()).delete_record( @@ -121,3 +124,50 @@ async def handle_logic( id=request_parameter.id, ) ) + + +class GetSampleRecordHandler(BaseCommandHandler): + 
incoming_argument_parser = BotXPlainRequestParser( + SampleRecordGetOrDeleteRequestSchema + ) + + _EXCEPTIONS_HANDLERS = DEFAULT_HANDLERS_WITH_EXPLAIN + [ + SendErrorExplainToUserHandler( + exception_explain_mapping={ + RecordDoesNotExistError: "Запиcь с указанным id не найдена", + MessageValidationError: "Неправильный формат данных", + } + ) + ] + exception_handler_chain_executor = ExceptionHandlersChainExecutor( + _EXCEPTIONS_HANDLERS + ) + + def __init__( + self, + bot: Bot, + message: IncomingMessage, + unit_of_work: ISampleRecordUnitOfWork, + use_case_factory: Callable[[ISampleRecordRepository], ISampleRecordUseCases], + ): + self._use_cases = use_case_factory + self.unit_of_work = unit_of_work + + super().__init__(bot, message, self.exception_handler_chain_executor) + + async def handle_logic( + self, + request_parameter: SampleRecordGetOrDeleteRequestSchema, + ) -> None: + async with self.unit_of_work as uof: + record = await self._use_cases( + uof.get_sample_record_repository() + ).get_record(request_parameter.id) + + await self._bot.answer_message( + SHOW_SAMPLE_RECORD_ANSWER.format( + id=record.id, + record_data=record.record_data, + name=record.name, + ) + ) diff --git a/app/presentation/bot/commands/command_listing.py b/app/presentation/bot/commands/command_listing.py index 2881bed..9c03e4b 100644 --- a/app/presentation/bot/commands/command_listing.py +++ b/app/presentation/bot/commands/command_listing.py @@ -23,4 +23,7 @@ class SampleRecordCommands: command_name="/delete_record", description="Удалить запись", ) - + GET_RECORD = BotCommand( + command_name="/get_record", + description="Получить запись", + ) diff --git a/app/presentation/bot/commands/sample_record.py b/app/presentation/bot/commands/sample_record.py index beb505d..7340ab1 100644 --- a/app/presentation/bot/commands/sample_record.py +++ b/app/presentation/bot/commands/sample_record.py @@ -1,26 +1,28 @@ from typing import Callable -from dependency_injector.providers import Factory -from 
dependency_injector.wiring import Provider, inject, Provide +from dependency_injector.wiring import Provide, Provider, inject from pybotx import Bot, HandlerCollector, IncomingMessage -from sqlalchemy.ext.asyncio import AsyncSession from app.application.repository.interfaces import ISampleRecordRepository from app.application.use_cases.interfaces import ISampleRecordUseCases from app.infrastructure.containers import ( BotSampleRecordCommandContainer, - ApplicationStartupContainer, ) - -from app.infrastructure.repositories.unit_of_work import WriteSampleRecordUnitOfWork -from app.presentation.bot.commands.command_listing import SampleRecordCommands +from app.infrastructure.repositories.unit_of_work import ( + ReadOnlySampleRecordUnitOfWork, + WriteSampleRecordUnitOfWork, +) from app.presentation.bot.command_handlers.sample_record import ( CreateSampleRecordHandler, DeleteSampleRecordHandler, + GetSampleRecordHandler, ) +from app.presentation.bot.commands.command_listing import SampleRecordCommands collector = HandlerCollector() +UseCaseFactory = Callable[[ISampleRecordRepository], ISampleRecordUseCases] + @collector.command(**SampleRecordCommands.CREATE_RECORD.command_data()) @inject @@ -30,15 +32,15 @@ async def create_sample_record( unit_of_work: WriteSampleRecordUnitOfWork = Provide[ BotSampleRecordCommandContainer.rw_unit_of_work ], - record_use_cases_factory: Callable[ - [ISampleRecordRepository], ISampleRecordUseCases - ] = Provider[BotSampleRecordCommandContainer.record_use_cases_factory], + use_case_factory: UseCaseFactory = Provider[ + BotSampleRecordCommandContainer.record_use_cases_factory + ], ) -> None: """Creates a sample record in the database.""" handler = CreateSampleRecordHandler( bot=bot, message=message, - use_case_factory=record_use_cases_factory, + use_case_factory=use_case_factory, unit_of_work=unit_of_work, ) @@ -50,17 +52,38 @@ async def create_sample_record( async def delete_sample_record( message: IncomingMessage, bot: Bot, - unit_of_work: 
WriteSampleRecordUnitOfWork = Provide[ - BotSampleRecordCommandContainer.rw_unit_of_work - ], - record_use_cases_factory: Callable[ - [ISampleRecordRepository], ISampleRecordUseCases - ] = Provider[BotSampleRecordCommandContainer.record_use_cases_factory], + unit_of_work: WriteSampleRecordUnitOfWork = Provide[ + BotSampleRecordCommandContainer.rw_unit_of_work + ], + use_case_factory: UseCaseFactory = Provider[ + BotSampleRecordCommandContainer.record_use_cases_factory + ], ) -> None: """Delete a sample record in the database.""" await DeleteSampleRecordHandler( bot=bot, message=message, - use_case_factory=record_use_cases_factory, + use_case_factory=use_case_factory, + unit_of_work=unit_of_work, + ).execute() + + +@collector.command(**SampleRecordCommands.GET_RECORD.command_data()) +@inject +async def get_sample_record( + message: IncomingMessage, + bot: Bot, + unit_of_work: ReadOnlySampleRecordUnitOfWork = Provide[ + BotSampleRecordCommandContainer.ro_unit_of_work + ], + use_case_factory: UseCaseFactory = Provider[ + BotSampleRecordCommandContainer.record_use_cases_factory + ], +) -> None: + """Get a sample record from the database.""" + await GetSampleRecordHandler( + bot=bot, + message=message, + use_case_factory=use_case_factory, unit_of_work=unit_of_work, ).execute() diff --git a/app/presentation/bot/error_handlers/base_handlers.py b/app/presentation/bot/error_handlers/base_handlers.py index 08f0f16..b0076bb 100644 --- a/app/presentation/bot/error_handlers/base_handlers.py +++ b/app/presentation/bot/error_handlers/base_handlers.py @@ -1,20 +1,33 @@ from abc import ABC, abstractmethod -from typing import Self, Callable +from typing import Callable, Self from uuid import UUID -from pybotx import Bot, IncomingMessage, BotShuttingDownError +from pybotx import Bot, BotShuttingDownError, IncomingMessage +from app.decorators.mapper.factories import ContextAwareError from app.logger import logger from app.presentation.bot.resources import strings class 
AbstractExceptionHandler(ABC): + """Abstract template class for exception handlers.""" + def __init__( self, next_handler: Self | None = None, stop_on_failure: bool = False, break_the_chain: bool = False, ): + """Constructor for exception handler. + + Args: + next_handler: The next handler in the chain. + stop_on_failure: Whether to stop processing the chain on this + handler failure. + break_the_chain: Whether to break the chain if this handler processed + the exception successfully. + + """ self.next_handler = next_handler self._stop_on_failure = stop_on_failure self._break_the_chain = break_the_chain @@ -23,6 +36,7 @@ def __init__( def should_process_exception( self, exc: Exception, bot: Bot, message: IncomingMessage ) -> bool: + """Method to determine whether exception should be processed or not.""" pass @abstractmethod @@ -32,7 +46,8 @@ async def process_exception( bot: Bot, message: IncomingMessage, exception_id: UUID | None, - ): + ) -> None: + """Method to process exception.""" pass async def handle_exception( @@ -42,16 +57,20 @@ async def handle_exception( message: IncomingMessage, exception_id: UUID | None = None, ) -> None: + """Base method to handle exception. 
+ Execute main chain logic""" if self.should_process_exception(exc, bot, message): try: await self.process_exception(exc, bot, message, exception_id) + if self.next_handler and not self._break_the_chain: await self.next_handler.handle_exception( exc, bot, message, exception_id ) except Exception as exc: - logger.opt(exception=exc).error( - f"Error handling exception {exception_id}" + logger.error( + f"Error handling exception {exception_id}", + exc_info=True, ) if self._stop_on_failure: return @@ -79,7 +98,13 @@ async def process_exception( message: IncomingMessage, exception_id: UUID | None, ) -> None: - logger.error(f"Error {exception_id}:{exc}", exc_info=exc) + # TODO: add structured context logging + if isinstance(exc, ContextAwareError) and exc.context is not None: + msg = f"Error {exception_id}:{exc}. Context:{exc.context.formatted_context}" + else: + msg = f"Error {exception_id}:{exc}" + + logger.error(msg, exc_info=exc) class DropFSMOnErrorHandler(AbstractExceptionHandler): @@ -89,7 +114,11 @@ def should_process_exception( return True async def process_exception( - self, exc: Exception, bot: Bot, message: IncomingMessage, exception_id: UUID + self, + exc: Exception, + bot: Bot, + message: IncomingMessage, + exception_id: UUID | None, ) -> None: if fsm_manager := getattr(message.state, "fsm", None): await fsm_manager.drop_state() diff --git a/app/presentation/bot/error_handlers/exceptions_chain_executor.py b/app/presentation/bot/error_handlers/exceptions_chain_executor.py index 8b0ad1f..5399e07 100644 --- a/app/presentation/bot/error_handlers/exceptions_chain_executor.py +++ b/app/presentation/bot/error_handlers/exceptions_chain_executor.py @@ -4,9 +4,9 @@ from app.presentation.bot.error_handlers.base_handlers import ( AbstractExceptionHandler, - SendErrorExplainToUserHandler, - LoggingExceptionHandler, DropFSMOnErrorHandler, + LoggingExceptionHandler, + SendErrorExplainToUserHandler, ) @@ -95,10 +95,5 @@ def append( LoggingExceptionHandler, 
DropFSMOnErrorHandler, ] -DEFAULT_EXCEPTION_HANDLER_EXECUTOR = ExceptionHandlersChainExecutor(DEFAULT_HANDLERS) DEFAULT_HANDLERS_WITH_EXPLAIN = DEFAULT_HANDLERS + [SendErrorExplainToUserHandler] - -DEFAULT_EXCEPTION_HANDLER_EXECUTOR_WITH_EXPLAIN = ExceptionHandlersChainExecutor( - handlers=DEFAULT_HANDLERS_WITH_EXPLAIN -) diff --git a/app/presentation/bot/middlewares/answer_error.py b/app/presentation/bot/middlewares/answer_error.py index 7a1041f..da8e9db 100644 --- a/app/presentation/bot/middlewares/answer_error.py +++ b/app/presentation/bot/middlewares/answer_error.py @@ -1,13 +1,13 @@ """Middleware to handle AnswerError and AnswerMessageError exceptions.""" -from typing import Dict, Any, Union, List, Optional +from typing import Any, Dict, List, Optional, Union from uuid import UUID from pybotx import ( Bot, + BubbleMarkup, IncomingMessage, IncomingMessageHandlerFunc, - BubbleMarkup, KeyboardMarkup, OutgoingAttachment, OutgoingMessage, @@ -19,7 +19,7 @@ async def answer_error_middleware( message: IncomingMessage, bot: Bot, call_next: IncomingMessageHandlerFunc ) -> None: - """Middleware, used for catching and logging unhandled AnswerError and AnswerMessageError.""" + """Middleware, used for answering error messages to the user.""" try: await call_next(message, bot) except AnswerError as exc: diff --git a/app/presentation/bot/middlewares/smart_logger.py b/app/presentation/bot/middlewares/smart_logger.py index bdc6dec..ebcc852 100644 --- a/app/presentation/bot/middlewares/smart_logger.py +++ b/app/presentation/bot/middlewares/smart_logger.py @@ -1,7 +1,7 @@ """Middlewares to log all requests using smart logger wrapper.""" from pprint import pformat -from typing import Optional, Dict, Any +from typing import Any, Dict, Optional from pybotx import Bot, IncomingMessage, IncomingMessageHandlerFunc from pybotx.logger import trim_file_data_in_incoming_json diff --git a/app/presentation/bot/resources/strings.py b/app/presentation/bot/resources/strings.py index 
e8b82a7..c5e2ade 100644 --- a/app/presentation/bot/resources/strings.py +++ b/app/presentation/bot/resources/strings.py @@ -69,6 +69,8 @@ def _format(**kwargs: Any) -> str: "sample_record_created_answer.txt.mako" ) +SHOW_SAMPLE_RECORD_ANSWER = lookup.get_template("show_sample_record_answer.txt.mako") + SAMPLE_RECORD_DELETED_ANSWER = lookup.get_template( "sample_record_deleted_answer.txt.mako" ) diff --git a/app/presentation/bot/resources/templates/sample_record/show_sample_record_answer.txt.mako b/app/presentation/bot/resources/templates/sample_record/show_sample_record_answer.txt.mako new file mode 100644 index 0000000..6b10e34 --- /dev/null +++ b/app/presentation/bot/resources/templates/sample_record/show_sample_record_answer.txt.mako @@ -0,0 +1,2 @@ +Запись найдена: +**id**: ${ id } **name**: ${ name } **record_data**: ${ record_data }. diff --git a/app/presentation/bot/schemas/sample_record.py b/app/presentation/bot/schemas/sample_record.py index 520c2d7..1358816 100644 --- a/app/presentation/bot/schemas/sample_record.py +++ b/app/presentation/bot/schemas/sample_record.py @@ -1,7 +1,5 @@ """Domains.""" -from typing import Self - from pydantic import BaseModel, Field @@ -33,19 +31,18 @@ class Config: orm_mode = True -class SampleRecordDeleteRequestSchema(BaseModel): +class SampleRecordUpdateRequestSchema( + BaseModel, +): id: int + record_data: str = Field(..., min_length=1, max_length=128) + name: str = Field(..., min_length=1, max_length=32) + class Config: + orm_mode = True -class SampleRecordUpdateRequestSchema(BaseModel): - id: int - record_data: str | None = Field(..., min_length=1, max_length=128) - name: str | None = Field(..., min_length=1, max_length=32) - @classmethod - def _from_plain_message_data(cls, message_data: str) -> Self: - record_id, record_name, record_data = message_data.split(" ") - return cls(id=record_id, record_name=record_name, record_data=record_data) # type: ignore[arg-type] +class SampleRecordGetOrDeleteRequestSchema(BaseModel): + 
"""Schema for sample record get or delete request.""" - class Config: - orm_mode = True + id: int diff --git a/app/presentation/bot/validators/base.py b/app/presentation/bot/validators/base.py index e8f5d37..8578702 100644 --- a/app/presentation/bot/validators/base.py +++ b/app/presentation/bot/validators/base.py @@ -51,9 +51,9 @@ def __init__(self, model: type[T]): ) def parse(self, raw_input: IncomingMessage) -> T: if not (message_args := raw_input.argument.strip().split(" ")): - raise ValidationError("Message is empty") + raise ValidationError("Message is empty", self.model) fields = self.model.__fields__.keys() - message_kwargs = dict(zip(fields, message_args)) + message_kwargs = dict(zip(fields, message_args, strict=True)) return self.model.parse_obj(message_kwargs) diff --git a/app/settings.py b/app/settings.py index e50b15d..97cb878 100644 --- a/app/settings.py +++ b/app/settings.py @@ -4,7 +4,7 @@ from uuid import UUID from pybotx import BotAccountWithSecret -from pydantic import BaseSettings, PositiveInt, Field +from pydantic import BaseSettings, Field, PositiveInt class AppSettings(BaseSettings): diff --git a/tests/conftest.py b/tests/conftest.py index d9f7981..06df35e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,7 +5,6 @@ import jwt import pytest -from testcontainers.postgres import PostgresContainer # type: ignore from app.settings import settings @@ -13,7 +12,7 @@ @pytest.fixture(scope="session") def event_loop(): """Create a session-scoped event loop for async session-scoped fixtures. - Don't touch this fixture. Its internally used by pytest-asyncio.""" + Don't touch this fixture. 
It's internally used by pytest-asyncio.""" loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) yield loop @@ -35,11 +34,6 @@ def secret_key() -> str: return settings.BOT_CREDENTIALS[0].secret_key -@pytest.fixture -def user_huid() -> UUID: - return UUID("cd069aaa-46e6-4223-950b-ccea42b89c06") - - @pytest.fixture def authorization_token_payload(bot_id: UUID, host: str) -> Dict[str, Any]: return { @@ -62,5 +56,3 @@ def authorization_header( key=secret_key, ) return {"authorization": f"Bearer {token}"} - - diff --git a/tests/factories.py b/tests/factories.py index 38132dd..c238efc 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -1,10 +1,9 @@ import factory -from factory import DictFactory, Factory +from factory import Factory from app.domain.entities.sample_record import SampleRecord from app.presentation.bot.schemas.sample_record import ( SampleRecordCreateRequestSchema, - SampleRecordDeleteRequestSchema, SampleRecordUpdateRequestSchema, ) @@ -20,7 +19,7 @@ class Meta: class SampleRecordCreateSchemaFactory(Factory): - """Factory for sample record create schema objects.""" + """Factory for sample record creation schema objects.""" class Meta: model = SampleRecordCreateRequestSchema @@ -30,20 +29,11 @@ class Meta: class SampleRecordUpdateSchemaFactory(Factory): - """Factory for sample record update schema objects.""" + """Factory for sample record update request schema objects.""" class Meta: model = SampleRecordUpdateRequestSchema - id = factory.Faker("integer") + id: int record_data = factory.Faker("text", max_nb_chars=128) name = factory.Faker("text", max_nb_chars=32) - - -class SampleRecordDeleteSchemaFactory(Factory): - """Factory for sample record delete schema objects.""" - - class Meta: - model = SampleRecordDeleteRequestSchema - - id = factory.Faker("integer") diff --git a/tests/integration/bot_commands/bot_factories.py b/tests/integration/bot_commands/bot_factories.py index a403fa0..edd8620 100644 --- 
a/tests/integration/bot_commands/bot_factories.py +++ b/tests/integration/bot_commands/bot_factories.py @@ -1,18 +1,25 @@ -from typing import Callable, Optional, Any -from uuid import UUID, uuid4 +from typing import Any, Optional +from uuid import UUID import factory -import pytest from factory import Factory -from pybotx import IncomingMessage, BotAccount, UserSender, UserDevice, Chat, ChatTypes, \ - ClientPlatforms - +from pybotx import ( + BotAccount, + Chat, + ChatTypes, + ClientPlatforms, + IncomingMessage, + UserDevice, + UserSender, +) class BotAccountFactory(Factory): """Factory for bot accounts.""" - id:UUID=factory.Faker("uuid4") - host:str|None=None + + id: UUID = factory.Faker("uuid4") # type:ignore + host: str | None = None + class Meta: model = BotAccount @@ -20,7 +27,7 @@ class Meta: class ChatFactory(Factory): """Factory for chats.""" - id: UUID = factory.Faker("uuid4") + id: UUID = factory.Faker("uuid4") # type:ignore type: ChatTypes = ChatTypes.PERSONAL_CHAT class Meta: @@ -29,16 +36,17 @@ class Meta: class UserDeviceFactory(Factory): """Factory for user devices.""" - manufacturer: str | None = (None,) - device_name: str | None = (None,) - os: str | None = (None,) - pushes: str | None = (None,) - timezone: str | None = (None,) - permissions: str | None = (None,) - platform: ClientPlatforms | None = (None,) - platform_package_id: str | None = (None,) - app_version: str | None = (None,) - locale: str | None = (None,) + + manufacturer: str | None = None + device_name: str | None = None + os: str | None = None + pushes: str | None = None + timezone: str | None = None + permissions: str | None = None + platform: ClientPlatforms | None = None + platform_package_id: str | None = None + app_version: str | None = None + locale: str | None = None class Meta: model = UserDevice @@ -47,14 +55,14 @@ class Meta: class UserSenderFactory(Factory): """Factory for user senders.""" - huid: UUID = factory.Faker("uuid4") + huid: UUID = factory.Faker("uuid4") # 
type:ignore udid = None ad_login: Optional[str] = None ad_domain: Optional[str] = None username: Optional[str] = None is_chat_admin: bool = True is_chat_creator: bool = True - device: UserDevice = factory.SubFactory(UserDeviceFactory) + device: UserDevice = factory.SubFactory(UserDeviceFactory) # type:ignore class Meta: model = UserSender @@ -67,7 +75,7 @@ class IncomingMessageFactory(Factory): sync_id: UUID = factory.Faker("uuid4") source_sync_id: Optional[UUID] = None body: str = factory.Faker("text", max_nb_chars=100) - data: dict[str,Any] = {} + data: dict[str, Any] = {} metadata: dict = {} sender: UserSender = factory.SubFactory(UserSenderFactory) chat: Chat = factory.SubFactory(ChatFactory) diff --git a/tests/integration/bot_commands/conftest.py b/tests/integration/bot_commands/conftest.py index 0281e44..ee04373 100644 --- a/tests/integration/bot_commands/conftest.py +++ b/tests/integration/bot_commands/conftest.py @@ -1,8 +1,8 @@ -from typing import Callable, Optional -from uuid import UUID, uuid4 +from typing import Callable +from uuid import UUID import pytest -from pybotx import IncomingMessage, BotAccount, UserSender, UserDevice, Chat, ChatTypes +from pybotx import IncomingMessage from tests.integration.bot_commands.bot_factories import IncomingMessageFactory @@ -28,7 +28,7 @@ def factory(command: str, args: str) -> IncomingMessage: @pytest.fixture -def create_sample_record_command_message_from_user_factory( +def create_sample_record_command_message_factory( command_message_from_user, ) -> Callable[[str], IncomingMessage]: def factory(args: str) -> IncomingMessage: @@ -38,10 +38,20 @@ def factory(args: str) -> IncomingMessage: @pytest.fixture -def delete_sample_record_command_message_from_user_factory( +def delete_sample_record_command_message_factory( command_message_from_user, ) -> Callable[[int], IncomingMessage]: def factory(object_id: int) -> IncomingMessage: return command_message_from_user("/delete_record", str(object_id)) return factory + + 
+@pytest.fixture +def get_sample_record_command_message_factory( + command_message_from_user, +) -> Callable[[int], IncomingMessage]: + def factory(object_id: int) -> IncomingMessage: + return command_message_from_user("/get_record", str(object_id)) + + return factory diff --git a/tests/integration/bot_commands/sample_record_factories.py b/tests/integration/bot_commands/sample_record_factories.py index ff13a0a..1dace74 100644 --- a/tests/integration/bot_commands/sample_record_factories.py +++ b/tests/integration/bot_commands/sample_record_factories.py @@ -22,11 +22,11 @@ def _generate(cls, strategy, params): class CreateSampleRecordRequestFactory(JsonableFactory): - record_data: str = factory.Faker("text", max_nb_chars=8) - name = factory.Faker("text", max_nb_chars=8) + record_data: str = factory.Faker("text", max_nb_chars=8) # type: ignore + name = factory.Faker("text", max_nb_chars=8) # type:ignore class UpdateSampleRecordRequestFactory(JsonableFactory): - id: int = factory.Faker("integer") - record_data: str | None = factory.Faker("text", max_nb_chars=8) - name: str | None = factory.Faker("text", max_nb_chars=8) + id: int = factory.Faker("integer") # type:ignore + record_data: str | None = factory.Faker("text", max_nb_chars=8) # type: ignore + name: str | None = factory.Faker("text", max_nb_chars=8) # type: ignore diff --git a/tests/integration/bot_commands/test_common.py b/tests/integration/bot_commands/test_common.py index 086940f..34a00c5 100644 --- a/tests/integration/bot_commands/test_common.py +++ b/tests/integration/bot_commands/test_common.py @@ -1,4 +1,3 @@ -from typing import Callable from uuid import UUID from pybotx import ( @@ -19,7 +18,6 @@ async def test_default_message_handler( bot: Bot, message_from_user: IncomingMessage, ) -> None: - await bot.async_execute_bot_command(message_from_user) # - Assert - diff --git a/tests/integration/bot_commands/test_sample_record.py b/tests/integration/bot_commands/test_sample_record.py index 2d25fc6..96369ec 
100644 --- a/tests/integration/bot_commands/test_sample_record.py +++ b/tests/integration/bot_commands/test_sample_record.py @@ -1,31 +1,21 @@ -import json -from typing import Callable -from unittest.mock import MagicMock - -from pybotx import Bot, IncomingMessage +from pybotx import Bot from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession from app.infrastructure.db.sample_record.models import SampleRecordModel -from app.presentation.bot.resources import strings -from tests.factories import SampleRecordFactory from tests.integration.bot_commands.sample_record_factories import ( CreateSampleRecordRequestFactory, - UpdateSampleRecordRequestFactory, ) -from tests.integration.factories import SampleRecordModelFactory async def test_sample_record_created( bot: Bot, - create_sample_record_command_message_from_user_factory, + create_sample_record_command_message_factory, isolated_session: AsyncSession, ): """Test creating a record usual way.""" request_data = CreateSampleRecordRequestFactory.create() - message = create_sample_record_command_message_from_user_factory( - request_data.json() - ) + message = create_sample_record_command_message_factory(request_data.json()) await bot.async_execute_bot_command(message) # Check db object existing @@ -48,12 +38,12 @@ async def test_sample_record_delete( bot: Bot, isolated_session: AsyncSession, sample_record_factory, - delete_sample_record_command_message_from_user_factory, + delete_sample_record_command_message_factory, ): """Test creating a record usual way.""" existing_record: SampleRecordModel = await sample_record_factory.create() - message = delete_sample_record_command_message_from_user_factory(existing_record.id) + message = delete_sample_record_command_message_factory(existing_record.id) await bot.async_execute_bot_command(message) # Check db object non existing @@ -62,3 +52,27 @@ async def test_sample_record_delete( ) assert db_object is None + + +async def test_get_sample_record( + bot: Bot, + 
sample_record_factory, + get_sample_record_command_message_factory, + isolated_session: AsyncSession, +): + """Test get sample record.""" + existing_record: SampleRecordModel = await sample_record_factory.create() + + message = get_sample_record_command_message_factory( + existing_record.id, + ) + + await bot.async_execute_bot_command(message) + + # Check bot answer + assert bot.answer_message.call_args[0][0] == ( + "Запись найдена:\n" + f"**id**: {existing_record.id} " + f"**name**: {existing_record.name} " + f"**record_data**: {existing_record.record_data}." + ) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 26bb3a0..7abc82b 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -2,8 +2,8 @@ from asyncio import current_task from http import HTTPStatus from pathlib import Path -from typing import Generator, Callable, Any, AsyncGenerator -from unittest.mock import patch, AsyncMock +from typing import Any, AsyncGenerator, Callable, Generator +from unittest.mock import AsyncMock, patch from uuid import uuid4 import httpx @@ -16,15 +16,14 @@ from sqlalchemy import NullPool from sqlalchemy.ext.asyncio import ( AsyncEngine, - create_async_engine, - async_scoped_session, AsyncSession, + async_scoped_session, + create_async_engine, ) from sqlalchemy.orm import sessionmaker -from testcontainers.postgres import PostgresContainer -from testcontainers.redis import RedisContainer +from testcontainers.postgres import PostgresContainer # type:ignore +from testcontainers.redis import RedisContainer # type:ignore -import app.infrastructure.db.sqlalchemy from app.infrastructure.db.sqlalchemy import AsyncSessionFactory, make_url_async from app.infrastructure.repositories.sample_record import SampleRecordRepository from app.main import get_application diff --git a/tests/integration/endpoints/test_botx.py b/tests/integration/endpoints/test_botx.py index b15e1a2..f6ace7e 100644 --- a/tests/integration/endpoints/test_botx.py 
+++ b/tests/integration/endpoints/test_botx.py @@ -7,7 +7,6 @@ import respx from fastapi import FastAPI from fastapi.testclient import TestClient -from pybotx import Bot from app.main import get_application diff --git a/tests/integration/factories.py b/tests/integration/factories.py index 1506737..60d8402 100644 --- a/tests/integration/factories.py +++ b/tests/integration/factories.py @@ -12,5 +12,3 @@ class Meta: record_data = factory.Faker("text", max_nb_chars=100) name = factory.Faker("text", max_nb_chars=32) - - diff --git a/tests/integration/repository/test_sample_record_repository.py b/tests/integration/repository/test_sample_record_repository.py index 89e29ee..7d52892 100644 --- a/tests/integration/repository/test_sample_record_repository.py +++ b/tests/integration/repository/test_sample_record_repository.py @@ -5,10 +5,10 @@ from sqlalchemy.ext.asyncio import AsyncSession from app.application.repository.exceptions import ( - RecordDoesNotExistError, RecordAlreadyExistsError, - ValidationError, RecordCreateError, + RecordDoesNotExistError, + ValidationError, ) from app.domain.entities.sample_record import SampleRecord from app.infrastructure.db.sample_record.models import SampleRecordModel @@ -19,6 +19,7 @@ def assert_database_object_equal_domain( db_object: SampleRecordModel, domain_object: SampleRecord ) -> None: + """Assert that database object and domain object are equal.""" assert db_object.id == domain_object.id assert db_object.record_data == domain_object.record_data assert db_object.name == domain_object.name @@ -29,7 +30,6 @@ async def test_add_record( isolated_session: AsyncSession, ): """Test adding a new record.""" - new_record = SampleRecord(record_data="test_add", name="test_name") created_record = await sample_record_repository.create(new_record) @@ -52,9 +52,8 @@ async def test_add_record_with_non_unique_name( sample_record_repository: SampleRecordRepository, isolated_session: AsyncSession, ): - existing_record = await 
sample_record_factory.create( - record_data="test_add", name="test_name" - ) + """Test adding a new record with non unique name raises RecordAlreadyExistsError.""" + await sample_record_factory.create(record_data="test_add", name="test_name") new_record = SampleRecord(record_data="new_data", name="test_name") with pytest.raises(RecordAlreadyExistsError): diff --git a/tests/integration/sample_record_use_cases/test_sample_record_use_cases_int.py b/tests/integration/sample_record_use_cases/test_sample_record_use_cases_int.py index 1aae251..9101575 100644 --- a/tests/integration/sample_record_use_cases/test_sample_record_use_cases_int.py +++ b/tests/integration/sample_record_use_cases/test_sample_record_use_cases_int.py @@ -37,12 +37,10 @@ async def test_sample_record_use_case_add_record_in_database( async def test_sample_record_use_case_get_record_from_database( sample_record_use_cases_with_real_repo: ISampleRecordUseCases, isolated_session: AsyncSession, - sample_record_factory:type[SampleRecordModelFactory] + sample_record_factory: type[SampleRecordModelFactory], ): """Test get a record.""" - - existing_record = await sample_record_factory.create() response = await sample_record_use_cases_with_real_repo.get_record( @@ -57,7 +55,7 @@ async def test_sample_record_use_case_get_record_from_database( async def test_sample_record_use_case_remove_record_from_database( sample_record_use_cases_with_real_repo: ISampleRecordUseCases, isolated_session: AsyncSession, - sample_record_factory:type[SampleRecordModelFactory] + sample_record_factory: type[SampleRecordModelFactory], ): """Test adding a new record.""" diff --git a/tests/unit/decorators/test_classes.py b/tests/unit/decorators/test_classes.py index 25b83b3..b024717 100644 --- a/tests/unit/decorators/test_classes.py +++ b/tests/unit/decorators/test_classes.py @@ -1,5 +1,5 @@ -from app.decorators.mapper.factories import ExceptionFactory from app.decorators.mapper.context import ExceptionContext +from 
app.decorators.mapper.factories import ExceptionFactory class ParentError(Exception): diff --git a/tests/unit/decorators/test_errors_mapper.py b/tests/unit/decorators/test_errors_mapper.py index bfb6dad..337afaf 100644 --- a/tests/unit/decorators/test_errors_mapper.py +++ b/tests/unit/decorators/test_errors_mapper.py @@ -1,9 +1,6 @@ -from unittest.mock import patch - import pytest from app.decorators.mapper.exception_mapper import ExceptionMapper -from app.logger import logger from tests.unit.decorators.test_classes import ( ChildError, DummyFactory, diff --git a/tests/unit/error_handlers/test_chain_executor.py b/tests/unit/error_handlers/test_chain_executor.py index 565f313..51b520d 100644 --- a/tests/unit/error_handlers/test_chain_executor.py +++ b/tests/unit/error_handlers/test_chain_executor.py @@ -1,6 +1,5 @@ from unittest.mock import MagicMock -from app.presentation.bot.error_handlers.base_handlers import AbstractExceptionHandler from app.presentation.bot.error_handlers.exceptions_chain_executor import ( ExceptionHandlersChainExecutor, ) diff --git a/tests/unit/error_handlers/test_classes.py b/tests/unit/error_handlers/test_classes.py index f3f59f5..fa91956 100644 --- a/tests/unit/error_handlers/test_classes.py +++ b/tests/unit/error_handlers/test_classes.py @@ -16,7 +16,7 @@ def __init__( self.should_process_exception_flag = should_process_exception self.run_history = run_history - super().__init__(None,stop_on_failure,break_the_chain) + super().__init__(None, stop_on_failure, break_the_chain) def should_process_exception( self, exc: Exception, bot: Bot, message: IncomingMessage From a5e3abb04cddd8bcf7239681cc6049fe83fb5e11 Mon Sep 17 00:00:00 2001 From: vladimirgubarik Date: Fri, 8 Aug 2025 01:26:09 +0300 Subject: [PATCH 10/15] refactor containers --- app/infrastructure/containers.py | 34 +++++++++++------------------ app/infrastructure/worker/worker.py | 4 ++-- 2 files changed, 15 insertions(+), 23 deletions(-) diff --git a/app/infrastructure/containers.py 
b/app/infrastructure/containers.py index 9c9f97d..e7622bc 100644 --- a/app/infrastructure/containers.py +++ b/app/infrastructure/containers.py @@ -82,6 +82,15 @@ def __call__(self) -> asyncio.Task: class BaseStartupContainer(containers.DeclarativeContainer): """Общий контейнер для старта бота.""" + @staticmethod + def get_collectors() -> list[HandlerCollector]: + from app.presentation.bot.commands.common import collector as common_collector + from app.presentation.bot.commands.sample_record import ( + collector as sample_record_collector, + ) + + return [common_collector, sample_record_collector] + redis_client = providers.Singleton(lambda: aioredis.from_url(settings.REDIS_DSN)) redis_repo = providers.Factory( @@ -116,23 +125,12 @@ class BaseStartupContainer(containers.DeclarativeContainer): answer_error_middleware, ], callback_repo=callback_repo, + collectors=get_collectors(), ) class ApplicationStartupContainer(BaseStartupContainer): - """Контейнер приложения с ленивой загрузкой collectors.""" - - @staticmethod - def get_collectors() -> list[HandlerCollector]: - from app.presentation.bot.commands.common import collector as common_collector - from app.presentation.bot.commands.sample_record import collector as sample_record_collector - return [common_collector, sample_record_collector] - - bot = providers.Singleton( - Bot, - collectors=providers.Callable(get_collectors), - **BaseStartupContainer.bot.kwargs, - ) + """Main Fastapi application container.""" callback_task_manager = providers.Singleton( CallbackTaskManager, @@ -146,12 +144,6 @@ def get_collectors() -> list[HandlerCollector]: class WorkerStartupContainer(BaseStartupContainer): - """Контейнер воркера с прямым импортом collectors.""" - - from app.presentation.bot.commands import common, sample_record + """SAQ Worker container""" - bot = providers.Singleton( - Bot, - collectors=[common.collector, sample_record.collector], # type:ignore - **BaseStartupContainer.bot.kwargs, - ) \ No newline at end of file + 
pass diff --git a/app/infrastructure/worker/worker.py b/app/infrastructure/worker/worker.py index f643b87..b8272ea 100644 --- a/app/infrastructure/worker/worker.py +++ b/app/infrastructure/worker/worker.py @@ -7,7 +7,7 @@ from redis.asyncio import Redis from saq import CronJob, Queue -from app.infrastructure.containers import WorkerStartupContainer +from app.infrastructure.containers import WorkerStartupContainer, BaseStartupContainer from app.infrastructure.worker.tasks.simple_task import heartbeat_task from app.logger import logger from app.settings import settings @@ -38,7 +38,7 @@ async def _shutdown_with_injection( async def startup(ctx: SaqCtx) -> None: - worker_startup_container = WorkerStartupContainer() + worker_startup_container = BaseStartupContainer() worker_startup_container.wire(modules=[__name__, "app.infrastructure.worker.tasks"]) await _startup_with_injection() From 5b742e2b4824add988700259ccf11d8d1f1002b2 Mon Sep 17 00:00:00 2001 From: vladimirgubarik Date: Fri, 8 Aug 2025 09:58:00 +0300 Subject: [PATCH 11/15] refactor lint --- app/decorators/mapper/exception_mapper.py | 4 +++ app/infrastructure/containers.py | 2 +- .../repositories/sample_record.py | 4 --- app/infrastructure/worker/worker.py | 2 +- .../bot/command_handlers/sample_record.py | 27 +++++++++++-------- 5 files changed, 22 insertions(+), 17 deletions(-) diff --git a/app/decorators/mapper/exception_mapper.py b/app/decorators/mapper/exception_mapper.py index f1a90f3..f2afe5f 100644 --- a/app/decorators/mapper/exception_mapper.py +++ b/app/decorators/mapper/exception_mapper.py @@ -1,5 +1,6 @@ """Decorators to rethrow and log exceptions.""" +import asyncio from functools import wraps from inspect import iscoroutinefunction from typing import Any, Callable, Type @@ -81,6 +82,9 @@ def _handle_exception_logic( args: tuple[Any, ...], kwargs: dict[str, Any], ) -> None: + if isinstance(exc, asyncio.CancelledError): + raise + if exception_factory := self._get_exception_factory(type(exc)): context 
= ExceptionContext(exc, func, self._filtered_args(args), kwargs) raise exception_factory.make_exception(context) from exc diff --git a/app/infrastructure/containers.py b/app/infrastructure/containers.py index e7622bc..2405ef7 100644 --- a/app/infrastructure/containers.py +++ b/app/infrastructure/containers.py @@ -1,7 +1,7 @@ import asyncio from dependency_injector import containers, providers -from dependency_injector.providers import Callable, Factory, Singleton +from dependency_injector.providers import Callable, Factory from httpx import AsyncClient, Limits from pybotx import Bot, HandlerCollector from redis import asyncio as aioredis diff --git a/app/infrastructure/repositories/sample_record.py b/app/infrastructure/repositories/sample_record.py index bf4ce46..1b7c31f 100644 --- a/app/infrastructure/repositories/sample_record.py +++ b/app/infrastructure/repositories/sample_record.py @@ -72,7 +72,6 @@ async def create(self, record: SampleRecord) -> SampleRecord: .returning(SampleRecordModel) ) result = await self._session.execute(query) - await self._session.flush() record_model = result.scalar_one() return self._to_domain_object(record_model) @@ -91,7 +90,6 @@ async def update(self, record: SampleRecord) -> SampleRecord: .returning(SampleRecordModel) ) execute_result = (await self._session.execute(query)).scalar_one_or_none() - await self._session.flush() if execute_result is None: raise RecordDoesNotExistError( f"Sample record with id={record.id} does not exist." @@ -125,8 +123,6 @@ async def delete(self, record_id: int) -> None: f"Sample record with id={record_id} does not exist." 
) - await self._session.flush() - @ExceptionMapper( { NoResultFound: EnrichedExceptionFactory(RecordDoesNotExistError), diff --git a/app/infrastructure/worker/worker.py b/app/infrastructure/worker/worker.py index b8272ea..2ebf461 100644 --- a/app/infrastructure/worker/worker.py +++ b/app/infrastructure/worker/worker.py @@ -7,7 +7,7 @@ from redis.asyncio import Redis from saq import CronJob, Queue -from app.infrastructure.containers import WorkerStartupContainer, BaseStartupContainer +from app.infrastructure.containers import BaseStartupContainer, WorkerStartupContainer from app.infrastructure.worker.tasks.simple_task import heartbeat_task from app.logger import logger from app.settings import settings diff --git a/app/presentation/bot/command_handlers/sample_record.py b/app/presentation/bot/command_handlers/sample_record.py index cef95f9..b904355 100644 --- a/app/presentation/bot/command_handlers/sample_record.py +++ b/app/presentation/bot/command_handlers/sample_record.py @@ -17,8 +17,8 @@ SendErrorExplainToUserHandler, ) from app.presentation.bot.error_handlers.exceptions_chain_executor import ( - DEFAULT_HANDLERS, - ExceptionHandlersChainExecutor, DEFAULT_HANDLERS_WITH_EXPLAIN, + DEFAULT_HANDLERS_WITH_EXPLAIN, + ExceptionHandlersChainExecutor, ) from app.presentation.bot.resources.strings import ( SAMPLE_RECORD_CREATED_ANSWER, @@ -48,9 +48,6 @@ class CreateSampleRecordHandler(BaseCommandHandler): } ) ] - exception_handler_chain_executor = ExceptionHandlersChainExecutor( - _EXCEPTIONS_HANDLERS - ) def __init__( self, @@ -62,7 +59,11 @@ def __init__( self._use_cases = use_case_factory self.unit_of_work = unit_of_work - super().__init__(bot, message, self.exception_handler_chain_executor) + exception_handler_chain_executor = ExceptionHandlersChainExecutor( + self._EXCEPTIONS_HANDLERS + ) + + super().__init__(bot, message, exception_handler_chain_executor) async def handle_logic( self, @@ -94,9 +95,6 @@ class DeleteSampleRecordHandler(BaseCommandHandler): } ) ] - 
exception_handler_chain_executor = ExceptionHandlersChainExecutor( - _EXCEPTIONS_HANDLERS - ) def __init__( self, @@ -107,8 +105,11 @@ def __init__( ): self._use_cases = use_case_factory self.unit_of_work = unit_of_work + exception_handler_chain_executor = ExceptionHandlersChainExecutor( + self._EXCEPTIONS_HANDLERS + ) - super().__init__(bot, message, self.exception_handler_chain_executor) + super().__init__(bot, message, exception_handler_chain_executor) async def handle_logic( self, @@ -153,7 +154,11 @@ def __init__( self._use_cases = use_case_factory self.unit_of_work = unit_of_work - super().__init__(bot, message, self.exception_handler_chain_executor) + exception_handler_chain_executor = ExceptionHandlersChainExecutor( + self._EXCEPTIONS_HANDLERS + ) + + super().__init__(bot, message, exception_handler_chain_executor) async def handle_logic( self, From 890ba588e5c2b79208d5e397a41571384ae07b5e Mon Sep 17 00:00:00 2001 From: vladimirgubarik Date: Fri, 8 Aug 2025 12:24:11 +0300 Subject: [PATCH 12/15] refactor lint --- app/infrastructure/containers.py | 6 +- .../repositories/sample_record.py | 4 +- .../repositories/unit_of_work.py | 45 ++++++++---- .../bot/command_handlers/base_handler.py | 2 +- .../bot/command_handlers/sample_record.py | 62 ++++++++++------- .../bot/error_handlers/base_handlers.py | 3 +- .../exceptions_chain_executor.py | 59 ++++++++++++---- pyproject.toml | 3 + .../integration/bot_commands/bot_factories.py | 10 +-- .../bot_commands/test_sample_record.py | 8 +-- .../test_sample_record_repository.py | 13 ++-- .../error_handlers/test_chain_executor.py | 69 +++++++++++++++++++ 12 files changed, 212 insertions(+), 72 deletions(-) diff --git a/app/infrastructure/containers.py b/app/infrastructure/containers.py index 2405ef7..7d46516 100644 --- a/app/infrastructure/containers.py +++ b/app/infrastructure/containers.py @@ -1,10 +1,12 @@ import asyncio +from typing import Callable as TypeCallable from dependency_injector import containers, providers from 
dependency_injector.providers import Callable, Factory from httpx import AsyncClient, Limits from pybotx import Bot, HandlerCollector from redis import asyncio as aioredis +from sqlalchemy.ext.asyncio import AsyncSession from app.application.use_cases.record_use_cases import SampleRecordUseCases from app.infrastructure.repositories.caching.callback_redis_repo import ( @@ -29,7 +31,9 @@ class BotSampleRecordCommandContainer(containers.DeclarativeContainer): - session_factory = providers.Dependency() + session_factory: providers.Dependency[TypeCallable[[], AsyncSession]] = ( + providers.Dependency() + ) ro_unit_of_work: Factory[ReadOnlySampleRecordUnitOfWork] = Factory( ReadOnlySampleRecordUnitOfWork, session_factory diff --git a/app/infrastructure/repositories/sample_record.py b/app/infrastructure/repositories/sample_record.py index 1b7c31f..cb1792b 100644 --- a/app/infrastructure/repositories/sample_record.py +++ b/app/infrastructure/repositories/sample_record.py @@ -21,14 +21,14 @@ from app.decorators.mapper.exception_mapper import ( ExceptionMapper, ) -from app.decorators.mapper.factories import EnrichedExceptionFactory +from app.decorators.mapper.factories import ContextAwareError, EnrichedExceptionFactory from app.domain.entities.sample_record import SampleRecord from app.infrastructure.db.sample_record.models import SampleRecordModel from app.infrastructure.db.sqlalchemy import AsyncSession class IntegrityErrorFactory(EnrichedExceptionFactory): - def make_exception(self, context: ExceptionContext) -> Exception: + def make_exception(self, context: ExceptionContext) -> ContextAwareError: if not (orig := getattr(context.original_exception, "orig", None)): return self.generated_error(context.formatted_context) diff --git a/app/infrastructure/repositories/unit_of_work.py b/app/infrastructure/repositories/unit_of_work.py index ca7d2dd..ade1c70 100644 --- a/app/infrastructure/repositories/unit_of_work.py +++ b/app/infrastructure/repositories/unit_of_work.py @@ -1,4 
+1,5 @@ -import asyncio +from types import TracebackType +from typing import Self from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker @@ -10,38 +11,56 @@ class ReadOnlySampleRecordUnitOfWork(ISampleRecordUnitOfWork): + def __init__(self, session_factory: async_sessionmaker): + super().__init__() + self.session_factory = session_factory + self._session: AsyncSession | None = None + def get_sample_record_repository(self) -> ISampleRecordRepository: if not self._session: raise RuntimeError("Session is not initialized") return SampleRecordRepository(self._session) - def __init__(self, session_factory: async_sessionmaker): - super().__init__() - self.session_factory = session_factory - self._session: AsyncSession | None = None - - async def __aenter__(self): + async def __aenter__(self) -> Self: self._session = self.session_factory() return self - async def __aexit__(self, exc_type, exc_val, exc_tb): + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if not self._session: + return + try: - # Recommended for explicit resources cleanup - await self._session.rollback() + if exc_type is not None or self._session.in_transaction(): + await self._session.rollback() finally: await self._session.close() + self._session = None class WriteSampleRecordUnitOfWork(ReadOnlySampleRecordUnitOfWork): """Unit of Work for write operations with full transaction management.""" - async def __aenter__(self): + async def __aenter__(self) -> Self: self._session = self.session_factory() - await asyncio.wait_for(self._session.begin(), timeout=5) + + await self._session.begin() return self - async def __aexit__(self, exc_type, exc_val, exc_tb): + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if not self._session: + return + try: if exc_type: await self._session.rollback() diff --git 
a/app/presentation/bot/command_handlers/base_handler.py b/app/presentation/bot/command_handlers/base_handler.py index 73b8eef..d16854e 100644 --- a/app/presentation/bot/command_handlers/base_handler.py +++ b/app/presentation/bot/command_handlers/base_handler.py @@ -15,7 +15,7 @@ def __init__( self, bot: Bot, message: IncomingMessage, - exception_handler_executor: ExceptionHandlersChainExecutor | None, + exception_handler_executor: ExceptionHandlersChainExecutor, ): self._bot = bot self._message = message diff --git a/app/presentation/bot/command_handlers/sample_record.py b/app/presentation/bot/command_handlers/sample_record.py index b904355..92095a3 100644 --- a/app/presentation/bot/command_handlers/sample_record.py +++ b/app/presentation/bot/command_handlers/sample_record.py @@ -19,6 +19,7 @@ from app.presentation.bot.error_handlers.exceptions_chain_executor import ( DEFAULT_HANDLERS_WITH_EXPLAIN, ExceptionHandlersChainExecutor, + HandlerOrHandlerClass, ) from app.presentation.bot.resources.strings import ( SAMPLE_RECORD_CREATED_ANSWER, @@ -39,15 +40,18 @@ class CreateSampleRecordHandler(BaseCommandHandler): incoming_argument_parser = BotXJsonRequestParser(SampleRecordCreateRequestSchema) - _EXCEPTIONS_HANDLERS = DEFAULT_HANDLERS_WITH_EXPLAIN + [ - SendErrorExplainToUserHandler( - exception_explain_mapping={ - RecordAlreadyExistsError: "Запись с такими параметрами уже существует", - RecordCreateError: "Внутренняя ошибка создания записи", - MessageValidationError: "Неправильный формат данных", - } - ) - ] + _EXCEPTIONS_HANDLERS: list[HandlerOrHandlerClass] = ( + DEFAULT_HANDLERS_WITH_EXPLAIN + + [ + SendErrorExplainToUserHandler( + exception_explain_mapping={ + RecordAlreadyExistsError: "Запись с такими параметрами существует", + RecordCreateError: "Внутренняя ошибка создания записи", + MessageValidationError: "Неправильный формат данных", + } + ) + ] + ) def __init__( self, @@ -87,14 +91,17 @@ class DeleteSampleRecordHandler(BaseCommandHandler): 
SampleRecordGetOrDeleteRequestSchema ) - _EXCEPTIONS_HANDLERS = DEFAULT_HANDLERS_WITH_EXPLAIN + [ - SendErrorExplainToUserHandler( - exception_explain_mapping={ - RecordDoesNotExistError: "Запиcь с указанным id не найдена", - MessageValidationError: "Неправильный формат данных", - } - ) - ] + _EXCEPTIONS_HANDLERS: list[HandlerOrHandlerClass] = ( + DEFAULT_HANDLERS_WITH_EXPLAIN + + [ + SendErrorExplainToUserHandler( + exception_explain_mapping={ + RecordDoesNotExistError: "Запиcь с указанным id не найдена", + MessageValidationError: "Неправильный формат данных", + } + ) + ] + ) def __init__( self, @@ -132,14 +139,17 @@ class GetSampleRecordHandler(BaseCommandHandler): SampleRecordGetOrDeleteRequestSchema ) - _EXCEPTIONS_HANDLERS = DEFAULT_HANDLERS_WITH_EXPLAIN + [ - SendErrorExplainToUserHandler( - exception_explain_mapping={ - RecordDoesNotExistError: "Запиcь с указанным id не найдена", - MessageValidationError: "Неправильный формат данных", - } - ) - ] + _EXCEPTIONS_HANDLERS: list[HandlerOrHandlerClass] = ( + DEFAULT_HANDLERS_WITH_EXPLAIN + + [ + SendErrorExplainToUserHandler( + exception_explain_mapping={ + RecordDoesNotExistError: "Запиcь с указанным id не найдена", + MessageValidationError: "Неправильный формат данных", + } + ) + ] + ) exception_handler_chain_executor = ExceptionHandlersChainExecutor( _EXCEPTIONS_HANDLERS ) @@ -162,7 +172,7 @@ def __init__( async def handle_logic( self, - request_parameter: SampleRecordGetOrDeleteRequestSchema, + request_parameter: SampleRecordGetOrDeleteRequestSchema, # type: ignore ) -> None: async with self.unit_of_work as uof: record = await self._use_cases( diff --git a/app/presentation/bot/error_handlers/base_handlers.py b/app/presentation/bot/error_handlers/base_handlers.py index b0076bb..f7f9fac 100644 --- a/app/presentation/bot/error_handlers/base_handlers.py +++ b/app/presentation/bot/error_handlers/base_handlers.py @@ -67,10 +67,11 @@ async def handle_exception( await self.next_handler.handle_exception( exc, bot, 
message, exception_id ) - except Exception as exc: + except Exception as inner_exc: logger.error( f"Error handling exception {exception_id}", exc_info=True, + exc=inner_exc, ) if self._stop_on_failure: return diff --git a/app/presentation/bot/error_handlers/exceptions_chain_executor.py b/app/presentation/bot/error_handlers/exceptions_chain_executor.py index 5399e07..47d421d 100644 --- a/app/presentation/bot/error_handlers/exceptions_chain_executor.py +++ b/app/presentation/bot/error_handlers/exceptions_chain_executor.py @@ -1,3 +1,4 @@ +from abc import ABCMeta from uuid import uuid4 from pybotx import Bot, IncomingMessage @@ -9,6 +10,10 @@ SendErrorExplainToUserHandler, ) +HandlerOrHandlerClass = ( + AbstractExceptionHandler | type[AbstractExceptionHandler] | ABCMeta +) + class ExceptionHandlersChainExecutor: """ @@ -24,7 +29,8 @@ class ExceptionHandlersChainExecutor: """ def __init__( - self, handlers: list[type[AbstractExceptionHandler] | AbstractExceptionHandler] + self, + handlers: list[HandlerOrHandlerClass] | None = None, ): self._chain_head, self._chain_tail = self._create_chain(handlers) @@ -52,13 +58,26 @@ async def execute_chain( await self._chain_head.handle_exception(exc, bot, message, exception_id) def _get_handler( - self, handler: AbstractExceptionHandler | type[AbstractExceptionHandler] + self, + handler: HandlerOrHandlerClass, ) -> AbstractExceptionHandler: return handler if isinstance(handler, AbstractExceptionHandler) else handler() def _create_chain( - self, handlers: list[type[AbstractExceptionHandler] | AbstractExceptionHandler] + self, + handlers: list[HandlerOrHandlerClass] | None = None, ) -> tuple[AbstractExceptionHandler | None, AbstractExceptionHandler | None]: + """ + Create a linked list of exception handlers from the given list. + + This method takes a sequence of exception handler classes or instances + and chains them together into a linked list. The returned tuple contains + the head and tail of the constructed chain. 
+ + warning: + This method modifies the passed objects of + class:`AbstractExceptionHandler` type in place. + """ if not handlers: return None, None @@ -71,29 +90,39 @@ def _create_chain( tail_handler = new_tail_handler return head_handler, tail_handler - def extend( - self, handlers: list[AbstractExceptionHandler | type[AbstractExceptionHandler]] - ): + def extend(self, handlers: list[HandlerOrHandlerClass] | None) -> None: """Append handlers to the chain""" + if not handlers: + return + new_head, new_tail = self._create_chain(handlers) - if self._chain_head is None: + if self._is_empty(): self._chain_head = new_head else: - self._chain_tail.next_handler = new_head + # The tail and head cannot be None at the same time. + self._chain_tail.next_handler = new_head # type: ignore self._chain_tail = new_tail - def append( - self, handler: AbstractExceptionHandler | type[AbstractExceptionHandler] - ): - """Append handler to the end of chain""" + def append(self, handler: HandlerOrHandlerClass) -> None: + """Append handler to the end of a chain""" new_tail = self._get_handler(handler) - self._chain_tail.next_handler = new_tail + if self._is_empty(): + self._chain_head = new_tail + self._chain_tail = new_tail + else: + self._chain_tail.next_handler = new_tail # type:ignore + + def _is_empty(self) -> bool: + return self._chain_head is None and self._chain_tail is None -DEFAULT_HANDLERS = [ + +DEFAULT_HANDLERS: list[HandlerOrHandlerClass] = [ LoggingExceptionHandler, DropFSMOnErrorHandler, ] -DEFAULT_HANDLERS_WITH_EXPLAIN = DEFAULT_HANDLERS + [SendErrorExplainToUserHandler] +DEFAULT_HANDLERS_WITH_EXPLAIN: list[HandlerOrHandlerClass] = DEFAULT_HANDLERS + [ + SendErrorExplainToUserHandler +] diff --git a/pyproject.toml b/pyproject.toml index de817c7..598979e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,6 +64,9 @@ deepdiff = "^8.5.0" +[tool.poetry.group.dev.dependencies] +types-psycopg2 = "^2.9.21.20250718" + [build-system] requires = ["poetry>=1.1.12"] 
build-backend = "poetry.masonry.api" diff --git a/tests/integration/bot_commands/bot_factories.py b/tests/integration/bot_commands/bot_factories.py index edd8620..c7ef06c 100644 --- a/tests/integration/bot_commands/bot_factories.py +++ b/tests/integration/bot_commands/bot_factories.py @@ -71,14 +71,14 @@ class Meta: class IncomingMessageFactory(Factory): """Factory for incoming messages.""" - bot: BotAccount = factory.SubFactory(BotAccountFactory) - sync_id: UUID = factory.Faker("uuid4") + bot: BotAccount = factory.SubFactory(BotAccountFactory) # type: ignore + sync_id: UUID = factory.Faker("uuid4") # type: ignore source_sync_id: Optional[UUID] = None - body: str = factory.Faker("text", max_nb_chars=100) + body: str = factory.Faker("text", max_nb_chars=100) # type: ignore data: dict[str, Any] = {} metadata: dict = {} - sender: UserSender = factory.SubFactory(UserSenderFactory) - chat: Chat = factory.SubFactory(ChatFactory) + sender: UserSender = factory.SubFactory(UserSenderFactory) # type: ignore + chat: Chat = factory.SubFactory(ChatFactory) # type: ignore raw_command: Optional[str] = None class Meta: diff --git a/tests/integration/bot_commands/test_sample_record.py b/tests/integration/bot_commands/test_sample_record.py index 96369ec..898d9ce 100644 --- a/tests/integration/bot_commands/test_sample_record.py +++ b/tests/integration/bot_commands/test_sample_record.py @@ -19,7 +19,7 @@ async def test_sample_record_created( await bot.async_execute_bot_command(message) # Check db object existing - db_object: SampleRecordModel = await isolated_session.scalar( + db_object: SampleRecordModel = await isolated_session.scalar( # type: ignore select(SampleRecordModel).where(SampleRecordModel.name == request_data["name"]) ) @@ -27,7 +27,7 @@ async def test_sample_record_created( assert db_object.name == request_data["name"] # Check bot answer - assert bot.answer_message.call_args[0][0] == ( + assert bot.answer_message.call_args[0][0] == ( # type: ignore f"Запись успешно 
создана:\n**id**: {db_object.id} " f"**name**: {db_object.name} " f"**record_data**: {db_object.record_data}." @@ -47,7 +47,7 @@ async def test_sample_record_delete( await bot.async_execute_bot_command(message) # Check db object non existing - db_object: SampleRecordModel = await isolated_session.scalar( + db_object: SampleRecordModel = await isolated_session.scalar( # type: ignore select(SampleRecordModel).where(SampleRecordModel.id == existing_record.id) ) @@ -70,7 +70,7 @@ async def test_get_sample_record( await bot.async_execute_bot_command(message) # Check bot answer - assert bot.answer_message.call_args[0][0] == ( + assert bot.answer_message.call_args[0][0] == ( # type:ignore "Запись найдена:\n" f"**id**: {existing_record.id} " f"**name**: {existing_record.name} " diff --git a/tests/integration/repository/test_sample_record_repository.py b/tests/integration/repository/test_sample_record_repository.py index 7d52892..55fcbba 100644 --- a/tests/integration/repository/test_sample_record_repository.py +++ b/tests/integration/repository/test_sample_record_repository.py @@ -17,9 +17,11 @@ def assert_database_object_equal_domain( - db_object: SampleRecordModel, domain_object: SampleRecord + db_object: SampleRecordModel | None, domain_object: SampleRecord ) -> None: """Assert that database object and domain object are equal.""" + + assert db_object is not None assert db_object.id == domain_object.id assert db_object.record_data == domain_object.record_data assert db_object.name == domain_object.name @@ -44,7 +46,7 @@ async def test_add_record( db_object = await isolated_session.scalar( select(SampleRecordModel).where(SampleRecordModel.id == created_record.id) ) - assert_database_object_equal_domain(db_object, created_record) + assert_database_object_equal_domain(db_object, created_record) # type:ignore async def test_add_record_with_non_unique_name( @@ -65,7 +67,7 @@ async def test_create_record_with_null_required_field( ): """Test creating a record with null required 
field raises ValidationError.""" invalid_record = SampleRecord(record_data="test_add", name="test_name") # type: ignore - invalid_record.record_data = None + invalid_record.record_data = None # type:ignore with pytest.raises(ValidationError): await sample_record_repository.create(invalid_record) @@ -115,7 +117,10 @@ async def test_update_record( select(SampleRecordModel).where(SampleRecordModel.id == existing_record.id) ) - assert_database_object_equal_domain(record_from_db, updated_record_from_repo) + assert_database_object_equal_domain( + record_from_db, + updated_record_from_repo, + ) async def test_update_record_with_non_unique_name( diff --git a/tests/unit/error_handlers/test_chain_executor.py b/tests/unit/error_handlers/test_chain_executor.py index 51b520d..cff7484 100644 --- a/tests/unit/error_handlers/test_chain_executor.py +++ b/tests/unit/error_handlers/test_chain_executor.py @@ -20,6 +20,75 @@ async def test_chain_usual_case(): assert execution_history == [0, 1, 2] +async def test_chain_extend(): + """Test extend method.""" + execution_history = [] + handlers = [ + TestExceptionHandler(index=index, run_history=execution_history) + for index in range(3) + ] + + chain_executor = ExceptionHandlersChainExecutor(handlers) + + extended_handlers = [ + TestExceptionHandler(index=index, run_history=execution_history) + for index in range(3, 5) + ] + + chain_executor.extend(extended_handlers) + + await chain_executor.execute_chain(MagicMock(), MagicMock(), MagicMock()) + + assert execution_history == [0, 1, 2, 3, 4] + + +async def test_chain_extend_to_empty_list(): + """Test extend method.""" + execution_history = [] + + chain_executor = ExceptionHandlersChainExecutor([]) + + extended_handlers = [ + TestExceptionHandler(index=index, run_history=execution_history) + for index in range(3, 5) + ] + + chain_executor.extend(extended_handlers) + + await chain_executor.execute_chain(MagicMock(), MagicMock(), MagicMock()) + + assert execution_history == [3, 4] + + +async 
def test_chain_append(): + """Test append method.""" + execution_history = [] + handlers = [ + TestExceptionHandler(index=index, run_history=execution_history) + for index in range(3) + ] + + chain_executor = ExceptionHandlersChainExecutor(handlers) + + chain_executor.append(TestExceptionHandler(index=3, run_history=execution_history)) + + await chain_executor.execute_chain(MagicMock(), MagicMock(), MagicMock()) + + assert execution_history == [0, 1, 2, 3] + + +async def test_chain_append_on_the_empty_init_chain(): + """Test append method then init chain is empty.""" + execution_history = [] + chain_executor = ExceptionHandlersChainExecutor([]) + + chain_executor.append(TestExceptionHandler(index=3, run_history=execution_history)) + + await chain_executor.execute_chain(MagicMock(), MagicMock(), MagicMock()) + + assert execution_history == [3] + + async def test_executor_call_only_right_handlers(): execution_history = [] handlers = [ From becc0bde4484b1af38a74c0ab51b1add6b51d14e Mon Sep 17 00:00:00 2001 From: vladimirgubarik Date: Fri, 8 Aug 2025 12:32:09 +0300 Subject: [PATCH 13/15] refactor lint --- app/infrastructure/db/sqlalchemy.py | 26 -------------------------- 1 file changed, 26 deletions(-) diff --git a/app/infrastructure/db/sqlalchemy.py b/app/infrastructure/db/sqlalchemy.py index 09ce14a..830552e 100644 --- a/app/infrastructure/db/sqlalchemy.py +++ b/app/infrastructure/db/sqlalchemy.py @@ -54,29 +54,3 @@ def get_engine() -> AsyncEngine: def get_session_factory() -> async_sessionmaker: engine = get_engine() return async_sessionmaker(bind=engine, expire_on_commit=False) - - -# -# def provide_session(func: Callable) -> Callable: -# """ -# Provides a database session to an async function if one is not already passed. -# -# :param func: The asynchronous function to wrap. It must accept a `session` -# keyword argument. 
-# :return: The wrapped function with automatic session provisioning.""" -# -# @wraps(func) -# async def wrapper(*args: Any, **kwargs: Any) -> Any: -# if kwargs.get("session"): -# return await func(*args, **kwargs) -# -# async with session_factory() as session: -# try: -# return await func(*args, **kwargs, session=session) -# except Exception: -# await session.rollback() -# raise -# finally: -# await session.close() -# -# return wrapper From 0dbb8ba8512d0024879f60c22d158d6ba97fd95b Mon Sep 17 00:00:00 2001 From: vladimirgubarik Date: Fri, 8 Aug 2025 15:06:26 +0300 Subject: [PATCH 14/15] remove unused --- app/presentation/bot/bot.py | 34 ---------------------------------- 1 file changed, 34 deletions(-) delete mode 100644 app/presentation/bot/bot.py diff --git a/app/presentation/bot/bot.py b/app/presentation/bot/bot.py deleted file mode 100644 index 38a66a9..0000000 --- a/app/presentation/bot/bot.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Configuration for bot instance.""" - -from httpx import AsyncClient, Limits -from pybotx import Bot, CallbackRepoProto - -from app.presentation.bot.commands import common, sample_record -from app.presentation.bot.error_handlers.internal_error_handler import ( - internal_error_handler, -) -from app.presentation.bot.middlewares.answer_error import answer_error_middleware -from app.presentation.bot.middlewares.smart_logger import smart_logger_middleware -from app.settings import settings - - -def get_bot(callback_repo: CallbackRepoProto) -> Bot: - exception_handlers = {} - if not settings.RAISE_BOT_EXCEPTIONS: - exception_handlers[Exception] = internal_error_handler - - return Bot( - collectors=[common.collector, sample_record.collector], - bot_accounts=settings.BOT_CREDENTIALS, - exception_handlers=exception_handlers, # type: ignore - default_callback_timeout=settings.BOTX_CALLBACK_TIMEOUT_IN_SECONDS, - httpx_client=AsyncClient( - timeout=60, - limits=Limits(max_keepalive_connections=None, max_connections=None), - ), - middlewares=[ - 
smart_logger_middleware, - answer_error_middleware, - ], - callback_repo=callback_repo, - ) From f274924a500bc0a148cec5e1d66a5458cf1744f7 Mon Sep 17 00:00:00 2001 From: vladimirgubarik Date: Fri, 8 Aug 2025 19:50:30 +0300 Subject: [PATCH 15/15] bump pydantic to >2.8 --- app/application/use_cases/record_use_cases.py | 8 +- app/presentation/bot/schemas/sample_record.py | 14 +-- app/presentation/bot/validators/base.py | 10 +- app/settings.py | 96 +++++++++---------- pyproject.toml | 11 ++- 5 files changed, 66 insertions(+), 73 deletions(-) diff --git a/app/application/use_cases/record_use_cases.py b/app/application/use_cases/record_use_cases.py index 924a436..93666b9 100644 --- a/app/application/use_cases/record_use_cases.py +++ b/app/application/use_cases/record_use_cases.py @@ -23,7 +23,7 @@ async def create_record( domain_object = SampleRecord( record_data=request_object.record_data, name=request_object.name ) - created_record = SampleRecordResponseSchema.from_orm( + created_record = SampleRecordResponseSchema.model_validate( await self._repo.create(domain_object) ) @@ -38,7 +38,7 @@ async def update_record( id=update_request.id, name=update_request.name, ) - updated_record = SampleRecordResponseSchema.from_orm( + updated_record = SampleRecordResponseSchema.model_validate( await self._repo.update(domain_object) ) return updated_record @@ -50,12 +50,12 @@ async def delete_record(self, record_id: int) -> None: async def get_record(self, record_id: int) -> SampleRecordResponseSchema: """Get a record by ID.""" fetched_record = await self._repo.get_by_id(record_id) - return SampleRecordResponseSchema.from_orm(fetched_record) + return SampleRecordResponseSchema.model_validate(fetched_record) async def get_all_records(self) -> SampleRecordResponseListSchema: """Get all records.""" fetched_records = await self._repo.get_all() response_records = [ - SampleRecordResponseSchema.from_orm(record) for record in fetched_records + SampleRecordResponseSchema.model_validate(record) 
for record in fetched_records ] return SampleRecordResponseListSchema(data=response_records) diff --git a/app/presentation/bot/schemas/sample_record.py b/app/presentation/bot/schemas/sample_record.py index 1358816..83d5d64 100644 --- a/app/presentation/bot/schemas/sample_record.py +++ b/app/presentation/bot/schemas/sample_record.py @@ -1,6 +1,6 @@ """Domains.""" -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, ConfigDict class SampleRecordResponseSchema(BaseModel): @@ -10,15 +10,13 @@ class SampleRecordResponseSchema(BaseModel): record_data: str name: str - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class SampleRecordResponseListSchema(BaseModel): data: list[SampleRecordResponseSchema] - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class SampleRecordCreateRequestSchema( @@ -27,8 +25,7 @@ class SampleRecordCreateRequestSchema( record_data: str = Field(..., min_length=1, max_length=128) name: str = Field(..., min_length=1, max_length=32) - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class SampleRecordUpdateRequestSchema( @@ -38,8 +35,7 @@ class SampleRecordUpdateRequestSchema( record_data: str = Field(..., min_length=1, max_length=128) name: str = Field(..., min_length=1, max_length=32) - class Config: - orm_mode = True + model_config = ConfigDict(from_attributes=True) class SampleRecordGetOrDeleteRequestSchema(BaseModel): diff --git a/app/presentation/bot/validators/base.py b/app/presentation/bot/validators/base.py index 8578702..6493258 100644 --- a/app/presentation/bot/validators/base.py +++ b/app/presentation/bot/validators/base.py @@ -25,7 +25,7 @@ def __init__(self, model: type[T]): @ExceptionMapper( { - (JSONDecodeError, ValidationError): EnrichedExceptionFactory( + Exception: EnrichedExceptionFactory( MessageValidationError ) }, @@ -33,7 +33,7 @@ def __init__(self, model: type[T]): ) def parse(self, raw_input: 
IncomingMessage) -> T: message_json = orjson.loads(raw_input.argument) - return self.model.parse_obj(message_json) + return self.model.model_validate(message_json) class BotXPlainRequestParser(IBotRequestParser[T]): @@ -46,14 +46,14 @@ def __init__(self, model: type[T]): self.model = model @ExceptionMapper( - {ValidationError: EnrichedExceptionFactory(MessageValidationError)}, + {Exception: EnrichedExceptionFactory(MessageValidationError)}, is_bound_method=True, ) def parse(self, raw_input: IncomingMessage) -> T: if not (message_args := raw_input.argument.strip().split(" ")): raise ValidationError("Message is empty", self.model) - fields = self.model.__fields__.keys() + fields = (self.model.model_fields.keys()) message_kwargs = dict(zip(fields, message_args, strict=True)) - return self.model.parse_obj(message_kwargs) + return self.model.model_validate(message_kwargs) diff --git a/app/settings.py b/app/settings.py index 97cb878..35bd67f 100644 --- a/app/settings.py +++ b/app/settings.py @@ -4,66 +4,23 @@ from uuid import UUID from pybotx import BotAccountWithSecret -from pydantic import BaseSettings, Field, PositiveInt +from pydantic import Field, PositiveInt, field_validator, model_validator +from pydantic_settings import BaseSettings, SettingsConfigDict class AppSettings(BaseSettings): - class Config: - env_file = ".env" - - @classmethod - def parse_env_var(cls, field_name: str, raw_val: str) -> Any: - if field_name == "BOT_CREDENTIALS": - if not raw_val: - return [] - - return [ - cls._build_credentials_from_string(credentials_str) - for credentials_str in raw_val.replace(",", " ").split() - ] - elif field_name == "SMARTLOG_DEBUG_HUIDS": - return cls.parse_smartlog_debug_huids(raw_val) - - return cls.json_loads(raw_val) # type: ignore - - @classmethod - def parse_smartlog_debug_huids(cls, raw_huids: Any) -> List[UUID]: - """Parse debug huids separated by comma.""" - if not raw_huids: - return [] - - return [UUID(huid) for huid in raw_huids.split(",")] - - 
@classmethod - def _build_credentials_from_string( - cls, credentials_str: str - ) -> BotAccountWithSecret: - credentials_str = credentials_str.replace("|", "@") - assert credentials_str.count("@") == 2, "Have you forgot to add `bot_id`?" - - cts_url, secret_key, bot_id = [ - str_value.strip() for str_value in credentials_str.split("@") - ] - - if "://" not in cts_url: - cts_url = f"https://{cts_url}" - - return BotAccountWithSecret( - id=UUID(bot_id), - cts_url=cts_url, # type: ignore[arg-type] - secret_key=secret_key, - ) - - BOT_CREDENTIALS: List[BotAccountWithSecret] + model_config = SettingsConfigDict(env_file=".env", extra="ignore") # base kwargs DEBUG: bool = False + BOT_CREDENTIALS: list[BotAccountWithSecret] | str + # hide original exceptions from bot user RAISE_BOT_EXCEPTIONS: bool = False # User huids for debug - SMARTLOG_DEBUG_HUIDS: List[UUID] + SMARTLOG_DEBUG_HUIDS: List[UUID] | str # database POSTGRES_DSN: str @@ -81,8 +38,45 @@ def _build_credentials_from_string( WORKER_CONCURRENCY: int = 2 WORKERS_COUNT: int = 1 - BOTX_CALLBACK_TIMEOUT_IN_SECONDS = 30 - BOT_ASYNC_CLIENT_TIMEOUT_IN_SECONDS = 60 + BOTX_CALLBACK_TIMEOUT_IN_SECONDS: int = 30 + BOT_ASYNC_CLIENT_TIMEOUT_IN_SECONDS: int = 60 + + @field_validator("SMARTLOG_DEBUG_HUIDS", mode="before") + @classmethod + def parse_smartlog_debug_huids(cls, v): + if not v: + return [] + return [UUID(huid) for huid in v.split(",")] + + @field_validator("BOT_CREDENTIALS", mode="before") + @classmethod + def parse_bot_credentials(cls, v): + if not v: + return [] + # parse your raw env string here + return [ + cls._build_credentials_from_string(credentials_str) + for credentials_str in v.replace(",", " ").split() + ] + + @classmethod + def _build_credentials_from_string( + cls, credentials_str: str + ) -> BotAccountWithSecret: + credentials_str = credentials_str.replace("|", "@") + + cts_url, secret_key, bot_id = [ + str_value.strip() for str_value in credentials_str.split("@") + ] + + if "://" not in cts_url: + 
cts_url = f"https://{cts_url}" + + return BotAccountWithSecret( + id=UUID(bot_id), + cts_url=cts_url, # type: ignore[arg-type] + secret_key=secret_key, + ) settings = AppSettings() # type: ignore[call-arg] diff --git a/pyproject.toml b/pyproject.toml index 598979e..8f0a7da 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,10 +8,12 @@ authors = [] [tool.poetry.dependencies] -python = ">=3.9,<3.12" +python = ">3.9,<3.14" + +pybotx = "~0.76.0a1" +#pybotx-smart-logger = "0.12.0a1" +pybotx-smart-logger = { git = "https://github.com/ExpressApp/pybotx-smart-logger.git", branch = "chore/pydantic-pybotx-v-update" } -pybotx = "~0.75.1" -pybotx-smart-logger = "~0.10.1" @@ -22,7 +24,7 @@ uvicorn = { version = "~0.29.0", extras = ["standart"] } loguru = ">=0.6.0,<0.7.0" mako = "~1.2.2" -pydantic = { version = ">=1.10.5,<2.0.0", extras = ["dotenv"] } +pydantic = { version = ">=2.8.2,<3.0", extras = ["dotenv"] } alembic = "~1.13.1" SQLAlchemy = "~2.0.0" @@ -40,6 +42,7 @@ factory-boy = "^3.3.3" async-factory-boy = "^1.0.1" cachetools = "^6.1.0" saq = {extras = ["hiredis", "web"], version = "^0.25.2"} +pydantic-settings = "^2.10.1" [tool.poetry.dev-dependencies]