diff --git a/contemplative-constellations/.dockerignore b/contemplative-constellations/.dockerignore new file mode 100644 index 0000000..817b24e --- /dev/null +++ b/contemplative-constellations/.dockerignore @@ -0,0 +1,9 @@ +.github/ +.git/ +.tests/ +.vscode/ +.editorconfig +.gitattributes +.gitignore +.pre-commit-config.yaml +README.md diff --git a/contemplative-constellations/.editorconfig b/contemplative-constellations/.editorconfig new file mode 100644 index 0000000..9c2f900 --- /dev/null +++ b/contemplative-constellations/.editorconfig @@ -0,0 +1,17 @@ +# Check http://editorconfig.org for more information +# This is the main config file for this project: +root = true + +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +indent_style = space +indent_size = 2 +trim_trailing_whitespace = true + +[*.{py, pyi}] +indent_size = 4 + +[*.md] +trim_trailing_whitespace = false diff --git a/contemplative-constellations/.gitattributes b/contemplative-constellations/.gitattributes new file mode 100644 index 0000000..521ac1b --- /dev/null +++ b/contemplative-constellations/.gitattributes @@ -0,0 +1,2 @@ +# (CRLF sucks, it's just a waste of a byte, Windows is stupid for using it) +* text=auto eol=lf diff --git a/contemplative-constellations/.github/scripts/normalize_coverage.py b/contemplative-constellations/.github/scripts/normalize_coverage.py new file mode 100644 index 0000000..9c056fe --- /dev/null +++ b/contemplative-constellations/.github/scripts/normalize_coverage.py @@ -0,0 +1,9 @@ +import sqlite3 + +connection = sqlite3.connect(".coverage") + +# Normalize windows paths +connection.execute("UPDATE file SET path = REPLACE(path, '\\', '/')") + +connection.commit() +connection.close() diff --git a/contemplative-constellations/.github/workflows/docker.yml b/contemplative-constellations/.github/workflows/docker.yml new file mode 100644 index 0000000..ce77303 --- /dev/null +++ b/contemplative-constellations/.github/workflows/docker.yml @@ -0,0 +1,94 @@ 
+name: Docker + +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +on: workflow_call + +env: + # Use docker.io for Docker Hub if empty + REGISTRY: ghcr.io + # github.repository as / + IMAGE_NAME: ${{ github.repository }} + + +jobs: + build: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + # This is used to complete the identity challenge + # with sigstore/fulcio when running outside of PRs. + id-token: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Install the cosign tool except on PR + # https://github.com/sigstore/cosign-installer + - name: Install cosign + if: github.event_name != 'pull_request' + uses: sigstore/cosign-installer@v3.5.0 + with: + cosign-release: 'v2.2.4' + + # Set up BuildKit Docker container builder to be able to build + # multi-platform images and export cache + # https://github.com/docker/setup-buildx-action + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + # Allows us to build images with different cpu architectures than the github runner (arm64) + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + # Login against a Docker registry except on PR + # https://github.com/docker/login-action + - name: Log into registry ${{ env.REGISTRY }} + if: github.event_name != 'pull_request' + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + # Extract metadata (tags, labels) for Docker + # https://github.com/docker/metadata-action + - name: Extract Docker metadata + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + + # Build and push Docker image with Buildx (don't push on PR) + # https://github.com/docker/build-push-action + - name: Build and push Docker image + id: 
build-and-push + uses: docker/build-push-action@v6 + with: + platforms: linux/amd64,linux/arm64 + context: . + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + + # Sign the resulting Docker image digest except on PRs. + # This will only write to the public Rekor transparency log when the Docker + # repository is public to avoid leaking data. If you would like to publish + # transparency data even for private images, pass --force to cosign below. + # https://github.com/sigstore/cosign + - name: Sign the published Docker image + if: ${{ github.event_name != 'pull_request' }} + env: + # https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable + TAGS: ${{ steps.meta.outputs.tags }} + DIGEST: ${{ steps.build-and-push.outputs.digest }} + # This step uses the identity token to provision an ephemeral certificate + # against the sigstore community Fulcio instance. 
+ run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST} diff --git a/contemplative-constellations/.github/workflows/main.yml b/contemplative-constellations/.github/workflows/main.yml new file mode 100644 index 0000000..bbabed7 --- /dev/null +++ b/contemplative-constellations/.github/workflows/main.yml @@ -0,0 +1,74 @@ +--- +name: CI + +on: + push: + branches: + - main + pull_request: + workflow_dispatch: + +# Cancel already running workflows if new ones are scheduled +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + validation: + uses: ./.github/workflows/validation.yml + + unit-tests: + uses: ./.github/workflows/unit-tests.yml + + pages: + uses: ./.github/workflows/pages.yml + needs: [unit-tests, validation] + permissions: + contents: write + pull-requests: write + + docker: + uses: ./.github/workflows/docker.yml + needs: [unit-tests, validation] + permissions: + packages: write + contents: read + id-token: write + + deploy-portainer: + runs-on: ubuntu-latest + needs: [docker] + env: + WEBHOOK: ${{ secrets.PORTAINER_WEBHOOK }} + if: (github.event_name == 'push' || github.event_name == 'workflow_dispatch') && github.ref == 'refs/heads/main' + steps: + - name: Trigger Portainer Webhook + if: env.WEBHOOK != '' + run: | + response=$(curl -s -X POST -o /dev/null -w "%{http_code}" ${{ secrets.PORTAINER_WEBHOOK }}) + if [[ "$response" -lt 200 || "$response" -ge 300 ]]; then + echo "Webhook trigger failed with response code $response" + exit 1 + fi + + # Produce a pull request payload artifact with various data about the + # pull-request event (such as the PR number, title, author, ...). + # This data is then picked up by status-embed.yml action. 
+ pr_artifact: + name: Produce Pull Request payload artifact + runs-on: ubuntu-latest + + steps: + - name: Prepare Pull Request Payload artifact + id: prepare-artifact + if: always() && github.event_name == 'pull_request' + continue-on-error: true + run: cat $GITHUB_EVENT_PATH | jq '.pull_request' > pull_request_payload.json + + - name: Upload a Build Artifact + if: always() && steps.prepare-artifact.outcome == 'success' + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: pull-request-payload + path: pull_request_payload.json diff --git a/contemplative-constellations/.github/workflows/pages.yml b/contemplative-constellations/.github/workflows/pages.yml new file mode 100644 index 0000000..cd413c8 --- /dev/null +++ b/contemplative-constellations/.github/workflows/pages.yml @@ -0,0 +1,45 @@ +name: marp-to-pages +concurrency: marp-to-pages + +on: + workflow_call: + +jobs: + build: + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + steps: + + - name: Checkout code + uses: actions/checkout@v3 + + - name: Ensure build dir exists + run: mkdir -p build + + - name: Copy images directory (if exists) + run: if [[ -d presentation/img ]]; then cp -R presentation/img build/img; fi + + - name: Marp Build (README) + uses: docker://marpteam/marp-cli:v3.0.2 + with: + args: presentation/presentation.md -o build/index.html + env: + MARP_USER: root:root + + - name: Deploy preview + if: ${{ github.event_name == 'pull_request' }} + uses: rossjrw/pr-preview-action@v1 + with: + source-dir: ./build/ + preview-branch: gh-pages + umbrella-dir: pr-preview + + - name: Deploy production + if: ${{ github.event_name == 'push' }} + uses: JamesIves/github-pages-deploy-action@v4 + with: + branch: gh-pages + folder: ./build/ + clean-exclude: pr-preview/ diff --git a/contemplative-constellations/.github/workflows/unit-tests.yml b/contemplative-constellations/.github/workflows/unit-tests.yml new file mode 100644 index 0000000..608c52d --- /dev/null 
+++ b/contemplative-constellations/.github/workflows/unit-tests.yml @@ -0,0 +1,58 @@ +--- +name: Unit-Tests + +on: workflow_call + +jobs: + unit-tests: + runs-on: ${{ matrix.platform }} + + strategy: + fail-fast: false # Allows for matrix sub-jobs to fail without cancelling the rest + matrix: + platform: [ubuntu-latest, windows-latest] + python-version: ["3.12"] + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Setup poetry + id: poetry_setup + uses: ItsDrike/setup-poetry@v1 + with: + python-version: ${{ matrix.python-version }} + install-args: "--without lint" + + - name: Run pytest + shell: bash + run: | + # Ignore exit code 5 (no tests found) + poetry run pytest -v || ([ $? = 5 ] && exit 0 || exit $?) + + python .github/scripts/normalize_coverage.py + mv .coverage .coverage.${{ matrix.platform }}.${{ matrix.python-version }} + + - name: Upload coverage artifact + uses: actions/upload-artifact@v4 + with: + name: coverage.${{ matrix.platform }}.${{ matrix.python-version }} + path: .coverage.${{ matrix.platform }}.${{ matrix.python-version }} + retention-days: 1 + if-no-files-found: error + + tests-done: + needs: [unit-tests] + if: always() && !cancelled() + runs-on: ubuntu-latest + + steps: + - name: Set status based on required jobs + env: + RESULTS: ${{ join(needs.*.result, ' ') }} + run: | + for result in $RESULTS; do + if [ "$result" != "success" ]; then + exit 1 + fi + done diff --git a/contemplative-constellations/.github/workflows/validation.yml b/contemplative-constellations/.github/workflows/validation.yml new file mode 100644 index 0000000..6d09287 --- /dev/null +++ b/contemplative-constellations/.github/workflows/validation.yml @@ -0,0 +1,45 @@ +--- +name: Validation + +on: workflow_call + +env: + PRE_COMMIT_HOME: "/home/runner/.cache/pre-commit" + +jobs: + lint: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Setup poetry + id: poetry_setup + uses: 
ItsDrike/setup-poetry@v1 + with: + python-version: 3.12 + + - name: Pre-commit Environment Caching + uses: actions/cache@v4 + with: + path: ${{ env.PRE_COMMIT_HOME }} + key: + "precommit-${{ runner.os }}-${{ steps.poetry_setup.outputs.python-version }}-\ + ${{ hashFiles('./.pre-commit-config.yaml') }}" + # Restore keys allows us to perform a cache restore even if the full cache key wasn't matched. + # That way we still end up saving new cache, but we can still make use of the cache from previous + # version. + restore-keys: "precommit-${{ runner.os }}-${{ steps.poetry_setup.outputs.python-version }}-" + + - name: Run pre-commit hooks + run: SKIP=ruff-linter,ruff-formatter,basedpyright pre-commit run --all-files + + - name: Run ruff linter + run: ruff check --output-format=github --show-fixes --exit-non-zero-on-fix . + + - name: Run ruff formatter + run: ruff format --diff . + + - name: Run basedpyright type checker + run: basedpyright --warnings . diff --git a/contemplative-constellations/.gitignore b/contemplative-constellations/.gitignore new file mode 100644 index 0000000..d95e651 --- /dev/null +++ b/contemplative-constellations/.gitignore @@ -0,0 +1,44 @@ +# SQLite database +database.db + +# Files generated by the interpreter +__pycache__/ +*.py[cod] + +# Python virtual environment +.venv + +# Pytest reports +htmlcov/ +.coverage* +coverage.xml + +# Logs +*.log + +# Local python version information (pyenv/rye) +.python-version + +# Built objects +*.so +dist/ +build/ + +# Editor generated files +.idea/ +.vscode/* +!.vscode/extensions.json +.spyproject/ +.spyderproject/ +.replit +.neoconf.json + +# Folder attributes / configuration files on various platforms +.DS_STORE +[Dd]esktop.ini +.directory + +# Environmental, backup and personal files +.env +*.bak +TODO diff --git a/contemplative-constellations/.pre-commit-config.yaml b/contemplative-constellations/.pre-commit-config.yaml new file mode 100644 index 0000000..8e6e7d0 --- /dev/null +++ 
b/contemplative-constellations/.pre-commit-config.yaml @@ -0,0 +1,45 @@ +# Pre-commit configuration. +# See https://github.com/python-discord/code-jam-template/tree/main#pre-commit-run-linting-before-committing + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + - id: check-toml + - id: check-yaml + - id: end-of-file-fixer + - id: trailing-whitespace + args: [--markdown-linebreak-ext=md] + - id: mixed-line-ending + args: [--fix=lf] + + - repo: local + hooks: + - id: ruff + name: ruff + description: Run ruff linter + entry: poetry run ruff check --force-exclude + language: system + types_or: [python, pyi] + require_serial: true + args: [--fix, --exit-non-zero-on-fix] + + - repo: local + hooks: + - id: ruff-format + name: ruff-format + description: Run ruff formatter + entry: poetry run ruff format + language: system + types_or: [python, pyi] + require_serial: true + + - repo: local + hooks: + - id: basedpyright + name: Based Pyright + description: Run basedpyright type checker + entry: poetry run basedpyright --warnings + language: system + types: [python] + pass_filenames: false # pyright runs for the entire project, it can't run for single files diff --git a/contemplative-constellations/.vscode/extensions.json b/contemplative-constellations/.vscode/extensions.json new file mode 100644 index 0000000..c6994d5 --- /dev/null +++ b/contemplative-constellations/.vscode/extensions.json @@ -0,0 +1,6 @@ +{ + "recommendations": [ + "charliermarsh.ruff", + "detachhead.basedpyright" + ] +} diff --git a/contemplative-constellations/Dockerfile b/contemplative-constellations/Dockerfile new file mode 100644 index 0000000..42bf739 --- /dev/null +++ b/contemplative-constellations/Dockerfile @@ -0,0 +1,31 @@ +FROM python:3.12-slim-bookworm + +ENV POETRY_VERSION=1.8.3 \ + POETRY_HOME="/opt/poetry/home" \ + POETRY_CACHE_DIR="/opt/poetry/cache" \ + POETRY_NO_INTERACTION=1 \ + POETRY_VIRTUALENVS_IN_PROJECT=false + +ENV 
PATH="$POETRY_HOME/bin:$PATH" + +RUN apt-get update \ + && apt-get -y upgrade \ + && apt-get install --no-install-recommends -y curl \ + && apt-get clean && rm -rf /var/lib/apt/lists/* + +# Install poetry using the official installer +RUN curl -sSL https://install.python-poetry.org | python + +# Limit amount of concurrent install requests, to avoid hitting pypi rate-limits +RUN poetry config installer.max-workers 10 + +# Install project dependencies +WORKDIR /app +COPY pyproject.toml poetry.lock ./ +RUN poetry install --only main --no-interaction --no-ansi -vvv + +# Copy the source code in last to optimize rebuilding the image +COPY . . + +ENTRYPOINT ["poetry"] +CMD ["run", "python", "-m", "src"] diff --git a/contemplative-constellations/LICENSE.txt b/contemplative-constellations/LICENSE.txt new file mode 100644 index 0000000..e16fc78 --- /dev/null +++ b/contemplative-constellations/LICENSE.txt @@ -0,0 +1,7 @@ +Copyright 2024 ItsDrike , Benji , Paillat-dev , v33010 , Ash8121 + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/contemplative-constellations/README.md b/contemplative-constellations/README.md new file mode 100644 index 0000000..e8aa1d9 --- /dev/null +++ b/contemplative-constellations/README.md @@ -0,0 +1,105 @@ + + +# Python Discord's Code Jam 2024, Contemplative Constellations Team Project + +This repository houses the source code of the team project created for **Python Discord's Code Jam 2024**, developed by +**Contemplative Constellations** team. + +## Running the bot + +To run the bot, you'll first want to install all of the project's dependencies. This is done using the +[`poetry`](https://python-poetry.org/docs/) package manager. You may need to download poetry if you don't already have +it. + +To install the dependencies, you can run the `poetry install` command. If you only want to run the bot and you're not +interested in also developing / contributing, you can also run `poetry install --only main`, which will skip the +development dependencies (tools for linting and testing). + +Once done, you will want to activate the virtual environment that poetry has just created for the project. To do so, +simply run `poetry shell`. + +You'll now need to configure the bot. See the [configuring section](#configuring-the-bot) + +Finally, you can start the bot with `python -m src`. + +### Using docker + +The project also supports [docker](https://www.docker.com/) installation, which should allow running the project +anywhere, without installing all of the dependencies manually. This is a lot more convenient way to run the bot, if you +just want to run it and you don't wish to do any actual development. 
+ +To use docker, you can check out the images that are automatically built after each update to the `main` branch at +[ghcr](https://github.com/itsdrike/code-jam-2024/pkgs/container/code-jam-2024). You can also use [`docker +compose`](https://docs.docker.com/compose/) with the [`docker-compose.yaml`](./docker-compose.yaml) file, which will +pull this image from ghcr. To run the container using this file, you can use the following command: + +```bash +docker compose up +``` + +> [!TIP] +> To run the container in the background, add the `-d` flag to the command. + +If you want to build the image locally (to include some other changes that aren't yet in the main branch, maybe during +development or to customize something when deploying), you can also use +[`docker-compose.local.yaml`](./docker-compose.local.yaml), which defines an image building step from our +[`Dockerfile`](./Dockerfile). To run this local version of docker-compose, you can use the following command: + +```bash +docker compose -f ./docker-compose.local.yaml up +``` + +> [!IMPORTANT] +> Note that you will still need to create a `.env` file with all of the configuration variables (see [the configuring +> section](#configuring-the-bot)) + +> [!NOTE] +> By default, the docker container will always use a brand new database. If you wish to persist the database across +> runs, make sure to modify the docker-compose file and mount the database file. In the container, this file will by +> default use the `/app/database.db` path. You can either mount this file from a [docker volume][docker-volumes] or +> from your file-system using a [bind mount][docker-bind-mount]. + +[docker-volumes]: https://docs.docker.com/storage/volumes/#use-a-volume-with-docker-compose +[docker-bind-mount]: https://docs.docker.com/storage/bind-mounts/#use-a-bind-mount-with-compose + +## Configuring the bot + +The bot is configured using environment variables. 
You can either create a `.env` file and define these variables +there, or you can set / export them manually. Using the `.env` file is generally a better idea and will likely be more +convenient. + +| Variable name | Type | Default | Description | +| -------------------------- | ------ | ------------- | ------------------------------------------------------------------------------------------------------------------- | +| `BOT_TOKEN` | string | N/A | Bot token of the discord application (see: [this guide][bot-token-guide] if you don't have one yet) | +| `TVDB_API_KEY` | string | N/A | API key for TVDB (see [this page][tvdb-api-page] if you don't have one yet) | +| `TVDB_RATE_LIMIT_REQUESTS` | int | 5 | Amount of requests that the bot is allowed to make to the TVDB API within `TVDB_RATE_LIMIT_PERIOD` | +| `TVDB_RATE_LIMIT_PERIOD` | float | 100 | Period of time in seconds, within which the bot can make up to `TVDB_RATE_LIMIT_REQUESTS` requests to the TVDB API. | +| `SQLITE_DATABASE_FILE` | path | ./database.db | Path to sqlite database file, can be relative to project root (if the file doesn't yet exists, it will be created) | + +[bot-token-guide]: https://guide.pycord.dev/getting-started/creating-your-first-bot#creating-the-bot-application +[tvdb-api-page]: https://www.thetvdb.com/api-information + +### Debug configuration variables + +| Variable name | Type | Default | Description | +| -------------------- | ------ | ------- | ------------------------------------------------------------------------------------------------------------------- | +| `ECHO_SQL` | bool | 0 | If `1`, print out every SQL command that SQLAlchemy library runs internally (can be useful when debugging) | +| `DB_ALWAYS_MIGRATE` | bool | 0 | If `1`, database migrations will always be performed, even on a new database (instead of just creating the tables). 
| +| `DEBUG` | bool | 0 | If `1`, debug logs will be enabled, if `0` only info logs and above will be shown | +| `LOG_FILE` | path | N/A | If set, also write the logs into given file, otherwise, only print them | +| `TRACE_LEVEL_FILTER` | custom | N/A | Configuration for trace level logging, see: [trace logs config section](#trace-logs-config) | + +### Trace logs config + +We have a custom `trace` log level for the bot, which can be used for debugging purposes. This level is below `debug` +and can only be enabled if `DEBUG=1`. This log level is controlled through the `TRACE_LEVEL_FILTER` environment +variable. It works in the following way: + +- If `DEBUG=0`, the `TRACE_LEVEL_FILTER` variable is ignored, regardless of it's value. +- If `TRACE_LEVEL_FILTER` is not set, no trace logs will appear (debug logs only). +- If `TRACE_LEVEL_FILTER` is set to `*`, the root logger will be set to `TRACE` level. All trace logs will appear. +- When `TRACE_LEVEL_FILTER` is set to a list of logger names, delimited by a comma, each of the specified loggers will + be set to `TRACE` level, leaving the rest at `DEBUG` level. For example: `TRACE_LEVEL_FILTER="src.exts.foo.foo,src.exts.bar.bar"` +- When `TRACE_LEVEL_FILTER` starts with a `!` symbol, followed by a list of loggers, the root logger will be set to + `TRACE` level, with the specified loggers being set to `DEBUG` level. diff --git a/contemplative-constellations/alembic-migrations/README.md b/contemplative-constellations/alembic-migrations/README.md new file mode 100644 index 0000000..c92e334 --- /dev/null +++ b/contemplative-constellations/alembic-migrations/README.md @@ -0,0 +1,93 @@ + + +# Welcome to `alembic-migrations` directory + +This directory contains all of our database migrations. + +## What are database migrations? + +In case you aren't familiar, a database migration is essentially just a set of SQL instructions that should be +performed, to get your database into the state that we expect it to be in. 
+ +The thing is, as software changes, the requirements for the database structure change alongside with it and that means +that anyone who would like to update this application to a newer version will also need to find a way to get their +database up to date with any changes that were made. + +If people had to do this manually, it would mean going through diffs and checking what exactly changed in the relevant +files, then using some tool where they can run SQL commands, figuring out what commands to even run to properly get +everything up to date without losing any existing information and finally actually running them. + +Clearly, this isn't ideal, especially for people who just want to use this bot as it's being updated and don't know +much about SQL and databases. For that reason, we decided to instead keep the instructions for these migrations in +individual version files, which people can simply execute to get their database up to date (or even to downgrade it, +in case someone needs to test out an older version of the bot). + +## How to use these migrations? + +We're using [`alembic`](https://alembic.sqlalchemy.org/en/latest/index.html), which is a tool that makes generating and +applying migrations very easy. Additionally, we have some custom logic in place, to make sure that all migrations that +weren't yet applied will automatically be ran once the application is started, so you don't actually need to do +anything to apply them. + +That said, if you would want to apply the migrations manually, without having to start the bot first, you can do so +with the command below: + +```bash +alembic upgrade head +``` + +This will run all of the migrations one by one, from the last previously executed migrations (if you haven't run the +command before, it will simply run each one). 
Alembic will then keep track of the revision id (basically the specific +migration file) that was applied and store that id into your database (Alembic will create it's own database table for +this). That way, alembic will always know what version is your current database at. + +> [!TIP] +> You can also run `alembic check`, to see if there are any pending migrations that you haven't yet applied. + +## How to create migrations? (for developers) + +If you're adding a new database table, deleting it, or just changing it somehow, you will want to create a new +migration file for it. Thankfully, alembic makes this very easy. All you need to do is run: + +```bash +alembic revision --autogenerate -m "Some message (e.g.: Added users table)" +``` + +Alembic will actually load the python classes that represent all of the tables and compare that with what you currently +have in the database, automatically generating all of the instructions that need to be ran in a new migration script. +This script will be stored in `alembic-migrations/versions/` directory. + +Note that after you did this, you will want to apply the migrations. You can do that by simply running the bot for a +while, to let the custom logic we have in place run alembic migrations for you, or you can run them manually with +`alembic upgrade head`. + +### Manual migrations + +In most cases, running the command to auto-generate the migration will be all that you need to do. + +That said, alembic has it's limitations and in some cases, the automatic generation doesn't work, or doesn't do what +we'd like it to do. For example, if you rename a table, alembic can't understand that this was a rename, rather than a +deletion of one table and a creation of another. This is a problem, because instead of simply renaming while keeping +the old existing data, alembic will generate instructions that would lead to losing those old data. 
+ +In these cases, you will need to do some extra work and edit the migration files yourself. In case auto-generation +fails completely, you can run the same command without that `--autogenerate` flag, which will generate an empty +migration file, that you'll need to fill out. + +That said, in the vast majority of cases, you will not need to write your migrations manually. For more info on when you +might need to, check [the documentation][alembic-autogeneration-autodetection]. + +[alembic-autogeneration-autodetection]: https://alembic.sqlalchemy.org/en/latest/autogenerate.html#what-does-autogenerate-detect-and-what-does-it-not-detect + +### Stamping + +In case you've made modifications to your database already (perhaps by manually running some SQL commands to test out a +manually written migration), you might want to skip applying a migration and instead just tell Alembic that the +database is already up to date with the latest revision. + +Thankfully, alembic makes this really simple, all you need to do is run: + +```bash +alembic stamp head +``` diff --git a/contemplative-constellations/alembic-migrations/env.py b/contemplative-constellations/alembic-migrations/env.py new file mode 100644 index 0000000..d20a9e6 --- /dev/null +++ b/contemplative-constellations/alembic-migrations/env.py @@ -0,0 +1,98 @@ +import asyncio + +from alembic import context +from sqlalchemy import MetaData, engine_from_config, pool +from sqlalchemy.engine import Connection +from sqlalchemy.ext.asyncio import AsyncEngine + +from src.utils.database import Base, SQLALCHEMY_URL, load_db_models +from src.utils.log import get_logger + +# Obtain a custom logger instance +# This will also set up logging with our custom configuration +log = get_logger(__name__) + +# Override the logging level of the alembic migration logs +# we set this to WARNING in the project to avoid log spam on auto-migrations +# however when alembic is ran manually, we want to see these logs, so set it +# back to the same level as the root log (INFO 
or DEBUG) +get_logger("alembic.runtime.migration").setLevel(get_logger().getEffectiveLevel()) + +# This is the Alembic Config object, which provides access to the values within the .ini file in use. +config = context.config + + +def run_migrations_offline(target_metadata: MetaData, *, render_as_batch: bool) -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the script output. + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + render_as_batch=render_as_batch, + ) + + with context.begin_transaction(): + context.run_migrations() + + +async def run_migrations_online(target_metadata: MetaData, *, render_as_batch: bool) -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine and associate a connection with the context. 
+ """ + + def do_run_migrations(connection: Connection) -> None: + context.configure( + connection=connection, + target_metadata=target_metadata, + render_as_batch=render_as_batch, + ) + + with context.begin_transaction(): + context.run_migrations() + + configuration = config.get_section(config.config_ini_section) + if configuration is None: + raise RuntimeError("Config ini section doesn't exists (should never happen)") + + connectable = AsyncEngine( + engine_from_config( + configuration, + prefix="sqlalchemy.", + poolclass=pool.NullPool, + future=True, + ) + ) + + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + await connectable.dispose() + + +def main() -> None: + """Main entry point function.""" + config.set_main_option("sqlalchemy.url", SQLALCHEMY_URL) + load_db_models() + + # Check if we're using SQLite database, as it requires special handling for migrations + # due to the lack of ALTER TABLE statements. (https://alembic.sqlalchemy.org/en/latest/batch.html) + render_as_batch = SQLALCHEMY_URL.startswith("sqlite") + + target_metadata = Base.metadata + + if context.is_offline_mode(): + run_migrations_offline(target_metadata, render_as_batch=render_as_batch) + else: + asyncio.run(run_migrations_online(target_metadata, render_as_batch=render_as_batch)) + + +main() diff --git a/contemplative-constellations/alembic-migrations/script.py.mako b/contemplative-constellations/alembic-migrations/script.py.mako new file mode 100644 index 0000000..d5f4a9a --- /dev/null +++ b/contemplative-constellations/alembic-migrations/script.py.mako @@ -0,0 +1,25 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} +""" + +from collections.abc import Sequence + +import sqlalchemy as sa +from alembic import op +${imports if imports else ""} +# revision identifiers, used by Alembic. 
# Revision identifiers, used by Alembic.
revision: str = "eeef1b453205"
down_revision: str | None = None
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None


def upgrade() -> None:
    """Build the initial schema: movies, series, users, episodes, user lists and list items."""
    # Standalone lookup tables, each keyed by its TVDB id.
    # NOTE(review): discord_id is sa.Integer() — Discord snowflakes exceed 32 bits;
    # verify the target backend stores this as a 64-bit integer.
    op.create_table(
        "movies",
        sa.Column("tvdb_id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("tvdb_id"),
    )
    op.create_table(
        "series",
        sa.Column("tvdb_id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("tvdb_id"),
    )
    op.create_table(
        "users",
        sa.Column("discord_id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("discord_id"),
    )

    # Episodes belong to a series (FK to series.tvdb_id).
    op.create_table(
        "episodes",
        sa.Column("tvdb_id", sa.Integer(), nullable=False),
        sa.Column("series_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ["series_id"],
            ["series.tvdb_id"],
        ),
        sa.PrimaryKeyConstraint("tvdb_id"),
    )

    # Per-user named lists; (user_id, name) must be unique, with a matching unique index.
    op.create_table(
        "user_lists",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column(
            "item_kind",
            sa.Enum("SERIES", "MOVIE", "EPISODE", "MEDIA", "ANY", name="userlistkind"),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.discord_id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("user_id", "name", name="unique_user_list_name"),
    )
    with op.batch_alter_table("user_lists", schema=None) as batch_op:
        batch_op.create_index("ix_user_lists_user_id_name", ["user_id", "name"], unique=True)

    # Items inside a list; composite PK allows the same TVDB id in one list only once per kind.
    op.create_table(
        "user_list_items",
        sa.Column("list_id", sa.Integer(), nullable=False),
        sa.Column("tvdb_id", sa.Integer(), nullable=False),
        sa.Column(
            "kind",
            sa.Enum("SERIES", "MOVIE", "EPISODE", name="userlistitemkind"),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["list_id"],
            ["user_lists.id"],
        ),
        sa.PrimaryKeyConstraint("list_id", "tvdb_id", "kind"),
    )


def downgrade() -> None:
    """Tear the schema back down in reverse dependency order."""
    op.drop_table("user_list_items")
    with op.batch_alter_table("user_lists", schema=None) as batch_op:
        batch_op.drop_index("ix_user_lists_user_id_name")

    op.drop_table("user_lists")
    op.drop_table("episodes")
    op.drop_table("users")
    op.drop_table("series")
    op.drop_table("movies")
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
+ +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# Connection string to the database to perform migrations on +# **This is a placeholder, we obtain the real value dynamically from the environment** +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples +# https://alembic.sqlalchemy.org/en/latest/autogenerate.html#basic-post-processor-configuration + +hooks = ruff-autofix, ruff-format + +ruff-autofix.type = exec +ruff-autofix.executable = %(here)s/.venv/bin/ruff +ruff-autofix.options = check --fix REVISION_SCRIPT_FILENAME + +ruff-format.type = exec +ruff-format.executable = %(here)s/.venv/bin/ruff +ruff-format.options = format REVISION_SCRIPT_FILENAME + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/contemplative-constellations/docker-compose.local.yaml b/contemplative-constellations/docker-compose.local.yaml new file mode 100644 index 0000000..c01d1f8 --- /dev/null +++ b/contemplative-constellations/docker-compose.local.yaml @@ -0,0 +1,9 @@ +version: "3.9" + +services: + bot: + build: + context: . 
+ dockerfile: Dockerfile + env_file: + - .env diff --git a/contemplative-constellations/docker-compose.yaml b/contemplative-constellations/docker-compose.yaml new file mode 100644 index 0000000..de095d5 --- /dev/null +++ b/contemplative-constellations/docker-compose.yaml @@ -0,0 +1,7 @@ +version: '3.9' + +services: + bot: + image: ghcr.io/itsdrike/code-jam-2024:main + env_file: + - .env diff --git a/contemplative-constellations/poetry.lock b/contemplative-constellations/poetry.lock new file mode 100644 index 0000000..fcda70b --- /dev/null +++ b/contemplative-constellations/poetry.lock @@ -0,0 +1,1694 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "aiocache" +version = "0.12.2" +description = "multi backend asyncio cache" +optional = false +python-versions = "*" +files = [ + {file = "aiocache-0.12.2-py2.py3-none-any.whl", hash = "sha256:9b6fa30634ab0bfc3ecc44928a91ff07c6ea16d27d55469636b296ebc6eb5918"}, + {file = "aiocache-0.12.2.tar.gz", hash = "sha256:b41c9a145b050a5dcbae1599f847db6dd445193b1f3bd172d8e0fe0cb9e96684"}, +] + +[package.dependencies] +aiomcache = {version = ">=0.5.2", optional = true, markers = "extra == \"memcached\""} +redis = {version = ">=4.2.0", optional = true, markers = "extra == \"redis\""} + +[package.extras] +memcached = ["aiomcache (>=0.5.2)"] +msgpack = ["msgpack (>=0.5.5)"] +redis = ["redis (>=4.2.0)"] + +[[package]] +name = "aiohttp" +version = "3.9.5" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, + {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, + {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, + {file = 
"aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, + {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, + {file = 
"aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, + {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, + {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, 
+ {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, + {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, + {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, + {file = 
"aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, + {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, + {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, + {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aiomcache" +version = "0.8.2" +description = "Minimal pure python memcached client" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiomcache-0.8.2-py3-none-any.whl", hash = "sha256:9d78d6b6e74e775df18b350b1cddfa96bd2f0a44d49ad27fa87759a3469cef5e"}, + {file = "aiomcache-0.8.2.tar.gz", 
hash = "sha256:43b220d7f499a32a71871c4f457116eb23460fa216e69c1d32b81e3209e51359"}, +] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "aiosqlite" +version = "0.20.0" +description = "asyncio bridge to the standard sqlite3 module" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiosqlite-0.20.0-py3-none-any.whl", hash = "sha256:36a1deaca0cac40ebe32aac9977a6e2bbc7f5189f23f4a54d5908986729e5bd6"}, + {file = "aiosqlite-0.20.0.tar.gz", hash = "sha256:6d35c8c256637f4672f843c31021464090805bf925385ac39473fb16eaaca3d7"}, +] + +[package.dependencies] +typing_extensions = ">=4.0" + +[package.extras] +dev = ["attribution (==1.7.0)", "black (==24.2.0)", "coverage[toml] (==7.4.1)", "flake8 (==7.0.0)", "flake8-bugbear (==24.2.6)", "flit (==3.9.0)", "mypy (==1.8.0)", "ufmt (==2.3.0)", "usort (==1.0.8.post1)"] +docs = ["sphinx (==7.2.6)", "sphinx-mdinclude (==0.5.3)"] + +[[package]] +name = "alembic" +version = "1.13.2" +description = "A database migration tool for SQLAlchemy." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, + {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["backports.zoneinfo"] + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.4.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "argcomplete" +version = "3.4.0" +description = "Bash tab completion for argparse" +optional = false +python-versions = ">=3.8" +files = [ + {file = "argcomplete-3.4.0-py3-none-any.whl", hash = 
"sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5"}, + {file = "argcomplete-3.4.0.tar.gz", hash = "sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f"}, +] + +[package.extras] +test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "basedpyright" +version = "1.14.0" +description = "static type checking for Python (but based)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "basedpyright-1.14.0-py3-none-any.whl", hash = "sha256:ca29ae9c9dd04d718866b9d3cc737a31f084ce954a9afc9f00eafac9419e0046"}, + {file = "basedpyright-1.14.0.tar.gz", hash = "sha256:ebbbb44484e269c441d48129bf43619aa8ff54966706e13732cd4412408d1477"}, +] + +[package.dependencies] +nodejs-wheel-binaries = ">=20.13.1" + +[[package]] +name = "black" +version = "24.4.2" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = 
"black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2024.7.4" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coloredlogs" +version = "15.0.1" +description = "Colored terminal output for Python's logging module" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, + {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, +] + +[package.dependencies] +humanfriendly = ">=9.1" + +[package.extras] +cron = ["capturer (>=2.4)"] + +[[package]] +name = "coverage" +version = "7.6.0" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"}, + {file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"}, + {file = 
"coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"}, + {file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"}, + {file = "coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"}, + {file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"}, + {file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"}, + {file = "coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"}, + {file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"}, + {file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = "sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"}, + {file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"}, + {file = 
"coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"}, + {file = "coverage-7.6.0-cp39-cp39-win32.whl", hash = "sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"}, + {file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"}, + {file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"}, + {file = "coverage-7.6.0.tar.gz", hash = "sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "datamodel-code-generator" +version = "0.25.8" +description = "Datamodel Code Generator" +optional = false +python-versions = "<4.0,>=3.7" +files = [ + {file = "datamodel_code_generator-0.25.8-py3-none-any.whl", hash = "sha256:f9b216efad84d8dcb517273d2728875b6052b7e8dc4e5c13a597441cef236f6e"}, + {file = "datamodel_code_generator-0.25.8.tar.gz", hash = "sha256:b7838122b8133dae6e46f36a1cf25c0ccc66745da057988f490d00ab71121de7"}, +] + +[package.dependencies] +argcomplete = ">=1.10,<4.0" +black = ">=19.10b0" +genson = ">=1.2.1,<2.0" +httpx = {version = "*", optional = true, 
markers = "extra == \"http\""} +inflect = ">=4.1.0,<6.0" +isort = ">=4.3.21,<6.0" +jinja2 = ">=2.10.1,<4.0" +packaging = "*" +pydantic = {version = ">=1.10.0,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.4.0 || >2.4.0,<3.0", extras = ["email"], markers = "python_version >= \"3.12\" and python_version < \"4.0\""} +pyyaml = ">=6.0.1" + +[package.extras] +debug = ["PySnooper (>=0.4.1,<2.0.0)"] +graphql = ["graphql-core (>=3.2.3,<4.0.0)"] +http = ["httpx"] +validation = ["openapi-spec-validator (>=0.2.8,<0.7.0)", "prance (>=0.18.2)"] + +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "dnspython" +version = "2.6.1" +description = "DNS toolkit" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, + {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=41)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=0.9.25)"] +idna = ["idna (>=3.6)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "email-validator" +version = "2.2.0" +description = "A robust email address syntax and deliverability validation library." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" + +[[package]] +name = "filelock" +version = "3.15.4" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + 
{file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", 
hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = 
"sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "genson" +version = "1.3.0" +description = "GenSON is a powerful, user-friendly JSON Schema generator." 
+optional = false +python-versions = "*" +files = [ + {file = "genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7"}, + {file = "genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37"}, +] + +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = 
"sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + 
{file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = 
"greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = 
"sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httpx" +version = "0.27.0" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "humanfriendly" +version = "10.0" +description = "Human friendly output for text interfaces using Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, + {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, +] + 
+[package.dependencies] +pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} + +[[package]] +name = "identify" +version = "2.6.0" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, + {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "inflect" +version = "5.6.2" +description = "Correctly generate plurals, singular nouns, ordinals, indefinite articles; convert numbers to words" +optional = false +python-versions = ">=3.7" +files = [ + {file = "inflect-5.6.2-py3-none-any.whl", hash = "sha256:b45d91a4a28a4e617ff1821117439b06eaa86e2a4573154af0149e9be6687238"}, + {file = "inflect-5.6.2.tar.gz", hash = "sha256:aadc7ed73928f5e014129794bbac03058cca35d0a973a5fc4eb45c7fa26005f9"}, +] + +[package.extras] +docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] +testing = ["pygments", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = 
"sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "mako" +version = "1.3.5" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "multidict" +version = "6.0.5" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = 
"multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = 
"sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = 
"multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy 
type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "nodejs-wheel-binaries" +version = "20.15.1" +description = "unoffical Node.js package" +optional = false +python-versions = ">=3.7" +files = [ + {file = "nodejs_wheel_binaries-20.15.1-py2.py3-none-macosx_10_15_x86_64.whl", hash = "sha256:a04537555f59e53021f8a2b07fa7aaac29d7793b7fae7fbf561bf9a859f4c67a"}, + {file = "nodejs_wheel_binaries-20.15.1-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:b5ff04efa56a3fcd1fd09b30f5236c12bd84c10fcb222f3c0e04e1d497342b70"}, + {file = "nodejs_wheel_binaries-20.15.1-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c3e172e3fde3c13e7509312c81700736304dbd250745d87f00e7506065f3a5"}, + {file = "nodejs_wheel_binaries-20.15.1-py2.py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9740f7456a43cb09521a1ac93a4355dc8282c41420f2d61ff631a01f39e2aa18"}, + {file = "nodejs_wheel_binaries-20.15.1-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:bf5e239676efabb2fbaeff2f36d0bad8e2379f260ef44e13ef2151d037e40af3"}, + {file = "nodejs_wheel_binaries-20.15.1-py2.py3-none-win_amd64.whl", hash = "sha256:624936171b1aa2e1cc6d1718b1caa089e943b54df16568fa2f4576d145ac279a"}, + {file = 
"nodejs_wheel_binaries-20.15.1.tar.gz", hash = "sha256:b2f25b4f0e9a827ae1af8218ab13a385e279c236faf7b7c821e969bb8f6b25e8"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "3.7.1" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"}, + {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "py-cord" +version = "2.6.0" +description = "A Python wrapper for the Discord API" +optional = false +python-versions = ">=3.8" +files = [ + {file = "py_cord-2.6.0-py3-none-any.whl", hash = "sha256:906cc077904a4d478af0264ae4374b0412ed9f9ab950d0162c4f31239a906d26"}, + {file = "py_cord-2.6.0.tar.gz", hash = "sha256:bbc0349542965d05e4b18cc4424136206430a8cc911fda12a0a57df6fdf9cd9c"}, +] + +[package.dependencies] +aiohttp = ">=3.6.0,<4.0" + +[package.extras] +docs = ["furo (==2023.3.23)", "myst-parser (==1.0.0)", "sphinx (==5.3.0)", "sphinx-autodoc-typehints (==1.23.0)", "sphinx-copybutton (==0.5.2)", "sphinxcontrib-trio (==1.1.2)", "sphinxcontrib-websupport (==1.2.4)", "sphinxext-opengraph (==0.9.1)"] +speed = ["aiohttp[speedups]", "msgspec (>=0.18.6,<0.19.0)"] +voice = ["PyNaCl (>=1.3.0,<1.6)"] + +[[package]] +name = "pydantic" +version = "2.8.2" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} +pydantic-core = "2.20.1" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < 
\"3.13\""}, +] + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.20.1" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, + {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = 
"sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, + {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = 
"sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = 
"sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = 
"sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, + {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, + 
{file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, + {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, + {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = 
"sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, + {file = 
"pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, + {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyreadline3" +version = "3.4.1" +description = "A python implementation of GNU readline." 
+optional = false +python-versions = "*" +files = [ + {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"}, + {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, +] + +[[package]] +name = "pytest" +version = "8.2.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, + {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2.0" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.23.8" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, + {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-cov" +version = "5.0.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "python-decouple" +version = "3.8" +description = "Strict separation of settings from code." +optional = false +python-versions = "*" +files = [ + {file = "python-decouple-3.8.tar.gz", hash = "sha256:ba6e2657d4f376ecc46f77a3a615e058d93ba5e465c01bbe57289bfb7cce680f"}, + {file = "python_decouple-3.8-py3-none-any.whl", hash = "sha256:d0d45340815b25f4de59c974b855bb38d03151d81b037d9e3f463b0c9f8cbd66"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "redis" +version = "5.0.7" +description = "Python client for Redis database and key-value store" +optional = false +python-versions = ">=3.7" +files = [ + {file = "redis-5.0.7-py3-none-any.whl", hash = "sha256:0e479e24da960c690be5d9b96d21f7b918a98c0cf49af3b6fafaa0753f93a0db"}, + {file = "redis-5.0.7.tar.gz", hash = "sha256:8f611490b93c8109b50adc317b31bfd84fff31def3475b92e7e80bf39f48175b"}, +] + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] + +[[package]] +name = "ruff" +version = "0.3.7" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash 
= "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, + {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, + {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, + {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, + {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.31" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2a213c1b699d3f5768a7272de720387ae0122f1becf0901ed6eaa1abd1baf6c"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9fea3d0884e82d1e33226935dac990b967bef21315cbcc894605db3441347443"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ad7f221d8a69d32d197e5968d798217a4feebe30144986af71ada8c548e9fa"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2bee229715b6366f86a95d497c347c22ddffa2c7c96143b59a2aa5cc9eebbc"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cd5b94d4819c0c89280b7c6109c7b788a576084bf0a480ae17c227b0bc41e109"}, + {file = 
"SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:750900a471d39a7eeba57580b11983030517a1f512c2cb287d5ad0fcf3aebd58"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-win32.whl", hash = "sha256:7bd112be780928c7f493c1a192cd8c5fc2a2a7b52b790bc5a84203fb4381c6be"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:5a48ac4d359f058474fadc2115f78a5cdac9988d4f99eae44917f36aa1476327"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f68470edd70c3ac3b6cd5c2a22a8daf18415203ca1b036aaeb9b0fb6f54e8298"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e2c38c2a4c5c634fe6c3c58a789712719fa1bf9b9d6ff5ebfce9a9e5b89c1ca"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd15026f77420eb2b324dcb93551ad9c5f22fab2c150c286ef1dc1160f110203"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2196208432deebdfe3b22185d46b08f00ac9d7b01284e168c212919891289396"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:352b2770097f41bff6029b280c0e03b217c2dcaddc40726f8f53ed58d8a85da4"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56d51ae825d20d604583f82c9527d285e9e6d14f9a5516463d9705dab20c3740"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-win32.whl", hash = "sha256:6e2622844551945db81c26a02f27d94145b561f9d4b0c39ce7bfd2fda5776dac"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-win_amd64.whl", hash = "sha256:ccaf1b0c90435b6e430f5dd30a5aede4764942a695552eb3a4ab74ed63c5b8d3"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3b74570d99126992d4b0f91fb87c586a574a5872651185de8297c6f90055ae42"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f77c4f042ad493cb8595e2f503c7a4fe44cd7bd59c7582fd6d78d7e7b8ec52c"}, + {file = 
"SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd1591329333daf94467e699e11015d9c944f44c94d2091f4ac493ced0119449"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74afabeeff415e35525bf7a4ecdab015f00e06456166a2eba7590e49f8db940e"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b9c01990d9015df2c6f818aa8f4297d42ee71c9502026bb074e713d496e26b67"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66f63278db425838b3c2b1c596654b31939427016ba030e951b292e32b99553e"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-win32.whl", hash = "sha256:0b0f658414ee4e4b8cbcd4a9bb0fd743c5eeb81fc858ca517217a8013d282c96"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-win_amd64.whl", hash = "sha256:fa4b1af3e619b5b0b435e333f3967612db06351217c58bfb50cee5f003db2a5a"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f43e93057cf52a227eda401251c72b6fbe4756f35fa6bfebb5d73b86881e59b0"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d337bf94052856d1b330d5fcad44582a30c532a2463776e1651bd3294ee7e58b"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c06fb43a51ccdff3b4006aafee9fcf15f63f23c580675f7734245ceb6b6a9e05"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:b6e22630e89f0e8c12332b2b4c282cb01cf4da0d26795b7eae16702a608e7ca1"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:79a40771363c5e9f3a77f0e28b3302801db08040928146e6808b5b7a40749c88"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-win32.whl", hash = "sha256:501ff052229cb79dd4c49c402f6cb03b5a40ae4771efc8bb2bfac9f6c3d3508f"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-win_amd64.whl", hash = "sha256:597fec37c382a5442ffd471f66ce12d07d91b281fd474289356b1a0041bdf31d"}, + {file = 
"SQLAlchemy-2.0.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dc6d69f8829712a4fd799d2ac8d79bdeff651c2301b081fd5d3fe697bd5b4ab9"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23b9fbb2f5dd9e630db70fbe47d963c7779e9c81830869bd7d137c2dc1ad05fb"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21c97efcbb9f255d5c12a96ae14da873233597dfd00a3a0c4ce5b3e5e79704"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26a6a9837589c42b16693cf7bf836f5d42218f44d198f9343dd71d3164ceeeac"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc251477eae03c20fae8db9c1c23ea2ebc47331bcd73927cdcaecd02af98d3c3"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2fd17e3bb8058359fa61248c52c7b09a97cf3c820e54207a50af529876451808"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-win32.whl", hash = "sha256:c76c81c52e1e08f12f4b6a07af2b96b9b15ea67ccdd40ae17019f1c373faa227"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-win_amd64.whl", hash = "sha256:4b600e9a212ed59355813becbcf282cfda5c93678e15c25a0ef896b354423238"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b6cf796d9fcc9b37011d3f9936189b3c8074a02a4ed0c0fbbc126772c31a6d4"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78fe11dbe37d92667c2c6e74379f75746dc947ee505555a0197cfba9a6d4f1a4"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc47dc6185a83c8100b37acda27658fe4dbd33b7d5e7324111f6521008ab4fe"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a41514c1a779e2aa9a19f67aaadeb5cbddf0b2b508843fcd7bafdf4c6864005"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:afb6dde6c11ea4525318e279cd93c8734b795ac8bb5dda0eedd9ebaca7fa23f1"}, + {file = 
"SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3f9faef422cfbb8fd53716cd14ba95e2ef655400235c3dfad1b5f467ba179c8c"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-win32.whl", hash = "sha256:fc6b14e8602f59c6ba893980bea96571dd0ed83d8ebb9c4479d9ed5425d562e9"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-win_amd64.whl", hash = "sha256:3cb8a66b167b033ec72c3812ffc8441d4e9f5f78f5e31e54dcd4c90a4ca5bebc"}, + {file = "SQLAlchemy-2.0.31-py3-none-any.whl", hash = "sha256:69f3e3c08867a8e4856e92d7afb618b95cdee18e0bc1647b77599722c9a28911"}, + {file = "SQLAlchemy-2.0.31.tar.gz", hash = "sha256:b607489dd4a54de56984a0c7656247504bd5523d9d0ba799aef59d4add009484"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] 
+postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "virtualenv" +version = "20.26.3" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = 
"sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = 
"yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = 
"yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = 
"yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = 
"yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[metadata] +lock-version = "2.0" +python-versions = "^3.12" +content-hash = "29da624decbb73ca07e87b8449b602ba20f01b9f868c09a064bf0ad4b0ab6325" diff --git a/contemplative-constellations/presentation/img/img.png b/contemplative-constellations/presentation/img/img.png new file mode 100644 index 0000000..6352a4a Binary files /dev/null and b/contemplative-constellations/presentation/img/img.png differ diff --git 
a/contemplative-constellations/presentation/img/img_1.png b/contemplative-constellations/presentation/img/img_1.png new file mode 100644 index 0000000..3280010 Binary files /dev/null and b/contemplative-constellations/presentation/img/img_1.png differ diff --git a/contemplative-constellations/presentation/img/img_2.png b/contemplative-constellations/presentation/img/img_2.png new file mode 100644 index 0000000..7394d35 Binary files /dev/null and b/contemplative-constellations/presentation/img/img_2.png differ diff --git a/contemplative-constellations/presentation/img/img_3.png b/contemplative-constellations/presentation/img/img_3.png new file mode 100644 index 0000000..6316413 Binary files /dev/null and b/contemplative-constellations/presentation/img/img_3.png differ diff --git a/contemplative-constellations/presentation/img/img_4.png b/contemplative-constellations/presentation/img/img_4.png new file mode 100644 index 0000000..d05f2bc Binary files /dev/null and b/contemplative-constellations/presentation/img/img_4.png differ diff --git a/contemplative-constellations/presentation/img/img_5.png b/contemplative-constellations/presentation/img/img_5.png new file mode 100644 index 0000000..58a05ab Binary files /dev/null and b/contemplative-constellations/presentation/img/img_5.png differ diff --git a/contemplative-constellations/presentation/presentation.md b/contemplative-constellations/presentation/presentation.md new file mode 100644 index 0000000..0002716 --- /dev/null +++ b/contemplative-constellations/presentation/presentation.md @@ -0,0 +1,244 @@ +--- +# This file is supposed to be viewed using marp, which is a tool for creating Markdown based presentations. +marp: true +footer: "Authors: ItsDrike, Benji, Paillat-dev, Ash8121" +class: invert +--- + +# Lumina Tracker + +--- + +**Lumina Tracker** is a discord bot that allows you to keep track of the TV shows and movies that you have watched. 
You can use it to search for and see various information about the shows/movies you're interested in, but also to mark individual movies or show episodes as watched or favorite. + +The bot then allows anyone to take a look at your profile, to see what you have watched, along with some neat statistics like the total number of episodes, shows and movies that you have tracked. + +--- + +## Data + +We decided to use TheTVDB service as the base for our bot. It's a massive database of TV shows and movies with an API that people can access for free (at least for non-profit / open-source projects). Without it, we wouldn't be able to create this bot, so they deserve a massive thanks from us. + +--- + +## Features + +Lumina Tracker can be used with discord application commands, and has two main commands: + +- `/search` to search for a TV show or a movie +- `/profile` to view your (or someone else's) profile + +--- + +### Search + +The search command is easy to use. Simply type `/search` followed by the name of the TV show or movie you want to search for, in the `query` option. + +You also have access to two more (optional) options: + +- `type`: To specify if you are looking for a TV show or a movie and filter the results +- `by_id`: A boolean option that will interpret the query as a TheTVDB id. When using this option, make sure to specify the type, as TheTVDB ids are not unique between TV shows and movies. + +--- + +#### Search interface + +![bg right 80%](img/img.png) +The search interface responds with a message consisting of two or three buttons, depending on the type of media you are searching for, and a dropdown menu. + +--- + +#### Search interface - result picker + +![bg right 80%](img/img_1.png) +The dropdown allows you to quickly switch to another of the top 5 results, and the buttons allow you to mark the result as watched, favorite it, or in case of a TV show, view the episodes. + +This menu is only shown if there was more than 1 result found for the query.
+ +--- + +#### Search interface - buttons + +![bg right 80%](img/img_2.png) +The buttons are interactive, and you can click on them to perform the action you want, such as marking a TV show as watched or adding it to your favorites list. + + + +> ℹ️ Marking an entire TV show as watched can sometimes take some time (depending on how many episodes it has). There is no need to worry, the message will update when the operation is complete. + +--- + +#### Episodes interface + +![bg right 80%](img/img_3.png) +The episode interface is displayed when you click on the **View Episodes** button in the search interface. Like the search interface, it provides a _Mark as watched_ button, so that you can track episodes individually. + +To choose between episodes, there are two dropdown menus, the first for picking the season and the second for the episode. + +--- + +### Profile + +The profile command displays various data and statistics for the specified user. +It has one (optional) option: + +- `user`: To specify the user whose profile you want to view. If not specified, it will display your profile. + +--- + +#### Profile interface + +![bg right 80%](img/img_4.png) +The profile interface displays the user's profile. It contains the number of TV shows, movies and episodes watched, and a list with the shows marked as watched / favorite. It does not provide any interactive elements, as it is only meant to display information, but you can click on any show's link to view more information about it on TheTVDB. + +--- + +### Additional features + +Lumina Tracker also has some additional features: + +- A `/help` command to display a list of all available commands +- A `/ping` command to check if the bot is online and its latency +- A `/sudo` command group, only available to the bot owner, to load, unload or reload extensions to improve the + development experience.
+ +--- + +### Additional features +![bg right 80%](img/img_5.png) +Lumina Tracker also has a custom rate limiting system, both global and per user, to avoid spamming the bot with requests. Alongside that, we also added a cache system to store the results from TheTVDB and avoid flooding the api with the same requests. + +--- +### Additional features +![bg right 80%](img/img_5.png) + +Even though we did not find any information about rate limits in the documentation for TheTVDB API, we didn't want to abuse that fact and took the safer approach. + +--- + +### Additional features + +At last, Lumina Tracker has a built-in error handler to catch and show well formatted error messages to the user, directing them to the GitHub repository where they can report the issue, instead of letting the command fail silently. + +--- + +## Setting up + +Before starting, there are some prerequisites you need to have: + +- A discord bot token +- A TheTVDB API key +- A place on your host machine to store the database +- Docker and Docker Compose installed on your machine + +--- + +## Setting up + +In order to allow for an easy and reproducible deployment setup, we decided to go with Docker :whale: and Docker Compose for the bot. + +Since we use SQLite as a database, you will have to mount a volume to persist the data between restarts. Choose a place on your host machine to store this database and then create a `docker-compose.yml` file with the following content: + +```yaml +version: "3.9" + +services: + bot: + image: ghcr.io/itsdrike/code-jam-2024:main + env_file: + - .env + volumes: + - /database.db:/app/database.db +``` + +--- + +## Setting up + +You will also need to create a `.env` file in the same directory as the docker-compose.yml file. 
+There are two mandatory environment variables: + +- `BOT_TOKEN`: The token of your discord bot +- `TVDB_API_KEY`: The API key for TheTVDB (you can get one for free here: https://www.thetvdb.com/api-information) + +```env +BOT_TOKEN=your_discord_bot_token +TVDB_API_KEY=your_thetvdb_api_key +``` + +This is the minimal configuration, but you can learn more about other environment variables [here](https://github.com/ItsDrike/code-jam-2024?tab=readme-ov-file#configuring-the-bot). + +--- + +## Setting up + +Once you have created the `docker-compose.yml` and `.env` files, you can start the bot by running the following command in the same directory as the files: + +```bash +docker-compose up -d +``` + +--- + +## Theme + +Our project, Lumina Tracker, was created with the theme of the jam in mind: _Information Overload_. + +All of us have probably watched a bunch of different shows and movies, but after a while, keeping track of everything that you've seen can get quite overwhelming. + +The link to the theme is therefore present through the ability to quickly find information about these medias, alongside its ability to keep track of what you have watched and liked for you. We also tried to keep the interface as simple, intuitive and easy as possible, to avoid overwhelming the user with information. 
+ +--- + +## Contributions + +- Core Bot Implementation: + + - Basic bot: ItsDrike + - Logging: ItsDrike + - Error handling: ItsDrike + +- Infrastructure: + - CI/CD: ItsDrike, Paillat-dev + - Docker: ItsDrike, Paillat-dev + +--- + +### Contributions + +- Database and ORM: + + - Database models: ItsDrike, Paillat-dev, Benji + - Alembic migrations: ItsDrike + +- TheTVDB Integration: + - TheTVDB client: ItsDrike, Paillat-dev, Ash8121 + +--- + +### Contributions + +- User Interface and Interactions: + + - Help command: ItsDrike, Benji, Ash8121 + - Profile command: ItsDrike + - Search command: ItsDrike, Paillat-dev + - Favoriting, watching and episodes UI and logic: ItsDrike, Paillat-dev, Benji + +--- + +### Contributions + +- Performance and Security: + + - Cache: ItsDrike, Paillat-dev, Benji + - Rate limiter: ItsDrike + +- Admin Tools: + - Sudo extension: ItsDrike + +--- + +## Thanks + +We'd like to thank Python Discord and the Event team for giving us the amazing opportunity to work on this project in a group. It was a lot of fun and we managed to learn a bunch of new things along the way! 
diff --git a/contemplative-constellations/presentation/presentation.pdf b/contemplative-constellations/presentation/presentation.pdf new file mode 100644 index 0000000..0aa11db Binary files /dev/null and b/contemplative-constellations/presentation/presentation.pdf differ diff --git a/contemplative-constellations/pyproject.toml b/contemplative-constellations/pyproject.toml new file mode 100644 index 0000000..5a24573 --- /dev/null +++ b/contemplative-constellations/pyproject.toml @@ -0,0 +1,252 @@ +[tool.poetry] +name = "code-jam-2024" +version = "0.1.0" +description = "Python Discord's Code Jam 2024, Contemplative Constellations Team Project" +authors = [ + "ItsDrike ", + "Benji ", + "Paillat-dev ", + "v33010 ", + "Ash8121 " +] +readme = "README.md" +license = "MIT" +packages = [{ include = "src" }] + +[tool.poetry.dependencies] +python = "^3.12" +py-cord = "^2.6.0" +python-decouple = "^3.8" +coloredlogs = "^15.0.1" +pydantic = "^2.8.2" +sqlalchemy = { version = "^2.0.31", extras = ["asyncio"] } +aiosqlite = "^0.20.0" +alembic = "^1.13.2" +aiocache = {extras = ["memcached", "redis"], version = "^0.12.2"} + +[tool.poetry.group.lint.dependencies] +ruff = "^0.3.2" +pre-commit = "^3.6.2" +basedpyright = "^1.13.3" + +[tool.poetry.group.test.dependencies] +pytest = "^8.1.1" +pytest-asyncio = "^0.23.6" +pytest-cov = "^5.0.0" + + +[tool.poetry.group.dev.dependencies] +datamodel-code-generator = {extras = ["http"], version = "^0.25.8"} + + +[tool.datamodel-codegen] +field-constraints = true +snake-case-field = true +target-python-version = "3.12" +use-default-kwarg = true +use-exact-imports = true +use-field-description = true +use-union-operator = true +reuse-model = true +output-model-type = "pydantic_v2.BaseModel" +custom-file-header = "# ruff: noqa: D101 # Allow missing docstrings" +field-include-all-keys = true +strict-nullable = true + +[tool.poetry.scripts] +generate-tvdb-models = "tools.generate_tvdb_models:main" + +[build-system] +requires = ["poetry-core"] 
+build-backend = "poetry.core.masonry.api" + +[tool.ruff] +target-version = "py312" +line-length = 119 +fix = true + +[tool.ruff.lint] +select = ["ALL"] + +ignore = [ + "C90", # mccabe + "CPY", # flake8-copyright + "EM", # flake8-errmsg + "SLF", # flake8-self + "ARG", # flake8-unused-arguments + "TD", # flake8-todos + "FIX", # flake8-fixme + "PD", # pandas-vet + + "D100", # Missing docstring in public module + "D104", # Missing docstring in public package + "D105", # Missing docstring in magic method + "D106", # Missing docstring in public nested class + "D107", # Missing docstring in __init__ + "D203", # Blank line required before class docstring + "D213", # Multi-line summary should start at the second line (incompatible with D212) + "D301", # Use r""" if any backslashes in a docstring + "D401", # First line of docstring should be in imperative mood + "D404", # First word of the docstring should not be "This" + "D405", # Section name should be properly capitalized + "D406", # Section name should end with a newline + "D407", # Missing dashed underline after section + "D408", # Section underline should be in the line following the section's name + "D409", # Section underline should match the length of its name + "D410", # Missing blank line after section + "D411", # Missing blank line before section + "D412", # No blank lines allowed between a section header and its content + "D413", # Missing blank line after last section + "D414", # Section has no content + "D416", # Section name should end with a colon + "D417", # Missing argument description in the docstring + + "ANN101", # Missing type annotation for self in method + "ANN102", # Missing type annotation for cls in classmethod + "ANN204", # Missing return type annotation for special method + "ANN401", # Dynamically typed expressions (typing.Any) disallowed + + "SIM102", # use a single if statement instead of nested if statements + "SIM108", # Use ternary operator {contents} instead of if-else-block + + "G001", # 
Logging statement uses str.format + "G004", # Logging statement uses f-string + "G003", # Logging statement uses + + + "B904", # Raise without `from` within an `except` clause + + "UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)` + "PLR2004", # Using unnamed numerical constants + "PGH003", # Using specific rule codes in type ignores + "E731", # Don't asign a lambda expression, use a def + "S311", # Use `secrets` for random number generation, not `random` + "TRY003", # Avoid specifying long messages outside the exception class + + # Redundant rules with ruff-format: + "E111", # Indentation of a non-multiple of 4 spaces + "E114", # Comment with indentation of a non-multiple of 4 spaces + "E117", # Cheks for over-indented code + "D206", # Checks for docstrings indented with tabs + "D300", # Checks for docstring that use ''' instead of """ + "Q000", # Checks of inline strings that use wrong quotes (' instead of ") + "Q001", # Multiline string that use wrong quotes (''' instead of """) + "Q002", # Checks for docstrings that use wrong quotes (''' instead of """) + "Q003", # Checks for avoidable escaped quotes ("\"" -> '"') + "COM812", # Missing trailing comma (in multi-line lists/tuples/...) + "COM819", # Prohibited trailing comma (in single-line lists/tuples/...) 
+ "ISC001", # Single line implicit string concatenation ("hi" "hey" -> "hihey") + "ISC002", # Multi line implicit string concatenation +] + +[tool.ruff.lint.isort] +order-by-type = false +case-sensitive = true +combine-as-imports = true + +# Redundant rules with ruff-format +force-single-line = false # forces all imports to appear on their own line +force-wrap-aliases = false # Split imports with multiple members and at least one alias +lines-after-imports = -1 # The number of blank lines to place after imports +lines-between-types = 0 # Number of lines to place between "direct" and import from imports +split-on-trailing-comma = false # if last member of multiline import has a comma, don't fold it to single line + +[tool.ruff.lint.pylint] +max-args = 15 +max-branches = 15 +max-locals = 15 +max-nested-blocks = 5 +max-returns = 8 +max-statements = 75 + +[tool.ruff.lint.per-file-ignores] +"tests/**.py" = [ + "ANN", # annotations + "D", # docstrings + "S101", # Use of assert +] +".github/scripts/**.py" = [ + "INP001", # Implicit namespace package +] +"alembic-migrations/env.py" = [ + "INP001", # Implicit namespace package +] +"alembic-migrations/versions/*" = [ + "INP001", # Implicit namespace package + "D103", # Missing docstring in public function + "D400", # First line should end with a period + "D415", # First line should end with a period, question mark, or exclamation point +] + +[tool.ruff.format] +line-ending = "lf" + +[tool.basedpyright] +pythonPlatform = "All" +pythonVersion = "3.12" +typeCheckingMode = "all" + +# Diagnostic behavior settings +strictListInference = false +strictDictionaryInference = false +strictSetInference = false +analyzeUnannotatedFunctions = true +strictParameterNoneValue = true +enableTypeIgnoreComments = true +deprecateTypingAliases = true +enableExperimentalFeatures = false +disableBytesTypePromotions = true + +# Diagnostic rules +reportAny = false +reportImplicitStringConcatenation = false +reportUnreachable = "information" 
+reportMissingTypeStubs = false +reportUninitializedInstanceVariable = false # until https://github.com/DetachHead/basedpyright/issues/491 +reportMissingParameterType = false # ruff's flake8-annotations (ANN) already covers this + gives us more control + +# Too strict for py-cord codebases +reportIncompatibleMethodOverride = false +reportUnusedCallResult = false + +# Unknown type reporting rules (too strict for most code-bases) +reportUnknownArgumentType = false +reportUnknownVariableType = false +reportUnknownMemberType = false +reportUnknownParameterType = false +reportUnknownLambdaType = false + +executionEnvironments = [ + { root = "src/db_tables", reportImportCycles = false }, +] + +[tool.pytest.ini_options] +minversion = "6.0" +asyncio_mode = "auto" +testpaths = ["tests"] +addopts = "--strict-markers --cov --no-cov-on-fail" + +[tool.coverage.report] +precision = 2 +fail_under = 0 +show_missing = true +skip_covered = false +skip_empty = false +sort = "cover" +exclude_lines = [ + "\\#\\s*pragma: no cover", + "^\\s*if (typing\\.)?TYPE_CHECKING:", + "^\\s*@(abc\\.)?abstractmethod", + "^\\s*@(typing\\.)?overload", + "^\\s*def __repr__\\(", + "^\\s*class .*\\bProtocol\\):", + "^\\s*raise NotImplementedError", + "^\\s*return NotImplemented", + "^\\s*\\.\\.\\.", +] + +[tool.coverage.run] +relative_files = true +parallel = true +branch = true +timid = false +source = ["src"] diff --git a/contemplative-constellations/src/__init__.py b/contemplative-constellations/src/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/contemplative-constellations/src/__main__.py b/contemplative-constellations/src/__main__.py new file mode 100644 index 0000000..21b03e7 --- /dev/null +++ b/contemplative-constellations/src/__main__.py @@ -0,0 +1,62 @@ +import asyncio + +import aiohttp +import discord +from aiocache import SimpleMemoryCache + +from src.bot import Bot +from src.settings import BOT_TOKEN, SQLITE_DATABASE_FILE +from src.utils.database import 
async def _init_database(*, retries: int = 5, retry_time: float = 3) -> None:
    """Try to connect to the database, keep trying if we fail.

    Once a connection is established, pending migrations are applied before returning.

    :param retries: Number of re-try attempts, in case db connection fails.
    :param retry_time: Time (in seconds) to wait between re-try attempts.
    :raises ConnectionRefusedError: If the connection still fails after all re-try attempts.
    """
    load_db_models()

    # NOTE: The retry logic here isn't that useful with sqlite databases, but it's here
    # in case we switch to a different database in the future.
    for attempt in range(1, retries + 1):
        log.debug(f"Connecting to the database: {SQLITE_DATABASE_FILE}")
        try:
            conn = engine.connect()
        except ConnectionRefusedError as exc:
            # BUGFIX: previously, exhausting all retries fell through the loop and the
            # function still logged success; now we give up loudly on the last attempt
            # (and no longer sleep pointlessly after it).
            if attempt == retries:
                log.critical(f"Database connection failed after {retries} attempts, giving up.")
                raise
            log.exception(f"Database connection failed, retrying in {retry_time} seconds.", exc_info=exc)
            await asyncio.sleep(retry_time)
        else:
            # Run any pending migrations before the rest of the app starts using the db.
            async with conn, conn.begin():
                await conn.run_sync(apply_db_migrations)
            break

    log.debug("Database connection established")
+ """ + intents = discord.Intents().default() + intents.message_content = True + + await _init_database() + + cache = SimpleMemoryCache() + + async with aiohttp.ClientSession() as http_session, get_db_session() as db_session: + bot = Bot(intents=intents, http_session=http_session, db_session=db_session, cache=cache) + bot.load_all_extensions() + + log.info("Starting the bot...") + async with bot as bot_: + await bot_.start(BOT_TOKEN) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/contemplative-constellations/src/bot.py b/contemplative-constellations/src/bot.py new file mode 100644 index 0000000..58c1813 --- /dev/null +++ b/contemplative-constellations/src/bot.py @@ -0,0 +1,62 @@ +from collections.abc import Sequence +from sys import exception +from typing import Any, ClassVar, override + +import aiohttp +import discord +from aiocache import BaseCache +from sqlalchemy.ext.asyncio import AsyncSession + +from src.utils.log import get_logger + +log = get_logger(__name__) + + +class Bot(discord.Bot): + """Bot subclass that holds the state of the application. + + The bot instance is available throughout the application, which makes it + suitable to store various state variables that are needed in multiple places, + such as database connections. 
+ """ + + EXTENSIONS: ClassVar[Sequence[str]] = [ + "src.exts.ping", + "src.exts.error_handler", + "src.exts.sudo", + "src.exts.help", + "src.exts.tvdb_info", + ] + + def __init__( + self, + *args: object, + http_session: aiohttp.ClientSession, + db_session: AsyncSession, + cache: BaseCache, + **kwargs: object, + ) -> None: + """Initialize the bot instance, containing various state variables.""" + super().__init__(*args, **kwargs) + self.http_session = http_session + self.db_session = db_session + self.cache = cache + + self.event(self.on_ready) + + async def on_ready(self) -> None: + """The `on_ready` event handler.""" + log.info(f"{self.user} is ready and online!") + + def load_all_extensions(self) -> None: + """Load all of our bot extensions. + + This relies on the `EXTENSIONS` class variable. + """ + log.info("Loading extensions...") + self.load_extensions(*self.EXTENSIONS) + + @override + def on_error(self, event_method: str, *args: Any, **kwargs: Any) -> None: + """Log errors raised in commands and events properly, rather than just printing them to stderr.""" + log.exception(f"Unhandled exception in {event_method}", exc_info=exception()) diff --git a/contemplative-constellations/src/converters/__init__.py b/contemplative-constellations/src/converters/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/contemplative-constellations/src/converters/bot_extension.py b/contemplative-constellations/src/converters/bot_extension.py new file mode 100644 index 0000000..4377ea8 --- /dev/null +++ b/contemplative-constellations/src/converters/bot_extension.py @@ -0,0 +1,42 @@ +import importlib +import inspect +from typing import override + +from discord.ext.commands import Bot, Context, Converter + + +class ValidBotExtension(Converter[str]): + """Convert given extension name to a fully qualified path to extension.""" + + @staticmethod + def valid_extension_path(extension_name: str) -> str: + """Get the fully qualified path to a valid bot extension. 
class ValidBotExtension(Converter[str]):
    """Convert given extension name to a fully qualified path to extension."""

    @staticmethod
    def valid_extension_path(extension_name: str) -> str:
        """Get the fully qualified path to a valid bot extension.

        The `extension_name` can be:
        - A fully qualified path (e.g. 'src.exts.ping'),
        - A suffix component of a fully qualified path (e.g. 'exts.ping', or just 'ping')

        The suffix must still be an entire path component, so while 'ping' is valid, 'pi' or 'ng' is not.

        If the `extension_name` doesn't point to a valid extension a ValueError will be raised.
        """
        # Normalize: strip any known prefix, then re-add the canonical one, so all of
        # 'src.exts.ping', 'exts.ping' and 'ping' resolve to 'src.exts.ping'.
        extension_name = extension_name.removeprefix("src.exts.").removeprefix("exts.")
        extension_name = f"src.exts.{extension_name}"

        # Importing is used as the validation step: only modules that actually exist
        # under src.exts can pass. This could technically be a vulnerability, but this
        # converter can only be used by the bot owner.
        try:
            imported = importlib.import_module(extension_name)
        except ModuleNotFoundError:
            raise ValueError(f"Unable to import '{extension_name}'.")

        # If it lacks a setup function, it's not an extension
        if not inspect.isfunction(getattr(imported, "setup", None)):
            raise ValueError(f"'{extension_name}' is not a valid extension.")

        return extension_name

    @override
    async def convert(self, ctx: Context[Bot], argument: str) -> str:
        """Try to match given `argument` to a valid extension within the bot project."""
        return self.valid_extension_path(argument)
@overload
async def list_put_item(
    session: AsyncSession,
    user_list: UserList,
    tvdb_id: int,
    kind: Literal[UserListItemKind.MOVIE, UserListItemKind.SERIES],
) -> UserListItem: ...


@overload
async def list_put_item(
    session: AsyncSession,
    user_list: UserList,
    tvdb_id: int,
    kind: Literal[UserListItemKind.EPISODE],
    series_id: int,
) -> UserListItem: ...


async def list_put_item(
    session: AsyncSession, user_list: UserList, tvdb_id: int, kind: UserListItemKind, series_id: int | None = None
) -> UserListItem:
    """Add an item to a user list.

    :param series_id: TVDB id of the parent series; only required for
        ``EPISODE`` items (see the overloads above).
    :raises ValueError: If the item is already present in the list.
    """
    # Make sure the referenced media row exists first, so the item has something to point at.
    if series_id:
        await ensure_media(session, tvdb_id, kind, series_id=series_id)
    else:
        await ensure_media(session, tvdb_id, kind)
    # (list_id, tvdb_id, kind) is the composite primary key of UserListItem.
    if await session.get(UserListItem, (user_list.id, tvdb_id, kind)) is not None:
        raise ValueError(f"Item {tvdb_id} is already in list {user_list.id}.")

    item = UserListItem(list_id=user_list.id, tvdb_id=tvdb_id, kind=kind)
    session.add(item)
    await session.commit()
    return item


async def list_get_item(
    session: AsyncSession, user_list: UserList, tvdb_id: int, kind: UserListItemKind
) -> UserListItem | None:
    """Get an item from a user list."""
    return await session.get(UserListItem, (user_list.id, tvdb_id, kind))


async def list_remove_item(session: AsyncSession, user_list: UserList, item: UserListItem) -> None:
    """Remove an item from a user list."""
    await session.delete(item)
    await session.commit()
    # Reload the list's items relationship so it no longer contains the removed item.
    await session.refresh(user_list, ["items"])


async def list_remove_item_safe(
    session: AsyncSession, user_list: UserList, tvdb_id: int, kind: UserListItemKind
) -> None:
    """Removes an item from a user list if it exists.

    Unlike `list_remove_item`, this is a no-op when the item isn't in the list.
    """
    if item := await list_get_item(session, user_list, tvdb_id, kind):
        await list_remove_item(session, user_list, item)


@overload
async def list_put_item_safe(
    session: AsyncSession,
    user_list: UserList,
    tvdb_id: int,
    kind: Literal[UserListItemKind.MOVIE, UserListItemKind.SERIES],
) -> UserListItem: ...


@overload
async def list_put_item_safe(
    session: AsyncSession,
    user_list: UserList,
    tvdb_id: int,
    kind: Literal[UserListItemKind.EPISODE],
    series_id: int,
) -> UserListItem: ...


async def list_put_item_safe(
    session: AsyncSession, user_list: UserList, tvdb_id: int, kind: UserListItemKind, series_id: int | None = None
) -> UserListItem:
    """Add an item to a user list, or return the existing item if it is already present.

    Idempotent variant of `list_put_item` (never raises for duplicates).
    """
    if series_id:
        await ensure_media(session, tvdb_id, kind, series_id=series_id)
    else:
        await ensure_media(session, tvdb_id, kind)
    item = await list_get_item(session, user_list, tvdb_id, kind)
    if item:
        return item

    item = UserListItem(list_id=user_list.id, tvdb_id=tvdb_id, kind=kind)
    session.add(item)
    await session.commit()
    return item


async def refresh_list_items(session: AsyncSession, user_list: UserList) -> None:
    """Refresh the items in a user list."""
    await session.refresh(user_list, ["items"])


async def get_list_item(
    session: AsyncSession,
    user_list: UserList,
    tvdb_id: int,
    kind: UserListItemKind,
) -> UserListItem | None:
    """Get a user list item.

    NOTE(review): this duplicates `list_get_item` above (and its docstring said
    "Get a user list", which was inaccurate) — consider consolidating on one name.
    """
    return await session.get(UserListItem, (user_list.id, tvdb_id, kind))
async def ensure_media(session: AsyncSession, tvdb_id: int, kind: UserListItemKind, **kwargs: Any) -> None:
    """Ensure that a tvdb media item is present in its respective table.

    :param kwargs: Extra column values for a newly created row; for ``EPISODE``
        this is expected to include ``series_id``.
    """
    # Pick the table matching the item kind (the match is exhaustive over UserListItemKind).
    match kind:
        case UserListItemKind.MOVIE:
            cls = Movie
        case UserListItemKind.SERIES:
            cls = Series
        case UserListItemKind.EPISODE:
            cls = Episode
    media = await session.get(cls, tvdb_id)
    if media is None:
        media = cls(tvdb_id=tvdb_id, **kwargs)
        session.add(media)
        await session.commit()

    # Episodes reference a parent series row; back-fill it if it doesn't exist yet.
    if isinstance(media, Episode):
        await session.refresh(media, ["series"])
        if not media.series:
            # NOTE(review): assumes "series_id" is present in kwargs whenever the parent
            # series row is missing — raises KeyError otherwise; confirm all callers pass it.
            series = Series(tvdb_id=kwargs["series_id"])
            session.add(series)
            await session.commit()
+ """ + if await user_get_list(session, user, name) is not None: + raise ValueError(f"List with name {name} already exists for user {user.discord_id}.") + user_list = UserList(user_id=user.discord_id, name=name, item_kind=item_kind) + session.add(user_list) + await session.commit() + await session.refresh(user, ["lists"]) + + return user_list + + +async def user_get_list_safe( + session: AsyncSession, user: User, name: str, kind: UserListKind = UserListKind.MEDIA +) -> UserList: + """Get a user's list by name, creating it if it doesn't exist. + + :param kind: The kind of list to create if it doesn't exist. + :return: The user list. + """ + user_list = await user_get_list(session, user, name) + if user_list is None: + user_list = await user_create_list(session, user, name, kind) + + return user_list diff --git a/contemplative-constellations/src/db_tables/README.md b/contemplative-constellations/src/db_tables/README.md new file mode 100644 index 0000000..39a071a --- /dev/null +++ b/contemplative-constellations/src/db_tables/README.md @@ -0,0 +1,85 @@ + + +# Welcome to `db_tables` directory + +This directory defines all of our database tables. To do so, we're using [`SQLAlchemy`](https://docs.sqlalchemy.org) +ORM. That means our database tables are defined as python classes, that follow certain special syntax to achieve this. + +All of these tables must inherit from the `Base` class, that can be imported from `src.utils.database` module. + +There is no need to register newly created classes / files anywhere, as all files in this directory (except those +starting with `_`) will be automatically imported and picked up by SQLAlchemy. 
+ +## Example database class + +Imagine an application that schedules appointmnets for users: + +`user.py`: + +```python +import enum +from typing import TYPE_CHECKING +from sqlalchemy import Mapped, mapped_column, relationship + +from src.utils.database import Base + + +# Prevent circular imports for relationships +if TYPE_CHECKING: + from src.db_tables.appointment import Appointment + + +class Role(enum.IntEnum): + """Enumeration of all possible user roles.""" + + USER = 0 + ADMIN = 1 + + +class User(Base): + """User database table.""" + + __tablename__ = "user" + + id: Mapped[int] = mapped_column(primary_key=True, index=True, nullable=False) + email: Mapped[str] = mapped_column(unique=True, nullable=False) + name: Mapped[str] = mapped_column(nullable=False) + surname: Mapped[str] = mapped_column(nullable=False) + user_role: Mapped[Role] = mapped_column(nullable=False, default=Role.USER) + + appointments: Mapped[list["Appointment"]] = relationship( + lazy="selectin", + back_populates="user", + cascade="all, delete-orphan", + ) +``` + +`appointment.py`: + +```python +from datetime import date, time +from typing import TYPE_CHECKING + +from sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from src.utils.database import Base + +# Prevent circular imports for relationships +if TYPE_CHECKING: + from src.db_tables.user import User + + +class Appointment(Base): + """Appointment database table.""" + + __tablename__ = "appointment" + + id: Mapped[int] = mapped_column(primary_key=True, index=True, nullable=False) + booked_date: Mapped[date] = mapped_column(nullable=False) + booked_time: Mapped[time] = mapped_column(nullable=False) + user_id: Mapped[int] = mapped_column(ForeignKey("user.id"), nullable=False) + + user: Mapped["User"] = relationship(lazy="selectin", back_populates="appointments") +``` diff --git a/contemplative-constellations/src/db_tables/__init__.py b/contemplative-constellations/src/db_tables/__init__.py new 
class Movie(Base):
    """Table to store movies."""

    __tablename__ = "movies"

    # TheTVDB id doubles as the primary key; no other movie data is stored locally.
    tvdb_id: Mapped[int] = mapped_column(primary_key=True)


class Series(Base):
    """Table to store series."""

    __tablename__ = "series"

    tvdb_id: Mapped[int] = mapped_column(primary_key=True)

    # One-to-many: all episodes belonging to this series. selectin loading avoids
    # per-row lazy loads; delete-orphan removes episodes together with their series.
    episodes: Mapped[list["Episode"]] = relationship(
        lazy="selectin",
        back_populates="series",
        cascade="all, delete-orphan",
    )


class Episode(Base):
    """Table to store episodes of series."""

    __tablename__ = "episodes"

    tvdb_id: Mapped[int] = mapped_column(primary_key=True)
    # Parent series (FK to series.tvdb_id).
    series_id: Mapped[int] = mapped_column(ForeignKey("series.tvdb_id"))

    series: Mapped[Series] = relationship(lazy="selectin", back_populates="episodes")
class User(Base):
    """Table to store users."""

    __tablename__ = "users"

    # Discord snowflake id, used directly as the primary key.
    discord_id: Mapped[int] = mapped_column(primary_key=True)

    # All lists (watched / favorites / ...) owned by this user.
    lists: Mapped[list["UserList"]] = relationship("UserList", back_populates="user")
+ """ + + __tablename__ = "user_lists" + + id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True) + user_id: Mapped[int] = mapped_column(ForeignKey("users.discord_id"), nullable=False) + name: Mapped[str] = mapped_column(nullable=False) + item_kind: Mapped[UserListKind] = mapped_column(nullable=False) + + user: Mapped["User"] = relationship("User", back_populates="lists") + items: Mapped[list["UserListItem"]] = relationship("UserListItem", back_populates="user_list") + + __table_args__ = ( + UniqueConstraint("user_id", "name", name="unique_user_list_name"), + Index( + "ix_user_lists_user_id_name", + "user_id", + "name", + unique=True, + ), + ) + + +class UserListItem(Base): + """Base class for items in a user list.""" + + __tablename__ = "user_list_items" + list_id: Mapped[int] = mapped_column(ForeignKey("user_lists.id"), primary_key=True) + tvdb_id: Mapped[int] = mapped_column(primary_key=True) + kind: Mapped[UserListItemKind] = mapped_column(nullable=False, primary_key=True) + + user_list: Mapped["UserList"] = relationship("UserList", back_populates="items") + + # 'tvdb_id' can reference Series, Movie, or Episode tables, determined by 'media_type' + # viewonly=True is used to prevent SQLAlchemy from managing complex relationships, + # as we only need them for querying + series: Mapped["Series"] = relationship( + "Series", + foreign_keys=[tvdb_id], + primaryjoin="and_(UserListItem.tvdb_id == Series.tvdb_id, UserListItem.kind == 'SERIES')", + uselist=False, + viewonly=True, + ) + movie: Mapped["Movie"] = relationship( + "Movie", + foreign_keys=[tvdb_id], + primaryjoin="and_(UserListItem.tvdb_id == Movie.tvdb_id, UserListItem.kind == 'MOVIE')", + uselist=False, + viewonly=True, + ) + episode: Mapped["Episode"] = relationship( + "Episode", + foreign_keys=[tvdb_id], + primaryjoin="and_(UserListItem.tvdb_id == Episode.tvdb_id, UserListItem.kind == 'EPISODE')", + uselist=False, + viewonly=True, + ) + + @property + def media(self) -> "Series | Movie | 
class ErrorHandler(Cog):
    """Cog to handle any errors invoked from commands."""

    def __init__(self, bot: Bot) -> None:
        self.bot = bot

    async def _handle_check_failure(
        self,
        ctx: ApplicationContext,
        exc: errors.CheckFailure | commands_errors.CheckFailure,
    ) -> None:
        """Respond to a failed command check with a user-friendly error embed.

        Known check failures get a specific message; anything unrecognized falls
        back to the generic unhandled-exception embed.
        """
        if isinstance(exc, commands_errors.CheckAnyFailure):
            # We don't really care that all of the checks have failed, we need to produce an error message here,
            # so just take the first failure and work with that as the error.

            # Even though the docstring says that exc.errors should contain the CheckFailure exceptions,
            # the type-hint says that the errors should be in exc.checks... Cast the type away to Any
            # and just check both, see where the error actually is and use that
            errors1 = cast(list[Any], exc.errors)
            errors2 = cast(list[Any], exc.checks)

            if len(errors1) > 0 and isinstance(errors1[0], (errors.CheckFailure, commands_errors.CheckFailure)):
                exc = errors1[0]
            elif len(errors2) > 0 and isinstance(errors2[0], (errors.CheckFailure, commands_errors.CheckFailure)):
                exc = errors2[0]
            else:
                # Just in case, this library is a mess...
                raise ValueError("Never (hopefully), here's some random code: 0xd1ff0aaac")

        if isinstance(exc, commands_errors.NotOwner):
            embed = build_error_embed(description=f"{FAIL_EMOJI} This command is limited to the bot owner.")

        elif isinstance(
            exc,
            (
                commands_errors.MissingPermissions,
                commands_errors.MissingRole,
                commands_errors.MissingAnyRole,
            ),
        ):
            embed = build_error_embed(description=f"{FAIL_EMOJI} You don't have permission to run this command.")

        elif isinstance(
            exc,
            (
                commands_errors.BotMissingRole,
                commands_errors.BotMissingAnyRole,
                commands_errors.BotMissingPermissions,
            ),
        ):
            embed = build_error_embed(
                description=f"{FAIL_EMOJI} I don't have the necessary permissions to perform this action."
            )

        elif isinstance(exc, commands_errors.NoPrivateMessage):
            embed = build_error_embed(description=f"{FAIL_EMOJI} This command can only be used in a server.")

        elif isinstance(exc, commands_errors.PrivateMessageOnly):
            embed = build_error_embed(description=f"{FAIL_EMOJI} This command can only be used in a DM.")

        elif isinstance(exc, commands_errors.NSFWChannelRequired):
            embed = build_error_embed(description=f"{FAIL_EMOJI} This command can only be used in an NSFW channel.")
        else:
            embed = build_unhandled_application_embed(ctx, exc)

        await ctx.send(f"Sorry {ctx.author.mention}", embed=embed)

    async def _handle_command_invoke_error(
        self,
        ctx: ApplicationContext,
        exc: errors.ApplicationCommandInvokeError,
    ) -> None:
        """Respond to an exception raised from inside a command's body.

        The interesting exception is the invoke error's ``__cause__``; handle the
        causes we know about (rate limits) and report anything else as unhandled.
        """
        original_exception = exc.__cause__

        if original_exception is None:
            embed = build_unhandled_application_embed(ctx, exc)
            log.exception("Got ApplicationCommandInvokeError without a cause.", exc_info=exc)

        elif isinstance(original_exception, RateLimitExceededError):
            msg = original_exception.msg or "Hit a rate-limit, please try again later."
            # NOTE(review): this was an f-string with no placeholder — the actual
            # reset time appears to have been lost. TODO: expose the reset
            # timestamp on RateLimitExceededError and interpolate it here.
            time_remaining = "Expected reset: "
            footer = None
            if original_exception.updates_when_exceeded:
                footer = EmbedFooter(
                    text="Spamming the command will only increase the time you have to wait.",
                )
            embed = build_error_embed(
                title="Rate limit exceeded",
                description=f"{FAIL_EMOJI} {msg}",
                # BUG FIX: the field name used to be "" — Discord rejects embed
                # fields with an empty name (1-256 chars required), which would
                # make this error response itself fail to send.
                fields=[EmbedField(name="Rate limit", value=time_remaining)],
                footer=footer,
            )
        else:
            embed = build_unhandled_application_embed(ctx, original_exception)
            log.exception("Unhandled exception occurred.", exc_info=original_exception)

        await ctx.send(f"Sorry {ctx.author.mention}", embed=embed)

    @Cog.listener()
    async def on_application_command_error(self, ctx: ApplicationContext, exc: errors.DiscordException) -> None:
        """Handle exceptions that have occurred while running some command."""
        if isinstance(exc, (errors.CheckFailure, commands_errors.CheckFailure)):
            await self._handle_check_failure(ctx, exc)
            return

        if isinstance(exc, errors.ApplicationCommandInvokeError):
            await self._handle_command_invoke_error(ctx, exc)
            return

        embed = build_unhandled_application_embed(ctx, exc)
        await ctx.send(f"Sorry {ctx.author.mention}", embed=embed)

    @Cog.listener()
    async def on_error(self, event_method: str, *args: object, **kwargs: object) -> None:
        """Handle exception that have occurred in any event.

        This is a catch-all for errors that aren't handled by any other listeners, or fell through (were re-raised).
        """
        # BUG FIX: corrected "excepton" -> "exception" typo in the log message.
        log.exception(f"Unhandled exception occurred {event_method=} {args=!r} {kwargs=!r}", exc_info=True)

        exc = sys.exc_info()[1]
        if exc is None:
            return

        # If the event received an ApplicationContext anywhere in its arguments,
        # use it to also inform the invoking user about the failure.
        for arg in chain(args, kwargs.values()):
            if isinstance(arg, ApplicationContext):
                ctx = arg

                embed = build_unhandled_application_embed(ctx, exc)
                await ctx.send(f"Sorry {ctx.author.mention}", embed=embed)
                return
Unknown error has occurred without being properly handled. + Please report this at the [GitHub repository]({GITHUB_REPO}) + + **Command**: `{ctx.command.qualified_name}` + **Exception details**: ```{exc.__class__.__name__}: {exc}``` + """ + ), + ) + + +async def send_unhandled_application_embed(ctx: ApplicationContext, exc: BaseException) -> None: + """Send an embed regarding the unhandled exception that occurred.""" + await ctx.send(f"Sorry {ctx.author.mention}", embed=build_unhandled_application_embed(ctx, exc)) diff --git a/contemplative-constellations/src/exts/error_handler/view.py b/contemplative-constellations/src/exts/error_handler/view.py new file mode 100644 index 0000000..17b1ecb --- /dev/null +++ b/contemplative-constellations/src/exts/error_handler/view.py @@ -0,0 +1,51 @@ +import sys +import textwrap +from typing import Self, override + +import discord +from discord.interactions import Interaction +from discord.ui import Item + +from src.exts.error_handler.utils import build_error_embed +from src.settings import GITHUB_REPO +from src.utils.log import get_logger + +log = get_logger(__name__) + + +# TODO: Is this file really the right place for this? Or would utils work better? +class ErrorHandledView(discord.ui.View): + """View with error-handling support.""" + + @override + async def on_error(self, error: Exception, item: Item[Self], interaction: Interaction) -> None: + log.exception( + f"Unhandled exception in view: {self.__class__.__name__} (item={item.__class__.__name__})", + exc_info=True, + ) + + exc_info = sys.exc_info() + exc = exc_info[1] + if exc is None: + await super().on_error(error, item, interaction) + return + + embed = build_error_embed( + title="Unhandled exception", + description=textwrap.dedent( + f""" + Unknown error has occurred without being properly handled. 
class HelpCog(Cog):
    """Cog to provide help info for all available bot commands."""

    def __init__(self, bot: Bot) -> None:
        self.bot = bot

    @slash_command()
    async def help(self, ctx: ApplicationContext) -> None:
        """Shows help for all available commands."""
        cat_image_url: str = await get_cat_image_url(self.bot.http_session)
        fields: list[tuple[str, str]] = []

        async def gather_all_commands(command: ApplicationCommand[Any, ..., Any], depth: int = 0) -> None:
            """Recursively collect (name, description) pairs for commands the invoker can run."""
            try:
                can_run = await command.can_run(ctx)
            except (CheckFailure, CommandCheckFailure):
                can_run = False
            if not can_run:
                return
            full_command_name: str = f"{mention_command(command)}".strip()
            if isinstance(command, SlashCommand):
                # depth 0 commands get no "sub-" prefix ("sub-" * -1 == "").
                fields.append(
                    (f'{COMMAND_EMOJI} {full_command_name} {"sub-" * (depth - 1)}command', f"{command.description}")
                )
            elif isinstance(command, SlashCommandGroup):
                fields.append((f'{GROUP_EMOJI}`{command}` {"sub-" * depth}group', f"{command.description}"))
                for subcommand in command.subcommands:
                    await gather_all_commands(subcommand, depth + 1)
            else:
                log.error(f"Got unexpected command type: {command.__class__.__name__}, {command!r}")

        for command in self.bot.commands:
            await gather_all_commands(command)

        # IDIOM FIX: was a lambda assigned to a name (PEP 8 / E731); a local def
        # is equivalent and can carry a docstring.
        def new_embed(url: str) -> discord.Embed:
            """Create a fresh help page embed with the cat thumbnail."""
            return discord.Embed(title="Help command").set_thumbnail(url=url)

        # Paginate into embeds of at most 5 fields each.
        embeds: list[discord.Embed] = [new_embed(cat_image_url)]
        for name, value in fields:
            if len(embeds[-1].fields) >= 5:
                embeds.append(new_embed(cat_image_url))
            embeds[-1].add_field(name=name, value=value, inline=False)
        paginator = Paginator([Page(embeds=[embed]) for embed in embeds])
        await paginator.respond(ctx.interaction)
class Sudo(Cog):
    """Cog that allows the bot owner to perform various privileged actions."""

    def __init__(self, bot: Bot) -> None:
        self.bot = bot

    # Parent slash-command group; every command below is registered under /sudo.
    sudo = SlashCommandGroup(name="sudo", description="Commands for the bot owner.")

    @sudo.command()
    @option("extension", ValidBotExtension)
    async def load(self, ctx: ApplicationContext, extension: str) -> None:
        """Dynamically load a requested bot extension.

        This can be very useful for debugging and testing new features without having to restart the bot.
        """
        try:
            self.bot.load_extension(extension)
        except ExtensionAlreadyLoaded:
            await ctx.respond(f"{FAIL_EMOJI} Extension is already loaded")
            return
        await ctx.respond(f"{SUCCESS_EMOJI} Extension `{extension}` loaded")

    @sudo.command()
    @option("extension", ValidBotExtension)
    async def unload(self, ctx: ApplicationContext, extension: str) -> None:
        """Dynamically unload a requested bot extension.

        This can be very useful for debugging and testing new features without having to restart the bot.
        """
        try:
            self.bot.unload_extension(extension)
        except ExtensionNotLoaded:
            await ctx.respond(f"{FAIL_EMOJI} Extension is not loaded")
            return
        await ctx.respond(f"{SUCCESS_EMOJI} Extension `{extension}` unloaded")

    @sudo.command()
    @option("extension", ValidBotExtension)
    async def reload(self, ctx: ApplicationContext, extension: str) -> None:
        """Dynamically reload a requested bot extension.

        This can be very useful for debugging and testing new features without having to restart the bot.
        """
        # Unload first, remembering whether it was loaded at all, then load fresh.
        try:
            self.bot.unload_extension(extension)
        except ExtensionNotLoaded:
            already_loaded = False
        else:
            already_loaded = True
        self.bot.load_extension(extension)

        # Reloading an extension that wasn't previously loaded is reported as a plain load.
        action = "reloaded" if already_loaded else "loaded"
        await ctx.respond(f"{SUCCESS_EMOJI} Extension `{extension}` {action}")

    @override
    async def cog_check(self, ctx: ApplicationContext) -> bool:
        """Only the bot owners can use this cog."""
        if not await self.bot.is_owner(cast(User, ctx.author)):
            raise NotOwner

        # NOTE(review): the base Cog.cog_check is returned without await here,
        # which is only correct if it is synchronous in the library version in
        # use — confirm.
        return super().cog_check(ctx)
description="The user to show the profile for.", required=False) + async def profile(self, ctx: ApplicationContext, *, user: User | Member | None = None) -> None: + """Show a user's profile.""" + await ctx.defer() + + if user is None: + user = ctx.author + + # Convert Member to User (Member isn't a subclass of User...) + if isinstance(user, Member): + user = user._user # pyright: ignore[reportPrivateUsage] + + # TODO: Friend check (don't allow looking at other people's profiles, unless + # they are friends with the user, or it's their own profile) + # https://github.com/ItsDrike/code-jam-2024/issues/51 + + db_user = await user_get_safe(self.bot.db_session, user.id) + view = ProfileView( + bot=self.bot, + tvdb_client=self.tvdb_client, + user=user, + invoker_user_id=ctx.author.id, + watched_list=await user_get_list_safe(self.bot.db_session, db_user, "watched"), + favorite_list=await user_get_list_safe(self.bot.db_session, db_user, "favorite"), + ) + await view.send(ctx.interaction) + + @slash_command() + @option("query", input_type=str, description="The query to search for.") + @option( + "type", + input_type=str, + parameter_name="entity_type", + description="The type of entity to search for.", + choices=["movie", "series"], + required=False, + ) + @option("by_id", input_type=bool, description="Search by tvdb ID.", required=False) + @rate_limited(key=lambda self, ctx: f"{ctx.user}", limit=2, period=8, update_when_exceeded=True, prefix_key=True) + async def search( + self, + ctx: ApplicationContext, + *, + query: str, + entity_type: Literal["movie", "series"] | None = None, + by_id: bool = False, + ) -> None: + """Search for a movie or series.""" + await ctx.defer() + + if by_id: + if query.startswith("movie-"): + entity_type = "movie" + query = query[6:] + elif query.startswith("series-"): + entity_type = "series" + query = query[7:] + try: + match entity_type: + case "movie": + response = [ + await Movie.fetch(query, self.tvdb_client, extended=True, 
def setup(bot: Bot) -> None:
    """Register the InfoCog cog."""
    # BUG FIX: docstring previously said "Register the PingCog cog." — a
    # copy-paste leftover from ping.py; this module registers InfoCog.
    bot.add_cog(InfoCog(bot))
class MediaView(ErrorHandledView, ABC):
    """Base class for views that display info about some media (movie/series/episode)."""

    def __init__(
        self,
        *,
        bot: Bot,
        user_id: int,
        invoker_user_id: int,
        watched_list: UserList,
        favorite_list: UserList,
    ) -> None:
        """Initialize MediaView.

        :param bot: The bot instance.
        :param user_id:
            Discord ID of the user that invoked this view.

            Only this user will be able to interact with this view and the relevant information
            will be tailored towards this user based on their data. (i.e. whether they have already
            watched this media / marked it favorite.)
        :param watched_list: The list of all watched items for this user.
        :param favorite_list: The list of all favorited items for this user.
        """
        super().__init__(disable_on_timeout=True)

        self.bot = bot
        self.user_id = user_id
        self.invoker_user_id = invoker_user_id
        self.watched_list = watched_list
        self.favorite_list = favorite_list

        # Toggle button that flips between "watched" and "not watched" styles.
        self.watched_button = ReactiveButton(
            initial_state=False,  # This should be updated on _initialize
            state_map={
                False: ReactiveButtonStateStyle(
                    label="Mark as watched",
                    style=discord.ButtonStyle.success,
                    emoji="✅",
                ),
                True: ReactiveButtonStateStyle(
                    label="Unmark as watched",
                    style=discord.ButtonStyle.primary,
                    emoji="❌",
                ),
            },
            row=1,
        )
        self.watched_button.callback = self._watched_button_callback

        # Toggle button that flips between "favorite" and "not favorite" styles.
        self.favorite_button = ReactiveButton(
            initial_state=False,  # This should be updated on _initialize
            state_map={
                False: ReactiveButtonStateStyle(
                    label="Favorite",
                    style=discord.ButtonStyle.primary,
                    emoji="⭐",
                ),
                True: ReactiveButtonStateStyle(
                    label="Unfavorite",
                    style=discord.ButtonStyle.secondary,
                    emoji="❌",
                ),
            },
            row=1,
        )
        self.favorite_button.callback = self._favorite_button_callback

    def _add_items(self) -> None:
        """Add all relevant items to the view."""
        self.add_item(self.watched_button)
        self.add_item(self.favorite_button)

    async def _initialize(self) -> None:
        """Initialize the view to reflect the current state of the media.

        This will (likely) perform database lookups and other necessary operations to obtain
        the current state of the media or the user, configuring the internal state accordingly.

        Tasks that need to be performed here:
        - Call `self._add_items()`
        - Set the state of the watched and favorite buttons.

        This method will only be called once.
        """
        self._add_items()
        # Both calls hit the subclass-provided database lookups.
        self.watched_button.set_state(await self.is_watched())
        self.favorite_button.set_state(await self.is_favorite())

    @abstractmethod
    def _get_embed(self) -> discord.Embed:
        """Get the discord embed to be displayed in the message.

        This embed should contain all the relevant information about the media.
        """
        raise NotImplementedError

    async def _refresh(self) -> None:
        """Edit the message to reflect the current state of the view.

        Called whenever the user-facing view needs to be updated.
        """
        if not self.message:
            raise ValueError("View has no message (not yet sent?), can't refresh")

        await self.message.edit(embed=self._get_embed(), view=self)

    async def _ensure_correct_invoker(self, interaction: discord.Interaction) -> bool:
        """Ensure that the interaction was invoked by the author of this view.

        Returns False (after replying ephemerally) when someone else clicked.
        """
        if interaction.user is None:
            raise ValueError("Interaction user is None")

        if interaction.user.id != self.invoker_user_id:
            await interaction.response.send_message("You can't interact with this view.", ephemeral=True)
            return False
        return True

    @abstractmethod
    async def is_favorite(self) -> bool:
        """Check if the current media is marked as favorite by the user.

        This will perform a database query.
        """
        raise NotImplementedError

    @abstractmethod
    async def set_favorite(self, state: bool) -> None:  # noqa: FBT001
        """Mark or unmark the current media as favorite.

        This will perform a database operation.
        """
        raise NotImplementedError

    @abstractmethod
    async def is_watched(self) -> bool:
        """Check if the current media is marked as watched by the user.

        This will perform a database query.
        """
        raise NotImplementedError

    @abstractmethod
    async def set_watched(self, state: bool) -> None:  # noqa: FBT001
        """Mark or unmark the current media as watched.

        This will perform a database operation.
        """
        raise NotImplementedError

    async def send(self, interaction: discord.Interaction) -> None:
        """Send the view to the user."""
        await self._initialize()
        await interaction.respond(embed=self._get_embed(), view=self)

    async def _watched_button_callback(self, interaction: discord.Interaction) -> None:
        """Callback for when the user clicks on the mark as watched button."""
        if not await self._ensure_correct_invoker(interaction):
            return

        # Persist the toggled state first, then mirror it on the button.
        await interaction.response.defer()
        cur_state = self.watched_button.state
        await self.set_watched(not cur_state)
        self.watched_button.set_state(not cur_state)

        await self._refresh()

    async def _favorite_button_callback(self, interaction: discord.Interaction) -> None:
        """Callback for when the user clicks on the mark as favorite button."""
        if not await self._ensure_correct_invoker(interaction):
            return

        # Persist the toggled state first, then mirror it on the button.
        await interaction.response.defer()
        cur_state = self.favorite_button.state
        await self.set_favorite(not cur_state)
        self.favorite_button.set_state(not cur_state)

        await self._refresh()
+ """ + + @abstractmethod + async def _update_state(self) -> None: + """Update the internal state to reflect the currently picked media. + + Called whenever the picked media is changed. + """ + raise NotImplementedError + + @override + async def _initialize(self) -> None: + await super()._initialize() + await self._update_state() diff --git a/contemplative-constellations/src/exts/tvdb_info/ui/_reactive_buttons.py b/contemplative-constellations/src/exts/tvdb_info/ui/_reactive_buttons.py new file mode 100644 index 0000000..93d6a46 --- /dev/null +++ b/contemplative-constellations/src/exts/tvdb_info/ui/_reactive_buttons.py @@ -0,0 +1,75 @@ +from collections.abc import Mapping +from dataclasses import dataclass +from typing import Any, override + +import discord +from discord.emoji import Emoji +from discord.enums import ButtonStyle +from discord.partial_emoji import PartialEmoji + + +@dataclass +class ReactiveButtonStateStyle: + """Style of a reactive button. + + This determines how the button will appear for this state. + """ + + label: str + style: discord.ButtonStyle + emoji: str + + def apply(self, button: discord.ui.Button[Any]) -> None: + """Apply this state to the button.""" + button.label = self.label + button.style = self.style + button.emoji = self.emoji + + +class ReactiveButton[S, V: discord.ui.View](discord.ui.Button[V]): + """A state-aware button, which can change its appearance based on the state it's in. + + This is very useful to quickly switch between various states of a button, such as "Add" and "Remove". 
+ """ + + @override + def __init__( + self, + *, + initial_state: S, + state_map: Mapping[S, ReactiveButtonStateStyle], + style: ButtonStyle = ButtonStyle.secondary, + label: str | None = None, + disabled: bool = False, + custom_id: str | None = None, + url: str | None = None, + emoji: str | Emoji | PartialEmoji | None = None, + sku_id: int | None = None, + row: int | None = None, + ): + super().__init__( + style=style, + label=label, + disabled=disabled, + custom_id=custom_id, + url=url, + emoji=emoji, + sku_id=sku_id, + row=row, + ) + + _state = initial_state + self.state_map = state_map + + @property + def state(self) -> S: + """Get the current state of the button.""" + return self._state + + # This is intentionally not a state.setter, as this makes it clerer + # to the caller that this method changes modifies the button. + def set_state(self, state: S) -> None: + """Set the state of the button.""" + self._state = state + style = self.state_map[state] + style.apply(self) diff --git a/contemplative-constellations/src/exts/tvdb_info/ui/episode_view.py b/contemplative-constellations/src/exts/tvdb_info/ui/episode_view.py new file mode 100644 index 0000000..c021ce9 --- /dev/null +++ b/contemplative-constellations/src/exts/tvdb_info/ui/episode_view.py @@ -0,0 +1,219 @@ +import warnings +from typing import final, override + +import discord + +from src.bot import Bot +from src.db_adapters.lists import get_list_item, list_put_item, list_remove_item +from src.db_tables.user_list import UserList, UserListItemKind +from src.settings import THETVDB_COPYRIGHT_FOOTER, THETVDB_LOGO +from src.tvdb.client import Episode, Series +from src.utils.tvdb import by_season + +from ._media_view import DynamicMediaView + + +@final +class EpisodeView(DynamicMediaView): + """View for displaying episodes of a series.""" + + def __init__( + self, + *, + bot: Bot, + user_id: int, + invoker_user_id: int, + watched_list: UserList, + favorite_list: UserList, + series: Series, + season_idx: int = 
1, + episode_idx: int = 1, + ) -> None: + super().__init__( + bot=bot, + user_id=user_id, + invoker_user_id=invoker_user_id, + watched_list=watched_list, + favorite_list=favorite_list, + ) + + self.series = series + + self.season_idx = season_idx + self.episode_idx = episode_idx + + self.episode_dropdown = discord.ui.Select(placeholder="Select an episode") + self.episode_dropdown.callback = self._episode_dropdown_callback + + self.season_dropdown = discord.ui.Select(placeholder="Select a season") + self.season_dropdown.callback = self._season_dropdown_callback + + self.watched_button.row = 2 + self.favorite_button.row = 2 + + @override + def _add_items(self) -> None: + self.add_item(self.episode_dropdown) + self.add_item(self.season_dropdown) + super()._add_items() + + # Episodes aren't favoritable + self.remove_item(self.favorite_button) + + @override + async def _initialize(self) -> None: + await self.series.ensure_seasons_and_episodes() + if self.series.episodes is None: + raise ValueError("Series has no episodes") + self.episodes = by_season(self.series.episodes) + + # Make the super call (must happen after we set self.episodes) + # This assumes is_favorite works properly, however, since we don't actually have + # this implemented for episodes, to make this call work, we'll need to temporarily + # set is_favorite to a dummy method. 
+ _old_is_favorite = self.is_favorite + + async def _dummy_is_favorite() -> bool: + return False + + self.is_favorite = _dummy_is_favorite + await super()._initialize() + self.is_favorite = _old_is_favorite + + @override + async def _update_state(self) -> None: + self.episode_dropdown.options = [ + discord.SelectOption( + label=episode.formatted_name, + value=str(episode.number), + description=episode.overview[:100] if episode.overview else None, + ) + for episode in self.episodes[self.season_idx] + ] + + self.season_dropdown.options = [ + discord.SelectOption(label=f"Season {season}", value=str(season)) for season in self.episodes + ] + + # TODO: This is not ideal, we should support some way to paginate this, however + # implementing that isn't trivial. For now, just trim the list to prevent errors. + + if len(self.episode_dropdown.options) > 25: + self.episode_dropdown.options = self.episode_dropdown.options[:25] + warnings.warn("Too many episodes to display, truncating to 25", UserWarning, stacklevel=1) + + if len(self.season_dropdown.options) > 25: + self.season_dropdown.options = self.season_dropdown.options[:25] + warnings.warn("Too many seasons to display, truncating to 25", UserWarning, stacklevel=1) + + self.watched_button.set_state(await self.is_watched()) + + @property + def current_episode(self) -> "Episode": + """Get the current episode being displayed.""" + return self.episodes[self.season_idx][self.episode_idx - 1] + + @override + async def is_favorite(self) -> bool: + raise NotImplementedError("Individual episodes cannot be marked as favorite.") + + @override + async def set_favorite(self, state: bool) -> None: + raise NotImplementedError("Individual episodes cannot be marked as favorite.") + + @override + async def is_watched(self) -> bool: + if not self.current_episode.id: + raise ValueError("Episode has no ID") + + item = await get_list_item( + self.bot.db_session, + self.watched_list, + self.current_episode.id, + UserListItemKind.EPISODE, + ) + 
return item is not None
+
+    @override
+    async def set_watched(self, state: bool) -> None:
+        if not self.current_episode.id:
+            raise ValueError("Episode has no ID")
+
+        if state is False:
+            item = await get_list_item(
+                self.bot.db_session,
+                self.watched_list,
+                self.current_episode.id,
+                UserListItemKind.EPISODE,
+            )
+            if item is None:
+                raise ValueError("Episode is not marked as watched, can't re-mark as unwatched.")
+            await list_remove_item(self.bot.db_session, self.watched_list, item)
+        else:
+            try:
+                await list_put_item(
+                    self.bot.db_session,
+                    self.watched_list,
+                    self.current_episode.id,
+                    UserListItemKind.EPISODE,
+                    self.series.id,
+                )
+            except ValueError:
+                raise ValueError("Episode is already marked as watched, can't re-mark as watched.")
+
+    async def _episode_dropdown_callback(self, interaction: discord.Interaction) -> None:
+        """Callback for when the user selects an episode from the drop-down."""
+        if not await self._ensure_correct_invoker(interaction):
+            return
+
+        if not self.episode_dropdown.values or not isinstance(self.episode_dropdown.values[0], str):
+            raise ValueError("Episode dropdown values are empty or non-string, but callback was triggered.")
+
+        new_episode_idx = int(self.episode_dropdown.values[0])
+        if new_episode_idx == self.episode_idx:
+            await interaction.response.defer()
+            return
+
+        self.episode_idx = new_episode_idx
+        await self._update_state()
+        await interaction.response.defer()
+        await self._refresh()
+
+    async def _season_dropdown_callback(self, interaction: discord.Interaction) -> None:
+        """Callback for when the user selects a season from the drop-down."""
+        if not await self._ensure_correct_invoker(interaction):
+            return
+
+        if not self.season_dropdown.values or not isinstance(self.season_dropdown.values[0], str):
+            raise ValueError("Season dropdown values are empty or non-string, but callback was triggered.")
+
+        new_season_idx = int(self.season_dropdown.values[0])
+        if new_season_idx == self.season_idx:
+            await 
interaction.response.defer() + return + + self.season_idx = new_season_idx + self.episode_idx = 1 + await self._update_state() + await interaction.response.defer() + await self._refresh() + + @override + def _get_embed(self) -> discord.Embed: + if self.current_episode.overview: + description = self.current_episode.overview + if len(description) > 1000: + description = description[:1000] + "..." + else: + description = None + + embed = discord.Embed( + title=self.current_episode.formatted_name, + description=description, + color=discord.Color.blurple(), + url=self.series.url, + ) + if self.current_episode.image_url: + embed.set_image(url=self.current_episode.image_url) + embed.set_footer(text=THETVDB_COPYRIGHT_FOOTER, icon_url=THETVDB_LOGO) + return embed diff --git a/contemplative-constellations/src/exts/tvdb_info/ui/movie_series_view.py b/contemplative-constellations/src/exts/tvdb_info/ui/movie_series_view.py new file mode 100644 index 0000000..95d7833 --- /dev/null +++ b/contemplative-constellations/src/exts/tvdb_info/ui/movie_series_view.py @@ -0,0 +1,278 @@ +from typing import Literal, final, override + +import discord + +from src.bot import Bot +from src.db_adapters.lists import ( + get_list_item, + list_put_item, + list_put_item_safe, + list_remove_item, + list_remove_item_safe, + refresh_list_items, +) +from src.db_tables.user_list import UserList, UserListItemKind +from src.settings import MOVIE_EMOJI, SERIES_EMOJI, THETVDB_COPYRIGHT_FOOTER, THETVDB_LOGO +from src.tvdb.client import Movie, Series +from src.utils.iterators import get_first + +from ._media_view import MediaView +from .episode_view import EpisodeView + + +class _SeriesOrMovieView(MediaView): + """View for displaying details about a movie or a series.""" + + @override + def __init__( + self, + *, + bot: Bot, + user_id: int, + invoker_user_id: int, + watched_list: UserList, + favorite_list: UserList, + media_data: Movie | Series, + ) -> None: + super().__init__( + bot=bot, + user_id=user_id, + 
invoker_user_id=invoker_user_id,
+            watched_list=watched_list,
+            favorite_list=favorite_list,
+        )
+
+        self.media_data = media_data
+
+    @property
+    def _db_item_kind(self) -> Literal[UserListItemKind.MOVIE, UserListItemKind.SERIES]:
+        """Return the kind of item this view represents."""
+        if isinstance(self.media_data, Series):
+            return UserListItemKind.SERIES
+        return UserListItemKind.MOVIE
+
+    @override
+    async def is_favorite(self) -> bool:
+        if not self.media_data.id:
+            raise ValueError("Media has no ID")
+
+        item = await get_list_item(self.bot.db_session, self.favorite_list, self.media_data.id, self._db_item_kind)
+        return item is not None
+
+    @override
+    async def set_favorite(self, state: bool) -> None:
+        if not self.media_data.id:
+            raise ValueError("Media has no ID")
+
+        if state is False:
+            item = await get_list_item(self.bot.db_session, self.favorite_list, self.media_data.id, self._db_item_kind)
+            if item is None:
+                raise ValueError("Media is not marked as favorite, can't re-mark as favorite.")
+            await list_remove_item(self.bot.db_session, self.favorite_list, item)  # FIX: removed from watched_list
+        else:
+            try:
+                await list_put_item(self.bot.db_session, self.favorite_list, self.media_data.id, self._db_item_kind)
+            except ValueError:
+                raise ValueError("Media is already marked as favorite, can't re-mark as favorite.")
+
+    @override
+    async def is_watched(self) -> bool:
+        if not self.media_data.id:
+            raise ValueError("Media has no ID")
+
+        item = await get_list_item(self.bot.db_session, self.watched_list, self.media_data.id, self._db_item_kind)
+        return item is not None
+
+    @override
+    async def set_watched(self, state: bool) -> None:
+        if not self.media_data.id:
+            raise ValueError("Media has no ID")
+
+        if state is False:
+            item = await get_list_item(self.bot.db_session, self.watched_list, self.media_data.id, self._db_item_kind)
+            if item is None:
+                raise ValueError("Media is not marked as watched, can't re-mark as unwatched.")
+            await list_remove_item(self.bot.db_session, 
self.watched_list, item)
+        else:
+            try:
+                await list_put_item(self.bot.db_session, self.watched_list, self.media_data.id, self._db_item_kind)
+            except ValueError:
+                raise ValueError("Media is already marked as watched, can't re-mark as watched.")
+
+    @override
+    def _get_embed(self) -> discord.Embed:
+        if self.media_data.overview_eng:
+            overview = f"{self.media_data.overview_eng}"
+            overview_extra = ""
+        elif not self.media_data.overview_eng and self.media_data.overview:
+            overview = f"{self.media_data.overview}"
+            overview_extra = "*No English overview available.*"
+        else:
+            overview = ""
+            overview_extra = "*No overview available.*"
+
+        if len(overview) > 1000:
+            overview = overview[:1000] + "..."  # FIX: sliced [:100], inconsistent with the 1000-char threshold
+
+        if overview_extra:
+            if overview:
+                overview += f"\n\n{overview_extra}"
+            else:
+                overview = overview_extra
+
+        title = self.media_data.bilingual_name
+
+        if isinstance(self.media_data, Series):
+            title = f"{SERIES_EMOJI} {title}"
+        else:
+            title = f"{MOVIE_EMOJI} {title}"
+        url = self.media_data.url
+
+        embed = discord.Embed(title=title, color=discord.Color.blurple(), url=url)
+        embed.add_field(name="Overview", value=overview, inline=False)
+        embed.set_footer(text=THETVDB_COPYRIGHT_FOOTER, icon_url=THETVDB_LOGO)
+        embed.set_image(url=self.media_data.image_url)
+        return embed
+
+
+@final
+class SeriesView(_SeriesOrMovieView):
+    """View for displaying details about a series."""
+
+    media_data: Series
+
+    @override
+    def __init__(
+        self,
+        *,
+        bot: Bot,
+        user_id: int,
+        invoker_user_id: int,
+        watched_list: UserList,
+        favorite_list: UserList,
+        media_data: Series,
+    ) -> None:
+        super().__init__(
+            bot=bot,
+            user_id=user_id,
+            invoker_user_id=invoker_user_id,
+            watched_list=watched_list,
+            favorite_list=favorite_list,
+            media_data=media_data,
+        )
+
+        self.episodes_button = discord.ui.Button(
+            style=discord.ButtonStyle.danger,
+            label="View episodes",
+            emoji="📺",
+            row=1,
+        )
+        self.episodes_button.callback = self._episodes_button_callback
+
+    @override
+    async 
def _initialize(self) -> None: + await self.media_data.ensure_seasons_and_episodes() + await super()._initialize() + + @override + def _add_items(self) -> None: + super()._add_items() + self.add_item(self.episodes_button) + + async def _episodes_button_callback(self, interaction: discord.Interaction) -> None: + """Callback for when the user clicks the "View Episodes" button.""" + if not await self._ensure_correct_invoker(interaction): + return + + view = EpisodeView( + bot=self.bot, + user_id=self.user_id, + invoker_user_id=self.invoker_user_id, + watched_list=self.watched_list, + favorite_list=self.favorite_list, + series=self.media_data, + ) + await view.send(interaction) + + @override + async def is_watched(self) -> bool: + # Series uses a special method to determine whether it's watched. + # This approach uses the last episode of the series to determine if the series is watched. + + # If the series has no episodes, fall back to marking the series itself as watched. + if self.media_data.episodes is None: + return await super().is_watched() + + last_ep = get_first(episode for episode in reversed(self.media_data.episodes) if episode.aired) + + if not last_ep or last_ep.id is None: + raise ValueError("Episode has no ID") + + item = await get_list_item(self.bot.db_session, self.watched_list, last_ep.id, UserListItemKind.EPISODE) + return item is not None + + @override + async def set_watched(self, state: bool) -> None: + # When a series is marked as watched, we mark all of its aired episodes as watched. + # Similarly, unmarking will unmark all episodes (aired or not). + + # If the series has no episodes, fall back to marking the season itself as watched / unwatched. 
+ if self.media_data.episodes is None: + await super().set_watched(state) + return + + if state is False: + for episode in self.media_data.episodes: + if not episode.id: + raise ValueError("Episode has no ID") + + await list_remove_item_safe( + self.bot.db_session, + self.watched_list, + episode.id, + UserListItemKind.EPISODE, + ) + + await refresh_list_items(self.bot.db_session, self.watched_list) + else: + for episode in self.media_data.episodes: + if not episode.id: + raise ValueError("Episode has no ID") + if not episode.aired: + continue + + await list_put_item_safe( + self.bot.db_session, + self.watched_list, + episode.id, + UserListItemKind.EPISODE, + self.media_data.id, + ) + + +@final +class MovieView(_SeriesOrMovieView): + """View for displaying details about a movie.""" + + media_data: Movie + + # We override __init__ to provide a more specific type for media_data + @override + def __init__( + self, + *, + bot: Bot, + user_id: int, + invoker_user_id: int, + watched_list: UserList, + favorite_list: UserList, + media_data: Movie, + ) -> None: + super().__init__( + bot=bot, + user_id=user_id, + invoker_user_id=invoker_user_id, + watched_list=watched_list, + favorite_list=favorite_list, + media_data=media_data, + ) diff --git a/contemplative-constellations/src/exts/tvdb_info/ui/profile_view.py b/contemplative-constellations/src/exts/tvdb_info/ui/profile_view.py new file mode 100644 index 0000000..d7fbda2 --- /dev/null +++ b/contemplative-constellations/src/exts/tvdb_info/ui/profile_view.py @@ -0,0 +1,212 @@ +import textwrap +from itertools import groupby +from typing import final + +import discord + +from src.bot import Bot +from src.db_adapters import refresh_list_items +from src.db_tables.media import Episode as EpisodeTable, Movie as MovieTable, Series as SeriesTable +from src.db_tables.user_list import UserList, UserListItemKind +from src.exts.error_handler.view import ErrorHandledView +from src.settings import MOVIE_EMOJI, SERIES_EMOJI +from src.tvdb 
import Movie, Series +from src.tvdb.client import FetchMeta, TvdbClient +from src.utils.iterators import get_first +from src.utils.log import get_logger + +log = get_logger(__name__) + + +@final +class ProfileView(ErrorHandledView): + """View for displaying user profiles with data about the user's added shows.""" + + fetched_favorite_movies: list[Movie] + fetched_favorite_shows: list[Series] + fetched_watched_movies: list[Movie] + fetched_watched_shows: list[Series] + fetched_partially_watched_shows: list[Series] + episodes_total: int + + def __init__( + self, + *, + bot: Bot, + tvdb_client: TvdbClient, + user: discord.User, + invoker_user_id: int, + watched_list: UserList, + favorite_list: UserList, + ) -> None: + super().__init__() + self.bot = bot + self.tvdb_client = tvdb_client + self.discord_user = user + self.invoker_user_id = invoker_user_id + self.watched_list = watched_list + self.favorite_list = favorite_list + + async def _initialize(self) -> None: + """Initialize the view, obtaining any necessary state.""" + await refresh_list_items(self.bot.db_session, self.watched_list) + await refresh_list_items(self.bot.db_session, self.favorite_list) + + watched_movies: list[MovieTable] = [] + watched_shows: list[SeriesTable] = [] + partially_watched_shows: list[SeriesTable] = [] + watched_episodes: list[EpisodeTable] = [] + + for item in self.watched_list.items: + match item.kind: + case UserListItemKind.MOVIE: + await self.bot.db_session.refresh(item, ["movie"]) + watched_movies.append(item.movie) + case UserListItemKind.SERIES: + await self.bot.db_session.refresh(item, ["series"]) + watched_shows.append(item.series) + case UserListItemKind.EPISODE: + await self.bot.db_session.refresh(item, ["episode"]) + await self.bot.db_session.refresh(item.episode, ["series"]) + watched_episodes.append(item.episode) + + # We don't actually care about episodes in the profile view, however, we need them + # because of the way shows are marked as watched (last episode watched 
-> show watched). + + for series_id, episodes in groupby( + sorted(watched_episodes, key=lambda x: x.series_id), key=lambda x: x.series_id + ): + series = await Series.fetch(series_id, client=self.tvdb_client, extended=True, meta=FetchMeta.EPISODES) + if series.episodes is None: + raise ValueError("Found an episode in watched list for a series with no episodes") + + last_episode = get_first(episode for episode in reversed(series.episodes) if episode.aired) + if not last_episode or last_episode.id is None: + raise ValueError("Episode has no ID or is None") + + episodes_it = iter(episodes) + first_db_episode = get_first(episodes_it) + if first_db_episode is None: + raise ValueError("No episodes found in a group (never)") + + group_episode_ids = {episode.tvdb_id for episode in episodes_it} + group_episode_ids.add(first_db_episode.tvdb_id) + await self.bot.db_session.refresh(first_db_episode, ["series"]) + + if first_db_episode.series is None: # pyright: ignore[reportUnnecessaryComparison] + manual = await self.bot.db_session.get(SeriesTable, first_db_episode.series_id) + raise ValueError(f"DB series is None id={first_db_episode.series_id}, manual={manual}") + + if last_episode.id in group_episode_ids: + watched_shows.append(first_db_episode.series) + else: + partially_watched_shows.append(first_db_episode.series) + + favorite_movies: list[MovieTable] = [] + favorite_shows: list[SeriesTable] = [] + + for item in self.favorite_list.items: + match item.kind: + case UserListItemKind.MOVIE: + await self.bot.db_session.refresh(item, ["movie"]) + favorite_movies.append(item.movie) + case UserListItemKind.SERIES: + await self.bot.db_session.refresh(item, ["series"]) + favorite_shows.append(item.series) + case UserListItemKind.EPISODE: + raise TypeError("Found an episode in favorite list") + + # Fetch the data about all favorite & watched items from tvdb + # TODO: This is a lot of API calls, we should probably limit this to some maximum + self.fetched_favorite_movies = [ + 
await Movie.fetch(media_db_data.tvdb_id, client=self.tvdb_client) for media_db_data in favorite_movies + ] + self.fetched_favorite_shows = [ + await Series.fetch( + media_db_data.tvdb_id, client=self.tvdb_client, extended=True, meta=FetchMeta.TRANSLATIONS + ) + for media_db_data in favorite_shows + ] + self.fetched_watched_movies = [ + await Movie.fetch(media_db_data.tvdb_id, client=self.tvdb_client) for media_db_data in watched_movies + ] + self.fetched_watched_shows = [ + await Series.fetch( + media_db_data.tvdb_id, + client=self.tvdb_client, + extended=True, + meta=FetchMeta.TRANSLATIONS, + ) + for media_db_data in watched_shows + ] + self.fetched_partially_watched_shows = [ + await Series.fetch( + media_db_data.tvdb_id, + client=self.tvdb_client, + extended=True, + meta=FetchMeta.TRANSLATIONS, + ) + for media_db_data in partially_watched_shows + ] + + # Instead of fetching all episodes, just store the total number of episodes that the user has added + # as that's the only thing we need here and while it is a bit inconsistent, it's a LOT more efficient. 
+        self.episodes_total = len(watched_episodes)
+
+    async def _ensure_correct_invoker(self, interaction: discord.Interaction) -> bool:
+        """Ensure that the interaction was invoked by the author of this view."""
+        if interaction.user is None:
+            raise ValueError("Interaction user is None")
+
+        if interaction.user.id != self.invoker_user_id:
+            await interaction.response.send_message("You can't interact with this view.", ephemeral=True)
+            return False
+        return True
+
+    def _get_embed(self) -> discord.Embed:
+        embed = discord.Embed(
+            title="Profile",
+            description=f"Profile for {self.discord_user.mention}",
+            color=discord.Color.blurple(),
+            thumbnail=self.discord_user.display_avatar.url,
+        )
+
+        stats_str = textwrap.dedent(
+            f"""
+            **Total Shows:** {len(self.fetched_watched_shows)} \
+            ({self.episodes_total} episode{'s' if self.episodes_total != 1 else ''})
+            **Total Movies:** {len(self.fetched_watched_movies)}
+            """
+        )
+        embed.add_field(name="Stats", value=stats_str, inline=False)
+
+        # TODO: What if there's too many things here, we might need to paginate this.
+ + favorite_items: list[str] = [] + for item in self.fetched_favorite_movies: + favorite_items.append(f"[{MOVIE_EMOJI} {item.bilingual_name}]({item.url})") # noqa: PERF401 + for item in self.fetched_favorite_shows: + favorite_items.append(f"[{SERIES_EMOJI} {item.bilingual_name}]({item.url})") # noqa: PERF401 + + embed.add_field( + name="Favorites", + value="\n".join(favorite_items) or "No favorites", + ) + watched_items: list[str] = [] + for item in self.fetched_watched_movies: + watched_items.append(f"[{MOVIE_EMOJI} {item.bilingual_name}]({item.url})") # noqa: PERF401 + for item in self.fetched_watched_shows: + watched_items.append(f"[{SERIES_EMOJI} {item.bilingual_name}]({item.url})") # noqa: PERF401 + for item in self.fetched_partially_watched_shows: + watched_items.append(f"[{SERIES_EMOJI} {item.bilingual_name}]({item.url}) partially") # noqa: PERF401 + + embed.add_field( + name="Watched", + value="\n".join(watched_items) or "No watched items", + ) + return embed + + async def send(self, interaction: discord.Interaction) -> None: + """Send the view.""" + await self._initialize() + await interaction.respond(embed=self._get_embed(), view=self) diff --git a/contemplative-constellations/src/exts/tvdb_info/ui/search_view.py b/contemplative-constellations/src/exts/tvdb_info/ui/search_view.py new file mode 100644 index 0000000..b26eb9e --- /dev/null +++ b/contemplative-constellations/src/exts/tvdb_info/ui/search_view.py @@ -0,0 +1,95 @@ +from collections.abc import Sequence + +import discord + +from src.bot import Bot +from src.db_adapters.lists import refresh_list_items +from src.db_adapters.user import user_get_list_safe, user_get_safe +from src.db_tables.user_list import UserList +from src.tvdb.client import Movie, Series + +from .movie_series_view import MovieView, SeriesView + + +def _search_view( + bot: Bot, + user_id: int, + invoker_user_id: int, + watched_list: UserList, + favorite_list: UserList, + results: Sequence[Movie | Series], + cur_index: int = 0, +) 
-> MovieView | SeriesView: + result = results[cur_index] + + if isinstance(result, Movie): + view = MovieView( + bot=bot, + user_id=user_id, + invoker_user_id=invoker_user_id, + watched_list=watched_list, + favorite_list=favorite_list, + media_data=result, + ) + else: + view = SeriesView( + bot=bot, + user_id=user_id, + invoker_user_id=invoker_user_id, + watched_list=watched_list, + favorite_list=favorite_list, + media_data=result, + ) + + if len(results) == 1: + return view + + # Add support for switching between search results dynamically + search_result_dropdown = discord.ui.Select( + placeholder="Not what you're looking for? Select a different result.", + options=[ + discord.SelectOption( + label=(result.bilingual_name or "")[:100], + value=str(i), + description=result.overview[:100] if result.overview else None, + ) + for i, result in enumerate(results) + ], + row=2, + ) + + async def _search_dropdown_callback(interaction: discord.Interaction) -> None: + if not await view._ensure_correct_invoker(interaction): # pyright: ignore[reportPrivateUsage] + return + + if not search_result_dropdown.values or not isinstance(search_result_dropdown.values[0], str): + raise ValueError("Dropdown values are empty or not a string but callback was triggered.") + + index = int(search_result_dropdown.values[0]) + new_view = _search_view(bot, user_id, invoker_user_id, watched_list, favorite_list, results, index) + await new_view.send(interaction) + + search_result_dropdown.callback = _search_dropdown_callback + + view.add_item(search_result_dropdown) + return view + + +async def search_view( + bot: Bot, + user_id: int, + invoker_user_id: int, + results: Sequence[Movie | Series], +) -> MovieView | SeriesView: + """Construct a view showing the search results. + + This uses specific views to render a single result. This view is then modified to + add support for switching between the search results. 
+ """ + user = await user_get_safe(bot.db_session, user_id) + watched_list = await user_get_list_safe(bot.db_session, user, "watched") + favorite_list = await user_get_list_safe(bot.db_session, user, "favorite") + await refresh_list_items(bot.db_session, watched_list) + await refresh_list_items(bot.db_session, favorite_list) + + return _search_view(bot, user_id, invoker_user_id, watched_list, favorite_list, results, 0) diff --git a/contemplative-constellations/src/py.typed b/contemplative-constellations/src/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/contemplative-constellations/src/settings.py b/contemplative-constellations/src/settings.py new file mode 100644 index 0000000..b05d271 --- /dev/null +++ b/contemplative-constellations/src/settings.py @@ -0,0 +1,28 @@ +from pathlib import Path + +from src.utils.config import get_config + +GITHUB_REPO = "https://github.com/ItsDrike/code-jam-2024" +BOT_TOKEN = get_config("BOT_TOKEN") +TVDB_API_KEY = get_config("TVDB_API_KEY") + +SQLITE_DATABASE_FILE = get_config("SQLITE_DATABASE_FILE", cast=Path, default=Path("./database.db")) +ECHO_SQL = get_config("ECHO_SQL", cast=bool, default=False) +DB_ALWAYS_MIGRATE = get_config("DB_ALWAYS_MIGRATE", cast=bool, default=False) + +FAIL_EMOJI = "❌" +SUCCESS_EMOJI = "✅" +MOVIE_EMOJI = "🎬" +SERIES_EMOJI = "📺" +GROUP_EMOJI = get_config("GROUP_EMOJI", default=":file_folder:") +COMMAND_EMOJI = get_config("COMMAND_EMOJI", default=":arrow_forward:") + +THETVDB_COPYRIGHT_FOOTER = ( + "Metadata provided by TheTVDB. Please consider adding missing information or subscribing at " "thetvdb.com." +) +THETVDB_LOGO = "https://www.thetvdb.com/images/attribution/logo1.png" + +# Note that tvdb doesn't actually have rate-limits (or at least they aren't documented), +# but we should still be careful not to spam the API too much and be on the safe side. 
+TVDB_RATE_LIMIT_REQUESTS = get_config("TVDB_RATE_LIMIT_REQUESTS", cast=int, default=100) +TVDB_RATE_LIMIT_PERIOD = get_config("TVDB_RATE_LIMIT_PERIOD", cast=float, default=5) # seconds diff --git a/contemplative-constellations/src/tvdb/__init__.py b/contemplative-constellations/src/tvdb/__init__.py new file mode 100644 index 0000000..cb92c16 --- /dev/null +++ b/contemplative-constellations/src/tvdb/__init__.py @@ -0,0 +1,4 @@ +from .client import FetchMeta, Movie, Series, TvdbClient +from .errors import InvalidApiKeyError + +__all__ = ["TvdbClient", "InvalidApiKeyError", "Movie", "Series", "FetchMeta"] diff --git a/contemplative-constellations/src/tvdb/client.py b/contemplative-constellations/src/tvdb/client.py new file mode 100644 index 0000000..4a65896 --- /dev/null +++ b/contemplative-constellations/src/tvdb/client.py @@ -0,0 +1,496 @@ +from abc import ABC, abstractmethod +from datetime import UTC, datetime +from enum import Enum +from typing import ClassVar, Literal, Self, final, overload, override + +import aiohttp +from aiocache import BaseCache +from yarl import URL + +from src.settings import TVDB_API_KEY, TVDB_RATE_LIMIT_PERIOD, TVDB_RATE_LIMIT_REQUESTS +from src.tvdb.errors import BadCallError, InvalidApiKeyError, InvalidIdError +from src.tvdb.generated_models import ( + EpisodeBaseRecord, + EpisodeExtendedRecord, + EpisodesIdExtendedGetResponse, + EpisodesIdGetResponse, + MovieBaseRecord, + MovieExtendedRecord, + MoviesIdExtendedGetResponse, + MoviesIdGetResponse, + SearchGetResponse, + SearchResult, + SeasonBaseRecord, + SeriesBaseRecord, + SeriesExtendedRecord, + SeriesIdEpisodesSeasonTypeGetResponse, + SeriesIdExtendedGetResponse, + SeriesIdGetResponse, +) +from src.utils.iterators import get_first +from src.utils.log import get_logger +from src.utils.ratelimit import rate_limit + +log = get_logger(__name__) + +type JSON_DATA = dict[str, JSON_DATA] | list[JSON_DATA] | str | int | float | bool | None # noice + +type SeriesRecord = SeriesBaseRecord | 
SeriesExtendedRecord +type MovieRecord = MovieBaseRecord | MovieExtendedRecord +type AnyRecord = SeriesRecord | MovieRecord + + +class FetchMeta(Enum): + """When calling fetch with extended=True, this is used if we want to fetch translations or episodes as well.""" + + TRANSLATIONS = "translations" + EPISODES = "episodes" + + +def parse_media_id(media_id: int | str) -> int: + """Parse the media ID from a string.""" + try: + media_id = int(str(media_id).removeprefix("movie-").removeprefix("series-").removeprefix("episode-")) + except ValueError: + raise InvalidIdError("Invalid media ID.") + else: + return media_id + + +class _Media(ABC): + ENDPOINT: ClassVar[str] + + ResponseType: ClassVar[type[MoviesIdGetResponse | SeriesIdGetResponse]] + ExtendedResponseType: ClassVar[type[MoviesIdExtendedGetResponse | SeriesIdExtendedGetResponse]] + + def __init__(self, client: "TvdbClient", data: AnyRecord | SearchResult | None): + if data is None: + raise ValueError("Data can't be None but is allowed to because of the broken pydantic generated models.") + self.client = client + self.set_attributes(data) + + def set_attributes(self, data: AnyRecord | SearchResult) -> None: + """Setting attributes.""" + self.data = data + self.name: str | None = self.data.name + self.overview: str | None = None + # if the class name is "Movie" or "Series" + self.entity_type: Literal["Movie", "Series"] = self.__class__.__name__ # pyright: ignore [reportAttributeAccessIssue] + if hasattr(self.data, "overview"): + self.overview = self.data.overview # pyright: ignore [reportAttributeAccessIssue] + self.slug: str | None = None + if hasattr(self.data, "slug"): + self.slug = self.data.slug + self.id = self.data.id + + self.name_eng: str | None = None + self.overview_eng: str | None = None + + self.image_url: URL | None = None + if isinstance(self.data, SearchResult) and self.data.image_url: + self.image_url = URL(self.data.image_url) + elif not isinstance(self.data, SearchResult) and self.data.image: + 
self.image_url = URL(self.data.image) + + if isinstance(self.data, SearchResult): + if self.data.translations and self.data.translations.root: + self.name_eng = self.data.translations.root.get("eng") + if self.data.overviews and self.data.overviews.root: + self.overview_eng = self.data.overviews.root.get("eng") + else: + if self.data.aliases: + self.name_eng = get_first(alias.name for alias in self.data.aliases if alias.language == "eng") + if isinstance(self.data, (SeriesExtendedRecord, MovieExtendedRecord)) and self.data.translations: + if self.data.translations.name_translations: + self.name_eng = get_first( + translation.name + for translation in self.data.translations.name_translations + if translation.language == "eng" + ) + if self.data.translations.overview_translations: + self.overview_eng = get_first( + translation.overview + for translation in self.data.translations.overview_translations + if translation.language == "eng" + ) + + @property + def bilingual_name(self) -> str | None: + if self.name == self.name_eng: + return self.name + return f"{self.name} ({self.name_eng})" + + @property + def id(self) -> int: + return self._id + + @id.setter + def id(self, value: int | str) -> None: # pyright: ignore[reportPropertyTypeMismatch] + self._id = parse_media_id(value) + + @classmethod + @abstractmethod + def supports_meta(cls, meta: FetchMeta) -> bool: + """Check if the class supports a specific meta.""" + ... + + @classmethod + @overload + async def fetch( + cls, + media_id: int | str, + client: "TvdbClient", + *, + extended: Literal[False] = False, + short: Literal[False] | None = None, + meta: None = None, + ) -> Self: ... + + @classmethod + @overload + async def fetch( + cls, + media_id: int | str, + client: "TvdbClient", + *, + extended: Literal[True], + short: bool | None = None, + meta: FetchMeta | None = None, + ) -> Self: ... 
    @classmethod
    async def fetch(
        cls,
        media_id: int | str,
        client: "TvdbClient",
        *,
        extended: bool = False,
        short: bool | None = None,
        meta: FetchMeta | None = None,
    ) -> Self:
        """Fetch a media record (movie or series, per ``cls.ENDPOINT``) by its ID.

        Responses are cached for one hour; the cache key includes the
        ``short``/``meta`` options so differently-shaped responses don't
        collide.

        :param media_id: The ID of the media (plain, or "movie-"/"series-"/"episode-" prefixed).
        :param client: The TVDB client to use.
        :param extended: Whether to fetch extended information.
        :param short: Whether to omit characters and artworks from the response. Requires extended=True to work.
        :param meta: The meta to fetch. Requires extended=True to work.
        :return: An instance of this class wrapping the fetched record.
        :raises BadCallError: If ``short`` or ``meta`` is given without ``extended=True``.
        """
        media_id = parse_media_id(media_id)
        # Cache key mirrors every option that changes the response shape.
        cache_key: str = f"{media_id}"
        if extended:
            cache_key += f"_{bool(short)}"
            if meta:
                cache_key += f"_{meta.value}"
        response = await client.cache.get(cache_key, namespace=f"tvdb_{cls.ENDPOINT}")
        query: dict[str, str] = {}
        if extended:
            if meta:
                query["meta"] = meta.value
            # "short" is always sent explicitly when extended, defaulting to false.
            if short:
                query["short"] = "true"
            else:
                query["short"] = "false"
        elif meta:
            raise BadCallError("Meta can only be used with extended=True.")
        elif short:
            raise BadCallError("Short can only be enabled with extended=True.")
        if not response:
            response = await client.request(
                "GET",
                f"{cls.ENDPOINT}/{media_id}" + ("/extended" if extended else ""),
                query=query if query else None,
            )
            await client.cache.set(key=cache_key, value=response, ttl=60 * 60, namespace=f"tvdb_{cls.ENDPOINT}")
            log.trace(f"Stored into cache: {cache_key}")
        else:
            log.trace(f"Loaded from cache: {cache_key}")
        # Re-validate the raw (possibly cached) JSON into the matching pydantic model.
        response = cls.ResponseType(**response) if not extended else cls.ExtendedResponseType(**response)  # pyright: ignore[reportCallIssue]

        return cls(client, response.data)
class Movie(_Media):
    """Class to interact with the TVDB API for movies."""

    ENDPOINT: ClassVar[str] = "movies"
    data: SearchResult | MovieBaseRecord | MovieExtendedRecord

    ResponseType = MoviesIdGetResponse
    ExtendedResponseType = MoviesIdExtendedGetResponse

    @override
    def set_attributes(self, data: AnyRecord | SearchResult) -> None:
        """Populate the shared attributes, then derive the public TVDB movie URL."""
        super().set_attributes(data)
        self.url: str | None = None
        if self.slug:
            self.url = f"https://www.thetvdb.com/movies/{self.slug}"

    @override
    @classmethod
    async def supports_meta(cls, meta: FetchMeta) -> bool:
        """Check if the class supports a specific meta."""
        return meta is FetchMeta.TRANSLATIONS
f"series/{self.id}/episodes/{season_type}" + response = await self.client.cache.get(cache_key, namespace="tvdb_episodes") + if not response: + response = await self.client.request("GET", endpoint) + await self.client.cache.set(cache_key, value=response, namespace="tvdb_episodes", ttl=60 * 60) + log.trace(f"Stored into cache: {cache_key}") + else: + log.trace(f"Loaded from cache: {cache_key}") + + # Assuming 'episodes' field contains the list of episodes + response = SeriesIdEpisodesSeasonTypeGetResponse(**response) # pyright: ignore[reportCallIssue] + + if response.data and response.data.episodes: + self.episodes = [Episode(episode, client=self.client) for episode in response.data.episodes] + + async def ensure_seasons_and_episodes(self) -> None: + """Ensure that reponse contains seasons.""" + if not isinstance(self.data, SeriesExtendedRecord): + series = await self.fetch( + media_id=self.id, client=self.client, extended=True, short=True, meta=FetchMeta.EPISODES + ) + self.set_attributes(series.data) + + +class Episode: + """Represents an episode from Tvdb.""" + + def __init__(self, data: EpisodeBaseRecord | EpisodeExtendedRecord, client: "TvdbClient") -> None: + self.client = client + self.set_attributes(data) + + def set_attributes(self, data: EpisodeBaseRecord | EpisodeExtendedRecord) -> None: + """Set attributes.""" + self.data = data + self.id: int | None = self.data.id + self.image_url: str | None = self.data.image if self.data.image else None + self.name: str | None = self.data.name + self.overview: str | None = self.data.overview + self.number: int | None = self.data.number + self.season_number: int | None = self.data.season_number + self.name_eng: str | None = None + self.overview_eng: str | None = None + self.series_id: int | None = self.data.series_id + self.air_date: datetime | None = None + if self.data.aired: + self.air_date = datetime.strptime(self.data.aired, "%Y-%m-%d").replace(tzinfo=UTC) + self.aired: bool = self.air_date is not None and 
self.air_date <= datetime.now(UTC) + + if isinstance(self.data, EpisodeExtendedRecord): + if self.data.translations and self.data.translations.name_translations: + self.name_eng = get_first( + translation.name + for translation in self.data.translations.name_translations + if translation.language == "eng" + ) + + if self.data.translations and self.data.translations.overview_translations: + self.overview_eng = get_first( + translation.overview + for translation in self.data.translations.overview_translations + if translation.language == "eng" + ) + + @property + def formatted_name(self) -> str: + """Returns the name in format SxxEyy - Name.""" + return f"S{self.season_number:02}E{self.number:02} - {self.name}" + + @classmethod + async def fetch(cls, media_id: str | int, *, client: "TvdbClient", extended: bool = True) -> "Episode": + """Fetch episode.""" + endpoint = f"/episodes/{parse_media_id(media_id)}" + query: dict[str, str] | None = None + + if extended: + endpoint += "/extended" + query = {"meta": "translations"} + response = await client.request("GET", endpoint=endpoint, query=query) + response = EpisodesIdGetResponse(**response) if not extended else EpisodesIdExtendedGetResponse(**response) # pyright: ignore[reportCallIssue] + + if not response.data: + raise ValueError("No data found for Episode") + return cls(response.data, client=client) + + async def ensure_translations(self) -> None: + """Ensure that response contains translations.""" + if not isinstance(self.data, EpisodeExtendedRecord): + if not self.id: + raise ValueError("Episode has no ID") + episode = await self.fetch(self.id, client=self.client, extended=True) + self.set_attributes(episode.data) + + async def fetch_series( + self, *, extended: bool = False, short: bool | None = None, meta: FetchMeta | None = None + ) -> Series: + """Fetching series.""" + if not self.series_id: + raise ValueError("Series Id cannot be None.") + return await Series.fetch( # pyright: ignore[reportCallIssue] + 
client=self.client, + media_id=self.series_id, + extended=extended, # pyright: ignore[reportArgumentType] + short=short, + meta=meta, + ) + + @property + def bilingual_name(self) -> str | None: + """Returns the name in both languages.""" + if self.name == self.name_eng: + return self.name + return f"{self.name} ({self.name_eng})" + + +class TvdbClient: + """Class to interact with the TVDB API.""" + + BASE_URL: ClassVar[URL] = URL("https://api4.thetvdb.com/v4/") + + def __init__(self, http_session: aiohttp.ClientSession, cache: BaseCache): + self.http_session = http_session + self.auth_token = None + self.cache = cache + + @overload + async def request( + self, + method: Literal["GET"], + endpoint: str, + body: None = None, + query: dict[str, str] | None = None, + ) -> JSON_DATA: ... + + @overload + async def request( + self, method: Literal["POST"], endpoint: str, body: JSON_DATA, query: None = None + ) -> JSON_DATA: ... + + async def request( + self, + method: Literal["GET", "POST"], + endpoint: str, + body: JSON_DATA = None, + query: dict[str, str] | None = None, + ) -> JSON_DATA: + """Make an authorized request to the TVDB API.""" + log.trace(f"Making TVDB {method} request to {endpoint}") + + # TODO: It would be better to instead use a queue to handle rate-limits + # and block until the next request can be made. 
    async def search(
        self, search_query: str, entity_type: Literal["series", "movie", None] = None, limit: int = 1
    ) -> list[Movie | Series]:
        """Search for a series or movie in the TVDB database.

        Results are cached for one hour per (query, type, limit) combination.

        :param search_query: Free-text query to search for.
        :param entity_type: Restrict results to "series" or "movie"; None searches both.
        :param limit: Maximum number of results to request.
        :return: Matching Movie/Series wrappers; results of any other type are dropped.
        """
        cache_key: str = f"{search_query}_{entity_type}_{limit}"
        data = await self.cache.get(cache_key, namespace="tvdb_search")
        if not data:
            query: dict[str, str] = {"query": search_query, "limit": str(limit)}
            if entity_type:
                query["type"] = entity_type
            data = await self.request("GET", "search", query=query)
            await self.cache.set(key=cache_key, value=data, ttl=60 * 60, namespace="tvdb_search")
            log.trace(f"Stored into cache: {cache_key}")
        else:
            log.trace(f"Loaded from cache: {cache_key}")
        response = SearchGetResponse(**data)  # pyright: ignore[reportCallIssue]
        returnable: list[Movie | Series] = []
        if not response.data:
            return returnable
        for result in response.data:
            # Only movie/series results get wrapped; other result types are skipped.
            match result.type:
                case "movie":
                    returnable.append(Movie(self, result))
                case "series":
                    returnable.append(Series(self, result))
                case _:
                    pass
        return returnable
"""Exception hierarchy for the TVDB API client."""

import aiohttp


class TVDBError(Exception):
    """The base exception for all TVDB errors."""


class BadCallError(TVDBError):
    """Exception raised when the meta value is incompatible with the class."""


class InvalidIdError(TVDBError):
    """Exception raised when the ID provided is invalid."""


class InvalidApiKeyError(TVDBError):
    """Exception raised when the TVDB API key used was invalid."""

    def __init__(self, response: aiohttp.ClientResponse, response_txt: str):
        # Keep the failed response and its body text around for logging/diagnostics.
        self.response = response
        self.response_txt = response_txt
        super().__init__("Invalid TVDB API key.")
class Alias(BaseModel):
    """An alias model, which can be associated with a series, season, movie, person, or list.

    Generated by datamodel-codegen from swagger.yml (see file header); all
    fields are optional and default to None.
    """

    language: str | None = Field(
        default=None,
        description="A 3-4 character string indicating the language of the alias, as defined in Language.",
    )
    name: str | None = Field(default=None, description="A string containing the alias itself.")
class ArtworkStatus(BaseModel):
    """artwork status record.

    Generated model (see file header); a status ID with its name.
    """

    id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"})
    name: str | None = None
class AwardBaseRecord(BaseModel):
    """base award record.

    Generated model (see file header); identifies an award by ID and name.
    """

    id: int | None = None
    name: str | None = None
class Biography(BaseModel):
    """biography record.

    Generated model (see file header); a biography text plus its language.
    """

    biography: str | None = Field(default=None, json_schema_extra={"x-go-name": "Biography"})
    language: str | None = Field(default=None, json_schema_extra={"x-go-name": "Language"})
class Companies(BaseModel):
    """Companies by type record.

    Generated model (see file header); groups ``Company`` records by role.
    """

    studio: list[Company] | None = None
    network: list[Company] | None = None
    production: list[Company] | None = None
    distributor: list[Company] | None = None
    special_effects: list[Company] | None = None
class CompanyType(BaseModel):
    """A company type record.

    Generated model (see file header); maps a company-type ID to its name.
    Both fields use camelCase aliases matching the API's JSON keys.
    """

    company_type_id: int | None = Field(default=None, alias="companyTypeId")
    company_type_name: str | None = Field(default=None, alias="companyTypeName")
class Country(BaseModel):
    """country record.

    Generated model (see file header). Note that ``id`` is a string here,
    unlike the integer IDs used by most other records in this module.
    """

    id: str | None = Field(default=None, json_schema_extra={"x-go-name": "ID"})
    name: str | None = Field(default=None, json_schema_extra={"x-go-name": "Name"})
    short_code: str | None = Field(default=None, alias="shortCode", json_schema_extra={"x-go-name": "ShortCode"})
class EpisodeBaseRecord(BaseModel):
    """base episode record.

    Generated model (see file header). ``aired`` is a date string that the
    client parses with the ``%Y-%m-%d`` format (see ``Episode.set_attributes``).
    """

    absolute_number: int | None = Field(default=None, alias="absoluteNumber")
    aired: str | None = None
    airs_after_season: int | None = Field(default=None, alias="airsAfterSeason")
    airs_before_episode: int | None = Field(default=None, alias="airsBeforeEpisode")
    airs_before_season: int | None = Field(default=None, alias="airsBeforeSeason")
    finale_type: str | None = Field(default=None, alias="finaleType", description="season, midseason, or series")
    id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"})
    image: str | None = None
    image_type: int | None = Field(default=None, alias="imageType")
    is_movie: int | None = Field(default=None, alias="isMovie", json_schema_extra={"x-go-name": "IsMovie"})
    last_updated: str | None = Field(default=None, alias="lastUpdated")
    linked_movie: int | None = Field(default=None, alias="linkedMovie")
    name: str | None = None
    name_translations: list[str] | None = Field(
        default=None,
        alias="nameTranslations",
        json_schema_extra={"x-go-name": "NameTranslations"},
    )
    number: int | None = None
    overview: str | None = None
    overview_translations: list[str] | None = Field(
        default=None,
        alias="overviewTranslations",
        json_schema_extra={"x-go-name": "OverviewTranslations"},
    )
    runtime: int | None = None
    season_number: int | None = Field(default=None, alias="seasonNumber")
    seasons: list[SeasonBaseRecord] | None = Field(default=None, json_schema_extra={"x-go-name": "Seasons"})
    series_id: int | None = Field(default=None, alias="seriesId", json_schema_extra={"x-go-name": "SeriesID"})
    season_name: str | None = Field(default=None, alias="seasonName")
    year: str | None = None
class EpisodesIdGetResponse(BaseModel):
    """Generated response envelope: a base episode record plus a status string.

    Used by ``Episode.fetch`` to validate non-extended episode responses.
    """

    data: EpisodeBaseRecord | None = None
    status: str | None = None
Field(default=None, json_schema_extra={"x-go-name": "artwork"}) + people: int | None = Field(default=None, json_schema_extra={"x-go-name": "people"}) + list: int | None = Field(default=None, json_schema_extra={"x-go-name": "list"}) + + +class Favorites(BaseModel): + """User favorites record.""" + + series: list[int] | None = Field(default=None, json_schema_extra={"x-go-name": "series"}) + movies: list[int] | None = Field(default=None, json_schema_extra={"x-go-name": "movies"}) + episodes: list[int] | None = Field(default=None, json_schema_extra={"x-go-name": "episodes"}) + artwork: list[int] | None = Field(default=None, json_schema_extra={"x-go-name": "artwork"}) + people: list[int] | None = Field(default=None, json_schema_extra={"x-go-name": "people"}) + lists: list[int] | None = Field(default=None, json_schema_extra={"x-go-name": "list"}) + + +class Gender(BaseModel): + """gender record.""" + + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + name: str | None = Field(default=None, json_schema_extra={"x-go-name": "Name"}) + + +class GendersGetResponse(BaseModel): + data: list[Gender] | None = None + status: str | None = None + + +class Genre(Enum): + number_1 = 1 + number_2 = 2 + number_3 = 3 + number_4 = 4 + number_5 = 5 + number_6 = 6 + number_7 = 7 + number_8 = 8 + number_9 = 9 + number_10 = 10 + number_11 = 11 + number_12 = 12 + number_13 = 13 + number_14 = 14 + number_15 = 15 + number_16 = 16 + number_17 = 17 + number_18 = 18 + number_19 = 19 + number_21 = 21 + number_22 = 22 + number_23 = 23 + number_24 = 24 + number_25 = 25 + number_26 = 26 + number_27 = 27 + number_28 = 28 + number_29 = 29 + number_30 = 30 + number_31 = 31 + number_32 = 32 + number_33 = 33 + number_34 = 34 + number_35 = 35 + number_36 = 36 + + +class GenreBaseRecord(BaseModel): + """base genre record.""" + + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + name: str | None = Field(default=None, json_schema_extra={"x-go-name": 
"Name"}) + slug: str | None = Field(default=None, json_schema_extra={"x-go-name": "Slug"}) + + +class GenresGetResponse(BaseModel): + data: list[GenreBaseRecord] | None = None + status: str | None = None + + +class GenresIdGetResponse(BaseModel): + data: GenreBaseRecord | None = None + status: str | None = None + + +class Inspiration(BaseModel): + """Movie inspiration record.""" + + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + type: str | None = None + type_name: str | None = None + url: str | None = None + + +class InspirationType(BaseModel): + """Movie inspiration type record.""" + + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + name: str | None = None + description: str | None = None + reference_name: str | None = None + url: str | None = None + + +class InspirationTypesGetResponse(BaseModel): + data: list[InspirationType] | None = None + status: str | None = None + + +class Language(BaseModel): + """language record.""" + + id: str | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + name: str | None = Field(default=None, json_schema_extra={"x-go-name": "Name"}) + native_name: str | None = Field(default=None, alias="nativeName", json_schema_extra={"x-go-name": "NativeName"}) + short_code: str | None = Field(default=None, alias="shortCode") + + +class LanguagesGetResponse(BaseModel): + data: list[Language] | None = None + status: str | None = None + + +class Links(BaseModel): + """Links for next, previous and current record.""" + + prev: str | None = None + self: str | None = None + next: str | None = None + total_items: int | None = None + page_size: int | None = None + + +class ListBaseRecord(BaseModel): + """base list record.""" + + aliases: list[Alias] | None = Field(default=None, json_schema_extra={"x-go-name": "Aliases"}) + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + image: str | None = None + image_is_fallback: bool | None = 
Field(default=None, alias="imageIsFallback") + is_official: bool | None = Field(default=None, alias="isOfficial", json_schema_extra={"x-go-name": "IsOfficial"}) + name: str | None = None + name_translations: list[str] | None = Field( + default=None, + alias="nameTranslations", + json_schema_extra={"x-go-name": "NameTranslations"}, + ) + overview: str | None = None + overview_translations: list[str] | None = Field( + default=None, + alias="overviewTranslations", + json_schema_extra={"x-go-name": "OverviewTranslations"}, + ) + remote_ids: list[RemoteID] | None = Field( + default=None, alias="remoteIds", json_schema_extra={"x-go-name": "RemoteIDs"} + ) + tags: list[TagOption] | None = Field(default=None, json_schema_extra={"x-go-name": "TagOptions"}) + score: int | None = None + url: str | None = None + + +class ListExtendedRecord(BaseModel): + """extended list record.""" + + aliases: list[Alias] | None = Field(default=None, json_schema_extra={"x-go-name": "Aliases"}) + entities: list[Entity] | None = Field(default=None, json_schema_extra={"x-go-name": "Entities"}) + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + image: str | None = None + image_is_fallback: bool | None = Field(default=None, alias="imageIsFallback") + is_official: bool | None = Field(default=None, alias="isOfficial", json_schema_extra={"x-go-name": "IsOfficial"}) + name: str | None = None + name_translations: list[str] | None = Field( + default=None, + alias="nameTranslations", + json_schema_extra={"x-go-name": "NameTranslations"}, + ) + overview: str | None = None + overview_translations: list[str] | None = Field( + default=None, + alias="overviewTranslations", + json_schema_extra={"x-go-name": "OverviewTranslations"}, + ) + score: int | None = Field(default=None, json_schema_extra={"x-go-name": "Score"}) + url: str | None = None + + +class ListsGetResponse(BaseModel): + data: list[ListBaseRecord] | None = None + status: str | None = None + links: Links | None = None + 
+ +class ListsIdExtendedGetResponse(BaseModel): + data: ListExtendedRecord | None = None + status: str | None = None + + +class ListsIdGetResponse(BaseModel): + data: ListBaseRecord | None = None + status: str | None = None + + +class ListsIdTranslationsLanguageGetResponse(BaseModel): + data: list[Translation] | None = None + status: str | None = None + + +class ListsSlugSlugGetResponse(ListsIdGetResponse): + pass + + +class LoginPostRequest(BaseModel): + apikey: str + pin: str | None = None + + +class LoginPostResponse(BaseModel): + data: Data | None = None + status: str | None = None + + +class Meta(Enum): + translations = "translations" + + +class Meta3(Enum): + translations = "translations" + episodes = "episodes" + + +class MovieBaseRecord(BaseModel): + """base movie record.""" + + aliases: list[Alias] | None = Field(default=None, json_schema_extra={"x-go-name": "Aliases"}) + id: int = Field(json_schema_extra={"x-go-name": "ID"}) + image: str | None = Field(default=None, json_schema_extra={"x-go-name": "Image"}) + last_updated: str | None = Field(default=None, alias="lastUpdated") + name: str | None = Field(default=None, json_schema_extra={"x-go-name": "Name"}) + name_translations: list[str] | None = Field( + default=None, + alias="nameTranslations", + json_schema_extra={"x-go-name": "NameTranslations"}, + ) + overview_translations: list[str] | None = Field( + default=None, + alias="overviewTranslations", + json_schema_extra={"x-go-name": "OverviewTranslations"}, + ) + score: float | None = Field(default=None, json_schema_extra={"x-go-name": "Score"}) + slug: str | None = Field(default=None, json_schema_extra={"x-go-name": "Slug"}) + status: Status | None = None + runtime: int | None = None + year: str | None = None + + +class MovieExtendedRecord(BaseModel): + """extended movie record.""" + + aliases: list[Alias] | None = Field(default=None, json_schema_extra={"x-go-name": "Aliases"}) + artworks: list[ArtworkBaseRecord] | None = Field(default=None, 
json_schema_extra={"x-go-name": "Artworks"}) + audio_languages: list[str] | None = Field( + default=None, + alias="audioLanguages", + json_schema_extra={"x-go-name": "AudioLanguages"}, + ) + awards: list[AwardBaseRecord] | None = Field(default=None, json_schema_extra={"x-go-name": "Awards"}) + box_office: str | None = Field(default=None, alias="boxOffice") + box_office_us: str | None = Field(default=None, alias="boxOfficeUS") + budget: str | None = None + characters: list[Character] | None = Field(default=None, json_schema_extra={"x-go-name": "Characters"}) + companies: Companies | None = None + content_ratings: list[ContentRating] | None = Field(default=None, alias="contentRatings") + first_release: Release | None = None + genres: list[GenreBaseRecord] | None = Field(default=None, json_schema_extra={"x-go-name": "Genres"}) + id: int = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + image: str | None = Field(default=None, json_schema_extra={"x-go-name": "Image"}) + inspirations: list[Inspiration] | None = Field(default=None, json_schema_extra={"x-go-name": "Inspirations"}) + last_updated: str | None = Field(default=None, alias="lastUpdated") + lists: list[ListBaseRecord] | None = None + name: str | None = Field(default=None, json_schema_extra={"x-go-name": "Name"}) + name_translations: list[str] | None = Field( + default=None, + alias="nameTranslations", + json_schema_extra={"x-go-name": "NameTranslations"}, + ) + original_country: str | None = Field(default=None, alias="originalCountry") + original_language: str | None = Field(default=None, alias="originalLanguage") + overview_translations: list[str] | None = Field( + default=None, + alias="overviewTranslations", + json_schema_extra={"x-go-name": "OverviewTranslations"}, + ) + production_countries: list[ProductionCountry] | None = Field( + default=None, json_schema_extra={"x-go-name": "ProductionCountries"} + ) + releases: list[Release] | None = Field(default=None, json_schema_extra={"x-go-name": 
"Releases"}) + remote_ids: list[RemoteID] | None = Field( + default=None, alias="remoteIds", json_schema_extra={"x-go-name": "RemoteIDs"} + ) + runtime: int | None = None + score: float | None = Field(default=None, json_schema_extra={"x-go-name": "Score"}) + slug: str | None = Field(default=None, json_schema_extra={"x-go-name": "Slug"}) + spoken_languages: list[str] | None = Field(default=None, json_schema_extra={"x-go-name": "SpokenLanguages"}) + status: Status | None = None + studios: list[StudioBaseRecord] | None = Field(default=None, json_schema_extra={"x-go-name": "Studios"}) + subtitle_languages: list[str] | None = Field( + default=None, + alias="subtitleLanguages", + json_schema_extra={"x-go-name": "SubtitleLanguages"}, + ) + tag_options: list[TagOption] | None = Field( + default=None, alias="tagOptions", json_schema_extra={"x-go-name": "TagOptions"} + ) + trailers: list[Trailer] | None = Field(default=None, json_schema_extra={"x-go-name": "Trailers"}) + translations: TranslationExtended | None = None + year: str | None = None + + +class MoviesFilterGetResponse(BaseModel): + data: list[MovieBaseRecord] | None = None + status: str | None = None + + +class MoviesGetResponse(BaseModel): + data: list[MovieBaseRecord] | None = None + status: str | None = None + links: Links | None = None + + +class MoviesIdExtendedGetResponse(BaseModel): + data: MovieExtendedRecord | None = None + status: str | None = None + + +class MoviesIdGetResponse(BaseModel): + data: MovieBaseRecord | None = None + status: str | None = None + + +class MoviesIdTranslationsLanguageGetResponse(EpisodesIdTranslationsLanguageGetResponse): + pass + + +class MoviesSlugSlugGetResponse(MoviesIdGetResponse): + pass + + +class MoviesStatusesGetResponse(BaseModel): + data: list[Status] | None = None + status: str | None = None + + +class ParentCompany(BaseModel): + """A parent company record.""" + + id: int | None = None + name: str | None = None + relation: CompanyRelationShip | None = None + + +class 
PeopleBaseRecord(BaseModel): + """base people record.""" + + aliases: list[Alias] | None = Field(default=None, json_schema_extra={"x-go-name": "Aliases"}) + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + image: str | None = None + last_updated: str | None = Field(default=None, alias="lastUpdated") + name: str | None = None + name_translations: list[str] | None = Field( + default=None, + alias="nameTranslations", + json_schema_extra={"x-go-name": "NameTranslations"}, + ) + overview_translations: list[str] | None = Field( + default=None, + alias="overviewTranslations", + json_schema_extra={"x-go-name": "OverviewTranslations"}, + ) + score: int | None = Field(default=None, json_schema_extra={"x-go-name": "Score"}) + + +class PeopleExtendedRecord(BaseModel): + """extended people record.""" + + aliases: list[Alias] | None = Field(default=None, json_schema_extra={"x-go-name": "Aliases"}) + awards: list[AwardBaseRecord] | None = Field(default=None, json_schema_extra={"x-go-name": "Awards"}) + biographies: list[Biography] | None = Field(default=None, json_schema_extra={"x-go-name": "Biographies"}) + birth: str | None = None + birth_place: str | None = Field(default=None, alias="birthPlace") + characters: list[Character] | None = Field(default=None, json_schema_extra={"x-go-name": "Characters"}) + death: str | None = None + gender: int | None = None + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + image: str | None = None + last_updated: str | None = Field(default=None, alias="lastUpdated") + name: str | None = None + name_translations: list[str] | None = Field( + default=None, + alias="nameTranslations", + json_schema_extra={"x-go-name": "NameTranslations"}, + ) + overview_translations: list[str] | None = Field( + default=None, + alias="overviewTranslations", + json_schema_extra={"x-go-name": "OverviewTranslations"}, + ) + races: list[Race] | None = Field(default=None, json_schema_extra={"x-go-name": "Races"}) + 
remote_ids: list[RemoteID] | None = Field( + default=None, alias="remoteIds", json_schema_extra={"x-go-name": "RemoteIDs"} + ) + score: int | None = Field(default=None, json_schema_extra={"x-go-name": "Score"}) + slug: str | None = None + tag_options: list[TagOption] | None = Field( + default=None, alias="tagOptions", json_schema_extra={"x-go-name": "TagOptions"} + ) + translations: TranslationExtended | None = None + + +class PeopleGetResponse(BaseModel): + data: list[PeopleBaseRecord] | None = None + status: str | None = None + links: Links | None = None + + +class PeopleIdExtendedGetResponse(BaseModel): + data: PeopleExtendedRecord | None = None + status: str | None = None + + +class PeopleIdGetResponse(BaseModel): + data: PeopleBaseRecord | None = None + status: str | None = None + + +class PeopleIdTranslationsLanguageGetResponse(EpisodesIdTranslationsLanguageGetResponse): + pass + + +class PeopleType(BaseModel): + """people type record.""" + + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + name: str | None = Field(default=None, json_schema_extra={"x-go-name": "Name"}) + + +class PeopleTypesGetResponse(BaseModel): + data: list[PeopleType] | None = None + status: str | None = None + + +class ProductionCountry(BaseModel): + """Production country record.""" + + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + country: str | None = None + name: str | None = None + + +class Race(BaseModel): + """race record.""" + + +class RecordInfo(BaseModel): + """base record info.""" + + image: str | None = Field(default=None, json_schema_extra={"x-go-name": "Image"}) + name: str | None = Field(default=None, json_schema_extra={"x-go-name": "Name"}) + year: str | None = None + + +class Release(BaseModel): + """release record.""" + + country: str | None = None + date: str | None = None + detail: str | None = None + + +class RemoteID(BaseModel): + """remote id record.""" + + id: str | None = Field(default=None, 
json_schema_extra={"x-go-name": "ID"}) + type: int | None = Field(default=None, json_schema_extra={"x-go-name": "Type"}) + source_name: str | None = Field(default=None, alias="sourceName", json_schema_extra={"x-go-name": "SourceName"}) + + +class SearchByRemoteIdResult(BaseModel): + """search by remote reuslt is a base record for a movie, series, people, season or company search result.""" + + series: SeriesBaseRecord | None = None + people: PeopleBaseRecord | None = None + movie: MovieBaseRecord | None = None + episode: EpisodeBaseRecord | None = None + company: Company | None = None + + +class SearchGetResponse(BaseModel): + data: list[SearchResult] | None = None + status: str | None = None + links: Links | None = None + + +class SearchRemoteidRemoteIdGetResponse(BaseModel): + data: list[SearchByRemoteIdResult] | None = None + status: str | None = None + + +class SearchResult(BaseModel): + """search result.""" + + aliases: list[str] | None = None + companies: list[str] | None = None + company_type: str | None = Field(default=None, alias="companyType") + country: str | None = None + director: str | None = None + first_air_time: str | None = None + genres: list[str] | None = None + id: str + image_url: str | None = None + name: str | None = None + is_official: bool | None = None + name_translated: str | None = None + network: str | None = None + object_id: str = Field(alias="objectID") + official_list: str | None = Field(default=None, alias="officialList") + overview: str | None = None + overviews: TranslationSimple | None = None + overview_translated: list[str] | None = None + poster: str | None = None + posters: list[str] | None = None + primary_language: str | None = None + remote_ids: list[RemoteID] | None = Field(default=None, json_schema_extra={"x-go-name": "RemoteIDs"}) + status: str | None = Field(default=None, json_schema_extra={"x-go-name": "Status"}) + slug: str | None = None + studios: list[str] | None = None + title: str | None = None + thumbnail: str 
| None = None + translations: TranslationSimple | None = None + translations_with_lang: list[str] | None = Field(default=None, alias="translationsWithLang") + tvdb_id: str | None = None + type: str | None = None + year: str | None = None + + +class SeasonBaseRecord(BaseModel): + """season genre record.""" + + id: int + image: str | None = None + image_type: int | None = Field(default=None, alias="imageType") + last_updated: str | None = Field(default=None, alias="lastUpdated") + name: str | None = None + name_translations: list[str] | None = Field( + default=None, + alias="nameTranslations", + json_schema_extra={"x-go-name": "NameTranslations"}, + ) + number: int | None = Field(default=None, json_schema_extra={"x-go-name": "Number"}) + overview_translations: list[str] | None = Field( + default=None, + alias="overviewTranslations", + json_schema_extra={"x-go-name": "OverviewTranslations"}, + ) + companies: Companies | None = None + series_id: int | None = Field(default=None, alias="seriesId", json_schema_extra={"x-go-name": "SeriesID"}) + type: SeasonType | None = None + year: str | None = None + + +class SeasonExtendedRecord(BaseModel): + """extended season record.""" + + artwork: list[ArtworkBaseRecord] | None = Field(default=None, json_schema_extra={"x-go-name": "Artwork"}) + companies: Companies | None = None + episodes: list[EpisodeBaseRecord] | None = Field(default=None, json_schema_extra={"x-go-name": "Episodes"}) + id: int + image: str | None = None + image_type: int | None = Field(default=None, alias="imageType") + last_updated: str | None = Field(default=None, alias="lastUpdated") + name: str | None = None + name_translations: list[str] | None = Field( + default=None, + alias="nameTranslations", + json_schema_extra={"x-go-name": "NameTranslations"}, + ) + number: int | None = Field(default=None, json_schema_extra={"x-go-name": "Number"}) + overview_translations: list[str] | None = Field( + default=None, + alias="overviewTranslations", + 
json_schema_extra={"x-go-name": "OverviewTranslations"}, + ) + series_id: int | None = Field(default=None, alias="seriesId", json_schema_extra={"x-go-name": "SeriesID"}) + trailers: list[Trailer] | None = Field(default=None, json_schema_extra={"x-go-name": "Trailers"}) + type: SeasonType | None = None + tag_options: list[TagOption] | None = Field( + default=None, alias="tagOptions", json_schema_extra={"x-go-name": "TagOptions"} + ) + translations: list[Translation] | None = None + year: str | None = None + + +class SeasonType(BaseModel): + """season type record.""" + + alternate_name: str | None = Field(default=None, alias="alternateName", json_schema_extra={"x-go-name": "Name"}) + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + name: str | None = Field(default=None, json_schema_extra={"x-go-name": "Name"}) + type: str | None = Field(default=None, json_schema_extra={"x-go-name": "Type"}) + + +class SeasonsGetResponse(BaseModel): + data: list[SeasonBaseRecord] | None = None + status: str | None = None + + +class SeasonsIdExtendedGetResponse(BaseModel): + data: SeasonExtendedRecord | None = None + status: str | None = None + + +class SeasonsIdGetResponse(BaseModel): + data: SeasonBaseRecord | None = None + status: str | None = None + + +class SeasonsIdTranslationsLanguageGetResponse(EpisodesIdTranslationsLanguageGetResponse): + pass + + +class SeasonsTypesGetResponse(BaseModel): + data: list[SeasonType] | None = None + status: str | None = None + + +class SeriesAirsDays(BaseModel): + """A series airs day record.""" + + friday: bool | None = Field(default=None, json_schema_extra={"x-go-name": "Friday"}) + monday: bool | None = Field(default=None, json_schema_extra={"x-go-name": "Monday"}) + saturday: bool | None = Field(default=None, json_schema_extra={"x-go-name": "Saturday"}) + sunday: bool | None = Field(default=None, json_schema_extra={"x-go-name": "Sunday"}) + thursday: bool | None = Field(default=None, 
json_schema_extra={"x-go-name": "Thursday"}) + tuesday: bool | None = Field(default=None, json_schema_extra={"x-go-name": "Tuesday"}) + wednesday: bool | None = Field(default=None, json_schema_extra={"x-go-name": "Wednesday"}) + + +class SeriesBaseRecord(BaseModel): + """The base record for a series. All series airs time like firstAired, lastAired, nextAired, etc. are in US EST for US series, and for all non-US series, the time of the show's country capital or most populous city. For streaming services, is the official release time. See https://support.thetvdb.com/kb/faq.php?id=29.""" + + aliases: list[Alias] | None = Field(default=None, json_schema_extra={"x-go-name": "Aliases"}) + average_runtime: int | None = Field(default=None, alias="averageRuntime") + country: str | None = None + default_season_type: int | None = Field( + default=None, + alias="defaultSeasonType", + json_schema_extra={"x-go-name": "DefaultSeasonType"}, + ) + episodes: list[EpisodeBaseRecord] | None = Field(default=None, json_schema_extra={"x-go-name": "Episodes"}) + first_aired: str | None = Field(default=None, alias="firstAired") + id: int + image: str | None = None + is_order_randomized: bool | None = Field( + default=None, + alias="isOrderRandomized", + json_schema_extra={"x-go-name": "IsOrderRandomized"}, + ) + last_aired: str | None = Field(default=None, alias="lastAired") + last_updated: str | None = Field(default=None, alias="lastUpdated") + name: str | None = None + name_translations: list[str] | None = Field( + default=None, + alias="nameTranslations", + json_schema_extra={"x-go-name": "NameTranslations"}, + ) + next_aired: str | None = Field(default=None, alias="nextAired", json_schema_extra={"x-go-name": "NextAired"}) + original_country: str | None = Field(default=None, alias="originalCountry") + original_language: str | None = Field(default=None, alias="originalLanguage") + overview_translations: list[str] | None = Field( + default=None, + alias="overviewTranslations", + 
json_schema_extra={"x-go-name": "OverviewTranslations"}, + ) + score: float | None = Field(default=None, json_schema_extra={"x-go-name": "Score"}) + slug: str | None = None + status: Status | None = None + year: str | None = None + + +class SeriesExtendedRecord(BaseModel): + """The extended record for a series. All series airs time like firstAired, lastAired, nextAired, etc. are in US EST for US series, and for all non-US series, the time of the show's country capital or most populous city. For streaming services, is the official release time. See https://support.thetvdb.com/kb/faq.php?id=29.""" + + abbreviation: str | None = None + airs_days: SeriesAirsDays | None = Field(default=None, alias="airsDays") + airs_time: str | None = Field(default=None, alias="airsTime") + aliases: list[Alias] | None = Field(default=None, json_schema_extra={"x-go-name": "Aliases"}) + artworks: list[ArtworkExtendedRecord] | None = Field(default=None, json_schema_extra={"x-go-name": "Artworks"}) + average_runtime: int | None = Field(default=None, alias="averageRuntime") + characters: list[Character] | None = Field(default=None, json_schema_extra={"x-go-name": "Characters"}) + content_ratings: list[ContentRating] | None = Field(default=None, alias="contentRatings") + country: str | None = None + default_season_type: int | None = Field( + default=None, + alias="defaultSeasonType", + json_schema_extra={"x-go-name": "DefaultSeasonType"}, + ) + episodes: list[EpisodeBaseRecord] | None = Field(default=None, json_schema_extra={"x-go-name": "Episodes"}) + first_aired: str | None = Field(default=None, alias="firstAired") + lists: list[ListBaseRecord] | None = None + genres: list[GenreBaseRecord] | None = Field(default=None, json_schema_extra={"x-go-name": "Genres"}) + id: int + image: str | None = None + is_order_randomized: bool | None = Field( + default=None, + alias="isOrderRandomized", + json_schema_extra={"x-go-name": "IsOrderRandomized"}, + ) + last_aired: str | None = Field(default=None, 
alias="lastAired") + last_updated: str | None = Field(default=None, alias="lastUpdated") + name: str | None = None + name_translations: list[str] | None = Field( + default=None, + alias="nameTranslations", + json_schema_extra={"x-go-name": "NameTranslations"}, + ) + companies: list[Company] | None = None + next_aired: str | None = Field(default=None, alias="nextAired", json_schema_extra={"x-go-name": "NextAired"}) + original_country: str | None = Field(default=None, alias="originalCountry") + original_language: str | None = Field(default=None, alias="originalLanguage") + original_network: Company | None = Field(default=None, alias="originalNetwork") + overview: str | None = None + latest_network: Company | None = Field(default=None, alias="latestNetwork") + overview_translations: list[str] | None = Field( + default=None, + alias="overviewTranslations", + json_schema_extra={"x-go-name": "OverviewTranslations"}, + ) + remote_ids: list[RemoteID] | None = Field( + default=None, alias="remoteIds", json_schema_extra={"x-go-name": "RemoteIDs"} + ) + score: float | None = Field(default=None, json_schema_extra={"x-go-name": "Score"}) + seasons: list[SeasonBaseRecord] | None = Field(default=None, json_schema_extra={"x-go-name": "Seasons"}) + season_types: list[SeasonType] | None = Field( + default=None, alias="seasonTypes", json_schema_extra={"x-go-name": "Seasons"} + ) + slug: str | None = None + status: Status | None = None + tags: list[TagOption] | None = Field(default=None, json_schema_extra={"x-go-name": "TagOptions"}) + trailers: list[Trailer] | None = Field(default=None, json_schema_extra={"x-go-name": "Trailers"}) + translations: TranslationExtended | None = None + year: str | None = None + + +class SeriesFilterGetResponse(BaseModel): + data: list[SeriesBaseRecord] | None = None + + +class SeriesGetResponse(BaseModel): + data: list[SeriesBaseRecord] | None = None + status: str | None = None + links: Links | None = None + + +class 
SeriesIdArtworksGetResponse(BaseModel): + data: SeriesExtendedRecord | None = None + status: str | None = None + + +class SeriesIdEpisodesSeasonTypeGetResponse(BaseModel): + data: Data1 | None = None + status: str | None = None + + +class SeriesIdEpisodesSeasonTypeLangGetResponse(BaseModel): + data: Data2 | None = None + status: str | None = None + + +class SeriesIdExtendedGetResponse(SeriesIdArtworksGetResponse): + pass + + +class SeriesIdGetResponse(BaseModel): + data: SeriesBaseRecord | None = None + status: str | None = None + + +class SeriesIdNextAiredGetResponse(SeriesIdGetResponse): + pass + + +class SeriesIdTranslationsLanguageGetResponse(EpisodesIdTranslationsLanguageGetResponse): + pass + + +class SeriesSlugSlugGetResponse(SeriesIdGetResponse): + pass + + +class SeriesStatusesGetResponse(MoviesStatusesGetResponse): + pass + + +class Short(Enum): + boolean_true = True + boolean_false = False + + +class Sort(Enum): + score = "score" + first_aired = "firstAired" + name_ = "name" + + +class Sort1(Enum): + score = "score" + first_aired = "firstAired" + last_aired = "lastAired" + name_ = "name" + + +class SortType(Enum): + asc = "asc" + desc = "desc" + + +class SourceType(BaseModel): + """source type record.""" + + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + name: str | None = Field(default=None, json_schema_extra={"x-go-name": "Name"}) + postfix: str | None = None + prefix: str | None = None + slug: str | None = Field(default=None, json_schema_extra={"x-go-name": "Slug"}) + sort: int | None = Field(default=None, json_schema_extra={"x-go-name": "Sort"}) + + +class SourcesTypesGetResponse(BaseModel): + data: list[SourceType] | None = None + status: str | None = None + + +class Status(BaseModel): + """status record.""" + + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + keep_updated: bool | None = Field( + default=None, + alias="keepUpdated", + json_schema_extra={"x-go-name": "KeepUpdated"}, + ) + 
name: str | None = Field(default=None, json_schema_extra={"x-go-name": "Name"}) + record_type: str | None = Field(default=None, alias="recordType", json_schema_extra={"x-go-name": "RecordType"}) + + +class Status1(Enum): + number_1 = 1 + number_2 = 2 + number_3 = 3 + + +class StudioBaseRecord(BaseModel): + """studio record.""" + + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + name: str | None = Field(default=None, json_schema_extra={"x-go-name": "Name"}) + parent_studio: int | None = Field(default=None, alias="parentStudio") + + +class Tag(BaseModel): + """tag record.""" + + allows_multiple: bool | None = Field( + default=None, + alias="allowsMultiple", + json_schema_extra={"x-go-name": "AllowsMultiple"}, + ) + help_text: str | None = Field(default=None, alias="helpText") + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + name: str | None = Field(default=None, json_schema_extra={"x-go-name": "Name"}) + options: list[TagOption] | None = Field(default=None, json_schema_extra={"x-go-name": "TagOptions"}) + + +class TagOption(BaseModel): + """tag option record.""" + + help_text: str | None = Field(default=None, alias="helpText") + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + name: str | None = Field(default=None, json_schema_extra={"x-go-name": "Name"}) + tag: int | None = Field(default=None, json_schema_extra={"x-go-name": "Tag"}) + tag_name: str | None = Field(default=None, alias="tagName", json_schema_extra={"x-go-name": "TagName"}) + + +class TagOptionEntity(BaseModel): + """a entity with selected tag option.""" + + name: str | None = None + tag_name: str | None = Field(default=None, alias="tagName") + tag_id: int | None = Field(default=None, alias="tagId") + + +class Trailer(BaseModel): + """trailer record.""" + + id: int | None = Field(default=None, json_schema_extra={"x-go-name": "ID"}) + language: str | None = None + name: str | None = None + url: str | None = None + 
runtime: int | None = None + + +class Translation(BaseModel): + """translation record.""" + + aliases: list[str] | None = None + is_alias: bool | None = Field(default=None, alias="isAlias") + is_primary: bool | None = Field(default=None, alias="isPrimary") + language: str | None = Field(default=None, json_schema_extra={"x-go-name": "Language"}) + name: str | None = None + overview: str | None = None + tagline: str | None = Field( + default=None, + description="Only populated for movie translations. We disallow taglines without a title.", + ) + + +class TranslationExtended(BaseModel): + """translation extended record.""" + + name_translations: list[Translation] | None = Field(default=None, alias="nameTranslations") + overview_translations: list[Translation] | None = Field(default=None, alias="overviewTranslations") + alias: list[str] | None = None + + +class TranslationSimple(RootModel[dict[str, str] | None]): + """translation simple record.""" + + root: dict[str, str] | None = None + + +class Type(Enum): + artwork = "artwork" + award_nominees = "award_nominees" + companies = "companies" + episodes = "episodes" + lists = "lists" + people = "people" + seasons = "seasons" + series = "series" + seriespeople = "seriespeople" + artworktypes = "artworktypes" + award_categories = "award_categories" + awards = "awards" + company_types = "company_types" + content_ratings = "content_ratings" + countries = "countries" + entity_types = "entity_types" + genres = "genres" + languages = "languages" + movies = "movies" + movie_genres = "movie_genres" + movie_status = "movie_status" + peopletypes = "peopletypes" + seasontypes = "seasontypes" + sourcetypes = "sourcetypes" + tag_options = "tag_options" + tags = "tags" + translatedcharacters = "translatedcharacters" + translatedcompanies = "translatedcompanies" + translatedepisodes = "translatedepisodes" + translatedlists = "translatedlists" + translatedmovies = "translatedmovies" + translatedpeople = "translatedpeople" + 
    translatedseasons = "translatedseasons"
    # NOTE(review): "translatedserierk" looks like a garbled spec value
    # (possibly "translatedseries") — it comes straight from the upstream
    # OpenAPI spec, so verify there before changing.
    translatedserierk = "translatedserierk"


class UpdatesGetResponse(BaseModel):
    data: list[EntityUpdate] | None = None
    status: str | None = None
    links: Links | None = None


class UserFavoritesGetResponse(BaseModel):
    data: list[Favorites] | None = None
    status: str | None = None


class UserGetResponse(BaseModel):
    data: list[UserInfo] | None = None
    status: str | None = None


class UserIdGetResponse(UserGetResponse):
    # Same payload shape as UserGetResponse; kept as a distinct type to
    # mirror the API spec's separate endpoint schema.
    pass


class UserInfo(BaseModel):
    """User info record."""

    id: int | None = None
    language: str | None = None
    name: str | None = None
    type: str | None = None


# --- src/utils/__init__.py ---
from .cat_api import get_cat_image_url
from .markdown import mention_command

# Public re-exports of the utils package.
__all__ = ["mention_command", "get_cat_image_url"]


# --- src/utils/cat_api.py ---
# NOTE(review): importing aiohttp through the discord namespace is unusual;
# confirm `from discord import aiohttp` is intended rather than `import aiohttp`.
from discord import aiohttp

CAT_API_URL = "https://api.thecatapi.com/v1/images/search"


async def get_cat_image_url(http_session: aiohttp.ClientSession) -> str:
    """Get a URL for a random cat image.

    The produced image can also be a GIF.
+ """ + async with http_session.get(CAT_API_URL) as resp: + resp.raise_for_status() + data = await resp.json() + return data[0]["url"] diff --git a/contemplative-constellations/src/utils/config.py b/contemplative-constellations/src/utils/config.py new file mode 100644 index 0000000..fc69566 --- /dev/null +++ b/contemplative-constellations/src/utils/config.py @@ -0,0 +1,94 @@ +"""File containing a typed wrapper function around ``decouple.config``.""" + +from __future__ import annotations + +from typing import Any, NewType, TYPE_CHECKING, TypeVar, cast, overload + +from decouple import UndefinedValueError, config + +if TYPE_CHECKING: + from collections.abc import Callable + + +__all__ = ["get_config"] + +T = TypeVar("T") +U = TypeVar("U") +Sentinel = NewType("Sentinel", object) +_MISSING = cast(Sentinel, object()) + + +@overload +def get_config( + search_path: str, + *, + cast: None = None, + default: U | Sentinel = _MISSING, +) -> str | U: ... + + +@overload +def get_config( + search_path: str, + *, + cast: Callable[[str], T], + default: U | Sentinel = _MISSING, +) -> T | U: ... + + +def get_config( + search_path: str, + *, + cast: Callable[[str], object] | None = None, + default: object = _MISSING, +) -> object: + """Typed wrapper around ``decouple.config`` for static type analysis.""" + try: + val = config(search_path) + except UndefinedValueError as exc: + if default is not _MISSING: + return default + raise exc from exc + + # Treat empty strings as unset values + if val == "": + if default is not _MISSING: + return default + + raise UndefinedValueError( + f"{search_path} was found, but the content was an empty string. " + "Set a non-empty value for the envvar or define a default value." + ) + + # We run this again, this time with a cast function. + # the reason we don't do this immediately is that the empty strings might not + # work with the cast function, which could raise various exceptions. 
    if cast is None:
        cast = lambda x: x
    return config(search_path, cast=cast)


@overload
def config_cast_list(cast: None = None) -> Callable[[str], list[str]]: ...


@overload
def config_cast_list(cast: Callable[[str], T]) -> Callable[[str], list[T]]: ...


def config_cast_list(cast: Callable[[str], object] | None = None) -> Callable[[str], list[Any]]:
    """Cast function to convert the content of an environmental variable to a list of values.

    This works by splitting the contents of the environmental variable on `,` characters.
    Currently, there is not support for escaping here, so list variables that require `,`
    symbol to be present will not work.

    You can use this function in :func:`get_config` for the ``cast`` argument.
    """
    # `str` is treated as no-op casting, same as passing no cast at all.
    if cast is None or cast is str:
        cast = lambda x: x

    def inner(raw_value: str) -> list[Any]:
        # Empty segments (e.g. from trailing commas) are skipped.
        return [cast(x) for x in raw_value.split(",") if x]

    return inner


# --- src/utils/database.py ---
import importlib
import pkgutil
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager
from typing import NoReturn

import alembic.config
from alembic.operations import Operations
from alembic.runtime.environment import EnvironmentContext
from alembic.runtime.migration import MigrationContext, RevisionStep
from alembic.script import ScriptDirectory
from sqlalchemy import Connection
from sqlalchemy.ext.asyncio import AsyncAttrs, AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.orm import DeclarativeBase

from src.settings import DB_ALWAYS_MIGRATE, ECHO_SQL, SQLITE_DATABASE_FILE
from src.utils.log import get_logger

log = get_logger(__name__)

__all__ = ["engine", "Base", "load_db_models", "get_db_session"]

SQLALCHEMY_URL =
f"sqlite+aiosqlite:///{SQLITE_DATABASE_FILE.absolute()}" +TABLES_PACKAGE_PATH = "src.db_tables" + + +engine = create_async_engine(SQLALCHEMY_URL, echo=ECHO_SQL) + +SessionLocal = async_sessionmaker(engine, expire_on_commit=False) + + +class Base(AsyncAttrs, DeclarativeBase): + """SQLAlchemy base class for registering ORM models. + + Note: Before calling ``Base.metadata.create_all``, all models that inherit + from this class must already be loaded (imported), so that this metaclass + can know about all of the models. See :func:`load_models`. + """ + + +def load_db_models() -> None: + """Import all models (all files/modules containing the models). + + This step is required before calling ``Base.metadata.create_all``, as all models + need to first be imported, so that they get registered into the :class:`Base` class. + """ + + def on_error(name: str) -> NoReturn: + """Handle an error encountered while walking packages.""" + raise ImportError(name=name) + + def ignore_module(module: pkgutil.ModuleInfo) -> bool: + """Return whether the module with name `name` should be ignored.""" + return any(name.startswith("_") for name in module.name.split(".")) + + log.debug(f"Loading database modules from {TABLES_PACKAGE_PATH}") + db_module = importlib.import_module(TABLES_PACKAGE_PATH) + for module_info in pkgutil.walk_packages(db_module.__path__, f"{db_module.__name__}.", onerror=on_error): + if ignore_module(module_info): + continue + + log.debug(f"Loading database module: {module_info.name}") + importlib.import_module(module_info.name) + + +def apply_db_migrations(db_conn: Connection) -> None: + """Apply alembic database migrations. + + This method will first check if the database is empty (no applied alembic revisions), + in which case, it use SQLAlchemy to create all tables and then stamp the database for alembic. + + If the database is not empty, it will apply all necessary migrations, bringing the database + up to date with the latest revision. 
+ """ + # Create a standalone minimal config, that doesn't use alembic.ini + # (we don't want to load env.py, since they do a lot of things we don't want + # like setting up logging in a different way, ...) + alembic_cfg = alembic.config.Config() + alembic_cfg.set_main_option("script_location", "alembic-migrations") + alembic_cfg.set_main_option("sqlalchemy.url", SQLALCHEMY_URL) + + script = ScriptDirectory.from_config(alembic_cfg) + + def retrieve_migrations(rev: str, context: MigrationContext) -> list[RevisionStep]: + """Retrieve all remaining migrations to be applied to get to "head". + + The returned migrations will be the migrations that will get applied when upgrading. + """ + migrations = script._upgrade_revs("head", rev) # pyright: ignore[reportPrivateUsage] + + if len(migrations) > 0: + log.info(f"Applying {len(migrations)} database migrations") + else: + log.debug("No database migrations to apply, database is up to date") + + return migrations + + env_context = EnvironmentContext(alembic_cfg, script) + env_context.configure(connection=db_conn, target_metadata=Base.metadata, fn=retrieve_migrations) + context = env_context.get_context() + + current_rev = context.get_current_revision() + + # If there is no current revision, this is a brand new database + # instead of going through the migrations, we can instead use metadata.create_all + # to create all tables and then stamp the database with the head revision. 
+ if current_rev is None and not DB_ALWAYS_MIGRATE: + log.info("Performing initial database setup (creating tables)") + Base.metadata.create_all(db_conn) + context.stamp(script, "head") + return + + log.debug("Checking for database migrations") + with Operations.context(context) as _op, context.begin_transaction(): + context.run_migrations() + + +@asynccontextmanager +async def get_db_session() -> AsyncIterator[AsyncSession]: + """Obtain a database session.""" + async with SessionLocal() as session: + yield session diff --git a/contemplative-constellations/src/utils/iterators.py b/contemplative-constellations/src/utils/iterators.py new file mode 100644 index 0000000..e2719fc --- /dev/null +++ b/contemplative-constellations/src/utils/iterators.py @@ -0,0 +1,9 @@ +from collections.abc import Iterator + + +def get_first[T, V](it: Iterator[T], default: V = None) -> T | V: + """Get the first item from an iterable, or `default` if it's empty.""" + try: + return next(it) + except StopIteration: + return default diff --git a/contemplative-constellations/src/utils/log.py b/contemplative-constellations/src/utils/log.py new file mode 100644 index 0000000..0d7b370 --- /dev/null +++ b/contemplative-constellations/src/utils/log.py @@ -0,0 +1,202 @@ +"""Logging configuration for the project. + +Note: + Whenever logging is needed, the `get_logger` function from this module should be used. + Do not use the default `logging.getLogger` function, as it does not return the correct logger type. 
+""" + +import logging +import logging.handlers +import os +import sys +from pathlib import Path +from typing import Any, TYPE_CHECKING, cast + +import coloredlogs + +from src.utils.config import get_config + +# We set these values here instead of getting them from +DEBUG = get_config("DEBUG", cast=bool, default=False) +LOG_FILE = get_config("LOG_FILE", cast=Path, default=None) +TRACE_LEVEL_FILTER = get_config("TRACE_LEVEL_FILTER", default=None) + +LOG_FORMAT = "%(asctime)s | %(name)s | %(levelname)7s | %(message)s" +TRACE_LEVEL = 5 + + +if TYPE_CHECKING: + LoggerClass = logging.Logger +else: + LoggerClass = logging.getLoggerClass() + + +class CustomLogger(LoggerClass): + """Custom implementation of the `Logger` class with an added `trace` method.""" + + def trace(self, msg: str, *args: object, **kwargs: Any) -> None: + """Log 'msg % args' with severity 'TRACE'. + + To pass exception information, use the keyword argument exc_info with + a true value, e.g. + + logger.trace("Houston, we have an %s", "interesting problem", exc_info=1) + """ + if self.isEnabledFor(TRACE_LEVEL): + self.log(TRACE_LEVEL, msg, *args, **kwargs) + + +def get_logger(name: str | None = None, *, skip_class_check: bool = False) -> CustomLogger: + """Utility to make the type checker recognise that logger is of type `CustomLogger`. + + Additionally, in case logging isn't already set up, :meth:`.setup_logging` is ran. + + This is necessary as this function is lying to the type-checker by using explicit + :meth:`cast`, specifying that the logger is of the :class:`CustomLogger` type, + when in fact it might not be. + + :param skip_class_check: + When ``True``, the logger class check, which ensures logging was already set up + will be skipped. + + Do know that disabling this check can be dangerous, as it might result in this + function returning a regular logger, with typing information of custom logger, + leading to issues like ``get_logger().trace`` producing an :exc:`AttributeError`. 
+ """ + if not skip_class_check and logging.getLoggerClass() is not CustomLogger: + setup_logging() + + # Ideally, we would log this before running the setup_logging function, however + # we that would produce an unformatted (default) log, which is not what we want. + log = logging.getLogger(__name__) + log.debug("Ran setup_log (logger was requested).") + + return cast(CustomLogger, logging.getLogger(name)) + + +def setup_logging() -> None: + """Sets up logging library to use our logging configuration. + + This function only needs to be called once, at the program start. + """ + # This indicates that logging was already set up, no need to do it again + if logging.getLoggerClass() is CustomLogger: + log = get_logger(__name__) + log.debug("Attempted to setup logging, when it was already set up") + return + + # Setup log levels first, so that get_logger will not attempt to call setup_logging itself. + _setup_trace_level() + + root_log = get_logger() + _setup_coloredlogs(root_log) + _setup_logfile(root_log) + _setup_log_levels(root_log) + _setup_external_log_levels(root_log) + + +def _setup_trace_level() -> None: + """Setup logging to recognize our new TRACE level.""" + logging.TRACE = TRACE_LEVEL # pyright: ignore[reportAttributeAccessIssue] + logging.addLevelName(TRACE_LEVEL, "TRACE") + logging.setLoggerClass(CustomLogger) + + +def _setup_coloredlogs(root_log: LoggerClass) -> None: + """Install coloredlogs and set it up to use our log format.""" + if "COLOREDLOGS_LOG_FORMAT" not in os.environ: + coloredlogs.DEFAULT_LOG_FORMAT = LOG_FORMAT + + if "COLOREDLOGS_LEVEL_STYLES" not in os.environ: + coloredlogs.DEFAULT_LEVEL_STYLES = { + **coloredlogs.DEFAULT_LEVEL_STYLES, + "trace": {"color": 246}, + "critical": {"background": "red"}, + } + + if "COLOREDLOGS_DEFAULT_FIELD_STYLES" not in os.environ: + coloredlogs.DEFAULT_FIELD_STYLES = { + **coloredlogs.DEFAULT_FIELD_STYLES, + "levelname": {"color": "magenta", "bold": True}, + } + + # The log level here is set to TRACE, so 
    # that coloredlogs covers all messages.
    # This however doesn't mean that our log level will actually be set to TRACE,
    # that's configured by setting the root log's log level directly.
    coloredlogs.install(level=TRACE_LEVEL, logger=root_log, stream=sys.stdout)


def _setup_logfile(root_log: LoggerClass) -> None:
    """Setup a file handle for logging using our log format."""
    if LOG_FILE is None:
        return

    # Make sure the log directory exists before attaching the file handler.
    LOG_FILE.parent.mkdir(parents=True, exist_ok=True)

    file_handler = logging.FileHandler(LOG_FILE)
    log_formatter = logging.Formatter(LOG_FORMAT)
    file_handler.setFormatter(log_formatter)
    root_log.addHandler(file_handler)


def _setup_log_levels(root_log: LoggerClass) -> None:
    """Set loggers to the log levels according to the value from the TRACE_LEVEL_FILTER and DEBUG env vars.

    DEBUG env var:
    - When set to a truthy value (1,true,yes), the root log level will be set to DEBUG.
    - Otherwise (including if not set at all), the root log level will be set to INFO.

    TRACE_LEVEL_FILTER env var:
    This variable is ignored if DEBUG is not set to a truthy value!

    - If not set, no trace logs will appear and root log will be set to DEBUG.
    - If set to "*", the root logger will be set to the TRACE level. All trace logs will appear.
    - When set to a list of logger names, delimited by a comma, each of the listed loggers will
      be set to the TRACE level. The root logger will retain the DEBUG log level.
    - If this list is prefixed by a "!", the root logger is set to TRACE level, with all of the
      listed loggers set to a DEBUG log level.
+ """ + # DEBUG wasn't specified, no DEBUG level logs (INFO log level) + if not DEBUG: + root_log.setLevel(logging.INFO) + return + + # TRACE_LEVEL_FILTER wasn't specified, no TRACE level logs (DEBUG log level) + if TRACE_LEVEL_FILTER is None: + root_log.setLevel(logging.DEBUG) + return + + # TRACE_LEVEL_FILTER enables all TRACE loggers + if TRACE_LEVEL_FILTER == "*": + root_log.setLevel(TRACE_LEVEL) + return + + # TRACE_LEVEL_FILTER is a list of loggers to not set to TRACE level (default is TRACE) + if TRACE_LEVEL_FILTER.startswith("!"): + root_log.setLevel(TRACE_LEVEL) + for logger_name in TRACE_LEVEL_FILTER.removeprefix("!").strip(",").split(","): + get_logger(logger_name).setLevel(logging.DEBUG) + return + + # TRACE_LEVEL_FILTER is a list of loggers to set to TRACE level + root_log.setLevel(logging.DEBUG) + for logger_name in TRACE_LEVEL_FILTER.strip(",").split(","): + get_logger(logger_name).setLevel(TRACE_LEVEL) + + +def _setup_external_log_levels(root_log: LoggerClass) -> None: + """Set log levels of some external libraries explicitly. + + Some libraries produce a lot of logs which we don't necessarily need to see, + and they often tend to clutter our own. These libraries have their log levels + set explicitly here, avoiding unneeded spammy logs. 
+ """ + get_logger("asyncio").setLevel(logging.INFO) + get_logger("discord.http").setLevel(logging.INFO) + get_logger("discord.gateway").setLevel(logging.WARNING) + get_logger("discord.webhook.async_").setLevel(logging.INFO) + get_logger("aiosqlite").setLevel(logging.INFO) + get_logger("alembic.runtime.migration").setLevel(logging.WARNING) + get_logger("aiocache.base").setLevel(logging.INFO) + + get_logger("parso").setLevel(logging.WARNING) # For usage in IPython diff --git a/contemplative-constellations/src/utils/markdown.py b/contemplative-constellations/src/utils/markdown.py new file mode 100644 index 0000000..033694d --- /dev/null +++ b/contemplative-constellations/src/utils/markdown.py @@ -0,0 +1,8 @@ +from typing import Any + +import discord + + +def mention_command(command: discord.ApplicationCommand[Any, ..., Any]) -> str: + """Mentions the command using discord markdown.""" + return f"" diff --git a/contemplative-constellations/src/utils/ratelimit.py b/contemplative-constellations/src/utils/ratelimit.py new file mode 100644 index 0000000..0702cc4 --- /dev/null +++ b/contemplative-constellations/src/utils/ratelimit.py @@ -0,0 +1,215 @@ +import time +from collections.abc import Awaitable, Callable +from functools import wraps +from typing import Concatenate, cast + +from aiocache import BaseCache +from discord import ApplicationContext, Cog + +from src.bot import Bot +from src.utils.log import get_logger + +log = get_logger(__name__) + + +class RateLimitExceededError(Exception): + """Exception raised when a rate limit was exceeded.""" + + def __init__( + self, + msg: str | None, + *, + key: str, + limit: int, + period: float, + closest_expiration: float, + updates_when_exceeded: bool, + ) -> None: + """Initialize the rate limit exceeded error. + + :param msg: + Custom error message to include in the exception. + + This exception should also be shown to the user if the exception makes its way + to the error handler. 
            If this is not provided, a generic message will be used.
        :param key: Cache key that was rate-limited.
        :param limit: The number of requests allowed in the period.
        :param period: The period of time in seconds, in which the limit is enforced.
        :param closest_expiration: The unix time-stamp of the closest expiration of the rate limit.
        :param updates_when_exceeded: Does the rate limit get updated even if it was exceeded.
        """
        self.msg = msg
        self.key = key
        self.limit = limit
        self.period = period
        self.closest_expiration = closest_expiration
        self.updates_when_exceeded = updates_when_exceeded

        err_msg = f"Rate limit exceeded for key '{key}' ({limit}/{period}s)"
        if msg:
            err_msg += f": {msg}"
        super().__init__(err_msg)


async def rate_limit(
    cache: BaseCache,
    key: str,
    *,
    limit: int,
    period: float,
    update_when_exceeded: bool = False,
    err_msg: str | None = None,
) -> None:
    """Log a new request for given key, enforcing the rate limit.

    The cache keys are name-spaced under 'rate-limit' to avoid conflicts with other cache keys.

    The rate-limiting uses a sliding window approach, where each request has its own expiration time
    (i.e. a request is allowed if it is within the last `period` seconds). The requests are stored
    as time-stamps under given key in the cache.

    :param cache: The cache instance used to keep track of the rate limits.
    :param key: The key to use for this rate-limit.
    :param limit: The number of requests allowed in the period.
    :param period: The period of time in seconds, in which the limit is enforced.
    :param update_when_exceeded:
        Log a new rate-limit request time even if the limit was exceeded.

        This can be useful to disincentivize users from spamming requests, as they would
        otherwise still receive the response eventually. With this behavior, they will
        actually need to wait and not spam requests.

        By default, this behavior is disabled, mainly for global / internal rate limits.
    :param err_msg:
        Custom error message to include in the `RateLimitExceededError` exception.

        This message will be caught by the error handler and sent to the user, instead
        of using a more generic message.
    :raises RateLimitExceededError: If the rate limit was exceeded.
    """
    current_timestamp = time.time()

    # No existing requests
    if not await cache.exists(key, namespace="rate-limit"):
        log.trace(f"No existing rate-limit requests for key {key!r}, adding the first one")
        await cache.set(key, (current_timestamp,), ttl=period, namespace="rate-limit")
        return

    # Get the existing requests
    cache_time_stamps = cast(tuple[float, ...], await cache.get(key, namespace="rate-limit"))
    log.trace(f"Fetched {len(cache_time_stamps)} existing requests for key {key!r}")

    # Expire requests older than the period
    remaining_time_stamps = list(cache_time_stamps)
    for time_stamp in cache_time_stamps:
        if (current_timestamp - time_stamp) > period:
            remaining_time_stamps.remove(time_stamp)

    # Also remove the oldest requests, keeping only up to limit
    # This is just to avoid the list growing for no reason.
    # As an advantage, it also makes it easier to find the closest expiration time.
    remaining_time_stamps = remaining_time_stamps[-limit:]

    log.trace(f"Found {len(remaining_time_stamps)} non-expired existing requests for key {key!r}")

    # Add the new request, along with the existing non-expired ones, resetting the key
    # Only do this if the rate limit wasn't exceeded, or if updating on exceeded requests is enabled
    if len(remaining_time_stamps) < limit or update_when_exceeded:
        log.trace("Updating rate limit with the new request")
        new_timestamps: tuple[float, ...]
            = (*remaining_time_stamps, current_timestamp)
        await cache.set(key, new_timestamps, ttl=period, namespace="rate-limit")

    # Check if the limit was exceeded
    if len(remaining_time_stamps) >= limit:
        # If update on exceeded requests are enabled, add the current timestamp to the list
        # and trim to limit requests, allowing us to obtain the proper closest timestamp
        if update_when_exceeded:
            remaining_time_stamps.append(current_timestamp)
            remaining_time_stamps = remaining_time_stamps[-limit:]

        # The oldest surviving request is the one that will expire first.
        closest_expiration = min(remaining_time_stamps) + period

        log.debug(f"Rate limit exceeded on key: {key!r}")
        log.trace(f"Exceeded rate limit details: {limit}/{period}s, {remaining_time_stamps=!r}, {closest_expiration=}")
        raise RateLimitExceededError(
            err_msg,
            key=key,
            limit=limit,
            period=period,
            closest_expiration=closest_expiration,
            updates_when_exceeded=update_when_exceeded,
        )


# Signature of a cog slash-command method: (self, ctx, *args, **kwargs) -> None.
type CogCommandFunction[T: Cog, **P] = Callable[Concatenate[T, ApplicationContext, P], Awaitable[None]]
# A decorator parameter computed lazily from (self, ctx) at invocation time.
type TransformFunction[T, R] = Callable[[T, ApplicationContext], R]


def rate_limited[T: Cog, **P](
    key: str | Callable[[T, ApplicationContext], str],
    *,
    limit: int | TransformFunction[T, int],
    period: float | TransformFunction[T, float],
    update_when_exceeded: bool | TransformFunction[T, bool] = False,
    err_msg: str | None | TransformFunction[T, str | None] = None,
    prefix_key: bool = False,
) -> Callable[[CogCommandFunction[T, P]], CogCommandFunction[T, P]]:
    """Apply rate limits to given cog command function.

    The decorated function must be a slash command function that belongs to a cog class
    (as an instance method). Make sure to apply this decorator before the ``slash_command``
    decorator.

    This uses the :func:`rate_limit` function internally to enforce the rate limits.
    See its description for more info.
    All of the parameters (except `prefix_key`) can be set directly, or they can be callables,
    which will get called with self (the cog instance) and ctx, using the return value as the
    value of that parameter. These parameters will then all be forwarded to the ``rate_limit`` function.

    :param prefix_key: Whether to prefix the key with the hash of the slash command function object.

    .. note::
        Py-cord does provide a built-in way to rate-limit commands through "cooldown" structures.
        These work similarly to our custom implementation, but bucketing isn't as flexible and
        doesn't work globally across the whole application.

        Using this decorator is therefore preferred, even for simple rate limits, for consistency.
    """

    def inner(func: CogCommandFunction[T, P]) -> CogCommandFunction[T, P]:
        @wraps(func)
        async def wrapper(self: T, ctx: ApplicationContext, *args: P.args, **kwargs: P.kwargs) -> None:
            bot = ctx.bot
            if not isinstance(bot, Bot):
                raise TypeError(
                    "The bot instance must be of our custom Bot type (src.bot.Bot), "
                    f"found: {bot.__class__.__qualname__}"
                )

            cache = bot.cache

            # Call transformer functions, if used
            key_ = key(self, ctx) if isinstance(key, Callable) else key
            limit_ = limit(self, ctx) if isinstance(limit, Callable) else limit
            period_ = period(self, ctx) if isinstance(period, Callable) else period
            update_when_exceeded_ = (
                update_when_exceeded(self, ctx) if isinstance(update_when_exceeded, Callable) else update_when_exceeded
            )
            err_msg_ = err_msg(self, ctx) if isinstance(err_msg, Callable) else err_msg

            if prefix_key:
                # hash(func) namespaces the cache key to this specific command function.
                key_ = f"{hash(func)}-{key_}"

            await rate_limit(
                cache,
                key_,
                limit=limit_,
                period=period_,
                update_when_exceeded=update_when_exceeded_,
                err_msg=err_msg_,
            )
            return await func(self, ctx, *args, **kwargs)

        return wrapper

    return inner
# --- src/utils/tvdb.py ---
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from src.tvdb.client import Episode


def by_season(episodes: list["Episode"]) -> dict[int, list["Episode"]]:
    """Group episodes by season.

    Returns a mapping of season number -> episodes, preserving the input
    order of episodes within each season.
    """
    seasons: dict[int, list["Episode"]] = {}
    for episode in episodes:
        seasons.setdefault(episode.season_number, []).append(episode)
    return seasons


# --- tests/__init__.py and tools/__init__.py: empty package markers ---


# --- tools/generate_tvdb_models.py ---
import subprocess
from pathlib import Path
from urllib.parse import ParseResult, urlparse

from datamodel_code_generator import DataModelType, InputFileType, OpenAPIScope, PythonVersion, generate

# Header prepended to the generated module, silencing lint rules that do not
# apply to generated code.
HEADER: str = """# ruff: noqa: D101, ERA001, E501
"""


def _generate_models() -> Path:
    """Generate pydantic models from the TVDB v4 OpenAPI (swagger) spec and return the output path."""
    output = Path("./src/tvdb/generated_models.py")
    url: ParseResult = urlparse("https://thetvdb.github.io/v4-api/swagger.yml")
    generate(
        url,
        input_file_type=InputFileType.OpenAPI,
        input_filename="swagger.yml",
        output=output,
        output_model_type=DataModelType.PydanticV2BaseModel,
        field_constraints=True,
        snake_case_field=True,
        target_python_version=PythonVersion.PY_312,
        use_default_kwarg=True,
        use_union_operator=True,
        reuse_model=True,
        field_include_all_keys=True,
        strict_nullable=True,
        use_schema_description=True,
        keep_model_order=True,
        enable_version_header=True,
        openapi_scopes=[OpenAPIScope.Schemas, OpenAPIScope.Paths],
    )
    with
output.open("r") as f: + contents = f.read() + contents = contents.replace("’", "'") # noqa: RUF001 + with output.open("w") as f: + f.write(HEADER + contents) + f.truncate() + return output + + +def _run_ruff(file_path: Path) -> None: + subprocess.run(["poetry", "run", "ruff", "check", "--fix", "--unsafe-fixes", str(file_path)], check=True) # noqa: S603, S607 + + +def main() -> None: + """The main entry point for the script.""" + generated_file = _generate_models() + _run_ruff(generated_file) + + +if __name__ == "__main__": + main()