From ae2c6763ee5e8b7831c29ef6abdb881222928765 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 12 Sep 2024 23:06:22 +0300 Subject: [PATCH 001/153] chore(ci): remove the limitation on branches for automatic testing --- .github/workflows/run-linter-and-tests.yaml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.github/workflows/run-linter-and-tests.yaml b/.github/workflows/run-linter-and-tests.yaml index 8fec05e5..3bbc0c9c 100644 --- a/.github/workflows/run-linter-and-tests.yaml +++ b/.github/workflows/run-linter-and-tests.yaml @@ -1,11 +1,7 @@ name: Lint on: pull_request: - branches: - - main push: - branches: - - main jobs: run-linters-and-tests: runs-on: ubuntu-latest From bbdd4eafea5949b2cd4331e3c7dc23b61b41c574 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 12 Sep 2024 23:10:15 +0300 Subject: [PATCH 002/153] chore(domain): replace optional model to new domain module --- internal/domain/common/README.md | 2 ++ internal/domain/common/__init__.py | 1 + internal/domain/common/optional_model.py | 38 ++++++++++++++++++++++++ 3 files changed, 41 insertions(+) create mode 100644 internal/domain/common/README.md create mode 100644 internal/domain/common/__init__.py create mode 100644 internal/domain/common/optional_model.py diff --git a/internal/domain/common/README.md b/internal/domain/common/README.md new file mode 100644 index 00000000..60efd9c0 --- /dev/null +++ b/internal/domain/common/README.md @@ -0,0 +1,2 @@ +## domain.common module +This module contains shared components, utilities, and base classes that are used across multiple domain entities in the project. The goal of these components is to promote code reusability and maintain consistency within the domain layer of the application. 
diff --git a/internal/domain/common/__init__.py b/internal/domain/common/__init__.py new file mode 100644 index 00000000..6ca710f4 --- /dev/null +++ b/internal/domain/common/__init__.py @@ -0,0 +1 @@ +from .optional_model import OptionalModel diff --git a/internal/domain/common/optional_model.py b/internal/domain/common/optional_model.py new file mode 100644 index 00000000..7ccdfac4 --- /dev/null +++ b/internal/domain/common/optional_model.py @@ -0,0 +1,38 @@ +from __future__ import annotations +from pydantic import BaseModel +from typing import Any + + +class OptionalModel(BaseModel): + """ + A base model class that automatically sets all fields, except those defined in + `__non_optional_fields__`, to `None` by default. This allows for the creation + of models where fields are optional unless explicitly marked as required. + + Attributes: + __non_optional_fields__ (set): A set of field names that should remain + non-optional. Fields listed here will not have `None` as their default value. + + """ + __non_optional_fields__ = set() + + @classmethod + def __pydantic_init_subclass__(cls, **kwargs: Any) -> None: + """ + Class-level initializer that ensures all fields except those specified + in `__non_optional_fields__` are set to `None` by default. This method + is called during the subclass initialization process. + + Args: + **kwargs: Arbitrary keyword arguments passed to the superclass initializer. 
+ """ + super().__pydantic_init_subclass__(**kwargs) + + for field_name, value in cls.model_fields.items(): + if field_name in cls.__non_optional_fields__: + if value.default is None: + raise ValueError(f"Field '{field_name}' is in __non_optional_fields__ but has a default value of None.") + continue + value.default = None + + cls.model_rebuild(force=True) From 20fa0c1211aa25b8f59cf512971fb8c8f39527ad Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 12 Sep 2024 23:10:40 +0300 Subject: [PATCH 003/153] feat(tests): add tests for optional model --- tests/domain/common/test_optional_model.py | 70 ++++++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100644 tests/domain/common/test_optional_model.py diff --git a/tests/domain/common/test_optional_model.py b/tests/domain/common/test_optional_model.py new file mode 100644 index 00000000..e67ed442 --- /dev/null +++ b/tests/domain/common/test_optional_model.py @@ -0,0 +1,70 @@ +import pytest +from pydantic import ValidationError + +from internal.domain.common import OptionalModel + + +class OptionalModelWithoutRequiredFields(OptionalModel): + implicitly_optional_field: str + clearly_optional_field: str | None = None + + +@pytest.mark.parametrize( + "kwargs, expected_implicitly, expected_clearly", + [ + ({}, None, None), + ({"implicitly_optional_field": "value"}, "value", None), + ({"clearly_optional_field": "value"}, None, "value"), + ( + {"implicitly_optional_field": "value", "clearly_optional_field": "value"}, + "value", + "value", + ), + ( + {"implicitly_optional_field": "value", "clearly_optional_field": None}, + "value", + None, + ), + ], +) +def test_model_initialization(kwargs, expected_implicitly, expected_clearly): + model = OptionalModelWithoutRequiredFields(**kwargs) + + assert model.implicitly_optional_field == expected_implicitly + assert model.clearly_optional_field == expected_clearly + + +class OptionalModelWithRequiredFields(OptionalModel): + __non_optional_fields__ = { + "non_optional_field", 
+ } + + non_optional_field: str + optional_field: str + + +@pytest.mark.parametrize( + "kwargs, expected_non_optional_field, expected_optional_field, should_fail", + [ + ({}, None, None, True), + ({"non_optional_field": "value"}, "value", None, False), + ({"optional_field": "value"}, None, "value", True), + ( + {"non_optional_field": "value", "optional_field": "value"}, + "value", + "value", + False, + ), + ({"non_optional_field": None, "optional_field": None}, None, None, True), + ], +) +def test_model_with_non_optional_fields_initialization( + kwargs, expected_non_optional_field, expected_optional_field, should_fail +): + if should_fail: + with pytest.raises(ValidationError): + OptionalModelWithRequiredFields(**kwargs) + else: + model = OptionalModelWithRequiredFields(**kwargs) + assert model.non_optional_field == expected_non_optional_field + assert model.optional_field == expected_optional_field From f801d15eff3194533fe10fc863b156f678192574 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 12 Sep 2024 23:31:59 +0300 Subject: [PATCH 004/153] feat(domain): add file entity --- internal/domain/file/README.md | 19 ++++++++++++++++++ internal/domain/file/__init__.py | 1 + internal/domain/file/file.py | 33 ++++++++++++++++++++++++++++++++ 3 files changed, 53 insertions(+) create mode 100644 internal/domain/file/README.md create mode 100644 internal/domain/file/__init__.py create mode 100644 internal/domain/file/file.py diff --git a/internal/domain/file/README.md b/internal/domain/file/README.md new file mode 100644 index 00000000..6ed54b23 --- /dev/null +++ b/internal/domain/file/README.md @@ -0,0 +1,19 @@ +## domain.file module +This module contains core entities for managing and handling files and datasets. It includes abstractions and utilities for working with files and offers ways to represent datasets. 
+ +## Usage +### File entity +The `File` class represents a file entity and generates a UUID for each file instance, ensuring that every file is uniquely identifiable. +Example: +```python +from internal.domain.file import File + +# Creating a new file instance +file = File() + +# Access the file's UUID as a string +print(file.name) + +# Access the file's UUID in UUID format +print(file.name_as_uuid) +``` diff --git a/internal/domain/file/__init__.py b/internal/domain/file/__init__.py new file mode 100644 index 00000000..401f3300 --- /dev/null +++ b/internal/domain/file/__init__.py @@ -0,0 +1 @@ +from .file import File diff --git a/internal/domain/file/file.py b/internal/domain/file/file.py new file mode 100644 index 00000000..b3e9e3a1 --- /dev/null +++ b/internal/domain/file/file.py @@ -0,0 +1,33 @@ +from uuid import uuid4, UUID + + +class File: + """ + A class that represents a file with a unique identifier. + """ + + def __init__(self): + """ + Initializes a new file instance with a unique UUID as the file's name. + """ + self._name = uuid4() + + @property + def name(self) -> str: + """ + Returns the file's UUID as a string. + + Returns: + str: The UUID of the file in string format. + """ + return str(self._name) + + @property + def name_as_uuid(self) -> UUID: + """ + Returns the file's UUID as a UUID object. + + Returns: + UUID: The UUID of the file in UUID format. 
+ """ + return self._name From 9f7757e0e41ffbbb509b6aacf0e3ccfb60b0226c Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 12 Sep 2024 23:32:21 +0300 Subject: [PATCH 005/153] feat(tests): add tests for file entity --- tests/domain/file/test_file_entity.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 tests/domain/file/test_file_entity.py diff --git a/tests/domain/file/test_file_entity.py b/tests/domain/file/test_file_entity.py new file mode 100644 index 00000000..393a3eee --- /dev/null +++ b/tests/domain/file/test_file_entity.py @@ -0,0 +1,17 @@ +from uuid import UUID + +from internal.domain.file import File + + +def test_file_initialization(): + file = File() + assert isinstance(file._name, UUID) + assert isinstance(file.name, str) + assert isinstance(file.name_as_uuid, UUID) + + +def test_file_name_properties(): + file = File() + + assert file.name == str(file._name) + assert file.name_as_uuid == file._name From 464bf2c52871de649b24f4b73023d8b1a3ecd060 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 13 Sep 2024 00:14:16 +0300 Subject: [PATCH 006/153] chore(domain): replace and restructure task entities to new domain module --- internal/domain/task/README.md | 0 internal/domain/task/__init__.py | 2 + internal/domain/task/entities/__init__.py | 2 + internal/domain/task/entities/afd/__init__.py | 1 + internal/domain/task/entities/afd/afd_task.py | 24 +++++ internal/domain/task/entities/fd/__init__.py | 1 + internal/domain/task/entities/fd/fd_task.py | 51 ++++++++++ internal/domain/task/entities/task.py | 21 ++++ .../domain/task/value_objects/__init__.py | 41 ++++++++ .../domain/task/value_objects/afd/__init__.py | 20 ++++ .../task/value_objects/afd/algo_config.py | 38 +++++++ .../task/value_objects/afd/algo_name.py | 6 ++ .../task/value_objects/afd/algo_result.py | 4 + internal/domain/task/value_objects/config.py | 23 +++++ .../domain/task/value_objects/fd/__init__.py | 20 ++++ .../task/value_objects/fd/algo_config.py | 98 
+++++++++++++++++++ .../domain/task/value_objects/fd/algo_name.py | 14 +++ .../task/value_objects/fd/algo_result.py | 15 +++ .../task/value_objects/primitive_name.py | 13 +++ internal/domain/task/value_objects/result.py | 13 +++ tests/domain/task/test_fd.py | 6 +- 21 files changed, 410 insertions(+), 3 deletions(-) create mode 100644 internal/domain/task/README.md create mode 100644 internal/domain/task/__init__.py create mode 100644 internal/domain/task/entities/__init__.py create mode 100644 internal/domain/task/entities/afd/__init__.py create mode 100644 internal/domain/task/entities/afd/afd_task.py create mode 100644 internal/domain/task/entities/fd/__init__.py create mode 100644 internal/domain/task/entities/fd/fd_task.py create mode 100644 internal/domain/task/entities/task.py create mode 100644 internal/domain/task/value_objects/__init__.py create mode 100644 internal/domain/task/value_objects/afd/__init__.py create mode 100644 internal/domain/task/value_objects/afd/algo_config.py create mode 100644 internal/domain/task/value_objects/afd/algo_name.py create mode 100644 internal/domain/task/value_objects/afd/algo_result.py create mode 100644 internal/domain/task/value_objects/config.py create mode 100644 internal/domain/task/value_objects/fd/__init__.py create mode 100644 internal/domain/task/value_objects/fd/algo_config.py create mode 100644 internal/domain/task/value_objects/fd/algo_name.py create mode 100644 internal/domain/task/value_objects/fd/algo_result.py create mode 100644 internal/domain/task/value_objects/primitive_name.py create mode 100644 internal/domain/task/value_objects/result.py diff --git a/internal/domain/task/README.md b/internal/domain/task/README.md new file mode 100644 index 00000000..e69de29b diff --git a/internal/domain/task/__init__.py b/internal/domain/task/__init__.py new file mode 100644 index 00000000..4a830b0e --- /dev/null +++ b/internal/domain/task/__init__.py @@ -0,0 +1,2 @@ +from internal.domain.task.entities import FdTask 
+from internal.domain.task.entities import AfdTask diff --git a/internal/domain/task/entities/__init__.py b/internal/domain/task/entities/__init__.py new file mode 100644 index 00000000..87feb827 --- /dev/null +++ b/internal/domain/task/entities/__init__.py @@ -0,0 +1,2 @@ +from internal.domain.task.entities.fd import FdTask +from internal.domain.task.entities.afd import AfdTask diff --git a/internal/domain/task/entities/afd/__init__.py b/internal/domain/task/entities/afd/__init__.py new file mode 100644 index 00000000..5ea101b1 --- /dev/null +++ b/internal/domain/task/entities/afd/__init__.py @@ -0,0 +1 @@ +from internal.domain.task.entities.afd.afd_task import AfdTask diff --git a/internal/domain/task/entities/afd/afd_task.py b/internal/domain/task/entities/afd/afd_task.py new file mode 100644 index 00000000..40cac1c8 --- /dev/null +++ b/internal/domain/task/entities/afd/afd_task.py @@ -0,0 +1,24 @@ +from typing import assert_never +from desbordante.fd import FdAlgorithm # This is not a typo +from desbordante.afd.algorithms import Pyro, Tane + +from internal.domain.task.entities.task import Task +from internal.domain.task.value_objects import PrimitiveName + +from internal.domain.task.value_objects.afd import AfdTaskResult, AfdTaskConfig +from internal.domain.task.value_objects.afd import AfdAlgoName, AfdAlgoResult, FdModel + + +class AfdTask(Task[AfdTaskConfig, AfdTaskResult]): + def _collect_result(self, algo: FdAlgorithm) -> AfdTaskResult: + fds = algo.get_fds() + algo_result = AfdAlgoResult(fds=list(map(FdModel.from_fd, fds))) + return AfdTaskResult(primitive_name=PrimitiveName.afd, result=algo_result) + + def _match_algo_by_name(self, algo_name: AfdAlgoName) -> FdAlgorithm: + match algo_name: + case AfdAlgoName.Pyro: + return Pyro() + case AfdAlgoName.Tane: + return Tane() + assert_never(algo_name) diff --git a/internal/domain/task/entities/fd/__init__.py b/internal/domain/task/entities/fd/__init__.py new file mode 100644 index 00000000..220ae96a --- 
/dev/null +++ b/internal/domain/task/entities/fd/__init__.py @@ -0,0 +1 @@ +from internal.domain.task.entities.fd.fd_task import FdTask diff --git a/internal/domain/task/entities/fd/fd_task.py b/internal/domain/task/entities/fd/fd_task.py new file mode 100644 index 00000000..1c96731d --- /dev/null +++ b/internal/domain/task/entities/fd/fd_task.py @@ -0,0 +1,51 @@ +from typing import assert_never +from desbordante.fd import FdAlgorithm +from desbordante.fd.algorithms import ( + Aid, + DFD, + Depminer, + FDep, + FUN, + FastFDs, + FdMine, + HyFD, + Pyro, + Tane, +) + + +from internal.domain.task.entities.task import Task +from internal.domain.task.value_objects import PrimitiveName +from internal.domain.task.value_objects.fd import FdTaskConfig, FdTaskResult +from internal.domain.task.value_objects.fd import FdAlgoName, FdModel, FdAlgoResult + + +class FdTask(Task[FdTaskConfig, FdTaskResult]): + def _collect_result(self, algo: FdAlgorithm) -> FdTaskResult: + fds = algo.get_fds() + algo_result = FdAlgoResult(fds=list(map(FdModel.from_fd, fds))) + return FdTaskResult(primitive_name=PrimitiveName.fd, result=algo_result) + + def _match_algo_by_name(self, algo_name: FdAlgoName) -> FdAlgorithm: + match algo_name: + case FdAlgoName.Aid: + return Aid() + case FdAlgoName.DFD: + return DFD() + case FdAlgoName.Depminer: + return Depminer() + case FdAlgoName.FDep: + return FDep() + case FdAlgoName.FUN: + return FUN() + case FdAlgoName.FastFDs: + return FastFDs() + case FdAlgoName.FdMine: + return FdMine() + case FdAlgoName.HyFD: + return HyFD() + case FdAlgoName.Pyro: + return Pyro() + case FdAlgoName.Tane: + return Tane() + assert_never(algo_name) diff --git a/internal/domain/task/entities/task.py b/internal/domain/task/entities/task.py new file mode 100644 index 00000000..a60cefbf --- /dev/null +++ b/internal/domain/task/entities/task.py @@ -0,0 +1,21 @@ +from abc import ABC, abstractmethod +import desbordante +import pandas +from internal.domain.task.value_objects import 
TaskConfig +from internal.domain.task.value_objects import TaskResult + + +class Task[C: TaskConfig, R: TaskResult](ABC): + @abstractmethod + def _match_algo_by_name(self, algo_name) -> desbordante.Algorithm: ... + + @abstractmethod + def _collect_result(self, algo) -> R: ... + + def execute(self, table: pandas.DataFrame, task_config: C) -> R: + algo_config = task_config.config + options = algo_config.model_dump(exclude_unset=True, exclude={"algo_name"}) + algo = self._match_algo_by_name(algo_config.algo_name) + algo.load_data(table=table) + algo.execute(**options) + return self._collect_result(algo) diff --git a/internal/domain/task/value_objects/__init__.py b/internal/domain/task/value_objects/__init__.py new file mode 100644 index 00000000..53ce68f9 --- /dev/null +++ b/internal/domain/task/value_objects/__init__.py @@ -0,0 +1,41 @@ +from enum import StrEnum, auto +from typing import Annotated, Union +from pydantic import Field + +from internal.domain.task.value_objects.afd import AfdTaskConfig, AfdTaskResult +from internal.domain.task.value_objects.fd import FdTaskConfig, FdTaskResult + +from internal.domain.task.value_objects.config import TaskConfig +from internal.domain.task.value_objects.result import TaskResult + +from internal.domain.task.value_objects.primitive_name import PrimitiveName + +class TaskStatus(StrEnum): + FAILED = auto() + CREATED = auto() + RUNNING = auto() + COMPLETED = auto() + + +class TaskFailureReason(StrEnum): + MEMORY_LIMIT_EXCEEDED = auto() + TIME_LIMIT_EXCEEDED = auto() + WORKER_KILLED_BY_SIGNAL = auto() + OTHER = auto() + + +OneOfTaskConfig = Annotated[ + Union[ + FdTaskConfig, + AfdTaskConfig, + ], + Field(discriminator="primitive_name"), +] + +OneOfTaskResult = Annotated[ + Union[ + FdTaskResult, + AfdTaskResult, + ], + Field(discriminator="primitive_name"), +] diff --git a/internal/domain/task/value_objects/afd/__init__.py b/internal/domain/task/value_objects/afd/__init__.py new file mode 100644 index 00000000..f8705e15 --- 
/dev/null +++ b/internal/domain/task/value_objects/afd/__init__.py @@ -0,0 +1,20 @@ +from typing import Literal + +from pydantic import BaseModel + +from internal.domain.task.value_objects.afd.algo_config import OneOfAfdConfig +from internal.domain.task.value_objects.afd.algo_result import AfdAlgoResult, FdModel +from internal.domain.task.value_objects.afd.algo_name import AfdAlgoName +from internal.domain.task.value_objects.primitive_name import PrimitiveName + + +class BaseAfdTaskModel(BaseModel): + primitive_name: Literal[PrimitiveName.afd] + + +class AfdTaskConfig(BaseAfdTaskModel): + config: OneOfAfdConfig + + +class AfdTaskResult(BaseAfdTaskModel): + result: AfdAlgoResult diff --git a/internal/domain/task/value_objects/afd/algo_config.py b/internal/domain/task/value_objects/afd/algo_config.py new file mode 100644 index 00000000..beb04c21 --- /dev/null +++ b/internal/domain/task/value_objects/afd/algo_config.py @@ -0,0 +1,38 @@ +from typing import Literal, Annotated, Union +from pydantic import Field + +from internal.domain.common import OptionalModel +from internal.domain.task.value_objects.afd.algo_name import AfdAlgoName + + +class BaseAfdConfig(OptionalModel): + __non_optional_fields__ = { + "algo_name", + } + + +class PyroConfig(BaseAfdConfig): + algo_name: Literal[AfdAlgoName.Pyro] + + is_null_equal_null: bool + error: Annotated[float, Field(ge=0, le=1)] + max_lhs: Annotated[int, Field(ge=1, le=10)] + threads: Annotated[int, Field(ge=1, le=8)] + seed: int + + +class TaneConfig(BaseAfdConfig): + algo_name: Literal[AfdAlgoName.Tane] + + is_null_equal_null: bool + error: Annotated[float, Field(ge=0, le=1)] + max_lhs: Annotated[int, Field(ge=1, le=10)] + + +OneOfAfdConfig = Annotated[ + Union[ + PyroConfig, + TaneConfig, + ], + Field(discriminator="algo_name"), +] diff --git a/internal/domain/task/value_objects/afd/algo_name.py b/internal/domain/task/value_objects/afd/algo_name.py new file mode 100644 index 00000000..5a55ccb6 --- /dev/null +++ 
b/internal/domain/task/value_objects/afd/algo_name.py @@ -0,0 +1,6 @@ +from enum import StrEnum, auto + + +class AfdAlgoName(StrEnum): + Pyro = auto() + Tane = auto() diff --git a/internal/domain/task/value_objects/afd/algo_result.py b/internal/domain/task/value_objects/afd/algo_result.py new file mode 100644 index 00000000..afc32ce0 --- /dev/null +++ b/internal/domain/task/value_objects/afd/algo_result.py @@ -0,0 +1,4 @@ +from internal.domain.task.value_objects.fd.algo_result import FdAlgoResult, FdModel + +AfdAlgoResult = FdAlgoResult +FdModel = FdModel diff --git a/internal/domain/task/value_objects/config.py b/internal/domain/task/value_objects/config.py new file mode 100644 index 00000000..20f23f7e --- /dev/null +++ b/internal/domain/task/value_objects/config.py @@ -0,0 +1,23 @@ +from enum import StrEnum +from typing import Protocol + +from pydantic import BaseModel + + +class AlgoConfig(Protocol): + @property + def algo_name(self) -> StrEnum: ... + + # forces to use pydantic classes there + model_dump = BaseModel.model_dump + + +class TaskConfig(Protocol): + @property + def primitive_name(self) -> StrEnum: ... + + @property + def config(self) -> AlgoConfig: ... 
+ + # forces to use pydantic classes there + model_dump = BaseModel.model_dump diff --git a/internal/domain/task/value_objects/fd/__init__.py b/internal/domain/task/value_objects/fd/__init__.py new file mode 100644 index 00000000..8c3a3f94 --- /dev/null +++ b/internal/domain/task/value_objects/fd/__init__.py @@ -0,0 +1,20 @@ +from typing import Literal + +from pydantic import BaseModel + +from internal.domain.task.value_objects.primitive_name import PrimitiveName +from internal.domain.task.value_objects.fd.algo_config import OneOfFdAlgoConfig +from internal.domain.task.value_objects.fd.algo_result import FdAlgoResult, FdModel +from internal.domain.task.value_objects.fd.algo_name import FdAlgoName + + +class BaseFdTaskModel(BaseModel): + primitive_name: Literal[PrimitiveName.fd] + + +class FdTaskConfig(BaseFdTaskModel): + config: OneOfFdAlgoConfig + + +class FdTaskResult(BaseFdTaskModel): + result: FdAlgoResult diff --git a/internal/domain/task/value_objects/fd/algo_config.py b/internal/domain/task/value_objects/fd/algo_config.py new file mode 100644 index 00000000..da396473 --- /dev/null +++ b/internal/domain/task/value_objects/fd/algo_config.py @@ -0,0 +1,98 @@ +from typing import Literal, Annotated, Union + +from pydantic import Field + +from internal.domain.common import OptionalModel +from internal.domain.task.value_objects.fd.algo_name import FdAlgoName + + +class BaseFdConfig(OptionalModel): + __non_optional_fields__ = { + "algo_name", + } + + +class AidConfig(BaseFdConfig): + algo_name: Literal[FdAlgoName.Aid] + + is_null_equal_null: bool + + +class DFDConfig(BaseFdConfig): + algo_name: Literal[FdAlgoName.DFD] + + is_null_equal_null: bool + threads: Annotated[int, Field(ge=1, le=8)] + + +class DepminerConfig(BaseFdConfig): + algo_name: Literal[FdAlgoName.Depminer] + + is_null_equal_null: bool + + +class FDepConfig(BaseFdConfig): + algo_name: Literal[FdAlgoName.FDep] + + is_null_equal_null: bool + + +class FUNConfig(BaseFdConfig): + algo_name: 
Literal[FdAlgoName.FUN] + + is_null_equal_null: bool + + +class FastFDsConfig(BaseFdConfig): + algo_name: Literal[FdAlgoName.FastFDs] + + is_null_equal_null: bool + max_lhs: Annotated[int, Field(ge=1, le=10)] + threads: Annotated[int, Field(ge=1, le=8)] + + +class FdMineConfig(BaseFdConfig): + algo_name: Literal[FdAlgoName.FdMine] + + is_null_equal_null: bool + + +class HyFDConfig(BaseFdConfig): + algo_name: Literal[FdAlgoName.HyFD] + + is_null_equal_null: bool + + +class PyroConfig(BaseFdConfig): + algo_name: Literal[FdAlgoName.Pyro] + + is_null_equal_null: bool + error: Annotated[float, Field(ge=0, le=1)] + max_lhs: Annotated[int, Field(ge=1, le=10)] + threads: Annotated[int, Field(ge=1, le=8)] + seed: int + + +class TaneConfig(BaseFdConfig): + algo_name: Literal[FdAlgoName.Tane] + + is_null_equal_null: bool + error: Annotated[float, Field(ge=0, le=1)] + max_lhs: Annotated[int, Field(ge=1, le=10)] + + +OneOfFdAlgoConfig = Annotated[ + Union[ + AidConfig, + DFDConfig, + DepminerConfig, + FDepConfig, + FUNConfig, + FastFDsConfig, + FdMineConfig, + HyFDConfig, + PyroConfig, + TaneConfig, + ], + Field(discriminator="algo_name"), +] diff --git a/internal/domain/task/value_objects/fd/algo_name.py b/internal/domain/task/value_objects/fd/algo_name.py new file mode 100644 index 00000000..93514588 --- /dev/null +++ b/internal/domain/task/value_objects/fd/algo_name.py @@ -0,0 +1,14 @@ +from enum import StrEnum, auto + + +class FdAlgoName(StrEnum): + Aid = auto() + DFD = auto() + Depminer = auto() + FDep = auto() + FUN = auto() + FastFDs = auto() + FdMine = auto() + HyFD = auto() + Pyro = auto() + Tane = auto() diff --git a/internal/domain/task/value_objects/fd/algo_result.py b/internal/domain/task/value_objects/fd/algo_result.py new file mode 100644 index 00000000..d38d2f0e --- /dev/null +++ b/internal/domain/task/value_objects/fd/algo_result.py @@ -0,0 +1,15 @@ +from pydantic import BaseModel +from desbordante.fd import FD + + +class FdModel(BaseModel): + @classmethod + 
def from_fd(cls, fd: FD): + return cls(lhs_indices=fd.lhs_indices, rhs_index=fd.rhs_index) + + lhs_indices: list[int] + rhs_index: int + + +class FdAlgoResult(BaseModel): + fds: list[FdModel] diff --git a/internal/domain/task/value_objects/primitive_name.py b/internal/domain/task/value_objects/primitive_name.py new file mode 100644 index 00000000..1959867a --- /dev/null +++ b/internal/domain/task/value_objects/primitive_name.py @@ -0,0 +1,13 @@ +from enum import StrEnum, auto + + +class PrimitiveName(StrEnum): + fd = auto() + afd = auto() + # ar = auto() + # ac = auto() + # fd_verification = auto() + # mfd_verification = auto() + # statistics = auto() + # ucc = auto() + # ucc_verification = auto() diff --git a/internal/domain/task/value_objects/result.py b/internal/domain/task/value_objects/result.py new file mode 100644 index 00000000..8d57fbbf --- /dev/null +++ b/internal/domain/task/value_objects/result.py @@ -0,0 +1,13 @@ +from enum import StrEnum +from typing import Protocol, Any +from pydantic import BaseModel + + +class TaskResult(Protocol): + @property + def primitive_name(self) -> StrEnum: ... 
+ + result: Any + + # forces to use pydantic classes there + model_dump = BaseModel.model_dump diff --git a/tests/domain/task/test_fd.py b/tests/domain/task/test_fd.py index 9e6e5c7e..0919cc3e 100644 --- a/tests/domain/task/test_fd.py +++ b/tests/domain/task/test_fd.py @@ -1,11 +1,11 @@ -from app.domain.task.fd import FdTask, FdTaskConfig import pytest import pandas as pd import logging from polyfactory.factories.pydantic_factory import ModelFactory -from app.domain.task.fd.algo_name import FdAlgoName -from app.domain.task.primitive_name import PrimitiveName +from internal.domain.task import FdTask +from internal.domain.task.value_objects import FdTaskConfig, PrimitiveName +from internal.domain.task.value_objects.fd import FdAlgoName @pytest.mark.parametrize("algo_name", [algo_name.value for algo_name in FdAlgoName]) From 3fb52eb8a7e520cd246b2fb94bfd896324fcb232 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 13 Sep 2024 00:18:27 +0300 Subject: [PATCH 007/153] chore: change imports to absolute ones --- internal/domain/common/__init__.py | 2 +- internal/domain/file/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/domain/common/__init__.py b/internal/domain/common/__init__.py index 6ca710f4..f24fd5b4 100644 --- a/internal/domain/common/__init__.py +++ b/internal/domain/common/__init__.py @@ -1 +1 @@ -from .optional_model import OptionalModel +from internal.domain.common.optional_model import OptionalModel diff --git a/internal/domain/file/__init__.py b/internal/domain/file/__init__.py index 401f3300..4a4654ca 100644 --- a/internal/domain/file/__init__.py +++ b/internal/domain/file/__init__.py @@ -1 +1 @@ -from .file import File +from internal.domain.file.file import File From 2c397d839fa90e07fc7c44904613c66c3409d59e Mon Sep 17 00:00:00 2001 From: raf-nr Date: Tue, 17 Sep 2024 17:47:27 +0300 Subject: [PATCH 008/153] chore(domain): add empty user submodule to domain module --- internal/domain/user/README.md | 2 ++ 
internal/domain/user/__init__.py | 0 2 files changed, 2 insertions(+) create mode 100644 internal/domain/user/README.md create mode 100644 internal/domain/user/__init__.py diff --git a/internal/domain/user/README.md b/internal/domain/user/README.md new file mode 100644 index 00000000..15f8cbb6 --- /dev/null +++ b/internal/domain/user/README.md @@ -0,0 +1,2 @@ +## domain.user module +This module provides the foundational components for managing user entities and related functionalities within an application. It includes classes and configurations for user settings, authorization, and other user-specific features. diff --git a/internal/domain/user/__init__.py b/internal/domain/user/__init__.py new file mode 100644 index 00000000..e69de29b From 9fbfed1fcef4f67ac647c7895192c2f3b963cd3e Mon Sep 17 00:00:00 2001 From: raf-nr Date: Tue, 17 Sep 2024 17:53:33 +0300 Subject: [PATCH 009/153] fix(domain): add handling of incorrect case in optional model --- internal/domain/common/optional_model.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/internal/domain/common/optional_model.py b/internal/domain/common/optional_model.py index 7ccdfac4..8f25940f 100644 --- a/internal/domain/common/optional_model.py +++ b/internal/domain/common/optional_model.py @@ -7,12 +7,11 @@ class OptionalModel(BaseModel): """ A base model class that automatically sets all fields, except those defined in `__non_optional_fields__`, to `None` by default. This allows for the creation - of models where fields are optional unless explicitly marked as required. + of model where fields are optional unless explicitly marked as required. Attributes: __non_optional_fields__ (set): A set of field names that should remain non-optional. Fields listed here will not have `None` as their default value. 
- """ __non_optional_fields__ = set() @@ -25,13 +24,17 @@ def __pydantic_init_subclass__(cls, **kwargs: Any) -> None: Args: **kwargs: Arbitrary keyword arguments passed to the superclass initializer. + Exceptions: + ValueError: If a field listed in `__non_optional_fields__` has a default value of `None`. """ super().__pydantic_init_subclass__(**kwargs) for field_name, value in cls.model_fields.items(): if field_name in cls.__non_optional_fields__: if value.default is None: - raise ValueError(f"Field '{field_name}' is in __non_optional_fields__ but has a default value of None.") + raise ValueError( + f"Field '{field_name}' is in __non_optional_fields__ but has a default value of None." + ) continue value.default = None From 4e929ea2a95d4460229b4c5f9a752a16873e055b Mon Sep 17 00:00:00 2001 From: raf-nr Date: Tue, 17 Sep 2024 18:08:46 +0300 Subject: [PATCH 010/153] chore(domain): add code documentation for tasks and file --- internal/domain/file/file.py | 8 +-- internal/domain/task/entities/afd/afd_task.py | 31 ++++++++++- internal/domain/task/entities/fd/fd_task.py | 31 ++++++++++- internal/domain/task/entities/task.py | 55 +++++++++++++++++-- internal/domain/task/value_objects/config.py | 38 ++++++++++++- internal/domain/task/value_objects/result.py | 16 +++++- 6 files changed, 163 insertions(+), 16 deletions(-) diff --git a/internal/domain/file/file.py b/internal/domain/file/file.py index b3e9e3a1..9e70a7fb 100644 --- a/internal/domain/file/file.py +++ b/internal/domain/file/file.py @@ -8,15 +8,13 @@ class File: def __init__(self): """ - Initializes a new file instance with a unique UUID as the file's name. + This constructor generates a new UUID and assigns it as the file's name. """ self._name = uuid4() @property def name(self) -> str: """ - Returns the file's UUID as a string. - Returns: str: The UUID of the file in string format. 
""" @@ -25,9 +23,7 @@ def name(self) -> str: @property def name_as_uuid(self) -> UUID: """ - Returns the file's UUID as a UUID object. - Returns: - UUID: The UUID of the file in UUID format. + UUID: The file's name as a UUID object. """ return self._name diff --git a/internal/domain/task/entities/afd/afd_task.py b/internal/domain/task/entities/afd/afd_task.py index 40cac1c8..381a4499 100644 --- a/internal/domain/task/entities/afd/afd_task.py +++ b/internal/domain/task/entities/afd/afd_task.py @@ -8,14 +8,43 @@ from internal.domain.task.value_objects.afd import AfdTaskResult, AfdTaskConfig from internal.domain.task.value_objects.afd import AfdAlgoName, AfdAlgoResult, FdModel - class AfdTask(Task[AfdTaskConfig, AfdTaskResult]): + """ + Task class for Approximate Functional Dependency (AFD) profiling. + + This class manages the execution of AFD algorithms and processes + the results into the appropriate format. It implements the abstract methods + defined in the Task base class. + + Methods: + - _match_algo_by_name(algo_name: AfdAlgoName) -> FdAlgorithm: + Match AFD algorithm by its name. + - _collect_result(algo: FdAlgorithm) -> AfdTaskResult: + Process the output of the AFD algorithm and return the result. + """ + def _collect_result(self, algo: FdAlgorithm) -> AfdTaskResult: + """ + Collect and process the AFD result. + + Args: + algo (FdAlgorithm): The executed AFD algorithm. + Returns: + AfdTaskResult: The processed result containing approximate functional dependencies. + """ fds = algo.get_fds() algo_result = AfdAlgoResult(fds=list(map(FdModel.from_fd, fds))) return AfdTaskResult(primitive_name=PrimitiveName.afd, result=algo_result) def _match_algo_by_name(self, algo_name: AfdAlgoName) -> FdAlgorithm: + """ + Match the approximate functional dependency algorithm by name. + + Args: + algo_name (AfdAlgoName): The name of the AFD algorithm. + Returns: + FdAlgorithm: The corresponding algorithm instance. 
+ """ match algo_name: case AfdAlgoName.Pyro: return Pyro() diff --git a/internal/domain/task/entities/fd/fd_task.py b/internal/domain/task/entities/fd/fd_task.py index 1c96731d..245f483d 100644 --- a/internal/domain/task/entities/fd/fd_task.py +++ b/internal/domain/task/entities/fd/fd_task.py @@ -13,7 +13,6 @@ Tane, ) - from internal.domain.task.entities.task import Task from internal.domain.task.value_objects import PrimitiveName from internal.domain.task.value_objects.fd import FdTaskConfig, FdTaskResult @@ -21,12 +20,42 @@ class FdTask(Task[FdTaskConfig, FdTaskResult]): + """ + Task class for Functional Dependency (FD) profiling. + + This class handles the execution of different FD algorithms and processes + the results into the appropriate format. It implements the abstract methods + defined in the Task base class. + + Methods: + - _match_algo_by_name(algo_name: FdAlgoName) -> FdAlgorithm: + Match FD algorithm by its name. + - _collect_result(algo: FdAlgorithm) -> FdTaskResult: + Process the output of the FD algorithm and return the result. + """ + def _collect_result(self, algo: FdAlgorithm) -> FdTaskResult: + """ + Collect and process the FD result. + + Args: + algo (FdAlgorithm): FD algorithm to process. + Returns: + FdTaskResult: The processed result containing functional dependencies. + """ fds = algo.get_fds() algo_result = FdAlgoResult(fds=list(map(FdModel.from_fd, fds))) return FdTaskResult(primitive_name=PrimitiveName.fd, result=algo_result) def _match_algo_by_name(self, algo_name: FdAlgoName) -> FdAlgorithm: + """ + Match the functional dependency algorithm by name. + + Args: + algo_name (FdAlgoName): The name of the FD algorithm. + Returns: + FdAlgorithm: The corresponding algorithm instance. 
+ """ match algo_name: case FdAlgoName.Aid: return Aid() diff --git a/internal/domain/task/entities/task.py b/internal/domain/task/entities/task.py index a60cefbf..c16d0faf 100644 --- a/internal/domain/task/entities/task.py +++ b/internal/domain/task/entities/task.py @@ -1,18 +1,65 @@ from abc import ABC, abstractmethod import desbordante import pandas -from internal.domain.task.value_objects import TaskConfig -from internal.domain.task.value_objects import TaskResult +from internal.domain.task.value_objects import TaskConfig, TaskResult class Task[C: TaskConfig, R: TaskResult](ABC): + """ + Abstract base class for data profiling tasks. + + This class defines the structure for executing algorithms and processing + their results. Specific task types, such as functional dependency tasks, + should inherit from this class and implement the necessary methods. + + Type Parameters: + - C: TaskConfig - Configuration object that defines the algorithm settings. + - R: TaskResult - Result object that will store the output of the algorithm. + + Methods: + - _match_algo_by_name(algo_name): Match the algorithm by its name. + - _collect_result(algo): Collect and process the result of the algorithm. + - execute(table: pandas.DataFrame, task_config: C): Execute the task + on a given table with the provided configuration. + """ + + @abstractmethod - def _match_algo_by_name(self, algo_name) -> desbordante.Algorithm: ... + def _match_algo_by_name(self, algo_name: str) -> desbordante.Algorithm: + """ + Match and return the algorithm instance based on its name. + + Args: + algo_name (str): Name of the algorithm to match. + Returns: + desbordante.Algorithm: Algorithm instance. + """ + pass + @abstractmethod - def _collect_result(self, algo) -> R: ... + def _collect_result(self, algo: desbordante.Algorithm) -> R: + """ + Collect and process the result from the executed algorithm. + + Args: + algo (desbordante.Algorithm): Algorithm instance. 
+ Returns: + TaskResult: The task result containing the processed output. + """ + pass + def execute(self, table: pandas.DataFrame, task_config: C) -> R: + """ + Execute the algorithm on the provided data table. + + Args: + table (pandas.DataFrame): Data to be processed. + task_config (TaskConfig): Configuration object that defines the algorithm settings. + Returns: + TaskResult: The task result containing the processed output. + """ algo_config = task_config.config options = algo_config.model_dump(exclude_unset=True, exclude={"algo_name"}) algo = self._match_algo_by_name(algo_config.algo_name) diff --git a/internal/domain/task/value_objects/config.py b/internal/domain/task/value_objects/config.py index 20f23f7e..642f3523 100644 --- a/internal/domain/task/value_objects/config.py +++ b/internal/domain/task/value_objects/config.py @@ -5,19 +5,51 @@ class AlgoConfig(Protocol): + """ + Protocol for algorithm configuration. + + This protocol enforces that any implementing class must: + - Have a property `algo_name` that returns a `StrEnum`. + - Use Pydantic's `model_dump` method for serializing the model. + """ + @property - def algo_name(self) -> StrEnum: ... + def algo_name(self) -> StrEnum: + """ + Returns: + str: The name of the algorithm. + """ + ... # forces to use pydantic classes there model_dump = BaseModel.model_dump class TaskConfig(Protocol): + """ + Protocol for task configuration. + + This protocol enforces that any implementing class must: + - Have a property `primitive_name` that returns a `StrEnum` representing the task type. + - Have a property `config` that returns an `AlgoConfig`. + - Use Pydantic's `model_dump` method for serializing the model. + """ + @property - def primitive_name(self) -> StrEnum: ... + def primitive_name(self) -> StrEnum: + """ + Returns: + str: The name of the primitive associated with the task. + """ + ... @property - def config(self) -> AlgoConfig: ... 
+ def config(self) -> AlgoConfig: + """ + Returns: + AlgoConfig: the algorithm configuration associated with the task. + """ + ... # forces to use pydantic classes there model_dump = BaseModel.model_dump diff --git a/internal/domain/task/value_objects/result.py b/internal/domain/task/value_objects/result.py index 8d57fbbf..abc73a54 100644 --- a/internal/domain/task/value_objects/result.py +++ b/internal/domain/task/value_objects/result.py @@ -4,8 +4,22 @@ class TaskResult(Protocol): + """ + Protocol for task result. + + This protocol enforces that any implementing class must: + - Have a property `primitive_name` that returns a `StrEnum` representing the task type. + - Include a `result` field which can be any data type. + - Use Pydantic's `model_dump` method for serializing the result. + """ + @property - def primitive_name(self) -> StrEnum: ... + def primitive_name(self) -> StrEnum: + """ + Returns: + str: The name of the primitive associated with the task result. + """ + ... result: Any From bac652ed8b49ee74cea21a299fd966755ab4922a Mon Sep 17 00:00:00 2001 From: raf-nr Date: Tue, 17 Sep 2024 18:09:46 +0300 Subject: [PATCH 011/153] chore(domain): add README files for domain modules --- internal/domain/README.md | 22 +++++++++++++ internal/domain/__init__.py | 0 internal/domain/task/README.md | 59 ++++++++++++++++++++++++++++++++++ 3 files changed, 81 insertions(+) create mode 100644 internal/domain/README.md create mode 100644 internal/domain/__init__.py diff --git a/internal/domain/README.md b/internal/domain/README.md new file mode 100644 index 00000000..56ba0df1 --- /dev/null +++ b/internal/domain/README.md @@ -0,0 +1,22 @@ +# domain module +The `domain` module serves as the core component of the application, encapsulating the fundamental domain logic and entities. + +## Purpose +The `domain` module is designed to encapsulate the core business logic and domain entities of the application. 
By organizing the module into submodules, the architecture maintains a clear separation of concerns, promoting scalability and ease of maintenance. + +To extend or modify the domain logic, developers can add new submodules or enhance existing ones while adhering to the principles of clean architecture. + + +## Submodules + +### Common +The `common` submodule contains shared components and utilities used across other submodules. This includes base models and common logic that can be leveraged by user, task, and file entities. + +### User +The `user` submodule manages user-related entities and functionalities. It includes classes and configurations for handling user settings, authentication, and authorization, centralizing user management within the domain. + +### Task +The `task` submodule provides the foundation for data profiling tasks. It includes abstract base classes and concrete implementations for different profiling algorithms. + +### File +The `file` submodule handles file and dataset-related entities and operations. diff --git a/internal/domain/__init__.py b/internal/domain/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/domain/task/README.md b/internal/domain/task/README.md index e69de29b..a8a0f6b8 100644 --- a/internal/domain/task/README.md +++ b/internal/domain/task/README.md @@ -0,0 +1,59 @@ +# domain.task module + +This module provides the foundational components for defining and executing data profiling tasks. It includes abstract base classes for tasks and specific implementations for different types of algorithms. + +## Usage + +### Task Entity + +The `Task` class is an abstract base class that serves as the foundation for specific data profiling tasks. It defines the essential methods for executing algorithms and handling their results. + +#### Methods: +- **`_match_algo_by_name(algo_name)`**: This method is responsible for matching and returning the appropriate algorithm instance based on the given algorithm name. 
This method must be implemented by subclasses to handle specific algorithms. + +- **`_collect_result(algo)`**: This method processes the result obtained from the executed algorithm and returns it in a standardized format. It must be implemented by subclasses to handle the result processing specific to each task type. + +- **`execute(table: pandas.DataFrame, task_config: C) -> R`**: This method runs the algorithm on the provided data table using the given configuration and returns the result. It orchestrates the workflow of loading data, executing the algorithm, and collecting results. + +The `Task` class is designed to be extended by specific task implementations. +To add a new primitive to an application, implement a schema for the configuration and result of task execution, and then inherit from Task class, implementing methods **`_match_algo_by_name(algo_name)`** and **`_collect_result(algo)`**. + +### FD Task Entity + +The `FdTask` class is a specific implementation of the `Task` class designed for Functional Dependency (FD) profiling. + +The `FdTask` class enables the execution of different FD algorithms and processes their results into the appropriate format for further use. + +#### Example: +```python +from internal.domain.task import FdTask +from internal.domain.task.value_objects import FdTaskConfig, PrimitiveName +from internal.domain.task.value_objects.fd import FdAlgoName +from some_storage import table # read dataset + +task = FdTask() +config = FdTaskConfig( + primitive_name=PrimitiveName.fd, + config={"algo_name": FdAlgoName.FdMine}, +) +result = task.execute(table, config) +``` + +### AFD Task Entity +The `AfdTask` class is a specific implementation of the `Task` class designed for Approximate Functional Dependency (AFD) profiling. +All capabilities are similar to the previous one, just for AFD. 
+#### Example: +```python +from internal.domain.task import AfdTask +from internal.domain.task.value_objects import AfdTaskConfig, PrimitiveName +from internal.domain.task.value_objects.afd import AfdAlgoName +from some_storage import table # read dataset + +task = AfdTask() +config = AfdTaskConfig( + primitive_name=PrimitiveName.afd, + config={"algo_name": AfdAlgoName.Pyro}, +) +result = task.execute(table, config) + +``` From 11319b742fa6afc13fa891077a92a59646029150 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Tue, 17 Sep 2024 18:15:44 +0300 Subject: [PATCH 012/153] chore(use case): init use case module, exception and dto submodules --- internal/usecase/__init__.py | 0 internal/usecase/dto/__init__.py | 0 internal/usecase/exception/__init__.py | 2 ++ 3 files changed, 2 insertions(+) create mode 100644 internal/usecase/__init__.py create mode 100644 internal/usecase/dto/__init__.py create mode 100644 internal/usecase/exception/__init__.py diff --git a/internal/usecase/__init__.py b/internal/usecase/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/usecase/dto/__init__.py b/internal/usecase/dto/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/usecase/exception/__init__.py b/internal/usecase/exception/__init__.py new file mode 100644 index 00000000..250a44ca --- /dev/null +++ b/internal/usecase/exception/__init__.py @@ -0,0 +1,2 @@ +from internal.usecase.exception.file import IncorrectFileFormatException, DatasetNotFoundException +from internal.usecase.exception.task import TaskNotFoundException From 19b2f3ff5b05778370ab1dbda14d82db3e77e598 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Tue, 17 Sep 2024 18:16:33 +0300 Subject: [PATCH 013/153] chore(use case): init file, task and user use case modules --- internal/usecase/file/__init__.py | 0 internal/usecase/task/__init__.py | 0 internal/usecase/user/__init__.py | 0 3 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 
internal/usecase/file/__init__.py create mode 100644 internal/usecase/task/__init__.py create mode 100644 internal/usecase/user/__init__.py diff --git a/internal/usecase/file/__init__.py b/internal/usecase/file/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/usecase/task/__init__.py b/internal/usecase/task/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/usecase/user/__init__.py b/internal/usecase/user/__init__.py new file mode 100644 index 00000000..e69de29b From 9f60d7b0dc2b3800c1b23cfa559c53ec2e9f7f03 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Tue, 17 Sep 2024 18:20:54 +0300 Subject: [PATCH 014/153] chore: add pytest mock, pytest asyncio and cfgv to dependencies --- poetry.lock | 6551 +++++++++++++++++++++++++----------------------- pyproject.toml | 3 + 2 files changed, 3411 insertions(+), 3143 deletions(-) diff --git a/poetry.lock b/poetry.lock index 13dcc6bd..2466adc2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3143 +1,3408 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. - -[[package]] -name = "aiofiles" -version = "23.2.1" -description = "File support for asyncio." -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiofiles-23.2.1-py3-none-any.whl", hash = "sha256:19297512c647d4b27a2cf7c34caa7e405c0d60b5560618a29a9fe027b18b0107"}, - {file = "aiofiles-23.2.1.tar.gz", hash = "sha256:84ec2218d8419404abcb9f0c02df3f34c6e0a68ed41072acfb1cef5cbc29051a"}, -] - -[[package]] -name = "alembic" -version = "1.13.1" -description = "A database migration tool for SQLAlchemy." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, - {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, -] - -[package.dependencies] -Mako = "*" -SQLAlchemy = ">=1.3.0" -typing-extensions = ">=4" - -[package.extras] -tz = ["backports.zoneinfo"] - -[[package]] -name = "amqp" -version = "5.2.0" -description = "Low-level AMQP client for Python (fork of amqplib)." -optional = false -python-versions = ">=3.6" -files = [ - {file = "amqp-5.2.0-py3-none-any.whl", hash = "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637"}, - {file = "amqp-5.2.0.tar.gz", hash = "sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd"}, -] - -[package.dependencies] -vine = ">=5.0.0,<6.0.0" - -[[package]] -name = "annotated-types" -version = "0.7.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -files = [ - {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, - {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, -] - -[[package]] -name = "anyio" -version = "4.4.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.8" -files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, -] - -[package.dependencies] -idna = ">=2.8" -sniffio = ">=1.1" - -[package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = 
["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] - -[[package]] -name = "appnope" -version = "0.1.4" -description = "Disable App Nap on macOS >= 10.9" -optional = false -python-versions = ">=3.6" -files = [ - {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, - {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, -] - -[[package]] -name = "asttokens" -version = "2.4.1" -description = "Annotate AST trees with source code positions" -optional = false -python-versions = "*" -files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, -] - -[package.dependencies] -six = ">=1.12.0" - -[package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] - -[[package]] -name = "billiard" -version = "4.2.0" -description = "Python multiprocessing fork with improvements and bugfixes" -optional = false -python-versions = ">=3.7" -files = [ - {file = "billiard-4.2.0-py3-none-any.whl", hash = "sha256:07aa978b308f334ff8282bd4a746e681b3513db5c9a514cbdd810cbbdc19714d"}, - {file = "billiard-4.2.0.tar.gz", hash = "sha256:9a3c3184cb275aa17a732f93f65b20c525d3d9f253722d26a82194803ade5a2c"}, -] - -[[package]] -name = "black" -version = "24.4.2" -description = "The uncompromising code formatter." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, - {file = 
"black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, - {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, - {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, - {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, - {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, - {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, - {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, - {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "celery" -version = "5.4.0" -description = "Distributed Task Queue." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "celery-5.4.0-py3-none-any.whl", hash = "sha256:369631eb580cf8c51a82721ec538684994f8277637edde2dfc0dacd73ed97f64"}, - {file = "celery-5.4.0.tar.gz", hash = "sha256:504a19140e8d3029d5acad88330c541d4c3f64c789d85f94756762d8bca7e706"}, -] - -[package.dependencies] -billiard = ">=4.2.0,<5.0" -click = ">=8.1.2,<9.0" -click-didyoumean = ">=0.3.0" -click-plugins = ">=1.1.1" -click-repl = ">=0.2.0" -kombu = ">=5.3.4,<6.0" -pytest-celery = {version = ">=1.0.0", extras = ["all"], optional = true, markers = "extra == \"pytest\""} -python-dateutil = ">=2.8.2" -tzdata = ">=2022.7" -vine = ">=5.1.0,<6.0" - -[package.extras] -arangodb = ["pyArango (>=2.0.2)"] -auth = ["cryptography (==42.0.5)"] -azureblockblob = ["azure-storage-blob (>=12.15.0)"] -brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] -cassandra = ["cassandra-driver (>=3.25.0,<4)"] -consul = ["python-consul2 (==0.1.5)"] -cosmosdbsql = ["pydocumentdb (==2.3.5)"] -couchbase = ["couchbase (>=3.0.0)"] -couchdb = ["pycouchdb (==1.14.2)"] -django = ["Django (>=2.2.28)"] -dynamodb = ["boto3 (>=1.26.143)"] -elasticsearch = ["elastic-transport (<=8.13.0)", "elasticsearch (<=8.13.0)"] -eventlet = ["eventlet (>=0.32.0)"] -gcs = ["google-cloud-storage (>=2.10.0)"] -gevent = ["gevent (>=1.5.0)"] -librabbitmq = ["librabbitmq (>=2.0.0)"] -memcache = ["pylibmc (==1.6.3)"] -mongodb = ["pymongo[srv] (>=4.0.2)"] -msgpack = ["msgpack (==1.0.8)"] -pymemcache = ["python-memcached (>=1.61)"] -pyro = ["pyro4 (==4.82)"] -pytest = ["pytest-celery[all] (>=1.0.0)"] -redis = ["redis (>=4.5.2,!=4.5.5,<6.0.0)"] -s3 = ["boto3 (>=1.26.143)"] -slmq = ["softlayer-messaging (>=1.0.3)"] -solar = ["ephem (==4.1.5)"] -sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] -sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.3.4)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] -tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] -yaml = ["PyYAML (>=3.10)"] -zookeeper = ["kazoo (>=1.3.1)"] -zstd = ["zstandard 
(==0.22.0)"] - -[[package]] -name = "celery-types" -version = "0.22.0" -description = "Type stubs for Celery and its related packages" -optional = false -python-versions = ">=3.9,<4.0" -files = [ - {file = "celery_types-0.22.0-py3-none-any.whl", hash = "sha256:79a66637d1d6af5992d1dc80259d9538869941325e966006f1e795220519b9ac"}, - {file = "celery_types-0.22.0.tar.gz", hash = "sha256:0ecad2fa5a6eded0a1f919e5e1e381cc2ff0635fe4b21db53b4661b6876d5b30"}, -] - -[package.dependencies] -typing-extensions = ">=4.9.0,<5.0.0" - -[[package]] -name = "certifi" -version = "2024.6.2" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, - {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, -] - -[[package]] -name = "cffi" -version = "1.16.0" -description = "Foreign Function Interface for Python calling C code." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = 
"cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "cfgv" -version = "3.4.0" -description = "Validate configuration and produce human readable error messages." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, - {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash 
= "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - 
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "click" -version = "8.1.7" 
-description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "click-didyoumean" -version = "0.3.1" -description = "Enables git-like *did-you-mean* feature in click" -optional = false -python-versions = ">=3.6.2" -files = [ - {file = "click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c"}, - {file = "click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463"}, -] - -[package.dependencies] -click = ">=7" - -[[package]] -name = "click-plugins" -version = "1.1.1" -description = "An extension module for click to enable registering CLI commands via setuptools entry-points." 
-optional = false -python-versions = "*" -files = [ - {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, - {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, -] - -[package.dependencies] -click = ">=4.0" - -[package.extras] -dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] - -[[package]] -name = "click-repl" -version = "0.3.0" -description = "REPL plugin for Click" -optional = false -python-versions = ">=3.6" -files = [ - {file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"}, - {file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"}, -] - -[package.dependencies] -click = ">=7.0" -prompt-toolkit = ">=3.0.36" - -[package.extras] -testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "comm" -version = "0.2.2" -description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, - {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, -] - -[package.dependencies] -traitlets = ">=4" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "coverage" -version = "7.5.3" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, - {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, - {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, - 
{file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, - {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, - {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, - {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, - {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, - {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, - {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, - 
{file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, - {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, - {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, - {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, - {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, -] - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "debugpy" -version = "1.8.1" -description = "An implementation of the Debug Adapter Protocol for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "debugpy-1.8.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:3bda0f1e943d386cc7a0e71bfa59f4137909e2ed947fb3946c506e113000f741"}, - {file = "debugpy-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dda73bf69ea479c8577a0448f8c707691152e6c4de7f0c4dec5a4bc11dee516e"}, - {file = "debugpy-1.8.1-cp310-cp310-win32.whl", hash = "sha256:3a79c6f62adef994b2dbe9fc2cc9cc3864a23575b6e387339ab739873bea53d0"}, - {file = "debugpy-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:7eb7bd2b56ea3bedb009616d9e2f64aab8fc7000d481faec3cd26c98a964bcdd"}, - {file = "debugpy-1.8.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:016a9fcfc2c6b57f939673c874310d8581d51a0fe0858e7fac4e240c5eb743cb"}, - {file = "debugpy-1.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd97ed11a4c7f6d042d320ce03d83b20c3fb40da892f994bc041bbc415d7a099"}, - {file = "debugpy-1.8.1-cp311-cp311-win32.whl", hash = 
"sha256:0de56aba8249c28a300bdb0672a9b94785074eb82eb672db66c8144fff673146"}, - {file = "debugpy-1.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:1a9fe0829c2b854757b4fd0a338d93bc17249a3bf69ecf765c61d4c522bb92a8"}, - {file = "debugpy-1.8.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3ebb70ba1a6524d19fa7bb122f44b74170c447d5746a503e36adc244a20ac539"}, - {file = "debugpy-1.8.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2e658a9630f27534e63922ebf655a6ab60c370f4d2fc5c02a5b19baf4410ace"}, - {file = "debugpy-1.8.1-cp312-cp312-win32.whl", hash = "sha256:caad2846e21188797a1f17fc09c31b84c7c3c23baf2516fed5b40b378515bbf0"}, - {file = "debugpy-1.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:edcc9f58ec0fd121a25bc950d4578df47428d72e1a0d66c07403b04eb93bcf98"}, - {file = "debugpy-1.8.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:7a3afa222f6fd3d9dfecd52729bc2e12c93e22a7491405a0ecbf9e1d32d45b39"}, - {file = "debugpy-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d915a18f0597ef685e88bb35e5d7ab968964b7befefe1aaea1eb5b2640b586c7"}, - {file = "debugpy-1.8.1-cp38-cp38-win32.whl", hash = "sha256:92116039b5500633cc8d44ecc187abe2dfa9b90f7a82bbf81d079fcdd506bae9"}, - {file = "debugpy-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:e38beb7992b5afd9d5244e96ad5fa9135e94993b0c551ceebf3fe1a5d9beb234"}, - {file = "debugpy-1.8.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:bfb20cb57486c8e4793d41996652e5a6a885b4d9175dd369045dad59eaacea42"}, - {file = "debugpy-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efd3fdd3f67a7e576dd869c184c5dd71d9aaa36ded271939da352880c012e703"}, - {file = "debugpy-1.8.1-cp39-cp39-win32.whl", hash = "sha256:58911e8521ca0c785ac7a0539f1e77e0ce2df753f786188f382229278b4cdf23"}, - {file = "debugpy-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:6df9aa9599eb05ca179fb0b810282255202a66835c6efb1d112d21ecb830ddd3"}, - {file = 
"debugpy-1.8.1-py2.py3-none-any.whl", hash = "sha256:28acbe2241222b87e255260c76741e1fbf04fdc3b6d094fcf57b6c6f75ce1242"}, - {file = "debugpy-1.8.1.zip", hash = "sha256:f696d6be15be87aef621917585f9bb94b1dc9e8aced570db1b8a6fc14e8f9b42"}, -] - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - -[[package]] -name = "desbordante" -version = "1.1.0" -description = "Science-intensive high-performance data profiler" -optional = false -python-versions = ">=3.7" -files = [ - {file = "desbordante-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6ac9c19ffaefc5a6a85908c47069988d3993f7ef46ff40cf931358959d25395"}, - {file = "desbordante-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01cc9c6733b1894ba04df78d41446a03b0d9bd3f0ce3dce33ae543c897568eb0"}, - {file = "desbordante-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b99c90bea8156d63c5f40a94b48464b26c7da96987f18fb57baeda5558376d"}, - {file = "desbordante-1.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c00db13a79273b66dbcece0fcacc81cb9271993e77725108c4b8e0ed95188eb"}, - {file = "desbordante-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:099f1852522684c0e42e6c8cb7ee32ac1d9bb9d87d1e5863bba81bf36e42d536"}, - {file = "desbordante-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42cbef2c227acd26dfb6bb6209f28a440a6ea0f1f4e770c338e03c19467a948"}, - {file = "desbordante-1.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7075e865b830ca194ed77ba21f8667fb8f87a47a1313ba7cb83f6c506fed05c0"}, - {file = "desbordante-1.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19413ecb9f1215672500ffae4c806bee27a286d83704952085e4dbb37f22aeb3"}, - {file = "desbordante-1.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:798b12cfd28d7354aec251cae1adf75fa0a5c3a9c757b8a4ab11840a56332b1e"}, - {file = "desbordante-1.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a69a17ba645e8547564167817628260761ec5ade26d3fbb49112eeddc7813717"}, -] - -[[package]] -name = "desbordante-stubs" -version = "0.1.0" -description = "Stubs for desbordante package" -optional = false -python-versions = ">=3.11,<4.0" -files = [ - {file = "desbordante_stubs-0.1.0-py3-none-any.whl", hash = "sha256:51818787dbd39ffc6909c11fc5782fb419e42df055d214abadf74d37551c69e9"}, - {file = "desbordante_stubs-0.1.0.tar.gz", hash = "sha256:915860190da94e0ca2e8e9ccf54e669af8abf6518fd539c2e700a3b7a522df0d"}, -] - -[package.dependencies] -desbordante = ">=1.1.0,<2.0.0" - -[[package]] -name = "distlib" -version = "0.3.8" -description = "Distribution utilities" -optional = false -python-versions = "*" -files = [ - {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, - {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, -] - -[[package]] -name = "dnspython" -version = "2.6.1" -description = "DNS toolkit" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, - {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, -] - -[package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", 
"pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] -dnssec = ["cryptography (>=41)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] -doq = ["aioquic (>=0.9.25)"] -idna = ["idna (>=3.6)"] -trio = ["trio (>=0.23)"] -wmi = ["wmi (>=1.5.1)"] - -[[package]] -name = "docker" -version = "7.1.0" -description = "A Python library for the Docker Engine API." -optional = false -python-versions = ">=3.8" -files = [ - {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, - {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, -] - -[package.dependencies] -pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} -requests = ">=2.26.0" -urllib3 = ">=1.26.0" - -[package.extras] -dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] -docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] -ssh = ["paramiko (>=2.4.3)"] -websockets = ["websocket-client (>=1.3.0)"] - -[[package]] -name = "email-validator" -version = "2.2.0" -description = "A robust email address syntax and deliverability validation library." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, - {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, -] - -[package.dependencies] -dnspython = ">=2.0.0" -idna = ">=2.0.0" - -[[package]] -name = "executing" -version = "2.0.1" -description = "Get the currently executing AST node of a frame, and other information" -optional = false -python-versions = ">=3.5" -files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, -] - -[package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] - -[[package]] -name = "faker" -version = "25.9.1" -description = "Faker is a Python package that generates fake data for you." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "Faker-25.9.1-py3-none-any.whl", hash = "sha256:f1dc27dc8035cb7e97e96afbb5fe1305eed6aeea53374702cbac96acfe851626"}, - {file = "Faker-25.9.1.tar.gz", hash = "sha256:0e1cf7a8d3c94de91a65ab1e9cf7050903efae1e97901f8e5924a9f45147ae44"}, -] - -[package.dependencies] -python-dateutil = ">=2.4" - -[[package]] -name = "fastapi" -version = "0.109.2" -description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fastapi-0.109.2-py3-none-any.whl", hash = "sha256:2c9bab24667293b501cad8dd388c05240c850b58ec5876ee3283c47d6e1e3a4d"}, - {file = "fastapi-0.109.2.tar.gz", hash = "sha256:f3817eac96fe4f65a2ebb4baa000f394e55f5fccdaf7f75250804bc58f354f73"}, -] - -[package.dependencies] -email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"all\""} -httpx = {version = ">=0.23.0", optional = true, markers = "extra == \"all\""} -itsdangerous = {version = ">=1.1.0", optional = true, markers = "extra == \"all\""} -jinja2 = {version = ">=2.11.2", optional = true, markers = "extra == \"all\""} -orjson = {version = ">=3.2.1", optional = true, markers = "extra == \"all\""} -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -pydantic-extra-types = {version = ">=2.0.0", optional = true, markers = "extra == \"all\""} -pydantic-settings = {version = ">=2.0.0", optional = true, markers = "extra == \"all\""} -python-multipart = {version = ">=0.0.7", optional = true, markers = "extra == \"all\""} -pyyaml = {version = ">=5.3.1", optional = true, markers = "extra == \"all\""} -starlette = ">=0.36.3,<0.37.0" -typing-extensions = ">=4.8.0" -ujson = {version = ">=4.0.1,<4.0.2 || >4.0.2,<4.1.0 || >4.1.0,<4.2.0 || >4.2.0,<4.3.0 || >4.3.0,<5.0.0 || >5.0.0,<5.1.0 || >5.1.0", optional = true, markers = "extra == \"all\""} -uvicorn = {version = ">=0.12.0", 
extras = ["standard"], optional = true, markers = "extra == \"all\""} - -[package.extras] -all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] - -[[package]] -name = "filelock" -version = "3.15.3" -description = "A platform independent file lock." -optional = false -python-versions = ">=3.8" -files = [ - {file = "filelock-3.15.3-py3-none-any.whl", hash = "sha256:0151273e5b5d6cf753a61ec83b3a9b7d8821c39ae9af9d7ecf2f9e2f17404103"}, - {file = "filelock-3.15.3.tar.gz", hash = "sha256:e1199bf5194a2277273dacd50269f0d87d0682088a3c561c15674ea9005d8635"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] - -[[package]] -name = "greenlet" -version = "3.0.3" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = 
"greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = 
"greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "httpcore" -version = "1.0.5" -description = "A minimal low-level HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] - -[[package]] -name = "httptools" -version = "0.6.1" -description = "A collection of framework independent HTTP protocol utils." 
-optional = false -python-versions = ">=3.8.0" -files = [ - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, - {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, - {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, - {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, - {file = 
"httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, - {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, - {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, - {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, -] - -[package.extras] -test = ["Cython (>=0.29.24,<0.30.0)"] - -[[package]] -name = "httpx" -version = "0.27.0" -description = "The next 
generation HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" -sniffio = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] - -[[package]] -name = "identify" -version = "2.5.36" -description = "File identification library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"}, - {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"}, -] - -[package.extras] -license = ["ukkonen"] - -[[package]] -name = "idna" -version = "3.7" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "ipykernel" -version = "6.29.4" -description = "IPython Kernel for Jupyter" -optional 
= false -python-versions = ">=3.8" -files = [ - {file = "ipykernel-6.29.4-py3-none-any.whl", hash = "sha256:1181e653d95c6808039c509ef8e67c4126b3b3af7781496c7cbfb5ed938a27da"}, - {file = "ipykernel-6.29.4.tar.gz", hash = "sha256:3d44070060f9475ac2092b760123fadf105d2e2493c24848b6691a7c4f42af5c"}, -] - -[package.dependencies] -appnope = {version = "*", markers = "platform_system == \"Darwin\""} -comm = ">=0.1.1" -debugpy = ">=1.6.5" -ipython = ">=7.23.1" -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -matplotlib-inline = ">=0.1" -nest-asyncio = "*" -packaging = "*" -psutil = "*" -pyzmq = ">=24" -tornado = ">=6.1" -traitlets = ">=5.4.0" - -[package.extras] -cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] -pyqt5 = ["pyqt5"] -pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "ipython" -version = "8.25.0" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.10" -files = [ - {file = "ipython-8.25.0-py3-none-any.whl", hash = "sha256:53eee7ad44df903a06655871cbab66d156a051fd86f3ec6750470ac9604ac1ab"}, - {file = "ipython-8.25.0.tar.gz", hash = "sha256:c6ed726a140b6e725b911528f80439c534fac915246af3efc39440a6b0f9d716"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} -prompt-toolkit = ">=3.0.41,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5.13.0" - -[package.extras] -all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] 
-black = ["black"] -doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"] -kernel = ["ipykernel"] -matplotlib = ["matplotlib"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] - -[[package]] -name = "itsdangerous" -version = "2.2.0" -description = "Safely pass data to untrusted environments and back." -optional = false -python-versions = ">=3.8" -files = [ - {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, - {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, -] - -[[package]] -name = "jedi" -version = "0.19.1" -description = "An autocompletion tool for Python that can be used for text editors." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, - {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, -] - -[package.dependencies] -parso = ">=0.8.3,<0.9.0" - -[package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] - -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "jupyter-client" -version = "8.6.2" -description = "Jupyter protocol implementation and client libraries" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_client-8.6.2-py3-none-any.whl", hash = "sha256:50cbc5c66fd1b8f65ecb66bc490ab73217993632809b6e505687de18e9dea39f"}, - {file = "jupyter_client-8.6.2.tar.gz", hash = "sha256:2bda14d55ee5ba58552a8c53ae43d215ad9868853489213f37da060ced54d8df"}, -] - -[package.dependencies] -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -python-dateutil = ">=2.8.2" -pyzmq = ">=23.0" -tornado = ">=6.2" -traitlets = ">=5.3" - -[package.extras] -docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] - -[[package]] -name = "jupyter-core" -version = "5.7.2" -description = "Jupyter core package. A base package on which Jupyter projects rely." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, - {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, -] - -[package.dependencies] -platformdirs = ">=2.5" -pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} -traitlets = ">=5.3" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "kombu" -version = "5.3.7" -description = "Messaging library for Python." -optional = false -python-versions = ">=3.8" -files = [ - {file = "kombu-5.3.7-py3-none-any.whl", hash = "sha256:5634c511926309c7f9789f1433e9ed402616b56836ef9878f01bd59267b4c7a9"}, - {file = "kombu-5.3.7.tar.gz", hash = "sha256:011c4cd9a355c14a1de8d35d257314a1d2456d52b7140388561acac3cf1a97bf"}, -] - -[package.dependencies] -amqp = ">=5.1.1,<6.0.0" -vine = "*" - -[package.extras] -azureservicebus = ["azure-servicebus (>=7.10.0)"] -azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"] -confluentkafka = ["confluent-kafka (>=2.2.0)"] -consul = ["python-consul2"] -librabbitmq = ["librabbitmq (>=2.0.0)"] -mongodb = ["pymongo (>=4.1.1)"] -msgpack = ["msgpack"] -pyro = ["pyro4"] -qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] -redis = ["redis (>=4.5.2,!=4.5.5,!=5.0.2)"] -slmq = ["softlayer-messaging (>=1.0.3)"] -sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] -sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] -yaml = ["PyYAML (>=3.10)"] -zookeeper = ["kazoo (>=2.8.0)"] - -[[package]] -name = "mako" -version = "1.3.5" -description = "A super-fast templating language that borrows the best 
ideas from the existing templating languages." -optional = false -python-versions = ">=3.8" -files = [ - {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, - {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, -] - -[package.dependencies] -MarkupSafe = ">=0.9.2" - -[package.extras] -babel = ["Babel"] -lingua = ["lingua"] -testing = ["pytest"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." -optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = 
"MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = 
"MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = 
"sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "matplotlib-inline" -version = "0.1.7" -description = "Inline Matplotlib backend for Jupyter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, - {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, -] - -[package.dependencies] -traitlets = "*" - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "nest-asyncio" -version = "1.6.0" -description = "Patch asyncio to allow nested event loops" -optional = false -python-versions = ">=3.5" -files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, -] - -[[package]] -name = "nodeenv" -version = "1.9.1" -description = "Node.js virtual environment builder" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, - {file = "nodeenv-1.9.1.tar.gz", hash = 
"sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, -] - -[[package]] -name = "numpy" -version = "2.0.0" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-2.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:04494f6ec467ccb5369d1808570ae55f6ed9b5809d7f035059000a37b8d7e86f"}, - {file = "numpy-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2635dbd200c2d6faf2ef9a0d04f0ecc6b13b3cad54f7c67c61155138835515d2"}, - {file = "numpy-2.0.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:0a43f0974d501842866cc83471bdb0116ba0dffdbaac33ec05e6afed5b615238"}, - {file = "numpy-2.0.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:8d83bb187fb647643bd56e1ae43f273c7f4dbcdf94550d7938cfc32566756514"}, - {file = "numpy-2.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79e843d186c8fb1b102bef3e2bc35ef81160ffef3194646a7fdd6a73c6b97196"}, - {file = "numpy-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d7696c615765091cc5093f76fd1fa069870304beaccfd58b5dcc69e55ef49c1"}, - {file = "numpy-2.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b4c76e3d4c56f145d41b7b6751255feefae92edbc9a61e1758a98204200f30fc"}, - {file = "numpy-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:acd3a644e4807e73b4e1867b769fbf1ce8c5d80e7caaef0d90dcdc640dfc9787"}, - {file = "numpy-2.0.0-cp310-cp310-win32.whl", hash = "sha256:cee6cc0584f71adefe2c908856ccc98702baf95ff80092e4ca46061538a2ba98"}, - {file = "numpy-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:ed08d2703b5972ec736451b818c2eb9da80d66c3e84aed1deeb0c345fefe461b"}, - {file = "numpy-2.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad0c86f3455fbd0de6c31a3056eb822fc939f81b1618f10ff3406971893b62a5"}, - {file = "numpy-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7f387600d424f91576af20518334df3d97bc76a300a755f9a8d6e4f5cadd289"}, - 
{file = "numpy-2.0.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:34f003cb88b1ba38cb9a9a4a3161c1604973d7f9d5552c38bc2f04f829536609"}, - {file = "numpy-2.0.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:b6f6a8f45d0313db07d6d1d37bd0b112f887e1369758a5419c0370ba915b3871"}, - {file = "numpy-2.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f64641b42b2429f56ee08b4f427a4d2daf916ec59686061de751a55aafa22e4"}, - {file = "numpy-2.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7039a136017eaa92c1848152827e1424701532ca8e8967fe480fe1569dae581"}, - {file = "numpy-2.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46e161722e0f619749d1cd892167039015b2c2817296104487cd03ed4a955995"}, - {file = "numpy-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0e50842b2295ba8414c8c1d9d957083d5dfe9e16828b37de883f51fc53c4016f"}, - {file = "numpy-2.0.0-cp311-cp311-win32.whl", hash = "sha256:2ce46fd0b8a0c947ae047d222f7136fc4d55538741373107574271bc00e20e8f"}, - {file = "numpy-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbd6acc766814ea6443628f4e6751d0da6593dae29c08c0b2606164db026970c"}, - {file = "numpy-2.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:354f373279768fa5a584bac997de6a6c9bc535c482592d7a813bb0c09be6c76f"}, - {file = "numpy-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4d2f62e55a4cd9c58c1d9a1c9edaedcd857a73cb6fda875bf79093f9d9086f85"}, - {file = "numpy-2.0.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:1e72728e7501a450288fc8e1f9ebc73d90cfd4671ebbd631f3e7857c39bd16f2"}, - {file = "numpy-2.0.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:84554fc53daa8f6abf8e8a66e076aff6ece62de68523d9f665f32d2fc50fd66e"}, - {file = "numpy-2.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73aafd1afca80afecb22718f8700b40ac7cab927b8abab3c3e337d70e10e5a2"}, - {file = "numpy-2.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:49d9f7d256fbc804391a7f72d4a617302b1afac1112fac19b6c6cec63fe7fe8a"}, - {file = "numpy-2.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0ec84b9ba0654f3b962802edc91424331f423dcf5d5f926676e0150789cb3d95"}, - {file = "numpy-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:feff59f27338135776f6d4e2ec7aeeac5d5f7a08a83e80869121ef8164b74af9"}, - {file = "numpy-2.0.0-cp312-cp312-win32.whl", hash = "sha256:c5a59996dc61835133b56a32ebe4ef3740ea5bc19b3983ac60cc32be5a665d54"}, - {file = "numpy-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:a356364941fb0593bb899a1076b92dfa2029f6f5b8ba88a14fd0984aaf76d0df"}, - {file = "numpy-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e61155fae27570692ad1d327e81c6cf27d535a5d7ef97648a17d922224b216de"}, - {file = "numpy-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4554eb96f0fd263041baf16cf0881b3f5dafae7a59b1049acb9540c4d57bc8cb"}, - {file = "numpy-2.0.0-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:903703372d46bce88b6920a0cd86c3ad82dae2dbef157b5fc01b70ea1cfc430f"}, - {file = "numpy-2.0.0-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:3e8e01233d57639b2e30966c63d36fcea099d17c53bf424d77f088b0f4babd86"}, - {file = "numpy-2.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cde1753efe513705a0c6d28f5884e22bdc30438bf0085c5c486cdaff40cd67a"}, - {file = "numpy-2.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821eedb7165ead9eebdb569986968b541f9908979c2da8a4967ecac4439bae3d"}, - {file = "numpy-2.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a1712c015831da583b21c5bfe15e8684137097969c6d22e8316ba66b5baabe4"}, - {file = "numpy-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9c27f0946a3536403efb0e1c28def1ae6730a72cd0d5878db38824855e3afc44"}, - {file = "numpy-2.0.0-cp39-cp39-win32.whl", hash = "sha256:63b92c512d9dbcc37f9d81b123dec99fdb318ba38c8059afc78086fe73820275"}, - {file = "numpy-2.0.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:3f6bed7f840d44c08ebdb73b1825282b801799e325bcbdfa6bc5c370e5aecc65"}, - {file = "numpy-2.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9416a5c2e92ace094e9f0082c5fd473502c91651fb896bc17690d6fc475128d6"}, - {file = "numpy-2.0.0-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:17067d097ed036636fa79f6a869ac26df7db1ba22039d962422506640314933a"}, - {file = "numpy-2.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ecb5b0582cd125f67a629072fed6f83562d9dd04d7e03256c9829bdec027ad"}, - {file = "numpy-2.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cef04d068f5fb0518a77857953193b6bb94809a806bd0a14983a8f12ada060c9"}, - {file = "numpy-2.0.0.tar.gz", hash = "sha256:cf5d1c9e6837f8af9f92b6bd3e86d513cdc11f60fd62185cc49ec7d1aba34864"}, -] - -[[package]] -name = "orjson" -version = "3.10.5" -description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" -optional = false -python-versions = ">=3.8" -files = [ - {file = "orjson-3.10.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:545d493c1f560d5ccfc134803ceb8955a14c3fcb47bbb4b2fee0232646d0b932"}, - {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4324929c2dd917598212bfd554757feca3e5e0fa60da08be11b4aa8b90013c1"}, - {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c13ca5e2ddded0ce6a927ea5a9f27cae77eee4c75547b4297252cb20c4d30e6"}, - {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6c8e30adfa52c025f042a87f450a6b9ea29649d828e0fec4858ed5e6caecf63"}, - {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338fd4f071b242f26e9ca802f443edc588fa4ab60bfa81f38beaedf42eda226c"}, - {file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:6970ed7a3126cfed873c5d21ece1cd5d6f83ca6c9afb71bbae21a0b034588d96"}, - {file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:235dadefb793ad12f7fa11e98a480db1f7c6469ff9e3da5e73c7809c700d746b"}, - {file = "orjson-3.10.5-cp310-none-win32.whl", hash = "sha256:be79e2393679eda6a590638abda16d167754393f5d0850dcbca2d0c3735cebe2"}, - {file = "orjson-3.10.5-cp310-none-win_amd64.whl", hash = "sha256:c4a65310ccb5c9910c47b078ba78e2787cb3878cdded1702ac3d0da71ddc5228"}, - {file = "orjson-3.10.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:cdf7365063e80899ae3a697def1277c17a7df7ccfc979990a403dfe77bb54d40"}, - {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b68742c469745d0e6ca5724506858f75e2f1e5b59a4315861f9e2b1df77775a"}, - {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d10cc1b594951522e35a3463da19e899abe6ca95f3c84c69e9e901e0bd93d38"}, - {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcbe82b35d1ac43b0d84072408330fd3295c2896973112d495e7234f7e3da2e1"}, - {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c0eb7e0c75e1e486c7563fe231b40fdd658a035ae125c6ba651ca3b07936f5"}, - {file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:53ed1c879b10de56f35daf06dbc4a0d9a5db98f6ee853c2dbd3ee9d13e6f302f"}, - {file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:099e81a5975237fda3100f918839af95f42f981447ba8f47adb7b6a3cdb078fa"}, - {file = "orjson-3.10.5-cp311-none-win32.whl", hash = "sha256:1146bf85ea37ac421594107195db8bc77104f74bc83e8ee21a2e58596bfb2f04"}, - {file = "orjson-3.10.5-cp311-none-win_amd64.whl", hash = "sha256:36a10f43c5f3a55c2f680efe07aa93ef4a342d2960dd2b1b7ea2dd764fe4a37c"}, - {file = 
"orjson-3.10.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:68f85ecae7af14a585a563ac741b0547a3f291de81cd1e20903e79f25170458f"}, - {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28afa96f496474ce60d3340fe8d9a263aa93ea01201cd2bad844c45cd21f5268"}, - {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cd684927af3e11b6e754df80b9ffafd9fb6adcaa9d3e8fdd5891be5a5cad51e"}, - {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d21b9983da032505f7050795e98b5d9eee0df903258951566ecc358f6696969"}, - {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ad1de7fef79736dde8c3554e75361ec351158a906d747bd901a52a5c9c8d24b"}, - {file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d97531cdfe9bdd76d492e69800afd97e5930cb0da6a825646667b2c6c6c0211"}, - {file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d69858c32f09c3e1ce44b617b3ebba1aba030e777000ebdf72b0d8e365d0b2b3"}, - {file = "orjson-3.10.5-cp312-none-win32.whl", hash = "sha256:64c9cc089f127e5875901ac05e5c25aa13cfa5dbbbd9602bda51e5c611d6e3e2"}, - {file = "orjson-3.10.5-cp312-none-win_amd64.whl", hash = "sha256:b2efbd67feff8c1f7728937c0d7f6ca8c25ec81373dc8db4ef394c1d93d13dc5"}, - {file = "orjson-3.10.5-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:03b565c3b93f5d6e001db48b747d31ea3819b89abf041ee10ac6988886d18e01"}, - {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:584c902ec19ab7928fd5add1783c909094cc53f31ac7acfada817b0847975f26"}, - {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a35455cc0b0b3a1eaf67224035f5388591ec72b9b6136d66b49a553ce9eb1e6"}, - {file = 
"orjson-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1670fe88b116c2745a3a30b0f099b699a02bb3482c2591514baf5433819e4f4d"}, - {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185c394ef45b18b9a7d8e8f333606e2e8194a50c6e3c664215aae8cf42c5385e"}, - {file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ca0b3a94ac8d3886c9581b9f9de3ce858263865fdaa383fbc31c310b9eac07c9"}, - {file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dfc91d4720d48e2a709e9c368d5125b4b5899dced34b5400c3837dadc7d6271b"}, - {file = "orjson-3.10.5-cp38-none-win32.whl", hash = "sha256:c05f16701ab2a4ca146d0bca950af254cb7c02f3c01fca8efbbad82d23b3d9d4"}, - {file = "orjson-3.10.5-cp38-none-win_amd64.whl", hash = "sha256:8a11d459338f96a9aa7f232ba95679fc0c7cedbd1b990d736467894210205c09"}, - {file = "orjson-3.10.5-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:85c89131d7b3218db1b24c4abecea92fd6c7f9fab87441cfc342d3acc725d807"}, - {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66215277a230c456f9038d5e2d84778141643207f85336ef8d2a9da26bd7ca"}, - {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51bbcdea96cdefa4a9b4461e690c75ad4e33796530d182bdd5c38980202c134a"}, - {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbead71dbe65f959b7bd8cf91e0e11d5338033eba34c114f69078d59827ee139"}, - {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df58d206e78c40da118a8c14fc189207fffdcb1f21b3b4c9c0c18e839b5a214"}, - {file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c4057c3b511bb8aef605616bd3f1f002a697c7e4da6adf095ca5b84c0fd43595"}, - {file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:b39e006b00c57125ab974362e740c14a0c6a66ff695bff44615dcf4a70ce2b86"}, - {file = "orjson-3.10.5-cp39-none-win32.whl", hash = "sha256:eded5138cc565a9d618e111c6d5c2547bbdd951114eb822f7f6309e04db0fb47"}, - {file = "orjson-3.10.5-cp39-none-win_amd64.whl", hash = "sha256:cc28e90a7cae7fcba2493953cff61da5a52950e78dc2dacfe931a317ee3d8de7"}, - {file = "orjson-3.10.5.tar.gz", hash = "sha256:7a5baef8a4284405d96c90c7c62b755e9ef1ada84c2406c24a9ebec86b89f46d"}, -] - -[[package]] -name = "packaging" -version = "24.1" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, -] - -[[package]] -name = "pandas" -version = "2.2.2" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, - {file = 
"pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, - {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, - {file = 
"pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, - {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, - {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, - {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, -] - -[package.dependencies] -numpy = {version = ">=1.26.0", markers = "python_version >= \"3.12\""} -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.7" - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib 
(>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] -aws = ["s3fs (>=2022.11.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] -compression = ["zstandard (>=0.19.0)"] -computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] -feather = ["pyarrow (>=10.0.1)"] -fss = ["fsspec (>=2022.11.0)"] -gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] -hdf5 = ["tables (>=3.8.0)"] -html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] -mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] -parquet = ["pyarrow (>=10.0.1)"] -performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] -plot = ["matplotlib (>=3.6.3)"] -postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] -pyarrow = ["pyarrow (>=10.0.1)"] -spss = ["pyreadstat (>=1.2.0)"] -sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.9.2)"] - -[[package]] -name = "pandas-stubs" -version = "2.2.2.240603" -description = "Type annotations for pandas" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas_stubs-2.2.2.240603-py3-none-any.whl", hash = 
"sha256:e08ce7f602a4da2bff5a67475ba881c39f2a4d4f7fccc1cba57c6f35a379c6c0"}, - {file = "pandas_stubs-2.2.2.240603.tar.gz", hash = "sha256:2dcc86e8fa6ea41535a4561c1f08b3942ba5267b464eff2e99caeee66f9e4cd1"}, -] - -[package.dependencies] -numpy = {version = ">=1.26.0", markers = "python_version >= \"3.12\" and python_version < \"3.13\""} -types-pytz = ">=2022.1.1" - -[[package]] -name = "parso" -version = "0.8.4" -description = "A Python Parser" -optional = false -python-versions = ">=3.6" -files = [ - {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, - {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, -] - -[package.extras] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["docopt", "pytest"] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "pexpect" -version = "4.9.0" -description = "Pexpect allows easy control of interactive console applications." -optional = false -python-versions = "*" -files = [ - {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, - {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, -] - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "platformdirs" -version = "4.2.2" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "polyfactory" -version = "2.16.0" -description = "Mock data generation factories" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "polyfactory-2.16.0-py3-none-any.whl", hash = "sha256:168d8e50b77e91e35e691e8b3eedac43d7e423a6857fa26d473def96d53f0ecf"}, - {file = "polyfactory-2.16.0.tar.gz", hash = "sha256:03d8c706b70c4782ac8e637d0f6ab52760a7d11b712da5936a95a8f7022b2688"}, -] - -[package.dependencies] -faker = "*" -typing-extensions = ">=4.6.0" - -[package.extras] -attrs = ["attrs (>=22.2.0)"] -beanie = ["beanie", "pydantic[email]"] -full = ["attrs", "beanie", "msgspec", "odmantic", "pydantic", "sqlalchemy"] -msgspec = ["msgspec"] -odmantic = ["odmantic (<1.0.0)", "pydantic[email]"] -pydantic = ["pydantic[email]"] -sqlalchemy = ["sqlalchemy (>=1.4.29)"] - -[[package]] -name = "pre-commit" -version = "3.7.1" -description = "A framework for managing 
and maintaining multi-language pre-commit hooks." -optional = false -python-versions = ">=3.9" -files = [ - {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"}, - {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"}, -] - -[package.dependencies] -cfgv = ">=2.0.0" -identify = ">=1.0.0" -nodeenv = ">=0.11.1" -pyyaml = ">=5.1" -virtualenv = ">=20.10.0" - -[[package]] -name = "prompt-toolkit" -version = "3.0.47" -description = "Library for building powerful interactive command lines in Python" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, - {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, -] - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "psutil" -version = "6.0.0" -description = "Cross-platform lib for process and system monitoring in Python." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"}, - {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"}, - {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"}, - {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"}, - {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"}, - {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = 
"sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"}, - {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"}, - {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"}, - {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"}, - {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "psycopg" -version = "3.1.19" -description = "PostgreSQL database adapter for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "psycopg-3.1.19-py3-none-any.whl", hash = "sha256:dca5e5521c859f6606686432ae1c94e8766d29cc91f2ee595378c510cc5b0731"}, - {file = "psycopg-3.1.19.tar.gz", hash = "sha256:92d7b78ad82426cdcf1a0440678209faa890c6e1721361c2f8901f0dccd62961"}, -] - -[package.dependencies] -psycopg-binary = {version = "3.1.19", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} -typing-extensions = ">=4.1" -tzdata = {version = "*", markers = "sys_platform == \"win32\""} - -[package.extras] -binary = ["psycopg-binary (==3.1.19)"] -c = ["psycopg-c (==3.1.19)"] -dev = ["black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.4.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] -docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] -pool = ["psycopg-pool"] -test = ["anyio (>=3.6.2,<4.0)", "mypy (>=1.4.1)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] - -[[package]] -name = "psycopg-binary" -version = "3.1.19" -description = "PostgreSQL database adapter for Python -- C optimisation 
distribution" -optional = false -python-versions = ">=3.7" -files = [ - {file = "psycopg_binary-3.1.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7204818f05151dd08f8f851defb01972ec9d2cc925608eb0de232563f203f354"}, - {file = "psycopg_binary-3.1.19-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d4e67fd86758dbeac85641419a54f84d74495a8683b58ad5dfad08b7fc37a8f"}, - {file = "psycopg_binary-3.1.19-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12173e34b176e93ad2da913de30f774d5119c2d4d4640c6858d2d77dfa6c9bf"}, - {file = "psycopg_binary-3.1.19-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052f5193304066318853b4b2e248f523c8f52b371fc4e95d4ef63baee3f30955"}, - {file = "psycopg_binary-3.1.19-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29008f3f8977f600b8a7fb07c2e041b01645b08121760609cc45e861a0364dc9"}, - {file = "psycopg_binary-3.1.19-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6a9a651a08d876303ed059c9553df18b3c13c3406584a70a8f37f1a1fe2709"}, - {file = "psycopg_binary-3.1.19-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:91a645e6468c4f064b7f4f3b81074bdd68fe5aa2b8c5107de15dcd85ba6141be"}, - {file = "psycopg_binary-3.1.19-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5c6956808fd5cf0576de5a602243af8e04594b25b9a28675feddc71c5526410a"}, - {file = "psycopg_binary-3.1.19-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:1622ca27d5a7a98f7d8f35e8b146dc7efda4a4b6241d2edf7e076bd6bcecbeb4"}, - {file = "psycopg_binary-3.1.19-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a100482950a55228f648bd382bb71bfaff520002f29845274fccbbf02e28bd52"}, - {file = "psycopg_binary-3.1.19-cp310-cp310-win_amd64.whl", hash = "sha256:955ca8905c0251fc4af7ce0a20999e824a25652f53a558ab548b60969f1f368e"}, - {file = "psycopg_binary-3.1.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cf49e91dcf699b8a449944ed898ef1466b39b92720613838791a551bc8f587a"}, 
- {file = "psycopg_binary-3.1.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:964c307e400c5f33fa762ba1e19853e048814fcfbd9679cc923431adb7a2ead2"}, - {file = "psycopg_binary-3.1.19-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3433924e1b14074798331dc2bfae2af452ed7888067f2fc145835704d8981b15"}, - {file = "psycopg_binary-3.1.19-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00879d4c6be4b3afc510073f48a5e960f797200e261ab3d9bd9b7746a08c669d"}, - {file = "psycopg_binary-3.1.19-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34a6997c80f86d3dd80a4f078bb3b200079c47eeda4fd409d8899b883c90d2ac"}, - {file = "psycopg_binary-3.1.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0106e42b481677c41caa69474fe530f786dcef88b11b70000f0e45a03534bc8f"}, - {file = "psycopg_binary-3.1.19-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81efe09ba27533e35709905c3061db4dc9fb814f637360578d065e2061fbb116"}, - {file = "psycopg_binary-3.1.19-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d312d6dddc18d9c164e1893706269c293cba1923118349d375962b1188dafb01"}, - {file = "psycopg_binary-3.1.19-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:bfd2c734da9950f7afaad5f132088e0e1478f32f042881fca6651bb0c8d14206"}, - {file = "psycopg_binary-3.1.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8a732610a5a6b4f06dadcf9288688a8ff202fd556d971436a123b7adb85596e2"}, - {file = "psycopg_binary-3.1.19-cp311-cp311-win_amd64.whl", hash = "sha256:321814a9a3ad785855a821b842aba08ca1b7de7dfb2979a2f0492dca9ec4ae70"}, - {file = "psycopg_binary-3.1.19-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4aa0ca13bb8a725bb6d12c13999217fd5bc8b86a12589f28a74b93e076fbb959"}, - {file = "psycopg_binary-3.1.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:469424e354ebcec949aa6aa30e5a9edc352a899d9a68ad7a48f97df83cc914cf"}, - {file = 
"psycopg_binary-3.1.19-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b04f5349313529ae1f1c42fe1aa0443faaf50fdf12d13866c2cc49683bfa53d0"}, - {file = "psycopg_binary-3.1.19-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959feabddc7fffac89b054d6f23f3b3c62d7d3c90cd414a02e3747495597f150"}, - {file = "psycopg_binary-3.1.19-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e9da624a6ca4bc5f7fa1f03f8485446b5b81d5787b6beea2b4f8d9dbef878ad7"}, - {file = "psycopg_binary-3.1.19-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1823221a6b96e38b15686170d4fc5b36073efcb87cce7d3da660440b50077f6"}, - {file = "psycopg_binary-3.1.19-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:866db42f986298f0cf15d805225eb8df2228bf19f7997d7f1cb5f388cbfc6a0f"}, - {file = "psycopg_binary-3.1.19-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:738c34657305b5973af6dbb6711b07b179dfdd21196d60039ca30a74bafe9648"}, - {file = "psycopg_binary-3.1.19-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb9758473200384a04374d0e0cac6f451218ff6945a024f65a1526802c34e56e"}, - {file = "psycopg_binary-3.1.19-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0e991632777e217953ac960726158987da684086dd813ac85038c595e7382c91"}, - {file = "psycopg_binary-3.1.19-cp312-cp312-win_amd64.whl", hash = "sha256:1d87484dd42c8783c44a30400949efb3d81ef2487eaa7d64d1c54df90cf8b97a"}, - {file = "psycopg_binary-3.1.19-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d1d1723d7449c12bb61aca7eb6e0c6ab2863cd8dc0019273cc4d4a1982f84bdb"}, - {file = "psycopg_binary-3.1.19-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e538a8671005641fa195eab962f85cf0504defbd3b548c4c8fc27102a59f687b"}, - {file = "psycopg_binary-3.1.19-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c50592bc8517092f40979e4a5d934f96a1737a77724bb1d121eb78b614b30fc8"}, - {file = 
"psycopg_binary-3.1.19-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95f16ae82bc242b76cd3c3e5156441e2bd85ff9ec3a9869d750aad443e46073c"}, - {file = "psycopg_binary-3.1.19-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebd1e98e865e9a28ce0cb2c25b7dfd752f0d1f0a423165b55cd32a431dcc0f4"}, - {file = "psycopg_binary-3.1.19-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:49cd7af7d49e438a39593d1dd8cab106a1912536c2b78a4d814ebdff2786094e"}, - {file = "psycopg_binary-3.1.19-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:affebd61aa3b7a8880fd4ac3ee94722940125ff83ff485e1a7c76be9adaabb38"}, - {file = "psycopg_binary-3.1.19-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d1bac282f140fa092f2bbb6c36ed82270b4a21a6fc55d4b16748ed9f55e50fdb"}, - {file = "psycopg_binary-3.1.19-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1285aa54449e362b1d30d92b2dc042ad3ee80f479cc4e323448d0a0a8a1641fa"}, - {file = "psycopg_binary-3.1.19-cp37-cp37m-win_amd64.whl", hash = "sha256:6cff31af8155dc9ee364098a328bab688c887c732c66b8d027e5b03818ca0287"}, - {file = "psycopg_binary-3.1.19-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d9b689c4a17dd3130791dcbb8c30dbf05602f7c2d56c792e193fb49adc7bf5f8"}, - {file = "psycopg_binary-3.1.19-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:017518bd2de4851adc826a224fb105411e148ad845e11355edd6786ba3dfedf5"}, - {file = "psycopg_binary-3.1.19-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c35fd811f339a3cbe7f9b54b2d9a5e592e57426c6cc1051632a62c59c4810208"}, - {file = "psycopg_binary-3.1.19-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38ed45ec9673709bfa5bc17f140e71dd4cca56d4e58ef7fd50d5a5043a4f55c6"}, - {file = "psycopg_binary-3.1.19-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:433f1c256108f9e26f480a8cd6ddb0fb37dbc87d7f5a97e4540a9da9b881f23f"}, - {file = 
"psycopg_binary-3.1.19-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ed61e43bf5dc8d0936daf03a19fef3168d64191dbe66483f7ad08c4cea0bc36b"}, - {file = "psycopg_binary-3.1.19-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ae8109ff9fdf1fa0cb87ab6645298693fdd2666a7f5f85660df88f6965e0bb7"}, - {file = "psycopg_binary-3.1.19-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a53809ee02e3952fae7977c19b30fd828bd117b8f5edf17a3a94212feb57faaf"}, - {file = "psycopg_binary-3.1.19-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9d39d5ffc151fb33bcd55b99b0e8957299c0b1b3e5a1a5f4399c1287ef0051a9"}, - {file = "psycopg_binary-3.1.19-cp38-cp38-win_amd64.whl", hash = "sha256:e14bc8250000921fcccd53722f86b3b3d1b57db901e206e49e2ab2afc5919c2d"}, - {file = "psycopg_binary-3.1.19-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cd88c5cea4efe614d5004fb5f5dcdea3d7d59422be796689e779e03363102d24"}, - {file = "psycopg_binary-3.1.19-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:621a814e60825162d38760c66351b4df679fd422c848b7c2f86ad399bff27145"}, - {file = "psycopg_binary-3.1.19-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46e50c05952b59a214e27d3606f6d510aaa429daed898e16b8a37bfbacc81acc"}, - {file = "psycopg_binary-3.1.19-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03354a9db667c27946e70162cb0042c3929154167f3678a30d23cebfe0ad55b5"}, - {file = "psycopg_binary-3.1.19-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c2f3b79037581afec7baa2bdbcb0a1787f1758744a7662099b0eca2d721cb"}, - {file = "psycopg_binary-3.1.19-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6469ebd9e93327e9f5f36dcf8692fb1e7aeaf70087c1c15d4f2c020e0be3a891"}, - {file = "psycopg_binary-3.1.19-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:85bca9765c04b6be90cb46e7566ffe0faa2d7480ff5c8d5e055ac427f039fd24"}, - {file = "psycopg_binary-3.1.19-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:a836610d5c75e9cff98b9fdb3559c007c785c09eaa84a60d5d10ef6f85f671e8"}, - {file = "psycopg_binary-3.1.19-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ef8de7a1d9fb3518cc6b58e3c80b75a824209ad52b90c542686c912db8553dad"}, - {file = "psycopg_binary-3.1.19-cp39-cp39-win_amd64.whl", hash = "sha256:76fcd33342f38e35cd6b5408f1bc117d55ab8b16e5019d99b6d3ce0356c51717"}, -] - -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -optional = false -python-versions = "*" -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] - -[[package]] -name = "pure-eval" -version = "0.2.2" -description = "Safely evaluate AST nodes without side effects" -optional = false -python-versions = "*" -files = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, -] - -[package.extras] -tests = ["pytest"] - -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] - -[[package]] -name = "pycparser" -version = "2.22" -description = "C parser in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = 
"sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, -] - -[[package]] -name = "pydantic" -version = "2.7.4" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"}, - {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"}, -] - -[package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.18.4" -typing-extensions = ">=4.6.1" - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.18.4" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, - {file = 
"pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, - {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, - {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, - {file = 
"pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, - {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, - {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, - {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, - {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, - {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, - {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, - {file = 
"pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, - {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, - {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, - {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, - {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, - {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pydantic-extra-types" -version = "2.8.2" -description = "Extra 
Pydantic types." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_extra_types-2.8.2-py3-none-any.whl", hash = "sha256:f2400b3c3553fb7fa09a131967b4edf2d53f01ad9fa89d158784653f2e5c13d1"}, - {file = "pydantic_extra_types-2.8.2.tar.gz", hash = "sha256:4d2b3c52c1e2e4dfa31bf1d5a37b841b09e3c5a08ec2bffca0e07fc2ad7d5c4a"}, -] - -[package.dependencies] -pydantic = ">=2.5.2" - -[package.extras] -all = ["pendulum (>=3.0.0,<4.0.0)", "phonenumbers (>=8,<9)", "pycountry (>=23)", "python-ulid (>=1,<2)", "python-ulid (>=1,<3)"] -pendulum = ["pendulum (>=3.0.0,<4.0.0)"] -phonenumbers = ["phonenumbers (>=8,<9)"] -pycountry = ["pycountry (>=23)"] -python-ulid = ["python-ulid (>=1,<2)", "python-ulid (>=1,<3)"] - -[[package]] -name = "pydantic-settings" -version = "2.3.3" -description = "Settings management using Pydantic" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_settings-2.3.3-py3-none-any.whl", hash = "sha256:e4ed62ad851670975ec11285141db888fd24947f9440bd4380d7d8788d4965de"}, - {file = "pydantic_settings-2.3.3.tar.gz", hash = "sha256:87fda838b64b5039b970cd47c3e8a1ee460ce136278ff672980af21516f6e6ce"}, -] - -[package.dependencies] -pydantic = ">=2.7.0" -python-dotenv = ">=0.21.0" - -[package.extras] -toml = ["tomli (>=2.0.1)"] -yaml = ["pyyaml (>=6.0.1)"] - -[[package]] -name = "pydantic-validators" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.6.1,<4.0.0" -files = [ - {file = "pydantic-validators-0.1.0.tar.gz", hash = "sha256:1ce97dae12d3e7577051b473e864deee7cae2abc10fcdb5c489709093b0e1de2"}, - {file = "pydantic_validators-0.1.0-py3-none-any.whl", hash = "sha256:d2be33d1c7ff8974b59603c80d7265c71a7a23b321658ea593ee09bda7a7438f"}, -] - -[[package]] -name = "pygments" -version = "2.18.0" -description = "Pygments is a syntax highlighting package written in Python." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pyright" -version = "1.1.368" -description = "Command line wrapper for pyright" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyright-1.1.368-py3-none-any.whl", hash = "sha256:4a86e34b61c755b43b367af7fbf927fc6466fff6b81a9dcea07d42416c640af3"}, - {file = "pyright-1.1.368.tar.gz", hash = "sha256:9b2aa48142d9d9fc9a6aedff743c76873cc4e615f3297cdbf893d5793f75b306"}, -] - -[package.dependencies] -nodeenv = ">=1.6.0" - -[package.extras] -all = ["twine (>=3.4.1)"] -dev = ["twine (>=3.4.1)"] - -[[package]] -name = "pytest" -version = "8.2.2" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, - {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=1.5,<2.0" - -[package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-alembic" -version = "0.11.0" -description = "A pytest plugin for verifying alembic migrations." 
-optional = false -python-versions = ">=3.6,<4" -files = [ - {file = "pytest_alembic-0.11.0-py3-none-any.whl", hash = "sha256:bce543dae00eec8b69b283105d48e0b3e1cc31ca721b0a8c46cb4710e7aa0ec8"}, - {file = "pytest_alembic-0.11.0.tar.gz", hash = "sha256:4f4237c06598226b2bc58df8154a8aed29f7efadd7157ee3a946f81585532627"}, -] - -[package.dependencies] -alembic = "*" -pytest = ">=6.0" -sqlalchemy = "*" - -[[package]] -name = "pytest-celery" -version = "1.0.0" -description = "Pytest plugin for Celery" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "pytest_celery-1.0.0-py3-none-any.whl", hash = "sha256:c10bc7d16daa3ae4a5784efcbd1855d610c0e087c21d185e52fa018b3a6c4249"}, - {file = "pytest_celery-1.0.0.tar.gz", hash = "sha256:17a066b1554d4fa8797d4928e8b8cda1bfb441dae4688ca29fdbde28ffa49ff7"}, -] - -[package.dependencies] -celery = "*" -debugpy = ">=1.8.1,<2.0.0" -docker = ">=7.0.0,<8.0.0" -psutil = ">=5.9.7" -pytest-docker-tools = ">=3.1.3" -python-memcached = {version = "*", optional = true, markers = "extra == \"all\" or extra == \"memcached\""} -redis = {version = "*", optional = true, markers = "extra == \"all\" or extra == \"redis\""} -retry = ">=0.9.2" -setuptools = ">=69.1.0" - -[package.extras] -all = ["python-memcached", "redis"] -memcached = ["python-memcached"] -redis = ["redis"] - -[[package]] -name = "pytest-cov" -version = "4.1.0" -description = "Pytest plugin for measuring coverage." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, - {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, -] - -[package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] - -[[package]] -name = "pytest-docker-tools" -version = "3.1.3" -description = "Docker integration tests for pytest" -optional = false -python-versions = ">=3.7.0,<4.0.0" -files = [ - {file = "pytest_docker_tools-3.1.3-py3-none-any.whl", hash = "sha256:63e659043160f41d89f94ea42616102594bcc85682aac394fcbc14f14cd1b189"}, - {file = "pytest_docker_tools-3.1.3.tar.gz", hash = "sha256:c7e28841839d67b3ac80ad7b345b953701d5ae61ffda97586114244292aeacc0"}, -] - -[package.dependencies] -docker = ">=4.3.1" -pytest = ">=6.0.1" - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-dotenv" -version = "1.0.1" -description = "Read key-value pairs from a .env file and set them as environment variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = 
"sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, -] - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "python-memcached" -version = "1.62" -description = "Pure python memcached client" -optional = false -python-versions = "*" -files = [ - {file = "python-memcached-1.62.tar.gz", hash = "sha256:0285470599b7f593fbf3bec084daa1f483221e68c1db2cf1d846a9f7c2655103"}, - {file = "python_memcached-1.62-py2.py3-none-any.whl", hash = "sha256:1bdd8d2393ff53e80cd5e9442d750e658e0b35c3eebb3211af137303e3b729d1"}, -] - -[[package]] -name = "python-multipart" -version = "0.0.9" -description = "A streaming multipart parser for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python_multipart-0.0.9-py3-none-any.whl", hash = "sha256:97ca7b8ea7b05f977dc3849c3ba99d51689822fab725c3703af7c866a0c2b215"}, - {file = "python_multipart-0.0.9.tar.gz", hash = "sha256:03f54688c663f1b7977105f021043b0793151e4cb1c1a9d4a11fc13d622c4026"}, -] - -[package.extras] -dev = ["atomicwrites (==1.4.1)", "attrs (==23.2.0)", "coverage (==7.4.1)", "hatch", "invoke (==2.2.0)", "more-itertools (==10.2.0)", "pbr (==6.0.0)", "pluggy (==1.4.0)", "py (==1.11.0)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.2.0)", "pyyaml (==6.0.1)", "ruff (==0.2.1)"] - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pywin32" -version = "306" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, 
- {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "pyzmq" -version = "26.0.3" -description = "Python bindings for 0MQ" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:44dd6fc3034f1eaa72ece33588867df9e006a7303725a12d64c3dff92330f625"}, - {file = 
"pyzmq-26.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acb704195a71ac5ea5ecf2811c9ee19ecdc62b91878528302dd0be1b9451cc90"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dbb9c997932473a27afa93954bb77a9f9b786b4ccf718d903f35da3232317de"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bcb34f869d431799c3ee7d516554797f7760cb2198ecaa89c3f176f72d062be"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ece17ec5f20d7d9b442e5174ae9f020365d01ba7c112205a4d59cf19dc38ee"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ba6e5e6588e49139a0979d03a7deb9c734bde647b9a8808f26acf9c547cab1bf"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3bf8b000a4e2967e6dfdd8656cd0757d18c7e5ce3d16339e550bd462f4857e59"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2136f64fbb86451dbbf70223635a468272dd20075f988a102bf8a3f194a411dc"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e8918973fbd34e7814f59143c5f600ecd38b8038161239fd1a3d33d5817a38b8"}, - {file = "pyzmq-26.0.3-cp310-cp310-win32.whl", hash = "sha256:0aaf982e68a7ac284377d051c742610220fd06d330dcd4c4dbb4cdd77c22a537"}, - {file = "pyzmq-26.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:f1a9b7d00fdf60b4039f4455afd031fe85ee8305b019334b72dcf73c567edc47"}, - {file = "pyzmq-26.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:80b12f25d805a919d53efc0a5ad7c0c0326f13b4eae981a5d7b7cc343318ebb7"}, - {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:a72a84570f84c374b4c287183debc776dc319d3e8ce6b6a0041ce2e400de3f32"}, - {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ca684ee649b55fd8f378127ac8462fb6c85f251c2fb027eb3c887e8ee347bcd"}, - {file = 
"pyzmq-26.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e222562dc0f38571c8b1ffdae9d7adb866363134299264a1958d077800b193b7"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17cde1db0754c35a91ac00b22b25c11da6eec5746431d6e5092f0cd31a3fea9"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7c0c0b3244bb2275abe255d4a30c050d541c6cb18b870975553f1fb6f37527"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ac97a21de3712afe6a6c071abfad40a6224fd14fa6ff0ff8d0c6e6cd4e2f807a"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:88b88282e55fa39dd556d7fc04160bcf39dea015f78e0cecec8ff4f06c1fc2b5"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:72b67f966b57dbd18dcc7efbc1c7fc9f5f983e572db1877081f075004614fcdd"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4b6cecbbf3b7380f3b61de3a7b93cb721125dc125c854c14ddc91225ba52f83"}, - {file = "pyzmq-26.0.3-cp311-cp311-win32.whl", hash = "sha256:eed56b6a39216d31ff8cd2f1d048b5bf1700e4b32a01b14379c3b6dde9ce3aa3"}, - {file = "pyzmq-26.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:3191d312c73e3cfd0f0afdf51df8405aafeb0bad71e7ed8f68b24b63c4f36500"}, - {file = "pyzmq-26.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:b6907da3017ef55139cf0e417c5123a84c7332520e73a6902ff1f79046cd3b94"}, - {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:068ca17214038ae986d68f4a7021f97e187ed278ab6dccb79f837d765a54d753"}, - {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7821d44fe07335bea256b9f1f41474a642ca55fa671dfd9f00af8d68a920c2d4"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeb438a26d87c123bb318e5f2b3d86a36060b01f22fbdffd8cf247d52f7c9a2b"}, - {file = 
"pyzmq-26.0.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69ea9d6d9baa25a4dc9cef5e2b77b8537827b122214f210dd925132e34ae9b12"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7daa3e1369355766dea11f1d8ef829905c3b9da886ea3152788dc25ee6079e02"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6ca7a9a06b52d0e38ccf6bca1aeff7be178917893f3883f37b75589d42c4ac20"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1b7d0e124948daa4d9686d421ef5087c0516bc6179fdcf8828b8444f8e461a77"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e746524418b70f38550f2190eeee834db8850088c834d4c8406fbb9bc1ae10b2"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6b3146f9ae6af82c47a5282ac8803523d381b3b21caeae0327ed2f7ecb718798"}, - {file = "pyzmq-26.0.3-cp312-cp312-win32.whl", hash = "sha256:2b291d1230845871c00c8462c50565a9cd6026fe1228e77ca934470bb7d70ea0"}, - {file = "pyzmq-26.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:926838a535c2c1ea21c903f909a9a54e675c2126728c21381a94ddf37c3cbddf"}, - {file = "pyzmq-26.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:5bf6c237f8c681dfb91b17f8435b2735951f0d1fad10cc5dfd96db110243370b"}, - {file = "pyzmq-26.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c0991f5a96a8e620f7691e61178cd8f457b49e17b7d9cfa2067e2a0a89fc1d5"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dbf012d8fcb9f2cf0643b65df3b355fdd74fc0035d70bb5c845e9e30a3a4654b"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:01fbfbeb8249a68d257f601deb50c70c929dc2dfe683b754659569e502fbd3aa"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c8eb19abe87029c18f226d42b8a2c9efdd139d08f8bf6e085dd9075446db450"}, - {file = 
"pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5344b896e79800af86ad643408ca9aa303a017f6ebff8cee5a3163c1e9aec987"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:204e0f176fd1d067671157d049466869b3ae1fc51e354708b0dc41cf94e23a3a"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a42db008d58530efa3b881eeee4991146de0b790e095f7ae43ba5cc612decbc5"}, - {file = "pyzmq-26.0.3-cp37-cp37m-win32.whl", hash = "sha256:8d7a498671ca87e32b54cb47c82a92b40130a26c5197d392720a1bce1b3c77cf"}, - {file = "pyzmq-26.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:3b4032a96410bdc760061b14ed6a33613ffb7f702181ba999df5d16fb96ba16a"}, - {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2cc4e280098c1b192c42a849de8de2c8e0f3a84086a76ec5b07bfee29bda7d18"}, - {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bde86a2ed3ce587fa2b207424ce15b9a83a9fa14422dcc1c5356a13aed3df9d"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:34106f68e20e6ff253c9f596ea50397dbd8699828d55e8fa18bd4323d8d966e6"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ebbbd0e728af5db9b04e56389e2299a57ea8b9dd15c9759153ee2455b32be6ad"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6b1d1c631e5940cac5a0b22c5379c86e8df6a4ec277c7a856b714021ab6cfad"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e891ce81edd463b3b4c3b885c5603c00141151dd9c6936d98a680c8c72fe5c67"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9b273ecfbc590a1b98f014ae41e5cf723932f3b53ba9367cfb676f838038b32c"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b32bff85fb02a75ea0b68f21e2412255b5731f3f389ed9aecc13a6752f58ac97"}, - {file = "pyzmq-26.0.3-cp38-cp38-win32.whl", hash = 
"sha256:f6c21c00478a7bea93caaaef9e7629145d4153b15a8653e8bb4609d4bc70dbfc"}, - {file = "pyzmq-26.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:3401613148d93ef0fd9aabdbddb212de3db7a4475367f49f590c837355343972"}, - {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:2ed8357f4c6e0daa4f3baf31832df8a33334e0fe5b020a61bc8b345a3db7a606"}, - {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1c8f2a2ca45292084c75bb6d3a25545cff0ed931ed228d3a1810ae3758f975f"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b63731993cdddcc8e087c64e9cf003f909262b359110070183d7f3025d1c56b5"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b3cd31f859b662ac5d7f4226ec7d8bd60384fa037fc02aee6ff0b53ba29a3ba8"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:115f8359402fa527cf47708d6f8a0f8234f0e9ca0cab7c18c9c189c194dbf620"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:715bdf952b9533ba13dfcf1f431a8f49e63cecc31d91d007bc1deb914f47d0e4"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e1258c639e00bf5e8a522fec6c3eaa3e30cf1c23a2f21a586be7e04d50c9acab"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:15c59e780be8f30a60816a9adab900c12a58d79c1ac742b4a8df044ab2a6d920"}, - {file = "pyzmq-26.0.3-cp39-cp39-win32.whl", hash = "sha256:d0cdde3c78d8ab5b46595054e5def32a755fc028685add5ddc7403e9f6de9879"}, - {file = "pyzmq-26.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:ce828058d482ef860746bf532822842e0ff484e27f540ef5c813d516dd8896d2"}, - {file = "pyzmq-26.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:788f15721c64109cf720791714dc14afd0f449d63f3a5487724f024345067381"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c18645ef6294d99b256806e34653e86236eb266278c8ec8112622b61db255de"}, - {file = 
"pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e6bc96ebe49604df3ec2c6389cc3876cabe475e6bfc84ced1bf4e630662cb35"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:971e8990c5cc4ddcff26e149398fc7b0f6a042306e82500f5e8db3b10ce69f84"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8416c23161abd94cc7da80c734ad7c9f5dbebdadfdaa77dad78244457448223"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:082a2988364b60bb5de809373098361cf1dbb239623e39e46cb18bc035ed9c0c"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d57dfbf9737763b3a60d26e6800e02e04284926329aee8fb01049635e957fe81"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:77a85dca4c2430ac04dc2a2185c2deb3858a34fe7f403d0a946fa56970cf60a1"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c82a6d952a1d555bf4be42b6532927d2a5686dd3c3e280e5f63225ab47ac1f5"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4496b1282c70c442809fc1b151977c3d967bfb33e4e17cedbf226d97de18f709"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e4946d6bdb7ba972dfda282f9127e5756d4f299028b1566d1245fa0d438847e6"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:03c0ae165e700364b266876d712acb1ac02693acd920afa67da2ebb91a0b3c09"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3e3070e680f79887d60feeda051a58d0ac36622e1759f305a41059eff62c6da7"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6ca08b840fe95d1c2bd9ab92dac5685f949fc6f9ae820ec16193e5ddf603c3b2"}, - {file = 
"pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e76654e9dbfb835b3518f9938e565c7806976c07b37c33526b574cc1a1050480"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:871587bdadd1075b112e697173e946a07d722459d20716ceb3d1bd6c64bd08ce"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d0a2d1bd63a4ad79483049b26514e70fa618ce6115220da9efdff63688808b17"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0270b49b6847f0d106d64b5086e9ad5dc8a902413b5dbbb15d12b60f9c1747a4"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:703c60b9910488d3d0954ca585c34f541e506a091a41930e663a098d3b794c67"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74423631b6be371edfbf7eabb02ab995c2563fee60a80a30829176842e71722a"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4adfbb5451196842a88fda3612e2c0414134874bffb1c2ce83ab4242ec9e027d"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3516119f4f9b8671083a70b6afaa0a070f5683e431ab3dc26e9215620d7ca1ad"}, - {file = "pyzmq-26.0.3.tar.gz", hash = "sha256:dba7d9f2e047dfa2bca3b01f4f84aa5246725203d6284e3790f2ca15fba6b40a"}, -] - -[package.dependencies] -cffi = {version = "*", markers = "implementation_name == \"pypy\""} - -[[package]] -name = "redis" -version = "5.0.6" -description = "Python client for Redis database and key-value store" -optional = false -python-versions = ">=3.7" -files = [ - {file = "redis-5.0.6-py3-none-any.whl", hash = "sha256:c0d6d990850c627bbf7be01c5c4cbaadf67b48593e913bb71c9819c30df37eee"}, - {file = "redis-5.0.6.tar.gz", hash = "sha256:38473cd7c6389ad3e44a91f4c3eaf6bcb8a9f746007f29bf4fb20824ff0b2197"}, -] - -[package.extras] -hiredis = ["hiredis (>=1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl 
(==20.0.1)", "requests (>=2.26.0)"] - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "retry" -version = "0.9.2" -description = "Easy to use retry decorator." -optional = false -python-versions = "*" -files = [ - {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, - {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"}, -] - -[package.dependencies] -decorator = ">=3.4.2" -py = ">=1.4.26,<2.0.0" - -[[package]] -name = "ruff" -version = "0.3.7" -description = "An extremely fast Python linter and code formatter, written in Rust." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, - {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, - {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, - {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, - {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, - {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash 
= "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, - {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, - {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, - {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, - {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, -] - -[[package]] -name = "setuptools" -version = "70.1.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-70.1.0-py3-none-any.whl", hash = "sha256:d9b8b771455a97c8a9f3ab3448ebe0b29b5e105f1228bba41028be116985a267"}, - {file = "setuptools-70.1.0.tar.gz", hash = "sha256:01a1e793faa5bd89abc851fa15d0a0db26f160890c7102cd8dce643e886b47f5"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "six" -version = 
"1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "sqlalchemy" -version = "2.0.31" -description = "Database Abstraction Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2a213c1b699d3f5768a7272de720387ae0122f1becf0901ed6eaa1abd1baf6c"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9fea3d0884e82d1e33226935dac990b967bef21315cbcc894605db3441347443"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ad7f221d8a69d32d197e5968d798217a4feebe30144986af71ada8c548e9fa"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2bee229715b6366f86a95d497c347c22ddffa2c7c96143b59a2aa5cc9eebbc"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cd5b94d4819c0c89280b7c6109c7b788a576084bf0a480ae17c227b0bc41e109"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:750900a471d39a7eeba57580b11983030517a1f512c2cb287d5ad0fcf3aebd58"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-win32.whl", hash = 
"sha256:7bd112be780928c7f493c1a192cd8c5fc2a2a7b52b790bc5a84203fb4381c6be"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:5a48ac4d359f058474fadc2115f78a5cdac9988d4f99eae44917f36aa1476327"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f68470edd70c3ac3b6cd5c2a22a8daf18415203ca1b036aaeb9b0fb6f54e8298"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e2c38c2a4c5c634fe6c3c58a789712719fa1bf9b9d6ff5ebfce9a9e5b89c1ca"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd15026f77420eb2b324dcb93551ad9c5f22fab2c150c286ef1dc1160f110203"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2196208432deebdfe3b22185d46b08f00ac9d7b01284e168c212919891289396"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:352b2770097f41bff6029b280c0e03b217c2dcaddc40726f8f53ed58d8a85da4"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56d51ae825d20d604583f82c9527d285e9e6d14f9a5516463d9705dab20c3740"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-win32.whl", hash = "sha256:6e2622844551945db81c26a02f27d94145b561f9d4b0c39ce7bfd2fda5776dac"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-win_amd64.whl", hash = "sha256:ccaf1b0c90435b6e430f5dd30a5aede4764942a695552eb3a4ab74ed63c5b8d3"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3b74570d99126992d4b0f91fb87c586a574a5872651185de8297c6f90055ae42"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f77c4f042ad493cb8595e2f503c7a4fe44cd7bd59c7582fd6d78d7e7b8ec52c"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd1591329333daf94467e699e11015d9c944f44c94d2091f4ac493ced0119449"}, - {file = 
"SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74afabeeff415e35525bf7a4ecdab015f00e06456166a2eba7590e49f8db940e"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b9c01990d9015df2c6f818aa8f4297d42ee71c9502026bb074e713d496e26b67"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66f63278db425838b3c2b1c596654b31939427016ba030e951b292e32b99553e"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-win32.whl", hash = "sha256:0b0f658414ee4e4b8cbcd4a9bb0fd743c5eeb81fc858ca517217a8013d282c96"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-win_amd64.whl", hash = "sha256:fa4b1af3e619b5b0b435e333f3967612db06351217c58bfb50cee5f003db2a5a"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f43e93057cf52a227eda401251c72b6fbe4756f35fa6bfebb5d73b86881e59b0"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d337bf94052856d1b330d5fcad44582a30c532a2463776e1651bd3294ee7e58b"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c06fb43a51ccdff3b4006aafee9fcf15f63f23c580675f7734245ceb6b6a9e05"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:b6e22630e89f0e8c12332b2b4c282cb01cf4da0d26795b7eae16702a608e7ca1"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:79a40771363c5e9f3a77f0e28b3302801db08040928146e6808b5b7a40749c88"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-win32.whl", hash = "sha256:501ff052229cb79dd4c49c402f6cb03b5a40ae4771efc8bb2bfac9f6c3d3508f"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-win_amd64.whl", hash = "sha256:597fec37c382a5442ffd471f66ce12d07d91b281fd474289356b1a0041bdf31d"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dc6d69f8829712a4fd799d2ac8d79bdeff651c2301b081fd5d3fe697bd5b4ab9"}, - {file = 
"SQLAlchemy-2.0.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23b9fbb2f5dd9e630db70fbe47d963c7779e9c81830869bd7d137c2dc1ad05fb"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21c97efcbb9f255d5c12a96ae14da873233597dfd00a3a0c4ce5b3e5e79704"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26a6a9837589c42b16693cf7bf836f5d42218f44d198f9343dd71d3164ceeeac"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc251477eae03c20fae8db9c1c23ea2ebc47331bcd73927cdcaecd02af98d3c3"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2fd17e3bb8058359fa61248c52c7b09a97cf3c820e54207a50af529876451808"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-win32.whl", hash = "sha256:c76c81c52e1e08f12f4b6a07af2b96b9b15ea67ccdd40ae17019f1c373faa227"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-win_amd64.whl", hash = "sha256:4b600e9a212ed59355813becbcf282cfda5c93678e15c25a0ef896b354423238"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b6cf796d9fcc9b37011d3f9936189b3c8074a02a4ed0c0fbbc126772c31a6d4"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78fe11dbe37d92667c2c6e74379f75746dc947ee505555a0197cfba9a6d4f1a4"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc47dc6185a83c8100b37acda27658fe4dbd33b7d5e7324111f6521008ab4fe"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a41514c1a779e2aa9a19f67aaadeb5cbddf0b2b508843fcd7bafdf4c6864005"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:afb6dde6c11ea4525318e279cd93c8734b795ac8bb5dda0eedd9ebaca7fa23f1"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3f9faef422cfbb8fd53716cd14ba95e2ef655400235c3dfad1b5f467ba179c8c"}, - {file = 
"SQLAlchemy-2.0.31-cp39-cp39-win32.whl", hash = "sha256:fc6b14e8602f59c6ba893980bea96571dd0ed83d8ebb9c4479d9ed5425d562e9"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-win_amd64.whl", hash = "sha256:3cb8a66b167b033ec72c3812ffc8441d4e9f5f78f5e31e54dcd4c90a4ca5bebc"}, - {file = "SQLAlchemy-2.0.31-py3-none-any.whl", hash = "sha256:69f3e3c08867a8e4856e92d7afb618b95cdee18e0bc1647b77599722c9a28911"}, - {file = "SQLAlchemy-2.0.31.tar.gz", hash = "sha256:b607489dd4a54de56984a0c7656247504bd5523d9d0ba799aef59d4add009484"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} -typing-extensions = ">=4.6.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "sqlalchemy-mixins" -version = "2.0.5" -description = "Active Record, 
Django-like queries, nested eager load and beauty __repr__ for SQLAlchemy" -optional = false -python-versions = "*" -files = [ - {file = "sqlalchemy_mixins-2.0.5-py3-none-any.whl", hash = "sha256:9067b630744741b472aa91d92494cc5612ed2d29c66729a5a4a1d3fbbeccd448"}, - {file = "sqlalchemy_mixins-2.0.5.tar.gz", hash = "sha256:85197fc3682c4bf9c35671fb3d10282a0973b19cd2ff2b6791d601cbfb0fb89e"}, -] - -[package.dependencies] -six = "*" -SQLAlchemy = ">=2.0" - -[[package]] -name = "sqlalchemy-utils" -version = "0.41.2" -description = "Various utility functions for SQLAlchemy." -optional = false -python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-Utils-0.41.2.tar.gz", hash = "sha256:bc599c8c3b3319e53ce6c5c3c471120bd325d0071fb6f38a10e924e3d07b9990"}, - {file = "SQLAlchemy_Utils-0.41.2-py3-none-any.whl", hash = "sha256:85cf3842da2bf060760f955f8467b87983fb2e30f1764fd0e24a48307dc8ec6e"}, -] - -[package.dependencies] -SQLAlchemy = ">=1.3" - -[package.extras] -arrow = ["arrow (>=0.3.4)"] -babel = ["Babel (>=1.3)"] -color = ["colour (>=0.0.4)"] -encrypted = ["cryptography (>=0.6)"] -intervals = ["intervals (>=0.7.1)"] -password = ["passlib (>=1.6,<2.0)"] -pendulum = ["pendulum (>=2.0.5)"] -phone = ["phonenumbers (>=5.9.2)"] -test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] -test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", 
"pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] -timezone = ["python-dateutil"] -url = ["furl (>=0.4.1)"] - -[[package]] -name = "stack-data" -version = "0.6.3" -description = "Extract data from python stack frames and tracebacks for informative displays" -optional = false -python-versions = "*" -files = [ - {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, - {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, -] - -[package.dependencies] -asttokens = ">=2.1.0" -executing = ">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - -[[package]] -name = "starlette" -version = "0.36.3" -description = "The little ASGI library that shines." -optional = false -python-versions = ">=3.8" -files = [ - {file = "starlette-0.36.3-py3-none-any.whl", hash = "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044"}, - {file = "starlette-0.36.3.tar.gz", hash = "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080"}, -] - -[package.dependencies] -anyio = ">=3.4.0,<5" - -[package.extras] -full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] - -[[package]] -name = "tornado" -version = "6.4.1" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, - {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, - {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, - {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, -] - -[[package]] -name = "traitlets" -version = "5.14.3" -description = "Traitlets Python configuration system" -optional = false -python-versions = ">=3.8" -files = [ - {file = "traitlets-5.14.3-py3-none-any.whl", hash = 
"sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, - {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, -] - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] - -[[package]] -name = "types-pytz" -version = "2024.1.0.20240417" -description = "Typing stubs for pytz" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-pytz-2024.1.0.20240417.tar.gz", hash = "sha256:6810c8a1f68f21fdf0f4f374a432487c77645a0ac0b31de4bf4690cf21ad3981"}, - {file = "types_pytz-2024.1.0.20240417-py3-none-any.whl", hash = "sha256:8335d443310e2db7b74e007414e74c4f53b67452c0cb0d228ca359ccfba59659"}, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - -[[package]] -name = "ujson" -version = "5.10.0" -description = "Ultra fast JSON encoder and decoder for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, - {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51"}, - {file = "ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518"}, - {file = "ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f"}, - {file = "ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00"}, - {file = "ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126"}, - {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8"}, - {file = 
"ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b"}, - {file = "ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1"}, - {file = "ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f"}, - {file = "ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720"}, - {file = "ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5"}, - {file = "ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e"}, - {file = "ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e"}, - {file = "ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc"}, - {file = "ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287"}, - {file = "ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f"}, - {file = "ujson-5.10.0-cp313-cp313-win32.whl", hash = 
"sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165"}, - {file = "ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539"}, - {file = "ujson-5.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a984a3131da7f07563057db1c3020b1350a3e27a8ec46ccbfbf21e5928a43050"}, - {file = "ujson-5.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73814cd1b9db6fc3270e9d8fe3b19f9f89e78ee9d71e8bd6c9a626aeaeaf16bd"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61e1591ed9376e5eddda202ec229eddc56c612b61ac6ad07f96b91460bb6c2fb"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2c75269f8205b2690db4572a4a36fe47cd1338e4368bc73a7a0e48789e2e35a"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7223f41e5bf1f919cd8d073e35b229295aa8e0f7b5de07ed1c8fddac63a6bc5d"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d4dc2fd6b3067c0782e7002ac3b38cf48608ee6366ff176bbd02cf969c9c20fe"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:232cc85f8ee3c454c115455195a205074a56ff42608fd6b942aa4c378ac14dd7"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc6139531f13148055d691e442e4bc6601f6dba1e6d521b1585d4788ab0bfad4"}, - {file = "ujson-5.10.0-cp38-cp38-win32.whl", hash = "sha256:e7ce306a42b6b93ca47ac4a3b96683ca554f6d35dd8adc5acfcd55096c8dfcb8"}, - {file = "ujson-5.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:e82d4bb2138ab05e18f089a83b6564fee28048771eb63cdecf4b9b549de8a2cc"}, - {file = "ujson-5.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b"}, - {file = "ujson-5.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996"}, - {file = "ujson-5.10.0-cp39-cp39-win32.whl", hash = "sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9"}, - {file = "ujson-5.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746"}, - {file = 
"ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7663960f08cd5a2bb152f5ee3992e1af7690a64c0e26d31ba7b3ff5b2ee66337"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8640fb4072d36b08e95a3a380ba65779d356b2fee8696afeb7794cf0902d0a1"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78778a3aa7aafb11e7ddca4e29f46bc5139131037ad628cc10936764282d6753"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0111b27f2d5c820e7f2dbad7d48e3338c824e7ac4d2a12da3dc6061cc39c8e6"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c66962ca7565605b355a9ed478292da628b8f18c0f2793021ca4425abf8b01e5"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7"}, - {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, -] - -[[package]] -name = "urllib3" -version = "2.2.2" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "uvicorn" -version = "0.30.1" -description = "The lightning-fast ASGI server." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "uvicorn-0.30.1-py3-none-any.whl", hash = "sha256:cd17daa7f3b9d7a24de3617820e634d0933b69eed8e33a516071174427238c81"}, - {file = "uvicorn-0.30.1.tar.gz", hash = "sha256:d46cd8e0fd80240baffbcd9ec1012a712938754afcf81bce56c024c1656aece8"}, -] - -[package.dependencies] -click = ">=7.0" -colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} -h11 = ">=0.8" -httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} -python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} -pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} -watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} -websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} - -[package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] - -[[package]] -name = "uvloop" -version = "0.19.0" -description = "Fast implementation of asyncio event loop on top of libuv" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, - {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, - {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, - 
{file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, - {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, - {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, - {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, - {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, - {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, - {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, - {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, - {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, - {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, - {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, - {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, - {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, - {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, - {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, - {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, - {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, - {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, - {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, - {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, - {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, - {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, -] - -[package.extras] -docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] - -[[package]] -name = "vine" -version = "5.1.0" -description = "Python promises." -optional = false -python-versions = ">=3.6" -files = [ - {file = "vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc"}, - {file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"}, -] - -[[package]] -name = "virtualenv" -version = "20.26.2" -description = "Virtual Python Environment builder" -optional = false -python-versions = ">=3.7" -files = [ - {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"}, - {file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"}, -] - -[package.dependencies] -distlib = ">=0.3.7,<1" -filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<5" - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", 
"pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] - -[[package]] -name = "watchdog" -version = "4.0.1" -description = "Filesystem events monitoring" -optional = false -python-versions = ">=3.8" -files = [ - {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"}, - {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"}, - {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"}, - {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, - {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"}, - 
{file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"}, - {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"}, - {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"}, - {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"}, - {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"}, -] - -[package.extras] -watchmedo = ["PyYAML (>=3.10)"] - -[[package]] -name = "watchfiles" -version = "0.22.0" -description = "Simple, modern and high performance file watching and code reload in python." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "watchfiles-0.22.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:da1e0a8caebf17976e2ffd00fa15f258e14749db5e014660f53114b676e68538"}, - {file = "watchfiles-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61af9efa0733dc4ca462347becb82e8ef4945aba5135b1638bfc20fad64d4f0e"}, - {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d9188979a58a096b6f8090e816ccc3f255f137a009dd4bbec628e27696d67c1"}, - {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2bdadf6b90c099ca079d468f976fd50062905d61fae183f769637cb0f68ba59a"}, - {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:067dea90c43bf837d41e72e546196e674f68c23702d3ef80e4e816937b0a3ffd"}, - {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbf8a20266136507abf88b0df2328e6a9a7c7309e8daff124dda3803306a9fdb"}, - {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1235c11510ea557fe21be5d0e354bae2c655a8ee6519c94617fe63e05bca4171"}, - {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2444dc7cb9d8cc5ab88ebe792a8d75709d96eeef47f4c8fccb6df7c7bc5be71"}, - {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c5af2347d17ab0bd59366db8752d9e037982e259cacb2ba06f2c41c08af02c39"}, - {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9624a68b96c878c10437199d9a8b7d7e542feddda8d5ecff58fdc8e67b460848"}, - {file = "watchfiles-0.22.0-cp310-none-win32.whl", hash = "sha256:4b9f2a128a32a2c273d63eb1fdbf49ad64852fc38d15b34eaa3f7ca2f0d2b797"}, - {file = "watchfiles-0.22.0-cp310-none-win_amd64.whl", hash = "sha256:2627a91e8110b8de2406d8b2474427c86f5a62bf7d9ab3654f541f319ef22bcb"}, - {file = 
"watchfiles-0.22.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8c39987a1397a877217be1ac0fb1d8b9f662c6077b90ff3de2c05f235e6a8f96"}, - {file = "watchfiles-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a927b3034d0672f62fb2ef7ea3c9fc76d063c4b15ea852d1db2dc75fe2c09696"}, - {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052d668a167e9fc345c24203b104c313c86654dd6c0feb4b8a6dfc2462239249"}, - {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e45fb0d70dda1623a7045bd00c9e036e6f1f6a85e4ef2c8ae602b1dfadf7550"}, - {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c49b76a78c156979759d759339fb62eb0549515acfe4fd18bb151cc07366629c"}, - {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4a65474fd2b4c63e2c18ac67a0c6c66b82f4e73e2e4d940f837ed3d2fd9d4da"}, - {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc0cba54f47c660d9fa3218158b8963c517ed23bd9f45fe463f08262a4adae1"}, - {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ebe84a035993bb7668f58a0ebf998174fb723a39e4ef9fce95baabb42b787f"}, - {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e0f0a874231e2839abbf473256efffe577d6ee2e3bfa5b540479e892e47c172d"}, - {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:213792c2cd3150b903e6e7884d40660e0bcec4465e00563a5fc03f30ea9c166c"}, - {file = "watchfiles-0.22.0-cp311-none-win32.whl", hash = "sha256:b44b70850f0073b5fcc0b31ede8b4e736860d70e2dbf55701e05d3227a154a67"}, - {file = "watchfiles-0.22.0-cp311-none-win_amd64.whl", hash = "sha256:00f39592cdd124b4ec5ed0b1edfae091567c72c7da1487ae645426d1b0ffcad1"}, - {file = "watchfiles-0.22.0-cp311-none-win_arm64.whl", hash = 
"sha256:3218a6f908f6a276941422b035b511b6d0d8328edd89a53ae8c65be139073f84"}, - {file = "watchfiles-0.22.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c7b978c384e29d6c7372209cbf421d82286a807bbcdeb315427687f8371c340a"}, - {file = "watchfiles-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd4c06100bce70a20c4b81e599e5886cf504c9532951df65ad1133e508bf20be"}, - {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:425440e55cd735386ec7925f64d5dde392e69979d4c8459f6bb4e920210407f2"}, - {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68fe0c4d22332d7ce53ad094622b27e67440dacefbaedd29e0794d26e247280c"}, - {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8a31bfd98f846c3c284ba694c6365620b637debdd36e46e1859c897123aa232"}, - {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc2e8fe41f3cac0660197d95216c42910c2b7e9c70d48e6d84e22f577d106fc1"}, - {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b7cc10261c2786c41d9207193a85c1db1b725cf87936df40972aab466179b6"}, - {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28585744c931576e535860eaf3f2c0ec7deb68e3b9c5a85ca566d69d36d8dd27"}, - {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00095dd368f73f8f1c3a7982a9801190cc88a2f3582dd395b289294f8975172b"}, - {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:52fc9b0dbf54d43301a19b236b4a4614e610605f95e8c3f0f65c3a456ffd7d35"}, - {file = "watchfiles-0.22.0-cp312-none-win32.whl", hash = "sha256:581f0a051ba7bafd03e17127735d92f4d286af941dacf94bcf823b101366249e"}, - {file = "watchfiles-0.22.0-cp312-none-win_amd64.whl", hash = "sha256:aec83c3ba24c723eac14225194b862af176d52292d271c98820199110e31141e"}, - {file = 
"watchfiles-0.22.0-cp312-none-win_arm64.whl", hash = "sha256:c668228833c5619f6618699a2c12be057711b0ea6396aeaece4ded94184304ea"}, - {file = "watchfiles-0.22.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d47e9ef1a94cc7a536039e46738e17cce058ac1593b2eccdede8bf72e45f372a"}, - {file = "watchfiles-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28f393c1194b6eaadcdd8f941307fc9bbd7eb567995232c830f6aef38e8a6e88"}, - {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd64f3a4db121bc161644c9e10a9acdb836853155a108c2446db2f5ae1778c3d"}, - {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2abeb79209630da981f8ebca30a2c84b4c3516a214451bfc5f106723c5f45843"}, - {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cc382083afba7918e32d5ef12321421ef43d685b9a67cc452a6e6e18920890e"}, - {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d048ad5d25b363ba1d19f92dcf29023988524bee6f9d952130b316c5802069cb"}, - {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:103622865599f8082f03af4214eaff90e2426edff5e8522c8f9e93dc17caee13"}, - {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e1f3cf81f1f823e7874ae563457828e940d75573c8fbf0ee66818c8b6a9099"}, - {file = "watchfiles-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8597b6f9dc410bdafc8bb362dac1cbc9b4684a8310e16b1ff5eee8725d13dcd6"}, - {file = "watchfiles-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b04a2cbc30e110303baa6d3ddce8ca3664bc3403be0f0ad513d1843a41c97d1"}, - {file = "watchfiles-0.22.0-cp38-none-win32.whl", hash = "sha256:b610fb5e27825b570554d01cec427b6620ce9bd21ff8ab775fc3a32f28bba63e"}, - {file = "watchfiles-0.22.0-cp38-none-win_amd64.whl", hash = 
"sha256:fe82d13461418ca5e5a808a9e40f79c1879351fcaeddbede094028e74d836e86"}, - {file = "watchfiles-0.22.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3973145235a38f73c61474d56ad6199124e7488822f3a4fc97c72009751ae3b0"}, - {file = "watchfiles-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:280a4afbc607cdfc9571b9904b03a478fc9f08bbeec382d648181c695648202f"}, - {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a0d883351a34c01bd53cfa75cd0292e3f7e268bacf2f9e33af4ecede7e21d1d"}, - {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9165bcab15f2b6d90eedc5c20a7f8a03156b3773e5fb06a790b54ccecdb73385"}, - {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc1b9b56f051209be458b87edb6856a449ad3f803315d87b2da4c93b43a6fe72"}, - {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dc1fc25a1dedf2dd952909c8e5cb210791e5f2d9bc5e0e8ebc28dd42fed7562"}, - {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc92d2d2706d2b862ce0568b24987eba51e17e14b79a1abcd2edc39e48e743c8"}, - {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97b94e14b88409c58cdf4a8eaf0e67dfd3ece7e9ce7140ea6ff48b0407a593ec"}, - {file = "watchfiles-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96eec15e5ea7c0b6eb5bfffe990fc7c6bd833acf7e26704eb18387fb2f5fd087"}, - {file = "watchfiles-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:28324d6b28bcb8d7c1041648d7b63be07a16db5510bea923fc80b91a2a6cbed6"}, - {file = "watchfiles-0.22.0-cp39-none-win32.whl", hash = "sha256:8c3e3675e6e39dc59b8fe5c914a19d30029e36e9f99468dddffd432d8a7b1c93"}, - {file = "watchfiles-0.22.0-cp39-none-win_amd64.whl", hash = "sha256:25c817ff2a86bc3de3ed2df1703e3d24ce03479b27bb4527c57e722f8554d971"}, - {file = 
"watchfiles-0.22.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b810a2c7878cbdecca12feae2c2ae8af59bea016a78bc353c184fa1e09f76b68"}, - {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7e1f9c5d1160d03b93fc4b68a0aeb82fe25563e12fbcdc8507f8434ab6f823c"}, - {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030bc4e68d14bcad2294ff68c1ed87215fbd9a10d9dea74e7cfe8a17869785ab"}, - {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace7d060432acde5532e26863e897ee684780337afb775107c0a90ae8dbccfd2"}, - {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5834e1f8b71476a26df97d121c0c0ed3549d869124ed2433e02491553cb468c2"}, - {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0bc3b2f93a140df6806c8467c7f51ed5e55a931b031b5c2d7ff6132292e803d6"}, - {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fdebb655bb1ba0122402352b0a4254812717a017d2dc49372a1d47e24073795"}, - {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c8e0aa0e8cc2a43561e0184c0513e291ca891db13a269d8d47cb9841ced7c71"}, - {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2f350cbaa4bb812314af5dab0eb8d538481e2e2279472890864547f3fe2281ed"}, - {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7a74436c415843af2a769b36bf043b6ccbc0f8d784814ba3d42fc961cdb0a9dc"}, - {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00ad0bcd399503a84cc688590cdffbe7a991691314dde5b57b3ed50a41319a31"}, - {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72a44e9481afc7a5ee3291b09c419abab93b7e9c306c9ef9108cb76728ca58d2"}, - 
{file = "watchfiles-0.22.0.tar.gz", hash = "sha256:988e981aaab4f3955209e7e28c7794acdb690be1efa7f16f8ea5aba7ffdadacb"}, -] - -[package.dependencies] -anyio = ">=3.0.0" - -[[package]] -name = "wcwidth" -version = "0.2.13" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - -[[package]] -name = "websockets" -version = "12.0" -description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, - {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, - {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, - {file = 
"websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, - {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, - {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, - {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, - {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, - {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, - {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, - {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, - {file = 
"websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, - {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, - {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, - {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, - {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, - {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, - {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, - {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, - {file = 
"websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, - {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, - {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, - {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, - {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, - 
{file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, - {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, - {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, - {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, -] - -[metadata] -lock-version = "2.0" -python-versions = "^3.12" -content-hash = "50b4bd1789fc4f925d9b9a9c0e8c8e7fae28b6ac490c5430e287b9b8faa6bde0" +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "aiofiles" +version = "23.2.1" +description = "File support for asyncio." +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiofiles-23.2.1-py3-none-any.whl", hash = "sha256:19297512c647d4b27a2cf7c34caa7e405c0d60b5560618a29a9fe027b18b0107"}, + {file = "aiofiles-23.2.1.tar.gz", hash = "sha256:84ec2218d8419404abcb9f0c02df3f34c6e0a68ed41072acfb1cef5cbc29051a"}, +] + +[[package]] +name = "alembic" +version = "1.13.2" +description = "A database migration tool for SQLAlchemy." +optional = false +python-versions = ">=3.8" +files = [ + {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, + {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["backports.zoneinfo"] + +[[package]] +name = "amqp" +version = "5.2.0" +description = "Low-level AMQP client for Python (fork of amqplib)." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "amqp-5.2.0-py3-none-any.whl", hash = "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637"}, + {file = "amqp-5.2.0.tar.gz", hash = "sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd"}, +] + +[package.dependencies] +vine = ">=5.0.0,<6.0.0" + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.4.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "appnope" +version = "0.1.4" +description = "Disable App Nap on macOS >= 10.9" +optional = false +python-versions = ">=3.6" +files = [ + {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, + {file = "appnope-0.1.4.tar.gz", hash = 
"sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, +] + +[[package]] +name = "asttokens" +version = "2.4.1" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = "*" +files = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + +[package.dependencies] +six = ">=1.12.0" + +[package.extras] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] + +[[package]] +name = "billiard" +version = "4.2.0" +description = "Python multiprocessing fork with improvements and bugfixes" +optional = false +python-versions = ">=3.7" +files = [ + {file = "billiard-4.2.0-py3-none-any.whl", hash = "sha256:07aa978b308f334ff8282bd4a746e681b3513db5c9a514cbdd810cbbdc19714d"}, + {file = "billiard-4.2.0.tar.gz", hash = "sha256:9a3c3184cb275aa17a732f93f65b20c525d3d9f253722d26a82194803ade5a2c"}, +] + +[[package]] +name = "black" +version = "24.8.0" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, + {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, + {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, + {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, + {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, + {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, + {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, + {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, + {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, + {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, + {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, + {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, + {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, + {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, + {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, + {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, + {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, + {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, + {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, + {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, + {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, + {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "boto3" +version = "1.35.19" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3-1.35.19-py3-none-any.whl", hash = 
"sha256:84b3fe1727945bc3cada832d969ddb3dc0d08fce1677064ca8bdc13a89c1a143"}, + {file = "boto3-1.35.19.tar.gz", hash = "sha256:9979fe674780a0b7100eae9156d74ee374cd1638a9f61c77277e3ce712f3e496"}, +] + +[package.dependencies] +botocore = ">=1.35.19,<1.36.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.10.0,<0.11.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.35.19" +description = "Low-level, data-driven core of boto 3." +optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore-1.35.19-py3-none-any.whl", hash = "sha256:c83f7f0cacfe7c19b109b363ebfa8736e570d24922f16ed371681f58ebab44a9"}, + {file = "botocore-1.35.19.tar.gz", hash = "sha256:42d6d8db7250cbd7899f786f9861e02cab17dc238f64d6acb976098ed9809625"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} + +[package.extras] +crt = ["awscrt (==0.21.5)"] + +[[package]] +name = "celery" +version = "5.4.0" +description = "Distributed Task Queue." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "celery-5.4.0-py3-none-any.whl", hash = "sha256:369631eb580cf8c51a82721ec538684994f8277637edde2dfc0dacd73ed97f64"}, + {file = "celery-5.4.0.tar.gz", hash = "sha256:504a19140e8d3029d5acad88330c541d4c3f64c789d85f94756762d8bca7e706"}, +] + +[package.dependencies] +billiard = ">=4.2.0,<5.0" +click = ">=8.1.2,<9.0" +click-didyoumean = ">=0.3.0" +click-plugins = ">=1.1.1" +click-repl = ">=0.2.0" +kombu = ">=5.3.4,<6.0" +pytest-celery = {version = ">=1.0.0", extras = ["all"], optional = true, markers = "extra == \"pytest\""} +python-dateutil = ">=2.8.2" +tzdata = ">=2022.7" +vine = ">=5.1.0,<6.0" + +[package.extras] +arangodb = ["pyArango (>=2.0.2)"] +auth = ["cryptography (==42.0.5)"] +azureblockblob = ["azure-storage-blob (>=12.15.0)"] +brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] +cassandra = ["cassandra-driver (>=3.25.0,<4)"] +consul = ["python-consul2 (==0.1.5)"] +cosmosdbsql = ["pydocumentdb (==2.3.5)"] +couchbase = ["couchbase (>=3.0.0)"] +couchdb = ["pycouchdb (==1.14.2)"] +django = ["Django (>=2.2.28)"] +dynamodb = ["boto3 (>=1.26.143)"] +elasticsearch = ["elastic-transport (<=8.13.0)", "elasticsearch (<=8.13.0)"] +eventlet = ["eventlet (>=0.32.0)"] +gcs = ["google-cloud-storage (>=2.10.0)"] +gevent = ["gevent (>=1.5.0)"] +librabbitmq = ["librabbitmq (>=2.0.0)"] +memcache = ["pylibmc (==1.6.3)"] +mongodb = ["pymongo[srv] (>=4.0.2)"] +msgpack = ["msgpack (==1.0.8)"] +pymemcache = ["python-memcached (>=1.61)"] +pyro = ["pyro4 (==4.82)"] +pytest = ["pytest-celery[all] (>=1.0.0)"] +redis = ["redis (>=4.5.2,!=4.5.5,<6.0.0)"] +s3 = ["boto3 (>=1.26.143)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +solar = ["ephem (==4.1.5)"] +sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] +sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.3.4)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] +tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=1.3.1)"] +zstd = ["zstandard 
(==0.22.0)"] + +[[package]] +name = "celery-types" +version = "0.22.0" +description = "Type stubs for Celery and its related packages" +optional = false +python-versions = ">=3.9,<4.0" +files = [ + {file = "celery_types-0.22.0-py3-none-any.whl", hash = "sha256:79a66637d1d6af5992d1dc80259d9538869941325e966006f1e795220519b9ac"}, + {file = "celery_types-0.22.0.tar.gz", hash = "sha256:0ecad2fa5a6eded0a1f919e5e1e381cc2ff0635fe4b21db53b4661b6876d5b30"}, +] + +[package.dependencies] +typing-extensions = ">=4.9.0,<5.0.0" + +[[package]] +name = "certifi" +version = "2024.8.30" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file 
= "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "click-didyoumean" +version = "0.3.1" +description = "Enables git-like *did-you-mean* feature in click" +optional = false +python-versions = ">=3.6.2" +files = [ + {file = "click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c"}, + {file = "click_didyoumean-0.3.1.tar.gz", hash 
= "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463"}, +] + +[package.dependencies] +click = ">=7" + +[[package]] +name = "click-plugins" +version = "1.1.1" +description = "An extension module for click to enable registering CLI commands via setuptools entry-points." +optional = false +python-versions = "*" +files = [ + {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, + {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, +] + +[package.dependencies] +click = ">=4.0" + +[package.extras] +dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] + +[[package]] +name = "click-repl" +version = "0.3.0" +description = "REPL plugin for Click" +optional = false +python-versions = ">=3.6" +files = [ + {file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"}, + {file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"}, +] + +[package.dependencies] +click = ">=7.0" +prompt-toolkit = ">=3.0.36" + +[package.extras] +testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "comm" +version = "0.2.2" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, + {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, +] + +[package.dependencies] +traitlets = ">=4" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "coverage" +version = "7.6.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + 
{file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = 
"sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = 
"sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "debugpy" +version = "1.8.5" +description = "An implementation of the Debug Adapter Protocol for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "debugpy-1.8.5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7e4d594367d6407a120b76bdaa03886e9eb652c05ba7f87e37418426ad2079f7"}, + {file = "debugpy-1.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4413b7a3ede757dc33a273a17d685ea2b0c09dbd312cc03f5534a0fd4d40750a"}, + {file = "debugpy-1.8.5-cp310-cp310-win32.whl", hash = "sha256:dd3811bd63632bb25eda6bd73bea8e0521794cda02be41fa3160eb26fc29e7ed"}, + {file = "debugpy-1.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:b78c1250441ce893cb5035dd6f5fc12db968cc07f91cc06996b2087f7cefdd8e"}, + {file = "debugpy-1.8.5-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:606bccba19f7188b6ea9579c8a4f5a5364ecd0bf5a0659c8a5d0e10dcee3032a"}, + {file = "debugpy-1.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db9fb642938a7a609a6c865c32ecd0d795d56c1aaa7a7a5722d77855d5e77f2b"}, + {file = "debugpy-1.8.5-cp311-cp311-win32.whl", hash = "sha256:4fbb3b39ae1aa3e5ad578f37a48a7a303dad9a3d018d369bc9ec629c1cfa7408"}, + {file = "debugpy-1.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:345d6a0206e81eb68b1493ce2fbffd57c3088e2ce4b46592077a943d2b968ca3"}, + {file = "debugpy-1.8.5-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:5b5c770977c8ec6c40c60d6f58cacc7f7fe5a45960363d6974ddb9b62dbee156"}, + {file = "debugpy-1.8.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a65b00b7cdd2ee0c2cf4c7335fef31e15f1b7056c7fdbce9e90193e1a8c8cb"}, + {file = "debugpy-1.8.5-cp312-cp312-win32.whl", hash = "sha256:c9f7c15ea1da18d2fcc2709e9f3d6de98b69a5b0fff1807fb80bc55f906691f7"}, + {file = 
"debugpy-1.8.5-cp312-cp312-win_amd64.whl", hash = "sha256:28ced650c974aaf179231668a293ecd5c63c0a671ae6d56b8795ecc5d2f48d3c"}, + {file = "debugpy-1.8.5-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:3df6692351172a42af7558daa5019651f898fc67450bf091335aa8a18fbf6f3a"}, + {file = "debugpy-1.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd04a73eb2769eb0bfe43f5bfde1215c5923d6924b9b90f94d15f207a402226"}, + {file = "debugpy-1.8.5-cp38-cp38-win32.whl", hash = "sha256:8f913ee8e9fcf9d38a751f56e6de12a297ae7832749d35de26d960f14280750a"}, + {file = "debugpy-1.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:a697beca97dad3780b89a7fb525d5e79f33821a8bc0c06faf1f1289e549743cf"}, + {file = "debugpy-1.8.5-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0a1029a2869d01cb777216af8c53cda0476875ef02a2b6ff8b2f2c9a4b04176c"}, + {file = "debugpy-1.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84c276489e141ed0b93b0af648eef891546143d6a48f610945416453a8ad406"}, + {file = "debugpy-1.8.5-cp39-cp39-win32.whl", hash = "sha256:ad84b7cde7fd96cf6eea34ff6c4a1b7887e0fe2ea46e099e53234856f9d99a34"}, + {file = "debugpy-1.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:7b0fe36ed9d26cb6836b0a51453653f8f2e347ba7348f2bbfe76bfeb670bfb1c"}, + {file = "debugpy-1.8.5-py2.py3-none-any.whl", hash = "sha256:55919dce65b471eff25901acf82d328bbd5b833526b6c1364bd5133754777a44"}, + {file = "debugpy-1.8.5.zip", hash = "sha256:b2112cfeb34b4507399d298fe7023a16656fc553ed5246536060ca7bd0e668d0"}, +] + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "desbordante" +version = "1.1.0" +description = 
"Science-intensive high-performance data profiler" +optional = false +python-versions = ">=3.7" +files = [ + {file = "desbordante-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6ac9c19ffaefc5a6a85908c47069988d3993f7ef46ff40cf931358959d25395"}, + {file = "desbordante-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01cc9c6733b1894ba04df78d41446a03b0d9bd3f0ce3dce33ae543c897568eb0"}, + {file = "desbordante-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b99c90bea8156d63c5f40a94b48464b26c7da96987f18fb57baeda5558376d"}, + {file = "desbordante-1.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c00db13a79273b66dbcece0fcacc81cb9271993e77725108c4b8e0ed95188eb"}, + {file = "desbordante-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:099f1852522684c0e42e6c8cb7ee32ac1d9bb9d87d1e5863bba81bf36e42d536"}, + {file = "desbordante-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42cbef2c227acd26dfb6bb6209f28a440a6ea0f1f4e770c338e03c19467a948"}, + {file = "desbordante-1.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7075e865b830ca194ed77ba21f8667fb8f87a47a1313ba7cb83f6c506fed05c0"}, + {file = "desbordante-1.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19413ecb9f1215672500ffae4c806bee27a286d83704952085e4dbb37f22aeb3"}, + {file = "desbordante-1.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:798b12cfd28d7354aec251cae1adf75fa0a5c3a9c757b8a4ab11840a56332b1e"}, + {file = "desbordante-1.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a69a17ba645e8547564167817628260761ec5ade26d3fbb49112eeddc7813717"}, +] + +[[package]] +name = "desbordante-stubs" +version = "0.1.0" +description = "Stubs for desbordante package" +optional = false 
+python-versions = ">=3.11,<4.0" +files = [ + {file = "desbordante_stubs-0.1.0-py3-none-any.whl", hash = "sha256:51818787dbd39ffc6909c11fc5782fb419e42df055d214abadf74d37551c69e9"}, + {file = "desbordante_stubs-0.1.0.tar.gz", hash = "sha256:915860190da94e0ca2e8e9ccf54e669af8abf6518fd539c2e700a3b7a522df0d"}, +] + +[package.dependencies] +desbordante = ">=1.1.0,<2.0.0" + +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "dnspython" +version = "2.6.1" +description = "DNS toolkit" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, + {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=41)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=0.9.25)"] +idna = ["idna (>=3.6)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "docker" +version = "7.1.0" +description = "A Python library for the Docker Engine API." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, + {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, +] + +[package.dependencies] +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" + +[package.extras] +dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] +docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] +ssh = ["paramiko (>=2.4.3)"] +websockets = ["websocket-client (>=1.3.0)"] + +[[package]] +name = "email-validator" +version = "2.2.0" +description = "A robust email address syntax and deliverability validation library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" + +[[package]] +name = "executing" +version = "2.1.0" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.8" +files = [ + {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, + {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + +[[package]] +name = "faker" +version = "28.4.1" +description = "Faker is a Python package that generates fake data for you." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "Faker-28.4.1-py3-none-any.whl", hash = "sha256:e59c01d1e8b8e20a83255ab8232c143cb2af3b4f5ab6a3f5ce495f385ad8ab4c"}, + {file = "faker-28.4.1.tar.gz", hash = "sha256:4294d169255a045990720d6f3fa4134b764a4cdf46ef0d3c7553d2506f1adaa1"}, +] + +[package.dependencies] +python-dateutil = ">=2.4" + +[[package]] +name = "fastapi" +version = "0.109.2" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi-0.109.2-py3-none-any.whl", hash = "sha256:2c9bab24667293b501cad8dd388c05240c850b58ec5876ee3283c47d6e1e3a4d"}, + {file = "fastapi-0.109.2.tar.gz", hash = "sha256:f3817eac96fe4f65a2ebb4baa000f394e55f5fccdaf7f75250804bc58f354f73"}, +] + +[package.dependencies] +email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"all\""} +httpx = {version = ">=0.23.0", optional = true, markers = "extra == \"all\""} +itsdangerous = {version = ">=1.1.0", optional = true, markers = "extra == \"all\""} +jinja2 = {version = ">=2.11.2", optional = true, markers = "extra == \"all\""} +orjson = {version = ">=3.2.1", optional = true, markers = "extra == \"all\""} +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +pydantic-extra-types = {version = ">=2.0.0", optional = true, markers = "extra == \"all\""} +pydantic-settings = {version = ">=2.0.0", optional = true, markers = "extra == \"all\""} +python-multipart = {version = ">=0.0.7", optional = true, markers = "extra == \"all\""} +pyyaml = {version = ">=5.3.1", optional = true, markers = "extra == \"all\""} +starlette = ">=0.36.3,<0.37.0" +typing-extensions = ">=4.8.0" +ujson = {version = ">=4.0.1,<4.0.2 || >4.0.2,<4.1.0 || >4.1.0,<4.2.0 || >4.2.0,<4.3.0 || >4.3.0,<5.0.0 || >5.0.0,<5.1.0 || >5.1.0", optional = true, markers = "extra == \"all\""} +uvicorn = {version = ">=0.12.0", 
extras = ["standard"], optional = true, markers = "extra == \"all\""} + +[package.extras] +all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "filelock" +version = "3.16.0" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"}, + {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] +typing = ["typing-extensions (>=4.12.2)"] + +[[package]] +name = "greenlet" +version = "3.1.0" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a814dc3100e8a046ff48faeaa909e80cdb358411a3d6dd5293158425c684eda8"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a771dc64fa44ebe58d65768d869fcfb9060169d203446c1d446e844b62bdfdca"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e49a65d25d7350cca2da15aac31b6f67a43d867448babf997fe83c7505f57bc"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2cd8518eade968bc52262d8c46727cfc0826ff4d552cf0430b8d65aaf50bb91d"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76dc19e660baea5c38e949455c1181bc018893f25372d10ffe24b3ed7341fb25"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0a5b1c22c82831f56f2f7ad9bbe4948879762fe0d59833a4a71f16e5fa0f682"}, + {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2651dfb006f391bcb240635079a68a261b227a10a08af6349cba834a2141efa1"}, + {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3e7e6ef1737a819819b1163116ad4b48d06cfdd40352d813bb14436024fcda99"}, + {file = "greenlet-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:ffb08f2a1e59d38c7b8b9ac8083c9c8b9875f0955b1e9b9b9a965607a51f8e54"}, + {file = "greenlet-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9730929375021ec90f6447bff4f7f5508faef1c02f399a1953870cdb78e0c345"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:713d450cf8e61854de9420fb7eea8ad228df4e27e7d4ed465de98c955d2b3fa6"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c3446937be153718250fe421da548f973124189f18fe4575a0510b5c928f0cc"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ddc7bcedeb47187be74208bc652d63d6b20cb24f4e596bd356092d8000da6d6"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44151d7b81b9391ed759a2f2865bbe623ef00d648fed59363be2bbbd5154656f"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cea1cca3be76c9483282dc7760ea1cc08a6ecec1f0b6ca0a94ea0d17432da19"}, + {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:619935a44f414274a2c08c9e74611965650b730eb4efe4b2270f91df5e4adf9a"}, + {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:221169d31cada333a0c7fd087b957c8f431c1dba202c3a58cf5a3583ed973e9b"}, + {file = "greenlet-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:01059afb9b178606b4b6e92c3e710ea1635597c3537e44da69f4531e111dd5e9"}, + {file = "greenlet-3.1.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:24fc216ec7c8be9becba8b64a98a78f9cd057fd2dc75ae952ca94ed8a893bf27"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d07c28b85b350564bdff9f51c1c5007dfb2f389385d1bc23288de51134ca303"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:243a223c96a4246f8a30ea470c440fe9db1f5e444941ee3c3cd79df119b8eebf"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26811df4dc81271033a7836bc20d12cd30938e6bd2e9437f56fa03da81b0f8fc"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9d86401550b09a55410f32ceb5fe7efcd998bd2dad9e82521713cb148a4a15f"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:26d9c1c4f1748ccac0bae1dbb465fb1a795a75aba8af8ca871503019f4285e2a"}, + {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cd468ec62257bb4544989402b19d795d2305eccb06cde5da0eb739b63dc04665"}, + {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a53dfe8f82b715319e9953330fa5c8708b610d48b5c59f1316337302af5c0811"}, + {file = "greenlet-3.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:28fe80a3eb673b2d5cc3b12eea468a5e5f4603c26aa34d88bf61bba82ceb2f9b"}, + {file = "greenlet-3.1.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:76b3e3976d2a452cba7aa9e453498ac72240d43030fdc6d538a72b87eaff52fd"}, + {file = 
"greenlet-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655b21ffd37a96b1e78cc48bf254f5ea4b5b85efaf9e9e2a526b3c9309d660ca"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f4c2027689093775fd58ca2388d58789009116844432d920e9147f91acbe64"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76e5064fd8e94c3f74d9fd69b02d99e3cdb8fc286ed49a1f10b256e59d0d3a0b"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a4bf607f690f7987ab3291406e012cd8591a4f77aa54f29b890f9c331e84989"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:037d9ac99540ace9424cb9ea89f0accfaff4316f149520b4ae293eebc5bded17"}, + {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:90b5bbf05fe3d3ef697103850c2ce3374558f6fe40fd57c9fac1bf14903f50a5"}, + {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:726377bd60081172685c0ff46afbc600d064f01053190e4450857483c4d44484"}, + {file = "greenlet-3.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:d46d5069e2eeda111d6f71970e341f4bd9aeeee92074e649ae263b834286ecc0"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81eeec4403a7d7684b5812a8aaa626fa23b7d0848edb3a28d2eb3220daddcbd0"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a3dae7492d16e85ea6045fd11cb8e782b63eac8c8d520c3a92c02ac4573b0a6"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b5ea3664eed571779403858d7cd0a9b0ebf50d57d2cdeafc7748e09ef8cd81a"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22f4e26400f7f48faef2d69c20dc055a1f3043d330923f9abe08ea0aecc44df"}, + {file = 
"greenlet-3.1.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13ff8c8e54a10472ce3b2a2da007f915175192f18e6495bad50486e87c7f6637"}, + {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9671e7282d8c6fcabc32c0fb8d7c0ea8894ae85cee89c9aadc2d7129e1a9954"}, + {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:184258372ae9e1e9bddce6f187967f2e08ecd16906557c4320e3ba88a93438c3"}, + {file = "greenlet-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:a0409bc18a9f85321399c29baf93545152d74a49d92f2f55302f122007cfda00"}, + {file = "greenlet-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9eb4a1d7399b9f3c7ac68ae6baa6be5f9195d1d08c9ddc45ad559aa6b556bce6"}, + {file = "greenlet-3.1.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:a8870983af660798dc1b529e1fd6f1cefd94e45135a32e58bd70edd694540f33"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfcfb73aed40f550a57ea904629bdaf2e562c68fa1164fa4588e752af6efdc3f"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9482c2ed414781c0af0b35d9d575226da6b728bd1a720668fa05837184965b7"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d58ec349e0c2c0bc6669bf2cd4982d2f93bf067860d23a0ea1fe677b0f0b1e09"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd65695a8df1233309b701dec2539cc4b11e97d4fcc0f4185b4a12ce54db0491"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:665b21e95bc0fce5cab03b2e1d90ba9c66c510f1bb5fdc864f3a377d0f553f6b"}, + {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3c59a06c2c28a81a026ff11fbf012081ea34fb9b7052f2ed0366e14896f0a1d"}, + {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:5415b9494ff6240b09af06b91a375731febe0090218e2898d2b85f9b92abcda0"}, + {file = "greenlet-3.1.0-cp38-cp38-win32.whl", hash = "sha256:1544b8dd090b494c55e60c4ff46e238be44fdc472d2589e943c241e0169bcea2"}, + {file = "greenlet-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:7f346d24d74c00b6730440f5eb8ec3fe5774ca8d1c9574e8e57c8671bb51b910"}, + {file = "greenlet-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:db1b3ccb93488328c74e97ff888604a8b95ae4f35f4f56677ca57a4fc3a4220b"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44cd313629ded43bb3b98737bba2f3e2c2c8679b55ea29ed73daea6b755fe8e7"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fad7a051e07f64e297e6e8399b4d6a3bdcad3d7297409e9a06ef8cbccff4f501"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3967dcc1cd2ea61b08b0b276659242cbce5caca39e7cbc02408222fb9e6ff39"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d45b75b0f3fd8d99f62eb7908cfa6d727b7ed190737dec7fe46d993da550b81a"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d004db911ed7b6218ec5c5bfe4cf70ae8aa2223dffbb5b3c69e342bb253cb28"}, + {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9505a0c8579899057cbefd4ec34d865ab99852baf1ff33a9481eb3924e2da0b"}, + {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fd6e94593f6f9714dbad1aaba734b5ec04593374fa6638df61592055868f8b8"}, + {file = "greenlet-3.1.0-cp39-cp39-win32.whl", hash = "sha256:d0dd943282231480aad5f50f89bdf26690c995e8ff555f26d8a5b9887b559bcc"}, + {file = "greenlet-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:ac0adfdb3a21dc2a24ed728b61e72440d297d0fd3a577389df566651fcd08f97"}, + {file = "greenlet-3.1.0.tar.gz", hash = 
"sha256:b395121e9bbe8d02a750886f108d540abe66075e61e22f7353d9acb0b81be0f0"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httptools" +version = "0.6.1" +description = "A collection of framework independent HTTP protocol utils." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, + {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, + {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, + {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, + {file = 
"httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, + {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, + {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, + {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, +] + +[package.extras] +test = ["Cython (>=0.29.24,<0.30.0)"] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next 
generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "identify" +version = "2.6.0" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, + {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.9" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.9-py3-none-any.whl", hash = "sha256:69297d5da0cc9281c77efffb4e730254dd45943f45bbfb461de5991713989b1e"}, + {file = "idna-3.9.tar.gz", hash = "sha256:e5c5dafde284f26e9e0f28f6ea2d6400abd5ca099864a67f576f3981c6476124"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = 
"sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "ipykernel" +version = "6.29.5" +description = "IPython Kernel for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, + {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +comm = ">=0.1.1" +debugpy = ">=1.6.5" +ipython = ">=7.23.1" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +matplotlib-inline = ">=0.1" +nest-asyncio = "*" +packaging = "*" +psutil = "*" +pyzmq = ">=24" +tornado = ">=6.1" +traitlets = ">=5.4.0" + +[package.extras] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +pyqt5 = ["pyqt5"] +pyside6 = ["pyside6"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "ipython" +version = "8.27.0" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.10" +files = [ + {file = "ipython-8.27.0-py3-none-any.whl", hash = "sha256:f68b3cb8bde357a5d7adc9598d57e22a45dfbea19eb6b98286fa3b288c9cd55c"}, + {file = "ipython-8.27.0.tar.gz", hash = "sha256:0b99a2dc9f15fd68692e898e5568725c6d49c527d36a9fb5960ffbdeaa82ff7e"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt-toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.4.0" 
+stack-data = "*" +traitlets = ">=5.13.0" + +[package.extras] +all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"] +kernel = ["ipykernel"] +matplotlib = ["matplotlib"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] + +[[package]] +name = "itsdangerous" +version = "2.2.0" +description = "Safely pass data to untrusted environments and back." +optional = false +python-versions = ">=3.8" +files = [ + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, +] + +[[package]] +name = "jedi" +version = "0.19.1" +description = "An autocompletion tool for Python that can be used for text editors." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "jupyter-client" +version = "8.6.2" +description = "Jupyter protocol implementation and client libraries" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_client-8.6.2-py3-none-any.whl", hash = "sha256:50cbc5c66fd1b8f65ecb66bc490ab73217993632809b6e505687de18e9dea39f"}, + {file = "jupyter_client-8.6.2.tar.gz", hash = "sha256:2bda14d55ee5ba58552a8c53ae43d215ad9868853489213f37da060ced54d8df"}, +] + +[package.dependencies] +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = ">=5.3" + +[package.extras] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] + +[[package]] +name = "jupyter-core" +version = "5.7.2" +description = "Jupyter core package. A base package on which Jupyter projects rely." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, + {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, +] + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "kombu" +version = "5.4.1" +description = "Messaging library for Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "kombu-5.4.1-py3-none-any.whl", hash = "sha256:621d365f234e4c089596f3a2510f1ade07026efc28caca426161d8f458786cab"}, + {file = "kombu-5.4.1.tar.gz", hash = "sha256:1c05178826dab811f8cab5b0a154d42a7a33d8bcdde9fa3d7b4582e43c3c03db"}, +] + +[package.dependencies] +amqp = ">=5.1.1,<6.0.0" +vine = "5.1.0" + +[package.extras] +azureservicebus = ["azure-servicebus (>=7.10.0)"] +azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"] +confluentkafka = ["confluent-kafka (>=2.2.0)"] +consul = ["python-consul2 (==0.1.5)"] +librabbitmq = ["librabbitmq (>=2.0.0)"] +mongodb = ["pymongo (>=4.1.1)"] +msgpack = ["msgpack (==1.1.0)"] +pyro = ["pyro4 (==4.82)"] +qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] +redis = ["redis (>=4.5.2,!=4.5.5,!=5.0.2)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] +sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=2.8.0)"] + +[[package]] +name = "mako" +version = "1.3.5" +description = "A super-fast 
templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + 
{file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + 
{file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = 
"MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = 
"sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nest-asyncio" +version = "1.6.0" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = 
"sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "numpy" +version = "2.1.1" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.10" +files = [ + {file = "numpy-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8a0e34993b510fc19b9a2ce7f31cb8e94ecf6e924a40c0c9dd4f62d0aac47d9"}, + {file = "numpy-2.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7dd86dfaf7c900c0bbdcb8b16e2f6ddf1eb1fe39c6c8cca6e94844ed3152a8fd"}, + {file = "numpy-2.1.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:5889dd24f03ca5a5b1e8a90a33b5a0846d8977565e4ae003a63d22ecddf6782f"}, + {file = "numpy-2.1.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:59ca673ad11d4b84ceb385290ed0ebe60266e356641428c845b39cd9df6713ab"}, + {file = "numpy-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13ce49a34c44b6de5241f0b38b07e44c1b2dcacd9e36c30f9c2fcb1bb5135db7"}, + {file = "numpy-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:913cc1d311060b1d409e609947fa1b9753701dac96e6581b58afc36b7ee35af6"}, + {file = "numpy-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:caf5d284ddea7462c32b8d4a6b8af030b6c9fd5332afb70e7414d7fdded4bfd0"}, + {file = "numpy-2.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:57eb525e7c2a8fdee02d731f647146ff54ea8c973364f3b850069ffb42799647"}, + {file = "numpy-2.1.1-cp310-cp310-win32.whl", hash = "sha256:9a8e06c7a980869ea67bbf551283bbed2856915f0a792dc32dd0f9dd2fb56728"}, + {file = "numpy-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:d10c39947a2d351d6d466b4ae83dad4c37cd6c3cdd6d5d0fa797da56f710a6ae"}, + {file = "numpy-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0d07841fd284718feffe7dd17a63a2e6c78679b2d386d3e82f44f0108c905550"}, + {file = "numpy-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b5613cfeb1adfe791e8e681128f5f49f22f3fcaa942255a6124d58ca59d9528f"}, 
+ {file = "numpy-2.1.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0b8cc2715a84b7c3b161f9ebbd942740aaed913584cae9cdc7f8ad5ad41943d0"}, + {file = "numpy-2.1.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:b49742cdb85f1f81e4dc1b39dcf328244f4d8d1ded95dea725b316bd2cf18c95"}, + {file = "numpy-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8d5f8a8e3bc87334f025194c6193e408903d21ebaeb10952264943a985066ca"}, + {file = "numpy-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d51fc141ddbe3f919e91a096ec739f49d686df8af254b2053ba21a910ae518bf"}, + {file = "numpy-2.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:98ce7fb5b8063cfdd86596b9c762bf2b5e35a2cdd7e967494ab78a1fa7f8b86e"}, + {file = "numpy-2.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:24c2ad697bd8593887b019817ddd9974a7f429c14a5469d7fad413f28340a6d2"}, + {file = "numpy-2.1.1-cp311-cp311-win32.whl", hash = "sha256:397bc5ce62d3fb73f304bec332171535c187e0643e176a6e9421a6e3eacef06d"}, + {file = "numpy-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:ae8ce252404cdd4de56dcfce8b11eac3c594a9c16c231d081fb705cf23bd4d9e"}, + {file = "numpy-2.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c803b7934a7f59563db459292e6aa078bb38b7ab1446ca38dd138646a38203e"}, + {file = "numpy-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6435c48250c12f001920f0751fe50c0348f5f240852cfddc5e2f97e007544cbe"}, + {file = "numpy-2.1.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:3269c9eb8745e8d975980b3a7411a98976824e1fdef11f0aacf76147f662b15f"}, + {file = "numpy-2.1.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:fac6e277a41163d27dfab5f4ec1f7a83fac94e170665a4a50191b545721c6521"}, + {file = "numpy-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcd8f556cdc8cfe35e70efb92463082b7f43dd7e547eb071ffc36abc0ca4699b"}, + {file = "numpy-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:d2b9cd92c8f8e7b313b80e93cedc12c0112088541dcedd9197b5dee3738c1201"}, + {file = "numpy-2.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:afd9c680df4de71cd58582b51e88a61feed4abcc7530bcd3d48483f20fc76f2a"}, + {file = "numpy-2.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8661c94e3aad18e1ea17a11f60f843a4933ccaf1a25a7c6a9182af70610b2313"}, + {file = "numpy-2.1.1-cp312-cp312-win32.whl", hash = "sha256:950802d17a33c07cba7fd7c3dcfa7d64705509206be1606f196d179e539111ed"}, + {file = "numpy-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:3fc5eabfc720db95d68e6646e88f8b399bfedd235994016351b1d9e062c4b270"}, + {file = "numpy-2.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:046356b19d7ad1890c751b99acad5e82dc4a02232013bd9a9a712fddf8eb60f5"}, + {file = "numpy-2.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6e5a9cb2be39350ae6c8f79410744e80154df658d5bea06e06e0ac5bb75480d5"}, + {file = "numpy-2.1.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:d4c57b68c8ef5e1ebf47238e99bf27657511ec3f071c465f6b1bccbef12d4136"}, + {file = "numpy-2.1.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:8ae0fd135e0b157365ac7cc31fff27f07a5572bdfc38f9c2d43b2aff416cc8b0"}, + {file = "numpy-2.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981707f6b31b59c0c24bcda52e5605f9701cb46da4b86c2e8023656ad3e833cb"}, + {file = "numpy-2.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ca4b53e1e0b279142113b8c5eb7d7a877e967c306edc34f3b58e9be12fda8df"}, + {file = "numpy-2.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e097507396c0be4e547ff15b13dc3866f45f3680f789c1a1301b07dadd3fbc78"}, + {file = "numpy-2.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7506387e191fe8cdb267f912469a3cccc538ab108471291636a96a54e599556"}, + {file = "numpy-2.1.1-cp313-cp313-win32.whl", hash = "sha256:251105b7c42abe40e3a689881e1793370cc9724ad50d64b30b358bbb3a97553b"}, + {file = 
"numpy-2.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:f212d4f46b67ff604d11fff7cc62d36b3e8714edf68e44e9760e19be38c03eb0"}, + {file = "numpy-2.1.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:920b0911bb2e4414c50e55bd658baeb78281a47feeb064ab40c2b66ecba85553"}, + {file = "numpy-2.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:bab7c09454460a487e631ffc0c42057e3d8f2a9ddccd1e60c7bb8ed774992480"}, + {file = "numpy-2.1.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:cea427d1350f3fd0d2818ce7350095c1a2ee33e30961d2f0fef48576ddbbe90f"}, + {file = "numpy-2.1.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:e30356d530528a42eeba51420ae8bf6c6c09559051887196599d96ee5f536468"}, + {file = "numpy-2.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8dfa9e94fc127c40979c3eacbae1e61fda4fe71d84869cc129e2721973231ef"}, + {file = "numpy-2.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910b47a6d0635ec1bd53b88f86120a52bf56dcc27b51f18c7b4a2e2224c29f0f"}, + {file = "numpy-2.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:13cc11c00000848702322af4de0147ced365c81d66053a67c2e962a485b3717c"}, + {file = "numpy-2.1.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53e27293b3a2b661c03f79aa51c3987492bd4641ef933e366e0f9f6c9bf257ec"}, + {file = "numpy-2.1.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7be6a07520b88214ea85d8ac8b7d6d8a1839b0b5cb87412ac9f49fa934eb15d5"}, + {file = "numpy-2.1.1-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:52ac2e48f5ad847cd43c4755520a2317f3380213493b9d8a4c5e37f3b87df504"}, + {file = "numpy-2.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50a95ca3560a6058d6ea91d4629a83a897ee27c00630aed9d933dff191f170cd"}, + {file = "numpy-2.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:99f4a9ee60eed1385a86e82288971a51e71df052ed0b2900ed30bc840c0f2e39"}, + {file = "numpy-2.1.1.tar.gz", hash = 
"sha256:d0cf7d55b1051387807405b3898efafa862997b4cba8aa5dbe657be794afeafd"}, +] + +[[package]] +name = "orjson" +version = "3.10.7" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84"}, + {file = "orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175"}, + {file = "orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c"}, + {file = "orjson-3.10.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0"}, + {file = "orjson-3.10.7-cp311-none-win32.whl", hash = "sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f"}, + {file = "orjson-3.10.7-cp311-none-win_amd64.whl", hash = "sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5"}, + {file = "orjson-3.10.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b"}, + {file = "orjson-3.10.7-cp312-none-win32.whl", hash = "sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb"}, + {file = "orjson-3.10.7-cp312-none-win_amd64.whl", hash = "sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1"}, + {file = "orjson-3.10.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149"}, + {file = "orjson-3.10.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad"}, + {file = "orjson-3.10.7-cp313-none-win32.whl", hash = "sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2"}, + {file = "orjson-3.10.7-cp313-none-win_amd64.whl", hash = 
"sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024"}, + {file = "orjson-3.10.7-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6ea2b2258eff652c82652d5e0f02bd5e0463a6a52abb78e49ac288827aaa1469"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430ee4d85841e1483d487e7b81401785a5dfd69db5de01314538f31f8fbf7ee1"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b6146e439af4c2472c56f8540d799a67a81226e11992008cb47e1267a9b3225"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:084e537806b458911137f76097e53ce7bf5806dda33ddf6aaa66a028f8d43a23"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829cf2195838e3f93b70fd3b4292156fc5e097aac3739859ac0dcc722b27ac0"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1193b2416cbad1a769f868b1749535d5da47626ac29445803dae7cc64b3f5c98"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4e6c3da13e5a57e4b3dca2de059f243ebec705857522f188f0180ae88badd354"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c31008598424dfbe52ce8c5b47e0752dca918a4fdc4a2a32004efd9fab41d866"}, + {file = "orjson-3.10.7-cp38-none-win32.whl", hash = "sha256:7122a99831f9e7fe977dc45784d3b2edc821c172d545e6420c375e5a935f5a1c"}, + {file = "orjson-3.10.7-cp38-none-win_amd64.whl", hash = "sha256:a763bc0e58504cc803739e7df040685816145a6f3c8a589787084b54ebc9f16e"}, + {file = "orjson-3.10.7-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e76be12658a6fa376fcd331b1ea4e58f5a06fd0220653450f0d415b8fd0fbe20"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ed350d6978d28b92939bfeb1a0570c523f6170efc3f0a0ef1f1df287cd4f4960"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:144888c76f8520e39bfa121b31fd637e18d4cc2f115727865fdf9fa325b10412"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09b2d92fd95ad2402188cf51573acde57eb269eddabaa60f69ea0d733e789fe9"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b24a579123fa884f3a3caadaed7b75eb5715ee2b17ab5c66ac97d29b18fe57f"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591bcfe7512353bd609875ab38050efe3d55e18934e2f18950c108334b4ff"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f4db56635b58cd1a200b0a23744ff44206ee6aa428185e2b6c4a65b3197abdcd"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0fa5886854673222618638c6df7718ea7fe2f3f2384c452c9ccedc70b4a510a5"}, + {file = "orjson-3.10.7-cp39-none-win32.whl", hash = "sha256:8272527d08450ab16eb405f47e0f4ef0e5ff5981c3d82afe0efd25dcbef2bcd2"}, + {file = "orjson-3.10.7-cp39-none-win_amd64.whl", hash = "sha256:974683d4618c0c7dbf4f69c95a979734bf183d0658611760017f6e70a145af58"}, + {file = "orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pandas" +version = "2.2.2" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false 
+python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + +[package.dependencies] +numpy = {version = ">=1.26.0", markers = "python_version >= \"3.12\""} +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs 
(>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "pandas-stubs" +version = "2.2.2.240909" +description = "Type annotations for pandas" +optional = false +python-versions = ">=3.10" +files = [ + {file = "pandas_stubs-2.2.2.240909-py3-none-any.whl", hash = "sha256:e230f5fa4065f9417804f4d65cd98f86c002efcc07933e8abcd48c3fad9c30a2"}, + {file = "pandas_stubs-2.2.2.240909.tar.gz", hash = "sha256:3c0951a2c3e45e3475aed9d80b7147ae82f176b9e42e9fb321cfdebf3d411b3d"}, +] + +[package.dependencies] +numpy = ">=1.23.5" +types-pytz = ">=2022.1.1" + +[[package]] +name = "parso" +version = "0.8.4" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, +] + +[package.extras] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." +optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "platformdirs" +version = "4.3.3" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.3.3-py3-none-any.whl", hash = "sha256:50a5450e2e84f44539718293cbb1da0a0885c9d14adf21b77bae4e66fc99d9b5"}, + {file = "platformdirs-4.3.3.tar.gz", hash = "sha256:d4e0b7d8ec176b341fb03cb11ca12d0276faa8c485f9cd218f613840463fc2c0"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = 
["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "polyfactory" +version = "2.16.2" +description = "Mock data generation factories" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "polyfactory-2.16.2-py3-none-any.whl", hash = "sha256:e5eaf97358fee07d0d8de86a93e81dc56e3be1e1514d145fea6c5f486cda6ea1"}, + {file = "polyfactory-2.16.2.tar.gz", hash = "sha256:6d0d90deb85e5bb1733ea8744c2d44eea2b31656e11b4fa73832d2e2ab5422da"}, +] + +[package.dependencies] +faker = "*" +typing-extensions = ">=4.6.0" + +[package.extras] +attrs = ["attrs (>=22.2.0)"] +beanie = ["beanie", "pydantic[email]"] +full = ["attrs", "beanie", "msgspec", "odmantic", "pydantic", "sqlalchemy"] +msgspec = ["msgspec"] +odmantic = ["odmantic (<1.0.0)", "pydantic[email]"] +pydantic = ["pydantic[email]"] +sqlalchemy = ["sqlalchemy (>=1.4.29)"] + +[[package]] +name = "pre-commit" +version = "3.8.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, + {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "prompt-toolkit" +version = "3.0.47" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, + {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psutil" +version = "6.0.0" +description = "Cross-platform lib for process and system monitoring in Python." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"}, + {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"}, + {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"}, + {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"}, + {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"}, + {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"}, + {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"}, + {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"}, + {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"}, + {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = 
"sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"}, + {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"}, + {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"}, + {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"}, + {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "psycopg" +version = "3.2.1" +description = "PostgreSQL database adapter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "psycopg-3.2.1-py3-none-any.whl", hash = "sha256:ece385fb413a37db332f97c49208b36cf030ff02b199d7635ed2fbd378724175"}, + {file = "psycopg-3.2.1.tar.gz", hash = "sha256:dc8da6dc8729dacacda3cc2f17d2c9397a70a66cf0d2b69c91065d60d5f00cb7"}, +] + +[package.dependencies] +psycopg-binary = {version = "3.2.1", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} +typing-extensions = ">=4.4" +tzdata = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +binary = ["psycopg-binary (==3.2.1)"] +c = ["psycopg-c (==3.2.1)"] +dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.6)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] +docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] +pool = ["psycopg-pool"] +test = ["anyio (>=4.0)", "mypy (>=1.6)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] + +[[package]] +name = "psycopg-binary" +version = "3.2.1" +description = "PostgreSQL database adapter for Python -- C 
optimisation distribution" +optional = false +python-versions = ">=3.8" +files = [ + {file = "psycopg_binary-3.2.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:cad2de17804c4cfee8640ae2b279d616bb9e4734ac3c17c13db5e40982bd710d"}, + {file = "psycopg_binary-3.2.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:592b27d6c46a40f9eeaaeea7c1fef6f3c60b02c634365eb649b2d880669f149f"}, + {file = "psycopg_binary-3.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a997efbaadb5e1a294fb5760e2f5643d7b8e4e3fe6cb6f09e6d605fd28e0291"}, + {file = "psycopg_binary-3.2.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1d2b6438fb83376f43ebb798bf0ad5e57bc56c03c9c29c85bc15405c8c0ac5a"}, + {file = "psycopg_binary-3.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b1f087bd84bdcac78bf9f024ebdbfacd07fc0a23ec8191448a50679e2ac4a19e"}, + {file = "psycopg_binary-3.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:415c3b72ea32119163255c6504085f374e47ae7345f14bc3f0ef1f6e0976a879"}, + {file = "psycopg_binary-3.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f092114f10f81fb6bae544a0ec027eb720e2d9c74a4fcdaa9dd3899873136935"}, + {file = "psycopg_binary-3.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06a7aae34edfe179ddc04da005e083ff6c6b0020000399a2cbf0a7121a8a22ea"}, + {file = "psycopg_binary-3.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0b018631e5c80ce9bc210b71ea885932f9cca6db131e4df505653d7e3873a938"}, + {file = "psycopg_binary-3.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f8a509aeaac364fa965454e80cd110fe6d48ba2c80f56c9b8563423f0b5c3cfd"}, + {file = "psycopg_binary-3.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:413977d18412ff83486eeb5875eb00b185a9391c57febac45b8993bf9c0ff489"}, + {file = "psycopg_binary-3.2.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = 
"sha256:62b1b7b07e00ee490afb39c0a47d8282a9c2822c7cfed9553a04b0058adf7e7f"}, + {file = "psycopg_binary-3.2.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:f8afb07114ea9b924a4a0305ceb15354ccf0ef3c0e14d54b8dbeb03e50182dd7"}, + {file = "psycopg_binary-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40bb515d042f6a345714ec0403df68ccf13f73b05e567837d80c886c7c9d3805"}, + {file = "psycopg_binary-3.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6418712ba63cebb0c88c050b3997185b0ef54173b36568522d5634ac06153040"}, + {file = "psycopg_binary-3.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:101472468d59c74bb8565fab603e032803fd533d16be4b2d13da1bab8deb32a3"}, + {file = "psycopg_binary-3.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa3931f308ab4a479d0ee22dc04bea867a6365cac0172e5ddcba359da043854b"}, + {file = "psycopg_binary-3.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dc314a47d44fe1a8069b075a64abffad347a3a1d8652fed1bab5d3baea37acb2"}, + {file = "psycopg_binary-3.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:cc304a46be1e291031148d9d95c12451ffe783ff0cc72f18e2cc7ec43cdb8c68"}, + {file = "psycopg_binary-3.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f9e13600647087df5928875559f0eb8f496f53e6278b7da9511b4b3d0aff960"}, + {file = "psycopg_binary-3.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b140182830c76c74d17eba27df3755a46442ce8d4fb299e7f1cf2f74a87c877b"}, + {file = "psycopg_binary-3.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:3c838806eeb99af39f934b7999e35f947a8e577997cc892c12b5053a97a9057f"}, + {file = "psycopg_binary-3.2.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:7066d3dca196ed0dc6172f9777b2d62e4f138705886be656cccff2d555234d60"}, + {file = "psycopg_binary-3.2.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:28ada5f610468c57d8a4a055a8ea915d0085a43d794266c4f3b9d02f4288f4db"}, + 
{file = "psycopg_binary-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e8213bf50af073b1aa8dc3cff123bfeedac86332a16c1b7274910bc88a847c7"}, + {file = "psycopg_binary-3.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74d623261655a169bc84a9669890975c229f2fa6e19a7f2d10a77675dcf1a707"}, + {file = "psycopg_binary-3.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42781ba94e8842ee98bca5a7d0c44cc9d067500fedca2d6a90fa3609b6d16b42"}, + {file = "psycopg_binary-3.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33e6669091d09f8ba36e10ce678a6d9916e110446236a9b92346464a3565635e"}, + {file = "psycopg_binary-3.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b09e8a576a2ac69d695032ee76f31e03b30781828b5dd6d18c6a009e5a3d1c35"}, + {file = "psycopg_binary-3.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8f28ff0cb9f1defdc4a6f8c958bf6787274247e7dfeca811f6e2f56602695fb1"}, + {file = "psycopg_binary-3.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4c84fcac8a3a3479ac14673095cc4e1fdba2935499f72c436785ac679bec0d1a"}, + {file = "psycopg_binary-3.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:950fd666ec9e9fe6a8eeb2b5a8f17301790e518953730ad44d715b59ffdbc67f"}, + {file = "psycopg_binary-3.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:334046a937bb086c36e2c6889fe327f9f29bfc085d678f70fac0b0618949f674"}, + {file = "psycopg_binary-3.2.1-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:1d6833f607f3fc7b22226a9e121235d3b84c0eda1d3caab174673ef698f63788"}, + {file = "psycopg_binary-3.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d353e028b8f848b9784450fc2abf149d53a738d451eab3ee4c85703438128b9"}, + {file = "psycopg_binary-3.2.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f34e369891f77d0738e5d25727c307d06d5344948771e5379ea29c76c6d84555"}, + {file = 
"psycopg_binary-3.2.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ab58213cc976a1666f66bc1cb2e602315cd753b7981a8e17237ac2a185bd4a1"}, + {file = "psycopg_binary-3.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0104a72a17aa84b3b7dcab6c84826c595355bf54bb6ea6d284dcb06d99c6801"}, + {file = "psycopg_binary-3.2.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:059cbd4e6da2337e17707178fe49464ed01de867dc86c677b30751755ec1dc51"}, + {file = "psycopg_binary-3.2.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:73f9c9b984be9c322b5ec1515b12df1ee5896029f5e72d46160eb6517438659c"}, + {file = "psycopg_binary-3.2.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:af0469c00f24c4bec18c3d2ede124bf62688d88d1b8a5f3c3edc2f61046fe0d7"}, + {file = "psycopg_binary-3.2.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:463d55345f73ff391df8177a185ad57b552915ad33f5cc2b31b930500c068b22"}, + {file = "psycopg_binary-3.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:302b86f92c0d76e99fe1b5c22c492ae519ce8b98b88d37ef74fda4c9e24c6b46"}, + {file = "psycopg_binary-3.2.1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0879b5d76b7d48678d31278242aaf951bc2d69ca4e4d7cef117e4bbf7bfefda9"}, + {file = "psycopg_binary-3.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f99e59f8a5f4dcd9cbdec445f3d8ac950a492fc0e211032384d6992ed3c17eb7"}, + {file = "psycopg_binary-3.2.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84837e99353d16c6980603b362d0f03302d4b06c71672a6651f38df8a482923d"}, + {file = "psycopg_binary-3.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ce965caf618061817f66c0906f0452aef966c293ae0933d4fa5a16ea6eaf5bb"}, + {file = "psycopg_binary-3.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78c2007caf3c90f08685c5378e3ceb142bafd5636be7495f7d86ec8a977eaeef"}, + {file = 
"psycopg_binary-3.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7a84b5eb194a258116154b2a4ff2962ea60ea52de089508db23a51d3d6b1c7d1"}, + {file = "psycopg_binary-3.2.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4a42b8f9ab39affcd5249b45cac763ac3cf12df962b67e23fd15a2ee2932afe5"}, + {file = "psycopg_binary-3.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:788ffc43d7517c13e624c83e0e553b7b8823c9655e18296566d36a829bfb373f"}, + {file = "psycopg_binary-3.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:21927f41c4d722ae8eb30d62a6ce732c398eac230509af5ba1749a337f8a63e2"}, + {file = "psycopg_binary-3.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:921f0c7f39590763d64a619de84d1b142587acc70fd11cbb5ba8fa39786f3073"}, +] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.3" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = 
"sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pycurl" +version = "7.45.3" +description = "PycURL -- A Python Interface To The cURL library" +optional = false +python-versions = ">=3.5" +files = [ + {file = "pycurl-7.45.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86f66d334deaaab20a576fb785587566081407adc703318203fe26e43277ef12"}, + {file = "pycurl-7.45.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:205983e87d6aa0b6e93ec7320060de44efaa905ecc5d13f70cbe38c65684c5c4"}, + {file = "pycurl-7.45.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbd4a6b8654b779089c5a44af1c65c1419c2cd60718780df6d8f354eb35d6d55"}, + {file = "pycurl-7.45.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:5ebc6a0ac60c371a9efaf7d55dec5820f76fdafb43a3be1e390011339dc329ae"}, + {file = "pycurl-7.45.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:2facab1c35600088cb82b5b093bd700bfbd1e3191deab24f7d1803d9dc5b76fc"}, + {file = "pycurl-7.45.3-cp310-cp310-win32.whl", hash = "sha256:7cfca02d70579853041063e53ca713d31161b8831b98d4f68c3554dc0448beec"}, + {file = "pycurl-7.45.3-cp310-cp310-win_amd64.whl", hash = "sha256:8451e8475051f16eb4776380384699cb8ddd10ea8410bcbfaee5a6fc4c046de6"}, + {file = "pycurl-7.45.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1610cc45b5bc8b39bc18b981d0473e59ef41226ee467eaa8fbfc7276603ef5af"}, + {file = "pycurl-7.45.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c854885398410fa6e88fc29f7a420a3c13b88bae9b4e10a804437b582e24f58b"}, + {file = "pycurl-7.45.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:921c9db0c3128481954f625b3b1bc10c730100aa944d54643528f716676439ee"}, + {file = "pycurl-7.45.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:483f3aa5d1bc8cff5657ad96f68e1d89281f971a7b6aa93408a31e3199981ea9"}, + {file = "pycurl-7.45.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1e0d32d6ed3a7ba13dbbd3a6fb50ca76c40c70e6bc6fe347f90677478d3422c7"}, + {file 
= "pycurl-7.45.3-cp311-cp311-win32.whl", hash = "sha256:beaaa4450e23d41dd0c2f2f47a4f8a171210271543550c2c556090c7eeea88f5"}, + {file = "pycurl-7.45.3-cp311-cp311-win_amd64.whl", hash = "sha256:dd33fd9de8907a6275c70113124aeb7eea672c1324f5d5423f203738b341697d"}, + {file = "pycurl-7.45.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0c41a172d5e8a5cdd8328cc8134f47b2a57960ac677f7cda8520eaa9fbe7d990"}, + {file = "pycurl-7.45.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13006b62c157bb4483c58e1abdced6df723c9399255a4f5f6bb7f8e425106679"}, + {file = "pycurl-7.45.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27f4c5c20c86a9a823677316724306fb1ce3b25ec568efd52026dc6c563e5b29"}, + {file = "pycurl-7.45.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c2c246bc29e8762ff4c8a833ac5b4da4c797d16ab138286e8aec9b0c0a0da2d4"}, + {file = "pycurl-7.45.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3d07c5daef2d0d85949e32ec254ee44232bb57febb0634194379dd14d1ff4f87"}, + {file = "pycurl-7.45.3-cp312-cp312-win32.whl", hash = "sha256:9f7afe5ef0e4750ac4515baebc251ee94aaefe5de6e2e8a24668473128d69904"}, + {file = "pycurl-7.45.3-cp312-cp312-win_amd64.whl", hash = "sha256:3648ed9a57a6b704673faeab3dc64d1469cc69f2bc1ed8227ffa0f84e147c500"}, + {file = "pycurl-7.45.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c0915ea139f66a289edc4f9de10cb45078af1bb950491c5612969864236a2e7e"}, + {file = "pycurl-7.45.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:43c5e61a58783ddf78ef84949f6bb6e52e092a13ec67678e9a9e21071ecf5b80"}, + {file = "pycurl-7.45.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bf613844a1647fe3d2bba1f5c9c96a62a85280123a57a8a0c8d2f37d518bc10a"}, + {file = "pycurl-7.45.3-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:936afd9c5ff7fe7457065e878a279811787778f472f9a4e8c5df79e7728358e2"}, + {file = "pycurl-7.45.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:dbf816a6d0cb71e7fd06609246bbea4eaf100649d9decf49e4eb329594f70be7"}, + {file = 
"pycurl-7.45.3-cp38-cp38-win32.whl", hash = "sha256:2c8a2ce568193f9f84763717d8961cec0db4ec1aa08c6bcf4d90da5eb72bec86"}, + {file = "pycurl-7.45.3-cp38-cp38-win_amd64.whl", hash = "sha256:80ac7c17e69ca6b76ccccb4255f7c29a2a36e5b69eb10c2adba82135d43afe8c"}, + {file = "pycurl-7.45.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fa7751b614d9aa82d7a0f49ca90924c29c6cedf85a2f8687fb6a772dbfe48711"}, + {file = "pycurl-7.45.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b129e9ee07f80b4af957607917af46ab517b0c4e746692f6d9e50e973edba8d8"}, + {file = "pycurl-7.45.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a0f920582b8713ca87d5a288a7532607bc4454275d733fc880650d602dbe3c67"}, + {file = "pycurl-7.45.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c7c13e4268550cde14a6f4743cc8bd8c035d4cd36514d58eff70276d68954b6f"}, + {file = "pycurl-7.45.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:0f0e1251a608ffd75fc502f4014442e554c67d3d7a1b0a839c35efb6ad2f8bf8"}, + {file = "pycurl-7.45.3-cp39-cp39-win32.whl", hash = "sha256:51a40a56c58e63dac6145829f9e9bd66e5867a9f0741bcb9ffefab619851d44f"}, + {file = "pycurl-7.45.3-cp39-cp39-win_amd64.whl", hash = "sha256:e08a06802c8c8a9d04cf3319f9230ec09062c55d2550bd48f8ada1df1431adcf"}, + {file = "pycurl-7.45.3.tar.gz", hash = "sha256:8c2471af9079ad798e1645ec0b0d3d4223db687379d17dd36a70637449f81d6b"}, +] + +[[package]] +name = "pydantic" +version = "2.9.1" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.9.1-py3-none-any.whl", hash = "sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612"}, + {file = "pydantic-2.9.1.tar.gz", hash = "sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.23.3" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < 
\"3.13\""}, +] + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] + +[[package]] +name = "pydantic-core" +version = "2.23.3" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.23.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7f10a5d1b9281392f1bf507d16ac720e78285dfd635b05737c3911637601bae6"}, + {file = "pydantic_core-2.23.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c09a7885dd33ee8c65266e5aa7fb7e2f23d49d8043f089989726391dd7350c5"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6470b5a1ec4d1c2e9afe928c6cb37eb33381cab99292a708b8cb9aa89e62429b"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9172d2088e27d9a185ea0a6c8cebe227a9139fd90295221d7d495944d2367700"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86fc6c762ca7ac8fbbdff80d61b2c59fb6b7d144aa46e2d54d9e1b7b0e780e01"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0cb80fd5c2df4898693aa841425ea1727b1b6d2167448253077d2a49003e0ed"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03667cec5daf43ac4995cefa8aaf58f99de036204a37b889c24a80927b629cec"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:047531242f8e9c2db733599f1c612925de095e93c9cc0e599e96cf536aaf56ba"}, + {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5499798317fff7f25dbef9347f4451b91ac2a4330c6669821c8202fd354c7bee"}, + {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bbb5e45eab7624440516ee3722a3044b83fff4c0372efe183fd6ba678ff681fe"}, + {file = 
"pydantic_core-2.23.3-cp310-none-win32.whl", hash = "sha256:8b5b3ed73abb147704a6e9f556d8c5cb078f8c095be4588e669d315e0d11893b"}, + {file = "pydantic_core-2.23.3-cp310-none-win_amd64.whl", hash = "sha256:2b603cde285322758a0279995b5796d64b63060bfbe214b50a3ca23b5cee3e83"}, + {file = "pydantic_core-2.23.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c889fd87e1f1bbeb877c2ee56b63bb297de4636661cc9bbfcf4b34e5e925bc27"}, + {file = "pydantic_core-2.23.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea85bda3189fb27503af4c45273735bcde3dd31c1ab17d11f37b04877859ef45"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7f7f72f721223f33d3dc98a791666ebc6a91fa023ce63733709f4894a7dc611"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b2b55b0448e9da68f56b696f313949cda1039e8ec7b5d294285335b53104b61"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c24574c7e92e2c56379706b9a3f07c1e0c7f2f87a41b6ee86653100c4ce343e5"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2b05e6ccbee333a8f4b8f4d7c244fdb7a979e90977ad9c51ea31261e2085ce0"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c409ce1c219c091e47cb03feb3c4ed8c2b8e004efc940da0166aaee8f9d6c8"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d965e8b325f443ed3196db890d85dfebbb09f7384486a77461347f4adb1fa7f8"}, + {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f56af3a420fb1ffaf43ece3ea09c2d27c444e7c40dcb7c6e7cf57aae764f2b48"}, + {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b01a078dd4f9a52494370af21aa52964e0a96d4862ac64ff7cea06e0f12d2c5"}, + {file = "pydantic_core-2.23.3-cp311-none-win32.whl", hash = 
"sha256:560e32f0df04ac69b3dd818f71339983f6d1f70eb99d4d1f8e9705fb6c34a5c1"}, + {file = "pydantic_core-2.23.3-cp311-none-win_amd64.whl", hash = "sha256:c744fa100fdea0d000d8bcddee95213d2de2e95b9c12be083370b2072333a0fa"}, + {file = "pydantic_core-2.23.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e0ec50663feedf64d21bad0809f5857bac1ce91deded203efc4a84b31b2e4305"}, + {file = "pydantic_core-2.23.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db6e6afcb95edbe6b357786684b71008499836e91f2a4a1e55b840955b341dbb"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ccd69edcf49f0875d86942f4418a4e83eb3047f20eb897bffa62a5d419c8fa"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a678c1ac5c5ec5685af0133262103defb427114e62eafeda12f1357a12140162"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01491d8b4d8db9f3391d93b0df60701e644ff0894352947f31fff3e52bd5c801"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fcf31facf2796a2d3b7fe338fe8640aa0166e4e55b4cb108dbfd1058049bf4cb"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7200fd561fb3be06827340da066df4311d0b6b8eb0c2116a110be5245dceb326"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc1636770a809dee2bd44dd74b89cc80eb41172bcad8af75dd0bc182c2666d4c"}, + {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:67a5def279309f2e23014b608c4150b0c2d323bd7bccd27ff07b001c12c2415c"}, + {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:748bdf985014c6dd3e1e4cc3db90f1c3ecc7246ff5a3cd4ddab20c768b2f1dab"}, + {file = "pydantic_core-2.23.3-cp312-none-win32.whl", hash = 
"sha256:255ec6dcb899c115f1e2a64bc9ebc24cc0e3ab097775755244f77360d1f3c06c"}, + {file = "pydantic_core-2.23.3-cp312-none-win_amd64.whl", hash = "sha256:40b8441be16c1e940abebed83cd006ddb9e3737a279e339dbd6d31578b802f7b"}, + {file = "pydantic_core-2.23.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6daaf5b1ba1369a22c8b050b643250e3e5efc6a78366d323294aee54953a4d5f"}, + {file = "pydantic_core-2.23.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d015e63b985a78a3d4ccffd3bdf22b7c20b3bbd4b8227809b3e8e75bc37f9cb2"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3fc572d9b5b5cfe13f8e8a6e26271d5d13f80173724b738557a8c7f3a8a3791"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f6bd91345b5163ee7448bee201ed7dd601ca24f43f439109b0212e296eb5b423"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc379c73fd66606628b866f661e8785088afe2adaba78e6bbe80796baf708a63"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbdce4b47592f9e296e19ac31667daed8753c8367ebb34b9a9bd89dacaa299c9"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3cf31edf405a161a0adad83246568647c54404739b614b1ff43dad2b02e6d5"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8e22b477bf90db71c156f89a55bfe4d25177b81fce4aa09294d9e805eec13855"}, + {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0a0137ddf462575d9bce863c4c95bac3493ba8e22f8c28ca94634b4a1d3e2bb4"}, + {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:203171e48946c3164fe7691fc349c79241ff8f28306abd4cad5f4f75ed80bc8d"}, + {file = "pydantic_core-2.23.3-cp313-none-win32.whl", hash = 
"sha256:76bdab0de4acb3f119c2a4bff740e0c7dc2e6de7692774620f7452ce11ca76c8"}, + {file = "pydantic_core-2.23.3-cp313-none-win_amd64.whl", hash = "sha256:37ba321ac2a46100c578a92e9a6aa33afe9ec99ffa084424291d84e456f490c1"}, + {file = "pydantic_core-2.23.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d063c6b9fed7d992bcbebfc9133f4c24b7a7f215d6b102f3e082b1117cddb72c"}, + {file = "pydantic_core-2.23.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6cb968da9a0746a0cf521b2b5ef25fc5a0bee9b9a1a8214e0a1cfaea5be7e8a4"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbefe079a520c5984e30e1f1f29325054b59534729c25b874a16a5048028d16"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbaaf2ef20d282659093913da9d402108203f7cb5955020bd8d1ae5a2325d1c4"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb539d7e5dc4aac345846f290cf504d2fd3c1be26ac4e8b5e4c2b688069ff4cf"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e6f33503c5495059148cc486867e1d24ca35df5fc064686e631e314d959ad5b"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04b07490bc2f6f2717b10c3969e1b830f5720b632f8ae2f3b8b1542394c47a8e"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:03795b9e8a5d7fda05f3873efc3f59105e2dcff14231680296b87b80bb327295"}, + {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c483dab0f14b8d3f0df0c6c18d70b21b086f74c87ab03c59250dbf6d3c89baba"}, + {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b2682038e255e94baf2c473dca914a7460069171ff5cdd4080be18ab8a7fd6e"}, + {file = "pydantic_core-2.23.3-cp38-none-win32.whl", hash = "sha256:f4a57db8966b3a1d1a350012839c6a0099f0898c56512dfade8a1fe5fb278710"}, + 
{file = "pydantic_core-2.23.3-cp38-none-win_amd64.whl", hash = "sha256:13dd45ba2561603681a2676ca56006d6dee94493f03d5cadc055d2055615c3ea"}, + {file = "pydantic_core-2.23.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82da2f4703894134a9f000e24965df73cc103e31e8c31906cc1ee89fde72cbd8"}, + {file = "pydantic_core-2.23.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dd9be0a42de08f4b58a3cc73a123f124f65c24698b95a54c1543065baca8cf0e"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89b731f25c80830c76fdb13705c68fef6a2b6dc494402987c7ea9584fe189f5d"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6de1ec30c4bb94f3a69c9f5f2182baeda5b809f806676675e9ef6b8dc936f28"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb68b41c3fa64587412b104294b9cbb027509dc2f6958446c502638d481525ef"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c3980f2843de5184656aab58698011b42763ccba11c4a8c35936c8dd6c7068c"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94f85614f2cba13f62c3c6481716e4adeae48e1eaa7e8bac379b9d177d93947a"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:510b7fb0a86dc8f10a8bb43bd2f97beb63cffad1203071dc434dac26453955cd"}, + {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1eba2f7ce3e30ee2170410e2171867ea73dbd692433b81a93758ab2de6c64835"}, + {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b259fd8409ab84b4041b7b3f24dcc41e4696f180b775961ca8142b5b21d0e70"}, + {file = "pydantic_core-2.23.3-cp39-none-win32.whl", hash = "sha256:40d9bd259538dba2f40963286009bf7caf18b5112b19d2b55b09c14dde6db6a7"}, + {file = "pydantic_core-2.23.3-cp39-none-win_amd64.whl", hash = 
"sha256:5a8cd3074a98ee70173a8633ad3c10e00dcb991ecec57263aacb4095c5efb958"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f399e8657c67313476a121a6944311fab377085ca7f490648c9af97fc732732d"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6b5547d098c76e1694ba85f05b595720d7c60d342f24d5aad32c3049131fa5c4"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dda0290a6f608504882d9f7650975b4651ff91c85673341789a476b1159f211"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6e5da855e9c55a0c67f4db8a492bf13d8d3316a59999cfbaf98cc6e401961"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:09e926397f392059ce0afdcac920df29d9c833256354d0c55f1584b0b70cf07e"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:87cfa0ed6b8c5bd6ae8b66de941cece179281239d482f363814d2b986b79cedc"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e61328920154b6a44d98cabcb709f10e8b74276bc709c9a513a8c37a18786cc4"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce3317d155628301d649fe5e16a99528d5680af4ec7aa70b90b8dacd2d725c9b"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e89513f014c6be0d17b00a9a7c81b1c426f4eb9224b15433f3d98c1a071f8433"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4f62c1c953d7ee375df5eb2e44ad50ce2f5aff931723b398b8bc6f0ac159791a"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2718443bc671c7ac331de4eef9b673063b10af32a0bb385019ad61dcf2cc8f6c"}, + {file = 
"pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d90e08b2727c5d01af1b5ef4121d2f0c99fbee692c762f4d9d0409c9da6541"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b676583fc459c64146debea14ba3af54e540b61762dfc0613dc4e98c3f66eeb"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:50e4661f3337977740fdbfbae084ae5693e505ca2b3130a6d4eb0f2281dc43b8"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:68f4cf373f0de6abfe599a38307f4417c1c867ca381c03df27c873a9069cda25"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:59d52cf01854cb26c46958552a21acb10dd78a52aa34c86f284e66b209db8cab"}, + {file = "pydantic_core-2.23.3.tar.gz", hash = "sha256:3cb0f65d8b4121c1b015c60104a685feb929a29d7cf204387c7f2688c7974690"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydantic-extra-types" +version = "2.9.0" +description = "Extra Pydantic types." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_extra_types-2.9.0-py3-none-any.whl", hash = "sha256:f0bb975508572ba7bf3390b7337807588463b7248587e69f43b1ad7c797530d0"}, + {file = "pydantic_extra_types-2.9.0.tar.gz", hash = "sha256:e061c01636188743bb69f368dcd391f327b8cfbfede2fe1cbb1211b06601ba3b"}, +] + +[package.dependencies] +pydantic = ">=2.5.2" + +[package.extras] +all = ["pendulum (>=3.0.0,<4.0.0)", "phonenumbers (>=8,<9)", "pycountry (>=23)", "python-ulid (>=1,<2)", "python-ulid (>=1,<3)", "pytz (>=2024.1)", "semver (>=3.0.2)", "tzdata (>=2024.1)"] +pendulum = ["pendulum (>=3.0.0,<4.0.0)"] +phonenumbers = ["phonenumbers (>=8,<9)"] +pycountry = ["pycountry (>=23)"] +python-ulid = ["python-ulid (>=1,<2)", "python-ulid (>=1,<3)"] +semver = ["semver (>=3.0.2)"] + +[[package]] +name = "pydantic-settings" +version = "2.5.2" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_settings-2.5.2-py3-none-any.whl", hash = "sha256:2c912e55fd5794a59bf8c832b9de832dcfdf4778d79ff79b708744eed499a907"}, + {file = "pydantic_settings-2.5.2.tar.gz", hash = "sha256:f90b139682bee4d2065273d5185d71d37ea46cfe57e1b5ae184fc6a0b2484ca0"}, +] + +[package.dependencies] +pydantic = ">=2.7.0" +python-dotenv = ">=0.21.0" + +[package.extras] +azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "pydantic-validators" +version = "0.1.0" +description = "" +optional = false +python-versions = ">=3.6.1,<4.0.0" +files = [ + {file = "pydantic-validators-0.1.0.tar.gz", hash = "sha256:1ce97dae12d3e7577051b473e864deee7cae2abc10fcdb5c489709093b0e1de2"}, + {file = "pydantic_validators-0.1.0-py3-none-any.whl", hash = "sha256:d2be33d1c7ff8974b59603c80d7265c71a7a23b321658ea593ee09bda7a7438f"}, +] + +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting 
package written in Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyright" +version = "1.1.380" +description = "Command line wrapper for pyright" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyright-1.1.380-py3-none-any.whl", hash = "sha256:a6404392053d8848bacc7aebcbd9d318bb46baf1a1a000359305481920f43879"}, + {file = "pyright-1.1.380.tar.gz", hash = "sha256:e6ceb1a5f7e9f03106e0aa1d6fbb4d97735a5e7ffb59f3de6b2db590baf935b2"}, +] + +[package.dependencies] +nodeenv = ">=1.6.0" + +[package.extras] +all = ["twine (>=3.4.1)"] +dev = ["twine (>=3.4.1)"] + +[[package]] +name = "pytest" +version = "8.3.3" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-alembic" +version = "0.11.1" +description = "A pytest plugin for verifying alembic migrations." 
+optional = false +python-versions = "<4,>=3.6" +files = [ + {file = "pytest_alembic-0.11.1-py3-none-any.whl", hash = "sha256:f83e8c1534d50ced053aa4b1dbf6e261f4674aa626cb852fc1dcb565049ae152"}, + {file = "pytest_alembic-0.11.1.tar.gz", hash = "sha256:a920d8770b5be77326c5c1b2bd8d4d4a0dd8fc2c2d57abbcd1fec28a21131b85"}, +] + +[package.dependencies] +alembic = "*" +pytest = ">=6.0" +sqlalchemy = "*" + +[[package]] +name = "pytest-asyncio" +version = "0.24.0" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, + {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, +] + +[package.dependencies] +pytest = ">=8.2,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-celery" +version = "1.1.2" +description = "Pytest plugin for Celery" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "pytest_celery-1.1.2-py3-none-any.whl", hash = "sha256:e117cf2aa5164f99d7ac8296bd3dee16413faa0153d9b5de2c9f9242c60f350b"}, + {file = "pytest_celery-1.1.2.tar.gz", hash = "sha256:b02cfab5775dd74af99e03ab963047fe1d980dc40f1da19934bf1142d11fa398"}, +] + +[package.dependencies] +boto3 = {version = "*", optional = true, markers = "extra == \"all\" or extra == \"sqs\""} +botocore = {version = "*", optional = true, markers = "extra == \"all\" or extra == \"sqs\""} +celery = "*" +debugpy = ">=1.8.5,<2.0.0" +docker = ">=7.1.0,<8.0.0" +psutil = ">=6.0.0" +pycurl = {version = "*", optional = true, markers = "sys_platform != \"win32\" and platform_python_implementation == \"CPython\" and (extra == \"all\" or extra == \"sqs\")"} +pytest-docker-tools = ">=3.1.3" +python-memcached = {version = "*", optional = true, markers = "extra 
== \"all\" or extra == \"memcached\""} +redis = {version = "*", optional = true, markers = "extra == \"all\" or extra == \"redis\""} +setuptools = ">=74.1.2" +tenacity = ">=9.0.0" +urllib3 = {version = "*", optional = true, markers = "extra == \"all\" or extra == \"sqs\""} + +[package.extras] +all = ["boto3", "botocore", "pycurl", "python-memcached", "redis", "urllib3"] +memcached = ["python-memcached"] +redis = ["redis"] +sqs = ["boto3", "botocore", "pycurl", "urllib3"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-docker-tools" +version = "3.1.3" +description = "Docker integration tests for pytest" +optional = false +python-versions = ">=3.7.0,<4.0.0" +files = [ + {file = "pytest_docker_tools-3.1.3-py3-none-any.whl", hash = "sha256:63e659043160f41d89f94ea42616102594bcc85682aac394fcbc14f14cd1b189"}, + {file = "pytest_docker_tools-3.1.3.tar.gz", hash = "sha256:c7e28841839d67b3ac80ad7b345b953701d5ae61ffda97586114244292aeacc0"}, +] + +[package.dependencies] +docker = ">=4.3.1" +pytest = ">=6.0.1" + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = 
"sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-memcached" +version = "1.62" +description = "Pure python memcached client" +optional = false +python-versions = "*" +files = [ + {file = "python-memcached-1.62.tar.gz", hash = "sha256:0285470599b7f593fbf3bec084daa1f483221e68c1db2cf1d846a9f7c2655103"}, + {file = "python_memcached-1.62-py2.py3-none-any.whl", hash = "sha256:1bdd8d2393ff53e80cd5e9442d750e658e0b35c3eebb3211af137303e3b729d1"}, +] + +[[package]] +name = "python-multipart" +version = "0.0.9" +description = "A streaming multipart parser for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python_multipart-0.0.9-py3-none-any.whl", hash = "sha256:97ca7b8ea7b05f977dc3849c3ba99d51689822fab725c3703af7c866a0c2b215"}, + {file = 
"python_multipart-0.0.9.tar.gz", hash = "sha256:03f54688c663f1b7977105f021043b0793151e4cb1c1a9d4a11fc13d622c4026"}, +] + +[package.extras] +dev = ["atomicwrites (==1.4.1)", "attrs (==23.2.0)", "coverage (==7.4.1)", "hatch", "invoke (==2.2.0)", "more-itertools (==10.2.0)", "pbr (==6.0.0)", "pluggy (==1.4.0)", "py (==1.11.0)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.2.0)", "pyyaml (==6.0.1)", "ruff (==0.2.1)"] + +[[package]] +name = "pytz" +version = "2024.2" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, +] + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = 
"sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash 
= "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = 
"PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "pyzmq" +version = "26.2.0" +description = "Python bindings for 0MQ" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629"}, + {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", 
hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea"}, + {file = "pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2"}, + {file = "pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971"}, + {file = "pyzmq-26.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa"}, + {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218"}, + {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6"}, + {file = "pyzmq-26.2.0-cp311-cp311-win32.whl", hash = "sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4"}, + {file = "pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5"}, + {file = "pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003"}, + {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9"}, + {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3"}, 
+ {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b"}, + {file = "pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7"}, + {file = "pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a"}, + {file = "pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b"}, + {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726"}, + {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e"}, + {file = "pyzmq-26.2.0-cp313-cp313-win32.whl", hash = 
"sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5"}, + {file = "pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad"}, + {file = "pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797"}, + {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a"}, + {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0"}, + {file = "pyzmq-26.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b55a4229ce5da9497dd0452b914556ae58e96a4381bb6f59f1305dfd7e53fc8"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:9cb3a6460cdea8fe8194a76de8895707e61ded10ad0be97188cc8463ffa7e3a8"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ab5cad923cc95c87bffee098a27856c859bd5d0af31bd346035aa816b081fe1"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ed69074a610fad1c2fda66180e7b2edd4d31c53f2d1872bc2d1211563904cd9"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cccba051221b916a4f5e538997c45d7d136a5646442b1231b916d0164067ea27"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0eaa83fc4c1e271c24eaf8fb083cbccef8fde77ec8cd45f3c35a9a123e6da097"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9edda2df81daa129b25a39b86cb57dfdfe16f7ec15b42b19bfac503360d27a93"}, + {file = "pyzmq-26.2.0-cp37-cp37m-win32.whl", hash = "sha256:ea0eb6af8a17fa272f7b98d7bebfab7836a0d62738e16ba380f440fceca2d951"}, + {file = "pyzmq-26.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4ff9dc6bc1664bb9eec25cd17506ef6672d506115095411e237d571e92a58231"}, + {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2eb7735ee73ca1b0d71e0e67c3739c689067f055c764f73aac4cc8ecf958ee3f"}, + {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a534f43bc738181aa7cbbaf48e3eca62c76453a40a746ab95d4b27b1111a7d2"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:aedd5dd8692635813368e558a05266b995d3d020b23e49581ddd5bbe197a8ab6"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8be4700cd8bb02cc454f630dcdf7cfa99de96788b80c51b60fe2fe1dac480289"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fcc03fa4997c447dce58264e93b5aa2d57714fbe0f06c07b7785ae131512732"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:402b190912935d3db15b03e8f7485812db350d271b284ded2b80d2e5704be780"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8685fa9c25ff00f550c1fec650430c4b71e4e48e8d852f7ddcf2e48308038640"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:76589c020680778f06b7e0b193f4b6dd66d470234a16e1df90329f5e14a171cd"}, + {file = "pyzmq-26.2.0-cp38-cp38-win32.whl", hash = "sha256:8423c1877d72c041f2c263b1ec6e34360448decfb323fa8b94e85883043ef988"}, + {file = "pyzmq-26.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:76589f2cd6b77b5bdea4fca5992dc1c23389d68b18ccc26a53680ba2dc80ff2f"}, + {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:b1d464cb8d72bfc1a3adc53305a63a8e0cac6bc8c5a07e8ca190ab8d3faa43c2"}, + {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4da04c48873a6abdd71811c5e163bd656ee1b957971db7f35140a2d573f6949c"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d049df610ac811dcffdc147153b414147428567fbbc8be43bb8885f04db39d98"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05590cdbc6b902101d0e65d6a4780af14dc22914cc6ab995d99b85af45362cc9"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c811cfcd6a9bf680236c40c6f617187515269ab2912f3d7e8c0174898e2519db"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6835dd60355593de10350394242b5757fbbd88b25287314316f266e24c61d073"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc6bee759a6bddea5db78d7dcd609397449cb2d2d6587f48f3ca613b19410cfc"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c530e1eecd036ecc83c3407f77bb86feb79916d4a33d11394b8234f3bd35b940"}, + {file = "pyzmq-26.2.0-cp39-cp39-win32.whl", hash = "sha256:367b4f689786fca726ef7a6c5ba606958b145b9340a5e4808132cc65759abd44"}, + {file = 
"pyzmq-26.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e6fa2e3e683f34aea77de8112f6483803c96a44fd726d7358b9888ae5bb394ec"}, + {file = "pyzmq-26.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:7445be39143a8aa4faec43b076e06944b8f9d0701b669df4af200531b21e40bb"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2ea4ad4e6a12e454de05f2949d4beddb52460f3de7c8b9d5c46fbb7d7222e02c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fc4f7a173a5609631bb0c42c23d12c49df3966f89f496a51d3eb0ec81f4519d6"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:878206a45202247781472a2d99df12a176fef806ca175799e1c6ad263510d57c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17c412bad2eb9468e876f556eb4ee910e62d721d2c7a53c7fa31e643d35352e6"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0d987a3ae5a71c6226b203cfd298720e0086c7fe7c74f35fa8edddfbd6597eed"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:39887ac397ff35b7b775db7201095fc6310a35fdbae85bac4523f7eb3b840e20"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fdb5b3e311d4d4b0eb8b3e8b4d1b0a512713ad7e6a68791d0923d1aec433d919"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:226af7dcb51fdb0109f0016449b357e182ea0ceb6b47dfb5999d569e5db161d5"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bed0e799e6120b9c32756203fb9dfe8ca2fb8467fed830c34c877e25638c3fc"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:29c7947c594e105cb9e6c466bace8532dc1ca02d498684128b339799f5248277"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cdeabcff45d1c219636ee2e54d852262e5c2e085d6cb476d938aee8d921356b3"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35cffef589bcdc587d06f9149f8d5e9e8859920a071df5a2671de2213bef592a"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18c8dc3b7468d8b4bdf60ce9d7141897da103c7a4690157b32b60acb45e333e6"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7133d0a1677aec369d67dd78520d3fa96dd7f3dcec99d66c1762870e5ea1a50a"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a96179a24b14fa6428cbfc08641c779a53f8fcec43644030328f44034c7f1f4"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4f78c88905461a9203eac9faac157a2a0dbba84a0fd09fd29315db27be40af9f"}, + {file = "pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f"}, +] + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} + +[[package]] +name = "redis" +version = "5.0.8" +description = "Python client for Redis 
database and key-value store" +optional = false +python-versions = ">=3.7" +files = [ + {file = "redis-5.0.8-py3-none-any.whl", hash = "sha256:56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4"}, + {file = "redis-5.0.8.tar.gz", hash = "sha256:0c5b10d387568dfe0698c6fad6615750c24170e548ca2deac10c649d463e9870"}, +] + +[package.extras] +hiredis = ["hiredis (>1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "ruff" +version = "0.3.7" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash 
= "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, + {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, + {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, + {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, + {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, +] + +[[package]] +name = "s3transfer" +version = "0.10.2" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">=3.8" +files = [ + {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, + {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, +] + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + +[[package]] +name = "setuptools" +version = "74.1.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-74.1.2-py3-none-any.whl", hash = "sha256:5f4c08aa4d3ebcb57a50c33b1b07e94315d7fc7230f7115e47fc99776c8ce308"}, + {file = "setuptools-74.1.2.tar.gz", hash = "sha256:95b40ed940a1c67eb70fc099094bd6e99c6ee7c23aa2306f4d2697ba7916f9c6"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers 
(==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.34" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:95d0b2cf8791ab5fb9e3aa3d9a79a0d5d51f55b6357eecf532a120ba3b5524db"}, + {file = 
"SQLAlchemy-2.0.34-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:243f92596f4fd4c8bd30ab8e8dd5965afe226363d75cab2468f2c707f64cd83b"}, + {file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ea54f7300553af0a2a7235e9b85f4204e1fc21848f917a3213b0e0818de9a24"}, + {file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:173f5f122d2e1bff8fbd9f7811b7942bead1f5e9f371cdf9e670b327e6703ebd"}, + {file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:196958cde924a00488e3e83ff917be3b73cd4ed8352bbc0f2989333176d1c54d"}, + {file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd90c221ed4e60ac9d476db967f436cfcecbd4ef744537c0f2d5291439848768"}, + {file = "SQLAlchemy-2.0.34-cp310-cp310-win32.whl", hash = "sha256:3166dfff2d16fe9be3241ee60ece6fcb01cf8e74dd7c5e0b64f8e19fab44911b"}, + {file = "SQLAlchemy-2.0.34-cp310-cp310-win_amd64.whl", hash = "sha256:6831a78bbd3c40f909b3e5233f87341f12d0b34a58f14115c9e94b4cdaf726d3"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7db3db284a0edaebe87f8f6642c2b2c27ed85c3e70064b84d1c9e4ec06d5d84"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:430093fce0efc7941d911d34f75a70084f12f6ca5c15d19595c18753edb7c33b"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79cb400c360c7c210097b147c16a9e4c14688a6402445ac848f296ade6283bbc"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1b30f31a36c7f3fee848391ff77eebdd3af5750bf95fbf9b8b5323edfdb4ec"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fddde2368e777ea2a4891a3fb4341e910a056be0bb15303bf1b92f073b80c02"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:80bd73ea335203b125cf1d8e50fef06be709619eb6ab9e7b891ea34b5baa2287"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-win32.whl", hash = "sha256:6daeb8382d0df526372abd9cb795c992e18eed25ef2c43afe518c73f8cccb721"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-win_amd64.whl", hash = "sha256:5bc08e75ed11693ecb648b7a0a4ed80da6d10845e44be0c98c03f2f880b68ff4"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:53e68b091492c8ed2bd0141e00ad3089bcc6bf0e6ec4142ad6505b4afe64163e"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bcd18441a49499bf5528deaa9dee1f5c01ca491fc2791b13604e8f972877f812"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:165bbe0b376541092bf49542bd9827b048357f4623486096fc9aaa6d4e7c59a2"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3330415cd387d2b88600e8e26b510d0370db9b7eaf984354a43e19c40df2e2b"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97b850f73f8abbffb66ccbab6e55a195a0eb655e5dc74624d15cff4bfb35bd74"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee4c6917857fd6121ed84f56d1dc78eb1d0e87f845ab5a568aba73e78adf83"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-win32.whl", hash = "sha256:fbb034f565ecbe6c530dff948239377ba859420d146d5f62f0271407ffb8c580"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-win_amd64.whl", hash = "sha256:707c8f44931a4facd4149b52b75b80544a8d824162602b8cd2fe788207307f9a"}, + {file = "SQLAlchemy-2.0.34-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:24af3dc43568f3780b7e1e57c49b41d98b2d940c1fd2e62d65d3928b6f95f021"}, + {file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60ed6ef0a35c6b76b7640fe452d0e47acc832ccbb8475de549a5cc5f90c2c06"}, + {file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:413c85cd0177c23e32dee6898c67a5f49296640041d98fddb2c40888fe4daa2e"}, + {file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:25691f4adfb9d5e796fd48bf1432272f95f4bbe5f89c475a788f31232ea6afba"}, + {file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:526ce723265643dbc4c7efb54f56648cc30e7abe20f387d763364b3ce7506c82"}, + {file = "SQLAlchemy-2.0.34-cp37-cp37m-win32.whl", hash = "sha256:13be2cc683b76977a700948411a94c67ad8faf542fa7da2a4b167f2244781cf3"}, + {file = "SQLAlchemy-2.0.34-cp37-cp37m-win_amd64.whl", hash = "sha256:e54ef33ea80d464c3dcfe881eb00ad5921b60f8115ea1a30d781653edc2fd6a2"}, + {file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:43f28005141165edd11fbbf1541c920bd29e167b8bbc1fb410d4fe2269c1667a"}, + {file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b68094b165a9e930aedef90725a8fcfafe9ef95370cbb54abc0464062dbf808f"}, + {file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1e03db964e9d32f112bae36f0cc1dcd1988d096cfd75d6a588a3c3def9ab2b"}, + {file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:203d46bddeaa7982f9c3cc693e5bc93db476ab5de9d4b4640d5c99ff219bee8c"}, + {file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ae92bebca3b1e6bd203494e5ef919a60fb6dfe4d9a47ed2453211d3bd451b9f5"}, + {file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9661268415f450c95f72f0ac1217cc6f10256f860eed85c2ae32e75b60278ad8"}, + {file = "SQLAlchemy-2.0.34-cp38-cp38-win32.whl", hash = "sha256:895184dfef8708e15f7516bd930bda7e50ead069280d2ce09ba11781b630a434"}, + {file = "SQLAlchemy-2.0.34-cp38-cp38-win_amd64.whl", hash = "sha256:6e7cde3a2221aa89247944cafb1b26616380e30c63e37ed19ff0bba5e968688d"}, + {file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:dbcdf987f3aceef9763b6d7b1fd3e4ee210ddd26cac421d78b3c206d07b2700b"}, + {file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce119fc4ce0d64124d37f66a6f2a584fddc3c5001755f8a49f1ca0a177ef9796"}, + {file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a17d8fac6df9835d8e2b4c5523666e7051d0897a93756518a1fe101c7f47f2f0"}, + {file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ebc11c54c6ecdd07bb4efbfa1554538982f5432dfb8456958b6d46b9f834bb7"}, + {file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e6965346fc1491a566e019a4a1d3dfc081ce7ac1a736536367ca305da6472a8"}, + {file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:220574e78ad986aea8e81ac68821e47ea9202b7e44f251b7ed8c66d9ae3f4278"}, + {file = "SQLAlchemy-2.0.34-cp39-cp39-win32.whl", hash = "sha256:b75b00083e7fe6621ce13cfce9d4469c4774e55e8e9d38c305b37f13cf1e874c"}, + {file = "SQLAlchemy-2.0.34-cp39-cp39-win_amd64.whl", hash = "sha256:c29d03e0adf3cc1a8c3ec62d176824972ae29b67a66cbb18daff3062acc6faa8"}, + {file = "SQLAlchemy-2.0.34-py3-none-any.whl", hash = "sha256:7286c353ee6475613d8beff83167374006c6b3e3f0e6491bfe8ca610eb1dec0f"}, + {file = "sqlalchemy-2.0.34.tar.gz", hash = "sha256:10d8f36990dd929690666679b0f42235c159a7051534adb135728ee52828dd22"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet 
(!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "sqlalchemy-mixins" +version = "2.0.5" +description = "Active Record, Django-like queries, nested eager load and beauty __repr__ for SQLAlchemy" +optional = false +python-versions = "*" +files = [ + {file = "sqlalchemy_mixins-2.0.5-py3-none-any.whl", hash = "sha256:9067b630744741b472aa91d92494cc5612ed2d29c66729a5a4a1d3fbbeccd448"}, + {file = "sqlalchemy_mixins-2.0.5.tar.gz", hash = "sha256:85197fc3682c4bf9c35671fb3d10282a0973b19cd2ff2b6791d601cbfb0fb89e"}, +] + +[package.dependencies] +six = "*" +SQLAlchemy = ">=2.0" + +[[package]] +name = "sqlalchemy-utils" +version = "0.41.2" +description = "Various utility functions for SQLAlchemy." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-Utils-0.41.2.tar.gz", hash = "sha256:bc599c8c3b3319e53ce6c5c3c471120bd325d0071fb6f38a10e924e3d07b9990"}, + {file = "SQLAlchemy_Utils-0.41.2-py3-none-any.whl", hash = "sha256:85cf3842da2bf060760f955f8467b87983fb2e30f1764fd0e24a48307dc8ec6e"}, +] + +[package.dependencies] +SQLAlchemy = ">=1.3" + +[package.extras] +arrow = ["arrow (>=0.3.4)"] +babel = ["Babel (>=1.3)"] +color = ["colour (>=0.0.4)"] +encrypted = ["cryptography (>=0.6)"] +intervals = ["intervals (>=0.7.1)"] +password = ["passlib (>=1.6,<2.0)"] +pendulum = ["pendulum (>=2.0.5)"] +phone = ["phonenumbers (>=5.9.2)"] +test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +timezone = ["python-dateutil"] +url = ["furl (>=0.4.1)"] + +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = 
"sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "starlette" +version = "0.36.3" +description = "The little ASGI library that shines." +optional = false +python-versions = ">=3.8" +files = [ + {file = "starlette-0.36.3-py3-none-any.whl", hash = "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044"}, + {file = "starlette-0.36.3.tar.gz", hash = "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" + +[package.extras] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] + +[[package]] +name = "tenacity" +version = "9.0.0" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, + {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, +] + +[package.extras] +doc = ["reno", "sphinx"] +test = ["pytest", "tornado (>=4.5)", "typeguard"] + +[[package]] +name = "tornado" +version = "6.4.1" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, + {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, + {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, + {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, +] + +[[package]] +name = "traitlets" +version = "5.14.3" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +files = [ + {file = "traitlets-5.14.3-py3-none-any.whl", hash = 
"sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "types-pytz" +version = "2024.2.0.20240913" +description = "Typing stubs for pytz" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-pytz-2024.2.0.20240913.tar.gz", hash = "sha256:4433b5df4a6fc587bbed41716d86a5ba5d832b4378e506f40d34bc9c81df2c24"}, + {file = "types_pytz-2024.2.0.20240913-py3-none-any.whl", hash = "sha256:a1eebf57ebc6e127a99d2fa2ba0a88d2b173784ef9b3defcc2004ab6855a44df"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "ujson" +version = "5.10.0" +description = "Ultra fast JSON encoder and decoder for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, + {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51"}, + {file = "ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518"}, + {file = "ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f"}, + {file = "ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00"}, + {file = "ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8"}, + {file = 
"ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1"}, + {file = "ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f"}, + {file = "ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720"}, + {file = "ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5"}, + {file = "ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e"}, + {file = "ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e"}, + {file = "ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc"}, + {file = "ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287"}, + {file = "ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f"}, + {file = "ujson-5.10.0-cp313-cp313-win32.whl", hash = 
"sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165"}, + {file = "ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539"}, + {file = "ujson-5.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a984a3131da7f07563057db1c3020b1350a3e27a8ec46ccbfbf21e5928a43050"}, + {file = "ujson-5.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73814cd1b9db6fc3270e9d8fe3b19f9f89e78ee9d71e8bd6c9a626aeaeaf16bd"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61e1591ed9376e5eddda202ec229eddc56c612b61ac6ad07f96b91460bb6c2fb"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2c75269f8205b2690db4572a4a36fe47cd1338e4368bc73a7a0e48789e2e35a"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7223f41e5bf1f919cd8d073e35b229295aa8e0f7b5de07ed1c8fddac63a6bc5d"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d4dc2fd6b3067c0782e7002ac3b38cf48608ee6366ff176bbd02cf969c9c20fe"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:232cc85f8ee3c454c115455195a205074a56ff42608fd6b942aa4c378ac14dd7"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc6139531f13148055d691e442e4bc6601f6dba1e6d521b1585d4788ab0bfad4"}, + {file = "ujson-5.10.0-cp38-cp38-win32.whl", hash = "sha256:e7ce306a42b6b93ca47ac4a3b96683ca554f6d35dd8adc5acfcd55096c8dfcb8"}, + {file = "ujson-5.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:e82d4bb2138ab05e18f089a83b6564fee28048771eb63cdecf4b9b549de8a2cc"}, + {file = "ujson-5.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b"}, + {file = "ujson-5.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996"}, + {file = "ujson-5.10.0-cp39-cp39-win32.whl", hash = "sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9"}, + {file = "ujson-5.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746"}, + {file = 
"ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7663960f08cd5a2bb152f5ee3992e1af7690a64c0e26d31ba7b3ff5b2ee66337"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8640fb4072d36b08e95a3a380ba65779d356b2fee8696afeb7794cf0902d0a1"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78778a3aa7aafb11e7ddca4e29f46bc5139131037ad628cc10936764282d6753"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0111b27f2d5c820e7f2dbad7d48e3338c824e7ac4d2a12da3dc6061cc39c8e6"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c66962ca7565605b355a9ed478292da628b8f18c0f2793021ca4425abf8b01e5"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7"}, + {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uvicorn" +version = "0.30.6" +description = "The lightning-fast ASGI server." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"}, + {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} +h11 = ">=0.8" +httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "uvloop" +version = "0.20.0" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9ebafa0b96c62881d5cafa02d9da2e44c23f9f0cd829f3a32a6aff771449c996"}, + {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:35968fc697b0527a06e134999eef859b4034b37aebca537daeb598b9d45a137b"}, + {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b16696f10e59d7580979b420eedf6650010a4a9c3bd8113f24a103dfdb770b10"}, + 
{file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b04d96188d365151d1af41fa2d23257b674e7ead68cfd61c725a422764062ae"}, + {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94707205efbe809dfa3a0d09c08bef1352f5d3d6612a506f10a319933757c006"}, + {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89e8d33bb88d7263f74dc57d69f0063e06b5a5ce50bb9a6b32f5fcbe655f9e73"}, + {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e50289c101495e0d1bb0bfcb4a60adde56e32f4449a67216a1ab2750aa84f037"}, + {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e237f9c1e8a00e7d9ddaa288e535dc337a39bcbf679f290aee9d26df9e72bce9"}, + {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:746242cd703dc2b37f9d8b9f173749c15e9a918ddb021575a0205ec29a38d31e"}, + {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82edbfd3df39fb3d108fc079ebc461330f7c2e33dbd002d146bf7c445ba6e756"}, + {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:80dc1b139516be2077b3e57ce1cb65bfed09149e1d175e0478e7a987863b68f0"}, + {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f44af67bf39af25db4c1ac27e82e9665717f9c26af2369c404be865c8818dcf"}, + {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4b75f2950ddb6feed85336412b9a0c310a2edbcf4cf931aa5cfe29034829676d"}, + {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:77fbc69c287596880ecec2d4c7a62346bef08b6209749bf6ce8c22bbaca0239e"}, + {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6462c95f48e2d8d4c993a2950cd3d31ab061864d1c226bbf0ee2f1a8f36674b9"}, + {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:649c33034979273fa71aa25d0fe120ad1777c551d8c4cd2c0c9851d88fcb13ab"}, + {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a609780e942d43a275a617c0839d85f95c334bad29c4c0918252085113285b5"}, + {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aea15c78e0d9ad6555ed201344ae36db5c63d428818b4b2a42842b3870127c00"}, + {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0e94b221295b5e69de57a1bd4aeb0b3a29f61be6e1b478bb8a69a73377db7ba"}, + {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fee6044b64c965c425b65a4e17719953b96e065c5b7e09b599ff332bb2744bdf"}, + {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:265a99a2ff41a0fd56c19c3838b29bf54d1d177964c300dad388b27e84fd7847"}, + {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10c2956efcecb981bf9cfb8184d27d5d64b9033f917115a960b83f11bfa0d6b"}, + {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e7d61fe8e8d9335fac1bf8d5d82820b4808dd7a43020c149b63a1ada953d48a6"}, + {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2beee18efd33fa6fdb0976e18475a4042cd31c7433c866e8a09ab604c7c22ff2"}, + {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8c36fdf3e02cec92aed2d44f63565ad1522a499c654f07935c8f9d04db69e95"}, + {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0fac7be202596c7126146660725157d4813aa29a4cc990fe51346f75ff8fde7"}, + {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d0fba61846f294bce41eb44d60d58136090ea2b5b99efd21cbdf4e21927c56a"}, + {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95720bae002ac357202e0d866128eb1ac82545bcf0b549b9abe91b5178d9b541"}, + {file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:36c530d8fa03bfa7085af54a48f2ca16ab74df3ec7108a46ba82fd8b411a2315"}, + {file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e97152983442b499d7a71e44f29baa75b3b02e65d9c44ba53b10338e98dedb66"}, + {file = "uvloop-0.20.0.tar.gz", hash = "sha256:4603ca714a754fc8d9b197e325db25b2ea045385e8a3ad05d3463de725fdf469"}, +] + +[package.extras] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] + +[[package]] +name = "vine" +version = "5.1.0" +description = "Python promises." +optional = false +python-versions = ">=3.6" +files = [ + {file = "vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc"}, + {file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"}, +] + +[[package]] +name = "virtualenv" +version = "20.26.4" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.26.4-py3-none-any.whl", hash = "sha256:48f2695d9809277003f30776d155615ffc11328e6a0a8c1f0ec80188d7874a55"}, + {file = "virtualenv-20.26.4.tar.gz", hash = "sha256:c17f4e0f3e6036e9f26700446f85c76ab11df65ff6d8a9cbfad9f71aabfcf23c"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", 
"pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "watchdog" +version = "4.0.2" +description = "Filesystem events monitoring" +optional = false +python-versions = ">=3.8" +files = [ + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c50f148b31b03fbadd6d0b5980e38b558046b127dc483e5e4505fcef250f9503"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c7d4bf585ad501c5f6c980e7be9c4f15604c7cc150e942d82083b31a7548930"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:914285126ad0b6eb2258bbbcb7b288d9dfd655ae88fa28945be05a7b475a800b"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:984306dc4720da5498b16fc037b36ac443816125a3705dfde4fd90652d8028ef"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1cdcfd8142f604630deef34722d695fb455d04ab7cfe9963055df1fc69e6727a"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7ab624ff2f663f98cd03c8b7eedc09375a911794dfea6bf2a359fcc266bff29"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:132937547a716027bd5714383dfc40dc66c26769f1ce8a72a859d6a48f371f3a"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:cd67c7df93eb58f360c43802acc945fa8da70c675b6fa37a241e17ca698ca49b"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcfd02377be80ef3b6bc4ce481ef3959640458d6feaae0bd43dd90a43da90a7d"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:980b71510f59c884d684b3663d46e7a14b457c9611c481e5cef08f4dd022eed7"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:aa160781cafff2719b663c8a506156e9289d111d80f3387cf3af49cedee1f040"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f6ee8dedd255087bc7fe82adf046f0b75479b989185fb0bdf9a98b612170eac7"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b4359067d30d5b864e09c8597b112fe0a0a59321a0f331498b013fb097406b4"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:770eef5372f146997638d737c9a3c597a3b41037cfbc5c41538fc27c09c3a3f9"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eeea812f38536a0aa859972d50c76e37f4456474b02bd93674d1947cf1e39578"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b2c45f6e1e57ebb4687690c05bc3a2c1fb6ab260550c4290b8abb1335e0fd08b"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:10b6683df70d340ac3279eff0b2766813f00f35a1d37515d2c99959ada8f05fa"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7c739888c20f99824f7aa9d31ac8a97353e22d0c0e54703a547a218f6637eb3"}, + {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c100d09ac72a8a08ddbf0629ddfa0b8ee41740f9051429baa8e31bb903ad7508"}, + {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f5315a8c8dd6dd9425b974515081fc0aadca1d1d61e078d2246509fd756141ee"}, + {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2d468028a77b42cc685ed694a7a550a8d1771bb05193ba7b24006b8241a571a1"}, + {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f15edcae3830ff20e55d1f4e743e92970c847bcddc8b7509bcd172aa04de506e"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_aarch64.whl", hash = 
"sha256:936acba76d636f70db8f3c66e76aa6cb5136a936fc2a5088b9ce1c7a3508fc83"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:e252f8ca942a870f38cf785aef420285431311652d871409a64e2a0a52a2174c"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:0e83619a2d5d436a7e58a1aea957a3c1ccbf9782c43c0b4fed80580e5e4acd1a"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:88456d65f207b39f1981bf772e473799fcdc10801062c36fd5ad9f9d1d463a73"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:32be97f3b75693a93c683787a87a0dc8db98bb84701539954eef991fb35f5fbc"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:c82253cfc9be68e3e49282831afad2c1f6593af80c0daf1287f6a92657986757"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c0b14488bd336c5b1845cee83d3e631a1f8b4e9c5091ec539406e4a324f882d8"}, + {file = "watchdog-4.0.2-py3-none-win32.whl", hash = "sha256:0d8a7e523ef03757a5aa29f591437d64d0d894635f8a50f370fe37f913ce4e19"}, + {file = "watchdog-4.0.2-py3-none-win_amd64.whl", hash = "sha256:c344453ef3bf875a535b0488e3ad28e341adbd5a9ffb0f7d62cefacc8824ef2b"}, + {file = "watchdog-4.0.2-py3-none-win_ia64.whl", hash = "sha256:baececaa8edff42cd16558a639a9b0ddf425f93d892e8392a56bf904f5eff22c"}, + {file = "watchdog-4.0.2.tar.gz", hash = "sha256:b4dfbb6c49221be4535623ea4474a4d6ee0a9cef4a80b20c28db4d858b64e270"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[[package]] +name = "watchfiles" +version = "0.24.0" +description = "Simple, modern and high performance file watching and code reload in python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "watchfiles-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:083dc77dbdeef09fa44bb0f4d1df571d2e12d8a8f985dccde71ac3ac9ac067a0"}, + {file = "watchfiles-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e94e98c7cb94cfa6e071d401ea3342767f28eb5a06a58fafdc0d2a4974f4f35c"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82ae557a8c037c42a6ef26c494d0631cacca040934b101d001100ed93d43f361"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:acbfa31e315a8f14fe33e3542cbcafc55703b8f5dcbb7c1eecd30f141df50db3"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b74fdffce9dfcf2dc296dec8743e5b0332d15df19ae464f0e249aa871fc1c571"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:449f43f49c8ddca87c6b3980c9284cab6bd1f5c9d9a2b00012adaaccd5e7decd"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4abf4ad269856618f82dee296ac66b0cd1d71450fc3c98532d93798e73399b7a"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f895d785eb6164678ff4bb5cc60c5996b3ee6df3edb28dcdeba86a13ea0465e"}, + {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ae3e208b31be8ce7f4c2c0034f33406dd24fbce3467f77223d10cd86778471c"}, + {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2efec17819b0046dde35d13fb8ac7a3ad877af41ae4640f4109d9154ed30a188"}, + {file = "watchfiles-0.24.0-cp310-none-win32.whl", hash = "sha256:6bdcfa3cd6fdbdd1a068a52820f46a815401cbc2cb187dd006cb076675e7b735"}, + {file = "watchfiles-0.24.0-cp310-none-win_amd64.whl", hash = "sha256:54ca90a9ae6597ae6dc00e7ed0a040ef723f84ec517d3e7ce13e63e4bc82fa04"}, + {file = 
"watchfiles-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bdcd5538e27f188dd3c804b4a8d5f52a7fc7f87e7fd6b374b8e36a4ca03db428"}, + {file = "watchfiles-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2dadf8a8014fde6addfd3c379e6ed1a981c8f0a48292d662e27cabfe4239c83c"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6509ed3f467b79d95fc62a98229f79b1a60d1b93f101e1c61d10c95a46a84f43"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8360f7314a070c30e4c976b183d1d8d1585a4a50c5cb603f431cebcbb4f66327"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:316449aefacf40147a9efaf3bd7c9bdd35aaba9ac5d708bd1eb5763c9a02bef5"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73bde715f940bea845a95247ea3e5eb17769ba1010efdc938ffcb967c634fa61"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3770e260b18e7f4e576edca4c0a639f704088602e0bc921c5c2e721e3acb8d15"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa0fd7248cf533c259e59dc593a60973a73e881162b1a2f73360547132742823"}, + {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d7a2e3b7f5703ffbd500dabdefcbc9eafeff4b9444bbdd5d83d79eedf8428fab"}, + {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d831ee0a50946d24a53821819b2327d5751b0c938b12c0653ea5be7dea9c82ec"}, + {file = "watchfiles-0.24.0-cp311-none-win32.whl", hash = "sha256:49d617df841a63b4445790a254013aea2120357ccacbed00253f9c2b5dc24e2d"}, + {file = "watchfiles-0.24.0-cp311-none-win_amd64.whl", hash = "sha256:d3dcb774e3568477275cc76554b5a565024b8ba3a0322f77c246bc7111c5bb9c"}, + {file = "watchfiles-0.24.0-cp311-none-win_arm64.whl", hash = 
"sha256:9301c689051a4857d5b10777da23fafb8e8e921bcf3abe6448a058d27fb67633"}, + {file = "watchfiles-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7211b463695d1e995ca3feb38b69227e46dbd03947172585ecb0588f19b0d87a"}, + {file = "watchfiles-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b8693502d1967b00f2fb82fc1e744df128ba22f530e15b763c8d82baee15370"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdab9555053399318b953a1fe1f586e945bc8d635ce9d05e617fd9fe3a4687d6"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34e19e56d68b0dad5cff62273107cf5d9fbaf9d75c46277aa5d803b3ef8a9e9b"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41face41f036fee09eba33a5b53a73e9a43d5cb2c53dad8e61fa6c9f91b5a51e"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5148c2f1ea043db13ce9b0c28456e18ecc8f14f41325aa624314095b6aa2e9ea"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e4bd963a935aaf40b625c2499f3f4f6bbd0c3776f6d3bc7c853d04824ff1c9f"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c79d7719d027b7a42817c5d96461a99b6a49979c143839fc37aa5748c322f234"}, + {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:32aa53a9a63b7f01ed32e316e354e81e9da0e6267435c7243bf8ae0f10b428ef"}, + {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce72dba6a20e39a0c628258b5c308779b8697f7676c254a845715e2a1039b968"}, + {file = "watchfiles-0.24.0-cp312-none-win32.whl", hash = "sha256:d9018153cf57fc302a2a34cb7564870b859ed9a732d16b41a9b5cb2ebed2d444"}, + {file = "watchfiles-0.24.0-cp312-none-win_amd64.whl", hash = "sha256:551ec3ee2a3ac9cbcf48a4ec76e42c2ef938a7e905a35b42a1267fa4b1645896"}, + {file = 
"watchfiles-0.24.0-cp312-none-win_arm64.whl", hash = "sha256:b52a65e4ea43c6d149c5f8ddb0bef8d4a1e779b77591a458a893eb416624a418"}, + {file = "watchfiles-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2e3ab79a1771c530233cadfd277fcc762656d50836c77abb2e5e72b88e3a48"}, + {file = "watchfiles-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327763da824817b38ad125dcd97595f942d720d32d879f6c4ddf843e3da3fe90"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82010f8ab451dabe36054a1622870166a67cf3fce894f68895db6f74bbdc94"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d64ba08db72e5dfd5c33be1e1e687d5e4fcce09219e8aee893a4862034081d4e"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1cf1f6dd7825053f3d98f6d33f6464ebdd9ee95acd74ba2c34e183086900a827"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43e3e37c15a8b6fe00c1bce2473cfa8eb3484bbeecf3aefbf259227e487a03df"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88bcd4d0fe1d8ff43675360a72def210ebad3f3f72cabfeac08d825d2639b4ab"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:999928c6434372fde16c8f27143d3e97201160b48a614071261701615a2a156f"}, + {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:30bbd525c3262fd9f4b1865cb8d88e21161366561cd7c9e1194819e0a33ea86b"}, + {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edf71b01dec9f766fb285b73930f95f730bb0943500ba0566ae234b5c1618c18"}, + {file = "watchfiles-0.24.0-cp313-none-win32.whl", hash = "sha256:f4c96283fca3ee09fb044f02156d9570d156698bc3734252175a38f0e8975f07"}, + {file = "watchfiles-0.24.0-cp313-none-win_amd64.whl", hash = 
"sha256:a974231b4fdd1bb7f62064a0565a6b107d27d21d9acb50c484d2cdba515b9366"}, + {file = "watchfiles-0.24.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ee82c98bed9d97cd2f53bdb035e619309a098ea53ce525833e26b93f673bc318"}, + {file = "watchfiles-0.24.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fd92bbaa2ecdb7864b7600dcdb6f2f1db6e0346ed425fbd01085be04c63f0b05"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f83df90191d67af5a831da3a33dd7628b02a95450e168785586ed51e6d28943c"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fca9433a45f18b7c779d2bae7beeec4f740d28b788b117a48368d95a3233ed83"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b995bfa6bf01a9e09b884077a6d37070464b529d8682d7691c2d3b540d357a0c"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed9aba6e01ff6f2e8285e5aa4154e2970068fe0fc0998c4380d0e6278222269b"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5171ef898299c657685306d8e1478a45e9303ddcd8ac5fed5bd52ad4ae0b69b"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4933a508d2f78099162da473841c652ad0de892719043d3f07cc83b33dfd9d91"}, + {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95cf3b95ea665ab03f5a54765fa41abf0529dbaf372c3b83d91ad2cfa695779b"}, + {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01def80eb62bd5db99a798d5e1f5f940ca0a05986dcfae21d833af7a46f7ee22"}, + {file = "watchfiles-0.24.0-cp38-none-win32.whl", hash = "sha256:4d28cea3c976499475f5b7a2fec6b3a36208656963c1a856d328aeae056fc5c1"}, + {file = "watchfiles-0.24.0-cp38-none-win_amd64.whl", hash = "sha256:21ab23fdc1208086d99ad3f69c231ba265628014d4aed31d4e8746bd59e88cd1"}, + {file = 
"watchfiles-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b665caeeda58625c3946ad7308fbd88a086ee51ccb706307e5b1fa91556ac886"}, + {file = "watchfiles-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5c51749f3e4e269231510da426ce4a44beb98db2dce9097225c338f815b05d4f"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b2509f08761f29a0fdad35f7e1638b8ab1adfa2666d41b794090361fb8b855"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a60e2bf9dc6afe7f743e7c9b149d1fdd6dbf35153c78fe3a14ae1a9aee3d98b"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7d9b87c4c55e3ea8881dfcbf6d61ea6775fffed1fedffaa60bd047d3c08c430"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:78470906a6be5199524641f538bd2c56bb809cd4bf29a566a75051610bc982c3"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07cdef0c84c03375f4e24642ef8d8178e533596b229d32d2bbd69e5128ede02a"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d337193bbf3e45171c8025e291530fb7548a93c45253897cd764a6a71c937ed9"}, + {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ec39698c45b11d9694a1b635a70946a5bad066b593af863460a8e600f0dff1ca"}, + {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e28d91ef48eab0afb939fa446d8ebe77e2f7593f5f463fd2bb2b14132f95b6e"}, + {file = "watchfiles-0.24.0-cp39-none-win32.whl", hash = "sha256:7138eff8baa883aeaa074359daabb8b6c1e73ffe69d5accdc907d62e50b1c0da"}, + {file = "watchfiles-0.24.0-cp39-none-win_amd64.whl", hash = "sha256:b3ef2c69c655db63deb96b3c3e587084612f9b1fa983df5e0c3379d41307467f"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:632676574429bee8c26be8af52af20e0c718cc7f5f67f3fb658c71928ccd4f7f"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a2a9891723a735d3e2540651184be6fd5b96880c08ffe1a98bae5017e65b544b"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7fa2bc0efef3e209a8199fd111b8969fe9db9c711acc46636686331eda7dd4"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01550ccf1d0aed6ea375ef259706af76ad009ef5b0203a3a4cce0f6024f9b68a"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:96619302d4374de5e2345b2b622dc481257a99431277662c30f606f3e22f42be"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:85d5f0c7771dcc7a26c7a27145059b6bb0ce06e4e751ed76cdf123d7039b60b5"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951088d12d339690a92cef2ec5d3cfd957692834c72ffd570ea76a6790222777"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fb58bcaa343fedc6a9e91f90195b20ccb3135447dc9e4e2570c3a39565853e"}, + {file = "watchfiles-0.24.0.tar.gz", hash = "sha256:afb72325b74fa7a428c009c1b8be4b4d7c2afedafb2982827ef2156646df2fe1"}, +] + +[package.dependencies] +anyio = ">=3.0.0" + +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "websockets" +version = "13.0.1" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "websockets-13.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1841c9082a3ba4a05ea824cf6d99570a6a2d8849ef0db16e9c826acb28089e8f"}, + {file = "websockets-13.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c5870b4a11b77e4caa3937142b650fbbc0914a3e07a0cf3131f35c0587489c1c"}, + {file = "websockets-13.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f1d3d1f2eb79fe7b0fb02e599b2bf76a7619c79300fc55f0b5e2d382881d4f7f"}, + {file = "websockets-13.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15c7d62ee071fa94a2fc52c2b472fed4af258d43f9030479d9c4a2de885fd543"}, + {file = "websockets-13.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6724b554b70d6195ba19650fef5759ef11346f946c07dbbe390e039bcaa7cc3d"}, + {file = "websockets-13.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56a952fa2ae57a42ba7951e6b2605e08a24801a4931b5644dfc68939e041bc7f"}, + {file = "websockets-13.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:17118647c0ea14796364299e942c330d72acc4b248e07e639d34b75067b3cdd8"}, + {file = "websockets-13.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64a11aae1de4c178fa653b07d90f2fb1a2ed31919a5ea2361a38760192e1858b"}, + {file = "websockets-13.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0617fd0b1d14309c7eab6ba5deae8a7179959861846cbc5cb528a7531c249448"}, + {file = "websockets-13.0.1-cp310-cp310-win32.whl", hash = "sha256:11f9976ecbc530248cf162e359a92f37b7b282de88d1d194f2167b5e7ad80ce3"}, + {file = "websockets-13.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c3c493d0e5141ec055a7d6809a28ac2b88d5b878bb22df8c621ebe79a61123d0"}, + {file = "websockets-13.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:699ba9dd6a926f82a277063603fc8d586b89f4cb128efc353b749b641fcddda7"}, + {file = 
"websockets-13.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf2fae6d85e5dc384bf846f8243ddaa9197f3a1a70044f59399af001fd1f51d4"}, + {file = "websockets-13.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:52aed6ef21a0f1a2a5e310fb5c42d7555e9c5855476bbd7173c3aa3d8a0302f2"}, + {file = "websockets-13.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8eb2b9a318542153674c6e377eb8cb9ca0fc011c04475110d3477862f15d29f0"}, + {file = "websockets-13.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5df891c86fe68b2c38da55b7aea7095beca105933c697d719f3f45f4220a5e0e"}, + {file = "websockets-13.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac2d146ff30d9dd2fcf917e5d147db037a5c573f0446c564f16f1f94cf87462"}, + {file = "websockets-13.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b8ac5b46fd798bbbf2ac6620e0437c36a202b08e1f827832c4bf050da081b501"}, + {file = "websockets-13.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:46af561eba6f9b0848b2c9d2427086cabadf14e0abdd9fde9d72d447df268418"}, + {file = "websockets-13.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b5a06d7f60bc2fc378a333978470dfc4e1415ee52f5f0fce4f7853eb10c1e9df"}, + {file = "websockets-13.0.1-cp311-cp311-win32.whl", hash = "sha256:556e70e4f69be1082e6ef26dcb70efcd08d1850f5d6c5f4f2bcb4e397e68f01f"}, + {file = "websockets-13.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:67494e95d6565bf395476e9d040037ff69c8b3fa356a886b21d8422ad86ae075"}, + {file = "websockets-13.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f9c9e258e3d5efe199ec23903f5da0eeaad58cf6fccb3547b74fd4750e5ac47a"}, + {file = "websockets-13.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6b41a1b3b561f1cba8321fb32987552a024a8f67f0d05f06fcf29f0090a1b956"}, + {file = "websockets-13.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:f73e676a46b0fe9426612ce8caeca54c9073191a77c3e9d5c94697aef99296af"}, + {file = "websockets-13.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f613289f4a94142f914aafad6c6c87903de78eae1e140fa769a7385fb232fdf"}, + {file = "websockets-13.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f52504023b1480d458adf496dc1c9e9811df4ba4752f0bc1f89ae92f4f07d0c"}, + {file = "websockets-13.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:139add0f98206cb74109faf3611b7783ceafc928529c62b389917a037d4cfdf4"}, + {file = "websockets-13.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:47236c13be337ef36546004ce8c5580f4b1150d9538b27bf8a5ad8edf23ccfab"}, + {file = "websockets-13.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c44ca9ade59b2e376612df34e837013e2b273e6c92d7ed6636d0556b6f4db93d"}, + {file = "websockets-13.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9bbc525f4be3e51b89b2a700f5746c2a6907d2e2ef4513a8daafc98198b92237"}, + {file = "websockets-13.0.1-cp312-cp312-win32.whl", hash = "sha256:3624fd8664f2577cf8de996db3250662e259bfbc870dd8ebdcf5d7c6ac0b5185"}, + {file = "websockets-13.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0513c727fb8adffa6d9bf4a4463b2bade0186cbd8c3604ae5540fae18a90cb99"}, + {file = "websockets-13.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1ee4cc030a4bdab482a37462dbf3ffb7e09334d01dd37d1063be1136a0d825fa"}, + {file = "websockets-13.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dbb0b697cc0655719522406c059eae233abaa3243821cfdfab1215d02ac10231"}, + {file = "websockets-13.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:acbebec8cb3d4df6e2488fbf34702cbc37fc39ac7abf9449392cefb3305562e9"}, + {file = "websockets-13.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:63848cdb6fcc0bf09d4a155464c46c64ffdb5807ede4fb251da2c2692559ce75"}, + {file = "websockets-13.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:872afa52a9f4c414d6955c365b6588bc4401272c629ff8321a55f44e3f62b553"}, + {file = "websockets-13.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05e70fec7c54aad4d71eae8e8cab50525e899791fc389ec6f77b95312e4e9920"}, + {file = "websockets-13.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e82db3756ccb66266504f5a3de05ac6b32f287faacff72462612120074103329"}, + {file = "websockets-13.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4e85f46ce287f5c52438bb3703d86162263afccf034a5ef13dbe4318e98d86e7"}, + {file = "websockets-13.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f3fea72e4e6edb983908f0db373ae0732b275628901d909c382aae3b592589f2"}, + {file = "websockets-13.0.1-cp313-cp313-win32.whl", hash = "sha256:254ecf35572fca01a9f789a1d0f543898e222f7b69ecd7d5381d8d8047627bdb"}, + {file = "websockets-13.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:ca48914cdd9f2ccd94deab5bcb5ac98025a5ddce98881e5cce762854a5de330b"}, + {file = "websockets-13.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b74593e9acf18ea5469c3edaa6b27fa7ecf97b30e9dabd5a94c4c940637ab96e"}, + {file = "websockets-13.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:132511bfd42e77d152c919147078460c88a795af16b50e42a0bd14f0ad71ddd2"}, + {file = "websockets-13.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:165bedf13556f985a2aa064309baa01462aa79bf6112fbd068ae38993a0e1f1b"}, + {file = "websockets-13.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e801ca2f448850685417d723ec70298feff3ce4ff687c6f20922c7474b4746ae"}, + {file = "websockets-13.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:30d3a1f041360f029765d8704eae606781e673e8918e6b2c792e0775de51352f"}, + {file = "websockets-13.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67648f5e50231b5a7f6d83b32f9c525e319f0ddc841be0de64f24928cd75a603"}, + {file = "websockets-13.0.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4f0426d51c8f0926a4879390f53c7f5a855e42d68df95fff6032c82c888b5f36"}, + {file = "websockets-13.0.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ef48e4137e8799998a343706531e656fdec6797b80efd029117edacb74b0a10a"}, + {file = "websockets-13.0.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:249aab278810bee585cd0d4de2f08cfd67eed4fc75bde623be163798ed4db2eb"}, + {file = "websockets-13.0.1-cp38-cp38-win32.whl", hash = "sha256:06c0a667e466fcb56a0886d924b5f29a7f0886199102f0a0e1c60a02a3751cb4"}, + {file = "websockets-13.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1f3cf6d6ec1142412d4535adabc6bd72a63f5f148c43fe559f06298bc21953c9"}, + {file = "websockets-13.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1fa082ea38d5de51dd409434edc27c0dcbd5fed2b09b9be982deb6f0508d25bc"}, + {file = "websockets-13.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4a365bcb7be554e6e1f9f3ed64016e67e2fa03d7b027a33e436aecf194febb63"}, + {file = "websockets-13.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:10a0dc7242215d794fb1918f69c6bb235f1f627aaf19e77f05336d147fce7c37"}, + {file = "websockets-13.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59197afd478545b1f73367620407b0083303569c5f2d043afe5363676f2697c9"}, + {file = "websockets-13.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d20516990d8ad557b5abeb48127b8b779b0b7e6771a265fa3e91767596d7d97"}, + {file = "websockets-13.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a1a2e272d067030048e1fe41aa1ec8cfbbaabce733b3d634304fa2b19e5c897f"}, + {file = "websockets-13.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ad327ac80ba7ee61da85383ca8822ff808ab5ada0e4a030d66703cc025b021c4"}, + {file = "websockets-13.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:518f90e6dd089d34eaade01101fd8a990921c3ba18ebbe9b0165b46ebff947f0"}, + {file = "websockets-13.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:68264802399aed6fe9652e89761031acc734fc4c653137a5911c2bfa995d6d6d"}, + {file = "websockets-13.0.1-cp39-cp39-win32.whl", hash = "sha256:a5dc0c42ded1557cc7c3f0240b24129aefbad88af4f09346164349391dea8e58"}, + {file = "websockets-13.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b448a0690ef43db5ef31b3a0d9aea79043882b4632cfc3eaab20105edecf6097"}, + {file = "websockets-13.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:faef9ec6354fe4f9a2c0bbb52fb1ff852effc897e2a4501e25eb3a47cb0a4f89"}, + {file = "websockets-13.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:03d3f9ba172e0a53e37fa4e636b86cc60c3ab2cfee4935e66ed1d7acaa4625ad"}, + {file = "websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d450f5a7a35662a9b91a64aefa852f0c0308ee256122f5218a42f1d13577d71e"}, + {file = "websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f55b36d17ac50aa8a171b771e15fbe1561217510c8768af3d546f56c7576cdc"}, + {file = "websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14b9c006cac63772b31abbcd3e3abb6228233eec966bf062e89e7fa7ae0b7333"}, + {file = "websockets-13.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b79915a1179a91f6c5f04ece1e592e2e8a6bd245a0e45d12fd56b2b59e559a32"}, + {file = "websockets-13.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:f40de079779acbcdbb6ed4c65af9f018f8b77c5ec4e17a4b737c05c2db554491"}, + {file = "websockets-13.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80e4ba642fc87fa532bac07e5ed7e19d56940b6af6a8c61d4429be48718a380f"}, + {file = "websockets-13.0.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a02b0161c43cc9e0232711eff846569fad6ec836a7acab16b3cf97b2344c060"}, + {file = "websockets-13.0.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6aa74a45d4cdc028561a7d6ab3272c8b3018e23723100b12e58be9dfa5a24491"}, + {file = "websockets-13.0.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00fd961943b6c10ee6f0b1130753e50ac5dcd906130dcd77b0003c3ab797d026"}, + {file = "websockets-13.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d93572720d781331fb10d3da9ca1067817d84ad1e7c31466e9f5e59965618096"}, + {file = "websockets-13.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:71e6e5a3a3728886caee9ab8752e8113670936a193284be9d6ad2176a137f376"}, + {file = "websockets-13.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c4a6343e3b0714e80da0b0893543bf9a5b5fa71b846ae640e56e9abc6fbc4c83"}, + {file = "websockets-13.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a678532018e435396e37422a95e3ab87f75028ac79570ad11f5bf23cd2a7d8c"}, + {file = "websockets-13.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6716c087e4aa0b9260c4e579bb82e068f84faddb9bfba9906cb87726fa2e870"}, + {file = "websockets-13.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e33505534f3f673270dd67f81e73550b11de5b538c56fe04435d63c02c3f26b5"}, + {file = "websockets-13.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:acab3539a027a85d568c2573291e864333ec9d912675107d6efceb7e2be5d980"}, + {file = "websockets-13.0.1-py3-none-any.whl", hash = "sha256:b80f0c51681c517604152eb6a572f5a9378f877763231fddb883ba2f968e8817"}, + {file = "websockets-13.0.1.tar.gz", hash = "sha256:4d6ece65099411cfd9a48d13701d7438d9c34f479046b34c50ff60bb8834e43e"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.12" +content-hash = "2a16ad9195dd67f2442c55835418c818e7528f094bc7bd99356700614ea6a3e5" diff --git a/pyproject.toml b/pyproject.toml index b964ced1..08384723 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ pandas-stubs = "^2.2.0.240218" python-multipart = "^0.0.9" sqlalchemy-mixins = "^2.0.5" aiofiles = "^23.2.1" +cfgv = "^3.4.0" [tool.poetry.group.dev.dependencies] @@ -39,6 +40,8 @@ ipykernel = "^6.29.3" polyfactory = "^2.15.0" pyright = "^1.1.355" pytest-alembic = "^0.11.0" +pytest-mock = "^3.14.0" +pytest-asyncio = "^0.24.0" [build-system] requires = ["poetry-core"] From ffd25d13af5f1396256eb0434b0539bbe595bb74 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 01:25:17 +0300 Subject: [PATCH 015/153] fix: replace dto module to internal --- internal/{usecase => }/dto/__init__.py | 0 internal/dto/repository/__init__.py | 0 internal/dto/worker/__init__.py | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename internal/{usecase => }/dto/__init__.py (100%) create mode 100644 internal/dto/repository/__init__.py create mode 100644 internal/dto/worker/__init__.py diff --git a/internal/usecase/dto/__init__.py b/internal/dto/__init__.py similarity index 100% rename from internal/usecase/dto/__init__.py rename to internal/dto/__init__.py diff --git a/internal/dto/repository/__init__.py b/internal/dto/repository/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/dto/worker/__init__.py b/internal/dto/worker/__init__.py new file mode 100644 index 00000000..e69de29b From a983a547ccb00dd4fe6da6978d6f87e479f6ef52 Mon Sep 17 00:00:00 
2001 From: raf-nr Date: Wed, 18 Sep 2024 01:27:41 +0300 Subject: [PATCH 016/153] feat(dto): add base schema for dto --- internal/dto/repository/base_schema.py | 30 ++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 internal/dto/repository/base_schema.py diff --git a/internal/dto/repository/base_schema.py b/internal/dto/repository/base_schema.py new file mode 100644 index 00000000..e03e9e4e --- /dev/null +++ b/internal/dto/repository/base_schema.py @@ -0,0 +1,30 @@ +import datetime + +from pydantic import BaseModel, ConfigDict + +from internal.domain.common import OptionalModel + + +class BaseSchema(BaseModel): + model_config = ConfigDict(from_attributes=True) + + +class BaseCreateSchema(BaseSchema): + ... + + +class BaseFindSchema[T](BaseSchema): + id: T + + +class BaseUpdateSchema[T](BaseSchema, OptionalModel): + __non_optional_fields__ = { + "id", + } + + +class BaseResponseSchema[T](BaseSchema): + id: T + + created_at: datetime.datetime | None = None + updated_at: datetime.datetime | None = None From 5cac8bbf04709836decdec39231b0e3257527c31 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 01:29:10 +0300 Subject: [PATCH 017/153] feat(dto): add dto schemas for file --- internal/dto/repository/file/__init__.py | 2 ++ internal/dto/repository/file/file.py | 26 ++++++++++++++++++++++++ 2 files changed, 28 insertions(+) create mode 100644 internal/dto/repository/file/__init__.py create mode 100644 internal/dto/repository/file/file.py diff --git a/internal/dto/repository/file/__init__.py b/internal/dto/repository/file/__init__.py new file mode 100644 index 00000000..651c249a --- /dev/null +++ b/internal/dto/repository/file/__init__.py @@ -0,0 +1,2 @@ +from internal.dto.repository.file.file import (File, FileResponseSchema, FileFindSchema, + FileCreateSchema, FileUpdateSchema) diff --git a/internal/dto/repository/file/file.py b/internal/dto/repository/file/file.py new file mode 100644 index 00000000..50c69305 --- /dev/null +++ 
b/internal/dto/repository/file/file.py @@ -0,0 +1,26 @@ +from typing import Protocol +from uuid import UUID + +from internal.dto.repository.base_schema import BaseCreateSchema, BaseUpdateSchema, BaseSchema + +class File(Protocol): + + filename: str | None + content_type: str | None + + async def read(self, chunk_size: int) -> bytes: ... + + +class FileBaseSchema(BaseSchema): + file_name: UUID + + +class FileCreateSchema(FileBaseSchema, BaseCreateSchema): ... + + +class FileUpdateSchema(FileBaseSchema, BaseUpdateSchema[UUID]): ... + + +class FileFindSchema(FileBaseSchema, BaseSchema): ... # it's not a typo + +FileResponseSchema = None From f086724bf62ea931af485d6e36e3ac53acdcd6d2 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 01:32:27 +0300 Subject: [PATCH 018/153] feat(dto): add dto schemas for file metadata --- internal/dto/repository/file/__init__.py | 2 ++ internal/dto/repository/file/file_metadata.py | 22 +++++++++++++++++++ 2 files changed, 24 insertions(+) create mode 100644 internal/dto/repository/file/file_metadata.py diff --git a/internal/dto/repository/file/__init__.py b/internal/dto/repository/file/__init__.py index 651c249a..c58b6c87 100644 --- a/internal/dto/repository/file/__init__.py +++ b/internal/dto/repository/file/__init__.py @@ -1,2 +1,4 @@ from internal.dto.repository.file.file import (File, FileResponseSchema, FileFindSchema, FileCreateSchema, FileUpdateSchema) +from internal.dto.repository.file.file_metadata import (FileMetadataResponseSchema, FileMetadataCreateSchema, + FileMetadataFindSchema, FileMetadataUpdateSchema) diff --git a/internal/dto/repository/file/file_metadata.py b/internal/dto/repository/file/file_metadata.py new file mode 100644 index 00000000..2a721455 --- /dev/null +++ b/internal/dto/repository/file/file_metadata.py @@ -0,0 +1,22 @@ +from uuid import UUID + +from internal.dto.repository.base_schema import BaseSchema, BaseCreateSchema, BaseUpdateSchema, \ + BaseResponseSchema, BaseFindSchema + + +class 
FileMetadataBaseSchema(BaseSchema): + file_name: UUID + original_file_name: str + mime_type: str + + +class FileMetadataCreateSchema(FileMetadataBaseSchema, BaseCreateSchema): ... + + +class FileMetadataUpdateSchema(FileMetadataBaseSchema, BaseUpdateSchema[UUID]): ... + + +class FileMetadataFindSchema(BaseFindSchema[UUID]): ... + + +class FileMetadataResponseSchema(FileMetadataBaseSchema, BaseResponseSchema[UUID]): ... From e0abbe2728839da2101a06b8c6564448e52a373a Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 02:21:57 +0300 Subject: [PATCH 019/153] feat(dto): add dto schemas for dataset --- internal/dto/repository/file/__init__.py | 2 ++ internal/dto/repository/file/dataset.py | 23 +++++++++++++++++++++++ 2 files changed, 25 insertions(+) create mode 100644 internal/dto/repository/file/dataset.py diff --git a/internal/dto/repository/file/__init__.py b/internal/dto/repository/file/__init__.py index c58b6c87..553d8eb2 100644 --- a/internal/dto/repository/file/__init__.py +++ b/internal/dto/repository/file/__init__.py @@ -2,3 +2,5 @@ FileCreateSchema, FileUpdateSchema) from internal.dto.repository.file.file_metadata import (FileMetadataResponseSchema, FileMetadataCreateSchema, FileMetadataFindSchema, FileMetadataUpdateSchema) +from internal.dto.repository.file.dataset import (DatasetResponseSchema, DatasetCreateSchema, + DatasetUpdateSchema, DatasetFindSchema) diff --git a/internal/dto/repository/file/dataset.py b/internal/dto/repository/file/dataset.py new file mode 100644 index 00000000..b20ebc8a --- /dev/null +++ b/internal/dto/repository/file/dataset.py @@ -0,0 +1,23 @@ +from uuid import UUID + +from internal.dto.repository.base_schema import BaseSchema, BaseCreateSchema, BaseUpdateSchema, \ + BaseResponseSchema, BaseFindSchema + + +class DatasetBaseSchema(BaseSchema): + file_id: UUID + separator: str + header: list[int] + is_built_in: bool = False + + +class DatasetCreateSchema(DatasetBaseSchema, BaseCreateSchema): ... 
+ + +class DatasetUpdateSchema(DatasetBaseSchema, BaseUpdateSchema[UUID]): ... + + +class DatasetFindSchema(BaseFindSchema[UUID]): ... + + +class DatasetResponseSchema(DatasetBaseSchema, BaseResponseSchema[UUID]): ... From 7dce2516415f1a4dbe317bbeca0f73c0867d35db Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 02:22:44 +0300 Subject: [PATCH 020/153] feat(dto): add dto schemas for task --- internal/dto/repository/task/__init__.py | 2 ++ internal/dto/repository/task/task.py | 31 ++++++++++++++++++++++++ 2 files changed, 33 insertions(+) create mode 100644 internal/dto/repository/task/__init__.py create mode 100644 internal/dto/repository/task/task.py diff --git a/internal/dto/repository/task/__init__.py b/internal/dto/repository/task/__init__.py new file mode 100644 index 00000000..862a92b9 --- /dev/null +++ b/internal/dto/repository/task/__init__.py @@ -0,0 +1,2 @@ +from internal.dto.repository.task.task import (TaskCreateSchema, TaskUpdateSchema, TaskFindSchema, + TaskResponseSchema) diff --git a/internal/dto/repository/task/task.py b/internal/dto/repository/task/task.py new file mode 100644 index 00000000..766aab69 --- /dev/null +++ b/internal/dto/repository/task/task.py @@ -0,0 +1,31 @@ +from uuid import UUID + +from internal.domain.task.value_objects import TaskStatus, OneOfTaskConfig, OneOfTaskResult, TaskFailureReason +from internal.dto.repository.base_schema import BaseSchema, BaseCreateSchema, BaseUpdateSchema, \ + BaseResponseSchema, BaseFindSchema + + +class TaskBaseSchema(BaseSchema): + status: TaskStatus + config: OneOfTaskConfig + dataset_id: UUID + + +class TaskCreateSchema(TaskBaseSchema, BaseCreateSchema): ... + + +class TaskUpdateSchema(TaskBaseSchema, BaseUpdateSchema[UUID]): + result: OneOfTaskResult | None + raised_exception_name: str | None + failure_reason: TaskFailureReason | None + traceback: str | None + + +class TaskFindSchema(BaseFindSchema[UUID]): ... 
+ + +class TaskResponseSchema(TaskBaseSchema, BaseResponseSchema[UUID]): + result: OneOfTaskResult | None = None + raised_exception_name: str | None = None + failure_reason: TaskFailureReason | None = None + traceback: str | None = None From 876314f164c7506a79679c27c529e56280d48f0c Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 02:23:39 +0300 Subject: [PATCH 021/153] chore: init repository exceptions file --- internal/dto/repository/exception.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 internal/dto/repository/exception.py diff --git a/internal/dto/repository/exception.py b/internal/dto/repository/exception.py new file mode 100644 index 00000000..e69de29b From 77f48147c3976157db4623f4f4549d2f87fbb2a3 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 02:24:29 +0300 Subject: [PATCH 022/153] feat(dto): add dto schemas for task worker --- internal/dto/worker/task/__init__.py | 1 + internal/dto/worker/task/profiling_task.py | 17 +++++++++++++++++ 2 files changed, 18 insertions(+) create mode 100644 internal/dto/worker/task/__init__.py create mode 100644 internal/dto/worker/task/profiling_task.py diff --git a/internal/dto/worker/task/__init__.py b/internal/dto/worker/task/__init__.py new file mode 100644 index 00000000..cd1f8b7d --- /dev/null +++ b/internal/dto/worker/task/__init__.py @@ -0,0 +1 @@ +from internal.dto.worker.task.profiling_task import ProfilingTaskCreateSchema diff --git a/internal/dto/worker/task/profiling_task.py b/internal/dto/worker/task/profiling_task.py new file mode 100644 index 00000000..303ddc03 --- /dev/null +++ b/internal/dto/worker/task/profiling_task.py @@ -0,0 +1,17 @@ +from uuid import UUID +from pydantic import BaseModel + +from internal.domain.task.value_objects import OneOfTaskConfig + + +class ProfilingTaskBaseSchema(BaseModel): + task_id: UUID + dataset_id: UUID + config: OneOfTaskConfig + + +class ProfilingTaskCreateSchema(ProfilingTaskBaseSchema): + ... 
+ + +ProfilingTaskResponseSchema = None From c48e450a43f7718261bc3e4afaf63bbbf9beb573 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 02:26:33 +0300 Subject: [PATCH 023/153] chore: add readme file for dto module --- internal/dto/README.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 internal/dto/README.md diff --git a/internal/dto/README.md b/internal/dto/README.md new file mode 100644 index 00000000..27afeda8 --- /dev/null +++ b/internal/dto/README.md @@ -0,0 +1,12 @@ +## Data transfer objects +This module contains the schemas and exceptions used to transfer data from one application layer to another. + +### repository +The `repository` submodule contains schemas for passing data between use cases and repositories - objects that provide an abstraction for working with data stores. + +In order to maintain naming consistency, when creating new schemas in this submodule, we recommend starting the name of the schema with a noun - describing the entity to which the schema relates. For example: **FileCreateSchema**. + +### worker +The `worker` submodule contains schemas for passing data between use cases and the worker, an object that provides an abstraction for working with background tasks. + +In order to follow the same naming convention, when creating new schemas in this submodule, we recommend starting the name of the schema with a verb - describing the action of the background task to which the schema relates. For example: **ProfilingTaskCreateSchema**. 
From 095054b9f2a5d9202ce10332c764354879abbcb9 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 02:28:57 +0300 Subject: [PATCH 024/153] feat: add file and dataset exceptions --- internal/usecase/exception/__init__.py | 1 - internal/usecase/exception/file.py | 31 ++++++++++++++++++++++++++ internal/usecase/exception/task.py | 14 ++++++++++++ internal/usecase/file/exception.py | 31 ++++++++++++++++++++++++++ 4 files changed, 76 insertions(+), 1 deletion(-) create mode 100644 internal/usecase/exception/file.py create mode 100644 internal/usecase/exception/task.py create mode 100644 internal/usecase/file/exception.py diff --git a/internal/usecase/exception/__init__.py b/internal/usecase/exception/__init__.py index 250a44ca..2ca00cc3 100644 --- a/internal/usecase/exception/__init__.py +++ b/internal/usecase/exception/__init__.py @@ -1,2 +1 @@ -from internal.usecase.exception.file import IncorrectFileFormatException, DatasetNotFoundException from internal.usecase.exception.task import TaskNotFoundException diff --git a/internal/usecase/exception/file.py b/internal/usecase/exception/file.py new file mode 100644 index 00000000..b732554a --- /dev/null +++ b/internal/usecase/exception/file.py @@ -0,0 +1,31 @@ +class IncorrectFileFormatException(Exception): + """ + Exception raised when a file format is incorrect or not supported. + + :param message: The error message to be reported. + :type message: str + """ + + def __init__(self, message: str): + """ + Initializes an instance of IncorrectFileFormatException with a specific error message. + + Args: + message(str): The error message to be reported. + """ + super().__init__(message) + +class DatasetNotFoundException(Exception): + """ + Exception raised when a dataset is not found. + + This exception does not require any additional attributes beyond the default message. + """ + + def __init__(self): + """ + Initializes an instance of DatasetNotFoundException without any specific message. 
+ + The default message "Dataset not found" is used. + """ + super().__init__("Dataset not found") diff --git a/internal/usecase/exception/task.py b/internal/usecase/exception/task.py new file mode 100644 index 00000000..c55f9846 --- /dev/null +++ b/internal/usecase/exception/task.py @@ -0,0 +1,14 @@ +class TaskNotFoundException(Exception): + """ + Exception raised when a task is not found in data storage. + + This exception does not require any additional attributes beyond the default message. + """ + + def __init__(self): + """ + Initializes an instance of TaskNotFoundException without any specific message. + + The default message "Task not found" is used. + """ + super().__init__("Task not found") diff --git a/internal/usecase/file/exception.py b/internal/usecase/file/exception.py new file mode 100644 index 00000000..b732554a --- /dev/null +++ b/internal/usecase/file/exception.py @@ -0,0 +1,31 @@ +class IncorrectFileFormatException(Exception): + """ + Exception raised when a file format is incorrect or not supported. + + :param message: The error message to be reported. + :type message: str + """ + + def __init__(self, message: str): + """ + Initializes an instance of IncorrectFileFormatException with a specific error message. + + Args: + message(str): The error message to be reported. + """ + super().__init__(message) + +class DatasetNotFoundException(Exception): + """ + Exception raised when a dataset is not found. + + This exception does not require any additional attributes beyond the default message. + """ + + def __init__(self): + """ + Initializes an instance of DatasetNotFoundException without any specific message. + + The default message "Dataset not found" is used. 
+ """ + super().__init__("Dataset not found") From 01c264e6b4be4bafd13a571d57143adcc9725eb2 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 02:30:41 +0300 Subject: [PATCH 025/153] fix: delete old use case exception module --- internal/usecase/exception/__init__.py | 1 - internal/usecase/exception/file.py | 31 -------------------------- internal/usecase/exception/task.py | 14 ------------ 3 files changed, 46 deletions(-) delete mode 100644 internal/usecase/exception/__init__.py delete mode 100644 internal/usecase/exception/file.py delete mode 100644 internal/usecase/exception/task.py diff --git a/internal/usecase/exception/__init__.py b/internal/usecase/exception/__init__.py deleted file mode 100644 index 2ca00cc3..00000000 --- a/internal/usecase/exception/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from internal.usecase.exception.task import TaskNotFoundException diff --git a/internal/usecase/exception/file.py b/internal/usecase/exception/file.py deleted file mode 100644 index b732554a..00000000 --- a/internal/usecase/exception/file.py +++ /dev/null @@ -1,31 +0,0 @@ -class IncorrectFileFormatException(Exception): - """ - Exception raised when a file format is incorrect or not supported. - - :param message: The error message to be reported. - :type message: str - """ - - def __init__(self, message: str): - """ - Initializes an instance of IncorrectFileFormatException with a specific error message. - - Args: - message(str): The error message to be reported. - """ - super().__init__(message) - -class DatasetNotFoundException(Exception): - """ - Exception raised when a dataset is not found. - - This exception does not require any additional attributes beyond the default message. - """ - - def __init__(self): - """ - Initializes an instance of DatasetNotFoundException without any specific message. - - The default message "Dataset not found" is used. 
- """ - super().__init__("Dataset not found") diff --git a/internal/usecase/exception/task.py b/internal/usecase/exception/task.py deleted file mode 100644 index c55f9846..00000000 --- a/internal/usecase/exception/task.py +++ /dev/null @@ -1,14 +0,0 @@ -class TaskNotFoundException(Exception): - """ - Exception raised when a task is not found in data storage. - - This exception does not require any additional attributes beyond the default message. - """ - - def __init__(self): - """ - Initializes an instance of TaskNotFoundException without any specific message. - - The default message "Task not found" is used. - """ - super().__init__("Task not found") From 1aea56a01f6744cbe38794170c4ae05abd20dd07 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 15:56:32 +0300 Subject: [PATCH 026/153] feat(use case): add use case for file's content type checking --- internal/usecase/file/__init__.py | 1 + internal/usecase/file/check_content_type.py | 14 ++++++++++++++ 2 files changed, 15 insertions(+) create mode 100644 internal/usecase/file/check_content_type.py diff --git a/internal/usecase/file/__init__.py b/internal/usecase/file/__init__.py index e69de29b..0c650a37 100644 --- a/internal/usecase/file/__init__.py +++ b/internal/usecase/file/__init__.py @@ -0,0 +1 @@ +from internal.usecase.file.check_content_type import CheckContentType diff --git a/internal/usecase/file/check_content_type.py b/internal/usecase/file/check_content_type.py new file mode 100644 index 00000000..7443e5a5 --- /dev/null +++ b/internal/usecase/file/check_content_type.py @@ -0,0 +1,14 @@ +from typing import Protocol + +from internal.usecase.file.exception import IncorrectFileFormatException + + +class File(Protocol): + content_type: str + + +class CheckContentType: + + def __call__(self, *, upload_file: File) -> None: + if upload_file.content_type != "text/csv": # TODO: replace with actual validation + raise IncorrectFileFormatException("File is not CSV") From 
d35b28f73ac4faa29a5edc6d813dd902e21e4afe Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 15:58:49 +0300 Subject: [PATCH 027/153] feat(tests): add tests for file's content type checking use case --- tests/usecase/test_check_content_type.py | 35 ++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 tests/usecase/test_check_content_type.py diff --git a/tests/usecase/test_check_content_type.py b/tests/usecase/test_check_content_type.py new file mode 100644 index 00000000..f2aa4a9b --- /dev/null +++ b/tests/usecase/test_check_content_type.py @@ -0,0 +1,35 @@ +import pytest +from pytest_mock import MockerFixture + +from internal.dto.repository.file import File +from internal.usecase.file import CheckContentType +from internal.usecase.file.exception import IncorrectFileFormatException + + +@pytest.fixture +def check_content_type() -> CheckContentType: + return CheckContentType() + + +@pytest.mark.parametrize( + "content_type, expected_exception", + [ + ("text/csv", None), + ("application/json", IncorrectFileFormatException), + ("", IncorrectFileFormatException), + ], +) +def test_check_content_type( + check_content_type: CheckContentType, + mocker: MockerFixture, + content_type: str, + expected_exception: IncorrectFileFormatException | None, +): + upload_file = mocker.Mock(spec=File) + upload_file.content_type = content_type + + if expected_exception: + with pytest.raises(expected_exception): + check_content_type(upload_file=upload_file) + else: + check_content_type(upload_file=upload_file) From adf1394bb8949400306fe2204ad7f67d30a60ed9 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 16:18:59 +0300 Subject: [PATCH 028/153] feat: add Unit Of Work pattern implementation --- internal/uow/README.md | 33 +++++++++++++++++++++++++++++++++ internal/uow/__init__.py | 1 + internal/uow/uow.py | 29 +++++++++++++++++++++++++++++ 3 files changed, 63 insertions(+) create mode 100644 internal/uow/README.md create mode 100644 
internal/uow/__init__.py create mode 100644 internal/uow/uow.py diff --git a/internal/uow/README.md b/internal/uow/README.md new file mode 100644 index 00000000..e704aae0 --- /dev/null +++ b/internal/uow/README.md @@ -0,0 +1,33 @@ +# Unit of Work (UoW) +This module implements the Unit of Work (UoW) pattern, which is designed to manage transactions across multiple operations. It ensures that all changes within a transaction are either successfully committed or completely rolled back in case of an error. + +## What is UoW? +The Unit of Work pattern manages transactional operations within a business process. It groups multiple changes to a data store into a single logical transaction, ensuring that either all operations succeed or none do. This is particularly useful for preventing partial updates, ensuring data integrity, and managing rollbacks in case of errors. + +## Implementation +The Unit Of Work class works with a DataStorageContext interface, which defines essential methods like commit, flush, rollback, and close. This allows different types of data storage (e.g., relational databases, file systems) to be plugged in while adhering to a unified transaction control mechanism. + +To use UoW in your use case, you need to implement the DataStorageContext interface for your data store (if not already done), and you also need to have a repository implementation that supports working with your DataStorageContext. + +### Example +```python + +from typing import Protocol +from uuid import UUID, uuid4 +from sqlalchemy.orm import Session +from internal.uow import UnitOfWork, DataStorageContext + +class DatasetRepo(Protocol): + def create(self, file_id: UUID, context: DataStorageContext) -> None: ... 
+ +def create_uow(context: Session) -> UnitOfWork: + return UnitOfWork(context=context) + +def create_two_datasets( + uow: UnitOfWork, + dataset_repo: DatasetRepo +) -> None: + with uow as context: + dataset_repo.create(uuid4(), context=context) + dataset_repo.create(uuid4(), context=context) +``` diff --git a/internal/uow/__init__.py b/internal/uow/__init__.py new file mode 100644 index 00000000..aedf3b4a --- /dev/null +++ b/internal/uow/__init__.py @@ -0,0 +1 @@ +from internal.uow.uow import DataStorageContext, UnitOfWork diff --git a/internal/uow/uow.py b/internal/uow/uow.py new file mode 100644 index 00000000..759e80fa --- /dev/null +++ b/internal/uow/uow.py @@ -0,0 +1,29 @@ +from typing import Protocol, runtime_checkable + + +@runtime_checkable +class DataStorageContext(Protocol): + + def commit(self) -> None: ... + + def flush(self) -> None: ... + + def rollback(self) -> None: ... + + def close(self) -> None: ... + + +class UnitOfWork: + + def __init__(self, context: DataStorageContext): + self._context = context + + def __enter__(self) -> DataStorageContext: + return self._context + + def __exit__(self, exc_type, exc_val, exc_tb) -> None: + if exc_type: + self._context.rollback() + else: + self._context.commit() + self._context.close() From 4dccf1c2584e2ee44ae12150eaf76e0a37f3a7f2 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 16:19:09 +0300 Subject: [PATCH 029/153] feat(tests): add tests for unit of work --- tests/uow/test_unit_of_work.py | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 tests/uow/test_unit_of_work.py diff --git a/tests/uow/test_unit_of_work.py b/tests/uow/test_unit_of_work.py new file mode 100644 index 00000000..351ee831 --- /dev/null +++ b/tests/uow/test_unit_of_work.py @@ -0,0 +1,34 @@ +import pytest +from pytest_mock import MockerFixture + +from internal.uow import DataStorageContext, UnitOfWork + + +@pytest.fixture +def context_mock(mocker: MockerFixture) -> DataStorageContext: + 
return mocker.Mock(spec=DataStorageContext) + + +def test_unit_of_work_commit_on_success(context_mock: DataStorageContext) -> None: + uow = UnitOfWork(context_mock) + + with uow as context: + assert isinstance(context, DataStorageContext) + pass + + context_mock.commit.assert_called_once() + context_mock.rollback.assert_not_called() + context_mock.close.assert_called_once() + + +def test_unit_of_work_rollback_on_failure(context_mock: DataStorageContext) -> None: + uow = UnitOfWork(context_mock) + + with pytest.raises(ValueError): + with uow as context: + assert isinstance(context, DataStorageContext) + raise ValueError("Test error") + + context_mock.commit.assert_not_called() + context_mock.rollback.assert_called_once() + context_mock.close.assert_called_once() From 549c90516bf47e69ea85dd96047be21ab4c8b178 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 16:54:17 +0300 Subject: [PATCH 030/153] feat(use case): add use case for saving datasets --- internal/usecase/file/__init__.py | 1 + internal/usecase/file/save_dataset.py | 36 +++++++++++++++++++++++++++ 2 files changed, 37 insertions(+) create mode 100644 internal/usecase/file/save_dataset.py diff --git a/internal/usecase/file/__init__.py b/internal/usecase/file/__init__.py index 0c650a37..b614b149 100644 --- a/internal/usecase/file/__init__.py +++ b/internal/usecase/file/__init__.py @@ -1 +1,2 @@ from internal.usecase.file.check_content_type import CheckContentType +from internal.usecase.file.save_dataset import SaveDataset diff --git a/internal/usecase/file/save_dataset.py b/internal/usecase/file/save_dataset.py new file mode 100644 index 00000000..363912cc --- /dev/null +++ b/internal/usecase/file/save_dataset.py @@ -0,0 +1,36 @@ +from typing import Protocol +from uuid import UUID + +from internal.dto.repository.file import DatasetCreateSchema, DatasetResponseSchema +from internal.uow import DataStorageContext, UnitOfWork + + +class DatasetRepo(Protocol): + + def create(self, dataset_info: 
DatasetCreateSchema, context: DataStorageContext) -> DatasetResponseSchema: ... + + +class SaveDataset: + + def __init__(self, unit_of_work: UnitOfWork, dataset_repo: DatasetRepo): + self.unit_of_work = unit_of_work + self.dataset_repo = dataset_repo + + def __call__( + self, + *, + file_id: UUID, + separator: str, + header: list[int], + ) -> UUID: + + dataset_create_schema = DatasetCreateSchema( + file_id=file_id, + separator=separator, + header=header + ) + + with self.unit_of_work as context: + result = self.dataset_repo.create(dataset_create_schema, context) + + return result.id From 04d67bc0d095380cad6edbcbc68d6cf6dfb278b6 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 16:57:33 +0300 Subject: [PATCH 031/153] feat(tests): add tests for save dataset use case --- tests/usecase/test_save_dataset.py | 63 ++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 tests/usecase/test_save_dataset.py diff --git a/tests/usecase/test_save_dataset.py b/tests/usecase/test_save_dataset.py new file mode 100644 index 00000000..ed0eefdb --- /dev/null +++ b/tests/usecase/test_save_dataset.py @@ -0,0 +1,63 @@ +from uuid import uuid4 + +import pytest +from pytest_mock import MockerFixture + +from internal.dto.repository.file import DatasetResponseSchema, DatasetCreateSchema +from internal.uow import UnitOfWork, DataStorageContext +from internal.usecase.file.save_dataset import DatasetRepo, SaveDataset + + +@pytest.fixture +def unit_of_work_mock(mocker: MockerFixture) -> UnitOfWork: + mock = mocker.MagicMock() + mock.__enter__.return_value = mocker.Mock( + return_value=mocker.Mock(), spec=DataStorageContext + ) + mock.__exit__.return_value = mocker.Mock(return_value=None) + return mock + + +@pytest.fixture +def dataset_repo_mock(mocker: MockerFixture) -> DatasetRepo: + mock = mocker.Mock(spec=DatasetRepo) + return mock + + +@pytest.fixture +def save_dataset( + unit_of_work_mock: UnitOfWork, dataset_repo_mock: DatasetRepo +) -> SaveDataset: + 
return SaveDataset(unit_of_work=unit_of_work_mock, dataset_repo=dataset_repo_mock) + + +def test_save_dataset( + save_dataset: SaveDataset, + unit_of_work_mock: UnitOfWork, + dataset_repo_mock: DatasetRepo, +) -> None: + # Prepare data + file_id = uuid4() + dataset_id = uuid4() + separator = "?" + header = [1, 2, 3] + + dataset_repo_mock.create.return_value = DatasetResponseSchema( + id=dataset_id, file_id=file_id, separator=separator, header=header + ) + + # Act + result_id = save_dataset(file_id=file_id, separator=separator, header=header) + + # Check that the create method was called with the correct arguments + dataset_repo_mock.create.assert_called_once_with( + DatasetCreateSchema(file_id=file_id, separator=separator, header=header), + unit_of_work_mock.__enter__.return_value, + ) + + # Check that UnitOfWork was entered and exited correctly + unit_of_work_mock.__enter__.assert_called_once() + unit_of_work_mock.__exit__.assert_called_once() + + # Verify that the result of the use case matches the expected dataset_id + assert result_id == dataset_id From 9bacfc0a1ea9abf7d0a00ff678b3f743f99ca9fe Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 17:18:51 +0300 Subject: [PATCH 032/153] feat(use case): add use case for file saving --- internal/usecase/file/save_file.py | 74 ++++++++++++++++++++++++++++++ 1 file changed, 74 insertions(+) create mode 100644 internal/usecase/file/save_file.py diff --git a/internal/usecase/file/save_file.py b/internal/usecase/file/save_file.py new file mode 100644 index 00000000..797e7c09 --- /dev/null +++ b/internal/usecase/file/save_file.py @@ -0,0 +1,74 @@ +import datetime +from typing import Protocol +from uuid import UUID +from pydantic import BaseModel + +from internal.domain.file import File as FileEntity +from internal.dto.repository.file import FileCreateSchema, FileResponseSchema, File +from internal.dto.repository.file import FileMetadataCreateSchema, FileMetadataResponseSchema +from internal.uow import 
DataStorageContext, UnitOfWork + + +class FileRepo(Protocol): + + async def create( + self, + file: File, + file_info: FileCreateSchema, + context: DataStorageContext + ) -> FileResponseSchema: ... + + +class FileMetadataRepo(Protocol): + + def create( + self, + file_metadata: FileMetadataCreateSchema, + context: DataStorageContext + ) -> FileMetadataResponseSchema: ... + + +class SaveFileUseCaseResult(BaseModel): + id: UUID + file_name: UUID + original_file_name: str + mime_type: str + created_at: datetime.datetime | None + updated_at: datetime.datetime | None + + +class SaveFile: + + def __init__( + self, + unit_of_work: UnitOfWork, + file_repo: FileRepo, + file_metadata_repo: FileMetadataRepo + ): + + self.unit_of_work = unit_of_work + self.file_repo = file_repo + self.file_metadata_repo = file_metadata_repo + + async def __call__(self, *, upload_file: File) -> SaveFileUseCaseResult: + file = FileEntity() + + create_file_schema = FileCreateSchema(file_name=file.name_as_uuid) + file_metadata_create_schema = FileMetadataCreateSchema( + file_name=file.name_as_uuid, + original_file_name=upload_file.filename, + mime_type=upload_file.content_type + ) + + with self.unit_of_work as context: + response = self.file_metadata_repo.create(file_metadata_create_schema, context) + await self.file_repo.create(upload_file, create_file_schema, context) + + return SaveFileUseCaseResult( + id=response.id, + file_name=response.file_name, + original_file_name=response.original_file_name, + mime_type=response.mime_type, + created_at=response.created_at, + updated_at=response.updated_at + ) From 5054d112e53bd6e9c1a997400f823fab91abb6fa Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 17:56:39 +0300 Subject: [PATCH 033/153] feat(tests): add tests for save file use case --- tests/usecase/test_save_file.py | 139 ++++++++++++++++++++++++++++++++ 1 file changed, 139 insertions(+) create mode 100644 tests/usecase/test_save_file.py diff --git a/tests/usecase/test_save_file.py 
b/tests/usecase/test_save_file.py new file mode 100644 index 00000000..9aea7e74 --- /dev/null +++ b/tests/usecase/test_save_file.py @@ -0,0 +1,139 @@ +from datetime import datetime +from uuid import uuid4 + +import pytest +from pytest_mock import MockerFixture + +from internal.domain.file import File as FileEntity +from internal.dto.repository.file import ( + FileMetadataResponseSchema, + File, + FileMetadataCreateSchema, + FileCreateSchema, + FileResponseSchema, +) +from internal.uow import UnitOfWork, DataStorageContext +from internal.usecase.file.save_file import ( + FileMetadataRepo, + FileRepo, + SaveFile, + SaveFileUseCaseResult, +) + + +@pytest.fixture +def unit_of_work_mock(mocker: MockerFixture) -> UnitOfWork: + mock = mocker.MagicMock() + mock.__enter__.return_value = mocker.Mock( + return_value=mocker.Mock(), spec=DataStorageContext + ) + mock.__exit__.return_value = mocker.Mock(return_value=None) + return mock + + +@pytest.fixture +def file_entity_mock(mocker: MockerFixture) -> FileEntity: + mock = mocker.Mock(spec=FileEntity) + mock.name_as_uuid = uuid4() + mock.name = str(mock.name_as_uuid) + return mock + + +@pytest.fixture +def file_repo_mock(mocker: MockerFixture) -> FileRepo: + mock = mocker.Mock(spec=FileRepo) + return mock + + +@pytest.fixture +def file_metadata_repo_mock(mocker: MockerFixture) -> FileMetadataRepo: + mock = mocker.Mock(spec=FileMetadataRepo) + return mock + + +@pytest.fixture +def save_file( + mocker: MockerFixture, + unit_of_work_mock: UnitOfWork, + file_repo_mock: FileRepo, + file_metadata_repo_mock: FileMetadataRepo, + file_entity_mock: FileEntity, +) -> SaveFile: + mocker.patch( + "internal.usecase.file.save_file.FileEntity", return_value=file_entity_mock + ) + return SaveFile( + unit_of_work=unit_of_work_mock, + file_repo=file_repo_mock, + file_metadata_repo=file_metadata_repo_mock, + ) + + +@pytest.mark.asyncio +async def test_save_file( + mocker: MockerFixture, + save_file: SaveFile, + unit_of_work_mock: UnitOfWork, + 
file_repo_mock: FileRepo, + file_metadata_repo_mock: FileMetadataRepo, + file_entity_mock: FileEntity, +) -> None: + # Prepare data + file_id = uuid4() + file_name = file_entity_mock.name_as_uuid + original_file_name = "example.txt" + mime_type = "text/plain" + created_at = datetime.now() + updated_at = datetime.now() + + # Make mocks for entities and repositories responses + file_metadata_response = FileMetadataResponseSchema( + id=file_id, + file_name=file_name, + original_file_name=original_file_name, + mime_type=mime_type, + created_at=created_at, + updated_at=updated_at, + ) + + file_response = FileResponseSchema + + file_repo_mock.create.return_value = file_response + file_metadata_repo_mock.create.return_value = file_metadata_response + + upload_file_mock = mocker.Mock(spec=File) + upload_file_mock.filename = original_file_name + upload_file_mock.content_type = mime_type + + # Act + result = await save_file(upload_file=upload_file_mock) + + # Check that the repositories' create methods were called with the correct arguments + file_metadata_repo_mock.create.assert_called_once_with( + FileMetadataCreateSchema( + file_name=file_name, + original_file_name=original_file_name, + mime_type=mime_type, + ), + unit_of_work_mock.__enter__.return_value, + ) + + file_repo_mock.create.assert_called_once_with( + upload_file_mock, + FileCreateSchema(file_name=file_name), + unit_of_work_mock.__enter__.return_value, + ) + + # Check that UnitOfWork was used correctly + unit_of_work_mock.__enter__.assert_called_once() + unit_of_work_mock.__exit__.assert_called_once() + + # Verify that the result matches the expected SaveFileUseCaseResult + assert result == SaveFileUseCaseResult( + id=file_id, + file_name=file_name, + original_file_name=original_file_name, + mime_type=mime_type, + created_at=created_at, + updated_at=updated_at, + ) From f6eae2914414a82b1cb29d937892071d7129f5e2 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 18:14:05 +0300 Subject: [PATCH 034/153] 
feat: add task use cases' exceptions --- internal/usecase/task/exception.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 internal/usecase/task/exception.py diff --git a/internal/usecase/task/exception.py b/internal/usecase/task/exception.py new file mode 100644 index 00000000..c55f9846 --- /dev/null +++ b/internal/usecase/task/exception.py @@ -0,0 +1,14 @@ +class TaskNotFoundException(Exception): + """ + Exception raised when a task is not found in data storage. + + This exception does not require any additional attributes beyond the default message. + """ + + def __init__(self): + """ + Initializes an instance of TaskNotFoundException without any specific message. + + The default message "Task not found" is used. + """ + super().__init__("Task not found") From 3f6a6cdb14ef8beac09c5274320c7b7d67455160 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 18:38:37 +0300 Subject: [PATCH 035/153] feat(use case): add use case for retrieving task --- internal/usecase/task/__init__.py | 1 + internal/usecase/task/retrieve_task.py | 59 ++++++++++++++++++++++++++ 2 files changed, 60 insertions(+) create mode 100644 internal/usecase/task/retrieve_task.py diff --git a/internal/usecase/task/__init__.py b/internal/usecase/task/__init__.py index e69de29b..2bdcaf5a 100644 --- a/internal/usecase/task/__init__.py +++ b/internal/usecase/task/__init__.py @@ -0,0 +1 @@ +from internal.usecase.task.retrieve_task import RetrieveTask diff --git a/internal/usecase/task/retrieve_task.py b/internal/usecase/task/retrieve_task.py new file mode 100644 index 00000000..b06ebe14 --- /dev/null +++ b/internal/usecase/task/retrieve_task.py @@ -0,0 +1,59 @@ +import datetime +from typing import Protocol +from uuid import UUID + +from pydantic import BaseModel + +from internal.domain.task.value_objects import TaskStatus, OneOfTaskConfig, OneOfTaskResult, TaskFailureReason +from internal.dto.repository.task import TaskResponseSchema, TaskFindSchema +from internal.uow 
import DataStorageContext, UnitOfWork +from internal.usecase.task.exception import TaskNotFoundException + + +class TaskRepo(Protocol): + + def find(self, task_info: TaskFindSchema, context: DataStorageContext) -> TaskResponseSchema | None: ... + + +class RetrieveTaskUseCaseResult(BaseModel): + task_id: UUID + status: TaskStatus + config: OneOfTaskConfig + result: OneOfTaskResult | None + dataset_id: UUID + + raised_exception_name: str | None + failure_reason: TaskFailureReason | None + traceback: str | None + + created_at: datetime.datetime | None + updated_at: datetime.datetime | None + + +class RetrieveTask: + + def __init__(self, unit_of_work: UnitOfWork, task_repo: TaskRepo): + self.unit_of_work = unit_of_work + self.task_repo = task_repo + + def __call__(self, task_id: UUID) -> RetrieveTaskUseCaseResult: + task_find_schema = TaskFindSchema(id=task_id) + + with self.unit_of_work as context: + task = self.task_repo.find(task_find_schema, context) + + if not task: + raise TaskNotFoundException() + + return RetrieveTaskUseCaseResult( + task_id=task.id, + status=task.status, + config=task.config, + result=task.result, + dataset_id=task.dataset_id, + raised_exception_name=task.raised_exception_name, + failure_reason=task.failure_reason, + traceback=task.traceback, + created_at=task.created_at, + updated_at=task.updated_at + ) From c1a5bb6d5de0aaf88a0404970d767f64b78944c4 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 18:38:49 +0300 Subject: [PATCH 036/153] feat(tests): add tests for retrieve task use case --- tests/usecase/test_retrieve_task.py | 116 ++++++++++++++++++++++++++++ 1 file changed, 116 insertions(+) create mode 100644 tests/usecase/test_retrieve_task.py diff --git a/tests/usecase/test_retrieve_task.py b/tests/usecase/test_retrieve_task.py new file mode 100644 index 00000000..3bc84eb7 --- /dev/null +++ b/tests/usecase/test_retrieve_task.py @@ -0,0 +1,116 @@ +from uuid import uuid4 + +import pytest +from pytest_mock import MockerFixture + 
+from internal.domain.task.value_objects import PrimitiveName, TaskStatus, FdTaskResult +from internal.domain.task.value_objects.fd import FdAlgoName, FdAlgoResult, FdTaskConfig +from internal.dto.repository.task import ( + TaskResponseSchema, + TaskFindSchema, +) +from internal.uow import UnitOfWork, DataStorageContext +from internal.usecase.task.exception import TaskNotFoundException +from internal.usecase.task.retrieve_task import ( + RetrieveTask, + TaskRepo, + RetrieveTaskUseCaseResult, +) + + +@pytest.fixture +def unit_of_work_mock(mocker: MockerFixture) -> UnitOfWork: + mock = mocker.MagicMock() + mock.__enter__.return_value = mocker.Mock( + return_value=mocker.Mock(), spec=DataStorageContext + ) + mock.__exit__.return_value = mocker.Mock(return_value=None) + return mock + + +@pytest.fixture +def task_repo_mock(mocker: MockerFixture) -> TaskRepo: + mock = mocker.Mock(spec=TaskRepo) + return mock + + +@pytest.fixture +def retrieve_task_use_case( + unit_of_work_mock: UnitOfWork, task_repo_mock: TaskRepo +) -> RetrieveTask: + return RetrieveTask(unit_of_work=unit_of_work_mock, task_repo=task_repo_mock) + + +def test_retrieve_task_use_case_success( + unit_of_work_mock: UnitOfWork, + task_repo_mock: TaskRepo, + retrieve_task_use_case: RetrieveTask, +): + # Prepare data + task_id = uuid4() + dataset_id = uuid4() + + task_config = FdTaskConfig( + primitive_name=PrimitiveName.fd, config={"algo_name": FdAlgoName.Aid} + ) + task_result = FdTaskResult( + primitive_name=PrimitiveName.fd, result=FdAlgoResult(fds=[]) + ) + + task_repo_mock.find.return_value = TaskResponseSchema( + id=task_id, + status=TaskStatus.COMPLETED, + config=task_config, + result=task_result, + dataset_id=dataset_id, + created_at=None, + updated_at=None, + ) + + # Act + result = retrieve_task_use_case(task_id=task_id) + + # Check result + assert result == RetrieveTaskUseCaseResult( + task_id=task_id, + status=TaskStatus.COMPLETED, + config=task_config, + result=task_result, + dataset_id=dataset_id, 
+ raised_exception_name=None, + failure_reason=None, + traceback=None, + created_at=None, + updated_at=None, + ) + + # Check that repositories' find method work correctly + task_repo_mock.find.assert_called_once_with( + TaskFindSchema(id=task_id), unit_of_work_mock.__enter__.return_value + ) + + # Check that UnitOfWork was entered and exited correctly + unit_of_work_mock.__enter__.assert_called_once() + unit_of_work_mock.__exit__.assert_called_once() + + +def test_retrieve_task_use_case_not_found( + unit_of_work_mock: UnitOfWork, + retrieve_task_use_case: RetrieveTask, + task_repo_mock: TaskRepo, +): + task_id = uuid4() + + task_repo_mock.find.return_value = None + + with pytest.raises(TaskNotFoundException): + retrieve_task_use_case(task_id=task_id) + + # Check that repositories' find method work correctly + task_repo_mock.find.assert_called_once_with( + TaskFindSchema(id=task_id), unit_of_work_mock.__enter__.return_value + ) + + # Check that UnitOfWork was entered and exited correctly + unit_of_work_mock.__enter__.assert_called_once() + unit_of_work_mock.__exit__.assert_called_once() From 933edc1aa8a2ee85e96555b89bce43cf2965e06a Mon Sep 17 00:00:00 2001 From: raf-nr Date: Wed, 18 Sep 2024 18:40:26 +0300 Subject: [PATCH 037/153] chore: add missing imports --- internal/usecase/file/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/internal/usecase/file/__init__.py b/internal/usecase/file/__init__.py index b614b149..73283758 100644 --- a/internal/usecase/file/__init__.py +++ b/internal/usecase/file/__init__.py @@ -1,2 +1,3 @@ from internal.usecase.file.check_content_type import CheckContentType from internal.usecase.file.save_dataset import SaveDataset +from internal.usecase.file.save_file import SaveFile, SaveFileUseCaseResult From 3aeadd76c9a9807f8ab3d5ac9e6beff8401a7157 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 00:29:24 +0300 Subject: [PATCH 038/153] feat(use case): add use case for setting task --- internal/usecase/task/__init__.py 
| 1 + internal/usecase/task/set_task.py | 70 +++++++++++++++++++++++++++++++ 2 files changed, 71 insertions(+) create mode 100644 internal/usecase/task/set_task.py diff --git a/internal/usecase/task/__init__.py b/internal/usecase/task/__init__.py index 2bdcaf5a..f9fd7c3f 100644 --- a/internal/usecase/task/__init__.py +++ b/internal/usecase/task/__init__.py @@ -1 +1,2 @@ from internal.usecase.task.retrieve_task import RetrieveTask +from internal.usecase.task.set_task import SetTask diff --git a/internal/usecase/task/set_task.py b/internal/usecase/task/set_task.py new file mode 100644 index 00000000..b99c7568 --- /dev/null +++ b/internal/usecase/task/set_task.py @@ -0,0 +1,70 @@ +from typing import Protocol +from uuid import UUID + +from internal.domain.task.value_objects import OneOfTaskConfig, TaskStatus +from internal.dto.repository.file import DatasetResponseSchema, DatasetFindSchema +from internal.dto.repository.task import TaskCreateSchema, TaskResponseSchema +from internal.dto.worker.task import ProfilingTaskCreateSchema +from internal.uow import DataStorageContext, UnitOfWork +from internal.usecase.file.exception import DatasetNotFoundException + + +class DatasetRepo(Protocol): + + def find(self, dataset_info: DatasetFindSchema, context: DataStorageContext) -> DatasetResponseSchema | None: ... + + +class TaskRepo(Protocol): + + def create(self, task_info: TaskCreateSchema, context: DataStorageContext) -> TaskResponseSchema: ... + + +class ProfilingTaskWorker(Protocol): + unit_of_work: UnitOfWork + dataset_repo: DatasetRepo + + def set(self, task_info: ProfilingTaskCreateSchema) -> None: ... 
+ + +class SetTask: + + def __init__( + self, + unit_of_work: UnitOfWork, + dataset_repo: DatasetRepo, + task_repo: TaskRepo, + profiling_task_worker: ProfilingTaskWorker + ): + + self.unit_of_work = unit_of_work + self.dataset_repo = dataset_repo + self.task_repo = task_repo + self.profiling_task_worker = profiling_task_worker + + def __call__( + self, + dataset_id: UUID, + config: OneOfTaskConfig, + ) -> UUID: + + dataset_find_schema = DatasetFindSchema(id=dataset_id) + task_create_schema = TaskCreateSchema( + status=TaskStatus.CREATED, + config=config.model_dump(exclude_unset=True), + dataset_id=dataset_id + ) + + with self.unit_of_work as context: + dataset = self.dataset_repo.find(dataset_find_schema, context) + if not dataset: + raise DatasetNotFoundException() + task = self.task_repo.create(task_create_schema, context) + + profiling_task_create_schema = ProfilingTaskCreateSchema( + task_id=task.id, + dataset_id=dataset_id, + config=config + ) + self.profiling_task_worker.set(profiling_task_create_schema) + + return task.id From 0b3ddff97f9969ece83cdc3aa19f95d9ef2c8696 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 00:30:00 +0300 Subject: [PATCH 039/153] feat(tests): add tests for set task use case --- tests/usecase/test_set_task.py | 182 +++++++++++++++++++++++++++++++++ 1 file changed, 182 insertions(+) create mode 100644 tests/usecase/test_set_task.py diff --git a/tests/usecase/test_set_task.py b/tests/usecase/test_set_task.py new file mode 100644 index 00000000..3f13f3b2 --- /dev/null +++ b/tests/usecase/test_set_task.py @@ -0,0 +1,182 @@ +from datetime import datetime, timezone +from uuid import uuid4 + +import pytest +from pytest_mock import MockerFixture + +from internal.domain.task.value_objects import PrimitiveName, TaskStatus, FdTaskConfig +from internal.domain.task.value_objects.fd import FdAlgoName +from internal.dto.repository.file import ( + DatasetResponseSchema, + DatasetFindSchema, +) +from internal.dto.repository.task import ( 
+ TaskResponseSchema, + TaskCreateSchema, +) +from internal.dto.worker.task import ProfilingTaskCreateSchema +from internal.uow import UnitOfWork, DataStorageContext +from internal.usecase.file.exception import DatasetNotFoundException +from internal.usecase.task.set_task import ( + SetTask, + DatasetRepo, + TaskRepo, + ProfilingTaskWorker, +) + + +@pytest.fixture +def unit_of_work_mock(mocker: MockerFixture) -> UnitOfWork: + mock = mocker.MagicMock() + mock.__enter__.return_value = mocker.Mock( + return_value=mocker.Mock(), spec=DataStorageContext + ) + mock.__exit__.return_value = None + + def exit_side_effect(exc_type, exc_value, traceback) -> bool: + if exc_type: + raise exc_value + return False + + mock.__exit__.side_effect = exit_side_effect + return mock + + +@pytest.fixture +def dataset_repo_mock(mocker: MockerFixture) -> DatasetRepo: + mock = mocker.Mock(spec=DatasetRepo) + return mock + + +@pytest.fixture +def task_repo_mock(mocker: MockerFixture) -> TaskRepo: + mock = mocker.Mock(spec=TaskRepo) + return mock + + +@pytest.fixture +def profiling_task_worker(mocker: MockerFixture) -> ProfilingTaskWorker: + mock = mocker.Mock(spec=ProfilingTaskWorker) + return mock + + +@pytest.fixture +def set_task_use_case( + unit_of_work_mock: UnitOfWork, + dataset_repo_mock: DatasetRepo, + task_repo_mock: TaskRepo, + profiling_task_worker: ProfilingTaskWorker, +): + return SetTask( + unit_of_work=unit_of_work_mock, + dataset_repo=dataset_repo_mock, + task_repo=task_repo_mock, + profiling_task_worker=profiling_task_worker, + ) + + +def test_set_task_use_case_success( + set_task_use_case: SetTask, + unit_of_work_mock: UnitOfWork, + dataset_repo_mock: DatasetRepo, + task_repo_mock: TaskRepo, + profiling_task_worker: ProfilingTaskWorker, +) -> None: + # Prepare data + dataset_id = uuid4() + task_id = uuid4() + task_config = FdTaskConfig( + primitive_name=PrimitiveName.fd, config={"algo_name": FdAlgoName.Aid} + ) + + # Mocks repo methods + dataset_repo_mock.find.return_value 
= DatasetResponseSchema( + id=dataset_id, + file_id=uuid4(), + separator="", + header=[], + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + task_repo_mock.create.return_value = TaskResponseSchema( + id=task_id, + status=TaskStatus.CREATED, + config=task_config, + dataset_id=dataset_id, + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc), + ) + + # Act + result = set_task_use_case(dataset_id=dataset_id, config=task_config) + + # Check that result is correct task identifier + assert result == task_id + + # Check that all methods inside the use case were called correctly + dataset_repo_mock.find.assert_called_once_with( + DatasetFindSchema( + id=dataset_id, + ), + unit_of_work_mock.__enter__.return_value, + ) + + task_repo_mock.create.assert_called_once_with( + TaskCreateSchema( + status=TaskStatus.CREATED, + config=task_config.model_dump(exclude_unset=True), + dataset_id=dataset_id, + ), + unit_of_work_mock.__enter__.return_value, + ) + + profiling_task_worker.set.assert_called_once_with( + ProfilingTaskCreateSchema( + task_id=task_id, + dataset_id=dataset_id, + config=task_config, + ) + ) + + # Check that UnitOfWork was entered and exited correctly + unit_of_work_mock.__enter__.assert_called_once() + unit_of_work_mock.__exit__.assert_called_once() + + +def test_set_task_use_case_dataset_not_found( + set_task_use_case: SetTask, + unit_of_work_mock: UnitOfWork, + dataset_repo_mock: DatasetRepo, + task_repo_mock: TaskRepo, + profiling_task_worker: ProfilingTaskWorker, +): + # Prepare data + dataset_id = uuid4() + task_config = FdTaskConfig( + primitive_name=PrimitiveName.fd, config={"algo_name": FdAlgoName.Aid} + ) + + # Mocks repo methods + dataset_repo_mock.find.return_value = None + + # Act and except error + with pytest.raises(DatasetNotFoundException): + set_task_use_case( + dataset_id=dataset_id, + config=task_config, + ) + + # Check that all methods inside the use case were called correctly + 
 dataset_repo_mock.find.assert_called_once_with( + DatasetFindSchema( + id=dataset_id, + ), + unit_of_work_mock.__enter__.return_value, + ) + + assert not task_repo_mock.create.called + assert not profiling_task_worker.set.called + + # Check that UnitOfWork was entered and exited correctly + unit_of_work_mock.__enter__.assert_called_once() + unit_of_work_mock.__exit__.assert_called_once() From 1ac699e4010f77dfe7893dbe428c69f39cb2e485 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 00:32:27 +0300 Subject: [PATCH 040/153] chore: add file for user's use case exceptions --- internal/usecase/user/exception.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 internal/usecase/user/exception.py diff --git a/internal/usecase/user/exception.py b/internal/usecase/user/exception.py new file mode 100644 index 00000000..e69de29b From ddf134dde94c4efd1fef825a07320f4eb1ea5e8a Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 00:38:39 +0300 Subject: [PATCH 041/153] chore: add readme file for usecase module --- internal/usecase/README.md | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 internal/usecase/README.md diff --git a/internal/usecase/README.md b/internal/usecase/README.md new file mode 100644 index 00000000..fca57df2 --- /dev/null +++ b/internal/usecase/README.md @@ -0,0 +1,26 @@ +# usecase module +This module implements the core business logic, acting as a bridge between domain entities and infrastructure. This module defines the key use cases, which handle interactions between different components of the system, ensuring that the business logic remains independent of implementation details. + +## submodules + +`file` - contains usecases and usecases' exceptions related to files and datasets, their creation, management, search, and more. + +`task` - contains usecases and usecases' exceptions related to tasks, their creation, launch, verification, and more. 
+ +`user` - contains usecases and usecases' exceptions related to the user, authorization and other user actions. + +## How to create a new usecase +### 1. Create DTO schemas +Create Data Transfer Object (DTO) schemas in `usecase.dto` to standardize the data flow between your use case and the repository. + +### 2. Define the Repository Interface Using `Protocol` +All repository interfaces must be created using Python’s `Protocol` to ensure type safety and flexibility in implementation. + +### 3. Implement the usecase +Implement the use case. The use case should manage domain entities directly but interact with repositories, services, and external components strictly through interfaces. + +### 4. Implement data storage context +Implement interface `DataStorageContext` for your data storage. Place it in the `internal.infrastructure.data_storage` module. + +### 5. Implement the repository +If the repository isn't implemented, you will need to provide a concrete implementation for the repository interface. Place this in the `internal.repository` module. 
From 6973494fc12c4d4e3f6893df53a99408d6acbe17 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 00:44:55 +0300 Subject: [PATCH 042/153] chore: init infrastructure, repository and worker modules --- internal/infrastructure/__init__.py | 0 internal/repository/__init__.py | 0 internal/worker/__init__.py | 0 3 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 internal/infrastructure/__init__.py create mode 100644 internal/repository/__init__.py create mode 100644 internal/worker/__init__.py diff --git a/internal/infrastructure/__init__.py b/internal/infrastructure/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/repository/__init__.py b/internal/repository/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/worker/__init__.py b/internal/worker/__init__.py new file mode 100644 index 00000000..e69de29b From ba6974fcb42dc4057db015e20571922a8beeb44f Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 00:54:51 +0300 Subject: [PATCH 043/153] chore(infra): init data storage module --- internal/infrastructure/data_storage/__init__.py | 0 internal/infrastructure/data_storage/relational/__init__.py | 0 internal/infrastructure/data_storage/relational/model/__init__.py | 0 .../infrastructure/data_storage/relational/postgres/__init__.py | 0 4 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 internal/infrastructure/data_storage/__init__.py create mode 100644 internal/infrastructure/data_storage/relational/__init__.py create mode 100644 internal/infrastructure/data_storage/relational/model/__init__.py create mode 100644 internal/infrastructure/data_storage/relational/postgres/__init__.py diff --git a/internal/infrastructure/data_storage/__init__.py b/internal/infrastructure/data_storage/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/infrastructure/data_storage/relational/__init__.py 
b/internal/infrastructure/data_storage/relational/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/infrastructure/data_storage/relational/model/__init__.py b/internal/infrastructure/data_storage/relational/model/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/infrastructure/data_storage/relational/postgres/__init__.py b/internal/infrastructure/data_storage/relational/postgres/__init__.py new file mode 100644 index 00000000..e69de29b From 6c05e600aa5b03868a0fa3cf252d4bfdd57806f5 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 00:55:10 +0300 Subject: [PATCH 044/153] feat(infra): add settings for data storages --- .../infrastructure/data_storage/__init__.py | 3 ++ .../infrastructure/data_storage/settings.py | 50 +++++++++++++++++++ 2 files changed, 53 insertions(+) create mode 100644 internal/infrastructure/data_storage/settings.py diff --git a/internal/infrastructure/data_storage/__init__.py b/internal/infrastructure/data_storage/__init__.py index e69de29b..8d705c41 100644 --- a/internal/infrastructure/data_storage/__init__.py +++ b/internal/infrastructure/data_storage/__init__.py @@ -0,0 +1,3 @@ +from internal.infrastructure.data_storage.settings import get_settings + +settings = get_settings() diff --git a/internal/infrastructure/data_storage/settings.py b/internal/infrastructure/data_storage/settings.py new file mode 100644 index 00000000..5229f858 --- /dev/null +++ b/internal/infrastructure/data_storage/settings.py @@ -0,0 +1,50 @@ +from functools import cached_property + +from dotenv import load_dotenv, find_dotenv +from pydantic import AmqpDsn, DirectoryPath, PostgresDsn +from pydantic_settings import BaseSettings + +load_dotenv(find_dotenv(".env")) + + +class Settings(BaseSettings): + # Postgres settings + postgres_dialect_driver: str = "postgresql" + postgres_user: str + postgres_password: str + postgres_host: str + postgres_db: str + postgres_port: int = 5432 + # RabbitMQ settings + 
rabbitmq_default_user: str + rabbitmq_default_password: str + rabbitmq_host: str + rabbitmq_port: int = 5672 + # Flat files settings + uploaded_files_dir_path: DirectoryPath = "uploads/" + + @cached_property + def rabbitmq_dsn(self) -> AmqpDsn: + return AmqpDsn.build( + scheme="amqp", + username=self.rabbitmq_default_user, + password=self.rabbitmq_default_password, + host=self.rabbitmq_host, + port=self.rabbitmq_port, + ) + + @cached_property + def postgres_dsn(self) -> PostgresDsn: + return PostgresDsn.build( + scheme=self.postgres_dialect_driver, + username=self.postgres_user, + password=self.postgres_password, + host=self.postgres_host, + port=self.postgres_port, + path=self.postgres_db, + ) + + +def get_settings(): + # TODO: create different settings based on environment (production, testing, etc.) + return Settings() From 68bd15e17577354e5d0d2909670e5c1bae285078 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 01:01:33 +0300 Subject: [PATCH 045/153] feat(infra): add context implementation for postgres --- .../relational/postgres/__init__.py | 2 ++ .../relational/postgres/context.py | 32 +++++++++++++++++++ 2 files changed, 34 insertions(+) create mode 100644 internal/infrastructure/data_storage/relational/postgres/context.py diff --git a/internal/infrastructure/data_storage/relational/postgres/__init__.py b/internal/infrastructure/data_storage/relational/postgres/__init__.py index e69de29b..71d0c3b2 100644 --- a/internal/infrastructure/data_storage/relational/postgres/__init__.py +++ b/internal/infrastructure/data_storage/relational/postgres/__init__.py @@ -0,0 +1,2 @@ +from internal.infrastructure.data_storage.relational.postgres.context import (get_context, + get_context_without_pool) diff --git a/internal/infrastructure/data_storage/relational/postgres/context.py b/internal/infrastructure/data_storage/relational/postgres/context.py new file mode 100644 index 00000000..80017714 --- /dev/null +++ 
b/internal/infrastructure/data_storage/relational/postgres/context.py @@ -0,0 +1,32 @@ +from typing import Generator + +from sqlalchemy import create_engine, NullPool +from sqlalchemy.orm import sessionmaker, Session + +from internal.infrastructure.data_storage import settings + +DataStorageContext = Session + +default_engine = create_engine(url=settings.postgres_dsn.unicode_string()) +engine_without_pool = create_engine( + url=settings.postgres_dsn.unicode_string(), + poolclass=NullPool, +) + +ContextLocal = sessionmaker(bind=default_engine) +ContextLocalWithoutPool = sessionmaker(bind=engine_without_pool) + + +def get_context() -> Generator[DataStorageContext, None, None]: + """ + Returns a generator that yields a context(session) object for database operations. + """ + with ContextLocal() as context: + yield context + +def get_context_without_pool() -> Generator[DataStorageContext, None, None]: + """ + Returns a generator that yields a context(session) object without pool for database operations. 
+ """ + with ContextLocalWithoutPool() as context: + yield context From 77cb1ffd54b7527e18a04f0ec8a3b76a9ea2e84a Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 01:05:11 +0300 Subject: [PATCH 046/153] feat(infra): add base orm model for relational data bases --- .../data_storage/relational/model/__init__.py | 1 + .../relational/model/orm_base_model.py | 20 +++++++++++++++++++ 2 files changed, 21 insertions(+) create mode 100644 internal/infrastructure/data_storage/relational/model/orm_base_model.py diff --git a/internal/infrastructure/data_storage/relational/model/__init__.py b/internal/infrastructure/data_storage/relational/model/__init__.py index e69de29b..9022a02a 100644 --- a/internal/infrastructure/data_storage/relational/model/__init__.py +++ b/internal/infrastructure/data_storage/relational/model/__init__.py @@ -0,0 +1 @@ +from internal.infrastructure.data_storage.relational.model.orm_base_model import ORMBaseModel diff --git a/internal/infrastructure/data_storage/relational/model/orm_base_model.py b/internal/infrastructure/data_storage/relational/model/orm_base_model.py new file mode 100644 index 00000000..6ffa1d63 --- /dev/null +++ b/internal/infrastructure/data_storage/relational/model/orm_base_model.py @@ -0,0 +1,20 @@ +from datetime import datetime + +from sqlalchemy import TIMESTAMP, text +from sqlalchemy.orm import DeclarativeBase, mapped_column, Mapped + + +class ORMBaseModel(DeclarativeBase): + __abstract__ = True + + created_at: Mapped[datetime] = mapped_column( + TIMESTAMP(timezone=True), + nullable=False, + server_default=text("now()") + ) + updated_at: Mapped[datetime] = mapped_column( + TIMESTAMP(timezone=True), + nullable=False, + server_default=text("now()"), + onupdate=text("now()") + ) From e904208c16e5d90f36c2823f219676f50e7f1c4a Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 01:09:20 +0300 Subject: [PATCH 047/153] feat(infra): add base orm model for relational data bases --- 
.../relational/postgres/migrations/README | 1 + .../postgres/migrations/alembic.ini | 116 ++++++++++++++++++ .../relational/postgres/migrations/env.py | 84 +++++++++++++ .../postgres/migrations/script.py.mako | 26 ++++ 4 files changed, 227 insertions(+) create mode 100644 internal/infrastructure/data_storage/relational/postgres/migrations/README create mode 100644 internal/infrastructure/data_storage/relational/postgres/migrations/alembic.ini create mode 100644 internal/infrastructure/data_storage/relational/postgres/migrations/env.py create mode 100644 internal/infrastructure/data_storage/relational/postgres/migrations/script.py.mako diff --git a/internal/infrastructure/data_storage/relational/postgres/migrations/README b/internal/infrastructure/data_storage/relational/postgres/migrations/README new file mode 100644 index 00000000..2500aa1b --- /dev/null +++ b/internal/infrastructure/data_storage/relational/postgres/migrations/README @@ -0,0 +1 @@ +Generic single-database configuration. diff --git a/internal/infrastructure/data_storage/relational/postgres/migrations/alembic.ini b/internal/infrastructure/data_storage/relational/postgres/migrations/alembic.ini new file mode 100644 index 00000000..e7dd38ba --- /dev/null +++ b/internal/infrastructure/data_storage/relational/postgres/migrations/alembic.ini @@ -0,0 +1,116 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = internal/infrastructure/data_storage/relational/postgres/migrations + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. 
+# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. +# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to internal/infrastructure_/data_storage/relational/postgres/migration/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:internal/infrastructure_/data_storage/relational/postgres/migration/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 
+ +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = %(db_url)s + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/internal/infrastructure/data_storage/relational/postgres/migrations/env.py b/internal/infrastructure/data_storage/relational/postgres/migrations/env.py new file mode 100644 index 00000000..23ca22d0 --- /dev/null +++ b/internal/infrastructure/data_storage/relational/postgres/migrations/env.py @@ -0,0 +1,84 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + +from internal.infrastructure.data_storage import settings 
+from internal.infrastructure.data_storage.relational.model import ORMBaseModel + + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = ORMBaseModel.metadata +section = config.config_ini_section +config.set_section_option(section, "db_url", settings.postgres_dsn.unicode_string()) + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migration in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migration in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/internal/infrastructure/data_storage/relational/postgres/migrations/script.py.mako b/internal/infrastructure/data_storage/relational/postgres/migrations/script.py.mako new file mode 100644 index 00000000..fbc4b07d --- /dev/null +++ b/internal/infrastructure/data_storage/relational/postgres/migrations/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} From 6883f13f5c2b72c9c9255f14f82ae9915bf38f45 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 01:16:06 +0300 Subject: [PATCH 048/153] feat(infra): add file metadata model for relational data bases --- .../relational/model/file/__init__.py | 1 + .../relational/model/file/file_metadata.py | 21 +++++++++++++++++++ 2 files changed, 22 insertions(+) create mode 100644 internal/infrastructure/data_storage/relational/model/file/__init__.py create mode 100644 internal/infrastructure/data_storage/relational/model/file/file_metadata.py diff --git a/internal/infrastructure/data_storage/relational/model/file/__init__.py b/internal/infrastructure/data_storage/relational/model/file/__init__.py new file mode 100644 index 00000000..67a609bb --- /dev/null +++ b/internal/infrastructure/data_storage/relational/model/file/__init__.py @@ -0,0 +1 @@ +from internal.infrastructure.data_storage.relational.model.file.file_metadata import FileMetadataORM diff --git a/internal/infrastructure/data_storage/relational/model/file/file_metadata.py b/internal/infrastructure/data_storage/relational/model/file/file_metadata.py new file mode 100644 index 00000000..37a0e2c4 --- /dev/null +++ b/internal/infrastructure/data_storage/relational/model/file/file_metadata.py @@ -0,0 +1,21 @@ +from pathlib import Path +from uuid import UUID, uuid4 + +from sqlalchemy.ext.hybrid import hybrid_property +from sqlalchemy.orm import mapped_column, Mapped + +from internal.infrastructure.data_storage import settings +from internal.infrastructure.data_storage.relational.model import ORMBaseModel + + +class FileMetadataORM(ORMBaseModel): + 
__tablename__ = "file_metadata" + id: Mapped[UUID] = mapped_column(primary_key=True, default=uuid4) + + mime_type: Mapped[str] + file_name: Mapped[UUID] + original_file_name: Mapped[str] + + @hybrid_property + def path_to_file(self) -> Path: + return Path(settings.uploaded_files_dir_path, str(self.file_name)) From 2bdaec49bb2faced99203f5f438751b23a2847aa Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 01:16:48 +0300 Subject: [PATCH 049/153] feat(infra): add dataset model for relational data bases --- .../relational/model/file/__init__.py | 1 + .../relational/model/file/dataset.py | 28 +++++++++++++++++++ 2 files changed, 29 insertions(+) create mode 100644 internal/infrastructure/data_storage/relational/model/file/dataset.py diff --git a/internal/infrastructure/data_storage/relational/model/file/__init__.py b/internal/infrastructure/data_storage/relational/model/file/__init__.py index 67a609bb..3f0174d4 100644 --- a/internal/infrastructure/data_storage/relational/model/file/__init__.py +++ b/internal/infrastructure/data_storage/relational/model/file/__init__.py @@ -1 +1,2 @@ from internal.infrastructure.data_storage.relational.model.file.file_metadata import FileMetadataORM +from internal.infrastructure.data_storage.relational.model.file.dataset import DatasetORM diff --git a/internal/infrastructure/data_storage/relational/model/file/dataset.py b/internal/infrastructure/data_storage/relational/model/file/dataset.py new file mode 100644 index 00000000..fc436758 --- /dev/null +++ b/internal/infrastructure/data_storage/relational/model/file/dataset.py @@ -0,0 +1,28 @@ +import typing +from uuid import uuid4, UUID + +from sqlalchemy import ForeignKey, Integer, ARRAY +from sqlalchemy.orm import mapped_column, Mapped, relationship + +from internal.infrastructure.data_storage.relational.model import ORMBaseModel +from internal.infrastructure.data_storage.relational.model.file.file_metadata import FileMetadataORM + +if typing.TYPE_CHECKING: + pass + + +class 
DatasetORM(ORMBaseModel): + __tablename__ = "dataset" + id: Mapped[UUID] = mapped_column(primary_key=True, default=uuid4) + + is_built_in: Mapped[bool] = mapped_column(default=False) + header: Mapped[list[int]] = mapped_column(ARRAY(Integer), default=[]) + separator: Mapped[str] + file_id: Mapped[UUID] = mapped_column(ForeignKey("file_metadata.id"), nullable=False) + file: Mapped[FileMetadataORM] = relationship("FileMetadataORM") + + # related_tasks: Mapped[list["TaskORM"]] = relationship( + # "TaskORM", back_populates="dataset" + # ) + + # owner = relationship("UserORM") From 174bb42e909a6d74ad3859fd735276c9385ea8e4 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 01:20:23 +0300 Subject: [PATCH 050/153] feat(infra): add task model for relational data bases --- .../relational/model/file/dataset.py | 8 ++--- .../relational/model/task/__init__.py | 1 + .../relational/model/task/task.py | 31 +++++++++++++++++++ 3 files changed, 36 insertions(+), 4 deletions(-) create mode 100644 internal/infrastructure/data_storage/relational/model/task/__init__.py create mode 100644 internal/infrastructure/data_storage/relational/model/task/task.py diff --git a/internal/infrastructure/data_storage/relational/model/file/dataset.py b/internal/infrastructure/data_storage/relational/model/file/dataset.py index fc436758..fb2bb1d0 100644 --- a/internal/infrastructure/data_storage/relational/model/file/dataset.py +++ b/internal/infrastructure/data_storage/relational/model/file/dataset.py @@ -8,7 +8,7 @@ from internal.infrastructure.data_storage.relational.model.file.file_metadata import FileMetadataORM if typing.TYPE_CHECKING: - pass + from internal.infrastructure.data_storage.relational.model.task import TaskORM class DatasetORM(ORMBaseModel): @@ -21,8 +21,8 @@ class DatasetORM(ORMBaseModel): file_id: Mapped[UUID] = mapped_column(ForeignKey("file_metadata.id"), nullable=False) file: Mapped[FileMetadataORM] = relationship("FileMetadataORM") - # related_tasks: 
Mapped[list["TaskORM"]] = relationship( - # "TaskORM", back_populates="dataset" - # ) + related_tasks: Mapped[list["TaskORM"]] = relationship( + "TaskORM", back_populates="dataset" + ) # owner = relationship("UserORM") diff --git a/internal/infrastructure/data_storage/relational/model/task/__init__.py b/internal/infrastructure/data_storage/relational/model/task/__init__.py new file mode 100644 index 00000000..ccfd5b77 --- /dev/null +++ b/internal/infrastructure/data_storage/relational/model/task/__init__.py @@ -0,0 +1 @@ +from internal.infrastructure.data_storage.relational.model.task.task import TaskORM diff --git a/internal/infrastructure/data_storage/relational/model/task/task.py b/internal/infrastructure/data_storage/relational/model/task/task.py new file mode 100644 index 00000000..31cb4aea --- /dev/null +++ b/internal/infrastructure/data_storage/relational/model/task/task.py @@ -0,0 +1,31 @@ +import typing +from uuid import UUID, uuid4 + +from sqlalchemy import ForeignKey +from sqlalchemy.orm import mapped_column, Mapped, relationship +from sqlalchemy.dialects.postgresql import JSONB + +from internal.domain.task.value_objects import TaskStatus, OneOfTaskConfig, OneOfTaskResult, TaskFailureReason +from internal.infrastructure.data_storage.relational.model import ORMBaseModel + +if typing.TYPE_CHECKING: + from internal.infrastructure.data_storage.relational.model.file import DatasetORM + + +class TaskORM(ORMBaseModel): + __tablename__ = "task" + id: Mapped[UUID] = mapped_column(primary_key=True, default=uuid4) + + status: Mapped[TaskStatus] + config: Mapped[OneOfTaskConfig] = mapped_column(JSONB) + result: Mapped[OneOfTaskResult | None] = mapped_column(JSONB, default=None) + + dataset_id: Mapped[UUID] = mapped_column(ForeignKey("dataset.id"), nullable=False) + dataset: Mapped["DatasetORM"] = relationship( + "DatasetORM", back_populates="related_tasks" + ) + + # Only if task failed + raised_exception_name: Mapped[str | None] = mapped_column(default=None) + 
failure_reason: Mapped[TaskFailureReason | None] = mapped_column(default=None) + traceback: Mapped[str | None] = mapped_column(default=None) From 906ad4177495f28072b2dd6477a9d9c62fe07d29 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 01:21:53 +0300 Subject: [PATCH 051/153] chore(infra): add empty module for user relational models --- .../infrastructure/data_storage/relational/model/user/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 internal/infrastructure/data_storage/relational/model/user/__init__.py diff --git a/internal/infrastructure/data_storage/relational/model/user/__init__.py b/internal/infrastructure/data_storage/relational/model/user/__init__.py new file mode 100644 index 00000000..e69de29b From 8192e4079bac647c3661407d5745cedb145e04b4 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Wed, 18 Sep 2024 22:32:05 +0000 Subject: [PATCH 052/153] chore: set new directories for test command in the makefile --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 9b39e3b2..feb609d0 100644 --- a/Makefile +++ b/Makefile @@ -63,7 +63,7 @@ format: ## Run all tests in project test: - poetry run pytest -o log_cli=true --verbosity=2 --showlocals --log-cli-level=INFO --test-alembic --cov=app --cov-report term + poetry run pytest -o log_cli=true --verbosity=2 --showlocals --log-cli-level=INFO --test-alembic --cov=internal --cov-report term .DEFAULT_GOAL := help # See for explanation. 
From fbe79a17eec66531e60e6c5992183cba05cec396 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Wed, 18 Sep 2024 22:35:30 +0000 Subject: [PATCH 053/153] chore: set new alembic.ini file's path for postgres migrations command in the makefile --- Makefile | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/Makefile b/Makefile index feb609d0..2ec7afce 100644 --- a/Makefile +++ b/Makefile @@ -27,16 +27,16 @@ open-db: docker exec -it desbordante-postgres psql -d $(POSTGRES_DB) -U $(POSTGRES_USER) ## Create new revision file automatically -revision: - poetry run alembic -c app/settings/alembic.ini revision --autogenerate $(args) +pg-revision: + poetry run alembic -c internal/infrastructure/data_storage/relational/postgres/migrations/alembic.ini revision --autogenerate $(args) ## Make migrations in database -migrate: - poetry run alembic -c app/settings/alembic.ini upgrade $(args) +pg-migrate: + poetry run alembic -c internal/infrastructure/data_storage/relational/postgres/migrations/alembic.ini upgrade $(args) ## Downgrade database -downgrade: - poetry run alembic -c app/settings/alembic.ini downgrade $(args) +pg-downgrade: + poetry run alembic -c internal/infrastructure/data_storage/relational/postgres/migrations/alembic.ini downgrade $(args) ## Run celery worker in watch mode worker: From 45988c0dc4dbdf194ffd045dff3b277bddf2ad18 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Wed, 18 Sep 2024 22:46:52 +0000 Subject: [PATCH 054/153] feat(infra): add first empty postgres migration --- .../a6df7c9124be_first_empty_migration.py | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 internal/infrastructure/data_storage/relational/postgres/migrations/versions/a6df7c9124be_first_empty_migration.py diff --git a/internal/infrastructure/data_storage/relational/postgres/migrations/versions/a6df7c9124be_first_empty_migration.py 
b/internal/infrastructure/data_storage/relational/postgres/migrations/versions/a6df7c9124be_first_empty_migration.py new file mode 100644 index 00000000..7c4a0f24 --- /dev/null +++ b/internal/infrastructure/data_storage/relational/postgres/migrations/versions/a6df7c9124be_first_empty_migration.py @@ -0,0 +1,28 @@ +"""first_empty_migration + +Revision ID: a6df7c9124be +Revises: +Create Date: 2024-09-18 22:40:07.892034 + +""" +from typing import Sequence, Union + + + +# revision identifiers, used by Alembic. +revision: str = 'a6df7c9124be' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### From becf94120609df3b1fbf3f1c3d89fa615cfc2942 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Wed, 18 Sep 2024 22:53:19 +0000 Subject: [PATCH 055/153] feat(infra): add postgres db migration that creates file and dataset table --- ...create_file_metadata_and_dataset_tadble.py | 50 +++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 internal/infrastructure/data_storage/relational/postgres/migrations/versions/925f30493c24_create_file_metadata_and_dataset_tadble.py diff --git a/internal/infrastructure/data_storage/relational/postgres/migrations/versions/925f30493c24_create_file_metadata_and_dataset_tadble.py b/internal/infrastructure/data_storage/relational/postgres/migrations/versions/925f30493c24_create_file_metadata_and_dataset_tadble.py new file mode 100644 index 00000000..7cfd7f85 --- /dev/null +++ b/internal/infrastructure/data_storage/relational/postgres/migrations/versions/925f30493c24_create_file_metadata_and_dataset_tadble.py @@ -0,0 +1,50 @@ 
+"""create_file_metadata_and_dataset_tadble + +Revision ID: 925f30493c24 +Revises: a6df7c9124be +Create Date: 2024-09-18 22:50:48.547252 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '925f30493c24' +down_revision: Union[str, None] = 'a6df7c9124be' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('file_metadata', + sa.Column('id', sa.Uuid(), nullable=False), + sa.Column('mime_type', sa.String(), nullable=False), + sa.Column('file_name', sa.Uuid(), nullable=False), + sa.Column('original_file_name', sa.String(), nullable=False), + sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.Column('updated_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('dataset', + sa.Column('id', sa.Uuid(), nullable=False), + sa.Column('is_built_in', sa.Boolean(), nullable=False), + sa.Column('header', sa.ARRAY(sa.Integer()), nullable=False), + sa.Column('separator', sa.String(), nullable=False), + sa.Column('file_id', sa.Uuid(), nullable=False), + sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.Column('updated_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.ForeignKeyConstraint(['file_id'], ['file_metadata.id'], ), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('dataset') + op.drop_table('file_metadata') + # ### end Alembic commands ### From 1cbe946e9efd82475c1e73a3cec7aa0eb1bb8280 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Wed, 18 Sep 2024 22:55:10 +0000 Subject: [PATCH 056/153] feat(infra): add postgres db migration that creates task table --- .../b13295f9fac2_create_task_tadble.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 internal/infrastructure/data_storage/relational/postgres/migrations/versions/b13295f9fac2_create_task_tadble.py diff --git a/internal/infrastructure/data_storage/relational/postgres/migrations/versions/b13295f9fac2_create_task_tadble.py b/internal/infrastructure/data_storage/relational/postgres/migrations/versions/b13295f9fac2_create_task_tadble.py new file mode 100644 index 00000000..cecadd82 --- /dev/null +++ b/internal/infrastructure/data_storage/relational/postgres/migrations/versions/b13295f9fac2_create_task_tadble.py @@ -0,0 +1,43 @@ +"""create_task_tadble + +Revision ID: b13295f9fac2 +Revises: 925f30493c24 +Create Date: 2024-09-18 22:54:19.185777 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision: str = 'b13295f9fac2' +down_revision: Union[str, None] = '925f30493c24' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('task', + sa.Column('id', sa.Uuid(), nullable=False), + sa.Column('status', sa.Enum('FAILED', 'CREATED', 'RUNNING', 'COMPLETED', name='taskstatus'), nullable=False), + sa.Column('config', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('result', postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.Column('dataset_id', sa.Uuid(), nullable=False), + sa.Column('raised_exception_name', sa.String(), nullable=True), + sa.Column('failure_reason', sa.Enum('MEMORY_LIMIT_EXCEEDED', 'TIME_LIMIT_EXCEEDED', 'WORKER_KILLED_BY_SIGNAL', 'OTHER', name='taskfailurereason'), nullable=True), + sa.Column('traceback', sa.String(), nullable=True), + sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.Column('updated_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False), + sa.ForeignKeyConstraint(['dataset_id'], ['dataset.id'], ), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('task') + # ### end Alembic commands ### From 97f1ca4acc4d8ecb8a841cfc8b31aa56c4ce8bfe Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 02:11:37 +0300 Subject: [PATCH 057/153] feat(repo): add file repository implementation --- internal/dto/repository/file/__init__.py | 3 ++- internal/dto/repository/file/file.py | 7 ++++++ internal/repository/flat/__init__.py | 1 + internal/repository/flat/file.py | 28 ++++++++++++++++++++++++ 4 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 internal/repository/flat/__init__.py create mode 100644 internal/repository/flat/file.py diff --git a/internal/dto/repository/file/__init__.py b/internal/dto/repository/file/__init__.py index 553d8eb2..cdebb91f 100644 --- a/internal/dto/repository/file/__init__.py +++ b/internal/dto/repository/file/__init__.py @@ -1,5 +1,6 @@ from internal.dto.repository.file.file import (File, FileResponseSchema, FileFindSchema, - FileCreateSchema, FileUpdateSchema) + FileCreateSchema, FileUpdateSchema, + FailedFileReadingException) from internal.dto.repository.file.file_metadata import (FileMetadataResponseSchema, FileMetadataCreateSchema, FileMetadataFindSchema, FileMetadataUpdateSchema) from internal.dto.repository.file.dataset import (DatasetResponseSchema, DatasetCreateSchema, diff --git a/internal/dto/repository/file/file.py b/internal/dto/repository/file/file.py index 50c69305..6212d911 100644 --- a/internal/dto/repository/file/file.py +++ b/internal/dto/repository/file/file.py @@ -3,6 +3,13 @@ from internal.dto.repository.base_schema import BaseCreateSchema, BaseUpdateSchema, BaseSchema + +class FailedFileReadingException(Exception): + + def __init__(self, message: str): + super().__init__(message) + + class File(Protocol): filename: str | None diff --git a/internal/repository/flat/__init__.py b/internal/repository/flat/__init__.py new file mode 100644 index 00000000..a58daf3c --- /dev/null +++ b/internal/repository/flat/__init__.py @@ -0,0 +1 @@ +from 
internal.repository.flat.file import FileRepository diff --git a/internal/repository/flat/file.py b/internal/repository/flat/file.py new file mode 100644 index 00000000..cd531ab7 --- /dev/null +++ b/internal/repository/flat/file.py @@ -0,0 +1,28 @@ +from pathlib import Path +import aiofiles + +from internal.dto.repository.file.file import FailedFileReadingException +from internal.infrastructure.data_storage import settings +from internal.dto.repository.file import File, FileCreateSchema, FileResponseSchema +from internal.uow import DataStorageContext + +CHUNK_SIZE = 1024 + +class FileRepository: + + def __init__(self): + self.files_dir_path = settings.uploaded_files_dir_path + + async def create( + self, + file: File, + file_info: FileCreateSchema, + context: DataStorageContext # The current repository implementation does not support transactions. + ) -> FileResponseSchema: + path_to_file = Path.joinpath(self.files_dir_path, str(file_info.file_name)) + try: + async with aiofiles.open(path_to_file, "wb") as out_file: # !!! 
+ while content := await file.read(CHUNK_SIZE): + await out_file.write(content) + except Exception: + raise FailedFileReadingException("The sent file could not be read.") From 1ad0cd35d47532e6955ee73cf5fc12e30f777a95 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 03:10:44 +0300 Subject: [PATCH 058/153] chore(infra): rename and replace relational storage context --- .../data_storage/relational/context.py | 3 +++ .../data_storage/relational/postgres/context.py | 13 ++++++------- 2 files changed, 9 insertions(+), 7 deletions(-) create mode 100644 internal/infrastructure/data_storage/relational/context.py diff --git a/internal/infrastructure/data_storage/relational/context.py b/internal/infrastructure/data_storage/relational/context.py new file mode 100644 index 00000000..d5a9b8fe --- /dev/null +++ b/internal/infrastructure/data_storage/relational/context.py @@ -0,0 +1,3 @@ +from sqlalchemy.orm import Session + +RelationalStorageContext = Session diff --git a/internal/infrastructure/data_storage/relational/postgres/context.py b/internal/infrastructure/data_storage/relational/postgres/context.py index 80017714..fdc1ad61 100644 --- a/internal/infrastructure/data_storage/relational/postgres/context.py +++ b/internal/infrastructure/data_storage/relational/postgres/context.py @@ -1,11 +1,10 @@ from typing import Generator from sqlalchemy import create_engine, NullPool -from sqlalchemy.orm import sessionmaker, Session +from sqlalchemy.orm import sessionmaker from internal.infrastructure.data_storage import settings - -DataStorageContext = Session +from internal.infrastructure.data_storage.relational.context import RelationalStorageContext default_engine = create_engine(url=settings.postgres_dsn.unicode_string()) engine_without_pool = create_engine( @@ -17,16 +16,16 @@ ContextLocalWithoutPool = sessionmaker(bind=engine_without_pool) -def get_context() -> Generator[DataStorageContext, None, None]: +def get_context() -> Generator[RelationalStorageContext, None, 
None]: """ - Returns a generator that yields a context(session) object for database operations. + Returns a generator that yields a context.py(session) object for database operations. """ with ContextLocal() as context: yield context -def get_context_without_pool() -> Generator[DataStorageContext, None, None]: +def get_context_without_pool() -> Generator[RelationalStorageContext, None, None]: """ - Returns a generator that yields a context(session) object without pool for database operations. + Returns a generator that yields a context.py(session) object without pool for database operations. """ with ContextLocalWithoutPool() as context: yield context From c53091d8a444c3d6014e373853cf28eae071dfc4 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Thu, 19 Sep 2024 03:11:17 +0300 Subject: [PATCH 059/153] chore(repo): format code --- internal/repository/flat/file.py | 1 + 1 file changed, 1 insertion(+) diff --git a/internal/repository/flat/file.py b/internal/repository/flat/file.py index cd531ab7..fd087176 100644 --- a/internal/repository/flat/file.py +++ b/internal/repository/flat/file.py @@ -19,6 +19,7 @@ async def create( file_info: FileCreateSchema, context: DataStorageContext # The current repository implementation does not support transactions. ) -> FileResponseSchema: + path_to_file = Path.joinpath(self.files_dir_path, str(file_info.file_name)) try: async with aiofiles.open(path_to_file, "wb") as out_file: # !!! 
From 439e6cc3040d249cbdb0750bd37e1052652262f2 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 18:19:46 +0300 Subject: [PATCH 060/153] fix(infra): remove typo in dataset relational model's fields --- .../data_storage/relational/model/file/dataset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/infrastructure/data_storage/relational/model/file/dataset.py b/internal/infrastructure/data_storage/relational/model/file/dataset.py index fb2bb1d0..9c33f890 100644 --- a/internal/infrastructure/data_storage/relational/model/file/dataset.py +++ b/internal/infrastructure/data_storage/relational/model/file/dataset.py @@ -19,7 +19,7 @@ class DatasetORM(ORMBaseModel): header: Mapped[list[int]] = mapped_column(ARRAY(Integer), default=[]) separator: Mapped[str] file_id: Mapped[UUID] = mapped_column(ForeignKey("file_metadata.id"), nullable=False) - file: Mapped[FileMetadataORM] = relationship("FileMetadataORM") + file_metadata: Mapped[FileMetadataORM] = relationship("FileMetadataORM") related_tasks: Mapped[list["TaskORM"]] = relationship( "TaskORM", back_populates="dataset" From 67e8f94c267d03578e8e79a1251e1f38fde1904e Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 18:46:37 +0300 Subject: [PATCH 061/153] fix(uow): change the way to return a context from uow --- internal/uow/README.md | 6 +++--- internal/uow/uow.py | 23 ++++++++++++++++------- tests/uow/test_unit_of_work.py | 20 ++++++++++++++++---- 3 files changed, 35 insertions(+), 14 deletions(-) diff --git a/internal/uow/README.md b/internal/uow/README.md index e704aae0..38537345 100644 --- a/internal/uow/README.md +++ b/internal/uow/README.md @@ -12,7 +12,7 @@ To use UoW in your use case, you need to implement the DataStorageContext interf ### Example ```python -from typing import Protocol +from typing import Protocol, Type from uuid import UUID, uuid4 from sqlalchemy.orm import Session from internal.uow import UnitOfWork, DataStorageContext @@ -20,8 +20,8 @@ from 
internal.uow import UnitOfWork, DataStorageContext class DatasetRepo(Protocol): def create(self, file_id: UUID, context: DataStorageContext) -> None: ... -def create_uow(context: Session) -> UnitOfWork: - return UnitOfWork(context=context) +def create_uow(context_maker: Type[Session]) -> UnitOfWork: + return UnitOfWork(context_maker=context_maker) def create_two_datasets( uow: UnitOfWork, diff --git a/internal/uow/uow.py b/internal/uow/uow.py index 759e80fa..1a1e8656 100644 --- a/internal/uow/uow.py +++ b/internal/uow/uow.py @@ -13,17 +13,26 @@ def rollback(self) -> None: ... def close(self) -> None: ... +class DataStorageContextMaker(Protocol): + + def __call__(self) -> DataStorageContext: ... + + class UnitOfWork: - def __init__(self, context: DataStorageContext): - self._context = context + def __init__(self, context_maker: DataStorageContextMaker): + self._context_maker: DataStorageContextMaker = context_maker + self._context: DataStorageContext | None = None def __enter__(self) -> DataStorageContext: + self._context = self._context_maker() return self._context def __exit__(self, exc_type, exc_val, exc_tb) -> None: - if exc_type: - self._context.rollback() - else: - self._context.commit() - self._context.close() + if self._context is not None: + if exc_type: + self._context.rollback() + else: + self._context.commit() + self._context.close() + self._context = None diff --git a/tests/uow/test_unit_of_work.py b/tests/uow/test_unit_of_work.py index 351ee831..9228be52 100644 --- a/tests/uow/test_unit_of_work.py +++ b/tests/uow/test_unit_of_work.py @@ -2,6 +2,7 @@ from pytest_mock import MockerFixture from internal.uow import DataStorageContext, UnitOfWork +from internal.uow.uow import DataStorageContextMaker @pytest.fixture @@ -9,8 +10,17 @@ def context_mock(mocker: MockerFixture) -> DataStorageContext: return mocker.Mock(spec=DataStorageContext) -def test_unit_of_work_commit_on_success(context_mock: DataStorageContext) -> None: - uow = UnitOfWork(context_mock) 
+@pytest.fixture +def context_maker_mock( + mocker: MockerFixture, context_mock: DataStorageContext +) -> DataStorageContextMaker: + return mocker.Mock(spec=DataStorageContextMaker, return_value=context_mock) + + +def test_unit_of_work_commit_on_success( + context_maker_mock: DataStorageContextMaker, context_mock: DataStorageContext +) -> None: + uow = UnitOfWork(context_maker_mock) with uow as context: assert isinstance(context, DataStorageContext) @@ -21,8 +31,10 @@ def test_unit_of_work_commit_on_success(context_mock: DataStorageContext) -> Non context_mock.close.assert_called_once() -def test_unit_of_work_rollback_on_failure(context_mock: DataStorageContext) -> None: - uow = UnitOfWork(context_mock) +def test_unit_of_work_rollback_on_failure( + context_maker_mock: DataStorageContextMaker, context_mock: DataStorageContext +) -> None: + uow = UnitOfWork(context_maker_mock) with pytest.raises(ValueError): with uow as context: From c74ca72922ad6bed152f781060cca30b39d70848 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 18:50:09 +0300 Subject: [PATCH 062/153] feat(use case): add file metadata not found exception --- internal/usecase/file/exception.py | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/internal/usecase/file/exception.py b/internal/usecase/file/exception.py index b732554a..f17411c0 100644 --- a/internal/usecase/file/exception.py +++ b/internal/usecase/file/exception.py @@ -1,9 +1,6 @@ class IncorrectFileFormatException(Exception): """ Exception raised when a file format is incorrect or not supported. - - :param message: The error message to be reported. - :type message: str """ def __init__(self, message: str): @@ -15,6 +12,7 @@ def __init__(self, message: str): """ super().__init__(message) + class DatasetNotFoundException(Exception): """ Exception raised when a dataset is not found. @@ -29,3 +27,20 @@ def __init__(self): The default message "Dataset not found" is used. 
""" super().__init__("Dataset not found") + + +class FileMetadataNotFoundException(Exception): + """ + Exception raised when file metadata is not found. + + This exception is used to indicate that the metadata for a specific file is missing + or could not be retrieved, which may impact operations that depend on that metadata. + """ + + def __init__(self): + """ + Initializes an instance of FileMetadataNotFoundException with a default message. + + The default message "File metadata not found" is used to indicate the error. + """ + super().__init__("File metadata not found") From 806b6d9b885ffda32bcf03225a544c8d6f91cb45 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 18:50:53 +0300 Subject: [PATCH 063/153] feat(use case): add use case for retrieving datasets --- internal/usecase/file/__init__.py | 1 + internal/usecase/file/retrieve_dataset.py | 48 +++++++++++++++++++++++ 2 files changed, 49 insertions(+) create mode 100644 internal/usecase/file/retrieve_dataset.py diff --git a/internal/usecase/file/__init__.py b/internal/usecase/file/__init__.py index 73283758..5707d624 100644 --- a/internal/usecase/file/__init__.py +++ b/internal/usecase/file/__init__.py @@ -1,3 +1,4 @@ from internal.usecase.file.check_content_type import CheckContentType from internal.usecase.file.save_dataset import SaveDataset from internal.usecase.file.save_file import SaveFile, SaveFileUseCaseResult +from internal.usecase.file.retrieve_dataset import RetrieveDataset, RetrieveDatasetUseCaseResult diff --git a/internal/usecase/file/retrieve_dataset.py b/internal/usecase/file/retrieve_dataset.py new file mode 100644 index 00000000..44799871 --- /dev/null +++ b/internal/usecase/file/retrieve_dataset.py @@ -0,0 +1,48 @@ +from typing import Protocol +from uuid import UUID +from pydantic import BaseModel + +from internal.dto.repository.file import DatasetFindSchema, DatasetResponseSchema +from internal.uow import DataStorageContext, UnitOfWork +from internal.usecase.file.exception import 
DatasetNotFoundException + + +class DatasetRepo(Protocol): + + def find(self, dataset_info: DatasetFindSchema, context: DataStorageContext) -> DatasetResponseSchema | None: ... + + +class RetrieveDatasetUseCaseResult(BaseModel): + id: UUID + file_id: UUID + separator: str + header: list[int] + +class RetrieveDataset: + + def __init__( + self, + unit_of_work: UnitOfWork, + dataset_repo: DatasetRepo, + ): + + self.unit_of_work = unit_of_work + self.dataset_repo = dataset_repo + + def __call__(self, *, dataset_id: UUID) -> RetrieveDatasetUseCaseResult: + dataset_find_schema = DatasetFindSchema( + id=dataset_id, + ) + + with self.unit_of_work as context: + dataset = self.dataset_repo.find(dataset_find_schema, context) + + if not dataset: + raise DatasetNotFoundException() + + return RetrieveDatasetUseCaseResult( + id=dataset.id, + file_id=dataset.file_id, + separator=dataset.separator, + header=dataset.header, + ) From 606b8e4c8616d4ad5b5ef8c8a339f1b2bddf9325 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 18:51:42 +0300 Subject: [PATCH 064/153] feat(tests): add tests for retrieve dataset use case --- tests/usecase/test_retrieve_dataset.py | 101 +++++++++++++++++++++++++ 1 file changed, 101 insertions(+) create mode 100644 tests/usecase/test_retrieve_dataset.py diff --git a/tests/usecase/test_retrieve_dataset.py b/tests/usecase/test_retrieve_dataset.py new file mode 100644 index 00000000..29e9b927 --- /dev/null +++ b/tests/usecase/test_retrieve_dataset.py @@ -0,0 +1,101 @@ +from uuid import uuid4 + +import pytest +from pytest_mock import MockerFixture + +from internal.dto.repository.file import DatasetResponseSchema, DatasetFindSchema +from internal.uow import UnitOfWork, DataStorageContext +from internal.usecase.file.exception import DatasetNotFoundException +from internal.usecase.file.retrieve_dataset import ( + DatasetRepo, + RetrieveDataset, + RetrieveDatasetUseCaseResult, +) + + +@pytest.fixture +def unit_of_work_mock(mocker: MockerFixture) -> 
UnitOfWork: + mock = mocker.MagicMock() + mock.__enter__.return_value = mocker.Mock( + return_value=mocker.Mock(), spec=DataStorageContext + ) + mock.__exit__.return_value = None + + def exit_side_effect(exc_type, exc_value, traceback) -> bool: + if exc_type: + raise exc_value + return False + + mock.__exit__.side_effect = exit_side_effect + return mock + + +@pytest.fixture +def dataset_repo_mock(mocker: MockerFixture) -> DatasetRepo: + mock = mocker.Mock(spec=DatasetRepo) + return mock + + +@pytest.fixture +def retrieve_dataset_use_case( + unit_of_work_mock: UnitOfWork, dataset_repo_mock: DatasetRepo +) -> RetrieveDataset: + return RetrieveDataset( + unit_of_work=unit_of_work_mock, dataset_repo=dataset_repo_mock + ) + + +def test_retrieve_dataset_use_case_success( + unit_of_work_mock: UnitOfWork, + dataset_repo_mock: DatasetRepo, + retrieve_dataset_use_case: RetrieveDataset, +): + # Prepare data + dataset_id = uuid4() + file_id = uuid4() + + dataset_repo_mock.find.return_value = DatasetResponseSchema( + id=dataset_id, file_id=file_id, separator="?", header=[1, 2, 3, 4, 5] + ) + + # Act + result = retrieve_dataset_use_case(dataset_id=dataset_id) + + # Check result + assert result == RetrieveDatasetUseCaseResult( + id=dataset_id, file_id=file_id, separator="?", header=[1, 2, 3, 4, 5] + ) + + # Check that repositories' find method work correctly + dataset_repo_mock.find.assert_called_once_with( + DatasetFindSchema(id=dataset_id), unit_of_work_mock.__enter__.return_value + ) + + # Check that UnitOfWork was entered and exited correctly + unit_of_work_mock.__enter__.assert_called_once() + unit_of_work_mock.__exit__.assert_called_once() + + +def test_retrieve_dataset_use_case_not_found( + unit_of_work_mock: UnitOfWork, + retrieve_dataset_use_case: RetrieveDataset, + dataset_repo_mock: DatasetRepo, +): + # Prepare data + dataset_id = uuid4() + + # Mocks repository operations + dataset_repo_mock.find.return_value = None + + # Act and except error + with 
pytest.raises(DatasetNotFoundException): + retrieve_dataset_use_case(dataset_id=dataset_id) + + # Check that repositories' find method work correctly + dataset_repo_mock.find.assert_called_once_with( + DatasetFindSchema(id=dataset_id), unit_of_work_mock.__enter__.return_value + ) + + # Check that UnitOfWork was entered and exited correctly + unit_of_work_mock.__enter__.assert_called_once() + unit_of_work_mock.__exit__.assert_called_once() From 69cdbc62de21cbae7a242307ac6e6a777a4baa22 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 18:55:11 +0300 Subject: [PATCH 065/153] fix(use case): remove unnecessery fields in profiling task worker interface --- internal/usecase/task/set_task.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/internal/usecase/task/set_task.py b/internal/usecase/task/set_task.py index b99c7568..ff8e234d 100644 --- a/internal/usecase/task/set_task.py +++ b/internal/usecase/task/set_task.py @@ -20,8 +20,6 @@ def create(self, task_info: TaskCreateSchema, context: DataStorageContext) -> Ta class ProfilingTaskWorker(Protocol): - unit_of_work: UnitOfWork - dataset_repo: DatasetRepo def set(self, task_info: ProfilingTaskCreateSchema) -> None: ... 
From 307f1adf040e247165cac197a665ebcdf7f9140f Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 18:56:07 +0300 Subject: [PATCH 066/153] chore(use case): make usecase arguments callable only by key-value --- internal/usecase/task/retrieve_task.py | 2 +- internal/usecase/task/set_task.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/internal/usecase/task/retrieve_task.py b/internal/usecase/task/retrieve_task.py index b06ebe14..30bf71b0 100644 --- a/internal/usecase/task/retrieve_task.py +++ b/internal/usecase/task/retrieve_task.py @@ -36,7 +36,7 @@ def __init__(self, unit_of_work: UnitOfWork, task_repo: TaskRepo): self.unit_of_work = unit_of_work self.task_repo = task_repo - def __call__(self, task_id: UUID) -> RetrieveTaskUseCaseResult: + def __call__(self, *, task_id: UUID) -> RetrieveTaskUseCaseResult: task_find_schema = TaskFindSchema(id=task_id) with self.unit_of_work as context: diff --git a/internal/usecase/task/set_task.py b/internal/usecase/task/set_task.py index ff8e234d..171d688d 100644 --- a/internal/usecase/task/set_task.py +++ b/internal/usecase/task/set_task.py @@ -41,6 +41,7 @@ def __init__( def __call__( self, + *, dataset_id: UUID, config: OneOfTaskConfig, ) -> UUID: From e35f07701c0907d8dc2c4155b1a84fa9508941dd Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 18:57:54 +0300 Subject: [PATCH 067/153] feat(use case): add use case for updating task info --- internal/usecase/task/update_task_info.py | 62 +++++++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 100644 internal/usecase/task/update_task_info.py diff --git a/internal/usecase/task/update_task_info.py b/internal/usecase/task/update_task_info.py new file mode 100644 index 00000000..f461782c --- /dev/null +++ b/internal/usecase/task/update_task_info.py @@ -0,0 +1,62 @@ +from typing import Protocol +from uuid import UUID + + +from internal.uow import DataStorageContext, UnitOfWork +from internal.domain.task.value_objects import TaskStatus, 
OneOfTaskResult +from internal.dto.repository.task import TaskUpdateSchema, TaskResponseSchema, TaskFindSchema +from internal.dto.repository.task.task import TaskNotFoundException +from internal.usecase.task.exception import TaskNotFoundException as TaskNotFoundUseCaseException + + +class TaskRepo(Protocol): + + def update( + self, + find_schema: TaskFindSchema, + update_schema: TaskUpdateSchema, + fields_to_update_if_none: set[str] | None, + context: DataStorageContext, + ) -> TaskResponseSchema: ... + + +class UpdateTaskInfo: + + def __init__( + self, + unit_of_work: UnitOfWork, + task_repo: TaskRepo, + ): + + self.unit_of_work = unit_of_work + self.task_repo = task_repo + + + def __call__( + self, + *, + task_id: UUID, + fields_to_update_if_none: set[str] | None = None, + + task_status: TaskStatus | None = None, + result: OneOfTaskResult | None = None, + raised_exception_name: str | None = None, + failure_reason: str | None = None, + traceback: str | None = None, + ) -> None: + + task_find_schema = TaskFindSchema(id=task_id) + data_to_update = TaskUpdateSchema( + status=task_status, + result=result, + raised_exception_name=raised_exception_name, + failure_reason=failure_reason, + traceback=traceback, + ) # type: ignore + + + with self.unit_of_work as context: + try: + self.task_repo.update(task_find_schema, data_to_update, fields_to_update_if_none, context) + except TaskNotFoundException: + raise TaskNotFoundUseCaseException() From 383fdf1358e4faef58f2144fb82f9c039c24d5d5 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 18:59:35 +0300 Subject: [PATCH 068/153] chore(dto): update task update schema --- internal/dto/repository/task/task.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/internal/dto/repository/task/task.py b/internal/dto/repository/task/task.py index 766aab69..17929220 100644 --- a/internal/dto/repository/task/task.py +++ b/internal/dto/repository/task/task.py @@ -4,6 +4,11 @@ from internal.dto.repository.base_schema import 
BaseSchema, BaseCreateSchema, BaseUpdateSchema, \ BaseResponseSchema, BaseFindSchema +class TaskNotFoundException(Exception): + + def __init__(self): + super().__init__('Task not found') + class TaskBaseSchema(BaseSchema): status: TaskStatus @@ -15,6 +20,7 @@ class TaskCreateSchema(TaskBaseSchema, BaseCreateSchema): ... class TaskUpdateSchema(TaskBaseSchema, BaseUpdateSchema[UUID]): + status: TaskStatus | None result: OneOfTaskResult | None raised_exception_name: str | None failure_reason: TaskFailureReason | None From cf6ac02e635df040f6c3a925d85bf5c730bff21f Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 18:59:43 +0300 Subject: [PATCH 069/153] feat(tests): add tests for updating task info use case --- tests/usecase/test_update_task_info.py | 104 +++++++++++++++++++++++++ 1 file changed, 104 insertions(+) create mode 100644 tests/usecase/test_update_task_info.py diff --git a/tests/usecase/test_update_task_info.py b/tests/usecase/test_update_task_info.py new file mode 100644 index 00000000..66e335db --- /dev/null +++ b/tests/usecase/test_update_task_info.py @@ -0,0 +1,104 @@ +from uuid import uuid4 + +import pytest +from pytest_mock import MockerFixture + +from internal.domain.task.value_objects import TaskStatus +from internal.dto.repository.task import TaskUpdateSchema, TaskFindSchema +from internal.dto.repository.task.task import TaskNotFoundException +from internal.uow import UnitOfWork, DataStorageContext +from internal.usecase.task.update_task_info import TaskRepo, UpdateTaskInfo +from internal.usecase.task.exception import ( + TaskNotFoundException as TaskNotFoundUseCaseException, +) + + +@pytest.fixture +def unit_of_work_mock(mocker: MockerFixture) -> UnitOfWork: + mock = mocker.MagicMock() + mock.__enter__.return_value = mocker.Mock( + return_value=mocker.Mock(), spec=DataStorageContext + ) + mock.__exit__.return_value = None + + def exit_side_effect(exc_type, exc_value, traceback) -> bool: + if exc_type: + raise exc_value + return False + + 
mock.__exit__.side_effect = exit_side_effect + return mock + + +@pytest.fixture +def task_repo_mock(mocker: MockerFixture) -> TaskRepo: + mock = mocker.Mock(spec=TaskRepo) + return mock + + +@pytest.fixture +def update_task_info_use_case( + unit_of_work_mock: UnitOfWork, task_repo_mock: TaskRepo +) -> UpdateTaskInfo: + return UpdateTaskInfo( + unit_of_work=unit_of_work_mock, + task_repo=task_repo_mock, + ) + + +def test_update_task_info_success( + update_task_info_use_case: UpdateTaskInfo, + unit_of_work_mock: UnitOfWork, + task_repo_mock: TaskRepo, +) -> None: + # Prepare data + task_id = uuid4() + task_status = TaskStatus.RUNNING + + find_schema = TaskFindSchema(id=task_id) + update_schema = TaskUpdateSchema(status=task_status) # type: ignore + + # Act + update_task_info_use_case( + task_id=task_id, task_status=task_status, fields_to_update_if_none=None + ) + + # Check that all repository methods were called correctly + task_repo_mock.update.assert_called_once_with( + find_schema, update_schema, None, unit_of_work_mock.__enter__.return_value + ) + + # Check that UnitOfWork was entered and exited correctly + unit_of_work_mock.__enter__.assert_called_once() + unit_of_work_mock.__exit__.assert_called_once() + + +@pytest.mark.parametrize( + "repo_exception, use_case_exception", + [ + (TaskNotFoundException, TaskNotFoundUseCaseException), + ], +) +def test_update_task_info_unsuccess( + update_task_info_use_case: UpdateTaskInfo, + unit_of_work_mock: UnitOfWork, + task_repo_mock: TaskRepo, + repo_exception: Exception, + use_case_exception: Exception, +) -> None: + # Prepare data + task_id = uuid4() + + # Mocks + task_repo_mock.update.side_effect = repo_exception + + # Act and except error + with pytest.raises(use_case_exception): + update_task_info_use_case(task_id=task_id) + + task_repo_mock.update.assert_called_once_with( + TaskFindSchema(id=task_id), + TaskUpdateSchema(), # type: ignore + None, + unit_of_work_mock.__enter__.return_value, + ) From 
b2a654e51e20e3df64f9f0201ab8141678d4c4f1 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 19:03:27 +0300 Subject: [PATCH 070/153] feat(dto): add dataset and file metadata repositories' exceptions --- internal/dto/repository/file/__init__.py | 6 ++++-- internal/dto/repository/file/dataset.py | 6 ++++++ internal/dto/repository/file/file_metadata.py | 6 ++++++ 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/internal/dto/repository/file/__init__.py b/internal/dto/repository/file/__init__.py index cdebb91f..9917ff2c 100644 --- a/internal/dto/repository/file/__init__.py +++ b/internal/dto/repository/file/__init__.py @@ -2,6 +2,8 @@ FileCreateSchema, FileUpdateSchema, FailedFileReadingException) from internal.dto.repository.file.file_metadata import (FileMetadataResponseSchema, FileMetadataCreateSchema, - FileMetadataFindSchema, FileMetadataUpdateSchema) + FileMetadataFindSchema, FileMetadataUpdateSchema, + FileMetadataNotFoundException) from internal.dto.repository.file.dataset import (DatasetResponseSchema, DatasetCreateSchema, - DatasetUpdateSchema, DatasetFindSchema) + DatasetUpdateSchema, DatasetFindSchema, + DatasetNotFoundException) diff --git a/internal/dto/repository/file/dataset.py b/internal/dto/repository/file/dataset.py index b20ebc8a..ff84ee59 100644 --- a/internal/dto/repository/file/dataset.py +++ b/internal/dto/repository/file/dataset.py @@ -4,6 +4,12 @@ BaseResponseSchema, BaseFindSchema +class DatasetNotFoundException(Exception): + + def __init__(self): + super().__init__('Dataset not found') + + class DatasetBaseSchema(BaseSchema): file_id: UUID separator: str diff --git a/internal/dto/repository/file/file_metadata.py b/internal/dto/repository/file/file_metadata.py index 2a721455..5f96af7d 100644 --- a/internal/dto/repository/file/file_metadata.py +++ b/internal/dto/repository/file/file_metadata.py @@ -4,6 +4,12 @@ BaseResponseSchema, BaseFindSchema +class FileMetadataNotFoundException(Exception): + + def __init__(self): + 
super().__init__('File metadata not found') + + class FileMetadataBaseSchema(BaseSchema): file_name: UUID original_file_name: str From cf8ba5be321de27adbb966baf4c85a281d710afb Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 19:04:07 +0300 Subject: [PATCH 071/153] feat(dto): add find and response schemas for csv files --- internal/dto/repository/file/__init__.py | 1 + internal/dto/repository/file/file.py | 11 +++++++++++ 2 files changed, 12 insertions(+) diff --git a/internal/dto/repository/file/__init__.py b/internal/dto/repository/file/__init__.py index 9917ff2c..30c3cbb0 100644 --- a/internal/dto/repository/file/__init__.py +++ b/internal/dto/repository/file/__init__.py @@ -7,3 +7,4 @@ from internal.dto.repository.file.dataset import (DatasetResponseSchema, DatasetCreateSchema, DatasetUpdateSchema, DatasetFindSchema, DatasetNotFoundException) +from internal.dto.repository.file.file import CSVFileFindSchema, CSVFileResponseSchema diff --git a/internal/dto/repository/file/file.py b/internal/dto/repository/file/file.py index 6212d911..2deb14e3 100644 --- a/internal/dto/repository/file/file.py +++ b/internal/dto/repository/file/file.py @@ -1,6 +1,8 @@ from typing import Protocol from uuid import UUID +import pandas as pd + from internal.dto.repository.base_schema import BaseCreateSchema, BaseUpdateSchema, BaseSchema @@ -30,4 +32,13 @@ class FileUpdateSchema(FileBaseSchema, BaseUpdateSchema[UUID]): ... class FileFindSchema(FileBaseSchema, BaseSchema): ... 
# it's not a typo + FileResponseSchema = None + + +class CSVFileFindSchema(FileFindSchema): + separator: str + header: list[int] + + +CSVFileResponseSchema = pd.DataFrame From efb491360119d833355f132835332070efe19278 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 19:09:36 +0300 Subject: [PATCH 072/153] feat(domain): add a function to get a primitive by its name --- internal/domain/task/entities/__init__.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/internal/domain/task/entities/__init__.py b/internal/domain/task/entities/__init__.py index 87feb827..c524b935 100644 --- a/internal/domain/task/entities/__init__.py +++ b/internal/domain/task/entities/__init__.py @@ -1,2 +1,14 @@ +from typing import assert_never + from internal.domain.task.entities.fd import FdTask from internal.domain.task.entities.afd import AfdTask +from internal.domain.task.value_objects import PrimitiveName + + +def match_task_by_primitive_name(primitive_name: PrimitiveName): + match primitive_name: + case PrimitiveName.fd: + return FdTask() + case PrimitiveName.afd: + return AfdTask() + assert_never(primitive_name) From c2c17012e96bdcf5719fe6b00539a57b55760035 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 19:09:47 +0300 Subject: [PATCH 073/153] feat(use case): add use case for task profiling --- internal/usecase/task/profile_task.py | 68 +++++++++++++++++++++++++++ 1 file changed, 68 insertions(+) create mode 100644 internal/usecase/task/profile_task.py diff --git a/internal/usecase/task/profile_task.py b/internal/usecase/task/profile_task.py new file mode 100644 index 00000000..afab30f1 --- /dev/null +++ b/internal/usecase/task/profile_task.py @@ -0,0 +1,68 @@ +from typing import Protocol +from uuid import UUID + +from internal.domain.task.entities import match_task_by_primitive_name +from internal.domain.task.value_objects import OneOfTaskResult, OneOfTaskConfig +from internal.dto.repository.file import DatasetFindSchema, DatasetResponseSchema, 
FileMetadataResponseSchema +from internal.dto.repository.file import CSVFileFindSchema, CSVFileResponseSchema +from internal.dto.repository.file import DatasetNotFoundException, FileMetadataNotFoundException +from internal.usecase.file.exception import DatasetNotFoundException as DatasetNotFoundUseCaseException +from internal.usecase.file.exception import FileMetadataNotFoundException as FileMetadataNotFoundUseCaseException +from internal.uow import UnitOfWork, DataStorageContext + + +class DatasetRepo(Protocol): + + def find_with_file_metadata( + self, + dataset_info: DatasetFindSchema, + context: DataStorageContext + ) -> tuple[DatasetResponseSchema, FileMetadataResponseSchema]: ... + + +class FileRepo(Protocol): + + def find( + self, + file_info: CSVFileFindSchema, + context: DataStorageContext + ) -> CSVFileResponseSchema: ... + + +class ProfileTask: + + def __init__( + self, + unit_of_work: UnitOfWork, + file_repo: FileRepo, + dataset_repo: DatasetRepo, + ): + self.unit_of_work = unit_of_work + self.file_repo = file_repo + self.dataset_repo = dataset_repo + + def __call__(self, *, dataset_id: UUID, config: OneOfTaskConfig) -> OneOfTaskResult: + + with self.unit_of_work as context: + try: + dataset, file_metadata = self.dataset_repo.find_with_file_metadata( + DatasetFindSchema(id=dataset_id), + context + ) + + df = self.file_repo.find( + CSVFileFindSchema( + file_name=file_metadata.file_name, + separator=dataset.separator, + header=dataset.header, + ), + context, + ) + except DatasetNotFoundException: + raise DatasetNotFoundUseCaseException() + except FileMetadataNotFoundException: + raise FileMetadataNotFoundUseCaseException() + + task = match_task_by_primitive_name(primitive_name=config.primitive_name) + result = task.execute(table=df, task_config=config) # type: ignore + return result From e0106539704dfa1bd1fe9df329d0c94334593d21 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 19:09:58 +0300 Subject: [PATCH 074/153] feat(tests): add tests for 
profile task use case --- tests/usecase/test_profile_task.py | 190 +++++++++++++++++++++++++++++ 1 file changed, 190 insertions(+) create mode 100644 tests/usecase/test_profile_task.py diff --git a/tests/usecase/test_profile_task.py b/tests/usecase/test_profile_task.py new file mode 100644 index 00000000..a5756c02 --- /dev/null +++ b/tests/usecase/test_profile_task.py @@ -0,0 +1,190 @@ +from uuid import uuid4 + +import pandas as pd +import pytest +from pytest_mock import MockerFixture + +from internal.uow import UnitOfWork, DataStorageContext +from internal.domain.task.value_objects import FdTaskConfig, PrimitiveName, FdTaskResult +from internal.domain.task.value_objects.fd import FdAlgoResult, FdAlgoName +from internal.usecase.task.profile_task import DatasetRepo, FileRepo, ProfileTask +from internal.usecase.file.exception import ( + DatasetNotFoundException as DatasetNotFoundUseCaseException, +) +from internal.usecase.file.exception import ( + FileMetadataNotFoundException as FileMetadataNotFoundUseCaseException, +) +from internal.dto.repository.file import ( + FileMetadataResponseSchema, + DatasetResponseSchema, + DatasetFindSchema, + CSVFileFindSchema, + DatasetNotFoundException, + FileMetadataNotFoundException, +) + + +@pytest.fixture +def unit_of_work_mock(mocker: MockerFixture) -> UnitOfWork: + mock = mocker.MagicMock() + mock.__enter__.return_value = mocker.Mock( + return_value=mocker.Mock(), spec=DataStorageContext + ) + mock.__exit__.return_value = None + + def exit_side_effect(exc_type, exc_value, traceback) -> bool: + if exc_type: + raise exc_value + return False + + mock.__exit__.side_effect = exit_side_effect + return mock + + +@pytest.fixture +def dataset_repo_mock(mocker: MockerFixture) -> DatasetRepo: + mock = mocker.Mock(spec=DatasetRepo) + return mock + + +@pytest.fixture +def file_repo_mock(mocker: MockerFixture) -> FileRepo: + mock = mocker.Mock(spec=FileRepo) + return mock + + +@pytest.fixture +def profile_task_use_case( + unit_of_work_mock: 
UnitOfWork, + dataset_repo_mock: DatasetRepo, + file_repo_mock: FileRepo, +) -> ProfileTask: + return ProfileTask( + unit_of_work=unit_of_work_mock, + dataset_repo=dataset_repo_mock, + file_repo=file_repo_mock, + ) + + +def test_profile_task_use_case_success( + mocker: MockerFixture, + profile_task_use_case: ProfileTask, + unit_of_work_mock: UnitOfWork, + dataset_repo_mock: DatasetRepo, + file_repo_mock: FileRepo, +) -> None: + # Prepare data + dataset_id = uuid4() + file_id = uuid4() + + file_metadata_response = FileMetadataResponseSchema( + id=file_id, + file_name=uuid4(), + original_file_name="name", + mime_type="application/octet-stream", + ) + + dataset_response = DatasetResponseSchema( + id=dataset_id, + file_id=file_id, + separator=",", + header=[0], + ) + + cvs_file_read_response = pd.DataFrame( + {"column1": [1, 2, 3], "column2": ["a", "b", "c"]} + ) + + task_config = FdTaskConfig( + primitive_name=PrimitiveName.fd, config={"algo_name": FdAlgoName.Aid} + ) + + task_result = FdTaskResult( + primitive_name=PrimitiveName.fd, result=FdAlgoResult(fds=[]) + ) + + # Mocks + dataset_repo_mock.find_with_file_metadata.return_value = ( + dataset_response, + file_metadata_response, + ) + file_repo_mock.find.return_value = cvs_file_read_response + + # Mock the execution of the task + task_mock = mocker.Mock() + task_mock.execute.return_value = task_result + mocker.patch( + "internal.usecase.task.profile_task.match_task_by_primitive_name", + return_value=task_mock, + ) + + # Act + result = profile_task_use_case(dataset_id=dataset_id, config=task_config) + + # Check result + assert task_result == result + + # Check that all methods were called correctly + dataset_repo_mock.find_with_file_metadata.assert_called_once_with( + DatasetFindSchema(id=dataset_id), + unit_of_work_mock.__enter__.return_value, + ) + + file_repo_mock.find.assert_called_once_with( + CSVFileFindSchema( + file_name=file_metadata_response.file_name, + separator=dataset_response.separator, + 
header=dataset_response.header, + ), + unit_of_work_mock.__enter__.return_value, + ) + + task_mock.execute.assert_called_once_with( + table=cvs_file_read_response, task_config=task_config + ) + + # Check that UnitOfWork was entered and exited correctly + unit_of_work_mock.__enter__.assert_called_once() + unit_of_work_mock.__exit__.assert_called_once() + + +@pytest.mark.parametrize( + "repo_exception, use_case_exception", + [ + (DatasetNotFoundException, DatasetNotFoundUseCaseException), + (FileMetadataNotFoundException, FileMetadataNotFoundUseCaseException), + ], +) +def test_profile_task_use_case_dataset_not_found( + profile_task_use_case: ProfileTask, + unit_of_work_mock: UnitOfWork, + dataset_repo_mock: DatasetRepo, + file_repo_mock: FileRepo, + repo_exception: Exception, + use_case_exception: Exception, +) -> None: + # Prepare data + dataset_id = uuid4() + + task_config = FdTaskConfig( + primitive_name=PrimitiveName.fd, config={"algo_name": FdAlgoName.Aid} + ) + + # Mocks + dataset_repo_mock.find_with_file_metadata.side_effect = repo_exception + + # Act and except error + with pytest.raises(use_case_exception): + profile_task_use_case(dataset_id=dataset_id, config=task_config) + + # Check that all methods were called correctly + dataset_repo_mock.find_with_file_metadata.assert_called_once_with( + DatasetFindSchema(id=dataset_id), + unit_of_work_mock.__enter__.return_value, + ) + + assert not file_repo_mock.find.called + + # Check that UnitOfWork was entered and exited correctly + unit_of_work_mock.__enter__.assert_called_once() + unit_of_work_mock.__exit__.assert_called_once() From cab295a30f385890bf779360225fdb513893b879 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 21:28:11 +0300 Subject: [PATCH 075/153] chore(infra): change name of relational context type --- internal/infrastructure/data_storage/relational/context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/infrastructure/data_storage/relational/context.py 
b/internal/infrastructure/data_storage/relational/context.py index d5a9b8fe..07284b33 100644 --- a/internal/infrastructure/data_storage/relational/context.py +++ b/internal/infrastructure/data_storage/relational/context.py @@ -1,3 +1,3 @@ from sqlalchemy.orm import Session -RelationalStorageContext = Session +RelationalContextType = Session From 7e52f895f8f041f833ae36fb7961312cfc7b8d66 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 21:28:30 +0300 Subject: [PATCH 076/153] feat(infra): add postgres context and context maker --- .../relational/postgres/__init__.py | 4 +-- .../relational/postgres/context.py | 27 +++++++------------ 2 files changed, 11 insertions(+), 20 deletions(-) diff --git a/internal/infrastructure/data_storage/relational/postgres/__init__.py b/internal/infrastructure/data_storage/relational/postgres/__init__.py index 71d0c3b2..cc233c5d 100644 --- a/internal/infrastructure/data_storage/relational/postgres/__init__.py +++ b/internal/infrastructure/data_storage/relational/postgres/__init__.py @@ -1,2 +1,2 @@ -from internal.infrastructure.data_storage.relational.postgres.context import (get_context, - get_context_without_pool) +from internal.infrastructure.data_storage.relational.postgres.context import (get_postgres_context_maker, + get_postgres_context_maker_without_pool) diff --git a/internal/infrastructure/data_storage/relational/postgres/context.py b/internal/infrastructure/data_storage/relational/postgres/context.py index fdc1ad61..159379df 100644 --- a/internal/infrastructure/data_storage/relational/postgres/context.py +++ b/internal/infrastructure/data_storage/relational/postgres/context.py @@ -1,10 +1,7 @@ -from typing import Generator - from sqlalchemy import create_engine, NullPool -from sqlalchemy.orm import sessionmaker +from sqlalchemy.orm import sessionmaker, Session from internal.infrastructure.data_storage import settings -from internal.infrastructure.data_storage.relational.context import RelationalStorageContext 
default_engine = create_engine(url=settings.postgres_dsn.unicode_string()) engine_without_pool = create_engine( @@ -12,20 +9,14 @@ poolclass=NullPool, ) -ContextLocal = sessionmaker(bind=default_engine) -ContextLocalWithoutPool = sessionmaker(bind=engine_without_pool) +PostgresContextType = Session +PostgresContextMaker = sessionmaker(bind=default_engine) +PostgresContextMakerWithoutPool = sessionmaker(bind=engine_without_pool) + +def get_postgres_context_maker() -> PostgresContextMaker: + return PostgresContextMaker -def get_context() -> Generator[RelationalStorageContext, None, None]: - """ - Returns a generator that yields a context.py(session) object for database operations. - """ - with ContextLocal() as context: - yield context -def get_context_without_pool() -> Generator[RelationalStorageContext, None, None]: - """ - Returns a generator that yields a context.py(session) object without pool for database operations. - """ - with ContextLocalWithoutPool() as context: - yield context +def get_postgres_context_maker_without_pool() -> PostgresContextMakerWithoutPool: + return PostgresContextMakerWithoutPool From 27124eadcd157b48ca377019320067e95653c076 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 21:28:46 +0300 Subject: [PATCH 077/153] feat(repo): add find(read) csv file method to the file repository --- internal/repository/flat/file.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/internal/repository/flat/file.py b/internal/repository/flat/file.py index fd087176..4cb9f40d 100644 --- a/internal/repository/flat/file.py +++ b/internal/repository/flat/file.py @@ -1,7 +1,9 @@ from pathlib import Path + import aiofiles +import pandas as pd -from internal.dto.repository.file.file import FailedFileReadingException +from internal.dto.repository.file.file import FailedFileReadingException, CSVFileReadSchema, CSVFileReadResponseSchema from internal.infrastructure.data_storage import settings from 
internal.dto.repository.file import File, FileCreateSchema, FileResponseSchema from internal.uow import DataStorageContext @@ -27,3 +29,18 @@ async def create( await out_file.write(content) except Exception: raise FailedFileReadingException("The sent file could not be read.") + + + def find( + self, + file_info: CSVFileReadSchema, + context: DataStorageContext # The current repository implementation does not support transactions. + ) -> CSVFileReadResponseSchema: + + path_to_file = Path(self.files_dir_path, str(file_info.file_name)) + + return pd.read_csv( + path_to_file, + sep=file_info.separator, + header=file_info.header, + ) From 6cf24b6a26732771282ffb19ec54d6c433c43b2e Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 21:31:17 +0300 Subject: [PATCH 078/153] feat(repo): add class with crud operations for relational repositories --- internal/repository/relational/__init__.py | 1 + internal/repository/relational/crud.py | 90 ++++++++++++++++++++++ 2 files changed, 91 insertions(+) create mode 100644 internal/repository/relational/__init__.py create mode 100644 internal/repository/relational/crud.py diff --git a/internal/repository/relational/__init__.py b/internal/repository/relational/__init__.py new file mode 100644 index 00000000..f7d802e5 --- /dev/null +++ b/internal/repository/relational/__init__.py @@ -0,0 +1 @@ +from internal.repository.relational.crud import CRUD diff --git a/internal/repository/relational/crud.py b/internal/repository/relational/crud.py new file mode 100644 index 00000000..a8da6087 --- /dev/null +++ b/internal/repository/relational/crud.py @@ -0,0 +1,90 @@ +from typing import Type + +from sqlalchemy import select + +from internal.infrastructure.data_storage.relational.model import ORMBaseModel +from internal.dto.repository.base_schema import (BaseCreateSchema, BaseUpdateSchema, + BaseFindSchema, BaseResponseSchema) +from internal.infrastructure.data_storage.relational.context import RelationalContextType + + +class CRUD[ + 
ORMModel: ORMBaseModel, + CreateSchema: BaseCreateSchema, + UpdateSchema: BaseUpdateSchema, + FindSchema: BaseFindSchema, + ResponseSchema: BaseResponseSchema +]: + + def __init__( + self, + orm_model: Type[ORMModel], + response_schema: Type[ResponseSchema] + ) -> None: + + self._orm_model: Type[ORMModel] = orm_model + self._response_schema: Type[ResponseSchema] = response_schema + + + def create(self, create_schema: CreateSchema, context: RelationalContextType) -> ResponseSchema: + create_schema_dict = create_schema.model_dump() + db_model_instance = self._orm_model(**create_schema_dict) + context.add(db_model_instance) + context.flush() + return self._response_schema.model_validate(db_model_instance) + + + def _find(self, find_schema: FindSchema, context: RelationalContextType) -> ORMModel | None: + find_schema_dict = find_schema.model_dump() + stmt = select(self._orm_model).filter_by(**find_schema_dict) + db_model_instance = context.execute(stmt).scalars().one_or_none() + return db_model_instance + + + def find(self, find_schema: FindSchema, context: RelationalContextType) -> ResponseSchema | None: + db_model_instance = self._find(find_schema, context) + response = self._response_schema.model_validate(db_model_instance) if db_model_instance else None + return response + + + def find_or_create( + self, + find_schema: FindSchema, + create_schema: CreateSchema, + context: RelationalContextType + ) -> ResponseSchema: + + db_model_instance = self._find(find_schema, context) + if not db_model_instance: + db_model_instance = self.create(create_schema, context) + return self._response_schema.model_validate(db_model_instance) + + + def update( + self, + find_schema: FindSchema, + update_schema: UpdateSchema, + fields_to_update_if_none: set[str] | None, + context: RelationalContextType + ) -> ResponseSchema: + + db_model_instance = self._find(find_schema, context) + update_schema_dict = update_schema.model_dump() + fields_to_update_if_none = fields_to_update_if_none if 
fields_to_update_if_none else set() + + for key, value in update_schema_dict.items(): + if value is not None or key in fields_to_update_if_none: + setattr(db_model_instance, key, value) + + context.add(db_model_instance) + context.flush() + + return self._response_schema.model_validate(db_model_instance) + + + def delete(self, find_schema: FindSchema, context: RelationalContextType) -> ResponseSchema | None: + db_model_instance = self._find(find_schema, context) + if not db_model_instance: return None + context.delete(db_model_instance) + context.flush() + return self._response_schema.model_validate(db_model_instance) From 2f1c5065d33505d0ecea04865c6eeee5cccb7536 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 22:05:24 +0300 Subject: [PATCH 079/153] feat(repo): add file metadata repository implementation --- .../repository/relational/file/__init__.py | 1 + .../relational/file/file_metadata.py | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+) create mode 100644 internal/repository/relational/file/__init__.py create mode 100644 internal/repository/relational/file/file_metadata.py diff --git a/internal/repository/relational/file/__init__.py b/internal/repository/relational/file/__init__.py new file mode 100644 index 00000000..e11a6fcc --- /dev/null +++ b/internal/repository/relational/file/__init__.py @@ -0,0 +1 @@ +from internal.repository.relational.file.file_metadata import FileMetaDataRepository diff --git a/internal/repository/relational/file/file_metadata.py b/internal/repository/relational/file/file_metadata.py new file mode 100644 index 00000000..daaac6c1 --- /dev/null +++ b/internal/repository/relational/file/file_metadata.py @@ -0,0 +1,18 @@ +from internal.dto.repository.file import (FileMetadataCreateSchema, FileMetadataUpdateSchema, + FileMetadataFindSchema, FileMetadataResponseSchema) +from internal.infrastructure.data_storage.relational.model.file import FileMetadataORM +from internal.repository.relational import CRUD + + +class 
FileMetaDataRepository( + CRUD[ + FileMetadataORM, + FileMetadataCreateSchema, + FileMetadataUpdateSchema, + FileMetadataFindSchema, + FileMetadataResponseSchema + ] +): + + def __init__(self): + super().__init__(orm_model=FileMetadataORM, response_schema=FileMetadataResponseSchema) From efdc4a80af3a0875d8fc1db5a8d72fed4994359e Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 23:57:26 +0300 Subject: [PATCH 080/153] fix(tests): update conftest file, delete alembic tests --- Makefile | 2 +- tests/conftest.py | 15 ++++----------- 2 files changed, 5 insertions(+), 12 deletions(-) diff --git a/Makefile b/Makefile index 2ec7afce..1cc8b2a4 100644 --- a/Makefile +++ b/Makefile @@ -63,7 +63,7 @@ format: ## Run all tests in project test: - poetry run pytest -o log_cli=true --verbosity=2 --showlocals --log-cli-level=INFO --test-alembic --cov=internal --cov-report term + poetry run pytest -o log_cli=true --verbosity=2 --showlocals --log-cli-level=INFO --cov=internal --cov-report term .DEFAULT_GOAL := help # See for explanation. 
diff --git a/tests/conftest.py b/tests/conftest.py index 26bace98..64d78e9c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,12 +1,11 @@ import pytest -from pytest_alembic import Config from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from sqlalchemy_utils import database_exists, create_database import logging -from app.db import ORMBase -from app.settings import settings +from internal.infrastructure.data_storage.relational.model import ORMBaseModel +from internal.infrastructure.data_storage import settings # https://stackoverflow.com/questions/61582142/test-pydantic-settings-in-fastapi # Maybe should be overriden by env vars for testing only @@ -20,17 +19,11 @@ def prepare_db(): logging.info("Setup database: %s", settings.postgres_dsn.unicode_string()) if not database_exists(settings.postgres_dsn.unicode_string()): create_database(settings.postgres_dsn.unicode_string()) - ORMBase.metadata.drop_all(bind=test_engine) - ORMBase.metadata.create_all(bind=test_engine) + ORMBaseModel.metadata.drop_all(bind=test_engine) + ORMBaseModel.metadata.create_all(bind=test_engine) @pytest.fixture(scope="session", autouse=True) def session(): session = sessionmaker(test_engine, expire_on_commit=False) yield session - - -@pytest.fixture -def alembic_config(): - options = {"file": "app/settings/alembic.ini"} - return Config(config_options=options) From b96329335670f7c94a9a0975458be58f72c2f77c Mon Sep 17 00:00:00 2001 From: raf-nr Date: Fri, 20 Sep 2024 23:57:48 +0300 Subject: [PATCH 081/153] feat(infra): add necessary imports for migrations --- .../data_storage/relational/postgres/migrations/env.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/internal/infrastructure/data_storage/relational/postgres/migrations/env.py b/internal/infrastructure/data_storage/relational/postgres/migrations/env.py index 23ca22d0..74d0c6eb 100644 --- a/internal/infrastructure/data_storage/relational/postgres/migrations/env.py +++ 
b/internal/infrastructure/data_storage/relational/postgres/migrations/env.py @@ -7,6 +7,9 @@ from internal.infrastructure.data_storage import settings from internal.infrastructure.data_storage.relational.model import ORMBaseModel +from internal.infrastructure.data_storage.relational.model.file.file_metadata import FileMetadataORM # noqa: F401 +from internal.infrastructure.data_storage.relational.model.file.dataset import DatasetORM # noqa: F401 +from internal.infrastructure.data_storage.relational.model.task import TaskORM # noqa: F401 # this is the Alembic Config object, which provides From 73fcab24635ec3f92d20dd7361584d3365579e94 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 00:30:40 +0300 Subject: [PATCH 082/153] feat(repo): add dataset repository implementation --- .../repository/relational/file/__init__.py | 1 + .../repository/relational/file/dataset.py | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+) create mode 100644 internal/repository/relational/file/dataset.py diff --git a/internal/repository/relational/file/__init__.py b/internal/repository/relational/file/__init__.py index e11a6fcc..d839a5cc 100644 --- a/internal/repository/relational/file/__init__.py +++ b/internal/repository/relational/file/__init__.py @@ -1 +1,2 @@ from internal.repository.relational.file.file_metadata import FileMetaDataRepository +from internal.repository.relational.file.dataset import DatasetRepository diff --git a/internal/repository/relational/file/dataset.py b/internal/repository/relational/file/dataset.py new file mode 100644 index 00000000..f26b2b42 --- /dev/null +++ b/internal/repository/relational/file/dataset.py @@ -0,0 +1,19 @@ + +from internal.infrastructure.data_storage.relational.model.file import DatasetORM +from internal.repository.relational import CRUD +from internal.dto.repository.file import (DatasetCreateSchema, DatasetUpdateSchema, + DatasetFindSchema, DatasetResponseSchema) + + +class DatasetRepository( + CRUD[ + DatasetORM, + 
DatasetCreateSchema, + DatasetUpdateSchema, + DatasetFindSchema, + DatasetResponseSchema + ] +): + + def __init__(self): + super().__init__(orm_model=DatasetORM, response_schema=DatasetResponseSchema) From 62c021be09233a1028e6e3c869615c27600f89ea Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 01:27:25 +0300 Subject: [PATCH 083/153] feat(tests): update conftest file with database preparing fixture --- tests/conftest.py | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 64d78e9c..2646460b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -15,7 +15,7 @@ @pytest.fixture(scope="session", autouse=True) -def prepare_db(): +def prepare_postgres(): logging.info("Setup database: %s", settings.postgres_dsn.unicode_string()) if not database_exists(settings.postgres_dsn.unicode_string()): create_database(settings.postgres_dsn.unicode_string()) @@ -23,7 +23,22 @@ def prepare_db(): ORMBaseModel.metadata.drop_all(bind=test_engine) ORMBaseModel.metadata.create_all(bind=test_engine) -@pytest.fixture(scope="session", autouse=True) -def session(): - session = sessionmaker(test_engine, expire_on_commit=False) - yield session +@pytest.fixture(scope="session") +def postgres_context_maker(): + return sessionmaker(test_engine, expire_on_commit=False) + + +@pytest.fixture(scope="function") +def postgres_context(postgres_context_maker): + context = postgres_context_maker() + + yield context + + context.close() + + +@pytest.fixture(autouse=True) +def clean_tables(postgres_context): + for table in reversed(ORMBaseModel.metadata.sorted_tables): + postgres_context.execute(table.delete()) + postgres_context.commit() From 4f205d38c9f844538b2f998b4043f65c9b14b16c Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 01:31:53 +0300 Subject: [PATCH 084/153] feat(repo): add find with join for dataset repository --- .../repository/relational/file/dataset.py | 25 ++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git 
a/internal/repository/relational/file/dataset.py b/internal/repository/relational/file/dataset.py index f26b2b42..9d59b2df 100644 --- a/internal/repository/relational/file/dataset.py +++ b/internal/repository/relational/file/dataset.py @@ -1,8 +1,12 @@ +from sqlalchemy import select +from sqlalchemy.orm import joinedload +from internal.infrastructure.data_storage.relational.context import RelationalContextType from internal.infrastructure.data_storage.relational.model.file import DatasetORM from internal.repository.relational import CRUD from internal.dto.repository.file import (DatasetCreateSchema, DatasetUpdateSchema, - DatasetFindSchema, DatasetResponseSchema) + DatasetFindSchema, DatasetResponseSchema, FileMetadataResponseSchema, + DatasetNotFoundException) class DatasetRepository( @@ -17,3 +21,22 @@ class DatasetRepository( def __init__(self): super().__init__(orm_model=DatasetORM, response_schema=DatasetResponseSchema) + + + def find_with_file_metadata( + self, + dataset_info: DatasetFindSchema, + context: RelationalContextType, + ) -> tuple[DatasetResponseSchema, FileMetadataResponseSchema]: + + dataset_find_dict = dataset_info.model_dump() + stmt = select(DatasetORM).options(joinedload(DatasetORM.file_metadata)).filter_by(**dataset_find_dict) + dataset_orm_instance = context.execute(stmt).scalars().one_or_none() + + if not dataset_orm_instance: + raise DatasetNotFoundException() + + dataset_response = DatasetResponseSchema.model_validate(dataset_orm_instance) + file_metadata_response = FileMetadataResponseSchema.model_validate(dataset_orm_instance.file_metadata) + + return dataset_response, file_metadata_response From 812b5239ce86edfd565bec2ff243f830e6ee84dc Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 01:32:25 +0300 Subject: [PATCH 085/153] feat(repo): add task repository implementation --- internal/repository/relational/task/__init__.py | 1 + internal/repository/relational/task/task.py | 17 +++++++++++++++++ 2 files changed, 18 
insertions(+) create mode 100644 internal/repository/relational/task/__init__.py create mode 100644 internal/repository/relational/task/task.py diff --git a/internal/repository/relational/task/__init__.py b/internal/repository/relational/task/__init__.py new file mode 100644 index 00000000..3381c2a1 --- /dev/null +++ b/internal/repository/relational/task/__init__.py @@ -0,0 +1 @@ +from internal.repository.relational.task.task import TaskRepository diff --git a/internal/repository/relational/task/task.py b/internal/repository/relational/task/task.py new file mode 100644 index 00000000..58e23c7d --- /dev/null +++ b/internal/repository/relational/task/task.py @@ -0,0 +1,17 @@ +from internal.infrastructure.data_storage.relational.model.task import TaskORM +from internal.repository.relational import CRUD +from internal.dto.repository.task import TaskCreateSchema, TaskUpdateSchema, TaskFindSchema, TaskResponseSchema + + +class TaskRepository( + CRUD[ + TaskORM, + TaskCreateSchema, + TaskUpdateSchema, + TaskFindSchema, + TaskResponseSchema + ] +): + + def __init__(self): + super().__init__(orm_model=TaskORM, response_schema=TaskResponseSchema) From e69eb796da95ad2da2b330c24f11a6ffdabf71f7 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 01:34:19 +0300 Subject: [PATCH 086/153] feat(infra): init background task and celery modules --- internal/infrastructure/background_task/__init__.py | 0 internal/infrastructure/background_task/celery/__init__.py | 0 internal/infrastructure/background_task/celery/task/__init__.py | 0 3 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 internal/infrastructure/background_task/__init__.py create mode 100644 internal/infrastructure/background_task/celery/__init__.py create mode 100644 internal/infrastructure/background_task/celery/task/__init__.py diff --git a/internal/infrastructure/background_task/__init__.py b/internal/infrastructure/background_task/__init__.py new file mode 100644 index 00000000..e69de29b diff 
--git a/internal/infrastructure/background_task/celery/__init__.py b/internal/infrastructure/background_task/celery/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/infrastructure/background_task/celery/task/__init__.py b/internal/infrastructure/background_task/celery/task/__init__.py new file mode 100644 index 00000000..e69de29b From 11e47739086744f7d25c25ddbd9dcb149c4b6850 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 01:34:48 +0300 Subject: [PATCH 087/153] feat(infra): add settings for background tasks --- .../background_task/__init__.py | 3 +++ .../background_task/settings.py | 19 +++++++++++++++++++ 2 files changed, 22 insertions(+) create mode 100644 internal/infrastructure/background_task/settings.py diff --git a/internal/infrastructure/background_task/__init__.py b/internal/infrastructure/background_task/__init__.py index e69de29b..7245d6a0 100644 --- a/internal/infrastructure/background_task/__init__.py +++ b/internal/infrastructure/background_task/__init__.py @@ -0,0 +1,3 @@ +from internal.infrastructure.background_task.settings import get_settings + +settings = get_settings() diff --git a/internal/infrastructure/background_task/settings.py b/internal/infrastructure/background_task/settings.py new file mode 100644 index 00000000..180f3b07 --- /dev/null +++ b/internal/infrastructure/background_task/settings.py @@ -0,0 +1,19 @@ + +from dotenv import load_dotenv, find_dotenv +from pydantic import Field, ByteSize +from pydantic_settings import BaseSettings + +load_dotenv(find_dotenv(".env")) + + +class Settings(BaseSettings): + # Celery worker limits + worker_soft_time_limit_in_seconds: int = Field(default=60, gt=0) + worker_hard_time_limit_in_seconds: int = Field(default=120, gt=0) + worker_soft_memory_limit: ByteSize = "2GB" + worker_hard_memory_limit: ByteSize = "4GB" + + +def get_settings(): + # TODO: create different settings based on environment (production, testing, etc.) 
+ return Settings() From 7e3a337ae0c032f7dac70fd97ea6b117adf9415a Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 01:35:20 +0300 Subject: [PATCH 088/153] feat(infra): add celery config --- internal/infrastructure/background_task/celery/config.py | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 internal/infrastructure/background_task/celery/config.py diff --git a/internal/infrastructure/background_task/celery/config.py b/internal/infrastructure/background_task/celery/config.py new file mode 100644 index 00000000..4330b126 --- /dev/null +++ b/internal/infrastructure/background_task/celery/config.py @@ -0,0 +1,6 @@ +broker_connection_retry_on_startup = True +task_serializer = "pickle" +result_serializer = "pickle" +event_serializer = "json" +accept_content = ["application/json", "application/x-python-serialize"] +result_accept_content = ["application/json", "application/x-python-serialize"] From d5c290e2c9ca5c297a55d1b0ef0ed366d507adef Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 01:36:08 +0300 Subject: [PATCH 089/153] feat(infra): add resource intensive celery task class --- .../celery/task/resource_intensive_task.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 internal/infrastructure/background_task/celery/task/resource_intensive_task.py diff --git a/internal/infrastructure/background_task/celery/task/resource_intensive_task.py b/internal/infrastructure/background_task/celery/task/resource_intensive_task.py new file mode 100644 index 00000000..d1d22a30 --- /dev/null +++ b/internal/infrastructure/background_task/celery/task/resource_intensive_task.py @@ -0,0 +1,19 @@ +from celery import Task +import resource + +from internal.infrastructure.background_task import settings + + +class ResourceIntensiveTask(Task): + # There are default Celery time limits, see: https://docs.celeryq.dev/en/stable/userguide/workers.html#time-limits + time_limit = settings.worker_hard_time_limit_in_seconds + 
soft_time_limit = settings.worker_soft_time_limit_in_seconds + + # There are custom memory limits using `resource` module + hard_memory_limit = settings.worker_hard_memory_limit + soft_memory_limit = settings.worker_soft_memory_limit + + def before_start(self, task_id, args, kwargs) -> None: + resource.setrlimit( + resource.RLIMIT_AS, (self.soft_memory_limit, self.hard_memory_limit) + ) From 32af375db45db997b3969c127285b8b693323a68 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 01:49:41 +0300 Subject: [PATCH 090/153] feat(infra): add dependencies for injection in celery tasks --- .../background_task/celery/task/di.py | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 internal/infrastructure/background_task/celery/task/di.py diff --git a/internal/infrastructure/background_task/celery/task/di.py b/internal/infrastructure/background_task/celery/task/di.py new file mode 100644 index 00000000..e2a4ef12 --- /dev/null +++ b/internal/infrastructure/background_task/celery/task/di.py @@ -0,0 +1,44 @@ +from internal.infrastructure.data_storage.relational.postgres import get_postgres_context_maker_without_pool +from internal.repository.flat import FileRepository +from internal.repository.relational.file import DatasetRepository +from internal.repository.relational.task import TaskRepository +from internal.uow import UnitOfWork +from internal.usecase.task.profile_task import ProfileTask +from internal.usecase.task.update_task_info import UpdateTaskInfo + + +def get_file_repo() -> FileRepository: + return FileRepository() + + +def get_dataset_repo() -> DatasetRepository: + return DatasetRepository() + + +def get_task_repo() -> TaskRepository: + return TaskRepository() + + +def get_update_task_info_use_case(): + context_maker = get_postgres_context_maker_without_pool() + + unit_of_work = UnitOfWork(context_maker) + task_repo = get_task_repo() + + return UpdateTaskInfo( + unit_of_work=unit_of_work, + task_repo=task_repo, # type: ignore + ) + 
+def get_profile_task_use_case(): + context_maker = get_postgres_context_maker_without_pool() + + unit_of_work = UnitOfWork(context_maker) + file_repo = get_file_repo() + dataset_repo = get_dataset_repo() + + return ProfileTask( + unit_of_work=unit_of_work, + file_repo=file_repo, + dataset_repo=dataset_repo, # type: ignore + ) From a73b900d78963df0360056b5db51d286b786eaf7 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 01:50:36 +0300 Subject: [PATCH 091/153] feat(infra): add data profiling celery task --- .../background_task/celery/task/__init__.py | 1 + .../celery/task/profiling_task.py | 82 +++++++++++++++++++ 2 files changed, 83 insertions(+) create mode 100644 internal/infrastructure/background_task/celery/task/profiling_task.py diff --git a/internal/infrastructure/background_task/celery/task/__init__.py b/internal/infrastructure/background_task/celery/task/__init__.py index e69de29b..badf1c6b 100644 --- a/internal/infrastructure/background_task/celery/task/__init__.py +++ b/internal/infrastructure/background_task/celery/task/__init__.py @@ -0,0 +1 @@ +from internal.infrastructure.background_task.celery.task.profiling_task import profiling_task diff --git a/internal/infrastructure/background_task/celery/task/profiling_task.py b/internal/infrastructure/background_task/celery/task/profiling_task.py new file mode 100644 index 00000000..2f68ed58 --- /dev/null +++ b/internal/infrastructure/background_task/celery/task/profiling_task.py @@ -0,0 +1,82 @@ +from typing import Any +from uuid import UUID + +from celery.signals import task_failure, task_prerun, task_postrun +from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded, WorkerLostError + +from internal.domain.task.value_objects import OneOfTaskConfig, TaskStatus, OneOfTaskResult, TaskFailureReason +from internal.infrastructure.background_task.celery import worker +from internal.infrastructure.background_task.celery.task.di import (get_profile_task_use_case, + 
get_update_task_info_use_case) +from internal.infrastructure.background_task.celery.task.resource_intensive_task import ResourceIntensiveTask + + +@worker.task(base=ResourceIntensiveTask, ignore_result=True, max_retries=0) +def profiling_task( + task_id: UUID, + dataset_id: UUID, + config: OneOfTaskConfig, +) -> Any: + + profile_task = get_profile_task_use_case() + + result = profile_task(dataset_id=dataset_id, config=config) + return result + + +@task_prerun.connect(sender=profiling_task) +def task_prerun_notifier( + kwargs, + **_, +) -> None: + + update_task_info = get_update_task_info_use_case() + db_task_id: UUID = kwargs["task_id"] + + update_task_info(task_id=db_task_id, task_status=TaskStatus.RUNNING) + + +@task_postrun.connect(sender=profiling_task) +def task_postrun_notifier( + kwargs, + retval: OneOfTaskResult, + **_, +): + + update_task_info = get_update_task_info_use_case() + db_task_id: UUID = kwargs["task_id"] + + update_task_info( + task_id=db_task_id, + task_status=TaskStatus.COMPLETED, + result=retval.model_dump(), + ) + + +@task_failure.connect(sender=profiling_task) +def task_failure_notifier( + kwargs, + exception: Exception, + traceback, + **_, +): + + # TODO: test all possible exceptions + task_failure_reason = TaskFailureReason.OTHER + if isinstance(exception, (TimeLimitExceeded, SoftTimeLimitExceeded)): + task_failure_reason = TaskFailureReason.TIME_LIMIT_EXCEEDED + if isinstance(exception, MemoryError): + task_failure_reason = TaskFailureReason.MEMORY_LIMIT_EXCEEDED + if isinstance(exception, WorkerLostError): + task_failure_reason = TaskFailureReason.WORKER_KILLED_BY_SIGNAL + + update_task_info = get_update_task_info_use_case() + db_task_id: UUID = kwargs["task_id"] + + update_task_info( + task_id=db_task_id, + task_status=TaskStatus.FAILED, + raised_exception_name=exception.__class__.__name__, + failure_reason=task_failure_reason, + traceback=traceback, + ) From 5ba0b87f3867779f2e6725f25a033ba4f3ce05ae Mon Sep 17 00:00:00 2001 From: 
raf-nr Date: Sat, 21 Sep 2024 01:51:16 +0300 Subject: [PATCH 092/153] feat(infra): add the main celery worker --- .../infrastructure/background_task/celery/__init__.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/internal/infrastructure/background_task/celery/__init__.py b/internal/infrastructure/background_task/celery/__init__.py index e69de29b..335202d9 100644 --- a/internal/infrastructure/background_task/celery/__init__.py +++ b/internal/infrastructure/background_task/celery/__init__.py @@ -0,0 +1,11 @@ +from celery import Celery + +from internal.infrastructure.data_storage import settings + +worker = Celery( + __name__, + broker=settings.rabbitmq_dsn.unicode_string(), + include=['internal.infrastructure.background_task.celery.task'], +) + +worker.config_from_object("internal.infrastructure.background_task.celery.config") From 1fd1fbb07f17b6365b5e38c9c5d3f091a6ffee14 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 01:52:43 +0300 Subject: [PATCH 093/153] feat(worker): add data profiling worker implementation (for celery) --- internal/worker/celery/__init__.py | 1 + internal/worker/celery/profiling_task_worker.py | 13 +++++++++++++ 2 files changed, 14 insertions(+) create mode 100644 internal/worker/celery/__init__.py create mode 100644 internal/worker/celery/profiling_task_worker.py diff --git a/internal/worker/celery/__init__.py b/internal/worker/celery/__init__.py new file mode 100644 index 00000000..489bf5e6 --- /dev/null +++ b/internal/worker/celery/__init__.py @@ -0,0 +1 @@ +from internal.worker.celery.profiling_task_worker import ProfilingTaskWorker diff --git a/internal/worker/celery/profiling_task_worker.py b/internal/worker/celery/profiling_task_worker.py new file mode 100644 index 00000000..a33e7951 --- /dev/null +++ b/internal/worker/celery/profiling_task_worker.py @@ -0,0 +1,13 @@ +from internal.dto.worker.task import ProfilingTaskCreateSchema +from internal.infrastructure.background_task.celery.task import 
profiling_task + + +class ProfilingTaskWorker: + + def set(self, task_info: ProfilingTaskCreateSchema) -> None: + + profiling_task.delay( + task_id=task_info.task_id, + dataset_id=task_info.dataset_id, + config=task_info.config, + ) From cefb55456c86db49a1797877653b9319e7ddd9ff Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 01:57:15 +0300 Subject: [PATCH 094/153] feat(rest): init rest module and its submodules --- internal/rest/http/__init__.py | 0 internal/rest/http/common/__init__.py | 0 internal/rest/http/file/__init__.py | 9 +++++++++ internal/rest/http/task/__init__.py | 9 +++++++++ internal/rest/http/user/__init__.py | 3 +++ 5 files changed, 21 insertions(+) create mode 100644 internal/rest/http/__init__.py create mode 100644 internal/rest/http/common/__init__.py create mode 100644 internal/rest/http/file/__init__.py create mode 100644 internal/rest/http/task/__init__.py create mode 100644 internal/rest/http/user/__init__.py diff --git a/internal/rest/http/__init__.py b/internal/rest/http/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/rest/http/common/__init__.py b/internal/rest/http/common/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/rest/http/file/__init__.py b/internal/rest/http/file/__init__.py new file mode 100644 index 00000000..acbae6a4 --- /dev/null +++ b/internal/rest/http/file/__init__.py @@ -0,0 +1,9 @@ +from fastapi import APIRouter + +from internal.rest.file.upload_csv_dataset import router as upload_csv_file_router +from internal.rest.file.retrieve_dataset import router as retrieve_dataset_router + +router = APIRouter(prefix="/file", tags=["file"]) + +router.include_router(upload_csv_file_router) +router.include_router(retrieve_dataset_router) diff --git a/internal/rest/http/task/__init__.py b/internal/rest/http/task/__init__.py new file mode 100644 index 00000000..7ac33091 --- /dev/null +++ b/internal/rest/http/task/__init__.py @@ -0,0 +1,9 @@ +from fastapi import 
APIRouter + +from internal.rest.task.set_task import router as set_task_router +from internal.rest.task.retrieve_task import router as retrieve_task_router + +router = APIRouter(prefix="/task", tags=["task"]) + +router.include_router(set_task_router) +router.include_router(retrieve_task_router) diff --git a/internal/rest/http/user/__init__.py b/internal/rest/http/user/__init__.py new file mode 100644 index 00000000..b7940686 --- /dev/null +++ b/internal/rest/http/user/__init__.py @@ -0,0 +1,3 @@ +from fastapi import APIRouter + +router = APIRouter(prefix="/user", tags=["user"]) From 15b4bfceab14f8b9103d621d5b0ae903f492ea74 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 01:58:23 +0300 Subject: [PATCH 095/153] feat(test): add ping endpoint, common router --- internal/rest/http/common/__init__.py | 6 ++++++ internal/rest/http/common/di.py | 0 internal/rest/http/common/ping.py | 9 +++++++++ 3 files changed, 15 insertions(+) create mode 100644 internal/rest/http/common/di.py create mode 100644 internal/rest/http/common/ping.py diff --git a/internal/rest/http/common/__init__.py b/internal/rest/http/common/__init__.py index e69de29b..98d09641 100644 --- a/internal/rest/http/common/__init__.py +++ b/internal/rest/http/common/__init__.py @@ -0,0 +1,6 @@ +from fastapi import APIRouter + +from internal.rest.http.common.ping import router as ping_router +router = APIRouter(prefix="/common", tags=["common"]) + +router.include_router(ping_router) diff --git a/internal/rest/http/common/di.py b/internal/rest/http/common/di.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/rest/http/common/ping.py b/internal/rest/http/common/ping.py new file mode 100644 index 00000000..855a012f --- /dev/null +++ b/internal/rest/http/common/ping.py @@ -0,0 +1,9 @@ +from typing import Literal + +from fastapi import APIRouter + +router = APIRouter() + +@router.get("/ping") +def ping() -> Literal["Pong!"]: + return "Pong!" 
From a47646faf9ef5afa2f37d9a0447fb3cd09e83656 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 02:01:42 +0300 Subject: [PATCH 096/153] feat(rest): add dependencies for injection in endpoints --- internal/rest/http/di.py | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 internal/rest/http/di.py diff --git a/internal/rest/http/di.py b/internal/rest/http/di.py new file mode 100644 index 00000000..63bb7368 --- /dev/null +++ b/internal/rest/http/di.py @@ -0,0 +1,38 @@ +from fastapi import Depends + +from internal.infrastructure.data_storage.relational.postgres.context import (get_postgres_context_maker, + get_postgres_context_maker_without_pool) +from internal.repository.flat import FileRepository +from internal.repository.relational.file import FileMetaDataRepository, DatasetRepository +from internal.repository.relational.task import TaskRepository +from internal.uow import UnitOfWork + + +def get_unit_of_work( + context_maker = Depends(get_postgres_context_maker) +) -> UnitOfWork: + + return UnitOfWork(context_maker) + + +def get_unit_of_work_without_pool( + context_maker = Depends(get_postgres_context_maker_without_pool) +) -> UnitOfWork: + + return UnitOfWork(context_maker) + + +def get_file_repo() -> FileRepository: + return FileRepository() + + +def get_file_metadata_repo() -> FileMetaDataRepository: + return FileMetaDataRepository() + + +def get_dataset_repo() -> DatasetRepository: + return DatasetRepository() + + +def get_task_repo() -> TaskRepository: + return TaskRepository() From 38f59631f3991a0e26e87360b086ffb949a5a7c6 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 02:02:37 +0300 Subject: [PATCH 097/153] feat(rest): add dependencies for injection in file/dataset endpoints --- internal/rest/http/file/di.py | 45 +++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 internal/rest/http/file/di.py diff --git a/internal/rest/http/file/di.py 
b/internal/rest/http/file/di.py new file mode 100644 index 00000000..2cdd0645 --- /dev/null +++ b/internal/rest/http/file/di.py @@ -0,0 +1,45 @@ +from fastapi import Depends + +from internal.rest.http.di import get_unit_of_work, get_file_repo, get_file_metadata_repo, get_dataset_repo +from internal.uow import UnitOfWork +from internal.usecase.file import SaveFile, SaveDataset, CheckContentType +from internal.usecase.file.retrieve_dataset import RetrieveDataset +from internal.usecase.file.save_dataset import DatasetRepo as SaveDatasetRepo +from internal.usecase.file.retrieve_dataset import DatasetRepo as RetrieveDatasetRepo +from internal.usecase.file.save_file import FileRepo, FileMetadataRepo + + +def get_save_file_use_case( + unit_of_work: UnitOfWork = Depends(get_unit_of_work), + file_repo: FileRepo = Depends(get_file_repo), + file_metadata_repo: FileMetadataRepo = Depends(get_file_metadata_repo), +) -> SaveFile: + return SaveFile( + unit_of_work=unit_of_work, + file_repo=file_repo, + file_metadata_repo=file_metadata_repo, + ) + + +def get_save_dataset_use_case( + unit_of_work: UnitOfWork = Depends(get_unit_of_work), + dataset_repo: SaveDatasetRepo = Depends(get_dataset_repo), +) -> SaveDataset: + return SaveDataset( + unit_of_work=unit_of_work, + dataset_repo=dataset_repo, + ) + + +def get_check_content_type_use_case() -> CheckContentType: + return CheckContentType() + + +def get_retrieve_dataset_use_case( + unit_of_work: UnitOfWork = Depends(get_unit_of_work), + dataset_repo: RetrieveDatasetRepo = Depends(get_dataset_repo), +) -> RetrieveDataset: + return RetrieveDataset( + unit_of_work=unit_of_work, + dataset_repo=dataset_repo, + ) From a917b95836dd9ef8ad638b087170334203f7f859 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 02:05:07 +0300 Subject: [PATCH 098/153] feat(rest): add endpoint for upload csv datasets --- internal/rest/http/file/__init__.py | 4 +-- internal/rest/http/file/upload_csv_dataset.py | 29 +++++++++++++++++++ 2 files changed, 
30 insertions(+), 3 deletions(-) create mode 100644 internal/rest/http/file/upload_csv_dataset.py diff --git a/internal/rest/http/file/__init__.py b/internal/rest/http/file/__init__.py index acbae6a4..7501ccfb 100644 --- a/internal/rest/http/file/__init__.py +++ b/internal/rest/http/file/__init__.py @@ -1,9 +1,7 @@ from fastapi import APIRouter -from internal.rest.file.upload_csv_dataset import router as upload_csv_file_router -from internal.rest.file.retrieve_dataset import router as retrieve_dataset_router +from internal.rest.http.file.upload_csv_dataset import router as upload_csv_file_router router = APIRouter(prefix="/file", tags=["file"]) router.include_router(upload_csv_file_router) -router.include_router(retrieve_dataset_router) diff --git a/internal/rest/http/file/upload_csv_dataset.py b/internal/rest/http/file/upload_csv_dataset.py new file mode 100644 index 00000000..4cf979bf --- /dev/null +++ b/internal/rest/http/file/upload_csv_dataset.py @@ -0,0 +1,29 @@ +from typing import Annotated +from uuid import UUID + +from fastapi import Form, UploadFile, Depends, APIRouter + +from internal.rest.http.file.di import get_save_file_use_case, get_save_dataset_use_case, get_check_content_type_use_case +from internal.usecase.file import SaveFile, SaveDataset, CheckContentType + +router = APIRouter() + +@router.post("/csv") +async def upload_csv_dataset( + file: UploadFile, + separator: Annotated[str, Form()], # ?separator="," + header: Annotated[list[int], Form()], # ?header=0?header=1?header=2, + + check_content_type: CheckContentType = Depends(get_check_content_type_use_case), + save_file: SaveFile = Depends(get_save_file_use_case), + save_dataset: SaveDataset = Depends(get_save_dataset_use_case) +) -> UUID: + + check_content_type(upload_file=file) + save_file_result = await save_file(upload_file=file) + save_dataset_result = save_dataset( + file_id=save_file_result.id, + separator=separator, + header=header, + ) + return save_dataset_result From 
d3bd6a410ebde9907f8943ff874af6d7e84a6568 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 02:06:00 +0300 Subject: [PATCH 099/153] feat(rest): add endpoint for retrieve datasets --- internal/rest/http/file/__init__.py | 2 ++ internal/rest/http/file/retrieve_dataset.py | 33 +++++++++++++++++++++ 2 files changed, 35 insertions(+) create mode 100644 internal/rest/http/file/retrieve_dataset.py diff --git a/internal/rest/http/file/__init__.py b/internal/rest/http/file/__init__.py index 7501ccfb..71a72998 100644 --- a/internal/rest/http/file/__init__.py +++ b/internal/rest/http/file/__init__.py @@ -1,7 +1,9 @@ from fastapi import APIRouter from internal.rest.http.file.upload_csv_dataset import router as upload_csv_file_router +from internal.rest.http.file.retrieve_dataset import router as retrieve_dataset_router router = APIRouter(prefix="/file", tags=["file"]) router.include_router(upload_csv_file_router) +router.include_router(retrieve_dataset_router) diff --git a/internal/rest/http/file/retrieve_dataset.py b/internal/rest/http/file/retrieve_dataset.py new file mode 100644 index 00000000..3500facf --- /dev/null +++ b/internal/rest/http/file/retrieve_dataset.py @@ -0,0 +1,33 @@ +from uuid import UUID + +from fastapi import APIRouter, Depends +from pydantic import BaseModel + +from internal.rest.http.file.di import get_retrieve_dataset_use_case +from internal.usecase.file import RetrieveDataset + +router = APIRouter() + + +class ResponseSchema(BaseModel): + id: UUID + file_id: UUID + separator: str + header: list[int] + + +@router.post("/dataset/{dataset_id}", response_model=ResponseSchema) +def retrieve_dataset( + dataset_id: UUID, + + retrieve_dataset_use_case: RetrieveDataset = Depends(get_retrieve_dataset_use_case) +) -> ResponseSchema: + + dataset = retrieve_dataset_use_case(dataset_id=dataset_id) + + return ResponseSchema( + id=dataset.id, + file_id=dataset.file_id, + separator=dataset.separator, + header=dataset.header, + ) From 
6b31383d87b2ed7be68e29f87d1c4fe4aafb4f69 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 02:08:46 +0300 Subject: [PATCH 100/153] feat(rest): add dependencies for injection in task endpoints --- internal/rest/http/task/di.py | 39 +++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 internal/rest/http/task/di.py diff --git a/internal/rest/http/task/di.py b/internal/rest/http/task/di.py new file mode 100644 index 00000000..2a8b95a4 --- /dev/null +++ b/internal/rest/http/task/di.py @@ -0,0 +1,39 @@ +from fastapi import Depends + +from internal.uow import UnitOfWork +from internal.rest.http.di import get_unit_of_work, get_task_repo, get_dataset_repo +from internal.usecase.task import RetrieveTask, SetTask +from internal.usecase.task.retrieve_task import TaskRepo as RetrieveTaskRepo +from internal.usecase.task.set_task import (TaskRepo as SetTaskRepo, + DatasetRepo as SetDatasetRepo) +from internal.worker.celery import ProfilingTaskWorker + + +def get_profiling_task_worker() -> ProfilingTaskWorker: + return ProfilingTaskWorker() + + +def get_retrieve_task_use_case( + unit_of_work: UnitOfWork = Depends(get_unit_of_work), + task_repo: RetrieveTaskRepo = Depends(get_task_repo), +) -> RetrieveTask: + + return RetrieveTask( + unit_of_work=unit_of_work, + task_repo=task_repo, + ) + + +def get_set_task_use_case( + unit_of_work: UnitOfWork = Depends(get_unit_of_work), + task_repo: SetTaskRepo = Depends(get_task_repo), + dataset_repo: SetDatasetRepo = Depends(get_dataset_repo), + profiling_task_worker: ProfilingTaskWorker = Depends(get_profiling_task_worker), +) -> SetTask: + + return SetTask( + unit_of_work=unit_of_work, + task_repo=task_repo, + dataset_repo=dataset_repo, + profiling_task_worker=profiling_task_worker, + ) From 809a8d0d0b888e070ef6119a2196da56f23697f3 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 02:11:06 +0300 Subject: [PATCH 101/153] feat(rest): add endpoint for task setting --- 
internal/rest/http/task/__init__.py | 4 +--- internal/rest/http/task/set_task.py | 24 ++++++++++++++++++++++++ 2 files changed, 25 insertions(+), 3 deletions(-) create mode 100644 internal/rest/http/task/set_task.py diff --git a/internal/rest/http/task/__init__.py b/internal/rest/http/task/__init__.py index 7ac33091..6958beb5 100644 --- a/internal/rest/http/task/__init__.py +++ b/internal/rest/http/task/__init__.py @@ -1,9 +1,7 @@ from fastapi import APIRouter -from internal.rest.task.set_task import router as set_task_router -from internal.rest.task.retrieve_task import router as retrieve_task_router +from internal.rest.http.task.set_task import router as set_task_router router = APIRouter(prefix="/task", tags=["task"]) router.include_router(set_task_router) -router.include_router(retrieve_task_router) diff --git a/internal/rest/http/task/set_task.py b/internal/rest/http/task/set_task.py new file mode 100644 index 00000000..1f68f1a2 --- /dev/null +++ b/internal/rest/http/task/set_task.py @@ -0,0 +1,24 @@ +from uuid import UUID + +from fastapi import APIRouter, Depends + +from internal.domain.task.value_objects import OneOfTaskConfig +from internal.rest.http.task.di import get_set_task_use_case +from internal.usecase.task import SetTask + +router = APIRouter() + +@router.post("/set") +def set_task( + dataset_id: UUID, + config: OneOfTaskConfig, + + set_task_use_case: SetTask = Depends(get_set_task_use_case) +) -> UUID: + + task_id = set_task_use_case( + dataset_id=dataset_id, + config=config, + ) + + return task_id From f0f2210fb8ab5a53bf84baae19bbf20095c5df08 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 02:11:45 +0300 Subject: [PATCH 102/153] feat(rest): add endpoint for task retrieving --- internal/rest/http/task/__init__.py | 2 ++ internal/rest/http/task/retrieve_task.py | 39 ++++++++++++++++++++++++ 2 files changed, 41 insertions(+) create mode 100644 internal/rest/http/task/retrieve_task.py diff --git a/internal/rest/http/task/__init__.py 
b/internal/rest/http/task/__init__.py index 6958beb5..f81e0f5c 100644 --- a/internal/rest/http/task/__init__.py +++ b/internal/rest/http/task/__init__.py @@ -1,7 +1,9 @@ from fastapi import APIRouter from internal.rest.http.task.set_task import router as set_task_router +from internal.rest.http.task.retrieve_task import router as retrieve_task_router router = APIRouter(prefix="/task", tags=["task"]) router.include_router(set_task_router) +router.include_router(retrieve_task_router) diff --git a/internal/rest/http/task/retrieve_task.py b/internal/rest/http/task/retrieve_task.py new file mode 100644 index 00000000..4ebb5d2a --- /dev/null +++ b/internal/rest/http/task/retrieve_task.py @@ -0,0 +1,39 @@ +from uuid import UUID + +from fastapi import APIRouter, Depends +from pydantic import BaseModel + +from internal.domain.task.value_objects import TaskStatus, OneOfTaskConfig, OneOfTaskResult, TaskFailureReason +from internal.rest.http.task.di import get_retrieve_task_use_case +from internal.usecase.task import RetrieveTask + +router = APIRouter() + + +class ResponseSchema(BaseModel): + status: TaskStatus + config: OneOfTaskConfig + dataset_id: UUID + result: OneOfTaskResult | None + raised_exception_name: str | None + failure_reason: TaskFailureReason | None + traceback: str | None + + +@router.get("/{task_id}") +def retrieve_task( + task_id: UUID, + retrieve_task_use_case: RetrieveTask = Depends(get_retrieve_task_use_case) +) -> ResponseSchema: + + task = retrieve_task_use_case(task_id=task_id) + + return ResponseSchema( + status=task.status, + dataset_id=task.dataset_id, + config=task.config, + result=task.result, + raised_exception_name=task.raised_exception_name, + failure_reason=task.failure_reason, + traceback=task.traceback, + ) From 65c57a1a6d622196af03e44d5465ea26a8ff2db4 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 02:12:09 +0300 Subject: [PATCH 103/153] feat(rest): add di module for user --- internal/rest/http/user/di.py | 0 1 file changed, 0 
insertions(+), 0 deletions(-) create mode 100644 internal/rest/http/user/di.py diff --git a/internal/rest/http/user/di.py b/internal/rest/http/user/di.py new file mode 100644 index 00000000..e69de29b From 7c120a7252bccf1b109c7beaa252ec17f78cb958 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 02:12:43 +0300 Subject: [PATCH 104/153] feat(rest): add main api router --- internal/rest/http/__init__.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/internal/rest/http/__init__.py b/internal/rest/http/__init__.py index e69de29b..600abe99 100644 --- a/internal/rest/http/__init__.py +++ b/internal/rest/http/__init__.py @@ -0,0 +1,13 @@ +from fastapi import APIRouter + +from internal.rest.http.common import router as common_router +from internal.rest.http.file import router as file_router +from internal.rest.http.user import router as user_router +from internal.rest.http.task import router as task_router + +router = APIRouter(prefix="/api") + +router.include_router(common_router) +router.include_router(file_router) +router.include_router(user_router) +router.include_router(task_router) From 7807ba9fb129e82f3b271f00bc7347d6d80437ce Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 02:18:40 +0300 Subject: [PATCH 105/153] feat: add main application (fastapi) object --- internal/__init__.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 internal/__init__.py diff --git a/internal/__init__.py b/internal/__init__.py new file mode 100644 index 00000000..b2b6c864 --- /dev/null +++ b/internal/__init__.py @@ -0,0 +1,14 @@ +from fastapi import FastAPI +from starlette.middleware.cors import CORSMiddleware + +from internal.rest import http + +app = FastAPI() +app.include_router(http.router) +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) From 6209e710896eac5559a8ee00fa27a9db52568eea Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 
Sep 2024 02:19:09 +0300 Subject: [PATCH 106/153] feat: change path to app object and celery worker in makefile --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 1cc8b2a4..b87bce64 100644 --- a/Makefile +++ b/Makefile @@ -40,11 +40,11 @@ pg-downgrade: ## Run celery worker in watch mode worker: - watchmedo auto-restart --directory=./ --pattern='*.py' --recursive -- celery -A app.worker worker --loglevel=info --concurrency=1 + watchmedo auto-restart --directory=./ --pattern='*.py' --recursive -- celery -A internal.infrastructure.background_task.celery worker --loglevel=info --concurrency=1 ## Run application server in watch mode app: - poetry run uvicorn --port 8000 app.main:app --reload + poetry run uvicorn --port 8000 internal:app --reload ## Initiate repository init: From 80eb26493985e1007781aa04ca5d593787712c70 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Fri, 20 Sep 2024 23:30:01 +0000 Subject: [PATCH 107/153] fix(repo): typo in imports --- internal/repository/flat/file.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/internal/repository/flat/file.py b/internal/repository/flat/file.py index 4cb9f40d..7b29268e 100644 --- a/internal/repository/flat/file.py +++ b/internal/repository/flat/file.py @@ -3,7 +3,7 @@ import aiofiles import pandas as pd -from internal.dto.repository.file.file import FailedFileReadingException, CSVFileReadSchema, CSVFileReadResponseSchema +from internal.dto.repository.file.file import FailedFileReadingException, CSVFileFindSchema, CSVFileResponseSchema from internal.infrastructure.data_storage import settings from internal.dto.repository.file import File, FileCreateSchema, FileResponseSchema from internal.uow import DataStorageContext @@ -33,9 +33,9 @@ async def create( def find( self, - file_info: CSVFileReadSchema, + file_info: CSVFileFindSchema, context: DataStorageContext # The current repository 
implementation does not support transactions. - ) -> CSVFileReadResponseSchema: + ) -> CSVFileResponseSchema: path_to_file = Path(self.files_dir_path, str(file_info.file_name)) From 4f269683c2df91fa36fa38e1c69cb66f2b3ff726 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Fri, 20 Sep 2024 23:30:42 +0000 Subject: [PATCH 108/153] fix(tests): remove typo in fixture name --- tests/conftest.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 2646460b..3c231387 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -38,7 +38,7 @@ def postgres_context(postgres_context_maker): @pytest.fixture(autouse=True) -def clean_tables(session): +def clean_tables(postgres_context): for table in reversed(ORMBaseModel.metadata.sorted_tables): - session.execute(table.delete()) - session.commit() + postgres_context.execute(table.delete()) + postgres_context.commit() From f19e99519b46f59e06dcca502e949654d0957831 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Fri, 20 Sep 2024 23:31:47 +0000 Subject: [PATCH 109/153] chore: change name for celery worker in makefile --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index b87bce64..8c111693 100644 --- a/Makefile +++ b/Makefile @@ -39,7 +39,7 @@ pg-downgrade: poetry run alembic -c internal/infrastructure/data_storage/relational/postgres/migrations/alembic.ini downgrade $(args) ## Run celery worker in watch mode -worker: +celery-worker: watchmedo auto-restart --directory=./ --pattern='*.py' --recursive -- celery -A internal.infrastructure.background_task.celery worker --loglevel=info --concurrency=1 ## Run application server in watch mode From 55ad7b625fe41f7bd092dbb4c4b9d5930253ea04 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Sat, 21 Sep 2024 00:05:30 +0000 Subject: [PATCH 
110/153] fix(infra): make the correct traceback processing --- .../background_task/celery/task/profiling_task.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/internal/infrastructure/background_task/celery/task/profiling_task.py b/internal/infrastructure/background_task/celery/task/profiling_task.py index 2f68ed58..e18525c6 100644 --- a/internal/infrastructure/background_task/celery/task/profiling_task.py +++ b/internal/infrastructure/background_task/celery/task/profiling_task.py @@ -1,6 +1,8 @@ from typing import Any from uuid import UUID +import traceback as tb + from celery.signals import task_failure, task_prerun, task_postrun from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded, WorkerLostError @@ -70,6 +72,8 @@ def task_failure_notifier( if isinstance(exception, WorkerLostError): task_failure_reason = TaskFailureReason.WORKER_KILLED_BY_SIGNAL + formatted_traceback = "".join(tb.format_exception(type(exception), exception, exception.__traceback__)) + update_task_info = get_update_task_info_use_case() db_task_id: UUID = kwargs["task_id"] @@ -78,5 +82,5 @@ def task_failure_notifier( task_status=TaskStatus.FAILED, raised_exception_name=exception.__class__.__name__, failure_reason=task_failure_reason, - traceback=traceback, + traceback=formatted_traceback, ) From 35b83063949fa506f0ed9ecc13e439fe01396b4f Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 03:33:15 +0300 Subject: [PATCH 111/153] feat(rest): add use case's exceptions handling --- internal/__init__.py | 2 ++ internal/rest/__init__.py | 0 internal/rest/http/exception.py | 38 +++++++++++++++++++++++++++++++++ 3 files changed, 40 insertions(+) create mode 100644 internal/rest/__init__.py create mode 100644 internal/rest/http/exception.py diff --git a/internal/__init__.py b/internal/__init__.py index b2b6c864..84a018cd 100644 --- a/internal/__init__.py +++ b/internal/__init__.py @@ -2,6 +2,7 @@ from starlette.middleware.cors import CORSMiddleware from 
internal.rest import http +from internal.rest.http.exception import add_exception_handlers app = FastAPI() app.include_router(http.router) @@ -12,3 +13,4 @@ allow_methods=["*"], allow_headers=["*"], ) +add_exception_handlers(app) diff --git a/internal/rest/__init__.py b/internal/rest/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/rest/http/exception.py b/internal/rest/http/exception.py new file mode 100644 index 00000000..5db4ae75 --- /dev/null +++ b/internal/rest/http/exception.py @@ -0,0 +1,38 @@ +from fastapi import FastAPI, Request, HTTPException + +from internal.usecase.file.exception import IncorrectFileFormatException, DatasetNotFoundException, \ + FileMetadataNotFoundException +from internal.usecase.task.exception import TaskNotFoundException + +def add_exception_handlers(app: FastAPI): + + @app.exception_handler(IncorrectFileFormatException) + def incorrect_file_format_exception(request: Request, exc: IncorrectFileFormatException): + raise HTTPException( + status_code=400, + detail=str(exc), + ) + + + @app.exception_handler(DatasetNotFoundException) + def dataset_not_found_exception(request: Request, exc: DatasetNotFoundException): + raise HTTPException( + status_code=404, + detail=str(exc), + ) + + + @app.exception_handler(FileMetadataNotFoundException) + def file_metadata_not_found_exception(request: Request, exc: FileMetadataNotFoundException): + raise HTTPException( + status_code=404, + detail=str(exc), + ) + + + @app.exception_handler(TaskNotFoundException) + def file_metadata_not_found_exception(request: Request, exc: TaskNotFoundException): + raise HTTPException( + status_code=404, + detail=str(exc), + ) From d53540a7d14a97e948c3b73c492101a10081e7ef Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 03:39:15 +0300 Subject: [PATCH 112/153] chore(domain): add code documentation for math function --- internal/domain/task/entities/__init__.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git 
a/internal/domain/task/entities/__init__.py b/internal/domain/task/entities/__init__.py index c524b935..975311fa 100644 --- a/internal/domain/task/entities/__init__.py +++ b/internal/domain/task/entities/__init__.py @@ -6,6 +6,18 @@ def match_task_by_primitive_name(primitive_name: PrimitiveName): + """ + Returns an instance of a task based on the given primitive name. + + Args: + primitive_name (PrimitiveName): The name of the task primitive. + + Returns: + object: An instance of the corresponding task (e.g., `FdTask`, `AfdTask`). + + Raises: + AssertionError: If `primitive_name` does not match known task types. + """ match primitive_name: case PrimitiveName.fd: return FdTask() From bfef666788c00cc27569ef586c983c0daf5f4c74 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 04:32:24 +0300 Subject: [PATCH 113/153] chore: add failed file read exception and its handler --- internal/rest/http/exception.py | 10 +++++++++- internal/usecase/file/exception.py | 17 +++++++++++++++++ internal/usecase/file/save_file.py | 11 +++++++---- 3 files changed, 33 insertions(+), 5 deletions(-) diff --git a/internal/rest/http/exception.py b/internal/rest/http/exception.py index 5db4ae75..3227be54 100644 --- a/internal/rest/http/exception.py +++ b/internal/rest/http/exception.py @@ -1,7 +1,7 @@ from fastapi import FastAPI, Request, HTTPException from internal.usecase.file.exception import IncorrectFileFormatException, DatasetNotFoundException, \ - FileMetadataNotFoundException + FileMetadataNotFoundException, FailedReadFileException from internal.usecase.task.exception import TaskNotFoundException def add_exception_handlers(app: FastAPI): @@ -36,3 +36,11 @@ def file_metadata_not_found_exception(request: Request, exc: TaskNotFoundExcepti status_code=404, detail=str(exc), ) + + + @app.exception_handler(FailedReadFileException) + def failed_read_file_exception(request: Request, exc: FailedReadFileException): + raise HTTPException( + status_code=404, + detail=str(exc), + ) diff 
--git a/internal/usecase/file/exception.py b/internal/usecase/file/exception.py index f17411c0..5fd8470c 100644 --- a/internal/usecase/file/exception.py +++ b/internal/usecase/file/exception.py @@ -44,3 +44,20 @@ def __init__(self): The default message "File metadata not found" is used to indicate the error. """ super().__init__("File metadata not found") + + +class FailedReadFileException(Exception): + """ + Exception raised when a file reading operation fails. + + This exception carries a specific error message detailing the cause of the failure. + """ + + def __init__(self, message: str): + """ + Initializes an instance of FailedReadFileException with a specific error message. + + Args: + message(str): The error message to be reported. + """ + super().__init__(message) diff --git a/internal/usecase/file/save_file.py b/internal/usecase/file/save_file.py index 797e7c09..144ae670 100644 --- a/internal/usecase/file/save_file.py +++ b/internal/usecase/file/save_file.py @@ -4,9 +4,10 @@ from pydantic import BaseModel from internal.domain.file import File as FileEntity -from internal.dto.repository.file import FileCreateSchema, FileResponseSchema, File +from internal.dto.repository.file import FileCreateSchema, FileResponseSchema, File, FailedFileReadingException from internal.dto.repository.file import FileMetadataCreateSchema, FileMetadataResponseSchema from internal.uow import DataStorageContext, UnitOfWork +from internal.usecase.file.exception import FailedReadFileException class FileRepo(Protocol): @@ -61,9 +62,11 @@ async def __call__(self, *, upload_file: File) -> SaveFileUseCaseResult: ) with self.unit_of_work as context: - response = self.file_metadata_repo.create(file_metadata_create_schema, context) - await self.file_repo.create(upload_file, create_file_schema, context) - + try: + response = self.file_metadata_repo.create(file_metadata_create_schema, context) + await self.file_repo.create(upload_file, create_file_schema, context) + except 
FailedFileReadingException as e: + raise FailedReadFileException(str(e)) return SaveFileUseCaseResult( id=response.id, file_name=response.file_name, From c5ba4ca486598d63b4915950abc9e4c9e6c3393a Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 04:43:32 +0300 Subject: [PATCH 114/153] chore: delete old app module --- app/__init__.py | 0 app/api/__init__.py | 9 -- app/api/dataset.py | 49 -------- app/api/ping.py | 9 -- app/api/task.py | 44 ------- app/db/__init__.py | 2 - app/db/migrations/env.py | 80 ------------ app/db/migrations/script.py.mako | 26 ---- .../03c0f0f4b98e_first_empty_migration.py | 28 ----- ...d47fe978_create_file_and_dataset_tables.py | 56 --------- .../versions/7dc9a3441d07_add_task.py | 67 ---------- app/db/session.py | 70 ----------- app/domain/common/README.md | 1 - app/domain/common/optional_model.py | 18 --- app/domain/file/README.md | 1 - app/domain/file/dataset.py | 36 ------ app/domain/file/file.py | 29 ----- app/domain/task/README.md | 1 - app/domain/task/__init__.py | 32 ----- app/domain/task/abstract_task.py | 51 -------- app/domain/task/afd/__init__.py | 36 ------ app/domain/task/afd/algo_name.py | 6 - app/domain/task/afd/config.py | 38 ------ app/domain/task/afd/result.py | 4 - app/domain/task/fd/__init__.py | 64 ---------- app/domain/task/fd/algo_name.py | 14 --- app/domain/task/fd/config.py | 97 --------------- app/domain/task/fd/result.py | 15 --- app/domain/task/primitive_name.py | 13 -- app/domain/task/task.py | 60 --------- app/domain/user/README.md | 1 - app/domain/worker/README.md | 1 - app/domain/worker/task/__init__.py | 1 - app/domain/worker/task/data_profiling_task.py | 90 -------------- .../worker/task/resource_intensive_task.py | 18 --- app/main.py | 14 --- app/settings/__init__.py | 3 - app/settings/alembic.ini | 116 ------------------ app/settings/celery_config.py | 6 - app/settings/settings.py | 55 --------- app/worker.py | 10 -- tests/usecase/test_save_file.py | 30 +++++ 42 files changed, 30 insertions(+), 
1271 deletions(-) delete mode 100644 app/__init__.py delete mode 100644 app/api/__init__.py delete mode 100644 app/api/dataset.py delete mode 100644 app/api/ping.py delete mode 100644 app/api/task.py delete mode 100644 app/db/__init__.py delete mode 100644 app/db/migrations/env.py delete mode 100644 app/db/migrations/script.py.mako delete mode 100644 app/db/migrations/versions/03c0f0f4b98e_first_empty_migration.py delete mode 100644 app/db/migrations/versions/6a59d47fe978_create_file_and_dataset_tables.py delete mode 100644 app/db/migrations/versions/7dc9a3441d07_add_task.py delete mode 100644 app/db/session.py delete mode 100644 app/domain/common/README.md delete mode 100644 app/domain/common/optional_model.py delete mode 100644 app/domain/file/README.md delete mode 100644 app/domain/file/dataset.py delete mode 100644 app/domain/file/file.py delete mode 100644 app/domain/task/README.md delete mode 100644 app/domain/task/__init__.py delete mode 100644 app/domain/task/abstract_task.py delete mode 100644 app/domain/task/afd/__init__.py delete mode 100644 app/domain/task/afd/algo_name.py delete mode 100644 app/domain/task/afd/config.py delete mode 100644 app/domain/task/afd/result.py delete mode 100644 app/domain/task/fd/__init__.py delete mode 100644 app/domain/task/fd/algo_name.py delete mode 100644 app/domain/task/fd/config.py delete mode 100644 app/domain/task/fd/result.py delete mode 100644 app/domain/task/primitive_name.py delete mode 100644 app/domain/task/task.py delete mode 100644 app/domain/user/README.md delete mode 100644 app/domain/worker/README.md delete mode 100644 app/domain/worker/task/__init__.py delete mode 100644 app/domain/worker/task/data_profiling_task.py delete mode 100644 app/domain/worker/task/resource_intensive_task.py delete mode 100644 app/main.py delete mode 100644 app/settings/__init__.py delete mode 100644 app/settings/alembic.ini delete mode 100644 app/settings/celery_config.py delete mode 100644 app/settings/settings.py delete mode 
100644 app/worker.py diff --git a/app/__init__.py b/app/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/app/api/__init__.py b/app/api/__init__.py deleted file mode 100644 index fffb2610..00000000 --- a/app/api/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from fastapi import APIRouter -from .ping import router as ping_router -from .task import router as task_router -from .dataset import router as dataset_router - -router = APIRouter(prefix="/api") -router.include_router(ping_router) -router.include_router(task_router) -router.include_router(dataset_router) diff --git a/app/api/dataset.py b/app/api/dataset.py deleted file mode 100644 index 389be883..00000000 --- a/app/api/dataset.py +++ /dev/null @@ -1,49 +0,0 @@ -from pathlib import Path -from typing import Annotated -from uuid import UUID, uuid4 -import aiofiles -from fastapi import APIRouter, Form, HTTPException, UploadFile - -from app.domain.file.dataset import DatasetORM -from app.domain.file.file import FileORM -from app.settings import settings - -router = APIRouter(prefix="/dataset") - - -async def save_file(in_file: UploadFile, out_file_path: Path): - CHUNK_SIZE = 1024 - try: - async with aiofiles.open(out_file_path, "wb") as out_file: - while content := await in_file.read(CHUNK_SIZE): - await out_file.write(content) - except Exception: - raise HTTPException(status_code=400, detail="Failed to save file") - - -@router.post("/csv") -async def upload_csv_dataset( - file: UploadFile, - separator: Annotated[str, Form()], # ?separator="," - header: Annotated[list[int], Form()], # ?header=0?header=1?header=2 -) -> UUID: - if file.content_type != "text/csv": # TODO: replace with actual validation - raise HTTPException(status_code=400, detail="File is not CSV") - - file_name = uuid4() - path_to_file = Path.joinpath(settings.uploaded_files_dir_path, str(file_name)) - await save_file(file, path_to_file) - - file_orm = FileORM.create( - mime_type=file.content_type, - file_name=file_name, - 
original_file_name=file.filename, - ) - - dataset_orm = DatasetORM.create( - separator=separator, - header=header, - file=file_orm, - ) - - return dataset_orm.id # type: ignore diff --git a/app/api/ping.py b/app/api/ping.py deleted file mode 100644 index 53be9b57..00000000 --- a/app/api/ping.py +++ /dev/null @@ -1,9 +0,0 @@ -from typing import Literal -from fastapi import APIRouter - -router = APIRouter() - - -@router.get("/ping") -def ping() -> Literal["Pong!"]: - return "Pong!" diff --git a/app/api/task.py b/app/api/task.py deleted file mode 100644 index c1bc5845..00000000 --- a/app/api/task.py +++ /dev/null @@ -1,44 +0,0 @@ -from uuid import UUID -from fastapi import APIRouter, HTTPException -from app.domain.file.dataset import DatasetORM -from app.domain.task.task import TaskModel, TaskORM, TaskStatus -from app.domain.worker.task.data_profiling_task import data_profiling_task -from app.domain.task import OneOfTaskConfig -from sqlalchemy_mixins.activerecord import ModelNotFoundError - -router = APIRouter(prefix="/task") - - -@router.post("") -def set_task( - dataset_id: UUID, - config: OneOfTaskConfig, -) -> UUID: - try: - DatasetORM.find_or_fail(dataset_id) - except ModelNotFoundError: - raise HTTPException(404, "Dataset not found") - - task_orm = TaskORM.create( - status=TaskStatus.CREATED, - config=config.model_dump(exclude_unset=True), - dataset_id=dataset_id, - ) - task_id = task_orm.id # type: ignore - - data_profiling_task.delay( - task_id=task_id, - dataset_id=dataset_id, - config=config, - ) - - return task_id - - -@router.get("/{task_id}", response_model_exclude_unset=True) -def retrieve_task(task_id: UUID) -> TaskModel: - try: - task_orm = TaskORM.find_or_fail(task_id) - return TaskModel.model_validate(task_orm) - except ModelNotFoundError: - raise HTTPException(404, "Task not found") diff --git a/app/db/__init__.py b/app/db/__init__.py deleted file mode 100644 index bf234495..00000000 --- a/app/db/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from 
.session import ORMBase as ORMBase -from .session import ORMBaseModel as ORMBaseModel diff --git a/app/db/migrations/env.py b/app/db/migrations/env.py deleted file mode 100644 index 6ad7bb57..00000000 --- a/app/db/migrations/env.py +++ /dev/null @@ -1,80 +0,0 @@ -from logging.config import fileConfig -from sqlalchemy import engine_from_config, pool -from alembic import context -from app.settings import settings -from app.db import ORMBase -from app.domain.file.file import FileORM # noqa: F401 -from app.domain.file.dataset import DatasetORM # noqa: F401 -from app.domain.task.task import TaskORM # noqa: F401 - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -if config.config_file_name is not None: - fileConfig(config.config_file_name) - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -target_metadata = ORMBase.metadata -section = config.config_ini_section -config.set_section_option(section, "db_url", settings.postgres_dsn.unicode_string()) - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def run_migrations_offline() -> None: - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. 
- - """ - url = config.get_main_option("sqlalchemy.url") - context.configure( - url=url, - target_metadata=target_metadata, - literal_binds=True, - dialect_opts={"paramstyle": "named"}, - ) - - with context.begin_transaction(): - context.run_migrations() - - -def run_migrations_online() -> None: - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - connectable = engine_from_config( - config.get_section(config.config_ini_section, {}), - prefix="sqlalchemy.", - poolclass=pool.NullPool, - ) - - with connectable.connect() as connection: - context.configure(connection=connection, target_metadata=target_metadata) - - with context.begin_transaction(): - context.run_migrations() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/app/db/migrations/script.py.mako b/app/db/migrations/script.py.mako deleted file mode 100644 index fbc4b07d..00000000 --- a/app/db/migrations/script.py.mako +++ /dev/null @@ -1,26 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. 
-revision: str = ${repr(up_revision)} -down_revision: Union[str, None] = ${repr(down_revision)} -branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} -depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} - - -def upgrade() -> None: - ${upgrades if upgrades else "pass"} - - -def downgrade() -> None: - ${downgrades if downgrades else "pass"} diff --git a/app/db/migrations/versions/03c0f0f4b98e_first_empty_migration.py b/app/db/migrations/versions/03c0f0f4b98e_first_empty_migration.py deleted file mode 100644 index 214b9690..00000000 --- a/app/db/migrations/versions/03c0f0f4b98e_first_empty_migration.py +++ /dev/null @@ -1,28 +0,0 @@ -"""First empty migration - -Revision ID: 03c0f0f4b98e -Revises: -Create Date: 2023-12-22 15:55:23.335330 - -""" - -from typing import Sequence, Union - - -# revision identifiers, used by Alembic. -revision: str = "03c0f0f4b98e" -down_revision: Union[str, None] = None -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - pass - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - pass - # ### end Alembic commands ### diff --git a/app/db/migrations/versions/6a59d47fe978_create_file_and_dataset_tables.py b/app/db/migrations/versions/6a59d47fe978_create_file_and_dataset_tables.py deleted file mode 100644 index 9521119f..00000000 --- a/app/db/migrations/versions/6a59d47fe978_create_file_and_dataset_tables.py +++ /dev/null @@ -1,56 +0,0 @@ -"""create file and dataset tables - -Revision ID: 6a59d47fe978 -Revises: 03c0f0f4b98e -Create Date: 2024-04-02 01:54:21.955372 - -""" - -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. 
-revision: str = "6a59d47fe978" -down_revision: Union[str, None] = "03c0f0f4b98e" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "file", - sa.Column("id", sa.Uuid(), nullable=False), - sa.Column("mime_type", sa.String(), nullable=False), - sa.Column("file_name", sa.Uuid(), nullable=False), - sa.Column("original_file_name", sa.String(), nullable=False), - sa.Column("created_at", sa.TIMESTAMP(), nullable=False), - sa.Column("updated_at", sa.TIMESTAMP(), nullable=False), - sa.PrimaryKeyConstraint("id"), - ) - op.create_table( - "dataset", - sa.Column("id", sa.Uuid(), nullable=False), - sa.Column("is_built_in", sa.Boolean(), nullable=False), - sa.Column("header", postgresql.ARRAY(sa.Integer()), nullable=False), - sa.Column("separator", sa.String(), nullable=False), - sa.Column("file_id", sa.Uuid(), nullable=False), - sa.Column("created_at", sa.TIMESTAMP(), nullable=False), - sa.Column("updated_at", sa.TIMESTAMP(), nullable=False), - sa.ForeignKeyConstraint( - ["file_id"], - ["file.id"], - ), - sa.PrimaryKeyConstraint("id"), - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table("dataset") - op.drop_table("file") - # ### end Alembic commands ### diff --git a/app/db/migrations/versions/7dc9a3441d07_add_task.py b/app/db/migrations/versions/7dc9a3441d07_add_task.py deleted file mode 100644 index 8ff511e7..00000000 --- a/app/db/migrations/versions/7dc9a3441d07_add_task.py +++ /dev/null @@ -1,67 +0,0 @@ -"""add task - -Revision ID: 7dc9a3441d07 -Revises: 6a59d47fe978 -Create Date: 2024-04-02 04:04:09.759025 - -""" - -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. 
-revision: str = "7dc9a3441d07" -down_revision: Union[str, None] = "6a59d47fe978" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "task", - sa.Column("id", sa.Uuid(), nullable=False), - sa.Column( - "status", - sa.Enum("FAILED", "CREATED", "RUNNING", "COMPLETED", name="taskstatus"), - nullable=False, - ), - sa.Column("config", postgresql.JSONB(astext_type=sa.Text()), nullable=False), - sa.Column("result", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("dataset_id", sa.Uuid(), nullable=False), - sa.Column("raised_exception_name", sa.String(), nullable=True), - sa.Column( - "failure_reason", - sa.Enum( - "MEMORY_LIMIT_EXCEEDED", - "TIME_LIMIT_EXCEEDED", - "WORKER_KILLED_BY_SIGNAL", - "OTHER", - name="taskfailurereason", - ), - nullable=True, - ), - sa.Column("traceback", sa.String(), nullable=True), - sa.Column("created_at", sa.TIMESTAMP(), nullable=False), - sa.Column("updated_at", sa.TIMESTAMP(), nullable=False), - sa.ForeignKeyConstraint( - ["dataset_id"], - ["dataset.id"], - ), - sa.PrimaryKeyConstraint("id"), - ) - # ### end Alembic commands ### - - -# ADJUSTED! 
See: https://github.com/sqlalchemy/alembic/issues/278#issuecomment-907283386 -def downgrade() -> None: - op.drop_table("task") - - taskfailurereason = sa.Enum(name="taskfailurereason") - taskfailurereason.drop(op.get_bind(), checkfirst=True) - - taskstatus = sa.Enum(name="taskstatus") - taskstatus.drop(op.get_bind(), checkfirst=True) diff --git a/app/db/session.py b/app/db/session.py deleted file mode 100644 index a7389d8c..00000000 --- a/app/db/session.py +++ /dev/null @@ -1,70 +0,0 @@ -from contextlib import contextmanager -import datetime -from typing import Generator -from pydantic import BaseModel, ConfigDict -from sqlalchemy.orm import Session, sessionmaker -from sqlalchemy import create_engine -from app.settings import settings -from sqlalchemy.pool import NullPool -from sqlalchemy_mixins import AllFeaturesMixin -from sqlalchemy_mixins.timestamp import TimestampsMixin -from sqlalchemy_mixins.eagerload import EagerLoadMixin -from sqlalchemy.orm import DeclarativeBase - - -class ORMBase(AllFeaturesMixin, TimestampsMixin, EagerLoadMixin, DeclarativeBase): - __abstract__ = True - - -class ORMBaseModel(BaseModel): - model_config = ConfigDict(from_attributes=True) - - created_at: datetime.datetime - updated_at: datetime.datetime - - -default_engine = create_engine(url=settings.postgres_dsn.unicode_string()) -engine_without_pool = create_engine( - url=settings.postgres_dsn.unicode_string(), - poolclass=NullPool, -) - -SessionLocal = sessionmaker(bind=default_engine) -SessionLocalWithoutPool = sessionmaker(bind=engine_without_pool) - - -@contextmanager -def get_session(with_pool=True) -> Generator[Session, None, None]: - """ - Returns a generator that yields a session object for database operations. - - Parameters: - with_pool (bool): A flag to determine if the session uses a connection pool. - Set to False when used in a Celery task. Defaults to True. 
- """ - maker = SessionLocal if with_pool else SessionLocalWithoutPool - with maker() as session: - yield session - - -# Default session with pooling -with get_session() as session: - ORMBase.set_session(session) - - -@contextmanager -def no_pooling() -> Generator[None, None, None]: - """ - Operations without pooling in contextmanager scoupe - Example: - - # operations with pooling - with no_pooling(): - # operations WITHOUT pooling - # operations with pooling - """ - old_session = ORMBase.session - with get_session(with_pool=False) as session: - ORMBase.set_session(session) - yield - ORMBase.set_session(old_session) diff --git a/app/domain/common/README.md b/app/domain/common/README.md deleted file mode 100644 index 02d09dbc..00000000 --- a/app/domain/common/README.md +++ /dev/null @@ -1 +0,0 @@ -Тут просто общие для всего проекта вещи. Например, макисмально базовые классы, которые не отнятся напрямую к бизнес-логике и другим доменам. diff --git a/app/domain/common/optional_model.py b/app/domain/common/optional_model.py deleted file mode 100644 index 39965fe3..00000000 --- a/app/domain/common/optional_model.py +++ /dev/null @@ -1,18 +0,0 @@ -from __future__ import annotations -from pydantic import BaseModel -from typing import Any - - -class OptionalModel(BaseModel): - __non_optional_fields__ = set() - - @classmethod - def __pydantic_init_subclass__(cls, **kwargs: Any) -> None: - super().__pydantic_init_subclass__(**kwargs) - - for field in cls.model_fields.values(): - if field in cls.__non_optional_fields__: - continue - field.default = None - - cls.model_rebuild(force=True) diff --git a/app/domain/file/README.md b/app/domain/file/README.md deleted file mode 100644 index de7d6991..00000000 --- a/app/domain/file/README.md +++ /dev/null @@ -1 +0,0 @@ -Это доменная область отвечет за обработку (загрузку, удаление, хранение и т.п) файлов, включая датасеты для задач домена task. 
diff --git a/app/domain/file/dataset.py b/app/domain/file/dataset.py deleted file mode 100644 index 42e7bb27..00000000 --- a/app/domain/file/dataset.py +++ /dev/null @@ -1,36 +0,0 @@ -from uuid import UUID, uuid4 -from sqlalchemy import ForeignKey, Integer -from sqlalchemy.orm import relationship, Mapped, mapped_column -from sqlalchemy.dialects.postgresql import ARRAY -from app.db import ORMBase -from app.db.session import ORMBaseModel -from app.domain.file.file import FileModel, FileORM -import typing - -if typing.TYPE_CHECKING: - from app.domain.task.task import TaskORM - - -class DatasetORM(ORMBase): - __tablename__ = "dataset" - id: Mapped[UUID] = mapped_column(primary_key=True, default=uuid4) - - is_built_in: Mapped[bool] = mapped_column(default=False) - header: Mapped[list[int]] = mapped_column(ARRAY(Integer), default=[]) - separator: Mapped[str] - file_id: Mapped[UUID] = mapped_column(ForeignKey("file.id"), nullable=False) - file: Mapped[FileORM] = relationship("FileORM") - - related_tasks: Mapped[list["TaskORM"]] = relationship( - "TaskORM", back_populates="dataset" - ) - - # owner = relationship("UserORM") - - -class DatasetModel(ORMBaseModel): - id: UUID - is_built_in: bool - separator: str - header: list[int] - file: FileModel diff --git a/app/domain/file/file.py b/app/domain/file/file.py deleted file mode 100644 index 60b946d1..00000000 --- a/app/domain/file/file.py +++ /dev/null @@ -1,29 +0,0 @@ -from pathlib import Path -from uuid import UUID, uuid4 -from sqlalchemy.orm import Mapped, mapped_column -from app.db import ORMBase -from app.db.session import ORMBaseModel -from sqlalchemy.ext.hybrid import hybrid_property - -from app.settings import settings - - -class FileORM(ORMBase): - __tablename__ = "file" - id: Mapped[UUID] = mapped_column(primary_key=True, default=uuid4) - - mime_type: Mapped[str] - file_name: Mapped[UUID] - original_file_name: Mapped[str] - - @hybrid_property - def path_to_file(self) -> Path: - return 
Path(settings.uploaded_files_dir_path, str(self.file_name)) - - -class FileModel(ORMBaseModel): - id: UUID - - mime_type: str - file_name: UUID - original_file_name: str diff --git a/app/domain/task/README.md b/app/domain/task/README.md deleted file mode 100644 index 75e1967d..00000000 --- a/app/domain/task/README.md +++ /dev/null @@ -1 +0,0 @@ -Это доменная область, отвечающая за создание задач в терминах desbordante. То есть задач поиска и проверки функциональных или иных зависимостей по данным. diff --git a/app/domain/task/__init__.py b/app/domain/task/__init__.py deleted file mode 100644 index d0f4aa6b..00000000 --- a/app/domain/task/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -from app.domain.task.afd import AfdTask, AfdTaskConfig, AfdTaskResult -from app.domain.task.fd import FdTaskConfig, FdTaskResult -from typing import Annotated, Union, assert_never -from pydantic import Field -from app.domain.task.fd import FdTask -from app.domain.task.primitive_name import PrimitiveName - - -OneOfTaskConfig = Annotated[ - Union[ - FdTaskConfig, - AfdTaskConfig, - ], - Field(discriminator="primitive_name"), -] - -OneOfTaskResult = Annotated[ - Union[ - FdTaskResult, - AfdTaskResult, - ], - Field(discriminator="primitive_name"), -] - - -def match_task_by_primitive_name(primitive_name: PrimitiveName): - match primitive_name: - case PrimitiveName.fd: - return FdTask() - case PrimitiveName.afd: - return AfdTask() - assert_never(primitive_name) diff --git a/app/domain/task/abstract_task.py b/app/domain/task/abstract_task.py deleted file mode 100644 index df523df3..00000000 --- a/app/domain/task/abstract_task.py +++ /dev/null @@ -1,51 +0,0 @@ -from abc import ABC, abstractmethod -from enum import StrEnum -from typing import Any, Protocol -import desbordante -import pandas -from pydantic import BaseModel - - -class AlgoConfig(Protocol): - @property - def algo_name(self) -> StrEnum: ... 
- - # forces to use pydantic classes there - model_dump = BaseModel.model_dump - - -class TaskConfig(Protocol): - @property - def primitive_name(self) -> StrEnum: ... - - @property - def config(self) -> AlgoConfig: ... - - # forces to use pydantic classes there - model_dump = BaseModel.model_dump - - -class TaskResult(Protocol): - @property - def primitive_name(self) -> StrEnum: ... - - result: Any - - # forces to use pydantic classes there - model_dump = BaseModel.model_dump - - -class Task[C: TaskConfig, R: TaskResult](ABC): - @abstractmethod - def match_algo_by_name(self, algo_name) -> desbordante.Algorithm: ... - - @abstractmethod - def collect_result(self, algo) -> R: ... - - def execute(self, table: pandas.DataFrame, task_config: C) -> R: - algo_config = task_config.config - options = algo_config.model_dump(exclude_unset=True, exclude={"algo_name"}) - algo = self.match_algo_by_name(algo_config.algo_name) - algo.load_data(table=table) - algo.execute(**options) - return self.collect_result(algo) diff --git a/app/domain/task/afd/__init__.py b/app/domain/task/afd/__init__.py deleted file mode 100644 index 394ce317..00000000 --- a/app/domain/task/afd/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -from typing import Literal, assert_never -from pydantic import BaseModel -from desbordante.fd import FdAlgorithm # This is not a typo -from app.domain.task.abstract_task import Task -from app.domain.task.primitive_name import PrimitiveName -from .config import OneOfAfdConfig -from .result import AfdAlgoResult, FdModel -from .algo_name import AfdAlgoName -from desbordante.afd.algorithms import Pyro, Tane - - -class BaseAfdTaskModel(BaseModel): - primitive_name: Literal[PrimitiveName.afd] - - -class AfdTaskConfig(BaseAfdTaskModel): - config: OneOfAfdConfig - - -class AfdTaskResult(BaseAfdTaskModel): - result: AfdAlgoResult - - -class AfdTask(Task[AfdTaskConfig, AfdTaskResult]): - def collect_result(self, algo: FdAlgorithm) -> AfdTaskResult: - fds = algo.get_fds() - algo_result 
= AfdAlgoResult(fds=list(map(FdModel.from_fd, fds))) - return AfdTaskResult(primitive_name=PrimitiveName.afd, result=algo_result) - - def match_algo_by_name(self, algo_name: AfdAlgoName) -> FdAlgorithm: - match algo_name: - case AfdAlgoName.Pyro: - return Pyro() - case AfdAlgoName.Tane: - return Tane() - assert_never(algo_name) diff --git a/app/domain/task/afd/algo_name.py b/app/domain/task/afd/algo_name.py deleted file mode 100644 index 5a55ccb6..00000000 --- a/app/domain/task/afd/algo_name.py +++ /dev/null @@ -1,6 +0,0 @@ -from enum import StrEnum, auto - - -class AfdAlgoName(StrEnum): - Pyro = auto() - Tane = auto() diff --git a/app/domain/task/afd/config.py b/app/domain/task/afd/config.py deleted file mode 100644 index 5ced5662..00000000 --- a/app/domain/task/afd/config.py +++ /dev/null @@ -1,38 +0,0 @@ -from typing import Annotated, Literal, Union -from pydantic import Field - -from app.domain.common.optional_model import OptionalModel -from app.domain.task.afd.algo_name import AfdAlgoName - - -class BaseAfdConfig(OptionalModel): - __non_optional_fields__ = { - "algo_name", - } - - -class PyroConfig(BaseAfdConfig): - algo_name: Literal[AfdAlgoName.Pyro] - - is_null_equal_null: bool - error: Annotated[float, Field(ge=0, le=1)] - max_lhs: Annotated[int, Field(ge=1, le=10)] - threads: Annotated[int, Field(ge=1, le=8)] - seed: int - - -class TaneConfig(BaseAfdConfig): - algo_name: Literal[AfdAlgoName.Tane] - - is_null_equal_null: bool - error: Annotated[float, Field(ge=0, le=1)] - max_lhs: Annotated[int, Field(ge=1, le=10)] - - -OneOfAfdConfig = Annotated[ - Union[ - PyroConfig, - TaneConfig, - ], - Field(discriminator="algo_name"), -] diff --git a/app/domain/task/afd/result.py b/app/domain/task/afd/result.py deleted file mode 100644 index ce59ecf9..00000000 --- a/app/domain/task/afd/result.py +++ /dev/null @@ -1,4 +0,0 @@ -from app.domain.task.fd.result import FdAlgoResult, FdModel - -AfdAlgoResult = FdAlgoResult -FdModel = FdModel diff --git 
a/app/domain/task/fd/__init__.py b/app/domain/task/fd/__init__.py deleted file mode 100644 index efe776bb..00000000 --- a/app/domain/task/fd/__init__.py +++ /dev/null @@ -1,64 +0,0 @@ -from typing import Literal -from pydantic import BaseModel -from app.domain.task.abstract_task import Task -from typing import assert_never -from desbordante.fd import FdAlgorithm -from desbordante.fd.algorithms import ( - Aid, - DFD, - Depminer, - FDep, - FUN, - FastFDs, - FdMine, - HyFD, - Pyro, - Tane, -) -from app.domain.task.fd.algo_name import FdAlgoName -from app.domain.task.primitive_name import PrimitiveName -from .config import OneOfFdAlgoConfig -from .result import FdAlgoResult, FdModel - - -class BaseFdTaskModel(BaseModel): - primitive_name: Literal[PrimitiveName.fd] - - -class FdTaskConfig(BaseFdTaskModel): - config: OneOfFdAlgoConfig - - -class FdTaskResult(BaseFdTaskModel): - result: FdAlgoResult - - -class FdTask(Task[FdTaskConfig, FdTaskResult]): - def collect_result(self, algo: FdAlgorithm) -> FdTaskResult: - fds = algo.get_fds() - algo_result = FdAlgoResult(fds=list(map(FdModel.from_fd, fds))) - return FdTaskResult(primitive_name=PrimitiveName.fd, result=algo_result) - - def match_algo_by_name(self, algo_name: FdAlgoName) -> FdAlgorithm: - match algo_name: - case FdAlgoName.Aid: - return Aid() - case FdAlgoName.DFD: - return DFD() - case FdAlgoName.Depminer: - return Depminer() - case FdAlgoName.FDep: - return FDep() - case FdAlgoName.FUN: - return FUN() - case FdAlgoName.FastFDs: - return FastFDs() - case FdAlgoName.FdMine: - return FdMine() - case FdAlgoName.HyFD: - return HyFD() - case FdAlgoName.Pyro: - return Pyro() - case FdAlgoName.Tane: - return Tane() - assert_never(algo_name) diff --git a/app/domain/task/fd/algo_name.py b/app/domain/task/fd/algo_name.py deleted file mode 100644 index 93514588..00000000 --- a/app/domain/task/fd/algo_name.py +++ /dev/null @@ -1,14 +0,0 @@ -from enum import StrEnum, auto - - -class FdAlgoName(StrEnum): - Aid = auto() - DFD = 
auto() - Depminer = auto() - FDep = auto() - FUN = auto() - FastFDs = auto() - FdMine = auto() - HyFD = auto() - Pyro = auto() - Tane = auto() diff --git a/app/domain/task/fd/config.py b/app/domain/task/fd/config.py deleted file mode 100644 index ab3f305d..00000000 --- a/app/domain/task/fd/config.py +++ /dev/null @@ -1,97 +0,0 @@ -from pydantic import Field -from typing import Annotated, Literal, Union - -from app.domain.common.optional_model import OptionalModel -from app.domain.task.fd.algo_name import FdAlgoName - - -class BaseFdConfig(OptionalModel): - __non_optional_fields__ = { - "algo_name", - } - - -class AidConfig(BaseFdConfig): - algo_name: Literal[FdAlgoName.Aid] - - is_null_equal_null: bool - - -class DFDConfig(BaseFdConfig): - algo_name: Literal[FdAlgoName.DFD] - - is_null_equal_null: bool - threads: Annotated[int, Field(ge=1, le=8)] - - -class DepminerConfig(BaseFdConfig): - algo_name: Literal[FdAlgoName.Depminer] - - is_null_equal_null: bool - - -class FDepConfig(BaseFdConfig): - algo_name: Literal[FdAlgoName.FDep] - - is_null_equal_null: bool - - -class FUNConfig(BaseFdConfig): - algo_name: Literal[FdAlgoName.FUN] - - is_null_equal_null: bool - - -class FastFDsConfig(BaseFdConfig): - algo_name: Literal[FdAlgoName.FastFDs] - - is_null_equal_null: bool - max_lhs: Annotated[int, Field(ge=1, le=10)] - threads: Annotated[int, Field(ge=1, le=8)] - - -class FdMineConfig(BaseFdConfig): - algo_name: Literal[FdAlgoName.FdMine] - - is_null_equal_null: bool - - -class HyFDConfig(BaseFdConfig): - algo_name: Literal[FdAlgoName.HyFD] - - is_null_equal_null: bool - - -class PyroConfig(BaseFdConfig): - algo_name: Literal[FdAlgoName.Pyro] - - is_null_equal_null: bool - error: Annotated[float, Field(ge=0, le=1)] - max_lhs: Annotated[int, Field(ge=1, le=10)] - threads: Annotated[int, Field(ge=1, le=8)] - seed: int - - -class TaneConfig(BaseFdConfig): - algo_name: Literal[FdAlgoName.Tane] - - is_null_equal_null: bool - error: Annotated[float, Field(ge=0, le=1)] - 
max_lhs: Annotated[int, Field(ge=1, le=10)] - - -OneOfFdAlgoConfig = Annotated[ - Union[ - AidConfig, - DFDConfig, - DepminerConfig, - FDepConfig, - FUNConfig, - FastFDsConfig, - FdMineConfig, - HyFDConfig, - PyroConfig, - TaneConfig, - ], - Field(discriminator="algo_name"), -] diff --git a/app/domain/task/fd/result.py b/app/domain/task/fd/result.py deleted file mode 100644 index d38d2f0e..00000000 --- a/app/domain/task/fd/result.py +++ /dev/null @@ -1,15 +0,0 @@ -from pydantic import BaseModel -from desbordante.fd import FD - - -class FdModel(BaseModel): - @classmethod - def from_fd(cls, fd: FD): - return cls(lhs_indices=fd.lhs_indices, rhs_index=fd.rhs_index) - - lhs_indices: list[int] - rhs_index: int - - -class FdAlgoResult(BaseModel): - fds: list[FdModel] diff --git a/app/domain/task/primitive_name.py b/app/domain/task/primitive_name.py deleted file mode 100644 index 1959867a..00000000 --- a/app/domain/task/primitive_name.py +++ /dev/null @@ -1,13 +0,0 @@ -from enum import StrEnum, auto - - -class PrimitiveName(StrEnum): - fd = auto() - afd = auto() - # ar = auto() - # ac = auto() - # fd_verification = auto() - # mfd_verification = auto() - # statistics = auto() - # ucc = auto() - # ucc_verification = auto() diff --git a/app/domain/task/task.py b/app/domain/task/task.py deleted file mode 100644 index 09010069..00000000 --- a/app/domain/task/task.py +++ /dev/null @@ -1,60 +0,0 @@ -from enum import StrEnum, auto -import typing -from uuid import UUID, uuid4 -from sqlalchemy.orm import Mapped, mapped_column -from app.db import ORMBase -from app.db.session import ORMBaseModel -from sqlalchemy import ForeignKey -from sqlalchemy.orm import relationship -from app.domain.file.dataset import DatasetModel -from app.domain.task import OneOfTaskConfig, OneOfTaskResult - -from sqlalchemy.dialects.postgresql import JSONB - -if typing.TYPE_CHECKING: - from app.domain.file.dataset import DatasetORM - - -class TaskStatus(StrEnum): - FAILED = auto() - CREATED = auto() - RUNNING 
= auto() - COMPLETED = auto() - - -class TaskFailureReason(StrEnum): - MEMORY_LIMIT_EXCEEDED = auto() - TIME_LIMIT_EXCEEDED = auto() - WORKER_KILLED_BY_SIGNAL = auto() - OTHER = auto() - - -class TaskORM(ORMBase): - __tablename__ = "task" - id: Mapped[UUID] = mapped_column(primary_key=True, default=uuid4) - - status: Mapped[TaskStatus] - config: Mapped[OneOfTaskConfig] = mapped_column(JSONB) - result: Mapped[OneOfTaskResult | None] = mapped_column(JSONB, default=None) - - dataset_id: Mapped[UUID] = mapped_column(ForeignKey("dataset.id"), nullable=False) - dataset: Mapped["DatasetORM"] = relationship( - "DatasetORM", back_populates="related_tasks" - ) - - # Only if task failed - raised_exception_name: Mapped[str | None] = mapped_column(default=None) - failure_reason: Mapped[TaskFailureReason | None] = mapped_column(default=None) - traceback: Mapped[str | None] = mapped_column(default=None) - - -class TaskModel(ORMBaseModel): - id: UUID - status: TaskStatus - config: OneOfTaskConfig - result: OneOfTaskResult | None - dataset: DatasetModel - - raised_exception_name: str | None - failure_reason: TaskFailureReason | None - traceback: str | None diff --git a/app/domain/user/README.md b/app/domain/user/README.md deleted file mode 100644 index 69c39aa2..00000000 --- a/app/domain/user/README.md +++ /dev/null @@ -1 +0,0 @@ -Это доменная область отвечает за распределение прав, авторизацию, аутентификацию и иные действия связанные с пользовательм веб-сервиса. diff --git a/app/domain/worker/README.md b/app/domain/worker/README.md deleted file mode 100644 index 8e56d1e2..00000000 --- a/app/domain/worker/README.md +++ /dev/null @@ -1 +0,0 @@ -Это доменная область отвечает за создание задач в терминах Celery. Это могут быть как задачи в терминах домена task, так и задачи по отложенной отправке или любые другие требующие отложенного выполнения или высоконагруженных вычислений. 
diff --git a/app/domain/worker/task/__init__.py b/app/domain/worker/task/__init__.py deleted file mode 100644 index 0d2af877..00000000 --- a/app/domain/worker/task/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .data_profiling_task import data_profiling_task as data_profiling_task diff --git a/app/domain/worker/task/data_profiling_task.py b/app/domain/worker/task/data_profiling_task.py deleted file mode 100644 index 00c218d6..00000000 --- a/app/domain/worker/task/data_profiling_task.py +++ /dev/null @@ -1,90 +0,0 @@ -from typing import Any -from uuid import UUID - -from app.db.session import no_pooling -from app.domain.file.dataset import DatasetORM -from app.domain.task import OneOfTaskConfig, OneOfTaskResult -from app.domain.task import match_task_by_primitive_name -from app.domain.task.task import TaskFailureReason, TaskORM, TaskStatus -from app.worker import worker -from app.domain.worker.task.resource_intensive_task import ResourceIntensiveTask -import pandas as pd -from celery.signals import task_failure, task_prerun, task_postrun -from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded, WorkerLostError - - -@worker.task(base=ResourceIntensiveTask, ignore_result=True, max_retries=0) -def data_profiling_task( - task_id: UUID, - dataset_id: UUID, - config: OneOfTaskConfig, -) -> Any: - with no_pooling(): - dataset_orm: DatasetORM = ( - DatasetORM.with_joined(DatasetORM.file) # type: ignore - .where(DatasetORM.id == dataset_id) - .first() - ) - - df = pd.read_csv( - dataset_orm.file.path_to_file, - sep=dataset_orm.separator, - header=dataset_orm.header, - ) - - task = match_task_by_primitive_name(config.primitive_name) - result = task.execute(df, config) # type: ignore - return result - - -@task_prerun.connect(sender=data_profiling_task) -def task_prerun_notifier( - kwargs, - **_, -): - db_task_id: UUID = kwargs["task_id"] - with no_pooling(): - task_orm = TaskORM.find_or_fail(db_task_id) - task_orm.update(status=TaskStatus.RUNNING) # type: ignore 
- - -@task_postrun.connect(sender=data_profiling_task) -def task_postrun_notifier( - kwargs, - retval: OneOfTaskResult, - **_, -): - db_task_id: UUID = kwargs["task_id"] - with no_pooling(): - task_orm = TaskORM.find_or_fail(db_task_id) # type: ignore - task_orm.update( - status=TaskStatus.COMPLETED, # type: ignore - result=retval.model_dump(), - ) - - -@task_failure.connect(sender=data_profiling_task) -def task_failure_notifier( - kwargs, - exception: Exception, - traceback, - **_, -): - # TODO: test all possible exceptions - task_failure_reason = TaskFailureReason.OTHER - if isinstance(exception, (TimeLimitExceeded, SoftTimeLimitExceeded)): - task_failure_reason = TaskFailureReason.TIME_LIMIT_EXCEEDED - if isinstance(exception, MemoryError): - task_failure_reason = TaskFailureReason.MEMORY_LIMIT_EXCEEDED - if isinstance(exception, WorkerLostError): - task_failure_reason = TaskFailureReason.WORKER_KILLED_BY_SIGNAL - - db_task_id: UUID = kwargs["task_id"] - with no_pooling(): - task_orm = TaskORM.find_or_fail(db_task_id) # type: ignore - task_orm.update( - status=TaskStatus.FAILED, # type: ignore - raised_exception_name=exception.__class__.__name__, # type: ignore - failure_reason=task_failure_reason, # type: ignore - traceback=traceback, - ) diff --git a/app/domain/worker/task/resource_intensive_task.py b/app/domain/worker/task/resource_intensive_task.py deleted file mode 100644 index fe938175..00000000 --- a/app/domain/worker/task/resource_intensive_task.py +++ /dev/null @@ -1,18 +0,0 @@ -from celery import Task -import resource -from app.settings import settings - - -class ResourceIntensiveTask(Task): - # There are default Celery time limits, see: https://docs.celeryq.dev/en/stable/userguide/workers.html#time-limits - time_limit = settings.worker_hard_time_limit_in_seconds - soft_time_limit = settings.worker_soft_time_limit_in_seconds - - # There are custom memory limits using `resource` module - hard_memory_limit = settings.worker_hard_memory_limit - 
soft_memory_limit = settings.worker_soft_memory_limit - - def before_start(self, task_id, args, kwargs) -> None: - resource.setrlimit( - resource.RLIMIT_AS, (self.soft_memory_limit, self.hard_memory_limit) - ) diff --git a/app/main.py b/app/main.py deleted file mode 100644 index 1f4349a2..00000000 --- a/app/main.py +++ /dev/null @@ -1,14 +0,0 @@ -from fastapi import FastAPI -from starlette.middleware.cors import CORSMiddleware - -from app import api - -app = FastAPI() -app.include_router(api.router) -app.add_middleware( - CORSMiddleware, - allow_origins=["*"], - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], -) diff --git a/app/settings/__init__.py b/app/settings/__init__.py deleted file mode 100644 index 40a4d46c..00000000 --- a/app/settings/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .settings import get_settings - -settings = get_settings() diff --git a/app/settings/alembic.ini b/app/settings/alembic.ini deleted file mode 100644 index 53d097fd..00000000 --- a/app/settings/alembic.ini +++ /dev/null @@ -1,116 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -script_location = app/db/migrations - -# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s -# Uncomment the line below if you want the files to be prepended with date and time -# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file -# for all available tokens -# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s - -# sys.path path, will be prepended to sys.path if present. -# defaults to the current working directory. -prepend_sys_path = . - -# timezone to use when rendering the date within the migration file -# as well as the filename. 
-# If specified, requires the python-dateutil library that can be -# installed by adding `alembic[tz]` to the pip requirements -# string value is passed to dateutil.tz.gettz() -# leave blank for localtime -# timezone = - -# max length of characters to apply to the -# "slug" field -# truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -# version location specification; This defaults -# to migrations/versions. When using multiple version -# directories, initial revisions must be specified with --version-path. -# The path separator used here should be the separator specified by "version_path_separator" below. -# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions - -# version path separator; As mentioned above, this is the character used to split -# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. -# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. -# Valid values for version_path_separator are: -# -# version_path_separator = : -# version_path_separator = ; -# version_path_separator = space -version_path_separator = os # Use os.pathsep. Default configuration used for new projects. - -# set to 'true' to search source files recursively -# in each "version_locations" directory -# new in Alembic version 1.10 -# recursive_version_locations = false - -# the output encoding used when revision files -# are written from script.py.mako -# output_encoding = utf-8 - -sqlalchemy.url = %(db_url)s - - -[post_write_hooks] -# post_write_hooks defines scripts or Python functions that are run -# on newly generated revision scripts. 
See the documentation for further -# detail and examples - -# format using "black" - use the console_scripts runner, against the "black" entrypoint -# hooks = black -# black.type = console_scripts -# black.entrypoint = black -# black.options = -l 79 REVISION_SCRIPT_FILENAME - -# lint with attempts to fix using "ruff" - use the exec runner, execute a binary -# hooks = ruff -# ruff.type = exec -# ruff.executable = %(here)s/.venv/bin/ruff -# ruff.options = --fix REVISION_SCRIPT_FILENAME - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/app/settings/celery_config.py b/app/settings/celery_config.py deleted file mode 100644 index 4330b126..00000000 --- a/app/settings/celery_config.py +++ /dev/null @@ -1,6 +0,0 @@ -broker_connection_retry_on_startup = True -task_serializer = "pickle" -result_serializer = "pickle" -event_serializer = "json" -accept_content = ["application/json", "application/x-python-serialize"] -result_accept_content = ["application/json", "application/x-python-serialize"] diff --git a/app/settings/settings.py b/app/settings/settings.py deleted file mode 100644 index 15519213..00000000 --- a/app/settings/settings.py +++ /dev/null @@ -1,55 +0,0 @@ -from functools import cached_property - -from dotenv import load_dotenv, find_dotenv -from pydantic import AmqpDsn, DirectoryPath, PostgresDsn, Field, ByteSize -from pydantic_settings import BaseSettings - -load_dotenv(find_dotenv(".env")) - - -class Settings(BaseSettings): - # Postgres settings - postgres_dialect_driver: 
str = "postgresql" - postgres_user: str - postgres_password: str - postgres_host: str - postgres_db: str - postgres_port: int = 5432 - # RabbitMQ settings - rabbitmq_default_user: str - rabbitmq_default_password: str - rabbitmq_host: str - rabbitmq_port: int = 5672 - # Worker limits - worker_soft_time_limit_in_seconds: int = Field(default=60, gt=0) - worker_hard_time_limit_in_seconds: int = Field(default=120, gt=0) - worker_soft_memory_limit: ByteSize = "2GB" - worker_hard_memory_limit: ByteSize = "4GB" - # Files settings and limits - uploaded_files_dir_path: DirectoryPath = "uploads/" - - @cached_property - def rabbitmq_dsn(self) -> AmqpDsn: - return AmqpDsn.build( - scheme="amqp", - username=self.rabbitmq_default_user, - password=self.rabbitmq_default_password, - host=self.rabbitmq_host, - port=self.rabbitmq_port, - ) - - @cached_property - def postgres_dsn(self) -> PostgresDsn: - return PostgresDsn.build( - scheme=self.postgres_dialect_driver, - username=self.postgres_user, - password=self.postgres_password, - host=self.postgres_host, - port=self.postgres_port, - path=self.postgres_db, - ) - - -def get_settings(): - # TODO: create different settings based on environment (production, testing, etc.) 
- return Settings() diff --git a/app/worker.py b/app/worker.py deleted file mode 100644 index 0a7fa4f7..00000000 --- a/app/worker.py +++ /dev/null @@ -1,10 +0,0 @@ -from celery import Celery - -from app.settings import settings - -worker = Celery( - __name__, - broker=settings.rabbitmq_dsn.unicode_string(), - include=["app.domain.worker.task"], -) -worker.config_from_object("app.settings.celery_config") diff --git a/tests/usecase/test_save_file.py b/tests/usecase/test_save_file.py index 9aea7e74..b3260230 100644 --- a/tests/usecase/test_save_file.py +++ b/tests/usecase/test_save_file.py @@ -13,6 +13,7 @@ FileResponseSchema, ) from internal.uow import UnitOfWork, DataStorageContext +from internal.usecase.file.exception import FailedReadFileException from internal.usecase.file.save_file import ( FileMetadataRepo, FileRepo, @@ -137,3 +138,32 @@ async def test_save_file( created_at=created_at, updated_at=updated_at, ) + + +@pytest.mark.asyncio +async def test_save_file_failed_read_file_exception( + mocker: MockerFixture, + save_file: SaveFile, + unit_of_work_mock: UnitOfWork, + file_repo_mock: FileRepo, + file_metadata_repo_mock: FileMetadataRepo, + file_entity_mock: FileEntity, +) -> None: + # Prepare the mock to raise the exception + file_repo_mock.create.side_effect = FailedReadFileException("File reading failed") + + upload_file_mock = mocker.Mock(spec=File) + upload_file_mock.filename = "example.txt" + upload_file_mock.content_type = "text/plain" + + # Act & Assert + with pytest.raises(FailedReadFileException, match="File reading failed"): + await save_file(upload_file=upload_file_mock) + + # Check that the repositories' create methods were called + file_metadata_repo_mock.create.assert_called_once() + file_repo_mock.create.assert_called_once() + + # Verify that UnitOfWork was used correctly + unit_of_work_mock.__enter__.assert_called_once() + unit_of_work_mock.__exit__.assert_called_once() From 2623cd50f31de733df25af43de59888d67f743ce Mon Sep 17 00:00:00 2001 
From: raf-nr Date: Sat, 21 Sep 2024 04:47:33 +0300 Subject: [PATCH 115/153] chore: format code --- internal/domain/common/optional_model.py | 1 + internal/domain/task/entities/afd/afd_task.py | 1 + internal/domain/task/entities/task.py | 3 - .../domain/task/value_objects/__init__.py | 1 + internal/dto/repository/base_schema.py | 3 +- internal/dto/repository/file/__init__.py | 31 +++++--- internal/dto/repository/file/dataset.py | 11 ++- internal/dto/repository/file/file.py | 8 ++- internal/dto/repository/file/file_metadata.py | 11 ++- internal/dto/repository/task/__init__.py | 8 ++- internal/dto/repository/task/task.py | 19 +++-- internal/dto/worker/task/profiling_task.py | 3 +- .../background_task/celery/__init__.py | 2 +- .../background_task/celery/task/__init__.py | 4 +- .../background_task/celery/task/di.py | 5 +- .../celery/task/profiling_task.py | 27 ++++--- .../background_task/settings.py | 1 - .../data_storage/relational/model/__init__.py | 4 +- .../relational/model/file/__init__.py | 8 ++- .../relational/model/file/dataset.py | 8 ++- .../relational/model/orm_base_model.py | 6 +- .../relational/model/task/task.py | 7 +- .../relational/postgres/__init__.py | 6 +- .../relational/postgres/migrations/env.py | 7 +- ...create_file_metadata_and_dataset_tadble.py | 70 ++++++++++++------ .../a6df7c9124be_first_empty_migration.py | 4 +- .../b13295f9fac2_create_task_tadble.py | 61 +++++++++++----- internal/repository/flat/file.py | 24 ++++--- internal/repository/relational/crud.py | 71 +++++++++++-------- .../repository/relational/file/dataset.py | 34 ++++++--- .../relational/file/file_metadata.py | 14 ++-- internal/repository/relational/task/task.py | 13 ++-- .../repository/relational/user/__init__.py | 0 internal/rest/http/common/__init__.py | 1 + internal/rest/http/common/ping.py | 1 + internal/rest/http/di.py | 17 +++-- internal/rest/http/exception.py | 21 +++--- internal/rest/http/file/di.py | 21 +++--- internal/rest/http/file/retrieve_dataset.py | 5 +- 
internal/rest/http/file/upload_csv_dataset.py | 10 ++- internal/rest/http/task/di.py | 18 ++--- internal/rest/http/task/retrieve_task.py | 11 ++- internal/rest/http/task/set_task.py | 4 +- internal/usecase/file/__init__.py | 5 +- internal/usecase/file/check_content_type.py | 4 +- internal/usecase/file/retrieve_dataset.py | 11 +-- internal/usecase/file/save_dataset.py | 18 ++--- internal/usecase/file/save_file.py | 37 +++++----- internal/usecase/task/profile_task.py | 40 ++++++----- internal/usecase/task/retrieve_task.py | 13 +++- internal/usecase/task/set_task.py | 32 +++++---- internal/usecase/task/update_task_info.py | 51 +++++++------ 52 files changed, 505 insertions(+), 291 deletions(-) create mode 100644 internal/repository/relational/user/__init__.py diff --git a/internal/domain/common/optional_model.py b/internal/domain/common/optional_model.py index 8f25940f..904b0e23 100644 --- a/internal/domain/common/optional_model.py +++ b/internal/domain/common/optional_model.py @@ -13,6 +13,7 @@ class OptionalModel(BaseModel): __non_optional_fields__ (set): A set of field names that should remain non-optional. Fields listed here will not have `None` as their default value. """ + __non_optional_fields__ = set() @classmethod diff --git a/internal/domain/task/entities/afd/afd_task.py b/internal/domain/task/entities/afd/afd_task.py index 381a4499..a53c2ea6 100644 --- a/internal/domain/task/entities/afd/afd_task.py +++ b/internal/domain/task/entities/afd/afd_task.py @@ -8,6 +8,7 @@ from internal.domain.task.value_objects.afd import AfdTaskResult, AfdTaskConfig from internal.domain.task.value_objects.afd import AfdAlgoName, AfdAlgoResult, FdModel + class AfdTask(Task[AfdTaskConfig, AfdTaskResult]): """ Task class for Approximate Functional Dependency (AFD) profiling. 
diff --git a/internal/domain/task/entities/task.py b/internal/domain/task/entities/task.py index c16d0faf..8243b102 100644 --- a/internal/domain/task/entities/task.py +++ b/internal/domain/task/entities/task.py @@ -23,7 +23,6 @@ class Task[C: TaskConfig, R: TaskResult](ABC): on a given table with the provided configuration. """ - @abstractmethod def _match_algo_by_name(self, algo_name: str) -> desbordante.Algorithm: """ @@ -36,7 +35,6 @@ def _match_algo_by_name(self, algo_name: str) -> desbordante.Algorithm: """ pass - @abstractmethod def _collect_result(self, algo: desbordante.Algorithm) -> R: """ @@ -49,7 +47,6 @@ def _collect_result(self, algo: desbordante.Algorithm) -> R: """ pass - def execute(self, table: pandas.DataFrame, task_config: C) -> R: """ Execute the algorithm on the provided data table. diff --git a/internal/domain/task/value_objects/__init__.py b/internal/domain/task/value_objects/__init__.py index 53ce68f9..65656230 100644 --- a/internal/domain/task/value_objects/__init__.py +++ b/internal/domain/task/value_objects/__init__.py @@ -10,6 +10,7 @@ from internal.domain.task.value_objects.primitive_name import PrimitiveName + class TaskStatus(StrEnum): FAILED = auto() CREATED = auto() diff --git a/internal/dto/repository/base_schema.py b/internal/dto/repository/base_schema.py index e03e9e4e..26e90b0b 100644 --- a/internal/dto/repository/base_schema.py +++ b/internal/dto/repository/base_schema.py @@ -9,8 +9,7 @@ class BaseSchema(BaseModel): model_config = ConfigDict(from_attributes=True) -class BaseCreateSchema(BaseSchema): - ... +class BaseCreateSchema(BaseSchema): ... 
class BaseFindSchema[T](BaseSchema): diff --git a/internal/dto/repository/file/__init__.py b/internal/dto/repository/file/__init__.py index 30c3cbb0..2051fe4f 100644 --- a/internal/dto/repository/file/__init__.py +++ b/internal/dto/repository/file/__init__.py @@ -1,10 +1,23 @@ -from internal.dto.repository.file.file import (File, FileResponseSchema, FileFindSchema, - FileCreateSchema, FileUpdateSchema, - FailedFileReadingException) -from internal.dto.repository.file.file_metadata import (FileMetadataResponseSchema, FileMetadataCreateSchema, - FileMetadataFindSchema, FileMetadataUpdateSchema, - FileMetadataNotFoundException) -from internal.dto.repository.file.dataset import (DatasetResponseSchema, DatasetCreateSchema, - DatasetUpdateSchema, DatasetFindSchema, - DatasetNotFoundException) +from internal.dto.repository.file.file import ( + File, + FileResponseSchema, + FileFindSchema, + FileCreateSchema, + FileUpdateSchema, + FailedFileReadingException, +) +from internal.dto.repository.file.file_metadata import ( + FileMetadataResponseSchema, + FileMetadataCreateSchema, + FileMetadataFindSchema, + FileMetadataUpdateSchema, + FileMetadataNotFoundException, +) +from internal.dto.repository.file.dataset import ( + DatasetResponseSchema, + DatasetCreateSchema, + DatasetUpdateSchema, + DatasetFindSchema, + DatasetNotFoundException, +) from internal.dto.repository.file.file import CSVFileFindSchema, CSVFileResponseSchema diff --git a/internal/dto/repository/file/dataset.py b/internal/dto/repository/file/dataset.py index ff84ee59..648c3307 100644 --- a/internal/dto/repository/file/dataset.py +++ b/internal/dto/repository/file/dataset.py @@ -1,13 +1,18 @@ from uuid import UUID -from internal.dto.repository.base_schema import BaseSchema, BaseCreateSchema, BaseUpdateSchema, \ - BaseResponseSchema, BaseFindSchema +from internal.dto.repository.base_schema import ( + BaseSchema, + BaseCreateSchema, + BaseUpdateSchema, + BaseResponseSchema, + BaseFindSchema, +) class 
DatasetNotFoundException(Exception): def __init__(self): - super().__init__('Dataset not found') + super().__init__("Dataset not found") class DatasetBaseSchema(BaseSchema): diff --git a/internal/dto/repository/file/file.py b/internal/dto/repository/file/file.py index 2deb14e3..d8f1ac29 100644 --- a/internal/dto/repository/file/file.py +++ b/internal/dto/repository/file/file.py @@ -3,7 +3,11 @@ import pandas as pd -from internal.dto.repository.base_schema import BaseCreateSchema, BaseUpdateSchema, BaseSchema +from internal.dto.repository.base_schema import ( + BaseCreateSchema, + BaseUpdateSchema, + BaseSchema, +) class FailedFileReadingException(Exception): @@ -30,7 +34,7 @@ class FileCreateSchema(FileBaseSchema, BaseCreateSchema): ... class FileUpdateSchema(FileBaseSchema, BaseUpdateSchema[UUID]): ... -class FileFindSchema(FileBaseSchema, BaseSchema): ... # it's not a typo +class FileFindSchema(FileBaseSchema, BaseSchema): ... # it's not a typo FileResponseSchema = None diff --git a/internal/dto/repository/file/file_metadata.py b/internal/dto/repository/file/file_metadata.py index 5f96af7d..2314c8d1 100644 --- a/internal/dto/repository/file/file_metadata.py +++ b/internal/dto/repository/file/file_metadata.py @@ -1,13 +1,18 @@ from uuid import UUID -from internal.dto.repository.base_schema import BaseSchema, BaseCreateSchema, BaseUpdateSchema, \ - BaseResponseSchema, BaseFindSchema +from internal.dto.repository.base_schema import ( + BaseSchema, + BaseCreateSchema, + BaseUpdateSchema, + BaseResponseSchema, + BaseFindSchema, +) class FileMetadataNotFoundException(Exception): def __init__(self): - super().__init__('File metadata not found') + super().__init__("File metadata not found") class FileMetadataBaseSchema(BaseSchema): diff --git a/internal/dto/repository/task/__init__.py b/internal/dto/repository/task/__init__.py index 862a92b9..8849e001 100644 --- a/internal/dto/repository/task/__init__.py +++ b/internal/dto/repository/task/__init__.py @@ -1,2 +1,6 @@ 
-from internal.dto.repository.task.task import (TaskCreateSchema, TaskUpdateSchema, TaskFindSchema, - TaskResponseSchema) +from internal.dto.repository.task.task import ( + TaskCreateSchema, + TaskUpdateSchema, + TaskFindSchema, + TaskResponseSchema, +) diff --git a/internal/dto/repository/task/task.py b/internal/dto/repository/task/task.py index 17929220..d290c120 100644 --- a/internal/dto/repository/task/task.py +++ b/internal/dto/repository/task/task.py @@ -1,13 +1,24 @@ from uuid import UUID -from internal.domain.task.value_objects import TaskStatus, OneOfTaskConfig, OneOfTaskResult, TaskFailureReason -from internal.dto.repository.base_schema import BaseSchema, BaseCreateSchema, BaseUpdateSchema, \ - BaseResponseSchema, BaseFindSchema +from internal.domain.task.value_objects import ( + TaskStatus, + OneOfTaskConfig, + OneOfTaskResult, + TaskFailureReason, +) +from internal.dto.repository.base_schema import ( + BaseSchema, + BaseCreateSchema, + BaseUpdateSchema, + BaseResponseSchema, + BaseFindSchema, +) + class TaskNotFoundException(Exception): def __init__(self): - super().__init__('Task not found') + super().__init__("Task not found") class TaskBaseSchema(BaseSchema): diff --git a/internal/dto/worker/task/profiling_task.py b/internal/dto/worker/task/profiling_task.py index 303ddc03..58ff8090 100644 --- a/internal/dto/worker/task/profiling_task.py +++ b/internal/dto/worker/task/profiling_task.py @@ -10,8 +10,7 @@ class ProfilingTaskBaseSchema(BaseModel): config: OneOfTaskConfig -class ProfilingTaskCreateSchema(ProfilingTaskBaseSchema): - ... +class ProfilingTaskCreateSchema(ProfilingTaskBaseSchema): ... 
ProfilingTaskResponseSchema = None diff --git a/internal/infrastructure/background_task/celery/__init__.py b/internal/infrastructure/background_task/celery/__init__.py index 335202d9..a8a9e0f1 100644 --- a/internal/infrastructure/background_task/celery/__init__.py +++ b/internal/infrastructure/background_task/celery/__init__.py @@ -5,7 +5,7 @@ worker = Celery( __name__, broker=settings.rabbitmq_dsn.unicode_string(), - include=['internal.infrastructure.background_task.celery.task'], + include=["internal.infrastructure.background_task.celery.task"], ) worker.config_from_object("internal.infrastructure.background_task.celery.config") diff --git a/internal/infrastructure/background_task/celery/task/__init__.py b/internal/infrastructure/background_task/celery/task/__init__.py index badf1c6b..8b11a477 100644 --- a/internal/infrastructure/background_task/celery/task/__init__.py +++ b/internal/infrastructure/background_task/celery/task/__init__.py @@ -1 +1,3 @@ -from internal.infrastructure.background_task.celery.task.profiling_task import profiling_task +from internal.infrastructure.background_task.celery.task.profiling_task import ( + profiling_task, +) diff --git a/internal/infrastructure/background_task/celery/task/di.py b/internal/infrastructure/background_task/celery/task/di.py index e2a4ef12..032c5abb 100644 --- a/internal/infrastructure/background_task/celery/task/di.py +++ b/internal/infrastructure/background_task/celery/task/di.py @@ -1,4 +1,6 @@ -from internal.infrastructure.data_storage.relational.postgres import get_postgres_context_maker_without_pool +from internal.infrastructure.data_storage.relational.postgres import ( + get_postgres_context_maker_without_pool, +) from internal.repository.flat import FileRepository from internal.repository.relational.file import DatasetRepository from internal.repository.relational.task import TaskRepository @@ -30,6 +32,7 @@ def get_update_task_info_use_case(): task_repo=task_repo, # type: ignore ) + def 
get_profile_task_use_case(): context_maker = get_postgres_context_maker_without_pool() diff --git a/internal/infrastructure/background_task/celery/task/profiling_task.py b/internal/infrastructure/background_task/celery/task/profiling_task.py index e18525c6..7c18e231 100644 --- a/internal/infrastructure/background_task/celery/task/profiling_task.py +++ b/internal/infrastructure/background_task/celery/task/profiling_task.py @@ -6,18 +6,27 @@ from celery.signals import task_failure, task_prerun, task_postrun from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded, WorkerLostError -from internal.domain.task.value_objects import OneOfTaskConfig, TaskStatus, OneOfTaskResult, TaskFailureReason +from internal.domain.task.value_objects import ( + OneOfTaskConfig, + TaskStatus, + OneOfTaskResult, + TaskFailureReason, +) from internal.infrastructure.background_task.celery import worker -from internal.infrastructure.background_task.celery.task.di import (get_profile_task_use_case, - get_update_task_info_use_case) -from internal.infrastructure.background_task.celery.task.resource_intensive_task import ResourceIntensiveTask +from internal.infrastructure.background_task.celery.task.di import ( + get_profile_task_use_case, + get_update_task_info_use_case, +) +from internal.infrastructure.background_task.celery.task.resource_intensive_task import ( + ResourceIntensiveTask, +) @worker.task(base=ResourceIntensiveTask, ignore_result=True, max_retries=0) def profiling_task( - task_id: UUID, - dataset_id: UUID, - config: OneOfTaskConfig, + task_id: UUID, + dataset_id: UUID, + config: OneOfTaskConfig, ) -> Any: profile_task = get_profile_task_use_case() @@ -72,7 +81,9 @@ def task_failure_notifier( if isinstance(exception, WorkerLostError): task_failure_reason = TaskFailureReason.WORKER_KILLED_BY_SIGNAL - formatted_traceback = "".join(tb.format_exception(type(exception), exception, exception.__traceback__)) + formatted_traceback = "".join( + 
tb.format_exception(type(exception), exception, exception.__traceback__) + ) update_task_info = get_update_task_info_use_case() db_task_id: UUID = kwargs["task_id"] diff --git a/internal/infrastructure/background_task/settings.py b/internal/infrastructure/background_task/settings.py index 180f3b07..569b215b 100644 --- a/internal/infrastructure/background_task/settings.py +++ b/internal/infrastructure/background_task/settings.py @@ -1,4 +1,3 @@ - from dotenv import load_dotenv, find_dotenv from pydantic import Field, ByteSize from pydantic_settings import BaseSettings diff --git a/internal/infrastructure/data_storage/relational/model/__init__.py b/internal/infrastructure/data_storage/relational/model/__init__.py index 9022a02a..3405ade1 100644 --- a/internal/infrastructure/data_storage/relational/model/__init__.py +++ b/internal/infrastructure/data_storage/relational/model/__init__.py @@ -1 +1,3 @@ -from internal.infrastructure.data_storage.relational.model.orm_base_model import ORMBaseModel +from internal.infrastructure.data_storage.relational.model.orm_base_model import ( + ORMBaseModel, +) diff --git a/internal/infrastructure/data_storage/relational/model/file/__init__.py b/internal/infrastructure/data_storage/relational/model/file/__init__.py index 3f0174d4..29b551f3 100644 --- a/internal/infrastructure/data_storage/relational/model/file/__init__.py +++ b/internal/infrastructure/data_storage/relational/model/file/__init__.py @@ -1,2 +1,6 @@ -from internal.infrastructure.data_storage.relational.model.file.file_metadata import FileMetadataORM -from internal.infrastructure.data_storage.relational.model.file.dataset import DatasetORM +from internal.infrastructure.data_storage.relational.model.file.file_metadata import ( + FileMetadataORM, +) +from internal.infrastructure.data_storage.relational.model.file.dataset import ( + DatasetORM, +) diff --git a/internal/infrastructure/data_storage/relational/model/file/dataset.py 
b/internal/infrastructure/data_storage/relational/model/file/dataset.py index 9c33f890..88451565 100644 --- a/internal/infrastructure/data_storage/relational/model/file/dataset.py +++ b/internal/infrastructure/data_storage/relational/model/file/dataset.py @@ -5,7 +5,9 @@ from sqlalchemy.orm import mapped_column, Mapped, relationship from internal.infrastructure.data_storage.relational.model import ORMBaseModel -from internal.infrastructure.data_storage.relational.model.file.file_metadata import FileMetadataORM +from internal.infrastructure.data_storage.relational.model.file.file_metadata import ( + FileMetadataORM, +) if typing.TYPE_CHECKING: from internal.infrastructure.data_storage.relational.model.task import TaskORM @@ -18,7 +20,9 @@ class DatasetORM(ORMBaseModel): is_built_in: Mapped[bool] = mapped_column(default=False) header: Mapped[list[int]] = mapped_column(ARRAY(Integer), default=[]) separator: Mapped[str] - file_id: Mapped[UUID] = mapped_column(ForeignKey("file_metadata.id"), nullable=False) + file_id: Mapped[UUID] = mapped_column( + ForeignKey("file_metadata.id"), nullable=False + ) file_metadata: Mapped[FileMetadataORM] = relationship("FileMetadataORM") related_tasks: Mapped[list["TaskORM"]] = relationship( diff --git a/internal/infrastructure/data_storage/relational/model/orm_base_model.py b/internal/infrastructure/data_storage/relational/model/orm_base_model.py index 6ffa1d63..03c771b9 100644 --- a/internal/infrastructure/data_storage/relational/model/orm_base_model.py +++ b/internal/infrastructure/data_storage/relational/model/orm_base_model.py @@ -8,13 +8,11 @@ class ORMBaseModel(DeclarativeBase): __abstract__ = True created_at: Mapped[datetime] = mapped_column( - TIMESTAMP(timezone=True), - nullable=False, - server_default=text("now()") + TIMESTAMP(timezone=True), nullable=False, server_default=text("now()") ) updated_at: Mapped[datetime] = mapped_column( TIMESTAMP(timezone=True), nullable=False, server_default=text("now()"), - 
onupdate=text("now()") + onupdate=text("now()"), ) diff --git a/internal/infrastructure/data_storage/relational/model/task/task.py b/internal/infrastructure/data_storage/relational/model/task/task.py index 31cb4aea..652e8469 100644 --- a/internal/infrastructure/data_storage/relational/model/task/task.py +++ b/internal/infrastructure/data_storage/relational/model/task/task.py @@ -5,7 +5,12 @@ from sqlalchemy.orm import mapped_column, Mapped, relationship from sqlalchemy.dialects.postgresql import JSONB -from internal.domain.task.value_objects import TaskStatus, OneOfTaskConfig, OneOfTaskResult, TaskFailureReason +from internal.domain.task.value_objects import ( + TaskStatus, + OneOfTaskConfig, + OneOfTaskResult, + TaskFailureReason, +) from internal.infrastructure.data_storage.relational.model import ORMBaseModel if typing.TYPE_CHECKING: diff --git a/internal/infrastructure/data_storage/relational/postgres/__init__.py b/internal/infrastructure/data_storage/relational/postgres/__init__.py index cc233c5d..12f99f61 100644 --- a/internal/infrastructure/data_storage/relational/postgres/__init__.py +++ b/internal/infrastructure/data_storage/relational/postgres/__init__.py @@ -1,2 +1,4 @@ -from internal.infrastructure.data_storage.relational.postgres.context import (get_postgres_context_maker, - get_postgres_context_maker_without_pool) +from internal.infrastructure.data_storage.relational.postgres.context import ( + get_postgres_context_maker, + get_postgres_context_maker_without_pool, +) diff --git a/internal/infrastructure/data_storage/relational/postgres/migrations/env.py b/internal/infrastructure/data_storage/relational/postgres/migrations/env.py index 74d0c6eb..f0eb3337 100644 --- a/internal/infrastructure/data_storage/relational/postgres/migrations/env.py +++ b/internal/infrastructure/data_storage/relational/postgres/migrations/env.py @@ -7,9 +7,6 @@ from internal.infrastructure.data_storage import settings from internal.infrastructure.data_storage.relational.model 
import ORMBaseModel -from internal.infrastructure.data_storage.relational.model.file.file_metadata import FileMetadataORM # noqa: F401 -from internal.infrastructure.data_storage.relational.model.file.dataset import DatasetORM # noqa: F401 -from internal.infrastructure.data_storage.relational.model.task import TaskORM # noqa: F401 # this is the Alembic Config object, which provides @@ -73,9 +70,7 @@ def run_migrations_online() -> None: ) with connectable.connect() as connection: - context.configure( - connection=connection, target_metadata=target_metadata - ) + context.configure(connection=connection, target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() diff --git a/internal/infrastructure/data_storage/relational/postgres/migrations/versions/925f30493c24_create_file_metadata_and_dataset_tadble.py b/internal/infrastructure/data_storage/relational/postgres/migrations/versions/925f30493c24_create_file_metadata_and_dataset_tadble.py index 7cfd7f85..c59b4cc6 100644 --- a/internal/infrastructure/data_storage/relational/postgres/migrations/versions/925f30493c24_create_file_metadata_and_dataset_tadble.py +++ b/internal/infrastructure/data_storage/relational/postgres/migrations/versions/925f30493c24_create_file_metadata_and_dataset_tadble.py @@ -5,6 +5,7 @@ Create Date: 2024-09-18 22:50:48.547252 """ + from typing import Sequence, Union from alembic import op @@ -12,39 +13,64 @@ # revision identifiers, used by Alembic. -revision: str = '925f30493c24' -down_revision: Union[str, None] = 'a6df7c9124be' +revision: str = "925f30493c24" +down_revision: Union[str, None] = "a6df7c9124be" branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('file_metadata', - sa.Column('id', sa.Uuid(), nullable=False), - sa.Column('mime_type', sa.String(), nullable=False), - sa.Column('file_name', sa.Uuid(), nullable=False), - sa.Column('original_file_name', sa.String(), nullable=False), - sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False), - sa.Column('updated_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False), - sa.PrimaryKeyConstraint('id') + op.create_table( + "file_metadata", + sa.Column("id", sa.Uuid(), nullable=False), + sa.Column("mime_type", sa.String(), nullable=False), + sa.Column("file_name", sa.Uuid(), nullable=False), + sa.Column("original_file_name", sa.String(), nullable=False), + sa.Column( + "created_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column( + "updated_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.PrimaryKeyConstraint("id"), ) - op.create_table('dataset', - sa.Column('id', sa.Uuid(), nullable=False), - sa.Column('is_built_in', sa.Boolean(), nullable=False), - sa.Column('header', sa.ARRAY(sa.Integer()), nullable=False), - sa.Column('separator', sa.String(), nullable=False), - sa.Column('file_id', sa.Uuid(), nullable=False), - sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False), - sa.Column('updated_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False), - sa.ForeignKeyConstraint(['file_id'], ['file_metadata.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table( + "dataset", + sa.Column("id", sa.Uuid(), nullable=False), + sa.Column("is_built_in", sa.Boolean(), nullable=False), + sa.Column("header", sa.ARRAY(sa.Integer()), nullable=False), + sa.Column("separator", sa.String(), nullable=False), + sa.Column("file_id", sa.Uuid(), nullable=False), + sa.Column( + "created_at", + sa.TIMESTAMP(timezone=True), 
+ server_default=sa.text("now()"), + nullable=False, + ), + sa.Column( + "updated_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.ForeignKeyConstraint( + ["file_id"], + ["file_metadata.id"], + ), + sa.PrimaryKeyConstraint("id"), ) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('dataset') - op.drop_table('file_metadata') + op.drop_table("dataset") + op.drop_table("file_metadata") # ### end Alembic commands ### diff --git a/internal/infrastructure/data_storage/relational/postgres/migrations/versions/a6df7c9124be_first_empty_migration.py b/internal/infrastructure/data_storage/relational/postgres/migrations/versions/a6df7c9124be_first_empty_migration.py index 7c4a0f24..69670c9b 100644 --- a/internal/infrastructure/data_storage/relational/postgres/migrations/versions/a6df7c9124be_first_empty_migration.py +++ b/internal/infrastructure/data_storage/relational/postgres/migrations/versions/a6df7c9124be_first_empty_migration.py @@ -5,12 +5,12 @@ Create Date: 2024-09-18 22:40:07.892034 """ -from typing import Sequence, Union +from typing import Sequence, Union # revision identifiers, used by Alembic. 
-revision: str = 'a6df7c9124be' +revision: str = "a6df7c9124be" down_revision: Union[str, None] = None branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None diff --git a/internal/infrastructure/data_storage/relational/postgres/migrations/versions/b13295f9fac2_create_task_tadble.py b/internal/infrastructure/data_storage/relational/postgres/migrations/versions/b13295f9fac2_create_task_tadble.py index cecadd82..9d519b81 100644 --- a/internal/infrastructure/data_storage/relational/postgres/migrations/versions/b13295f9fac2_create_task_tadble.py +++ b/internal/infrastructure/data_storage/relational/postgres/migrations/versions/b13295f9fac2_create_task_tadble.py @@ -5,6 +5,7 @@ Create Date: 2024-09-18 22:54:19.185777 """ + from typing import Sequence, Union from alembic import op @@ -12,32 +13,60 @@ from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. -revision: str = 'b13295f9fac2' -down_revision: Union[str, None] = '925f30493c24' +revision: str = "b13295f9fac2" +down_revision: Union[str, None] = "925f30493c24" branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('task', - sa.Column('id', sa.Uuid(), nullable=False), - sa.Column('status', sa.Enum('FAILED', 'CREATED', 'RUNNING', 'COMPLETED', name='taskstatus'), nullable=False), - sa.Column('config', postgresql.JSONB(astext_type=sa.Text()), nullable=False), - sa.Column('result', postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column('dataset_id', sa.Uuid(), nullable=False), - sa.Column('raised_exception_name', sa.String(), nullable=True), - sa.Column('failure_reason', sa.Enum('MEMORY_LIMIT_EXCEEDED', 'TIME_LIMIT_EXCEEDED', 'WORKER_KILLED_BY_SIGNAL', 'OTHER', name='taskfailurereason'), nullable=True), - sa.Column('traceback', sa.String(), nullable=True), - sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False), - sa.Column('updated_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False), - sa.ForeignKeyConstraint(['dataset_id'], ['dataset.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table( + "task", + sa.Column("id", sa.Uuid(), nullable=False), + sa.Column( + "status", + sa.Enum("FAILED", "CREATED", "RUNNING", "COMPLETED", name="taskstatus"), + nullable=False, + ), + sa.Column("config", postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column("result", postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.Column("dataset_id", sa.Uuid(), nullable=False), + sa.Column("raised_exception_name", sa.String(), nullable=True), + sa.Column( + "failure_reason", + sa.Enum( + "MEMORY_LIMIT_EXCEEDED", + "TIME_LIMIT_EXCEEDED", + "WORKER_KILLED_BY_SIGNAL", + "OTHER", + name="taskfailurereason", + ), + nullable=True, + ), + sa.Column("traceback", sa.String(), nullable=True), + sa.Column( + "created_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column( + "updated_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.ForeignKeyConstraint( + ["dataset_id"], + 
["dataset.id"], + ), + sa.PrimaryKeyConstraint("id"), ) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('task') + op.drop_table("task") # ### end Alembic commands ### diff --git a/internal/repository/flat/file.py b/internal/repository/flat/file.py index 7b29268e..63be7a6b 100644 --- a/internal/repository/flat/file.py +++ b/internal/repository/flat/file.py @@ -3,38 +3,42 @@ import aiofiles import pandas as pd -from internal.dto.repository.file.file import FailedFileReadingException, CSVFileFindSchema, CSVFileResponseSchema +from internal.dto.repository.file.file import ( + FailedFileReadingException, + CSVFileFindSchema, + CSVFileResponseSchema, +) from internal.infrastructure.data_storage import settings from internal.dto.repository.file import File, FileCreateSchema, FileResponseSchema from internal.uow import DataStorageContext CHUNK_SIZE = 1024 + class FileRepository: def __init__(self): self.files_dir_path = settings.uploaded_files_dir_path async def create( - self, - file: File, - file_info: FileCreateSchema, - context: DataStorageContext # The current repository implementation does not support transactions. + self, + file: File, + file_info: FileCreateSchema, + context: DataStorageContext, # The current repository implementation does not support transactions. ) -> FileResponseSchema: path_to_file = Path.joinpath(self.files_dir_path, str(file_info.file_name)) try: - async with aiofiles.open(path_to_file, "wb") as out_file: # !!! + async with aiofiles.open(path_to_file, "wb") as out_file: # !!! while content := await file.read(CHUNK_SIZE): await out_file.write(content) except Exception: raise FailedFileReadingException("The sent file could not be read.") - def find( - self, - file_info: CSVFileFindSchema, - context: DataStorageContext # The current repository implementation does not support transactions. 
+ self, + file_info: CSVFileFindSchema, + context: DataStorageContext, # The current repository implementation does not support transactions. ) -> CSVFileResponseSchema: path_to_file = Path(self.files_dir_path, str(file_info.file_name)) diff --git a/internal/repository/relational/crud.py b/internal/repository/relational/crud.py index a8da6087..73fad976 100644 --- a/internal/repository/relational/crud.py +++ b/internal/repository/relational/crud.py @@ -3,9 +3,15 @@ from sqlalchemy import select from internal.infrastructure.data_storage.relational.model import ORMBaseModel -from internal.dto.repository.base_schema import (BaseCreateSchema, BaseUpdateSchema, - BaseFindSchema, BaseResponseSchema) -from internal.infrastructure.data_storage.relational.context import RelationalContextType +from internal.dto.repository.base_schema import ( + BaseCreateSchema, + BaseUpdateSchema, + BaseFindSchema, + BaseResponseSchema, +) +from internal.infrastructure.data_storage.relational.context import ( + RelationalContextType, +) class CRUD[ @@ -13,45 +19,49 @@ class CRUD[ CreateSchema: BaseCreateSchema, UpdateSchema: BaseUpdateSchema, FindSchema: BaseFindSchema, - ResponseSchema: BaseResponseSchema + ResponseSchema: BaseResponseSchema, ]: def __init__( - self, - orm_model: Type[ORMModel], - response_schema: Type[ResponseSchema] + self, orm_model: Type[ORMModel], response_schema: Type[ResponseSchema] ) -> None: self._orm_model: Type[ORMModel] = orm_model self._response_schema: Type[ResponseSchema] = response_schema - - def create(self, create_schema: CreateSchema, context: RelationalContextType) -> ResponseSchema: + def create( + self, create_schema: CreateSchema, context: RelationalContextType + ) -> ResponseSchema: create_schema_dict = create_schema.model_dump() db_model_instance = self._orm_model(**create_schema_dict) context.add(db_model_instance) context.flush() return self._response_schema.model_validate(db_model_instance) - - def _find(self, find_schema: FindSchema, context: 
RelationalContextType) -> ORMModel | None: + def _find( + self, find_schema: FindSchema, context: RelationalContextType + ) -> ORMModel | None: find_schema_dict = find_schema.model_dump() stmt = select(self._orm_model).filter_by(**find_schema_dict) db_model_instance = context.execute(stmt).scalars().one_or_none() return db_model_instance - - def find(self, find_schema: FindSchema, context: RelationalContextType) -> ResponseSchema | None: + def find( + self, find_schema: FindSchema, context: RelationalContextType + ) -> ResponseSchema | None: db_model_instance = self._find(find_schema, context) - response = self._response_schema.model_validate(db_model_instance) if db_model_instance else None + response = ( + self._response_schema.model_validate(db_model_instance) + if db_model_instance + else None + ) return response - def find_or_create( - self, - find_schema: FindSchema, - create_schema: CreateSchema, - context: RelationalContextType + self, + find_schema: FindSchema, + create_schema: CreateSchema, + context: RelationalContextType, ) -> ResponseSchema: db_model_instance = self._find(find_schema, context) @@ -59,18 +69,19 @@ def find_or_create( db_model_instance = self.create(create_schema, context) return self._response_schema.model_validate(db_model_instance) - def update( - self, - find_schema: FindSchema, - update_schema: UpdateSchema, - fields_to_update_if_none: set[str] | None, - context: RelationalContextType + self, + find_schema: FindSchema, + update_schema: UpdateSchema, + fields_to_update_if_none: set[str] | None, + context: RelationalContextType, ) -> ResponseSchema: db_model_instance = self._find(find_schema, context) update_schema_dict = update_schema.model_dump() - fields_to_update_if_none = fields_to_update_if_none if fields_to_update_if_none else set() + fields_to_update_if_none = ( + fields_to_update_if_none if fields_to_update_if_none else set() + ) for key, value in update_schema_dict.items(): if value is not None or key in 
fields_to_update_if_none: @@ -81,10 +92,12 @@ def update( return self._response_schema.model_validate(db_model_instance) - - def delete(self, find_schema: FindSchema, context: RelationalContextType) -> ResponseSchema | None: + def delete( + self, find_schema: FindSchema, context: RelationalContextType + ) -> ResponseSchema | None: db_model_instance = self._find(find_schema, context) - if not db_model_instance: return None + if not db_model_instance: + return None context.delete(db_model_instance) context.flush() return self._response_schema.model_validate(db_model_instance) diff --git a/internal/repository/relational/file/dataset.py b/internal/repository/relational/file/dataset.py index 9d59b2df..8b7898e7 100644 --- a/internal/repository/relational/file/dataset.py +++ b/internal/repository/relational/file/dataset.py @@ -1,12 +1,19 @@ from sqlalchemy import select from sqlalchemy.orm import joinedload -from internal.infrastructure.data_storage.relational.context import RelationalContextType +from internal.infrastructure.data_storage.relational.context import ( + RelationalContextType, +) from internal.infrastructure.data_storage.relational.model.file import DatasetORM from internal.repository.relational import CRUD -from internal.dto.repository.file import (DatasetCreateSchema, DatasetUpdateSchema, - DatasetFindSchema, DatasetResponseSchema, FileMetadataResponseSchema, - DatasetNotFoundException) +from internal.dto.repository.file import ( + DatasetCreateSchema, + DatasetUpdateSchema, + DatasetFindSchema, + DatasetResponseSchema, + FileMetadataResponseSchema, + DatasetNotFoundException, +) class DatasetRepository( @@ -15,28 +22,33 @@ class DatasetRepository( DatasetCreateSchema, DatasetUpdateSchema, DatasetFindSchema, - DatasetResponseSchema + DatasetResponseSchema, ] ): def __init__(self): super().__init__(orm_model=DatasetORM, response_schema=DatasetResponseSchema) - def find_with_file_metadata( - self, - dataset_info: DatasetFindSchema, - context: 
RelationalContextType, + self, + dataset_info: DatasetFindSchema, + context: RelationalContextType, ) -> tuple[DatasetResponseSchema, FileMetadataResponseSchema]: dataset_find_dict = dataset_info.model_dump() - stmt = select(DatasetORM).options(joinedload(DatasetORM.file_metadata)).filter_by(**dataset_find_dict) + stmt = ( + select(DatasetORM) + .options(joinedload(DatasetORM.file_metadata)) + .filter_by(**dataset_find_dict) + ) dataset_orm_instance = context.execute(stmt).scalars().one_or_none() if not dataset_orm_instance: raise DatasetNotFoundException() dataset_response = DatasetResponseSchema.model_validate(dataset_orm_instance) - file_metadata_response = FileMetadataResponseSchema.model_validate(dataset_orm_instance.file_metadata) + file_metadata_response = FileMetadataResponseSchema.model_validate( + dataset_orm_instance.file_metadata + ) return dataset_response, file_metadata_response diff --git a/internal/repository/relational/file/file_metadata.py b/internal/repository/relational/file/file_metadata.py index daaac6c1..84c87538 100644 --- a/internal/repository/relational/file/file_metadata.py +++ b/internal/repository/relational/file/file_metadata.py @@ -1,5 +1,9 @@ -from internal.dto.repository.file import (FileMetadataCreateSchema, FileMetadataUpdateSchema, - FileMetadataFindSchema, FileMetadataResponseSchema) +from internal.dto.repository.file import ( + FileMetadataCreateSchema, + FileMetadataUpdateSchema, + FileMetadataFindSchema, + FileMetadataResponseSchema, +) from internal.infrastructure.data_storage.relational.model.file import FileMetadataORM from internal.repository.relational import CRUD @@ -10,9 +14,11 @@ class FileMetaDataRepository( FileMetadataCreateSchema, FileMetadataUpdateSchema, FileMetadataFindSchema, - FileMetadataResponseSchema + FileMetadataResponseSchema, ] ): def __init__(self): - super().__init__(orm_model=FileMetadataORM, response_schema=FileMetadataResponseSchema) + super().__init__( + orm_model=FileMetadataORM, 
response_schema=FileMetadataResponseSchema + ) diff --git a/internal/repository/relational/task/task.py b/internal/repository/relational/task/task.py index 58e23c7d..1b85b0c5 100644 --- a/internal/repository/relational/task/task.py +++ b/internal/repository/relational/task/task.py @@ -1,15 +1,16 @@ from internal.infrastructure.data_storage.relational.model.task import TaskORM from internal.repository.relational import CRUD -from internal.dto.repository.task import TaskCreateSchema, TaskUpdateSchema, TaskFindSchema, TaskResponseSchema +from internal.dto.repository.task import ( + TaskCreateSchema, + TaskUpdateSchema, + TaskFindSchema, + TaskResponseSchema, +) class TaskRepository( CRUD[ - TaskORM, - TaskCreateSchema, - TaskUpdateSchema, - TaskFindSchema, - TaskResponseSchema + TaskORM, TaskCreateSchema, TaskUpdateSchema, TaskFindSchema, TaskResponseSchema ] ): diff --git a/internal/repository/relational/user/__init__.py b/internal/repository/relational/user/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/internal/rest/http/common/__init__.py b/internal/rest/http/common/__init__.py index 98d09641..c1a9991e 100644 --- a/internal/rest/http/common/__init__.py +++ b/internal/rest/http/common/__init__.py @@ -1,6 +1,7 @@ from fastapi import APIRouter from internal.rest.http.common.ping import router as ping_router + router = APIRouter(prefix="/common", tags=["common"]) router.include_router(ping_router) diff --git a/internal/rest/http/common/ping.py b/internal/rest/http/common/ping.py index 855a012f..e708e6ba 100644 --- a/internal/rest/http/common/ping.py +++ b/internal/rest/http/common/ping.py @@ -4,6 +4,7 @@ router = APIRouter() + @router.get("/ping") def ping() -> Literal["Pong!"]: return "Pong!" 
diff --git a/internal/rest/http/di.py b/internal/rest/http/di.py index 63bb7368..dd37ca9c 100644 --- a/internal/rest/http/di.py +++ b/internal/rest/http/di.py @@ -1,22 +1,25 @@ from fastapi import Depends -from internal.infrastructure.data_storage.relational.postgres.context import (get_postgres_context_maker, - get_postgres_context_maker_without_pool) +from internal.infrastructure.data_storage.relational.postgres.context import ( + get_postgres_context_maker, + get_postgres_context_maker_without_pool, +) from internal.repository.flat import FileRepository -from internal.repository.relational.file import FileMetaDataRepository, DatasetRepository +from internal.repository.relational.file import ( + FileMetaDataRepository, + DatasetRepository, +) from internal.repository.relational.task import TaskRepository from internal.uow import UnitOfWork -def get_unit_of_work( - context_maker = Depends(get_postgres_context_maker) -) -> UnitOfWork: +def get_unit_of_work(context_maker=Depends(get_postgres_context_maker)) -> UnitOfWork: return UnitOfWork(context_maker) def get_unit_of_work_without_pool( - context_maker = Depends(get_postgres_context_maker_without_pool) + context_maker=Depends(get_postgres_context_maker_without_pool), ) -> UnitOfWork: return UnitOfWork(context_maker) diff --git a/internal/rest/http/exception.py b/internal/rest/http/exception.py index 3227be54..c7d27824 100644 --- a/internal/rest/http/exception.py +++ b/internal/rest/http/exception.py @@ -1,19 +1,25 @@ from fastapi import FastAPI, Request, HTTPException -from internal.usecase.file.exception import IncorrectFileFormatException, DatasetNotFoundException, \ - FileMetadataNotFoundException, FailedReadFileException +from internal.usecase.file.exception import ( + IncorrectFileFormatException, + DatasetNotFoundException, + FileMetadataNotFoundException, + FailedReadFileException, +) from internal.usecase.task.exception import TaskNotFoundException + def add_exception_handlers(app: FastAPI): 
@app.exception_handler(IncorrectFileFormatException) - def incorrect_file_format_exception(request: Request, exc: IncorrectFileFormatException): + def incorrect_file_format_exception( + request: Request, exc: IncorrectFileFormatException + ): raise HTTPException( status_code=400, detail=str(exc), ) - @app.exception_handler(DatasetNotFoundException) def dataset_not_found_exception(request: Request, exc: DatasetNotFoundException): raise HTTPException( @@ -21,15 +27,15 @@ def dataset_not_found_exception(request: Request, exc: DatasetNotFoundException) detail=str(exc), ) - @app.exception_handler(FileMetadataNotFoundException) - def file_metadata_not_found_exception(request: Request, exc: FileMetadataNotFoundException): + def file_metadata_not_found_exception( + request: Request, exc: FileMetadataNotFoundException + ): raise HTTPException( status_code=404, detail=str(exc), ) - @app.exception_handler(TaskNotFoundException) def file_metadata_not_found_exception(request: Request, exc: TaskNotFoundException): raise HTTPException( @@ -37,7 +43,6 @@ def file_metadata_not_found_exception(request: Request, exc: TaskNotFoundExcepti detail=str(exc), ) - @app.exception_handler(FailedReadFileException) def failed_read_file_exception(request: Request, exc: FailedReadFileException): raise HTTPException( diff --git a/internal/rest/http/file/di.py b/internal/rest/http/file/di.py index 2cdd0645..99911789 100644 --- a/internal/rest/http/file/di.py +++ b/internal/rest/http/file/di.py @@ -1,6 +1,11 @@ from fastapi import Depends -from internal.rest.http.di import get_unit_of_work, get_file_repo, get_file_metadata_repo, get_dataset_repo +from internal.rest.http.di import ( + get_unit_of_work, + get_file_repo, + get_file_metadata_repo, + get_dataset_repo, +) from internal.uow import UnitOfWork from internal.usecase.file import SaveFile, SaveDataset, CheckContentType from internal.usecase.file.retrieve_dataset import RetrieveDataset @@ -10,9 +15,9 @@ def get_save_file_use_case( - 
unit_of_work: UnitOfWork = Depends(get_unit_of_work), - file_repo: FileRepo = Depends(get_file_repo), - file_metadata_repo: FileMetadataRepo = Depends(get_file_metadata_repo), + unit_of_work: UnitOfWork = Depends(get_unit_of_work), + file_repo: FileRepo = Depends(get_file_repo), + file_metadata_repo: FileMetadataRepo = Depends(get_file_metadata_repo), ) -> SaveFile: return SaveFile( unit_of_work=unit_of_work, @@ -22,8 +27,8 @@ def get_save_file_use_case( def get_save_dataset_use_case( - unit_of_work: UnitOfWork = Depends(get_unit_of_work), - dataset_repo: SaveDatasetRepo = Depends(get_dataset_repo), + unit_of_work: UnitOfWork = Depends(get_unit_of_work), + dataset_repo: SaveDatasetRepo = Depends(get_dataset_repo), ) -> SaveDataset: return SaveDataset( unit_of_work=unit_of_work, @@ -36,8 +41,8 @@ def get_check_content_type_use_case() -> CheckContentType: def get_retrieve_dataset_use_case( - unit_of_work: UnitOfWork = Depends(get_unit_of_work), - dataset_repo: RetrieveDatasetRepo = Depends(get_dataset_repo), + unit_of_work: UnitOfWork = Depends(get_unit_of_work), + dataset_repo: RetrieveDatasetRepo = Depends(get_dataset_repo), ) -> RetrieveDataset: return RetrieveDataset( unit_of_work=unit_of_work, diff --git a/internal/rest/http/file/retrieve_dataset.py b/internal/rest/http/file/retrieve_dataset.py index 3500facf..69b340ec 100644 --- a/internal/rest/http/file/retrieve_dataset.py +++ b/internal/rest/http/file/retrieve_dataset.py @@ -18,9 +18,8 @@ class ResponseSchema(BaseModel): @router.post("/dataset/{dataset_id}", response_model=ResponseSchema) def retrieve_dataset( - dataset_id: UUID, - - retrieve_dataset_use_case: RetrieveDataset = Depends(get_retrieve_dataset_use_case) + dataset_id: UUID, + retrieve_dataset_use_case: RetrieveDataset = Depends(get_retrieve_dataset_use_case), ) -> ResponseSchema: dataset = retrieve_dataset_use_case(dataset_id=dataset_id) diff --git a/internal/rest/http/file/upload_csv_dataset.py b/internal/rest/http/file/upload_csv_dataset.py 
index 4cf979bf..7ef11c1d 100644 --- a/internal/rest/http/file/upload_csv_dataset.py +++ b/internal/rest/http/file/upload_csv_dataset.py @@ -3,20 +3,24 @@ from fastapi import Form, UploadFile, Depends, APIRouter -from internal.rest.http.file.di import get_save_file_use_case, get_save_dataset_use_case, get_check_content_type_use_case +from internal.rest.http.file.di import ( + get_save_file_use_case, + get_save_dataset_use_case, + get_check_content_type_use_case, +) from internal.usecase.file import SaveFile, SaveDataset, CheckContentType router = APIRouter() + @router.post("/csv") async def upload_csv_dataset( file: UploadFile, separator: Annotated[str, Form()], # ?separator="," header: Annotated[list[int], Form()], # ?header=0?header=1?header=2, - check_content_type: CheckContentType = Depends(get_check_content_type_use_case), save_file: SaveFile = Depends(get_save_file_use_case), - save_dataset: SaveDataset = Depends(get_save_dataset_use_case) + save_dataset: SaveDataset = Depends(get_save_dataset_use_case), ) -> UUID: check_content_type(upload_file=file) diff --git a/internal/rest/http/task/di.py b/internal/rest/http/task/di.py index 2a8b95a4..0138f6ae 100644 --- a/internal/rest/http/task/di.py +++ b/internal/rest/http/task/di.py @@ -4,8 +4,10 @@ from internal.rest.http.di import get_unit_of_work, get_task_repo, get_dataset_repo from internal.usecase.task import RetrieveTask, SetTask from internal.usecase.task.retrieve_task import TaskRepo as RetrieveTaskRepo -from internal.usecase.task.set_task import (TaskRepo as SetTaskRepo, - DatasetRepo as SetDatasetRepo) +from internal.usecase.task.set_task import ( + TaskRepo as SetTaskRepo, + DatasetRepo as SetDatasetRepo, +) from internal.worker.celery import ProfilingTaskWorker @@ -14,8 +16,8 @@ def get_profiling_task_worker() -> ProfilingTaskWorker: def get_retrieve_task_use_case( - unit_of_work: UnitOfWork = Depends(get_unit_of_work), - task_repo: RetrieveTaskRepo = Depends(get_task_repo), + unit_of_work: UnitOfWork = 
Depends(get_unit_of_work), + task_repo: RetrieveTaskRepo = Depends(get_task_repo), ) -> RetrieveTask: return RetrieveTask( @@ -25,10 +27,10 @@ def get_retrieve_task_use_case( def get_set_task_use_case( - unit_of_work: UnitOfWork = Depends(get_unit_of_work), - task_repo: SetTaskRepo = Depends(get_task_repo), - dataset_repo: SetDatasetRepo = Depends(get_dataset_repo), - profiling_task_worker: ProfilingTaskWorker = Depends(get_profiling_task_worker), + unit_of_work: UnitOfWork = Depends(get_unit_of_work), + task_repo: SetTaskRepo = Depends(get_task_repo), + dataset_repo: SetDatasetRepo = Depends(get_dataset_repo), + profiling_task_worker: ProfilingTaskWorker = Depends(get_profiling_task_worker), ) -> SetTask: return SetTask( diff --git a/internal/rest/http/task/retrieve_task.py b/internal/rest/http/task/retrieve_task.py index 4ebb5d2a..4e03d1dc 100644 --- a/internal/rest/http/task/retrieve_task.py +++ b/internal/rest/http/task/retrieve_task.py @@ -3,7 +3,12 @@ from fastapi import APIRouter, Depends from pydantic import BaseModel -from internal.domain.task.value_objects import TaskStatus, OneOfTaskConfig, OneOfTaskResult, TaskFailureReason +from internal.domain.task.value_objects import ( + TaskStatus, + OneOfTaskConfig, + OneOfTaskResult, + TaskFailureReason, +) from internal.rest.http.task.di import get_retrieve_task_use_case from internal.usecase.task import RetrieveTask @@ -22,8 +27,8 @@ class ResponseSchema(BaseModel): @router.get("/{task_id}") def retrieve_task( - task_id: UUID, - retrieve_task_use_case: RetrieveTask = Depends(get_retrieve_task_use_case) + task_id: UUID, + retrieve_task_use_case: RetrieveTask = Depends(get_retrieve_task_use_case), ) -> ResponseSchema: task = retrieve_task_use_case(task_id=task_id) diff --git a/internal/rest/http/task/set_task.py b/internal/rest/http/task/set_task.py index 1f68f1a2..0f61fe45 100644 --- a/internal/rest/http/task/set_task.py +++ b/internal/rest/http/task/set_task.py @@ -8,12 +8,12 @@ router = APIRouter() + 
@router.post("/set") def set_task( dataset_id: UUID, config: OneOfTaskConfig, - - set_task_use_case: SetTask = Depends(get_set_task_use_case) + set_task_use_case: SetTask = Depends(get_set_task_use_case), ) -> UUID: task_id = set_task_use_case( diff --git a/internal/usecase/file/__init__.py b/internal/usecase/file/__init__.py index 5707d624..11232990 100644 --- a/internal/usecase/file/__init__.py +++ b/internal/usecase/file/__init__.py @@ -1,4 +1,7 @@ from internal.usecase.file.check_content_type import CheckContentType from internal.usecase.file.save_dataset import SaveDataset from internal.usecase.file.save_file import SaveFile, SaveFileUseCaseResult -from internal.usecase.file.retrieve_dataset import RetrieveDataset, RetrieveDatasetUseCaseResult +from internal.usecase.file.retrieve_dataset import ( + RetrieveDataset, + RetrieveDatasetUseCaseResult, +) diff --git a/internal/usecase/file/check_content_type.py b/internal/usecase/file/check_content_type.py index 7443e5a5..0b43ebc9 100644 --- a/internal/usecase/file/check_content_type.py +++ b/internal/usecase/file/check_content_type.py @@ -10,5 +10,7 @@ class File(Protocol): class CheckContentType: def __call__(self, *, upload_file: File) -> None: - if upload_file.content_type != "text/csv": # TODO: replace with actual validation + if ( + upload_file.content_type != "text/csv" + ): # TODO: replace with actual validation raise IncorrectFileFormatException("File is not CSV") diff --git a/internal/usecase/file/retrieve_dataset.py b/internal/usecase/file/retrieve_dataset.py index 44799871..b252864c 100644 --- a/internal/usecase/file/retrieve_dataset.py +++ b/internal/usecase/file/retrieve_dataset.py @@ -9,7 +9,9 @@ class DatasetRepo(Protocol): - def find(self, dataset_info: DatasetFindSchema, context: DataStorageContext) -> DatasetResponseSchema | None: ... + def find( + self, dataset_info: DatasetFindSchema, context: DataStorageContext + ) -> DatasetResponseSchema | None: ... 
class RetrieveDatasetUseCaseResult(BaseModel): @@ -18,12 +20,13 @@ class RetrieveDatasetUseCaseResult(BaseModel): separator: str header: list[int] + class RetrieveDataset: def __init__( - self, - unit_of_work: UnitOfWork, - dataset_repo: DatasetRepo, + self, + unit_of_work: UnitOfWork, + dataset_repo: DatasetRepo, ): self.unit_of_work = unit_of_work diff --git a/internal/usecase/file/save_dataset.py b/internal/usecase/file/save_dataset.py index 363912cc..476e57fc 100644 --- a/internal/usecase/file/save_dataset.py +++ b/internal/usecase/file/save_dataset.py @@ -7,7 +7,9 @@ class DatasetRepo(Protocol): - def create(self, dataset_info: DatasetCreateSchema, context: DataStorageContext) -> DatasetResponseSchema: ... + def create( + self, dataset_info: DatasetCreateSchema, context: DataStorageContext + ) -> DatasetResponseSchema: ... class SaveDataset: @@ -17,17 +19,15 @@ def __init__(self, unit_of_work: UnitOfWork, dataset_repo: DatasetRepo): self.dataset_repo = dataset_repo def __call__( - self, - *, - file_id: UUID, - separator: str, - header: list[int], + self, + *, + file_id: UUID, + separator: str, + header: list[int], ) -> UUID: dataset_create_schema = DatasetCreateSchema( - file_id=file_id, - separator=separator, - header=header + file_id=file_id, separator=separator, header=header ) with self.unit_of_work as context: diff --git a/internal/usecase/file/save_file.py b/internal/usecase/file/save_file.py index 144ae670..a1f94b90 100644 --- a/internal/usecase/file/save_file.py +++ b/internal/usecase/file/save_file.py @@ -4,8 +4,16 @@ from pydantic import BaseModel from internal.domain.file import File as FileEntity -from internal.dto.repository.file import FileCreateSchema, FileResponseSchema, File, FailedFileReadingException -from internal.dto.repository.file import FileMetadataCreateSchema, FileMetadataResponseSchema +from internal.dto.repository.file import ( + FileCreateSchema, + FileResponseSchema, + File, + FailedFileReadingException, +) +from 
internal.dto.repository.file import ( + FileMetadataCreateSchema, + FileMetadataResponseSchema, +) from internal.uow import DataStorageContext, UnitOfWork from internal.usecase.file.exception import FailedReadFileException @@ -13,19 +21,14 @@ class FileRepo(Protocol): async def create( - self, - file: File, - file_info: FileCreateSchema, - context: DataStorageContext + self, file: File, file_info: FileCreateSchema, context: DataStorageContext ) -> FileResponseSchema: ... class FileMetadataRepo(Protocol): def create( - self, - file_metadata: FileMetadataCreateSchema, - context: DataStorageContext + self, file_metadata: FileMetadataCreateSchema, context: DataStorageContext ) -> FileMetadataResponseSchema: ... @@ -41,10 +44,10 @@ class SaveFileUseCaseResult(BaseModel): class SaveFile: def __init__( - self, - unit_of_work: UnitOfWork, - file_repo: FileRepo, - file_metadata_repo: FileMetadataRepo + self, + unit_of_work: UnitOfWork, + file_repo: FileRepo, + file_metadata_repo: FileMetadataRepo, ): self.unit_of_work = unit_of_work @@ -58,12 +61,14 @@ async def __call__(self, *, upload_file: File) -> SaveFileUseCaseResult: file_metadata_create_schema = FileMetadataCreateSchema( file_name=file.name_as_uuid, original_file_name=upload_file.filename, - mime_type=upload_file.content_type + mime_type=upload_file.content_type, ) with self.unit_of_work as context: try: - response = self.file_metadata_repo.create(file_metadata_create_schema, context) + response = self.file_metadata_repo.create( + file_metadata_create_schema, context + ) await self.file_repo.create(upload_file, create_file_schema, context) except FailedFileReadingException as e: raise FailedReadFileException(str(e)) @@ -73,5 +78,5 @@ async def __call__(self, *, upload_file: File) -> SaveFileUseCaseResult: original_file_name=response.original_file_name, mime_type=response.mime_type, created_at=response.created_at, - updated_at=response.updated_at + updated_at=response.updated_at, ) diff --git 
a/internal/usecase/task/profile_task.py b/internal/usecase/task/profile_task.py index afab30f1..e260d4a2 100644 --- a/internal/usecase/task/profile_task.py +++ b/internal/usecase/task/profile_task.py @@ -3,39 +3,46 @@ from internal.domain.task.entities import match_task_by_primitive_name from internal.domain.task.value_objects import OneOfTaskResult, OneOfTaskConfig -from internal.dto.repository.file import DatasetFindSchema, DatasetResponseSchema, FileMetadataResponseSchema +from internal.dto.repository.file import ( + DatasetFindSchema, + DatasetResponseSchema, + FileMetadataResponseSchema, +) from internal.dto.repository.file import CSVFileFindSchema, CSVFileResponseSchema -from internal.dto.repository.file import DatasetNotFoundException, FileMetadataNotFoundException -from internal.usecase.file.exception import DatasetNotFoundException as DatasetNotFoundUseCaseException -from internal.usecase.file.exception import FileMetadataNotFoundException as FileMetadataNotFoundUseCaseException +from internal.dto.repository.file import ( + DatasetNotFoundException, + FileMetadataNotFoundException, +) +from internal.usecase.file.exception import ( + DatasetNotFoundException as DatasetNotFoundUseCaseException, +) +from internal.usecase.file.exception import ( + FileMetadataNotFoundException as FileMetadataNotFoundUseCaseException, +) from internal.uow import UnitOfWork, DataStorageContext class DatasetRepo(Protocol): def find_with_file_metadata( - self, - dataset_info: DatasetFindSchema, - context: DataStorageContext + self, dataset_info: DatasetFindSchema, context: DataStorageContext ) -> tuple[DatasetResponseSchema, FileMetadataResponseSchema]: ... class FileRepo(Protocol): def find( - self, - file_info: CSVFileFindSchema, - context: DataStorageContext + self, file_info: CSVFileFindSchema, context: DataStorageContext ) -> CSVFileResponseSchema: ... 
class ProfileTask: def __init__( - self, - unit_of_work: UnitOfWork, - file_repo: FileRepo, - dataset_repo: DatasetRepo, + self, + unit_of_work: UnitOfWork, + file_repo: FileRepo, + dataset_repo: DatasetRepo, ): self.unit_of_work = unit_of_work self.file_repo = file_repo @@ -46,8 +53,7 @@ def __call__(self, *, dataset_id: UUID, config: OneOfTaskConfig) -> OneOfTaskRes with self.unit_of_work as context: try: dataset, file_metadata = self.dataset_repo.find_with_file_metadata( - DatasetFindSchema(id=dataset_id), - context + DatasetFindSchema(id=dataset_id), context ) df = self.file_repo.find( @@ -64,5 +70,5 @@ def __call__(self, *, dataset_id: UUID, config: OneOfTaskConfig) -> OneOfTaskRes raise FileMetadataNotFoundUseCaseException() task = match_task_by_primitive_name(primitive_name=config.primitive_name) - result = task.execute(table=df, task_config=config) # type: ignore + result = task.execute(table=df, task_config=config) # type: ignore return result diff --git a/internal/usecase/task/retrieve_task.py b/internal/usecase/task/retrieve_task.py index 30bf71b0..3317efd8 100644 --- a/internal/usecase/task/retrieve_task.py +++ b/internal/usecase/task/retrieve_task.py @@ -4,7 +4,12 @@ from pydantic import BaseModel -from internal.domain.task.value_objects import TaskStatus, OneOfTaskConfig, OneOfTaskResult, TaskFailureReason +from internal.domain.task.value_objects import ( + TaskStatus, + OneOfTaskConfig, + OneOfTaskResult, + TaskFailureReason, +) from internal.dto.repository.task import TaskResponseSchema, TaskFindSchema from internal.uow import DataStorageContext, UnitOfWork from internal.usecase.task.exception import TaskNotFoundException @@ -12,7 +17,9 @@ class TaskRepo(Protocol): - def find(self, task_info: TaskFindSchema, context: DataStorageContext) -> TaskResponseSchema | None: ... + def find( + self, task_info: TaskFindSchema, context: DataStorageContext + ) -> TaskResponseSchema | None: ... 
class RetrieveTaskUseCaseResult(BaseModel): @@ -55,5 +62,5 @@ def __call__(self, *, task_id: UUID) -> RetrieveTaskUseCaseResult: failure_reason=task.failure_reason, traceback=task.traceback, created_at=task.created_at, - updated_at=task.updated_at + updated_at=task.updated_at, ) diff --git a/internal/usecase/task/set_task.py b/internal/usecase/task/set_task.py index 171d688d..93f6cc19 100644 --- a/internal/usecase/task/set_task.py +++ b/internal/usecase/task/set_task.py @@ -11,12 +11,16 @@ class DatasetRepo(Protocol): - def find(self, dataset_info: DatasetFindSchema, context: DataStorageContext) -> DatasetResponseSchema | None: ... + def find( + self, dataset_info: DatasetFindSchema, context: DataStorageContext + ) -> DatasetResponseSchema | None: ... class TaskRepo(Protocol): - def create(self, task_info: TaskCreateSchema, context: DataStorageContext) -> TaskResponseSchema: ... + def create( + self, task_info: TaskCreateSchema, context: DataStorageContext + ) -> TaskResponseSchema: ... class ProfilingTaskWorker(Protocol): @@ -27,11 +31,11 @@ def set(self, task_info: ProfilingTaskCreateSchema) -> None: ... 
class SetTask: def __init__( - self, - unit_of_work: UnitOfWork, - dataset_repo: DatasetRepo, - task_repo: TaskRepo, - profiling_task_worker: ProfilingTaskWorker + self, + unit_of_work: UnitOfWork, + dataset_repo: DatasetRepo, + task_repo: TaskRepo, + profiling_task_worker: ProfilingTaskWorker, ): self.unit_of_work = unit_of_work @@ -40,17 +44,17 @@ def __init__( self.profiling_task_worker = profiling_task_worker def __call__( - self, - *, - dataset_id: UUID, - config: OneOfTaskConfig, + self, + *, + dataset_id: UUID, + config: OneOfTaskConfig, ) -> UUID: dataset_find_schema = DatasetFindSchema(id=dataset_id) task_create_schema = TaskCreateSchema( status=TaskStatus.CREATED, config=config.model_dump(exclude_unset=True), - dataset_id=dataset_id + dataset_id=dataset_id, ) with self.unit_of_work as context: @@ -60,9 +64,7 @@ def __call__( task = self.task_repo.create(task_create_schema, context) profiling_task_create_schema = ProfilingTaskCreateSchema( - task_id=task.id, - dataset_id=dataset_id, - config=config + task_id=task.id, dataset_id=dataset_id, config=config ) self.profiling_task_worker.set(profiling_task_create_schema) diff --git a/internal/usecase/task/update_task_info.py b/internal/usecase/task/update_task_info.py index f461782c..e8e9e10a 100644 --- a/internal/usecase/task/update_task_info.py +++ b/internal/usecase/task/update_task_info.py @@ -4,45 +4,49 @@ from internal.uow import DataStorageContext, UnitOfWork from internal.domain.task.value_objects import TaskStatus, OneOfTaskResult -from internal.dto.repository.task import TaskUpdateSchema, TaskResponseSchema, TaskFindSchema +from internal.dto.repository.task import ( + TaskUpdateSchema, + TaskResponseSchema, + TaskFindSchema, +) from internal.dto.repository.task.task import TaskNotFoundException -from internal.usecase.task.exception import TaskNotFoundException as TaskNotFoundUseCaseException +from internal.usecase.task.exception import ( + TaskNotFoundException as TaskNotFoundUseCaseException, +) class 
TaskRepo(Protocol): def update( - self, - find_schema: TaskFindSchema, - update_schema: TaskUpdateSchema, - fields_to_update_if_none: set[str] | None, - context: DataStorageContext, + self, + find_schema: TaskFindSchema, + update_schema: TaskUpdateSchema, + fields_to_update_if_none: set[str] | None, + context: DataStorageContext, ) -> TaskResponseSchema: ... class UpdateTaskInfo: def __init__( - self, - unit_of_work: UnitOfWork, - task_repo: TaskRepo, + self, + unit_of_work: UnitOfWork, + task_repo: TaskRepo, ): self.unit_of_work = unit_of_work self.task_repo = task_repo - def __call__( - self, - *, - task_id: UUID, - fields_to_update_if_none: set[str] | None = None, - - task_status: TaskStatus | None = None, - result: OneOfTaskResult | None = None, - raised_exception_name: str | None = None, - failure_reason: str | None = None, - traceback: str | None = None, + self, + *, + task_id: UUID, + fields_to_update_if_none: set[str] | None = None, + task_status: TaskStatus | None = None, + result: OneOfTaskResult | None = None, + raised_exception_name: str | None = None, + failure_reason: str | None = None, + traceback: str | None = None, ) -> None: task_find_schema = TaskFindSchema(id=task_id) @@ -54,9 +58,10 @@ def __call__( traceback=traceback, ) # type: ignore - with self.unit_of_work as context: try: - self.task_repo.update(task_find_schema, data_to_update, fields_to_update_if_none, context) + self.task_repo.update( + task_find_schema, data_to_update, fields_to_update_if_none, context + ) except TaskNotFoundException: raise TaskNotFoundUseCaseException() From 82cce9602fc5b730d098bf6152013ae94ea3a96b Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 04:47:56 +0300 Subject: [PATCH 116/153] chore: change old module path in all makefile commands --- Makefile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index 8c111693..17257111 100644 --- a/Makefile +++ b/Makefile @@ -52,13 +52,13 @@ init: ## Run all formatters 
and linters in project lint: - poetry run ruff check tests app \ - & poetry run ruff format --check tests app \ - & poetry run black --check tests app + poetry run ruff check tests internal \ + & poetry run ruff format --check tests internal \ + & poetry run black --check tests internal ## Reformat code format: - poetry run ruff format tests app & poetry run ruff check --fix & poetry run black tests app + poetry run ruff format tests app & poetry run ruff check --fix & poetry run black tests internal ## Run all tests in project From 27c4184c5110d205d1c0d7becea034ba68c3f467 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 04:59:28 +0300 Subject: [PATCH 117/153] fix(tests): remove typo in exception name and expande uow mocks --- tests/usecase/test_save_file.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/tests/usecase/test_save_file.py b/tests/usecase/test_save_file.py index b3260230..cc5372f6 100644 --- a/tests/usecase/test_save_file.py +++ b/tests/usecase/test_save_file.py @@ -11,6 +11,7 @@ FileMetadataCreateSchema, FileCreateSchema, FileResponseSchema, + FailedFileReadingException, ) from internal.uow import UnitOfWork, DataStorageContext from internal.usecase.file.exception import FailedReadFileException @@ -28,7 +29,14 @@ def unit_of_work_mock(mocker: MockerFixture) -> UnitOfWork: mock.__enter__.return_value = mocker.Mock( return_value=mocker.Mock(), spec=DataStorageContext ) - mock.__exit__.return_value = mocker.Mock(return_value=None) + mock.__exit__.return_value = None + + def exit_side_effect(exc_type, exc_value, traceback) -> bool: + if exc_type: + raise exc_value + return False + + mock.__exit__.side_effect = exit_side_effect return mock @@ -150,7 +158,9 @@ async def test_save_file_failed_read_file_exception( file_entity_mock: FileEntity, ) -> None: # Prepare the mock to raise the exception - file_repo_mock.create.side_effect = FailedReadFileException("File reading failed") + 
file_repo_mock.create.side_effect = FailedFileReadingException( + "File reading failed" + ) upload_file_mock = mocker.Mock(spec=File) upload_file_mock.filename = "example.txt" From 0574ed846bd644629dec7612219011942d803d8c Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 16:21:59 +0300 Subject: [PATCH 118/153] feat(infra): add flat file storage context and update use cases --- .../data_storage/flat/__init__.py | 4 +++ .../data_storage/flat/context.py | 32 +++++++++++++++++++ internal/repository/flat/file.py | 17 +++++----- internal/usecase/file/save_file.py | 29 +++++++++++------ tests/usecase/test_save_file.py | 13 ++++---- 5 files changed, 70 insertions(+), 25 deletions(-) create mode 100644 internal/infrastructure/data_storage/flat/__init__.py create mode 100644 internal/infrastructure/data_storage/flat/context.py diff --git a/internal/infrastructure/data_storage/flat/__init__.py b/internal/infrastructure/data_storage/flat/__init__.py new file mode 100644 index 00000000..ea5c9fb4 --- /dev/null +++ b/internal/infrastructure/data_storage/flat/__init__.py @@ -0,0 +1,4 @@ +from internal.infrastructure.data_storage.flat.context import ( + FlatContextMaker, + FlatContext, +) diff --git a/internal/infrastructure/data_storage/flat/context.py b/internal/infrastructure/data_storage/flat/context.py new file mode 100644 index 00000000..83bf5769 --- /dev/null +++ b/internal/infrastructure/data_storage/flat/context.py @@ -0,0 +1,32 @@ +from pathlib import Path + +from internal.infrastructure.data_storage import settings + + +class FlatContext: + + def __init__(self, upload_directory_path: Path): + self._upload_directory_path = upload_directory_path + + @property + def upload_directory_path(self) -> Path: + return self._upload_directory_path + + # This context implementation does not support transactions + def flush(self) -> None: + pass + + def rollback(self) -> None: + pass + + def commit(self) -> None: + pass + + def close(self) -> None: + pass # TODO: implement 
flat context closing. + + +class FlatContextMaker: + + def __call__(self): + return FlatContext(settings.uploaded_files_dir_path) diff --git a/internal/repository/flat/file.py b/internal/repository/flat/file.py index 63be7a6b..115377a8 100644 --- a/internal/repository/flat/file.py +++ b/internal/repository/flat/file.py @@ -8,26 +8,25 @@ CSVFileFindSchema, CSVFileResponseSchema, ) -from internal.infrastructure.data_storage import settings from internal.dto.repository.file import File, FileCreateSchema, FileResponseSchema -from internal.uow import DataStorageContext +from internal.infrastructure.data_storage.flat import FlatContext CHUNK_SIZE = 1024 class FileRepository: - - def __init__(self): - self.files_dir_path = settings.uploaded_files_dir_path + # The current repository implementation does not support transactions. async def create( self, file: File, file_info: FileCreateSchema, - context: DataStorageContext, # The current repository implementation does not support transactions. + context: FlatContext, ) -> FileResponseSchema: - path_to_file = Path.joinpath(self.files_dir_path, str(file_info.file_name)) + path_to_file = Path.joinpath( + context.upload_directory_path, str(file_info.file_name) + ) try: async with aiofiles.open(path_to_file, "wb") as out_file: # !!! while content := await file.read(CHUNK_SIZE): @@ -38,10 +37,10 @@ async def create( def find( self, file_info: CSVFileFindSchema, - context: DataStorageContext, # The current repository implementation does not support transactions. 
+ context: FlatContext, ) -> CSVFileResponseSchema: - path_to_file = Path(self.files_dir_path, str(file_info.file_name)) + path_to_file = Path(context.upload_directory_path, str(file_info.file_name)) return pd.read_csv( path_to_file, diff --git a/internal/usecase/file/save_file.py b/internal/usecase/file/save_file.py index a1f94b90..0f6d2f5c 100644 --- a/internal/usecase/file/save_file.py +++ b/internal/usecase/file/save_file.py @@ -45,12 +45,17 @@ class SaveFile: def __init__( self, - unit_of_work: UnitOfWork, + # It is assumed that the two repositories will be associated with different repositories. + # In order to support different repositories, different UoW will be needed. + # If both of your repositories are linked to the same repository, use only one of the UoW. + file_info_unit_of_work: UnitOfWork, + file_unit_of_work: UnitOfWork, file_repo: FileRepo, file_metadata_repo: FileMetadataRepo, ): - self.unit_of_work = unit_of_work + self.file_info_unit_of_work = file_info_unit_of_work + self.file_unit_of_work = file_unit_of_work self.file_repo = file_repo self.file_metadata_repo = file_metadata_repo @@ -64,14 +69,18 @@ async def __call__(self, *, upload_file: File) -> SaveFileUseCaseResult: mime_type=upload_file.content_type, ) - with self.unit_of_work as context: - try: - response = self.file_metadata_repo.create( - file_metadata_create_schema, context - ) - await self.file_repo.create(upload_file, create_file_schema, context) - except FailedFileReadingException as e: - raise FailedReadFileException(str(e)) + with self.file_unit_of_work as file_context: + with self.file_info_unit_of_work as file_info_context: + try: + response = self.file_metadata_repo.create( + file_metadata_create_schema, file_info_context + ) + await self.file_repo.create( + upload_file, create_file_schema, file_context + ) + except FailedFileReadingException as e: + raise FailedReadFileException(str(e)) + return SaveFileUseCaseResult( id=response.id, file_name=response.file_name, diff --git 
a/tests/usecase/test_save_file.py b/tests/usecase/test_save_file.py index cc5372f6..ded91fc7 100644 --- a/tests/usecase/test_save_file.py +++ b/tests/usecase/test_save_file.py @@ -72,7 +72,8 @@ def save_file( "internal.usecase.file.save_file.FileEntity", return_value=file_entity_mock ) return SaveFile( - unit_of_work=unit_of_work_mock, + file_unit_of_work=unit_of_work_mock, + file_info_unit_of_work=unit_of_work_mock, file_repo=file_repo_mock, file_metadata_repo=file_metadata_repo_mock, ) @@ -133,9 +134,9 @@ async def test_save_file( unit_of_work_mock.__enter__.return_value, ) - # Check that UnitOfWork was used correctly - unit_of_work_mock.__enter__.assert_called_once() - unit_of_work_mock.__exit__.assert_called_once() + # Verify that UnitOfWork was used correctly + assert unit_of_work_mock.__enter__.call_count == 2 + assert unit_of_work_mock.__exit__.call_count == 2 # Verify that the result matches the expected SaveFileUseCaseResult assert result == SaveFileUseCaseResult( @@ -175,5 +176,5 @@ async def test_save_file_failed_read_file_exception( file_repo_mock.create.assert_called_once() # Verify that UnitOfWork was used correctly - unit_of_work_mock.__enter__.assert_called_once() - unit_of_work_mock.__exit__.assert_called_once() + assert unit_of_work_mock.__enter__.call_count == 2 + assert unit_of_work_mock.__exit__.call_count == 2 From 30324139953b526279fc46d24787c92fb6e453a0 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 16:25:33 +0300 Subject: [PATCH 119/153] feat(rest): update di after add new flat context --- internal/rest/http/di.py | 6 ++++++ internal/rest/http/file/di.py | 5 ++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/internal/rest/http/di.py b/internal/rest/http/di.py index dd37ca9c..5c48e268 100644 --- a/internal/rest/http/di.py +++ b/internal/rest/http/di.py @@ -1,5 +1,6 @@ from fastapi import Depends +from internal.infrastructure.data_storage.flat import FlatContextMaker from 
internal.infrastructure.data_storage.relational.postgres.context import ( get_postgres_context_maker, get_postgres_context_maker_without_pool, @@ -25,6 +26,11 @@ def get_unit_of_work_without_pool( return UnitOfWork(context_maker) +def get_flat_unit_of_work(context_maker: FlatContextMaker = Depends()) -> UnitOfWork: + + return UnitOfWork(context_maker) + + def get_file_repo() -> FileRepository: return FileRepository() diff --git a/internal/rest/http/file/di.py b/internal/rest/http/file/di.py index 99911789..6f25b502 100644 --- a/internal/rest/http/file/di.py +++ b/internal/rest/http/file/di.py @@ -5,6 +5,7 @@ get_file_repo, get_file_metadata_repo, get_dataset_repo, + get_flat_unit_of_work, ) from internal.uow import UnitOfWork from internal.usecase.file import SaveFile, SaveDataset, CheckContentType @@ -16,11 +17,13 @@ def get_save_file_use_case( unit_of_work: UnitOfWork = Depends(get_unit_of_work), + flat_unit_of_work: UnitOfWork = Depends(get_flat_unit_of_work), file_repo: FileRepo = Depends(get_file_repo), file_metadata_repo: FileMetadataRepo = Depends(get_file_metadata_repo), ) -> SaveFile: return SaveFile( - unit_of_work=unit_of_work, + file_info_unit_of_work=unit_of_work, + file_unit_of_work=flat_unit_of_work, file_repo=file_repo, file_metadata_repo=file_metadata_repo, ) From 0f652936be51e3a138932b8e2bbcdc89b0a977f9 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 17:51:17 +0300 Subject: [PATCH 120/153] fix: typo in file metadata repository name --- internal/repository/relational/file/__init__.py | 2 +- internal/repository/relational/file/file_metadata.py | 2 +- internal/rest/http/di.py | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/internal/repository/relational/file/__init__.py b/internal/repository/relational/file/__init__.py index d839a5cc..ac06abdf 100644 --- a/internal/repository/relational/file/__init__.py +++ b/internal/repository/relational/file/__init__.py @@ -1,2 +1,2 @@ -from 
internal.repository.relational.file.file_metadata import FileMetaDataRepository +from internal.repository.relational.file.file_metadata import FileMetadataRepository from internal.repository.relational.file.dataset import DatasetRepository diff --git a/internal/repository/relational/file/file_metadata.py b/internal/repository/relational/file/file_metadata.py index 84c87538..81df318c 100644 --- a/internal/repository/relational/file/file_metadata.py +++ b/internal/repository/relational/file/file_metadata.py @@ -8,7 +8,7 @@ from internal.repository.relational import CRUD -class FileMetaDataRepository( +class FileMetadataRepository( CRUD[ FileMetadataORM, FileMetadataCreateSchema, diff --git a/internal/rest/http/di.py b/internal/rest/http/di.py index 5c48e268..4c59417a 100644 --- a/internal/rest/http/di.py +++ b/internal/rest/http/di.py @@ -7,7 +7,7 @@ ) from internal.repository.flat import FileRepository from internal.repository.relational.file import ( - FileMetaDataRepository, + FileMetadataRepository, DatasetRepository, ) from internal.repository.relational.task import TaskRepository @@ -35,8 +35,8 @@ def get_file_repo() -> FileRepository: return FileRepository() -def get_file_metadata_repo() -> FileMetaDataRepository: - return FileMetaDataRepository() +def get_file_metadata_repo() -> FileMetadataRepository: + return FileMetadataRepository() def get_dataset_repo() -> DatasetRepository: From 57e032014ab6bed834af18bf3ddc432ae82bd337 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 17:51:57 +0300 Subject: [PATCH 121/153] feat(tests): add tests for file repository --- tests/repository/flat/test_file.py | 89 ++++++++++++++++++++++++++++++ 1 file changed, 89 insertions(+) create mode 100644 tests/repository/flat/test_file.py diff --git a/tests/repository/flat/test_file.py b/tests/repository/flat/test_file.py new file mode 100644 index 00000000..573c8394 --- /dev/null +++ b/tests/repository/flat/test_file.py @@ -0,0 +1,89 @@ +from uuid import uuid4 + +import 
pytest +import aiofiles +import pandas as pd +from pytest_mock import MockFixture + +from internal.dto.repository.file import ( + FileCreateSchema, + File, + FailedFileReadingException, + CSVFileFindSchema, +) +from internal.infrastructure.data_storage.flat import FlatContext +from internal.repository.flat import FileRepository + + +@pytest.fixture +def mock_flat_context(tmp_path, mocker: MockFixture): + context = mocker.MagicMock(spec=FlatContext) + context.upload_directory_path = tmp_path + return context + + +@pytest.fixture +def file_repository(): + return FileRepository() + + +@pytest.mark.asyncio +async def test_create_file_success( + mocker: MockFixture, file_repository, mock_flat_context +): + file_name = uuid4() + file_content = b"Hello, World!" + file_info = FileCreateSchema(file_name=file_name) + + mock_file = mocker.AsyncMock(spec=File) + mock_file.read = mocker.AsyncMock( + side_effect=[file_content, b""] + ) # Читаем содержимое файла + + await file_repository.create(mock_file, file_info, mock_flat_context) + + created_file_path = mock_flat_context.upload_directory_path / str(file_name) + assert created_file_path.is_file() + + async with aiofiles.open(created_file_path, "rb") as f: + content = await f.read() + assert content == file_content + + +def test_find_file_success(file_repository, mock_flat_context): + file_name = uuid4() + file_content = "col1,col2\n1,2\n3,4" + file_path = mock_flat_context.upload_directory_path / file_name + + with open(file_path, "w") as f: + f.write(file_content) + + file_info = CSVFileFindSchema(file_name=file_name, separator=",", header=[0]) + + result = file_repository.find(file_info, mock_flat_context) + + expected_df = pd.DataFrame({"col1": [1, 3], "col2": [2, 4]}) + pd.testing.assert_frame_equal(result, expected_df) + + +@pytest.mark.asyncio +async def test_create_file_failure( + mocker: MockFixture, file_repository, mock_flat_context +): + file_name = uuid4() + file_info = FileCreateSchema(file_name=file_name) + + 
mock_file = mocker.AsyncMock(spec=File) + mock_file.read = mocker.AsyncMock(side_effect=Exception("Read error")) + + with pytest.raises( + FailedFileReadingException, match="The sent file could not be read." + ): + await file_repository.create(mock_file, file_info, mock_flat_context) + + +def test_find_file_failure(file_repository, mock_flat_context): + file_info = CSVFileFindSchema(file_name=uuid4(), separator=",", header=[0]) + + with pytest.raises(FileNotFoundError): + file_repository.find(file_info, mock_flat_context) From abbe5a2d3547d1818e01249f52b86b5dfb6bb56b Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 17:53:34 +0300 Subject: [PATCH 122/153] feat(tests): add tests for file metadata repository --- .../repository/postgres/test_file_metadata.py | 82 +++++++++++++++++++ 1 file changed, 82 insertions(+) create mode 100644 tests/repository/postgres/test_file_metadata.py diff --git a/tests/repository/postgres/test_file_metadata.py b/tests/repository/postgres/test_file_metadata.py new file mode 100644 index 00000000..91dfd13d --- /dev/null +++ b/tests/repository/postgres/test_file_metadata.py @@ -0,0 +1,82 @@ +import pytest +from uuid import uuid4 + +from internal.repository.relational.file import FileMetadataRepository +from internal.dto.repository.file import ( + FileMetadataCreateSchema, + FileMetadataFindSchema, + FileMetadataUpdateSchema, +) + + +@pytest.fixture +def repo(): + return FileMetadataRepository() + + +@pytest.fixture +def create_schema(): + return FileMetadataCreateSchema( + file_name=uuid4(), + original_file_name="text.csv", + mime_type="text/plain", + ) + + +@pytest.fixture +def update_schema(): + return FileMetadataUpdateSchema( + original_file_name="new_test.csv", + ) # type: ignore + + +class TestFileMetadataRepository: + def test_create(self, repo, create_schema, postgres_context): + response = repo.create(create_schema, postgres_context) + + assert response is not None + assert response.file_name == create_schema.file_name + 
assert response.original_file_name == create_schema.original_file_name + assert response.mime_type == create_schema.mime_type + + def test_create_and_find( + self, + repo, + create_schema, + postgres_context, + ): + empty_response = repo.find(FileMetadataFindSchema(id=uuid4()), postgres_context) + assert empty_response is None + + created_response = repo.create(create_schema, postgres_context) + response = repo.find( + FileMetadataFindSchema(id=created_response.id), postgres_context + ) + assert response is not None + assert response.file_name == create_schema.file_name + assert response.original_file_name == create_schema.original_file_name + assert response.mime_type == create_schema.mime_type + + def test_update(self, repo, create_schema, update_schema, postgres_context): + created_response = repo.create(create_schema, postgres_context) + find_schema = FileMetadataFindSchema(id=created_response.id) + + repo.update(find_schema, update_schema, None, postgres_context) + + response = repo.find(find_schema, postgres_context) + assert response is not None + assert response.file_name == create_schema.file_name + assert response.original_file_name == update_schema.original_file_name + assert response.mime_type == create_schema.mime_type + + def test_delete(self, repo, create_schema, postgres_context): + created_response = repo.create(create_schema, postgres_context) + find_schema = FileMetadataFindSchema(id=created_response.id) + + response = repo.find(find_schema, postgres_context) + assert response is not None + + repo.delete(find_schema, postgres_context) + + response = repo.find(find_schema, postgres_context) + assert response is None From d8bc2ee8c0a1dd5919d877b76249c6e86cd5921f Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 17:54:00 +0300 Subject: [PATCH 123/153] feat(tests): add tests for dataset repository --- tests/repository/postgres/test_dataset.py | 96 +++++++++++++++++++++++ 1 file changed, 96 insertions(+) create mode 100644 
tests/repository/postgres/test_dataset.py diff --git a/tests/repository/postgres/test_dataset.py b/tests/repository/postgres/test_dataset.py new file mode 100644 index 00000000..c9abd4a6 --- /dev/null +++ b/tests/repository/postgres/test_dataset.py @@ -0,0 +1,96 @@ +import pytest +from uuid import uuid4 + +from internal.repository.relational.file import ( + DatasetRepository, + FileMetadataRepository, +) +from internal.dto.repository.file import ( + DatasetCreateSchema, + DatasetFindSchema, + DatasetUpdateSchema, + FileMetadataCreateSchema, +) + + +@pytest.fixture +def file_create_schema(): + return FileMetadataCreateSchema( + file_name=uuid4(), + original_file_name="text.csv", + mime_type="text/plain", + ) + + +@pytest.fixture +def file_id(file_create_schema, postgres_context): + file_metadata_repo = FileMetadataRepository() + response = file_metadata_repo.create(file_create_schema, postgres_context) + return response.id + + +@pytest.fixture +def repo(): + return DatasetRepository() + + +@pytest.fixture +def create_schema(file_id): + return DatasetCreateSchema(file_id=file_id, separator=",", header=[0]) + + +@pytest.fixture +def update_schema(): + return DatasetUpdateSchema(separator=",", header=[1]) # type: ignore + + +class TestDatasetRepository: + def test_create(self, repo, create_schema, postgres_context): + response = repo.create(create_schema, postgres_context) + + assert response is not None + assert response.file_id == create_schema.file_id + assert response.separator == create_schema.separator + assert response.header == create_schema.header + + def test_create_and_find( + self, + repo, + create_schema, + postgres_context, + ): + empty_response = repo.find(DatasetFindSchema(id=uuid4()), postgres_context) + assert empty_response is None + + created_response = repo.create(create_schema, postgres_context) + response = repo.find( + DatasetFindSchema(id=created_response.id), postgres_context + ) + assert response is not None + assert response.file_id == 
create_schema.file_id + assert response.separator == create_schema.separator + assert response.header == create_schema.header + + def test_update(self, repo, create_schema, update_schema, postgres_context): + created_response = repo.create(create_schema, postgres_context) + find_schema = DatasetFindSchema(id=created_response.id) + + repo.update(find_schema, update_schema, None, postgres_context) + + response = repo.find(find_schema, postgres_context) + assert response is not None + assert response.file_id == create_schema.file_id + assert response.separator == update_schema.separator + assert response.header == update_schema.header + + def test_delete(self, repo, create_schema, postgres_context): + created_response = repo.create(create_schema, postgres_context) + find_schema = DatasetFindSchema(id=created_response.id) + + response = repo.find(find_schema, postgres_context) + assert response is not None + + repo.delete(find_schema, postgres_context) + + response = repo.find(find_schema, postgres_context) + assert response is None From c0b3c133f1249cc32c78b0d9885f89ecd1b3dec0 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 17:54:23 +0300 Subject: [PATCH 124/153] feat(tests): add tests for task repository --- tests/repository/postgres/test_task.py | 129 +++++++++++++++++++++++++ 1 file changed, 129 insertions(+) create mode 100644 tests/repository/postgres/test_task.py diff --git a/tests/repository/postgres/test_task.py b/tests/repository/postgres/test_task.py new file mode 100644 index 00000000..7c8d40aa --- /dev/null +++ b/tests/repository/postgres/test_task.py @@ -0,0 +1,129 @@ +import pytest +from uuid import uuid4 + +from internal.domain.task.value_objects import TaskStatus, FdTaskConfig, PrimitiveName +from internal.domain.task.value_objects.fd import FdAlgoName +from internal.dto.repository.task import ( + TaskCreateSchema, + TaskFindSchema, + TaskUpdateSchema, +) +from internal.dto.repository.file import ( + DatasetCreateSchema, + 
FileMetadataCreateSchema, +) +from internal.repository.relational.file import ( + DatasetRepository, + FileMetadataRepository, +) +from internal.repository.relational.task import TaskRepository + + +@pytest.fixture +def file_create_schema(): + return FileMetadataCreateSchema( + file_name=uuid4(), + original_file_name="text.csv", + mime_type="text/plain", + ) + + +@pytest.fixture +def file_id(file_create_schema, postgres_context): + file_metadata_repo = FileMetadataRepository() + response = file_metadata_repo.create(file_create_schema, postgres_context) + return response.id + + +@pytest.fixture +def dataset_create_schema(file_id): + return DatasetCreateSchema(file_id=file_id, separator=",", header=[0]) + + +@pytest.fixture +def dataset_id(dataset_create_schema, postgres_context): + dataset_repo = DatasetRepository() + response = dataset_repo.create(dataset_create_schema, postgres_context) + return response.id + + +@pytest.fixture +def get_config(): + return FdTaskConfig( + primitive_name=PrimitiveName.fd, + config={"algo_name": FdAlgoName.Aid, "is_null_equal_null": True}, + ) + + +@pytest.fixture +def repo(): + return TaskRepository() + + +@pytest.fixture +def create_schema(dataset_id, get_config): + return TaskCreateSchema( + dataset_id=dataset_id, status=TaskStatus.CREATED, config=get_config + ) + + +@pytest.fixture +def update_schema(): + return TaskUpdateSchema( + failure_reason="memory_limit_exceeded", status=TaskStatus.COMPLETED + ) # type: ignore + + +class TestDatasetRepository: + def test_create(self, repo, create_schema, postgres_context): + response = repo.create(create_schema, postgres_context) + + assert response is not None + assert response.dataset_id == create_schema.dataset_id + assert response.status == create_schema.status + assert response.config == create_schema.config + assert response.result is None + assert response.failure_reason is None + assert response.raised_exception_name is None + assert response.traceback is None + + def 
test_create_and_find( + self, + repo, + create_schema, + postgres_context, + ): + empty_response = repo.find(TaskFindSchema(id=uuid4()), postgres_context) + assert empty_response is None + + created_response = repo.create(create_schema, postgres_context) + response = repo.find(TaskFindSchema(id=created_response.id), postgres_context) + assert response is not None + assert response.dataset_id == create_schema.dataset_id + assert response.status == create_schema.status + assert response.config == create_schema.config + + def test_update(self, repo, create_schema, update_schema, postgres_context): + created_response = repo.create(create_schema, postgres_context) + find_schema = TaskFindSchema(id=created_response.id) + + repo.update(find_schema, update_schema, None, postgres_context) + + response = repo.find(find_schema, postgres_context) + assert response is not None + assert response.dataset_id == create_schema.dataset_id + assert response.status == update_schema.status + assert response.config == create_schema.config + assert response.failure_reason == update_schema.failure_reason + + def test_delete(self, repo, create_schema, postgres_context): + created_response = repo.create(create_schema, postgres_context) + find_schema = TaskFindSchema(id=created_response.id) + + response = repo.find(find_schema, postgres_context) + assert response is not None + + repo.delete(find_schema, postgres_context) + + response = repo.find(find_schema, postgres_context) + assert response is None From 315f6bb28c73c98c7e67e28ce8e4117970eb368d Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 17:59:48 +0300 Subject: [PATCH 125/153] fix(tests): add converter from UUID to string for file name --- tests/repository/flat/test_file.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/repository/flat/test_file.py b/tests/repository/flat/test_file.py index 573c8394..cf5f26c9 100644 --- a/tests/repository/flat/test_file.py +++ b/tests/repository/flat/test_file.py @@ 
-53,7 +53,7 @@ async def test_create_file_success( def test_find_file_success(file_repository, mock_flat_context): file_name = uuid4() file_content = "col1,col2\n1,2\n3,4" - file_path = mock_flat_context.upload_directory_path / file_name + file_path = mock_flat_context.upload_directory_path / str(file_name) with open(file_path, "w") as f: f.write(file_content) From facf649332e85fccfd6e2bef53bb00aaf490b671 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 18:02:34 +0300 Subject: [PATCH 126/153] chore(domain): move task status and task failure reason classes in separate module --- internal/domain/task/value_objects/__init__.py | 17 ++--------------- .../task/value_objects/task_failure_reason.py | 8 ++++++++ .../domain/task/value_objects/task_status.py | 8 ++++++++ 3 files changed, 18 insertions(+), 15 deletions(-) create mode 100644 internal/domain/task/value_objects/task_failure_reason.py create mode 100644 internal/domain/task/value_objects/task_status.py diff --git a/internal/domain/task/value_objects/__init__.py b/internal/domain/task/value_objects/__init__.py index 65656230..1824c19b 100644 --- a/internal/domain/task/value_objects/__init__.py +++ b/internal/domain/task/value_objects/__init__.py @@ -1,4 +1,3 @@ -from enum import StrEnum, auto from typing import Annotated, Union from pydantic import Field @@ -10,20 +9,8 @@ from internal.domain.task.value_objects.primitive_name import PrimitiveName - -class TaskStatus(StrEnum): - FAILED = auto() - CREATED = auto() - RUNNING = auto() - COMPLETED = auto() - - -class TaskFailureReason(StrEnum): - MEMORY_LIMIT_EXCEEDED = auto() - TIME_LIMIT_EXCEEDED = auto() - WORKER_KILLED_BY_SIGNAL = auto() - OTHER = auto() - +from internal.domain.task.value_objects.task_status import TaskStatus +from internal.domain.task.value_objects.task_failure_reason import TaskFailureReason OneOfTaskConfig = Annotated[ Union[ diff --git a/internal/domain/task/value_objects/task_failure_reason.py 
b/internal/domain/task/value_objects/task_failure_reason.py new file mode 100644 index 00000000..a106aeb9 --- /dev/null +++ b/internal/domain/task/value_objects/task_failure_reason.py @@ -0,0 +1,8 @@ +from enum import StrEnum, auto + + +class TaskFailureReason(StrEnum): + MEMORY_LIMIT_EXCEEDED = auto() + TIME_LIMIT_EXCEEDED = auto() + WORKER_KILLED_BY_SIGNAL = auto() + OTHER = auto() diff --git a/internal/domain/task/value_objects/task_status.py b/internal/domain/task/value_objects/task_status.py new file mode 100644 index 00000000..b1f3a884 --- /dev/null +++ b/internal/domain/task/value_objects/task_status.py @@ -0,0 +1,8 @@ +from enum import StrEnum, auto + + +class TaskStatus(StrEnum): + FAILED = auto() + CREATED = auto() + RUNNING = auto() + COMPLETED = auto() From 23e7c58a6097c574495e5212b6ee5458d9a903a8 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 18:08:16 +0300 Subject: [PATCH 127/153] fix(dto): remove unnecessary generic in update schemas --- internal/dto/repository/base_schema.py | 2 +- internal/dto/repository/file/dataset.py | 2 +- internal/dto/repository/file/file.py | 2 +- internal/dto/repository/file/file_metadata.py | 2 +- internal/dto/repository/task/task.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/internal/dto/repository/base_schema.py b/internal/dto/repository/base_schema.py index 26e90b0b..23f803b8 100644 --- a/internal/dto/repository/base_schema.py +++ b/internal/dto/repository/base_schema.py @@ -16,7 +16,7 @@ class BaseFindSchema[T](BaseSchema): id: T -class BaseUpdateSchema[T](BaseSchema, OptionalModel): +class BaseUpdateSchema(BaseSchema, OptionalModel): __non_optional_fields__ = { "id", } diff --git a/internal/dto/repository/file/dataset.py b/internal/dto/repository/file/dataset.py index 648c3307..3b1cc13a 100644 --- a/internal/dto/repository/file/dataset.py +++ b/internal/dto/repository/file/dataset.py @@ -25,7 +25,7 @@ class DatasetBaseSchema(BaseSchema): class
DatasetCreateSchema(DatasetBaseSchema, BaseCreateSchema): ... -class DatasetUpdateSchema(DatasetBaseSchema, BaseUpdateSchema[UUID]): ... +class DatasetUpdateSchema(DatasetBaseSchema, BaseUpdateSchema): ... class DatasetFindSchema(BaseFindSchema[UUID]): ... diff --git a/internal/dto/repository/file/file.py b/internal/dto/repository/file/file.py index d8f1ac29..3aa10eb2 100644 --- a/internal/dto/repository/file/file.py +++ b/internal/dto/repository/file/file.py @@ -31,7 +31,7 @@ class FileBaseSchema(BaseSchema): class FileCreateSchema(FileBaseSchema, BaseCreateSchema): ... -class FileUpdateSchema(FileBaseSchema, BaseUpdateSchema[UUID]): ... +class FileUpdateSchema(FileBaseSchema, BaseUpdateSchema): ... class FileFindSchema(FileBaseSchema, BaseSchema): ... # it's not a typo diff --git a/internal/dto/repository/file/file_metadata.py b/internal/dto/repository/file/file_metadata.py index 2314c8d1..7fe330fb 100644 --- a/internal/dto/repository/file/file_metadata.py +++ b/internal/dto/repository/file/file_metadata.py @@ -24,7 +24,7 @@ class FileMetadataBaseSchema(BaseSchema): class FileMetadataCreateSchema(FileMetadataBaseSchema, BaseCreateSchema): ... -class FileMetadataUpdateSchema(FileMetadataBaseSchema, BaseUpdateSchema[UUID]): ... +class FileMetadataUpdateSchema(FileMetadataBaseSchema, BaseUpdateSchema): ... class FileMetadataFindSchema(BaseFindSchema[UUID]): ... diff --git a/internal/dto/repository/task/task.py b/internal/dto/repository/task/task.py index d290c120..d77ac1de 100644 --- a/internal/dto/repository/task/task.py +++ b/internal/dto/repository/task/task.py @@ -30,7 +30,7 @@ class TaskBaseSchema(BaseSchema): class TaskCreateSchema(TaskBaseSchema, BaseCreateSchema): ... 
-class TaskUpdateSchema(TaskBaseSchema, BaseUpdateSchema[UUID]): +class TaskUpdateSchema(TaskBaseSchema, BaseUpdateSchema): status: TaskStatus | None result: OneOfTaskResult | None raised_exception_name: str | None From d6a3e5f3a085ec28744990ebf2e2beffb0a3cece Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 18:17:23 +0300 Subject: [PATCH 128/153] chore: add and fix documentation for repo and use case exceptions --- internal/dto/repository/file/dataset.py | 20 +++++++++++----- internal/dto/repository/file/file.py | 24 ++++++++++++++----- internal/dto/repository/file/file_metadata.py | 20 +++++++++++----- internal/dto/repository/task/task.py | 20 +++++++++++----- internal/usecase/file/exception.py | 6 ----- 5 files changed, 60 insertions(+), 30 deletions(-) diff --git a/internal/dto/repository/file/dataset.py b/internal/dto/repository/file/dataset.py index 3b1cc13a..a3bcab1a 100644 --- a/internal/dto/repository/file/dataset.py +++ b/internal/dto/repository/file/dataset.py @@ -9,12 +9,6 @@ ) -class DatasetNotFoundException(Exception): - - def __init__(self): - super().__init__("Dataset not found") - - class DatasetBaseSchema(BaseSchema): file_id: UUID separator: str @@ -32,3 +26,17 @@ class DatasetFindSchema(BaseFindSchema[UUID]): ... class DatasetResponseSchema(DatasetBaseSchema, BaseResponseSchema[UUID]): ... + + +class DatasetNotFoundException(Exception): + """ + Exception raised when a dataset is not found in some data storage. + + This exception may be thrown only by the repository. + """ + + def __init__(self): + """ + Initializes an instance of DatasetNotFoundException with a default message. 
+ """ + super().__init__("Dataset not found") diff --git a/internal/dto/repository/file/file.py b/internal/dto/repository/file/file.py index 3aa10eb2..4b1eb027 100644 --- a/internal/dto/repository/file/file.py +++ b/internal/dto/repository/file/file.py @@ -10,12 +10,6 @@ ) -class FailedFileReadingException(Exception): - - def __init__(self, message: str): - super().__init__(message) - - class File(Protocol): filename: str | None @@ -46,3 +40,21 @@ class CSVFileFindSchema(FileFindSchema): CSVFileResponseSchema = pd.DataFrame + + +class FailedFileReadingException(Exception): + """ + Exception raised when file reading fails. + + This exception can be used to provide specific error messages + related to file reading operations in repository. + """ + + def __init__(self, message: str): + """ + Initializes an instance of FailedFileReadingException with a specific error message. + + Args: + message(str): The error message to be reported. + """ + super().__init__(message) diff --git a/internal/dto/repository/file/file_metadata.py b/internal/dto/repository/file/file_metadata.py index 7fe330fb..3017d02d 100644 --- a/internal/dto/repository/file/file_metadata.py +++ b/internal/dto/repository/file/file_metadata.py @@ -9,12 +9,6 @@ ) -class FileMetadataNotFoundException(Exception): - - def __init__(self): - super().__init__("File metadata not found") - - class FileMetadataBaseSchema(BaseSchema): file_name: UUID original_file_name: str @@ -31,3 +25,17 @@ class FileMetadataFindSchema(BaseFindSchema[UUID]): ... class FileMetadataResponseSchema(FileMetadataBaseSchema, BaseResponseSchema[UUID]): ... + + +class FileMetadataNotFoundException(Exception): + """ + Exception raised when a file metadata is not found in some data storage. + + This exception may be thrown only by the repository. + """ + + def __init__(self): + """ + Initializes an instance of FileMetadataNotFoundException with a default message. 
+ """ + super().__init__("File metadata not found") diff --git a/internal/dto/repository/task/task.py b/internal/dto/repository/task/task.py index d77ac1de..5421e91b 100644 --- a/internal/dto/repository/task/task.py +++ b/internal/dto/repository/task/task.py @@ -15,12 +15,6 @@ ) -class TaskNotFoundException(Exception): - - def __init__(self): - super().__init__("Task not found") - - class TaskBaseSchema(BaseSchema): status: TaskStatus config: OneOfTaskConfig @@ -46,3 +40,17 @@ class TaskResponseSchema(TaskBaseSchema, BaseResponseSchema[UUID]): raised_exception_name: str | None = None failure_reason: TaskFailureReason | None = None traceback: str | None = None + + +class TaskNotFoundException(Exception): + """ + Exception raised when a task is not found in some data storage. + + This exception may be thrown only by the repository. + """ + + def __init__(self): + """ + Initializes an instance of TaskNotFoundException with a default message. + """ + super().__init__("Task not found") diff --git a/internal/usecase/file/exception.py b/internal/usecase/file/exception.py index 5fd8470c..805201e9 100644 --- a/internal/usecase/file/exception.py +++ b/internal/usecase/file/exception.py @@ -23,8 +23,6 @@ class DatasetNotFoundException(Exception): def __init__(self): """ Initializes an instance of DatasetNotFoundException without any specific message. - - The default message "Dataset not found" is used. """ super().__init__("Dataset not found") @@ -40,8 +38,6 @@ class FileMetadataNotFoundException(Exception): def __init__(self): """ Initializes an instance of FileMetadataNotFoundException with a default message. - - The default message "File metadata not found" is used to indicate the error. """ super().__init__("File metadata not found") @@ -49,8 +45,6 @@ def __init__(self): class FailedReadFileException(Exception): """ Exception raised when a file reading operation fails. - - This exception carries a specific error message detailing the cause of the failure. 
""" def __init__(self, message: str): From 1ee1d93d6e01ad68614137c5aa1c01fdd238f97b Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 18:21:10 +0300 Subject: [PATCH 129/153] chore(infra): add readme file for data storage module --- internal/infrastructure/data_storage/README.md | 16 ++++++++++++++++ .../data_storage/relational/README.md | 9 +++++++++ .../data_storage/relational/__init__.py | 3 +++ 3 files changed, 28 insertions(+) create mode 100644 internal/infrastructure/data_storage/README.md create mode 100644 internal/infrastructure/data_storage/relational/README.md diff --git a/internal/infrastructure/data_storage/README.md b/internal/infrastructure/data_storage/README.md new file mode 100644 index 00000000..025c43fe --- /dev/null +++ b/internal/infrastructure/data_storage/README.md @@ -0,0 +1,16 @@ +# data_storage module +This module is responsible for managing the configuration and settings related to data storage in the application. It serves as a central place for handling all database-related configurations and ensures that the rest of the application can access these configurations seamlessly. + +## Structure +### settings +`settings.py` contains all the settings for working with the data store. Through them you can get the URL to connect to the database, file paths, and so on. + +### relational +The `flat` module contains the logic for interacting with local file storages. [Read more.](flat/README.md) + +### relational +The `relational` module contains the logic for interacting with relational databases using SQLAlchemy. [Read more.](relational/README.md) + + +## Extensibility +If you need to add a new database or other data storage, simply create the appropriate module with the implementation in this module, and also write all the necessary settings in `settings.py`. 
diff --git a/internal/infrastructure/data_storage/relational/README.md b/internal/infrastructure/data_storage/relational/README.md new file mode 100644 index 00000000..28c41adb --- /dev/null +++ b/internal/infrastructure/data_storage/relational/README.md @@ -0,0 +1,9 @@ +# relational module +The relational module contains the tools for interacting with relational databases using SQLAlchemy. It is designed to be modular, allowing for easy extension to different database systems. Currently, the focus is on the PostgreSQL database, but this can be extended to other systems if needed. +## Structure +`model` - this submodule defines the database models used by the application. These models are mapped to relational database tables using SQLAlchemy ORM. + +`postgres` - this submodule contains tools for working with postgres, such as session provided by SQLAlchemy, migrations provided by alembic, and so on. + +## Extensibility +The `relational` module is designed to support different **relational** databases(supported by SQLAlchemy). If you need to integrate with a different database system, you can create a new submodule within relational (e.g., `MySQL` or `SQLite`) and implement the necessary logic for that database while maintaining the same structure as the Postgres submodule. 
diff --git a/internal/infrastructure/data_storage/relational/__init__.py b/internal/infrastructure/data_storage/relational/__init__.py index e69de29b..1e14d335 100644 --- a/internal/infrastructure/data_storage/relational/__init__.py +++ b/internal/infrastructure/data_storage/relational/__init__.py @@ -0,0 +1,3 @@ +from internal.infrastructure.data_storage.relational.context import ( + RelationalContextType, +) From 35e27a9616579a3e238f0738d4194010f51bebc0 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 18:22:58 +0300 Subject: [PATCH 130/153] chore(infra): correct typing --- internal/infrastructure/background_task/celery/task/di.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/infrastructure/background_task/celery/task/di.py b/internal/infrastructure/background_task/celery/task/di.py index 032c5abb..7ee6ce74 100644 --- a/internal/infrastructure/background_task/celery/task/di.py +++ b/internal/infrastructure/background_task/celery/task/di.py @@ -42,6 +42,6 @@ def get_profile_task_use_case(): return ProfileTask( unit_of_work=unit_of_work, - file_repo=file_repo, + file_repo=file_repo, # type: ignore dataset_repo=dataset_repo, # type: ignore ) From 2115922da7c01bf8b2df516afa61ee22236856f9 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 18:47:30 +0300 Subject: [PATCH 131/153] chore: update new directories path in Dockerfile --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 9fd4bab2..97ef2f30 100644 --- a/Dockerfile +++ b/Dockerfile @@ -78,7 +78,7 @@ EXPOSE ${APPLICATION_SERVER_PORT} USER 1001 # Run the uvicorn application server. 
-CMD exec uvicorn --workers 1 --host 0.0.0.0 --port $APPLICATION_SERVER_PORT app.main:app +CMD exec uvicorn --workers 1 --host 0.0.0.0 --port $APPLICATION_SERVER_PORT internal:app FROM server-setup-build-stage as install-dependencies-build-stage # install [tool.poetry.dependencies] @@ -93,4 +93,4 @@ FROM server-setup-build-stage as production-image COPY --chown=python_application:python_application --from=install-dependencies-build-stage /application_root/.venv /application_root/.venv # Copy application files -COPY --chown=python_application:python_application /app /application_root/app/ +COPY --chown=python_application:python_application /internal /application_root/internal/ From f4af48a42225f3ee5b066c75718454f5ec0ad9f1 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 Sep 2024 18:53:04 +0300 Subject: [PATCH 132/153] chore(infra): add missed imports to postgres env.py file --- .../data_storage/relational/postgres/migrations/env.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/internal/infrastructure/data_storage/relational/postgres/migrations/env.py b/internal/infrastructure/data_storage/relational/postgres/migrations/env.py index f0eb3337..7f723154 100644 --- a/internal/infrastructure/data_storage/relational/postgres/migrations/env.py +++ b/internal/infrastructure/data_storage/relational/postgres/migrations/env.py @@ -7,6 +7,15 @@ from internal.infrastructure.data_storage import settings from internal.infrastructure.data_storage.relational.model import ORMBaseModel +from internal.infrastructure.data_storage.relational.model.file.file_metadata import ( # noqa: F401 + FileMetadataORM, +) +from internal.infrastructure.data_storage.relational.model.file.dataset import ( # noqa: F401 + DatasetORM, +) +from internal.infrastructure.data_storage.relational.model.task.task import ( # noqa: F401 + TaskORM, +) # this is the Alembic Config object, which provides From eeb12b2647262b5e7a5ff43783bcfb37223f5df2 Mon Sep 17 00:00:00 2001 From: raf-nr Date: Sat, 21 
Sep 2024 19:04:36 +0300 Subject: [PATCH 133/153] chore: fix format errors --- internal/domain/common/__init__.py | 2 +- internal/domain/file/__init__.py | 2 +- internal/domain/task/__init__.py | 4 ++-- internal/domain/task/entities/afd/__init__.py | 2 +- internal/domain/task/entities/fd/__init__.py | 2 +- internal/domain/task/value_objects/__init__.py | 14 +++++++++----- internal/domain/task/value_objects/afd/__init__.py | 7 +++++-- internal/domain/task/value_objects/fd/__init__.py | 7 +++++-- internal/dto/repository/file/__init__.py | 11 +++++++---- internal/dto/repository/task/__init__.py | 2 +- internal/dto/worker/task/__init__.py | 4 +++- .../background_task/celery/task/__init__.py | 2 +- .../infrastructure/data_storage/flat/__init__.py | 2 +- .../data_storage/relational/__init__.py | 2 +- .../data_storage/relational/model/__init__.py | 2 +- .../data_storage/relational/model/file/__init__.py | 4 ++-- .../data_storage/relational/model/task/__init__.py | 4 +++- .../data_storage/relational/postgres/__init__.py | 2 +- internal/repository/flat/__init__.py | 2 +- internal/repository/relational/__init__.py | 2 +- internal/repository/relational/file/__init__.py | 6 ++++-- internal/repository/relational/task/__init__.py | 2 +- internal/rest/http/exception.py | 10 ++++------ internal/uow/__init__.py | 2 +- internal/usecase/file/__init__.py | 11 +++++++---- internal/usecase/task/__init__.py | 4 ++-- internal/worker/celery/__init__.py | 4 +++- 27 files changed, 70 insertions(+), 48 deletions(-) diff --git a/internal/domain/common/__init__.py b/internal/domain/common/__init__.py index f24fd5b4..3e6148bf 100644 --- a/internal/domain/common/__init__.py +++ b/internal/domain/common/__init__.py @@ -1 +1 @@ -from internal.domain.common.optional_model import OptionalModel +from internal.domain.common.optional_model import OptionalModel # noqa: F401 diff --git a/internal/domain/file/__init__.py b/internal/domain/file/__init__.py index 4a4654ca..094eafcd 100644 --- 
a/internal/domain/file/__init__.py +++ b/internal/domain/file/__init__.py @@ -1 +1 @@ -from internal.domain.file.file import File +from internal.domain.file.file import File # noqa: F401 diff --git a/internal/domain/task/__init__.py b/internal/domain/task/__init__.py index 4a830b0e..8a37c343 100644 --- a/internal/domain/task/__init__.py +++ b/internal/domain/task/__init__.py @@ -1,2 +1,2 @@ -from internal.domain.task.entities import FdTask -from internal.domain.task.entities import AfdTask +from internal.domain.task.entities import FdTask # noqa: F401 +from internal.domain.task.entities import AfdTask # noqa: F401 diff --git a/internal/domain/task/entities/afd/__init__.py b/internal/domain/task/entities/afd/__init__.py index 5ea101b1..238d1719 100644 --- a/internal/domain/task/entities/afd/__init__.py +++ b/internal/domain/task/entities/afd/__init__.py @@ -1 +1 @@ -from internal.domain.task.entities.afd.afd_task import AfdTask +from internal.domain.task.entities.afd.afd_task import AfdTask # noqa: F401 diff --git a/internal/domain/task/entities/fd/__init__.py b/internal/domain/task/entities/fd/__init__.py index 220ae96a..b0341b9d 100644 --- a/internal/domain/task/entities/fd/__init__.py +++ b/internal/domain/task/entities/fd/__init__.py @@ -1 +1 @@ -from internal.domain.task.entities.fd.fd_task import FdTask +from internal.domain.task.entities.fd.fd_task import FdTask # noqa: F401 diff --git a/internal/domain/task/value_objects/__init__.py b/internal/domain/task/value_objects/__init__.py index 1824c19b..eb1decfb 100644 --- a/internal/domain/task/value_objects/__init__.py +++ b/internal/domain/task/value_objects/__init__.py @@ -4,13 +4,17 @@ from internal.domain.task.value_objects.afd import AfdTaskConfig, AfdTaskResult from internal.domain.task.value_objects.fd import FdTaskConfig, FdTaskResult -from internal.domain.task.value_objects.config import TaskConfig -from internal.domain.task.value_objects.result import TaskResult +from 
internal.domain.task.value_objects.config import TaskConfig # noqa: F401 +from internal.domain.task.value_objects.result import TaskResult # noqa: F401 -from internal.domain.task.value_objects.primitive_name import PrimitiveName +from internal.domain.task.value_objects.primitive_name import ( # noqa: F401 + PrimitiveName, +) -from internal.domain.task.value_objects.task_status import TaskStatus -from internal.domain.task.value_objects.task_failure_reason import TaskFailureReason +from internal.domain.task.value_objects.task_status import TaskStatus # noqa: F401 +from internal.domain.task.value_objects.task_failure_reason import ( # noqa: F401 + TaskFailureReason, +) OneOfTaskConfig = Annotated[ Union[ diff --git a/internal/domain/task/value_objects/afd/__init__.py b/internal/domain/task/value_objects/afd/__init__.py index f8705e15..dc5cb92f 100644 --- a/internal/domain/task/value_objects/afd/__init__.py +++ b/internal/domain/task/value_objects/afd/__init__.py @@ -3,8 +3,11 @@ from pydantic import BaseModel from internal.domain.task.value_objects.afd.algo_config import OneOfAfdConfig -from internal.domain.task.value_objects.afd.algo_result import AfdAlgoResult, FdModel -from internal.domain.task.value_objects.afd.algo_name import AfdAlgoName +from internal.domain.task.value_objects.afd.algo_result import ( # noqa: F401 + AfdAlgoResult, + FdModel, +) +from internal.domain.task.value_objects.afd.algo_name import AfdAlgoName # noqa: F401 from internal.domain.task.value_objects.primitive_name import PrimitiveName diff --git a/internal/domain/task/value_objects/fd/__init__.py b/internal/domain/task/value_objects/fd/__init__.py index 8c3a3f94..1fd31502 100644 --- a/internal/domain/task/value_objects/fd/__init__.py +++ b/internal/domain/task/value_objects/fd/__init__.py @@ -4,8 +4,11 @@ from internal.domain.task.value_objects.primitive_name import PrimitiveName from internal.domain.task.value_objects.fd.algo_config import OneOfFdAlgoConfig -from 
internal.domain.task.value_objects.fd.algo_result import FdAlgoResult, FdModel -from internal.domain.task.value_objects.fd.algo_name import FdAlgoName +from internal.domain.task.value_objects.fd.algo_result import ( # noqa: F401 + FdAlgoResult, + FdModel, +) +from internal.domain.task.value_objects.fd.algo_name import FdAlgoName # noqa: F401 class BaseFdTaskModel(BaseModel): diff --git a/internal/dto/repository/file/__init__.py b/internal/dto/repository/file/__init__.py index 2051fe4f..b1046036 100644 --- a/internal/dto/repository/file/__init__.py +++ b/internal/dto/repository/file/__init__.py @@ -1,4 +1,4 @@ -from internal.dto.repository.file.file import ( +from internal.dto.repository.file.file import ( # noqa: F401 File, FileResponseSchema, FileFindSchema, @@ -6,18 +6,21 @@ FileUpdateSchema, FailedFileReadingException, ) -from internal.dto.repository.file.file_metadata import ( +from internal.dto.repository.file.file_metadata import ( # noqa: F401 FileMetadataResponseSchema, FileMetadataCreateSchema, FileMetadataFindSchema, FileMetadataUpdateSchema, FileMetadataNotFoundException, ) -from internal.dto.repository.file.dataset import ( +from internal.dto.repository.file.dataset import ( # noqa: F401 DatasetResponseSchema, DatasetCreateSchema, DatasetUpdateSchema, DatasetFindSchema, DatasetNotFoundException, ) -from internal.dto.repository.file.file import CSVFileFindSchema, CSVFileResponseSchema +from internal.dto.repository.file.file import ( # noqa: F401 + CSVFileFindSchema, + CSVFileResponseSchema, +) diff --git a/internal/dto/repository/task/__init__.py b/internal/dto/repository/task/__init__.py index 8849e001..4b68e30b 100644 --- a/internal/dto/repository/task/__init__.py +++ b/internal/dto/repository/task/__init__.py @@ -1,4 +1,4 @@ -from internal.dto.repository.task.task import ( +from internal.dto.repository.task.task import ( # noqa: F401 TaskCreateSchema, TaskUpdateSchema, TaskFindSchema, diff --git a/internal/dto/worker/task/__init__.py 
b/internal/dto/worker/task/__init__.py index cd1f8b7d..d6ad75d9 100644 --- a/internal/dto/worker/task/__init__.py +++ b/internal/dto/worker/task/__init__.py @@ -1 +1,3 @@ -from internal.dto.worker.task.profiling_task import ProfilingTaskCreateSchema +from internal.dto.worker.task.profiling_task import ( # noqa: F401 + ProfilingTaskCreateSchema, +) diff --git a/internal/infrastructure/background_task/celery/task/__init__.py b/internal/infrastructure/background_task/celery/task/__init__.py index 8b11a477..55ba9e76 100644 --- a/internal/infrastructure/background_task/celery/task/__init__.py +++ b/internal/infrastructure/background_task/celery/task/__init__.py @@ -1,3 +1,3 @@ -from internal.infrastructure.background_task.celery.task.profiling_task import ( +from internal.infrastructure.background_task.celery.task.profiling_task import ( # noqa: F401 profiling_task, ) diff --git a/internal/infrastructure/data_storage/flat/__init__.py b/internal/infrastructure/data_storage/flat/__init__.py index ea5c9fb4..e39991c9 100644 --- a/internal/infrastructure/data_storage/flat/__init__.py +++ b/internal/infrastructure/data_storage/flat/__init__.py @@ -1,4 +1,4 @@ -from internal.infrastructure.data_storage.flat.context import ( +from internal.infrastructure.data_storage.flat.context import ( # noqa: F401 FlatContextMaker, FlatContext, ) diff --git a/internal/infrastructure/data_storage/relational/__init__.py b/internal/infrastructure/data_storage/relational/__init__.py index 1e14d335..bb68759c 100644 --- a/internal/infrastructure/data_storage/relational/__init__.py +++ b/internal/infrastructure/data_storage/relational/__init__.py @@ -1,3 +1,3 @@ -from internal.infrastructure.data_storage.relational.context import ( +from internal.infrastructure.data_storage.relational.context import ( # noqa: F401 RelationalContextType, ) diff --git a/internal/infrastructure/data_storage/relational/model/__init__.py b/internal/infrastructure/data_storage/relational/model/__init__.py index 
3405ade1..9cdbc4e1 100644 --- a/internal/infrastructure/data_storage/relational/model/__init__.py +++ b/internal/infrastructure/data_storage/relational/model/__init__.py @@ -1,3 +1,3 @@ -from internal.infrastructure.data_storage.relational.model.orm_base_model import ( +from internal.infrastructure.data_storage.relational.model.orm_base_model import ( # noqa: F401 ORMBaseModel, ) diff --git a/internal/infrastructure/data_storage/relational/model/file/__init__.py b/internal/infrastructure/data_storage/relational/model/file/__init__.py index 29b551f3..549f4598 100644 --- a/internal/infrastructure/data_storage/relational/model/file/__init__.py +++ b/internal/infrastructure/data_storage/relational/model/file/__init__.py @@ -1,6 +1,6 @@ -from internal.infrastructure.data_storage.relational.model.file.file_metadata import ( +from internal.infrastructure.data_storage.relational.model.file.file_metadata import ( # noqa: F401 FileMetadataORM, ) -from internal.infrastructure.data_storage.relational.model.file.dataset import ( +from internal.infrastructure.data_storage.relational.model.file.dataset import ( # noqa: F401 DatasetORM, ) diff --git a/internal/infrastructure/data_storage/relational/model/task/__init__.py b/internal/infrastructure/data_storage/relational/model/task/__init__.py index ccfd5b77..85377667 100644 --- a/internal/infrastructure/data_storage/relational/model/task/__init__.py +++ b/internal/infrastructure/data_storage/relational/model/task/__init__.py @@ -1 +1,3 @@ -from internal.infrastructure.data_storage.relational.model.task.task import TaskORM +from internal.infrastructure.data_storage.relational.model.task.task import ( # noqa: F401 + TaskORM, +) diff --git a/internal/infrastructure/data_storage/relational/postgres/__init__.py b/internal/infrastructure/data_storage/relational/postgres/__init__.py index 12f99f61..67881a86 100644 --- a/internal/infrastructure/data_storage/relational/postgres/__init__.py +++ 
b/internal/infrastructure/data_storage/relational/postgres/__init__.py @@ -1,4 +1,4 @@ -from internal.infrastructure.data_storage.relational.postgres.context import ( +from internal.infrastructure.data_storage.relational.postgres.context import ( # noqa: F401 get_postgres_context_maker, get_postgres_context_maker_without_pool, ) diff --git a/internal/repository/flat/__init__.py b/internal/repository/flat/__init__.py index a58daf3c..a6b33771 100644 --- a/internal/repository/flat/__init__.py +++ b/internal/repository/flat/__init__.py @@ -1 +1 @@ -from internal.repository.flat.file import FileRepository +from internal.repository.flat.file import FileRepository # noqa: F401 diff --git a/internal/repository/relational/__init__.py b/internal/repository/relational/__init__.py index f7d802e5..3352cffe 100644 --- a/internal/repository/relational/__init__.py +++ b/internal/repository/relational/__init__.py @@ -1 +1 @@ -from internal.repository.relational.crud import CRUD +from internal.repository.relational.crud import CRUD # noqa: F401 diff --git a/internal/repository/relational/file/__init__.py b/internal/repository/relational/file/__init__.py index ac06abdf..76523981 100644 --- a/internal/repository/relational/file/__init__.py +++ b/internal/repository/relational/file/__init__.py @@ -1,2 +1,4 @@ -from internal.repository.relational.file.file_metadata import FileMetadataRepository -from internal.repository.relational.file.dataset import DatasetRepository +from internal.repository.relational.file.file_metadata import ( # noqa: F401 + FileMetadataRepository, +) +from internal.repository.relational.file.dataset import DatasetRepository # noqa: F401 diff --git a/internal/repository/relational/task/__init__.py b/internal/repository/relational/task/__init__.py index 3381c2a1..233ca550 100644 --- a/internal/repository/relational/task/__init__.py +++ b/internal/repository/relational/task/__init__.py @@ -1 +1 @@ -from internal.repository.relational.task.task import TaskRepository 
+from internal.repository.relational.task.task import TaskRepository # noqa: F401 diff --git a/internal/rest/http/exception.py b/internal/rest/http/exception.py index c7d27824..13aa1339 100644 --- a/internal/rest/http/exception.py +++ b/internal/rest/http/exception.py @@ -21,30 +21,28 @@ def incorrect_file_format_exception( ) @app.exception_handler(DatasetNotFoundException) - def dataset_not_found_exception(request: Request, exc: DatasetNotFoundException): + def dataset_not_found_exception(_, exc: DatasetNotFoundException): raise HTTPException( status_code=404, detail=str(exc), ) @app.exception_handler(FileMetadataNotFoundException) - def file_metadata_not_found_exception( - request: Request, exc: FileMetadataNotFoundException - ): + def file_metadata_not_found_exception(_, exc: FileMetadataNotFoundException): raise HTTPException( status_code=404, detail=str(exc), ) @app.exception_handler(TaskNotFoundException) - def file_metadata_not_found_exception(request: Request, exc: TaskNotFoundException): + def file_metadata_not_found_exception(_, exc: TaskNotFoundException): # noqa: F811 raise HTTPException( status_code=404, detail=str(exc), ) @app.exception_handler(FailedReadFileException) - def failed_read_file_exception(request: Request, exc: FailedReadFileException): + def failed_read_file_exception(_, exc: FailedReadFileException): raise HTTPException( status_code=404, detail=str(exc), diff --git a/internal/uow/__init__.py b/internal/uow/__init__.py index aedf3b4a..0ea6a6a2 100644 --- a/internal/uow/__init__.py +++ b/internal/uow/__init__.py @@ -1 +1 @@ -from internal.uow.uow import DataStorageContext, UnitOfWork +from internal.uow.uow import DataStorageContext, UnitOfWork # noqa: F401 diff --git a/internal/usecase/file/__init__.py b/internal/usecase/file/__init__.py index 11232990..e4020f93 100644 --- a/internal/usecase/file/__init__.py +++ b/internal/usecase/file/__init__.py @@ -1,7 +1,10 @@ -from internal.usecase.file.check_content_type import CheckContentType 
-from internal.usecase.file.save_dataset import SaveDataset -from internal.usecase.file.save_file import SaveFile, SaveFileUseCaseResult -from internal.usecase.file.retrieve_dataset import ( +from internal.usecase.file.check_content_type import CheckContentType # noqa: F401 +from internal.usecase.file.save_dataset import SaveDataset # noqa: F401 +from internal.usecase.file.save_file import ( # noqa: F401 + SaveFile, + SaveFileUseCaseResult, +) +from internal.usecase.file.retrieve_dataset import ( # noqa: F401 RetrieveDataset, RetrieveDatasetUseCaseResult, ) diff --git a/internal/usecase/task/__init__.py b/internal/usecase/task/__init__.py index f9fd7c3f..5c6536ca 100644 --- a/internal/usecase/task/__init__.py +++ b/internal/usecase/task/__init__.py @@ -1,2 +1,2 @@ -from internal.usecase.task.retrieve_task import RetrieveTask -from internal.usecase.task.set_task import SetTask +from internal.usecase.task.retrieve_task import RetrieveTask # noqa: F401 +from internal.usecase.task.set_task import SetTask # noqa: F401 diff --git a/internal/worker/celery/__init__.py b/internal/worker/celery/__init__.py index 489bf5e6..3602016f 100644 --- a/internal/worker/celery/__init__.py +++ b/internal/worker/celery/__init__.py @@ -1 +1,3 @@ -from internal.worker.celery.profiling_task_worker import ProfilingTaskWorker +from internal.worker.celery.profiling_task_worker import ( # noqa: F401 + ProfilingTaskWorker, +) From d36ab1301aa90a6b45e43f87b061fb4f68567531 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Wed, 25 Sep 2024 20:52:09 +0000 Subject: [PATCH 134/153] chore: delete sqlalchemy mixins from project dependencies --- poetry.lock | 17 +---------------- pyproject.toml | 1 - 2 files changed, 1 insertion(+), 17 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2466adc2..2d4f5a75 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2788,21 +2788,6 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] 
sqlcipher = ["sqlcipher3_binary"] -[[package]] -name = "sqlalchemy-mixins" -version = "2.0.5" -description = "Active Record, Django-like queries, nested eager load and beauty __repr__ for SQLAlchemy" -optional = false -python-versions = "*" -files = [ - {file = "sqlalchemy_mixins-2.0.5-py3-none-any.whl", hash = "sha256:9067b630744741b472aa91d92494cc5612ed2d29c66729a5a4a1d3fbbeccd448"}, - {file = "sqlalchemy_mixins-2.0.5.tar.gz", hash = "sha256:85197fc3682c4bf9c35671fb3d10282a0973b19cd2ff2b6791d601cbfb0fb89e"}, -] - -[package.dependencies] -six = "*" -SQLAlchemy = ">=2.0" - [[package]] name = "sqlalchemy-utils" version = "0.41.2" @@ -3405,4 +3390,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "2a16ad9195dd67f2442c55835418c818e7528f094bc7bd99356700614ea6a3e5" +content-hash = "8bc60ad0e9e823656aae7fc151606f23962c48d27d3bd05dfa974785c2ff5fed" diff --git a/pyproject.toml b/pyproject.toml index 08384723..1058923a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,6 @@ desbordante-stubs = "^0.1.0" pandas = "^2.2.1" pandas-stubs = "^2.2.0.240218" python-multipart = "^0.0.9" -sqlalchemy-mixins = "^2.0.5" aiofiles = "^23.2.1" cfgv = "^3.4.0" From 636488c952da93506a517dee64fbc9911560e7fe Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Fri, 27 Sep 2024 01:30:04 +0000 Subject: [PATCH 135/153] fix: type errors --- internal/domain/task/entities/afd/afd_task.py | 15 +++-- internal/domain/task/entities/fd/fd_task.py | 15 +++-- internal/domain/task/entities/task.py | 6 +- .../domain/task/value_objects/afd/__init__.py | 3 + .../task/value_objects/afd/exception.py | 4 ++ .../domain/task/value_objects/fd/__init__.py | 3 + .../domain/task/value_objects/fd/exception.py | 4 ++ internal/dto/repository/file/__init__.py | 1 - internal/dto/repository/file/file.py | 7 +-- internal/dto/repository/task/task.py | 5 +- internal/dto/worker/task/profiling_task.py | 3 - 
.../background_task/celery/task/di.py | 10 +++- .../celery/task/profiling_task.py | 2 +- .../background_task/settings.py | 4 +- .../data_storage/flat/__init__.py | 1 + .../data_storage/flat/context.py | 16 ++--- .../relational/postgres/context.py | 6 +- .../infrastructure/data_storage/settings.py | 8 ++- internal/repository/flat/file.py | 4 +- internal/rest/http/exception.py | 2 +- internal/rest/http/file/upload_csv_dataset.py | 16 ++++- internal/usecase/file/save_file.py | 3 +- internal/usecase/task/profile_task.py | 44 ++++++++------ internal/usecase/task/set_task.py | 2 +- tests/repository/postgres/test_task.py | 3 +- tests/uow/test_unit_of_work.py | 14 ++--- tests/usecase/test_check_content_type.py | 4 +- tests/usecase/test_profile_task.py | 58 +++++++++---------- tests/usecase/test_retrieve_dataset.py | 22 ++++--- tests/usecase/test_retrieve_task.py | 30 +++++----- tests/usecase/test_save_dataset.py | 16 +++-- tests/usecase/test_save_file.py | 43 +++++++------- tests/usecase/test_set_task.py | 52 ++++++++--------- tests/usecase/test_update_task_info.py | 37 +++++------- 34 files changed, 245 insertions(+), 218 deletions(-) create mode 100644 internal/domain/task/value_objects/afd/exception.py create mode 100644 internal/domain/task/value_objects/fd/exception.py diff --git a/internal/domain/task/entities/afd/afd_task.py b/internal/domain/task/entities/afd/afd_task.py index a53c2ea6..70f27aa8 100644 --- a/internal/domain/task/entities/afd/afd_task.py +++ b/internal/domain/task/entities/afd/afd_task.py @@ -1,4 +1,3 @@ -from typing import assert_never from desbordante.fd import FdAlgorithm # This is not a typo from desbordante.afd.algorithms import Pyro, Tane @@ -6,10 +5,15 @@ from internal.domain.task.value_objects import PrimitiveName from internal.domain.task.value_objects.afd import AfdTaskResult, AfdTaskConfig -from internal.domain.task.value_objects.afd import AfdAlgoName, AfdAlgoResult, FdModel +from internal.domain.task.value_objects.afd import ( + 
AfdAlgoName, + AfdAlgoResult, + FdModel, + IncorrectAFDAlgorithmName, +) -class AfdTask(Task[AfdTaskConfig, AfdTaskResult]): +class AfdTask(Task[FdAlgorithm, AfdTaskConfig, AfdTaskResult]): """ Task class for Approximate Functional Dependency (AFD) profiling. @@ -37,7 +41,7 @@ def _collect_result(self, algo: FdAlgorithm) -> AfdTaskResult: algo_result = AfdAlgoResult(fds=list(map(FdModel.from_fd, fds))) return AfdTaskResult(primitive_name=PrimitiveName.afd, result=algo_result) - def _match_algo_by_name(self, algo_name: AfdAlgoName) -> FdAlgorithm: + def _match_algo_by_name(self, algo_name: str) -> FdAlgorithm: """ Match the approximate functional dependency algorithm by name. @@ -51,4 +55,5 @@ def _match_algo_by_name(self, algo_name: AfdAlgoName) -> FdAlgorithm: return Pyro() case AfdAlgoName.Tane: return Tane() - assert_never(algo_name) + case _: + raise IncorrectAFDAlgorithmName(algo_name) diff --git a/internal/domain/task/entities/fd/fd_task.py b/internal/domain/task/entities/fd/fd_task.py index 245f483d..8b140c6f 100644 --- a/internal/domain/task/entities/fd/fd_task.py +++ b/internal/domain/task/entities/fd/fd_task.py @@ -1,4 +1,3 @@ -from typing import assert_never from desbordante.fd import FdAlgorithm from desbordante.fd.algorithms import ( Aid, @@ -16,10 +15,15 @@ from internal.domain.task.entities.task import Task from internal.domain.task.value_objects import PrimitiveName from internal.domain.task.value_objects.fd import FdTaskConfig, FdTaskResult -from internal.domain.task.value_objects.fd import FdAlgoName, FdModel, FdAlgoResult +from internal.domain.task.value_objects.fd import ( + FdAlgoName, + FdModel, + FdAlgoResult, + IncorrectFDAlgorithmName, +) -class FdTask(Task[FdTaskConfig, FdTaskResult]): +class FdTask(Task[FdAlgorithm, FdTaskConfig, FdTaskResult]): """ Task class for Functional Dependency (FD) profiling. 
@@ -47,7 +51,7 @@ def _collect_result(self, algo: FdAlgorithm) -> FdTaskResult: algo_result = FdAlgoResult(fds=list(map(FdModel.from_fd, fds))) return FdTaskResult(primitive_name=PrimitiveName.fd, result=algo_result) - def _match_algo_by_name(self, algo_name: FdAlgoName) -> FdAlgorithm: + def _match_algo_by_name(self, algo_name: str) -> FdAlgorithm: """ Match the functional dependency algorithm by name. @@ -77,4 +81,5 @@ def _match_algo_by_name(self, algo_name: FdAlgoName) -> FdAlgorithm: return Pyro() case FdAlgoName.Tane: return Tane() - assert_never(algo_name) + case _: + raise IncorrectFDAlgorithmName(algo_name) diff --git a/internal/domain/task/entities/task.py b/internal/domain/task/entities/task.py index 8243b102..cd8b00e1 100644 --- a/internal/domain/task/entities/task.py +++ b/internal/domain/task/entities/task.py @@ -4,7 +4,7 @@ from internal.domain.task.value_objects import TaskConfig, TaskResult -class Task[C: TaskConfig, R: TaskResult](ABC): +class Task[A: desbordante.Algorithm, C: TaskConfig, R: TaskResult](ABC): """ Abstract base class for data profiling tasks. @@ -24,7 +24,7 @@ class Task[C: TaskConfig, R: TaskResult](ABC): """ @abstractmethod - def _match_algo_by_name(self, algo_name: str) -> desbordante.Algorithm: + def _match_algo_by_name(self, algo_name: str) -> A: """ Match and return the algorithm instance based on its name. @@ -36,7 +36,7 @@ def _match_algo_by_name(self, algo_name: str) -> desbordante.Algorithm: pass @abstractmethod - def _collect_result(self, algo: desbordante.Algorithm) -> R: + def _collect_result(self, algo: A) -> R: """ Collect and process the result from the executed algorithm. 
diff --git a/internal/domain/task/value_objects/afd/__init__.py b/internal/domain/task/value_objects/afd/__init__.py index dc5cb92f..a4d631ea 100644 --- a/internal/domain/task/value_objects/afd/__init__.py +++ b/internal/domain/task/value_objects/afd/__init__.py @@ -9,6 +9,9 @@ ) from internal.domain.task.value_objects.afd.algo_name import AfdAlgoName # noqa: F401 from internal.domain.task.value_objects.primitive_name import PrimitiveName +from internal.domain.task.value_objects.afd.exception import ( + IncorrectAFDAlgorithmName, +) # noqa: F401 class BaseAfdTaskModel(BaseModel): diff --git a/internal/domain/task/value_objects/afd/exception.py b/internal/domain/task/value_objects/afd/exception.py new file mode 100644 index 00000000..aa682005 --- /dev/null +++ b/internal/domain/task/value_objects/afd/exception.py @@ -0,0 +1,4 @@ +class IncorrectAFDAlgorithmName(Exception): + + def __init__(self, message: str): + super().__init__(f"{message} is incorrect afd algorithm name") diff --git a/internal/domain/task/value_objects/fd/__init__.py b/internal/domain/task/value_objects/fd/__init__.py index 1fd31502..9a77b3ed 100644 --- a/internal/domain/task/value_objects/fd/__init__.py +++ b/internal/domain/task/value_objects/fd/__init__.py @@ -9,6 +9,9 @@ FdModel, ) from internal.domain.task.value_objects.fd.algo_name import FdAlgoName # noqa: F401 +from internal.domain.task.value_objects.fd.exception import ( + IncorrectFDAlgorithmName, +) # noqa: F401 class BaseFdTaskModel(BaseModel): diff --git a/internal/domain/task/value_objects/fd/exception.py b/internal/domain/task/value_objects/fd/exception.py new file mode 100644 index 00000000..65576f28 --- /dev/null +++ b/internal/domain/task/value_objects/fd/exception.py @@ -0,0 +1,4 @@ +class IncorrectFDAlgorithmName(Exception): + + def __init__(self, message: str): + super().__init__(f"{message} is incorrect fd algorithm name") diff --git a/internal/dto/repository/file/__init__.py b/internal/dto/repository/file/__init__.py index 
b1046036..749878b9 100644 --- a/internal/dto/repository/file/__init__.py +++ b/internal/dto/repository/file/__init__.py @@ -1,6 +1,5 @@ from internal.dto.repository.file.file import ( # noqa: F401 File, - FileResponseSchema, FileFindSchema, FileCreateSchema, FileUpdateSchema, diff --git a/internal/dto/repository/file/file.py b/internal/dto/repository/file/file.py index 4b1eb027..5eba009b 100644 --- a/internal/dto/repository/file/file.py +++ b/internal/dto/repository/file/file.py @@ -12,8 +12,8 @@ class File(Protocol): - filename: str | None - content_type: str | None + filename: str + content_type: str async def read(self, chunk_size: int) -> bytes: ... @@ -31,9 +31,6 @@ class FileUpdateSchema(FileBaseSchema, BaseUpdateSchema): ... class FileFindSchema(FileBaseSchema, BaseSchema): ... # it's not a typo -FileResponseSchema = None - - class CSVFileFindSchema(FileFindSchema): separator: str header: list[int] diff --git a/internal/dto/repository/task/task.py b/internal/dto/repository/task/task.py index 5421e91b..56cf79c9 100644 --- a/internal/dto/repository/task/task.py +++ b/internal/dto/repository/task/task.py @@ -16,12 +16,12 @@ class TaskBaseSchema(BaseSchema): - status: TaskStatus config: OneOfTaskConfig dataset_id: UUID -class TaskCreateSchema(TaskBaseSchema, BaseCreateSchema): ... +class TaskCreateSchema(TaskBaseSchema, BaseCreateSchema): + status: TaskStatus class TaskUpdateSchema(TaskBaseSchema, BaseUpdateSchema): @@ -36,6 +36,7 @@ class TaskFindSchema(BaseFindSchema[UUID]): ... 
class TaskResponseSchema(TaskBaseSchema, BaseResponseSchema[UUID]): + status: TaskStatus result: OneOfTaskResult | None = None raised_exception_name: str | None = None failure_reason: TaskFailureReason | None = None diff --git a/internal/dto/worker/task/profiling_task.py b/internal/dto/worker/task/profiling_task.py index 58ff8090..ced8a77d 100644 --- a/internal/dto/worker/task/profiling_task.py +++ b/internal/dto/worker/task/profiling_task.py @@ -11,6 +11,3 @@ class ProfilingTaskBaseSchema(BaseModel): class ProfilingTaskCreateSchema(ProfilingTaskBaseSchema): ... - - -ProfilingTaskResponseSchema = None diff --git a/internal/infrastructure/background_task/celery/task/di.py b/internal/infrastructure/background_task/celery/task/di.py index 7ee6ce74..2f0b54f0 100644 --- a/internal/infrastructure/background_task/celery/task/di.py +++ b/internal/infrastructure/background_task/celery/task/di.py @@ -1,6 +1,7 @@ from internal.infrastructure.data_storage.relational.postgres import ( get_postgres_context_maker_without_pool, ) +from internal.infrastructure.data_storage.flat import get_flat_context_maker from internal.repository.flat import FileRepository from internal.repository.relational.file import DatasetRepository from internal.repository.relational.task import TaskRepository @@ -34,14 +35,17 @@ def get_update_task_info_use_case(): def get_profile_task_use_case(): - context_maker = get_postgres_context_maker_without_pool() + postgres_context_maker = get_postgres_context_maker_without_pool() + flat_context_maker = get_flat_context_maker() - unit_of_work = UnitOfWork(context_maker) + file_unit_of_work = UnitOfWork(flat_context_maker) + dataset_unit_of_work = UnitOfWork(postgres_context_maker) file_repo = get_file_repo() dataset_repo = get_dataset_repo() return ProfileTask( - unit_of_work=unit_of_work, + file_unit_of_work=file_unit_of_work, + dataset_unit_of_work=dataset_unit_of_work, file_repo=file_repo, # type: ignore dataset_repo=dataset_repo, # type: ignore ) diff --git 
a/internal/infrastructure/background_task/celery/task/profiling_task.py b/internal/infrastructure/background_task/celery/task/profiling_task.py index 7c18e231..981e87ec 100644 --- a/internal/infrastructure/background_task/celery/task/profiling_task.py +++ b/internal/infrastructure/background_task/celery/task/profiling_task.py @@ -60,7 +60,7 @@ def task_postrun_notifier( update_task_info( task_id=db_task_id, task_status=TaskStatus.COMPLETED, - result=retval.model_dump(), + result=retval, ) diff --git a/internal/infrastructure/background_task/settings.py b/internal/infrastructure/background_task/settings.py index 569b215b..3aab5e73 100644 --- a/internal/infrastructure/background_task/settings.py +++ b/internal/infrastructure/background_task/settings.py @@ -9,8 +9,8 @@ class Settings(BaseSettings): # Celery worker limits worker_soft_time_limit_in_seconds: int = Field(default=60, gt=0) worker_hard_time_limit_in_seconds: int = Field(default=120, gt=0) - worker_soft_memory_limit: ByteSize = "2GB" - worker_hard_memory_limit: ByteSize = "4GB" + worker_soft_memory_limit: ByteSize = Field(default=2 * 1024 * 1024 * 1024) # 2GB + worker_hard_memory_limit: ByteSize = Field(default=4 * 1024 * 1024 * 1024) # 4GB def get_settings(): diff --git a/internal/infrastructure/data_storage/flat/__init__.py b/internal/infrastructure/data_storage/flat/__init__.py index e39991c9..d1fb6e59 100644 --- a/internal/infrastructure/data_storage/flat/__init__.py +++ b/internal/infrastructure/data_storage/flat/__init__.py @@ -1,4 +1,5 @@ from internal.infrastructure.data_storage.flat.context import ( # noqa: F401 FlatContextMaker, FlatContext, + get_flat_context_maker, ) diff --git a/internal/infrastructure/data_storage/flat/context.py b/internal/infrastructure/data_storage/flat/context.py index 83bf5769..1f8b679f 100644 --- a/internal/infrastructure/data_storage/flat/context.py +++ b/internal/infrastructure/data_storage/flat/context.py @@ -13,20 +13,20 @@ def upload_directory_path(self) -> Path: 
return self._upload_directory_path # This context implementation does not support transactions - def flush(self) -> None: - pass + def flush(self) -> None: ... - def rollback(self) -> None: - pass + def rollback(self) -> None: ... - def commit(self) -> None: - pass + def commit(self) -> None: ... - def close(self) -> None: - pass # TODO: implement flat context closing. + def close(self) -> None: ... # TODO: implement flat context closing. class FlatContextMaker: def __call__(self): return FlatContext(settings.uploaded_files_dir_path) + + +def get_flat_context_maker() -> FlatContextMaker: + return FlatContextMaker() diff --git a/internal/infrastructure/data_storage/relational/postgres/context.py b/internal/infrastructure/data_storage/relational/postgres/context.py index 159379df..7231e0f4 100644 --- a/internal/infrastructure/data_storage/relational/postgres/context.py +++ b/internal/infrastructure/data_storage/relational/postgres/context.py @@ -9,14 +9,14 @@ poolclass=NullPool, ) -PostgresContextType = Session +type PostgresContextType = Session PostgresContextMaker = sessionmaker(bind=default_engine) PostgresContextMakerWithoutPool = sessionmaker(bind=engine_without_pool) -def get_postgres_context_maker() -> PostgresContextMaker: +def get_postgres_context_maker() -> sessionmaker[Session]: return PostgresContextMaker -def get_postgres_context_maker_without_pool() -> PostgresContextMakerWithoutPool: +def get_postgres_context_maker_without_pool() -> sessionmaker[Session]: return PostgresContextMakerWithoutPool diff --git a/internal/infrastructure/data_storage/settings.py b/internal/infrastructure/data_storage/settings.py index 5229f858..cf1f28b9 100644 --- a/internal/infrastructure/data_storage/settings.py +++ b/internal/infrastructure/data_storage/settings.py @@ -1,8 +1,14 @@ +# type: ignore + +# Values ​​in the settings are added dynamically, so the static analyzer, +# without knowing this, produces an error. 
+ from functools import cached_property from dotenv import load_dotenv, find_dotenv from pydantic import AmqpDsn, DirectoryPath, PostgresDsn from pydantic_settings import BaseSettings +from pathlib import Path load_dotenv(find_dotenv(".env")) @@ -21,7 +27,7 @@ class Settings(BaseSettings): rabbitmq_host: str rabbitmq_port: int = 5672 # Flat files settings - uploaded_files_dir_path: DirectoryPath = "uploads/" + uploaded_files_dir_path: DirectoryPath = Path("uploads/") @cached_property def rabbitmq_dsn(self) -> AmqpDsn: diff --git a/internal/repository/flat/file.py b/internal/repository/flat/file.py index 115377a8..b9d669d7 100644 --- a/internal/repository/flat/file.py +++ b/internal/repository/flat/file.py @@ -8,7 +8,7 @@ CSVFileFindSchema, CSVFileResponseSchema, ) -from internal.dto.repository.file import File, FileCreateSchema, FileResponseSchema +from internal.dto.repository.file import File, FileCreateSchema from internal.infrastructure.data_storage.flat import FlatContext CHUNK_SIZE = 1024 @@ -22,7 +22,7 @@ async def create( file: File, file_info: FileCreateSchema, context: FlatContext, - ) -> FileResponseSchema: + ) -> None: path_to_file = Path.joinpath( context.upload_directory_path, str(file_info.file_name) diff --git a/internal/rest/http/exception.py b/internal/rest/http/exception.py index 13aa1339..20c5557a 100644 --- a/internal/rest/http/exception.py +++ b/internal/rest/http/exception.py @@ -35,7 +35,7 @@ def file_metadata_not_found_exception(_, exc: FileMetadataNotFoundException): ) @app.exception_handler(TaskNotFoundException) - def file_metadata_not_found_exception(_, exc: TaskNotFoundException): # noqa: F811 + def task_not_found_exception(_, exc: TaskNotFoundException): raise HTTPException( status_code=404, detail=str(exc), diff --git a/internal/rest/http/file/upload_csv_dataset.py b/internal/rest/http/file/upload_csv_dataset.py index 7ef11c1d..3a1db24f 100644 --- a/internal/rest/http/file/upload_csv_dataset.py +++ 
b/internal/rest/http/file/upload_csv_dataset.py @@ -13,6 +13,16 @@ router = APIRouter() +class UploadFileAdapter: + def __init__(self, upload_file: UploadFile): + self.filename = upload_file.filename or "" + self.content_type = upload_file.content_type or "" + self._upload_file = upload_file + + async def read(self, chunk_size: int) -> bytes: + return await self._upload_file.read(chunk_size) + + @router.post("/csv") async def upload_csv_dataset( file: UploadFile, @@ -23,8 +33,10 @@ async def upload_csv_dataset( save_dataset: SaveDataset = Depends(get_save_dataset_use_case), ) -> UUID: - check_content_type(upload_file=file) - save_file_result = await save_file(upload_file=file) + adapted_file = UploadFileAdapter(file) + + check_content_type(upload_file=adapted_file) + save_file_result = await save_file(upload_file=adapted_file) save_dataset_result = save_dataset( file_id=save_file_result.id, separator=separator, diff --git a/internal/usecase/file/save_file.py b/internal/usecase/file/save_file.py index 0f6d2f5c..82c2cb2b 100644 --- a/internal/usecase/file/save_file.py +++ b/internal/usecase/file/save_file.py @@ -6,7 +6,6 @@ from internal.domain.file import File as FileEntity from internal.dto.repository.file import ( FileCreateSchema, - FileResponseSchema, File, FailedFileReadingException, ) @@ -22,7 +21,7 @@ class FileRepo(Protocol): async def create( self, file: File, file_info: FileCreateSchema, context: DataStorageContext - ) -> FileResponseSchema: ... + ) -> None: ... class FileMetadataRepo(Protocol): diff --git a/internal/usecase/task/profile_task.py b/internal/usecase/task/profile_task.py index e260d4a2..b42efdd3 100644 --- a/internal/usecase/task/profile_task.py +++ b/internal/usecase/task/profile_task.py @@ -40,34 +40,40 @@ class ProfileTask: def __init__( self, - unit_of_work: UnitOfWork, + # It is assumed that the two repositories will be associated with different data storages. + # In order to support different data storages, a separate UoW is needed for each.
+ # If both of your repositories are linked to the same data storage, use only one UoW. + file_unit_of_work: UnitOfWork, + dataset_unit_of_work: UnitOfWork, file_repo: FileRepo, dataset_repo: DatasetRepo, ): - self.unit_of_work = unit_of_work + self.file_unit_of_work = file_unit_of_work + self.dataset_unit_of_work = dataset_unit_of_work self.file_repo = file_repo self.dataset_repo = dataset_repo def __call__(self, *, dataset_id: UUID, config: OneOfTaskConfig) -> OneOfTaskResult: - with self.unit_of_work as context: - try: - dataset, file_metadata = self.dataset_repo.find_with_file_metadata( - DatasetFindSchema(id=dataset_id), context - ) + with self.file_unit_of_work as file_context: + with self.dataset_unit_of_work as dataset_context: + try: + dataset, file_metadata = self.dataset_repo.find_with_file_metadata( + DatasetFindSchema(id=dataset_id), dataset_context + ) - df = self.file_repo.find( - CSVFileFindSchema( - file_name=file_metadata.file_name, - separator=dataset.separator, - header=dataset.header, - ), - context, - ) - except DatasetNotFoundException: - raise DatasetNotFoundUseCaseException() - except FileMetadataNotFoundException: - raise FileMetadataNotFoundUseCaseException() + df = self.file_repo.find( + CSVFileFindSchema( + file_name=file_metadata.file_name, + separator=dataset.separator, + header=dataset.header, + ), + file_context, + ) + except DatasetNotFoundException: + raise DatasetNotFoundUseCaseException() + except FileMetadataNotFoundException: + raise FileMetadataNotFoundUseCaseException() task = match_task_by_primitive_name(primitive_name=config.primitive_name) result = task.execute(table=df, task_config=config) # type: ignore diff --git a/internal/usecase/task/set_task.py b/internal/usecase/task/set_task.py index 93f6cc19..a8c13800 100644 --- a/internal/usecase/task/set_task.py +++ b/internal/usecase/task/set_task.py @@ -53,7 +53,7 @@ def __call__( dataset_find_schema = DatasetFindSchema(id=dataset_id) task_create_schema =
TaskCreateSchema( status=TaskStatus.CREATED, - config=config.model_dump(exclude_unset=True), + config=config, dataset_id=dataset_id, ) diff --git a/tests/repository/postgres/test_task.py b/tests/repository/postgres/test_task.py index 7c8d40aa..0ac36a21 100644 --- a/tests/repository/postgres/test_task.py +++ b/tests/repository/postgres/test_task.py @@ -3,6 +3,7 @@ from internal.domain.task.value_objects import TaskStatus, FdTaskConfig, PrimitiveName from internal.domain.task.value_objects.fd import FdAlgoName +from internal.domain.task.value_objects.fd.algo_config import AidConfig from internal.dto.repository.task import ( TaskCreateSchema, TaskFindSchema, @@ -51,7 +52,7 @@ def dataset_id(dataset_create_schema, postgres_context): def get_config(): return FdTaskConfig( primitive_name=PrimitiveName.fd, - config={"algo_name": FdAlgoName.Aid, "is_null_equal_null": True}, + config=AidConfig(algo_name=FdAlgoName.Aid, is_null_equal_null=True), ) diff --git a/tests/uow/test_unit_of_work.py b/tests/uow/test_unit_of_work.py index 9228be52..542172af 100644 --- a/tests/uow/test_unit_of_work.py +++ b/tests/uow/test_unit_of_work.py @@ -6,20 +6,16 @@ @pytest.fixture -def context_mock(mocker: MockerFixture) -> DataStorageContext: +def context_mock(mocker: MockerFixture): return mocker.Mock(spec=DataStorageContext) @pytest.fixture -def context_maker_mock( - mocker: MockerFixture, context_mock: DataStorageContext -) -> DataStorageContextMaker: +def context_maker_mock(mocker: MockerFixture, context_mock): return mocker.Mock(spec=DataStorageContextMaker, return_value=context_mock) -def test_unit_of_work_commit_on_success( - context_maker_mock: DataStorageContextMaker, context_mock: DataStorageContext -) -> None: +def test_unit_of_work_commit_on_success(context_maker_mock, context_mock) -> None: uow = UnitOfWork(context_maker_mock) with uow as context: @@ -31,9 +27,7 @@ def test_unit_of_work_commit_on_success( context_mock.close.assert_called_once() -def 
test_unit_of_work_rollback_on_failure( - context_maker_mock: DataStorageContextMaker, context_mock: DataStorageContext -) -> None: +def test_unit_of_work_rollback_on_failure(context_maker_mock, context_mock) -> None: uow = UnitOfWork(context_maker_mock) with pytest.raises(ValueError): diff --git a/tests/usecase/test_check_content_type.py b/tests/usecase/test_check_content_type.py index f2aa4a9b..1d8eb0ce 100644 --- a/tests/usecase/test_check_content_type.py +++ b/tests/usecase/test_check_content_type.py @@ -1,3 +1,5 @@ +from typing import Type + import pytest from pytest_mock import MockerFixture @@ -23,7 +25,7 @@ def test_check_content_type( check_content_type: CheckContentType, mocker: MockerFixture, content_type: str, - expected_exception: IncorrectFileFormatException | None, + expected_exception: Type[IncorrectFileFormatException] | None, ): upload_file = mocker.Mock(spec=File) upload_file.content_type = content_type diff --git a/tests/usecase/test_profile_task.py b/tests/usecase/test_profile_task.py index a5756c02..99ef932a 100644 --- a/tests/usecase/test_profile_task.py +++ b/tests/usecase/test_profile_task.py @@ -4,9 +4,10 @@ import pytest from pytest_mock import MockerFixture -from internal.uow import UnitOfWork, DataStorageContext +from internal.uow import DataStorageContext from internal.domain.task.value_objects import FdTaskConfig, PrimitiveName, FdTaskResult -from internal.domain.task.value_objects.fd import FdAlgoResult, FdAlgoName +from internal.domain.task.value_objects.fd import FdAlgoResult +from internal.domain.task.value_objects.fd.algo_config import AidConfig from internal.usecase.task.profile_task import DatasetRepo, FileRepo, ProfileTask from internal.usecase.file.exception import ( DatasetNotFoundException as DatasetNotFoundUseCaseException, @@ -25,7 +26,7 @@ @pytest.fixture -def unit_of_work_mock(mocker: MockerFixture) -> UnitOfWork: +def unit_of_work_mock(mocker: MockerFixture): mock = mocker.MagicMock() mock.__enter__.return_value = 
mocker.Mock( return_value=mocker.Mock(), spec=DataStorageContext @@ -42,25 +43,26 @@ def exit_side_effect(exc_type, exc_value, traceback) -> bool: @pytest.fixture -def dataset_repo_mock(mocker: MockerFixture) -> DatasetRepo: +def dataset_repo_mock(mocker: MockerFixture): mock = mocker.Mock(spec=DatasetRepo) return mock @pytest.fixture -def file_repo_mock(mocker: MockerFixture) -> FileRepo: +def file_repo_mock(mocker: MockerFixture): mock = mocker.Mock(spec=FileRepo) return mock @pytest.fixture def profile_task_use_case( - unit_of_work_mock: UnitOfWork, - dataset_repo_mock: DatasetRepo, - file_repo_mock: FileRepo, + unit_of_work_mock, + dataset_repo_mock, + file_repo_mock, ) -> ProfileTask: return ProfileTask( - unit_of_work=unit_of_work_mock, + file_unit_of_work=unit_of_work_mock, + dataset_unit_of_work=unit_of_work_mock, dataset_repo=dataset_repo_mock, file_repo=file_repo_mock, ) @@ -68,10 +70,10 @@ def profile_task_use_case( def test_profile_task_use_case_success( mocker: MockerFixture, - profile_task_use_case: ProfileTask, - unit_of_work_mock: UnitOfWork, - dataset_repo_mock: DatasetRepo, - file_repo_mock: FileRepo, + profile_task_use_case, + unit_of_work_mock, + dataset_repo_mock, + file_repo_mock, ) -> None: # Prepare data dataset_id = uuid4() @@ -95,9 +97,8 @@ def test_profile_task_use_case_success( {"column1": [1, 2, 3], "column2": ["a", "b", "c"]} ) - task_config = FdTaskConfig( - primitive_name=PrimitiveName.fd, config={"algo_name": FdAlgoName.Aid} - ) + aid_config = AidConfig(algo_name="aid") # type: ignore + task_config = FdTaskConfig(primitive_name=PrimitiveName.fd, config=aid_config) task_result = FdTaskResult( primitive_name=PrimitiveName.fd, result=FdAlgoResult(fds=[]) @@ -144,8 +145,8 @@ def test_profile_task_use_case_success( ) # Check that UnitOfWork was entered and exited correctly - unit_of_work_mock.__enter__.assert_called_once() - unit_of_work_mock.__exit__.assert_called_once() + assert unit_of_work_mock.__enter__.call_count == 2 + assert 
unit_of_work_mock.__exit__.call_count == 2 @pytest.mark.parametrize( @@ -156,19 +157,18 @@ def test_profile_task_use_case_success( ], ) def test_profile_task_use_case_dataset_not_found( - profile_task_use_case: ProfileTask, - unit_of_work_mock: UnitOfWork, - dataset_repo_mock: DatasetRepo, - file_repo_mock: FileRepo, - repo_exception: Exception, - use_case_exception: Exception, + profile_task_use_case, + unit_of_work_mock, + dataset_repo_mock, + file_repo_mock, + repo_exception, + use_case_exception, ) -> None: # Prepare data dataset_id = uuid4() - task_config = FdTaskConfig( - primitive_name=PrimitiveName.fd, config={"algo_name": FdAlgoName.Aid} - ) + aid_config = AidConfig(algo_name="aid") # type: ignore + task_config = FdTaskConfig(primitive_name=PrimitiveName.fd, config=aid_config) # Mocks dataset_repo_mock.find_with_file_metadata.side_effect = repo_exception @@ -186,5 +186,5 @@ def test_profile_task_use_case_dataset_not_found( assert not file_repo_mock.find.called # Check that UnitOfWork was entered and exited correctly - unit_of_work_mock.__enter__.assert_called_once() - unit_of_work_mock.__exit__.assert_called_once() + assert unit_of_work_mock.__enter__.call_count == 2 + assert unit_of_work_mock.__exit__.call_count == 2 diff --git a/tests/usecase/test_retrieve_dataset.py b/tests/usecase/test_retrieve_dataset.py index 29e9b927..42edb472 100644 --- a/tests/usecase/test_retrieve_dataset.py +++ b/tests/usecase/test_retrieve_dataset.py @@ -4,7 +4,7 @@ from pytest_mock import MockerFixture from internal.dto.repository.file import DatasetResponseSchema, DatasetFindSchema -from internal.uow import UnitOfWork, DataStorageContext +from internal.uow import DataStorageContext from internal.usecase.file.exception import DatasetNotFoundException from internal.usecase.file.retrieve_dataset import ( DatasetRepo, @@ -14,7 +14,7 @@ @pytest.fixture -def unit_of_work_mock(mocker: MockerFixture) -> UnitOfWork: +def unit_of_work_mock(mocker: MockerFixture): mock = 
mocker.MagicMock() mock.__enter__.return_value = mocker.Mock( return_value=mocker.Mock(), spec=DataStorageContext @@ -31,24 +31,22 @@ def exit_side_effect(exc_type, exc_value, traceback) -> bool: @pytest.fixture -def dataset_repo_mock(mocker: MockerFixture) -> DatasetRepo: +def dataset_repo_mock(mocker: MockerFixture): mock = mocker.Mock(spec=DatasetRepo) return mock @pytest.fixture -def retrieve_dataset_use_case( - unit_of_work_mock: UnitOfWork, dataset_repo_mock: DatasetRepo -) -> RetrieveDataset: +def retrieve_dataset_use_case(unit_of_work_mock, dataset_repo_mock): return RetrieveDataset( unit_of_work=unit_of_work_mock, dataset_repo=dataset_repo_mock ) def test_retrieve_dataset_use_case_success( - unit_of_work_mock: UnitOfWork, - dataset_repo_mock: DatasetRepo, - retrieve_dataset_use_case: RetrieveDataset, + unit_of_work_mock, + dataset_repo_mock, + retrieve_dataset_use_case, ): # Prepare data dataset_id = uuid4() @@ -77,9 +75,9 @@ def test_retrieve_dataset_use_case_success( def test_retrieve_dataset_use_case_not_found( - unit_of_work_mock: UnitOfWork, - retrieve_dataset_use_case: RetrieveDataset, - dataset_repo_mock: DatasetRepo, + unit_of_work_mock, + retrieve_dataset_use_case, + dataset_repo_mock, ): # Prepare data dataset_id = uuid4() diff --git a/tests/usecase/test_retrieve_task.py b/tests/usecase/test_retrieve_task.py index 3bc84eb7..a2cb3fe9 100644 --- a/tests/usecase/test_retrieve_task.py +++ b/tests/usecase/test_retrieve_task.py @@ -4,12 +4,13 @@ from pytest_mock import MockerFixture from internal.domain.task.value_objects import PrimitiveName, TaskStatus, FdTaskResult -from internal.domain.task.value_objects.fd import FdAlgoName, FdAlgoResult, FdTaskConfig +from internal.domain.task.value_objects.fd import FdAlgoResult, FdTaskConfig +from internal.domain.task.value_objects.fd.algo_config import AidConfig from internal.dto.repository.task import ( TaskResponseSchema, TaskFindSchema, ) -from internal.uow import UnitOfWork, DataStorageContext +from 
internal.uow import DataStorageContext from internal.usecase.task.exception import TaskNotFoundException from internal.usecase.task.retrieve_task import ( RetrieveTask, @@ -19,7 +20,7 @@ @pytest.fixture -def unit_of_work_mock(mocker: MockerFixture) -> UnitOfWork: +def unit_of_work_mock(mocker: MockerFixture): mock = mocker.MagicMock() mock.__enter__.return_value = mocker.Mock( return_value=mocker.Mock(), spec=DataStorageContext @@ -29,30 +30,27 @@ def unit_of_work_mock(mocker: MockerFixture) -> UnitOfWork: @pytest.fixture -def task_repo_mock(mocker: MockerFixture) -> TaskRepo: +def task_repo_mock(mocker: MockerFixture): mock = mocker.Mock(spec=TaskRepo) return mock @pytest.fixture -def retrieve_task_use_case( - unit_of_work_mock: UnitOfWork, task_repo_mock: TaskRepo -) -> RetrieveTask: +def retrieve_task_use_case(unit_of_work_mock, task_repo_mock): return RetrieveTask(unit_of_work=unit_of_work_mock, task_repo=task_repo_mock) def test_retrieve_task_use_case_success( - unit_of_work_mock: UnitOfWork, - task_repo_mock: TaskRepo, - retrieve_task_use_case: RetrieveTask, + unit_of_work_mock, + task_repo_mock, + retrieve_task_use_case, ): # Prepare data task_id = uuid4() dataset_id = uuid4() - task_config = FdTaskConfig( - primitive_name=PrimitiveName.fd, config={"algo_name": FdAlgoName.Aid} - ) + aid_config = AidConfig(algo_name="aid") # type: ignore + task_config = FdTaskConfig(primitive_name=PrimitiveName.fd, config=aid_config) task_result = FdTaskResult( primitive_name=PrimitiveName.fd, result=FdAlgoResult(fds=[]) ) @@ -95,9 +93,9 @@ def test_retrieve_task_use_case_success( def test_retrieve_task_use_case_not_found( - unit_of_work_mock: UnitOfWork, - retrieve_task_use_case: RetrieveTask, - task_repo_mock: TaskRepo, + unit_of_work_mock, + retrieve_task_use_case, + task_repo_mock, ): task_id = uuid4() diff --git a/tests/usecase/test_save_dataset.py b/tests/usecase/test_save_dataset.py index ed0eefdb..46a66684 100644 --- a/tests/usecase/test_save_dataset.py +++ 
b/tests/usecase/test_save_dataset.py @@ -4,12 +4,12 @@ from pytest_mock import MockerFixture from internal.dto.repository.file import DatasetResponseSchema, DatasetCreateSchema -from internal.uow import UnitOfWork, DataStorageContext +from internal.uow import DataStorageContext from internal.usecase.file.save_dataset import DatasetRepo, SaveDataset @pytest.fixture -def unit_of_work_mock(mocker: MockerFixture) -> UnitOfWork: +def unit_of_work_mock(mocker: MockerFixture): mock = mocker.MagicMock() mock.__enter__.return_value = mocker.Mock( return_value=mocker.Mock(), spec=DataStorageContext @@ -19,22 +19,20 @@ def unit_of_work_mock(mocker: MockerFixture) -> UnitOfWork: @pytest.fixture -def dataset_repo_mock(mocker: MockerFixture) -> DatasetRepo: +def dataset_repo_mock(mocker: MockerFixture): mock = mocker.Mock(spec=DatasetRepo) return mock @pytest.fixture -def save_dataset( - unit_of_work_mock: UnitOfWork, dataset_repo_mock: DatasetRepo -) -> SaveDataset: +def save_dataset(unit_of_work_mock, dataset_repo_mock): return SaveDataset(unit_of_work=unit_of_work_mock, dataset_repo=dataset_repo_mock) def test_save_dataset( - save_dataset: SaveDataset, - unit_of_work_mock: UnitOfWork, - dataset_repo_mock: DatasetRepo, + save_dataset, + unit_of_work_mock, + dataset_repo_mock, ) -> None: # Prepare data file_id = uuid4() diff --git a/tests/usecase/test_save_file.py b/tests/usecase/test_save_file.py index ded91fc7..11cba635 100644 --- a/tests/usecase/test_save_file.py +++ b/tests/usecase/test_save_file.py @@ -10,10 +10,9 @@ File, FileMetadataCreateSchema, FileCreateSchema, - FileResponseSchema, FailedFileReadingException, ) -from internal.uow import UnitOfWork, DataStorageContext +from internal.uow import DataStorageContext from internal.usecase.file.exception import FailedReadFileException from internal.usecase.file.save_file import ( FileMetadataRepo, @@ -24,7 +23,7 @@ @pytest.fixture -def unit_of_work_mock(mocker: MockerFixture) -> UnitOfWork: +def unit_of_work_mock(mocker: 
MockerFixture): mock = mocker.MagicMock() mock.__enter__.return_value = mocker.Mock( return_value=mocker.Mock(), spec=DataStorageContext @@ -41,7 +40,7 @@ def exit_side_effect(exc_type, exc_value, traceback) -> bool: @pytest.fixture -def file_entity_mock(mocker: MockerFixture) -> FileEntity: +def file_entity_mock(mocker: MockerFixture): mock = mocker.Mock(spec=FileEntity) mock.name_as_uuid = uuid4() mock.name = str(mock.name_as_uuid) @@ -49,13 +48,13 @@ def file_entity_mock(mocker: MockerFixture) -> FileEntity: @pytest.fixture -def file_repo_mock(mocker: MockerFixture) -> FileRepo: +def file_repo_mock(mocker: MockerFixture): mock = mocker.Mock(spec=FileRepo) return mock @pytest.fixture -def file_metadata_repo_mock(mocker: MockerFixture) -> FileMetadataRepo: +def file_metadata_repo_mock(mocker: MockerFixture): mock = mocker.Mock(spec=FileMetadataRepo) return mock @@ -63,10 +62,10 @@ def file_metadata_repo_mock(mocker: MockerFixture) -> FileMetadataRepo: @pytest.fixture def save_file( mocker: MockerFixture, - unit_of_work_mock: UnitOfWork, - file_repo_mock: FileRepo, - file_metadata_repo_mock: FileMetadataRepo, - file_entity_mock: FileEntity, + unit_of_work_mock, + file_repo_mock, + file_metadata_repo_mock, + file_entity_mock, ) -> SaveFile: mocker.patch( "internal.usecase.file.save_file.FileEntity", return_value=file_entity_mock @@ -82,11 +81,11 @@ def save_file( @pytest.mark.asyncio async def test_save_file( mocker: MockerFixture, - save_file: SaveFile, - unit_of_work_mock: UnitOfWork, - file_repo_mock: FileRepo, - file_metadata_repo_mock: FileMetadataRepo, - file_entity_mock: FileEntity, + save_file, + unit_of_work_mock, + file_repo_mock, + file_metadata_repo_mock, + file_entity_mock, ) -> None: # Prepare data file_id = uuid4() @@ -106,9 +105,7 @@ async def test_save_file( updated_at=updated_at, ) - file_response = FileResponseSchema - - file_repo_mock.create.return_value = file_response + file_repo_mock.create.return_value = None 
file_metadata_repo_mock.create.return_value = file_metadata_response upload_file_mock = mocker.Mock(spec=File) @@ -152,11 +149,11 @@ async def test_save_file( @pytest.mark.asyncio async def test_save_file_failed_read_file_exception( mocker: MockerFixture, - save_file: SaveFile, - unit_of_work_mock: UnitOfWork, - file_repo_mock: FileRepo, - file_metadata_repo_mock: FileMetadataRepo, - file_entity_mock: FileEntity, + save_file, + unit_of_work_mock, + file_repo_mock, + file_metadata_repo_mock, + file_entity_mock, ) -> None: # Prepare the mock to raise the exception file_repo_mock.create.side_effect = FailedFileReadingException( diff --git a/tests/usecase/test_set_task.py b/tests/usecase/test_set_task.py index 3f13f3b2..dbf761b3 100644 --- a/tests/usecase/test_set_task.py +++ b/tests/usecase/test_set_task.py @@ -5,7 +5,7 @@ from pytest_mock import MockerFixture from internal.domain.task.value_objects import PrimitiveName, TaskStatus, FdTaskConfig -from internal.domain.task.value_objects.fd import FdAlgoName +from internal.domain.task.value_objects.fd.algo_config import AidConfig from internal.dto.repository.file import ( DatasetResponseSchema, DatasetFindSchema, @@ -15,7 +15,7 @@ TaskCreateSchema, ) from internal.dto.worker.task import ProfilingTaskCreateSchema -from internal.uow import UnitOfWork, DataStorageContext +from internal.uow import DataStorageContext from internal.usecase.file.exception import DatasetNotFoundException from internal.usecase.task.set_task import ( SetTask, @@ -26,7 +26,7 @@ @pytest.fixture -def unit_of_work_mock(mocker: MockerFixture) -> UnitOfWork: +def unit_of_work_mock(mocker: MockerFixture): mock = mocker.MagicMock() mock.__enter__.return_value = mocker.Mock( return_value=mocker.Mock(), spec=DataStorageContext @@ -43,29 +43,29 @@ def exit_side_effect(exc_type, exc_value, traceback) -> bool: @pytest.fixture -def dataset_repo_mock(mocker: MockerFixture) -> DatasetRepo: +def dataset_repo_mock(mocker: MockerFixture): mock = 
mocker.Mock(spec=DatasetRepo) return mock @pytest.fixture -def task_repo_mock(mocker: MockerFixture) -> TaskRepo: +def task_repo_mock(mocker: MockerFixture): mock = mocker.Mock(spec=TaskRepo) return mock @pytest.fixture -def profiling_task_worker(mocker: MockerFixture) -> ProfilingTaskWorker: +def profiling_task_worker(mocker: MockerFixture): mock = mocker.Mock(spec=ProfilingTaskWorker) return mock @pytest.fixture def set_task_use_case( - unit_of_work_mock: UnitOfWork, - dataset_repo_mock: DatasetRepo, - task_repo_mock: TaskRepo, - profiling_task_worker: ProfilingTaskWorker, + unit_of_work_mock, + dataset_repo_mock, + task_repo_mock, + profiling_task_worker, ): return SetTask( unit_of_work=unit_of_work_mock, @@ -76,18 +76,17 @@ def set_task_use_case( def test_set_task_use_case_success( - set_task_use_case: SetTask, - unit_of_work_mock: UnitOfWork, - dataset_repo_mock: DatasetRepo, - task_repo_mock: TaskRepo, - profiling_task_worker: ProfilingTaskWorker, + set_task_use_case, + unit_of_work_mock, + dataset_repo_mock, + task_repo_mock, + profiling_task_worker, ) -> None: # Prepare data dataset_id = uuid4() task_id = uuid4() - task_config = FdTaskConfig( - primitive_name=PrimitiveName.fd, config={"algo_name": FdAlgoName.Aid} - ) + aid_config = AidConfig(algo_name="aid") # type: ignore + task_config = FdTaskConfig(primitive_name=PrimitiveName.fd, config=aid_config) # Mocks repo methods dataset_repo_mock.find.return_value = DatasetResponseSchema( @@ -124,7 +123,7 @@ def test_set_task_use_case_success( task_repo_mock.create.assert_called_once_with( TaskCreateSchema( status=TaskStatus.CREATED, - config=task_config.model_dump(exclude_unset=True), + config=task_config, dataset_id=dataset_id, ), unit_of_work_mock.__enter__.return_value, @@ -144,17 +143,16 @@ def test_set_task_use_case_success( def test_set_task_use_case_dataset_not_found( - set_task_use_case: SetTask, - unit_of_work_mock: UnitOfWork, - dataset_repo_mock: DatasetRepo, - task_repo_mock: TaskRepo, - 
profiling_task_worker: ProfilingTaskWorker, + set_task_use_case, + unit_of_work_mock, + dataset_repo_mock, + task_repo_mock, + profiling_task_worker, ): # Prepare data dataset_id = uuid4() - task_config = FdTaskConfig( - primitive_name=PrimitiveName.fd, config={"algo_name": FdAlgoName.Aid} - ) + aid_config = AidConfig(algo_name="aid") # type: ignore + task_config = FdTaskConfig(primitive_name=PrimitiveName.fd, config=aid_config) # Mocks repo methods dataset_repo_mock.find.return_value = None diff --git a/tests/usecase/test_update_task_info.py b/tests/usecase/test_update_task_info.py index 66e335db..fe7a0e1c 100644 --- a/tests/usecase/test_update_task_info.py +++ b/tests/usecase/test_update_task_info.py @@ -1,12 +1,10 @@ -from uuid import uuid4 - import pytest +from uuid import uuid4 from pytest_mock import MockerFixture - from internal.domain.task.value_objects import TaskStatus from internal.dto.repository.task import TaskUpdateSchema, TaskFindSchema from internal.dto.repository.task.task import TaskNotFoundException -from internal.uow import UnitOfWork, DataStorageContext +from internal.uow import DataStorageContext from internal.usecase.task.update_task_info import TaskRepo, UpdateTaskInfo from internal.usecase.task.exception import ( TaskNotFoundException as TaskNotFoundUseCaseException, @@ -14,12 +12,10 @@ @pytest.fixture -def unit_of_work_mock(mocker: MockerFixture) -> UnitOfWork: +def unit_of_work_mock(mocker: MockerFixture): mock = mocker.MagicMock() - mock.__enter__.return_value = mocker.Mock( - return_value=mocker.Mock(), spec=DataStorageContext - ) - mock.__exit__.return_value = None + mock.__enter__.return_value = mocker.Mock(spec=DataStorageContext) + mock.__exit__.return_value = mocker.Mock() def exit_side_effect(exc_type, exc_value, traceback) -> bool: if exc_type: @@ -31,15 +27,14 @@ def exit_side_effect(exc_type, exc_value, traceback) -> bool: @pytest.fixture -def task_repo_mock(mocker: MockerFixture) -> TaskRepo: +def task_repo_mock(mocker: 
MockerFixture): mock = mocker.Mock(spec=TaskRepo) + mock.update = mocker.Mock() return mock @pytest.fixture -def update_task_info_use_case( - unit_of_work_mock: UnitOfWork, task_repo_mock: TaskRepo -) -> UpdateTaskInfo: +def update_task_info_use_case(unit_of_work_mock, task_repo_mock) -> UpdateTaskInfo: return UpdateTaskInfo( unit_of_work=unit_of_work_mock, task_repo=task_repo_mock, @@ -47,9 +42,9 @@ def update_task_info_use_case( def test_update_task_info_success( - update_task_info_use_case: UpdateTaskInfo, - unit_of_work_mock: UnitOfWork, - task_repo_mock: TaskRepo, + update_task_info_use_case, + unit_of_work_mock, + task_repo_mock, ) -> None: # Prepare data task_id = uuid4() @@ -80,11 +75,11 @@ def test_update_task_info_success( ], ) def test_update_task_info_unsuccess( - update_task_info_use_case: UpdateTaskInfo, - unit_of_work_mock: UnitOfWork, - task_repo_mock: TaskRepo, - repo_exception: Exception, - use_case_exception: Exception, + update_task_info_use_case, + unit_of_work_mock, + task_repo_mock, + repo_exception, + use_case_exception, ) -> None: # Prepare data task_id = uuid4() From 835272cb31253416ce784701ec61bbdfb0000f86 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Fri, 27 Sep 2024 01:30:52 +0000 Subject: [PATCH 136/153] chore: add command for type checking in makefile and rename old target names --- Makefile | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 17257111..72546ef0 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -.PHONY: env install-deps up open-db revision migrate downgrade worker app init lint test +.PHONY: env install-deps up open-db pg-revision pg-migrate pg-downgrade celery-worker app init lint test check-types ifeq ($(shell test -e '.env' && echo -n yes), yes) include .env @@ -60,11 +60,14 @@ lint: format: poetry run ruff format tests app & poetry run ruff check --fix & poetry run black tests internal - ## Run all tests in 
project test: poetry run pytest -o log_cli=true --verbosity=2 --showlocals --log-cli-level=INFO --cov=internal --cov-report term +## Check all types +check-types: + poetry run pyright . + .DEFAULT_GOAL := help # See for explanation. help: From e5e1c214e64ea40e212c20e0a769db9392a21129 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Fri, 27 Sep 2024 01:31:31 +0000 Subject: [PATCH 137/153] chore: add type checking to ci --- .github/workflows/run-linter-and-tests.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/run-linter-and-tests.yaml b/.github/workflows/run-linter-and-tests.yaml index 3bbc0c9c..d3a48bed 100644 --- a/.github/workflows/run-linter-and-tests.yaml +++ b/.github/workflows/run-linter-and-tests.yaml @@ -20,6 +20,9 @@ jobs: - name: Run all linters and formatters run: make lint + - name: Run type check + run: make check-types + - name: Up all containers run: docker compose -f dev-docker-compose.yaml up --build --force-recreate --remove-orphans -d From cad5d30ebffe7c5825a9c9432b25f37b33e73ea3 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Mon, 30 Sep 2024 18:18:04 +0000 Subject: [PATCH 138/153] chore(infra): add relational/postgres context maker types --- internal/infrastructure/data_storage/relational/__init__.py | 1 + internal/infrastructure/data_storage/relational/context.py | 5 +++-- .../data_storage/relational/postgres/context.py | 1 + 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/internal/infrastructure/data_storage/relational/__init__.py b/internal/infrastructure/data_storage/relational/__init__.py index bb68759c..6788b8c2 100644 --- a/internal/infrastructure/data_storage/relational/__init__.py +++ b/internal/infrastructure/data_storage/relational/__init__.py @@ -1,3 +1,4 @@ from internal.infrastructure.data_storage.relational.context import ( # noqa: F401 RelationalContextType, + RelationalContextMakerType, 
) diff --git a/internal/infrastructure/data_storage/relational/context.py b/internal/infrastructure/data_storage/relational/context.py index 07284b33..148e976a 100644 --- a/internal/infrastructure/data_storage/relational/context.py +++ b/internal/infrastructure/data_storage/relational/context.py @@ -1,3 +1,4 @@ -from sqlalchemy.orm import Session +from sqlalchemy.orm import Session, sessionmaker -RelationalContextType = Session +type RelationalContextType = Session +type RelationalContextMakerType = sessionmaker diff --git a/internal/infrastructure/data_storage/relational/postgres/context.py b/internal/infrastructure/data_storage/relational/postgres/context.py index 7231e0f4..0382f877 100644 --- a/internal/infrastructure/data_storage/relational/postgres/context.py +++ b/internal/infrastructure/data_storage/relational/postgres/context.py @@ -10,6 +10,7 @@ ) type PostgresContextType = Session +type PostgresContextMakerType = sessionmaker[Session] PostgresContextMaker = sessionmaker(bind=default_engine) PostgresContextMakerWithoutPool = sessionmaker(bind=engine_without_pool) From 459d7970dcc5185bd8263da9d7a2127540bd405b Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Mon, 30 Sep 2024 18:19:03 +0000 Subject: [PATCH 139/153] chore(infra): add helper functions for getting postgres context --- .../data_storage/relational/postgres/__init__.py | 2 ++ .../data_storage/relational/postgres/context.py | 12 ++++++++++-- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/internal/infrastructure/data_storage/relational/postgres/__init__.py b/internal/infrastructure/data_storage/relational/postgres/__init__.py index 67881a86..b12ed1a5 100644 --- a/internal/infrastructure/data_storage/relational/postgres/__init__.py +++ b/internal/infrastructure/data_storage/relational/postgres/__init__.py @@ -1,4 +1,6 @@ from internal.infrastructure.data_storage.relational.postgres.context import ( # noqa: F401 
get_postgres_context_maker, get_postgres_context_maker_without_pool, + get_postgres_context, + get_postgres_context_without_pool, ) diff --git a/internal/infrastructure/data_storage/relational/postgres/context.py b/internal/infrastructure/data_storage/relational/postgres/context.py index 0382f877..142d8351 100644 --- a/internal/infrastructure/data_storage/relational/postgres/context.py +++ b/internal/infrastructure/data_storage/relational/postgres/context.py @@ -15,9 +15,17 @@ PostgresContextMakerWithoutPool = sessionmaker(bind=engine_without_pool) -def get_postgres_context_maker() -> sessionmaker[Session]: +def get_postgres_context() -> PostgresContextType: + return PostgresContextMaker() + + +def get_postgres_context_without_pool() -> PostgresContextType: + return PostgresContextMakerWithoutPool() + + +def get_postgres_context_maker() -> PostgresContextMakerType: return PostgresContextMaker -def get_postgres_context_maker_without_pool() -> sessionmaker[Session]: +def get_postgres_context_maker_without_pool() -> PostgresContextMakerType: return PostgresContextMakerWithoutPool From 79ebcce996260586f264afa4904c65f9fa65b747 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Tue, 1 Oct 2024 15:11:46 +0000 Subject: [PATCH 140/153] feat(infra): implement an analogue of a transaction for local storage --- .../data_storage/flat/__init__.py | 3 + .../data_storage/flat/context.py | 69 +++++++++++++++++-- internal/repository/flat/file.py | 15 ++-- 3 files changed, 71 insertions(+), 16 deletions(-) diff --git a/internal/infrastructure/data_storage/flat/__init__.py b/internal/infrastructure/data_storage/flat/__init__.py index d1fb6e59..e8a8da3a 100644 --- a/internal/infrastructure/data_storage/flat/__init__.py +++ b/internal/infrastructure/data_storage/flat/__init__.py @@ -1,5 +1,8 @@ from internal.infrastructure.data_storage.flat.context import ( # noqa: F401 FlatContextMaker, FlatContext, + get_flat_context, 
get_flat_context_maker, + FlatAddModel, + FlatDeleteModel, ) diff --git a/internal/infrastructure/data_storage/flat/context.py b/internal/infrastructure/data_storage/flat/context.py index 1f8b679f..1bb55be1 100644 --- a/internal/infrastructure/data_storage/flat/context.py +++ b/internal/infrastructure/data_storage/flat/context.py @@ -1,25 +1,79 @@ +import os from pathlib import Path +import aiofiles +import asyncio + +from pydantic import BaseModel + +from internal.dto.repository.file import File from internal.infrastructure.data_storage import settings +CHUNK_SIZE = 1024 + + +class FlatAddModel: + + def __init__(self, file: File, file_name: str): + self.file_name = file_name + self.file = file + + +class FlatDeleteModel(BaseModel): + file_name: str + + class FlatContext: def __init__(self, upload_directory_path: Path): self._upload_directory_path = upload_directory_path + self._is_closed = True + self._to_add: list[FlatAddModel] = [] + self._added: list[Path] = [] @property def upload_directory_path(self) -> Path: return self._upload_directory_path - # This context implementation does not support transactions - def flush(self) -> None: ... + async def async_flush(self) -> None: + for file_model in self._to_add: + path_to_file = Path.joinpath( + self.upload_directory_path, str(file_model.file_name) + ) + async with aiofiles.open(path_to_file, "wb") as out_file: + while content := await file_model.file.read(CHUNK_SIZE): + await out_file.write(content) + self._added.append(path_to_file) + self._to_add.remove(file_model) + + def flush(self) -> None: + """Запускает асинхронный flush внутри синхронного контекста.""" + asyncio.run(self.async_flush()) + + def rollback(self) -> None: + for file_path in self._added: + if file_path.exists(): + os.remove(file_path) + self._added.clear() + self._to_add.clear() - def rollback(self) -> None: ... + def commit(self) -> None: + if self._to_add: + self.flush() + self._added.clear() - def commit(self) -> None: ... 
+ def close(self) -> None: + if self._added: + self.rollback() + self._is_closed = True - def close(self) -> None: ... # TODO: implement flat context closing. + def add(self, file_model: FlatAddModel) -> None: + self._to_add.append(file_model) + + def delete( + self, file_model: FlatDeleteModel + ) -> None: ... # TODO: implement, when needed class FlatContextMaker: @@ -28,5 +82,10 @@ def __call__(self): return FlatContext(settings.uploaded_files_dir_path) +def get_flat_context() -> FlatContext: + context_maker = FlatContextMaker() + return context_maker() + + def get_flat_context_maker() -> FlatContextMaker: return FlatContextMaker() diff --git a/internal/repository/flat/file.py b/internal/repository/flat/file.py index b9d669d7..38b10515 100644 --- a/internal/repository/flat/file.py +++ b/internal/repository/flat/file.py @@ -1,6 +1,5 @@ from pathlib import Path -import aiofiles import pandas as pd from internal.dto.repository.file.file import ( @@ -9,13 +8,10 @@ CSVFileResponseSchema, ) from internal.dto.repository.file import File, FileCreateSchema -from internal.infrastructure.data_storage.flat import FlatContext - -CHUNK_SIZE = 1024 +from internal.infrastructure.data_storage.flat import FlatAddModel, FlatContext class FileRepository: - # The current repository implementation does not support transactions. async def create( self, @@ -23,14 +19,11 @@ async def create( file_info: FileCreateSchema, context: FlatContext, ) -> None: + model = FlatAddModel(file=file, file_name=str(file_info.file_name)) - path_to_file = Path.joinpath( - context.upload_directory_path, str(file_info.file_name) - ) try: - async with aiofiles.open(path_to_file, "wb") as out_file: # !!! 
- while content := await file.read(CHUNK_SIZE): - await out_file.write(content) + context.add(model) + await context.async_flush() except Exception: raise FailedFileReadingException("The sent file could not be read.") From 72910bebd2e3ee546ac23a600f873032a3716fa9 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Tue, 1 Oct 2024 15:13:53 +0000 Subject: [PATCH 141/153] feat(tests): add tests for new flat context --- tests/context/test_flat.py | 114 +++++++++++++++++++++++++++++ tests/repository/flat/test_file.py | 18 ++--- 2 files changed, 123 insertions(+), 9 deletions(-) create mode 100644 tests/context/test_flat.py diff --git a/tests/context/test_flat.py b/tests/context/test_flat.py new file mode 100644 index 00000000..3215cfc5 --- /dev/null +++ b/tests/context/test_flat.py @@ -0,0 +1,114 @@ +import pytest +from pytest_mock import MockerFixture +import aiofiles +from pathlib import Path +from internal.infrastructure.data_storage.flat import FlatContext, FlatAddModel +from internal.dto.repository.file import File + + +@pytest.fixture +def tmp_upload_dir(tmp_path): + return tmp_path + + +@pytest.fixture +def flat_context(tmp_upload_dir): + return FlatContext(upload_directory_path=tmp_upload_dir) + + +@pytest.fixture +def mock_file(mocker: MockerFixture): + mock = mocker.AsyncMock(spec=File) + mock.read = mocker.AsyncMock(side_effect=[b"Hello", b" ", b"World", b""]) + return mock + + +@pytest.fixture +def mock_sync_file(mocker: MockerFixture): + mock = mocker.Mock() + mock.read.side_effect = [b"Hello World", b""] + return mock + + +@pytest.fixture +def file_name(): + return "test_file.txt" + + +@pytest.mark.asyncio +async def test_add_and_flush_file(flat_context, mock_file, file_name): + file_model = FlatAddModel(file=mock_file, file_name=file_name) + flat_context.add(file_model) + await flat_context.async_flush() + added_file_path = Path(flat_context.upload_directory_path) / file_name + assert 
added_file_path.exists() + async with aiofiles.open(added_file_path, "rb") as f: + content = await f.read() + assert content == b"Hello World" + + +def test_add_and_sync_flush_file(flat_context, mock_file, file_name): + file_model = FlatAddModel(file=mock_file, file_name=file_name) + flat_context.add(file_model) + flat_context.flush() + added_file_path = Path(flat_context.upload_directory_path) / file_name + assert added_file_path.exists() + with open(added_file_path, "rb") as f: + content = f.read() + assert content == b"Hello World" + + +@pytest.mark.asyncio +async def test_rollback_on_flush_failure( + flat_context, mock_file, file_name, mocker: MockerFixture +): + file_model = FlatAddModel(file=mock_file, file_name=file_name) + flat_context.add(file_model) + mocker.patch("aiofiles.open", side_effect=Exception("Failed to write file")) + with pytest.raises(Exception, match="Failed to write file"): + await flat_context.async_flush() + assert not Path(flat_context.upload_directory_path / file_name).exists() + + +@pytest.mark.asyncio +async def test_commit_clears_added_list(flat_context, mock_file, file_name): + file_model = FlatAddModel(file=mock_file, file_name=file_name) + flat_context.add(file_model) + await flat_context.async_flush() + flat_context.commit() + added_file_path = Path(flat_context.upload_directory_path) / file_name + assert added_file_path.exists() + assert not flat_context._added + + +@pytest.mark.asyncio +async def test_rollback_clears_files(flat_context, mock_file, file_name): + file_model = FlatAddModel(file=mock_file, file_name=file_name) + flat_context.add(file_model) + await flat_context.async_flush() + added_file_path = Path(flat_context.upload_directory_path) / file_name + assert added_file_path.exists() + flat_context.rollback() + assert not added_file_path.exists() + + +@pytest.mark.asyncio +async def test_close_without_files(flat_context): + flat_context.close() + assert flat_context._is_closed == True + assert flat_context._to_add == 
[] + assert flat_context._added == [] + + +@pytest.mark.asyncio +async def test_close_with_rollback(flat_context, mock_file, file_name): + file_model = FlatAddModel(file=mock_file, file_name=file_name) + flat_context.add(file_model) + await flat_context.async_flush() + added_file_path = Path(flat_context.upload_directory_path) / file_name + assert added_file_path.exists() + flat_context.close() + assert not added_file_path.exists() + assert flat_context._is_closed == True + assert flat_context._to_add == [] + assert flat_context._added == [] diff --git a/tests/repository/flat/test_file.py b/tests/repository/flat/test_file.py index cf5f26c9..b47bc726 100644 --- a/tests/repository/flat/test_file.py +++ b/tests/repository/flat/test_file.py @@ -17,7 +17,7 @@ @pytest.fixture def mock_flat_context(tmp_path, mocker: MockFixture): - context = mocker.MagicMock(spec=FlatContext) + context = mocker.AsyncMock(spec=FlatContext) context.upload_directory_path = tmp_path return context @@ -29,20 +29,18 @@ def file_repository(): @pytest.mark.asyncio async def test_create_file_success( - mocker: MockFixture, file_repository, mock_flat_context + mocker: MockFixture, file_repository, mock_flat_context, tmp_path ): file_name = uuid4() file_content = b"Hello, World!" 
file_info = FileCreateSchema(file_name=file_name) mock_file = mocker.AsyncMock(spec=File) - mock_file.read = mocker.AsyncMock( - side_effect=[file_content, b""] - ) # Читаем содержимое файла - - await file_repository.create(mock_file, file_info, mock_flat_context) + mock_file.read = mocker.AsyncMock(side_effect=[file_content, b""]) + context = FlatContext(tmp_path) + await file_repository.create(mock_file, file_info, context) - created_file_path = mock_flat_context.upload_directory_path / str(file_name) + created_file_path = tmp_path / str(file_name) assert created_file_path.is_file() async with aiofiles.open(created_file_path, "rb") as f: @@ -74,7 +72,9 @@ async def test_create_file_failure( file_info = FileCreateSchema(file_name=file_name) mock_file = mocker.AsyncMock(spec=File) - mock_file.read = mocker.AsyncMock(side_effect=Exception("Read error")) + mock_flat_context.async_flush = mocker.AsyncMock( + side_effect=Exception("Read error") + ) with pytest.raises( FailedFileReadingException, match="The sent file could not be read." 
From eeff5b9f14a0f8085716bc513131cd1317c44daf Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Tue, 1 Oct 2024 15:15:02 +0000 Subject: [PATCH 142/153] feat(infra): add relational add/delete models --- internal/infrastructure/data_storage/relational/__init__.py | 2 ++ internal/infrastructure/data_storage/relational/context.py | 5 ++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/internal/infrastructure/data_storage/relational/__init__.py b/internal/infrastructure/data_storage/relational/__init__.py index 6788b8c2..529c2820 100644 --- a/internal/infrastructure/data_storage/relational/__init__.py +++ b/internal/infrastructure/data_storage/relational/__init__.py @@ -1,4 +1,6 @@ from internal.infrastructure.data_storage.relational.context import ( # noqa: F401 RelationalContextType, RelationalContextMakerType, + RelationalAddModel, + RelationalDeleteModel, ) diff --git a/internal/infrastructure/data_storage/relational/context.py b/internal/infrastructure/data_storage/relational/context.py index 148e976a..fecb70a3 100644 --- a/internal/infrastructure/data_storage/relational/context.py +++ b/internal/infrastructure/data_storage/relational/context.py @@ -1,4 +1,7 @@ -from sqlalchemy.orm import Session, sessionmaker +from sqlalchemy.orm import Session, sessionmaker, DeclarativeBase type RelationalContextType = Session type RelationalContextMakerType = sessionmaker + +RelationalAddModel = DeclarativeBase +RelationalDeleteModel = DeclarativeBase From af7aeb9e11dd238c0b3ef5affef2a633f0152931 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Tue, 1 Oct 2024 22:28:43 +0000 Subject: [PATCH 143/153] chore(tests): divide the test directory into subdirectories for unit and integration testing --- tests/{ => unit}/context/test_flat.py | 0 .../domain/common/test_optional_model.py | 0 tests/{ => unit}/domain/file/test_file_entity.py | 0 tests/{ => 
unit}/domain/task/test_fd.py | 0 tests/{ => unit}/repository/flat/test_file.py | 0 .../repository/postgres/test_dataset.py | 0 .../repository/postgres/test_file_metadata.py | 0 tests/{ => unit}/repository/postgres/test_task.py | 0 tests/{ => unit}/uow/test_unit_of_work.py | 0 .../{ => unit}/usecase/test_check_content_type.py | 0 tests/{ => unit}/usecase/test_profile_task.py | 15 +++++++-------- tests/{ => unit}/usecase/test_retrieve_dataset.py | 0 tests/{ => unit}/usecase/test_retrieve_task.py | 0 tests/{ => unit}/usecase/test_save_dataset.py | 0 tests/{ => unit}/usecase/test_save_file.py | 11 +++++------ tests/{ => unit}/usecase/test_set_task.py | 0 tests/{ => unit}/usecase/test_update_task_info.py | 0 17 files changed, 12 insertions(+), 14 deletions(-) rename tests/{ => unit}/context/test_flat.py (100%) rename tests/{ => unit}/domain/common/test_optional_model.py (100%) rename tests/{ => unit}/domain/file/test_file_entity.py (100%) rename tests/{ => unit}/domain/task/test_fd.py (100%) rename tests/{ => unit}/repository/flat/test_file.py (100%) rename tests/{ => unit}/repository/postgres/test_dataset.py (100%) rename tests/{ => unit}/repository/postgres/test_file_metadata.py (100%) rename tests/{ => unit}/repository/postgres/test_task.py (100%) rename tests/{ => unit}/uow/test_unit_of_work.py (100%) rename tests/{ => unit}/usecase/test_check_content_type.py (100%) rename tests/{ => unit}/usecase/test_profile_task.py (92%) rename tests/{ => unit}/usecase/test_retrieve_dataset.py (100%) rename tests/{ => unit}/usecase/test_retrieve_task.py (100%) rename tests/{ => unit}/usecase/test_save_dataset.py (100%) rename tests/{ => unit}/usecase/test_save_file.py (93%) rename tests/{ => unit}/usecase/test_set_task.py (100%) rename tests/{ => unit}/usecase/test_update_task_info.py (100%) diff --git a/tests/context/test_flat.py b/tests/unit/context/test_flat.py similarity index 100% rename from tests/context/test_flat.py rename to tests/unit/context/test_flat.py diff --git 
a/tests/domain/common/test_optional_model.py b/tests/unit/domain/common/test_optional_model.py similarity index 100% rename from tests/domain/common/test_optional_model.py rename to tests/unit/domain/common/test_optional_model.py diff --git a/tests/domain/file/test_file_entity.py b/tests/unit/domain/file/test_file_entity.py similarity index 100% rename from tests/domain/file/test_file_entity.py rename to tests/unit/domain/file/test_file_entity.py diff --git a/tests/domain/task/test_fd.py b/tests/unit/domain/task/test_fd.py similarity index 100% rename from tests/domain/task/test_fd.py rename to tests/unit/domain/task/test_fd.py diff --git a/tests/repository/flat/test_file.py b/tests/unit/repository/flat/test_file.py similarity index 100% rename from tests/repository/flat/test_file.py rename to tests/unit/repository/flat/test_file.py diff --git a/tests/repository/postgres/test_dataset.py b/tests/unit/repository/postgres/test_dataset.py similarity index 100% rename from tests/repository/postgres/test_dataset.py rename to tests/unit/repository/postgres/test_dataset.py diff --git a/tests/repository/postgres/test_file_metadata.py b/tests/unit/repository/postgres/test_file_metadata.py similarity index 100% rename from tests/repository/postgres/test_file_metadata.py rename to tests/unit/repository/postgres/test_file_metadata.py diff --git a/tests/repository/postgres/test_task.py b/tests/unit/repository/postgres/test_task.py similarity index 100% rename from tests/repository/postgres/test_task.py rename to tests/unit/repository/postgres/test_task.py diff --git a/tests/uow/test_unit_of_work.py b/tests/unit/uow/test_unit_of_work.py similarity index 100% rename from tests/uow/test_unit_of_work.py rename to tests/unit/uow/test_unit_of_work.py diff --git a/tests/usecase/test_check_content_type.py b/tests/unit/usecase/test_check_content_type.py similarity index 100% rename from tests/usecase/test_check_content_type.py rename to tests/unit/usecase/test_check_content_type.py diff 
--git a/tests/usecase/test_profile_task.py b/tests/unit/usecase/test_profile_task.py similarity index 92% rename from tests/usecase/test_profile_task.py rename to tests/unit/usecase/test_profile_task.py index 99ef932a..2a332c46 100644 --- a/tests/usecase/test_profile_task.py +++ b/tests/unit/usecase/test_profile_task.py @@ -61,8 +61,7 @@ def profile_task_use_case( file_repo_mock, ) -> ProfileTask: return ProfileTask( - file_unit_of_work=unit_of_work_mock, - dataset_unit_of_work=unit_of_work_mock, + unit_of_work=unit_of_work_mock, dataset_repo=dataset_repo_mock, file_repo=file_repo_mock, ) @@ -144,9 +143,9 @@ def test_profile_task_use_case_success( table=cvs_file_read_response, task_config=task_config ) - # Check that UnitOfWork was entered and exited correctly - assert unit_of_work_mock.__enter__.call_count == 2 - assert unit_of_work_mock.__exit__.call_count == 2 + # Verify that UnitOfWork was used correctly + unit_of_work_mock.__enter__.assert_called_once() + unit_of_work_mock.__exit__.assert_called_once() @pytest.mark.parametrize( @@ -185,6 +184,6 @@ def test_profile_task_use_case_dataset_not_found( assert not file_repo_mock.find.called - # Check that UnitOfWork was entered and exited correctly - assert unit_of_work_mock.__enter__.call_count == 2 - assert unit_of_work_mock.__exit__.call_count == 2 + # Verify that UnitOfWork was used correctly + unit_of_work_mock.__enter__.assert_called_once() + unit_of_work_mock.__exit__.assert_called_once() diff --git a/tests/usecase/test_retrieve_dataset.py b/tests/unit/usecase/test_retrieve_dataset.py similarity index 100% rename from tests/usecase/test_retrieve_dataset.py rename to tests/unit/usecase/test_retrieve_dataset.py diff --git a/tests/usecase/test_retrieve_task.py b/tests/unit/usecase/test_retrieve_task.py similarity index 100% rename from tests/usecase/test_retrieve_task.py rename to tests/unit/usecase/test_retrieve_task.py diff --git a/tests/usecase/test_save_dataset.py b/tests/unit/usecase/test_save_dataset.py 
similarity index 100% rename from tests/usecase/test_save_dataset.py rename to tests/unit/usecase/test_save_dataset.py diff --git a/tests/usecase/test_save_file.py b/tests/unit/usecase/test_save_file.py similarity index 93% rename from tests/usecase/test_save_file.py rename to tests/unit/usecase/test_save_file.py index 11cba635..7b6eb97c 100644 --- a/tests/usecase/test_save_file.py +++ b/tests/unit/usecase/test_save_file.py @@ -71,8 +71,7 @@ def save_file( "internal.usecase.file.save_file.FileEntity", return_value=file_entity_mock ) return SaveFile( - file_unit_of_work=unit_of_work_mock, - file_info_unit_of_work=unit_of_work_mock, + unit_of_work=unit_of_work_mock, file_repo=file_repo_mock, file_metadata_repo=file_metadata_repo_mock, ) @@ -132,8 +131,8 @@ async def test_save_file( ) # Verify that UnitOfWork was used correctly - assert unit_of_work_mock.__enter__.call_count == 2 - assert unit_of_work_mock.__exit__.call_count == 2 + unit_of_work_mock.__enter__.assert_called_once() + unit_of_work_mock.__exit__.assert_called_once() # Verify that the result matches the expected SaveFileUseCaseResult assert result == SaveFileUseCaseResult( @@ -173,5 +172,5 @@ async def test_save_file_failed_read_file_exception( file_repo_mock.create.assert_called_once() # Verify that UnitOfWork was used correctly - assert unit_of_work_mock.__enter__.call_count == 2 - assert unit_of_work_mock.__exit__.call_count == 2 + unit_of_work_mock.__enter__.assert_called_once() + unit_of_work_mock.__exit__.assert_called_once() diff --git a/tests/usecase/test_set_task.py b/tests/unit/usecase/test_set_task.py similarity index 100% rename from tests/usecase/test_set_task.py rename to tests/unit/usecase/test_set_task.py diff --git a/tests/usecase/test_update_task_info.py b/tests/unit/usecase/test_update_task_info.py similarity index 100% rename from tests/usecase/test_update_task_info.py rename to tests/unit/usecase/test_update_task_info.py From 9025b96f3dd68947981f9701b3c1194be491116f Mon Sep 17 
00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Tue, 1 Oct 2024 22:30:26 +0000 Subject: [PATCH 144/153] feat(infra): implement sync flush method for flat context --- internal/infrastructure/data_storage/flat/context.py | 12 +++++++++--- tests/unit/context/test_flat.py | 4 ++-- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/internal/infrastructure/data_storage/flat/context.py b/internal/infrastructure/data_storage/flat/context.py index 1bb55be1..21061c10 100644 --- a/internal/infrastructure/data_storage/flat/context.py +++ b/internal/infrastructure/data_storage/flat/context.py @@ -2,7 +2,6 @@ from pathlib import Path import aiofiles -import asyncio from pydantic import BaseModel @@ -48,8 +47,15 @@ async def async_flush(self) -> None: self._to_add.remove(file_model) def flush(self) -> None: - """Запускает асинхронный flush внутри синхронного контекста.""" - asyncio.run(self.async_flush()) + for file_model in self._to_add: + path_to_file = Path.joinpath( + self.upload_directory_path, str(file_model.file_name) + ) + with open(path_to_file, "wb") as out_file: + while content := file_model.file.read(CHUNK_SIZE): + out_file.write(content) # type: ignore + self._added.append(path_to_file) + self._to_add.remove(file_model) def rollback(self) -> None: for file_path in self._added: diff --git a/tests/unit/context/test_flat.py b/tests/unit/context/test_flat.py index 3215cfc5..1232b802 100644 --- a/tests/unit/context/test_flat.py +++ b/tests/unit/context/test_flat.py @@ -47,8 +47,8 @@ async def test_add_and_flush_file(flat_context, mock_file, file_name): assert content == b"Hello World" -def test_add_and_sync_flush_file(flat_context, mock_file, file_name): - file_model = FlatAddModel(file=mock_file, file_name=file_name) +def test_add_and_sync_flush_file(flat_context, mock_sync_file, file_name): + file_model = FlatAddModel(file=mock_sync_file, file_name=file_name) flat_context.add(file_model) flat_context.flush() 
added_file_path = Path(flat_context.upload_directory_path) / file_name From 6550ceeedbdea8f1759925c88b27305b76fec6bc Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Tue, 1 Oct 2024 22:31:43 +0000 Subject: [PATCH 145/153] feat(infra): add universal context for storages --- .../infrastructure/data_storage/__init__.py | 9 ++ .../infrastructure/data_storage/context.py | 105 ++++++++++++++++++ 2 files changed, 114 insertions(+) create mode 100644 internal/infrastructure/data_storage/context.py diff --git a/internal/infrastructure/data_storage/__init__.py b/internal/infrastructure/data_storage/__init__.py index 8d705c41..8c7e024f 100644 --- a/internal/infrastructure/data_storage/__init__.py +++ b/internal/infrastructure/data_storage/__init__.py @@ -1,3 +1,12 @@ from internal.infrastructure.data_storage.settings import get_settings settings = get_settings() + + +from internal.infrastructure.data_storage.context import ( + Context, + get_context, + get_context_without_pool, + get_context_maker, + get_context_maker_without_pool, +) # noqa: F401 diff --git a/internal/infrastructure/data_storage/context.py b/internal/infrastructure/data_storage/context.py new file mode 100644 index 00000000..ec15d1a9 --- /dev/null +++ b/internal/infrastructure/data_storage/context.py @@ -0,0 +1,105 @@ +from typing import Any + +from internal.infrastructure.data_storage.flat import ( + FlatContext, + get_flat_context_maker, +) +from internal.infrastructure.data_storage.relational.postgres import ( + get_postgres_context_maker, + get_postgres_context_maker_without_pool, +) +from internal.infrastructure.data_storage.flat import ( + FlatAddModel, + FlatDeleteModel, +) +from internal.infrastructure.data_storage.relational import ( + RelationalAddModel, + RelationalDeleteModel, + RelationalContextType, +) + + +class Context: + + def __init__( + self, postgres_context: RelationalContextType, flat_context: FlatContext + ): + self._postgres_context = 
postgres_context + self._flat_context = flat_context + + @property + def flat_context(self): + return self._flat_context + + @property + def postgres_context(self): + return self._postgres_context + + def commit(self): + self._postgres_context.commit() + self._flat_context.commit() + + def rollback(self): + self._postgres_context.rollback() + self._flat_context.rollback() + + def close(self): + self._postgres_context.close() + self._flat_context.close() + + def flush(self): + self._postgres_context.flush() + self._flat_context.flush() + + async def async_flush(self): + self._postgres_context.flush() # async calling not supported + await self._flat_context.async_flush() + + def add(self, model: RelationalAddModel | FlatAddModel): + if isinstance(model, RelationalAddModel): + self._postgres_context.add(model) + if isinstance(model, FlatAddModel): + self._flat_context.add(model) + + def delete(self, model: RelationalDeleteModel | FlatDeleteModel): + if isinstance(model, RelationalDeleteModel): + self._postgres_context.delete(model) + if isinstance(model, FlatDeleteModel): + self._flat_context.delete(model) + + def execute(self, *args) -> Any: + # Only for relational storages. 
+ return self._postgres_context.execute(*args) + + +class ContextMaker: + + def __init__(self, *, use_pool: bool = True): + if use_pool: + self._postgres_context_maker = get_postgres_context_maker() + else: + self._postgres_context_maker = get_postgres_context_maker_without_pool() + self._flat_context_maker = get_flat_context_maker() + + def __call__(self) -> Context: + postgres_context = self._postgres_context_maker() + flat_context = self._flat_context_maker() + return Context(postgres_context, flat_context) + + +def get_context_maker(): + return ContextMaker() + + +def get_context_maker_without_pool(): + return ContextMaker(use_pool=False) + + +def get_context(): + maker = get_context_maker() + return maker() + + +def get_context_without_pool(): + maker = get_context_maker_without_pool() + return maker() From 07857a3197742e6089a611b277587c86a2123e23 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Tue, 1 Oct 2024 22:32:32 +0000 Subject: [PATCH 146/153] feat(tests): add tests for universal context --- tests/unit/context/test_context.py | 122 +++++++++++++++++++++++++++++ 1 file changed, 122 insertions(+) create mode 100644 tests/unit/context/test_context.py diff --git a/tests/unit/context/test_context.py b/tests/unit/context/test_context.py new file mode 100644 index 00000000..e6651dc2 --- /dev/null +++ b/tests/unit/context/test_context.py @@ -0,0 +1,122 @@ +import pytest +from pytest_mock import MockerFixture +from internal.infrastructure.data_storage.flat import ( + FlatAddModel, + FlatDeleteModel, + FlatContext, +) +from internal.infrastructure.data_storage.relational import ( + RelationalAddModel, + RelationalDeleteModel, + RelationalContextType, +) +from internal.infrastructure.data_storage import ( + Context, +) + + +@pytest.fixture +def mock_postgres_context(mocker: MockerFixture): + mock_context = mocker.Mock(spec=RelationalContextType) + mock_context.commit = mocker.Mock() + mock_context.rollback = 
mocker.Mock() + mock_context.close = mocker.Mock() + mock_context.flush = mocker.Mock() + mock_context.add = mocker.Mock() + mock_context.delete = mocker.Mock() + mock_context.execute = mocker.Mock() + return mock_context + + +@pytest.fixture +def mock_flat_context(mocker: MockerFixture): + mock_context = mocker.Mock(spec=FlatContext) + mock_context.commit = mocker.Mock() + mock_context.rollback = mocker.Mock() + mock_context.close = mocker.Mock() + mock_context.flush = mocker.Mock() + mock_context.add = mocker.Mock() + mock_context.delete = mocker.Mock() + mock_context.async_flush = mocker.AsyncMock() + return mock_context + + +@pytest.fixture +def context(mock_postgres_context, mock_flat_context): + return Context(mock_postgres_context, mock_flat_context) + + +def test_context_commit(context, mock_postgres_context, mock_flat_context): + context.commit() + mock_postgres_context.commit.assert_called_once() + mock_flat_context.commit.assert_called_once() + + +def test_context_rollback(context, mock_postgres_context, mock_flat_context): + context.rollback() + mock_postgres_context.rollback.assert_called_once() + mock_flat_context.rollback.assert_called_once() + + +def test_context_close(context, mock_postgres_context, mock_flat_context): + context.close() + mock_postgres_context.close.assert_called_once() + mock_flat_context.close.assert_called_once() + + +def test_context_flush(context, mock_postgres_context, mock_flat_context): + context.flush() + mock_postgres_context.flush.assert_called_once() + mock_flat_context.flush.assert_called_once() + + +@pytest.mark.asyncio +async def test_context_async_flush( + mocker: MockerFixture, context, mock_postgres_context, mock_flat_context +): + mock_flat_context.async_flush = mocker.AsyncMock() + await context.async_flush() + mock_postgres_context.flush.assert_called_once() + mock_flat_context.async_flush.assert_called_once() + mock_flat_context.flush.assert_not_called() + + +def test_context_add_relational_model( + context, 
mock_postgres_context, mock_flat_context +): + relational_model = RelationalAddModel() + context.add(relational_model) + mock_postgres_context.add.assert_called_once_with(relational_model) + mock_flat_context.add.assert_not_called() + + +def test_context_add_flat_model( + mocker: MockerFixture, context, mock_postgres_context, mock_flat_context +): + flat_model = FlatAddModel(file=mocker.Mock(), file_name="test_file.txt") + context.add(flat_model) + mock_flat_context.add.assert_called_once_with(flat_model) + mock_postgres_context.add.assert_not_called() + + +def test_context_delete_relational_model( + context, mock_postgres_context, mock_flat_context +): + relational_model = RelationalDeleteModel() + context.delete(relational_model) + mock_postgres_context.delete.assert_called_once_with(relational_model) + mock_flat_context.delete.assert_not_called() + + +def test_context_delete_flat_model(context, mock_postgres_context, mock_flat_context): + flat_model = FlatDeleteModel(file_name="test_file.txt") + context.delete(flat_model) + mock_flat_context.delete.assert_called_once_with(flat_model) + mock_postgres_context.add.assert_not_called() + + +def test_context_execute(context, mock_postgres_context, mock_flat_context): + query = "SELECT * FROM users" + context.execute(query) + mock_postgres_context.execute.assert_called_once_with(query) + mock_postgres_context.add.assert_not_called() From d26bec29124bc0e85094d0e51ab4f05aa790bb30 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Tue, 1 Oct 2024 22:33:35 +0000 Subject: [PATCH 147/153] chore(usecase): add missed task usecases imports --- internal/usecase/task/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/internal/usecase/task/__init__.py b/internal/usecase/task/__init__.py index 5c6536ca..a954e33c 100644 --- a/internal/usecase/task/__init__.py +++ b/internal/usecase/task/__init__.py @@ -1,2 +1,4 @@ from internal.usecase.task.retrieve_task import 
RetrieveTask # noqa: F401 from internal.usecase.task.set_task import SetTask # noqa: F401 +from internal.usecase.task.profile_task import ProfileTask # noqa: F401 +from internal.usecase.task.update_task_info import UpdateTaskInfo # noqa: F401 From b2ffbe3c0e160e762783f4435a8d22237344abc0 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Wed, 2 Oct 2024 14:32:11 +0000 Subject: [PATCH 148/153] feat(usecase): make a single unit of work in each use case and its usage in app --- .../background_task/celery/task/di.py | 23 ++++----- .../infrastructure/data_storage/__init__.py | 1 + .../infrastructure/data_storage/context.py | 27 ++++++++-- .../data_storage/flat/context.py | 23 ++++++--- internal/repository/flat/file.py | 11 ++-- internal/repository/relational/crud.py | 22 +++----- .../repository/relational/file/dataset.py | 6 +-- internal/rest/http/di.py | 16 ++---- internal/rest/http/file/di.py | 5 +- internal/usecase/file/save_file.py | 28 ++++------ internal/usecase/task/profile_task.py | 44 +++++++--------- tests/conftest.py | 38 ++++++++++++-- tests/unit/repository/flat/test_file.py | 51 +++++++------------ 13 files changed, 153 insertions(+), 142 deletions(-) diff --git a/internal/infrastructure/background_task/celery/task/di.py b/internal/infrastructure/background_task/celery/task/di.py index 2f0b54f0..10f17769 100644 --- a/internal/infrastructure/background_task/celery/task/di.py +++ b/internal/infrastructure/background_task/celery/task/di.py @@ -1,7 +1,4 @@ -from internal.infrastructure.data_storage.relational.postgres import ( - get_postgres_context_maker_without_pool, -) -from internal.infrastructure.data_storage.flat import get_flat_context_maker +from internal.infrastructure.data_storage.context import get_context_maker_without_pool from internal.repository.flat import FileRepository from internal.repository.relational.file import DatasetRepository from internal.repository.relational.task import TaskRepository 
@@ -22,10 +19,13 @@ def get_task_repo() -> TaskRepository: return TaskRepository() -def get_update_task_info_use_case(): - context_maker = get_postgres_context_maker_without_pool() +def get_unit_of_work_without_pool() -> UnitOfWork: + context_maker_without_pool = get_context_maker_without_pool() + return UnitOfWork(context_maker_without_pool) + - unit_of_work = UnitOfWork(context_maker) +def get_update_task_info_use_case(): + unit_of_work = get_unit_of_work_without_pool() task_repo = get_task_repo() return UpdateTaskInfo( @@ -35,17 +35,12 @@ def get_update_task_info_use_case(): def get_profile_task_use_case(): - postgres_context_maker = get_postgres_context_maker_without_pool() - flat_context_maker = get_flat_context_maker() - - file_unit_of_work = UnitOfWork(flat_context_maker) - dataset_unit_of_work = UnitOfWork(postgres_context_maker) + unit_of_work = get_unit_of_work_without_pool() file_repo = get_file_repo() dataset_repo = get_dataset_repo() return ProfileTask( - file_unit_of_work=file_unit_of_work, - dataset_unit_of_work=dataset_unit_of_work, + unit_of_work=unit_of_work, file_repo=file_repo, # type: ignore dataset_repo=dataset_repo, # type: ignore ) diff --git a/internal/infrastructure/data_storage/__init__.py b/internal/infrastructure/data_storage/__init__.py index 8c7e024f..75dcc1e6 100644 --- a/internal/infrastructure/data_storage/__init__.py +++ b/internal/infrastructure/data_storage/__init__.py @@ -5,6 +5,7 @@ from internal.infrastructure.data_storage.context import ( Context, + ContextMaker, get_context, get_context_without_pool, get_context_maker, diff --git a/internal/infrastructure/data_storage/context.py b/internal/infrastructure/data_storage/context.py index ec15d1a9..244a8184 100644 --- a/internal/infrastructure/data_storage/context.py +++ b/internal/infrastructure/data_storage/context.py @@ -1,8 +1,11 @@ from typing import Any +from sqlalchemy.orm import sessionmaker, Session + from internal.infrastructure.data_storage.flat import ( FlatContext, 
get_flat_context_maker, + FlatContextMaker, ) from internal.infrastructure.data_storage.relational.postgres import ( get_postgres_context_maker, @@ -74,12 +77,28 @@ def execute(self, *args) -> Any: class ContextMaker: - def __init__(self, *, use_pool: bool = True): + def __init__( + self, + *, + use_pool: bool = True, + postgres_context_maker: sessionmaker[Session] | None = None, + flat_context_maker: FlatContextMaker | None = None, + ): if use_pool: - self._postgres_context_maker = get_postgres_context_maker() + self._postgres_context_maker = ( + postgres_context_maker + if postgres_context_maker + else get_postgres_context_maker() + ) else: - self._postgres_context_maker = get_postgres_context_maker_without_pool() - self._flat_context_maker = get_flat_context_maker() + self._postgres_context_maker = ( + postgres_context_maker + if postgres_context_maker + else get_postgres_context_maker_without_pool() + ) + self._flat_context_maker = ( + flat_context_maker if flat_context_maker else get_flat_context_maker() + ) def __call__(self) -> Context: postgres_context = self._postgres_context_maker() diff --git a/internal/infrastructure/data_storage/flat/context.py b/internal/infrastructure/data_storage/flat/context.py index 21061c10..c0d9ca6b 100644 --- a/internal/infrastructure/data_storage/flat/context.py +++ b/internal/infrastructure/data_storage/flat/context.py @@ -84,14 +84,25 @@ def delete( class FlatContextMaker: + def __init__( + self, *, uploaded_files_dir_path: Path = settings.uploaded_files_dir_path + ): + self.uploaded_files_dir_path = uploaded_files_dir_path + def __call__(self): - return FlatContext(settings.uploaded_files_dir_path) + return FlatContext(self.uploaded_files_dir_path) -def get_flat_context() -> FlatContext: - context_maker = FlatContextMaker() - return context_maker() +def get_flat_context_maker( + *, uploaded_files_dir_path: Path | None = None +) -> FlatContextMaker: + flat_context_maker = ( + 
FlatContextMaker(uploaded_files_dir_path=uploaded_files_dir_path) + if uploaded_files_dir_path + else FlatContextMaker() + ) + return flat_context_maker -def get_flat_context_maker() -> FlatContextMaker: - return FlatContextMaker() +def get_flat_context() -> FlatContext: + return get_flat_context_maker()() diff --git a/internal/repository/flat/file.py b/internal/repository/flat/file.py index 38b10515..a591d1d1 100644 --- a/internal/repository/flat/file.py +++ b/internal/repository/flat/file.py @@ -8,7 +8,8 @@ CSVFileResponseSchema, ) from internal.dto.repository.file import File, FileCreateSchema -from internal.infrastructure.data_storage.flat import FlatAddModel, FlatContext +from internal.infrastructure.data_storage.flat import FlatAddModel +from internal.infrastructure.data_storage import Context class FileRepository: @@ -17,7 +18,7 @@ async def create( self, file: File, file_info: FileCreateSchema, - context: FlatContext, + context: Context, ) -> None: model = FlatAddModel(file=file, file_name=str(file_info.file_name)) @@ -30,10 +31,12 @@ async def create( def find( self, file_info: CSVFileFindSchema, - context: FlatContext, + context: Context, ) -> CSVFileResponseSchema: - path_to_file = Path(context.upload_directory_path, str(file_info.file_name)) + path_to_file = Path( + context.flat_context.upload_directory_path, str(file_info.file_name) + ) return pd.read_csv( path_to_file, diff --git a/internal/repository/relational/crud.py b/internal/repository/relational/crud.py index 73fad976..d97b03f1 100644 --- a/internal/repository/relational/crud.py +++ b/internal/repository/relational/crud.py @@ -9,9 +9,7 @@ BaseFindSchema, BaseResponseSchema, ) -from internal.infrastructure.data_storage.relational.context import ( - RelationalContextType, -) +from internal.infrastructure.data_storage import Context class CRUD[ @@ -29,26 +27,20 @@ def __init__( self._orm_model: Type[ORMModel] = orm_model self._response_schema: Type[ResponseSchema] = response_schema - def create( - 
self, create_schema: CreateSchema, context: RelationalContextType - ) -> ResponseSchema: + def create(self, create_schema: CreateSchema, context: Context) -> ResponseSchema: create_schema_dict = create_schema.model_dump() db_model_instance = self._orm_model(**create_schema_dict) context.add(db_model_instance) context.flush() return self._response_schema.model_validate(db_model_instance) - def _find( - self, find_schema: FindSchema, context: RelationalContextType - ) -> ORMModel | None: + def _find(self, find_schema: FindSchema, context: Context) -> ORMModel | None: find_schema_dict = find_schema.model_dump() stmt = select(self._orm_model).filter_by(**find_schema_dict) db_model_instance = context.execute(stmt).scalars().one_or_none() return db_model_instance - def find( - self, find_schema: FindSchema, context: RelationalContextType - ) -> ResponseSchema | None: + def find(self, find_schema: FindSchema, context: Context) -> ResponseSchema | None: db_model_instance = self._find(find_schema, context) response = ( self._response_schema.model_validate(db_model_instance) @@ -61,7 +53,7 @@ def find_or_create( self, find_schema: FindSchema, create_schema: CreateSchema, - context: RelationalContextType, + context: Context, ) -> ResponseSchema: db_model_instance = self._find(find_schema, context) @@ -74,7 +66,7 @@ def update( find_schema: FindSchema, update_schema: UpdateSchema, fields_to_update_if_none: set[str] | None, - context: RelationalContextType, + context: Context, ) -> ResponseSchema: db_model_instance = self._find(find_schema, context) @@ -93,7 +85,7 @@ def update( return self._response_schema.model_validate(db_model_instance) def delete( - self, find_schema: FindSchema, context: RelationalContextType + self, find_schema: FindSchema, context: Context ) -> ResponseSchema | None: db_model_instance = self._find(find_schema, context) if not db_model_instance: diff --git a/internal/repository/relational/file/dataset.py b/internal/repository/relational/file/dataset.py 
index 8b7898e7..84fb2b0d 100644 --- a/internal/repository/relational/file/dataset.py +++ b/internal/repository/relational/file/dataset.py @@ -1,9 +1,7 @@ from sqlalchemy import select from sqlalchemy.orm import joinedload -from internal.infrastructure.data_storage.relational.context import ( - RelationalContextType, -) +from internal.infrastructure.data_storage import Context from internal.infrastructure.data_storage.relational.model.file import DatasetORM from internal.repository.relational import CRUD from internal.dto.repository.file import ( @@ -32,7 +30,7 @@ def __init__(self): def find_with_file_metadata( self, dataset_info: DatasetFindSchema, - context: RelationalContextType, + context: Context, ) -> tuple[DatasetResponseSchema, FileMetadataResponseSchema]: dataset_find_dict = dataset_info.model_dump() diff --git a/internal/rest/http/di.py b/internal/rest/http/di.py index 4c59417a..a034ac55 100644 --- a/internal/rest/http/di.py +++ b/internal/rest/http/di.py @@ -1,9 +1,8 @@ from fastapi import Depends -from internal.infrastructure.data_storage.flat import FlatContextMaker -from internal.infrastructure.data_storage.relational.postgres.context import ( - get_postgres_context_maker, - get_postgres_context_maker_without_pool, +from internal.infrastructure.data_storage import ( + get_context_maker, + get_context_maker_without_pool, ) from internal.repository.flat import FileRepository from internal.repository.relational.file import ( @@ -14,23 +13,18 @@ from internal.uow import UnitOfWork -def get_unit_of_work(context_maker=Depends(get_postgres_context_maker)) -> UnitOfWork: +def get_unit_of_work(context_maker=Depends(get_context_maker)) -> UnitOfWork: return UnitOfWork(context_maker) def get_unit_of_work_without_pool( - context_maker=Depends(get_postgres_context_maker_without_pool), + context_maker=Depends(get_context_maker_without_pool), ) -> UnitOfWork: return UnitOfWork(context_maker) -def get_flat_unit_of_work(context_maker: FlatContextMaker = Depends()) -> 
UnitOfWork: - - return UnitOfWork(context_maker) - - def get_file_repo() -> FileRepository: return FileRepository() diff --git a/internal/rest/http/file/di.py b/internal/rest/http/file/di.py index 6f25b502..99911789 100644 --- a/internal/rest/http/file/di.py +++ b/internal/rest/http/file/di.py @@ -5,7 +5,6 @@ get_file_repo, get_file_metadata_repo, get_dataset_repo, - get_flat_unit_of_work, ) from internal.uow import UnitOfWork from internal.usecase.file import SaveFile, SaveDataset, CheckContentType @@ -17,13 +16,11 @@ def get_save_file_use_case( unit_of_work: UnitOfWork = Depends(get_unit_of_work), - flat_unit_of_work: UnitOfWork = Depends(get_flat_unit_of_work), file_repo: FileRepo = Depends(get_file_repo), file_metadata_repo: FileMetadataRepo = Depends(get_file_metadata_repo), ) -> SaveFile: return SaveFile( - file_info_unit_of_work=unit_of_work, - file_unit_of_work=flat_unit_of_work, + unit_of_work=unit_of_work, file_repo=file_repo, file_metadata_repo=file_metadata_repo, ) diff --git a/internal/usecase/file/save_file.py b/internal/usecase/file/save_file.py index 82c2cb2b..60be650a 100644 --- a/internal/usecase/file/save_file.py +++ b/internal/usecase/file/save_file.py @@ -44,17 +44,12 @@ class SaveFile: def __init__( self, - # It is assumed that the two repositories will be associated with different repositories. - # In order to support different repositories, different UoW will be needed. - # If both of your repositories are linked to the same repository, use only one of the UoW. 
- file_info_unit_of_work: UnitOfWork, - file_unit_of_work: UnitOfWork, + unit_of_work: UnitOfWork, file_repo: FileRepo, file_metadata_repo: FileMetadataRepo, ): - self.file_info_unit_of_work = file_info_unit_of_work - self.file_unit_of_work = file_unit_of_work + self.unit_of_work = unit_of_work self.file_repo = file_repo self.file_metadata_repo = file_metadata_repo @@ -68,17 +63,14 @@ async def __call__(self, *, upload_file: File) -> SaveFileUseCaseResult: mime_type=upload_file.content_type, ) - with self.file_unit_of_work as file_context: - with self.file_info_unit_of_work as file_info_context: - try: - response = self.file_metadata_repo.create( - file_metadata_create_schema, file_info_context - ) - await self.file_repo.create( - upload_file, create_file_schema, file_context - ) - except FailedFileReadingException as e: - raise FailedReadFileException(str(e)) + with self.unit_of_work as context: + try: + response = self.file_metadata_repo.create( + file_metadata_create_schema, context + ) + await self.file_repo.create(upload_file, create_file_schema, context) + except FailedFileReadingException as e: + raise FailedReadFileException(str(e)) return SaveFileUseCaseResult( id=response.id, diff --git a/internal/usecase/task/profile_task.py b/internal/usecase/task/profile_task.py index b42efdd3..e260d4a2 100644 --- a/internal/usecase/task/profile_task.py +++ b/internal/usecase/task/profile_task.py @@ -40,40 +40,34 @@ class ProfileTask: def __init__( self, - # It is assumed that the two repositories will be associated with different repositories. - # In order to support different repositories, different UoW will be needed. - # If both of your repositories are linked to the same repository, use only one of the UoW. 
- file_unit_of_work: UnitOfWork, - dataset_unit_of_work: UnitOfWork, + unit_of_work: UnitOfWork, file_repo: FileRepo, dataset_repo: DatasetRepo, ): - self.file_unit_of_work = file_unit_of_work - self.dataset_unit_of_work = dataset_unit_of_work + self.unit_of_work = unit_of_work self.file_repo = file_repo self.dataset_repo = dataset_repo def __call__(self, *, dataset_id: UUID, config: OneOfTaskConfig) -> OneOfTaskResult: - with self.file_unit_of_work as file_context: - with self.dataset_unit_of_work as dataset_context: - try: - dataset, file_metadata = self.dataset_repo.find_with_file_metadata( - DatasetFindSchema(id=dataset_id), dataset_context - ) + with self.unit_of_work as context: + try: + dataset, file_metadata = self.dataset_repo.find_with_file_metadata( + DatasetFindSchema(id=dataset_id), context + ) - df = self.file_repo.find( - CSVFileFindSchema( - file_name=file_metadata.file_name, - separator=dataset.separator, - header=dataset.header, - ), - file_context, - ) - except DatasetNotFoundException: - raise DatasetNotFoundUseCaseException() - except FileMetadataNotFoundException: - raise FileMetadataNotFoundUseCaseException() + df = self.file_repo.find( + CSVFileFindSchema( + file_name=file_metadata.file_name, + separator=dataset.separator, + header=dataset.header, + ), + context, + ) + except DatasetNotFoundException: + raise DatasetNotFoundUseCaseException() + except FileMetadataNotFoundException: + raise FileMetadataNotFoundUseCaseException() task = match_task_by_primitive_name(primitive_name=config.primitive_name) result = task.execute(table=df, task_config=config) # type: ignore diff --git a/tests/conftest.py b/tests/conftest.py index 3c231387..d8143501 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,7 +5,8 @@ import logging from internal.infrastructure.data_storage.relational.model import ORMBaseModel -from internal.infrastructure.data_storage import settings +from internal.infrastructure.data_storage import settings, ContextMaker +from 
internal.infrastructure.data_storage.flat import get_flat_context_maker # https://stackoverflow.com/questions/61582142/test-pydantic-settings-in-fastapi # Maybe should be overriden by env vars for testing only @@ -14,6 +15,11 @@ test_engine = create_engine(settings.postgres_dsn.unicode_string()) +@pytest.fixture +def tmp_upload_dir(tmp_path): + return tmp_path + + @pytest.fixture(scope="session", autouse=True) def prepare_postgres(): logging.info("Setup database: %s", settings.postgres_dsn.unicode_string()) @@ -23,14 +29,38 @@ def prepare_postgres(): ORMBaseModel.metadata.create_all(bind=test_engine) -@pytest.fixture(scope="session") +@pytest.fixture(scope="function") def postgres_context_maker(): - return sessionmaker(test_engine, expire_on_commit=False) + return sessionmaker(bind=test_engine) @pytest.fixture(scope="function") def postgres_context(postgres_context_maker): - context = postgres_context_maker() + return postgres_context_maker() + + +@pytest.fixture(scope="function") +def flat_context_maker(tmp_upload_dir): + return get_flat_context_maker(uploaded_files_dir_path=tmp_upload_dir) + + +@pytest.fixture +def flat_context(flat_context_maker): + return flat_context_maker() + + +@pytest.fixture(scope="function") +def context_maker(postgres_context_maker, flat_context_maker): + context_maker = ContextMaker( + postgres_context_maker=postgres_context_maker, + flat_context_maker=flat_context_maker, + ) + return context_maker + + +@pytest.fixture(scope="function") +def context(context_maker): + context = context_maker() yield context diff --git a/tests/unit/repository/flat/test_file.py b/tests/unit/repository/flat/test_file.py index b47bc726..4948a17c 100644 --- a/tests/unit/repository/flat/test_file.py +++ b/tests/unit/repository/flat/test_file.py @@ -1,7 +1,6 @@ from uuid import uuid4 import pytest -import aiofiles import pandas as pd from pytest_mock import MockFixture @@ -11,79 +10,65 @@ FailedFileReadingException, CSVFileFindSchema, ) -from 
internal.infrastructure.data_storage.flat import FlatContext from internal.repository.flat import FileRepository -@pytest.fixture -def mock_flat_context(tmp_path, mocker: MockFixture): - context = mocker.AsyncMock(spec=FlatContext) - context.upload_directory_path = tmp_path - return context - - @pytest.fixture def file_repository(): return FileRepository() @pytest.mark.asyncio -async def test_create_file_success( - mocker: MockFixture, file_repository, mock_flat_context, tmp_path -): +async def test_create_file_success(mocker: MockFixture, file_repository): file_name = uuid4() - file_content = b"Hello, World!" file_info = FileCreateSchema(file_name=file_name) mock_file = mocker.AsyncMock(spec=File) - mock_file.read = mocker.AsyncMock(side_effect=[file_content, b""]) - context = FlatContext(tmp_path) - await file_repository.create(mock_file, file_info, context) + mock_file.read = mocker.AsyncMock() - created_file_path = tmp_path / str(file_name) - assert created_file_path.is_file() + mock_context = mocker.MagicMock() + mock_context.async_flush = mocker.AsyncMock() - async with aiofiles.open(created_file_path, "rb") as f: - content = await f.read() - assert content == file_content + await file_repository.create(mock_file, file_info, mock_context) + mock_context.add.assert_called_once() + mock_context.async_flush.assert_called_once() -def test_find_file_success(file_repository, mock_flat_context): + +def test_find_file_success(file_repository, context): file_name = uuid4() file_content = "col1,col2\n1,2\n3,4" - file_path = mock_flat_context.upload_directory_path / str(file_name) + file_path = context.flat_context.upload_directory_path / str(file_name) with open(file_path, "w") as f: f.write(file_content) file_info = CSVFileFindSchema(file_name=file_name, separator=",", header=[0]) - result = file_repository.find(file_info, mock_flat_context) + result = file_repository.find(file_info, context) expected_df = pd.DataFrame({"col1": [1, 3], "col2": [2, 4]}) 
pd.testing.assert_frame_equal(result, expected_df) @pytest.mark.asyncio -async def test_create_file_failure( - mocker: MockFixture, file_repository, mock_flat_context -): +async def test_create_file_failure(mocker: MockFixture, file_repository): file_name = uuid4() file_info = FileCreateSchema(file_name=file_name) mock_file = mocker.AsyncMock(spec=File) - mock_flat_context.async_flush = mocker.AsyncMock( - side_effect=Exception("Read error") - ) + + mock_context = mocker.MagicMock() + mock_context.async_flush = mocker.AsyncMock(side_effect=Exception("Read error")) with pytest.raises( FailedFileReadingException, match="The sent file could not be read." ): - await file_repository.create(mock_file, file_info, mock_flat_context) + await file_repository.create(mock_file, file_info, mock_context) -def test_find_file_failure(file_repository, mock_flat_context): +def test_find_file_failure(file_repository, context): file_info = CSVFileFindSchema(file_name=uuid4(), separator=",", header=[0]) with pytest.raises(FileNotFoundError): - file_repository.find(file_info, mock_flat_context) + file_repository.find(file_info, context) From 673611c87ef38b75fd2635cca2d1361bdbb30b9f Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Wed, 2 Oct 2024 14:39:28 +0000 Subject: [PATCH 149/153] feat(repo): add processing for case when value is not found when updating in the database --- internal/dto/repository/exception.py | 3 +++ internal/repository/relational/crud.py | 6 ++++++ 2 files changed, 9 insertions(+) diff --git a/internal/dto/repository/exception.py b/internal/dto/repository/exception.py index e69de29b..27c82a2d 100644 --- a/internal/dto/repository/exception.py +++ b/internal/dto/repository/exception.py @@ -0,0 +1,3 @@ +class ModelNotFoundException(Exception): + def __init__(self, message: str): + super().__init__(message) diff --git a/internal/repository/relational/crud.py b/internal/repository/relational/crud.py index 
d97b03f1..65cb0fc4 100644 --- a/internal/repository/relational/crud.py +++ b/internal/repository/relational/crud.py @@ -10,6 +10,7 @@ BaseResponseSchema, ) from internal.infrastructure.data_storage import Context +from internal.dto.repository.exception import ModelNotFoundException class CRUD[ @@ -70,6 +71,11 @@ def update( ) -> ResponseSchema: db_model_instance = self._find(find_schema, context) + if not db_model_instance: + raise ModelNotFoundException( + "When updating data, the required row was not found in the database." + ) + update_schema_dict = update_schema.model_dump() fields_to_update_if_none = ( fields_to_update_if_none if fields_to_update_if_none else set() From 4b1fcfacc652c3f489ba692e4c9c1388950ab8a4 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Wed, 2 Oct 2024 14:40:26 +0000 Subject: [PATCH 150/153] feat(tests): add tests that check dependencies between modules --- tests/architecture/test_dependencies.py | 142 ++++++++++++++++++++++++ 1 file changed, 142 insertions(+) create mode 100644 tests/architecture/test_dependencies.py diff --git a/tests/architecture/test_dependencies.py b/tests/architecture/test_dependencies.py new file mode 100644 index 00000000..ca845628 --- /dev/null +++ b/tests/architecture/test_dependencies.py @@ -0,0 +1,142 @@ +import os + + +def has_import_from(file_path, module_name): + """Checks whether the specified module is imported in the file.""" + with open(file_path, "r") as f: + for line in f: + if f"import {module_name}" in line or f"from {module_name}" in line: + return True + return False + + +def check_dependencies(module_path): + depends_on_domain = False + depends_on_usecase = False + depends_on_dto = False + depends_on_infrastructure = False + depends_on_repository = False + depends_on_worker = False + depends_on_rest = False + depends_on_uow = False + + for root, _, files in os.walk(module_path): + for file in files: + if file.endswith(".py"): + file_path = 
os.path.join(root, file) + + if has_import_from(file_path, "internal.domain"): + depends_on_domain = True + + if has_import_from(file_path, "internal.usecase"): + depends_on_usecase = True + + if has_import_from(file_path, "internal.dto"): + depends_on_dto = True + + if has_import_from(file_path, "internal.infrastructure"): + depends_on_infrastructure = True + + if has_import_from(file_path, "internal.repository"): + depends_on_repository = True + + if has_import_from(file_path, "internal.worker"): + depends_on_worker = True + + if has_import_from(file_path, "internal.rest"): + depends_on_rest = True + + if has_import_from(file_path, "internal.uow"): + depends_on_uow = True + + return { + "depends_on_domain": depends_on_domain, + "depends_on_usecase": depends_on_usecase, + "depends_on_dto": depends_on_dto, + "depends_on_infrastructure": depends_on_infrastructure, + "depends_on_repository": depends_on_repository, + "depends_on_worker": depends_on_worker, + "depends_on_rest": depends_on_rest, + "depends_on_uow": depends_on_uow, + } + + +def test_domain_is_independent(): + domain_path = "internal/domain" + domain = check_dependencies(domain_path) + + assert not domain["depends_on_usecase"] + assert not domain["depends_on_dto"] + assert not domain["depends_on_infrastructure"] + assert not domain["depends_on_repository"] + assert not domain["depends_on_worker"] + assert not domain["depends_on_rest"] + assert not domain["depends_on_uow"] + + +def test_usecase_dependencies(): + usecase_path = "internal/usecase" + usecase = check_dependencies(usecase_path) + + assert usecase["depends_on_domain"] + assert usecase["depends_on_dto"] + assert usecase["depends_on_uow"] + assert not usecase["depends_on_infrastructure"] + assert not usecase["depends_on_repository"] + assert not usecase["depends_on_worker"] + assert not usecase["depends_on_rest"] + + +def test_infrastructure_dependencies(): + infrastructure_path = "internal/infrastructure" + infrastructure = 
check_dependencies(infrastructure_path) + + assert infrastructure["depends_on_domain"] + assert infrastructure["depends_on_usecase"] + assert infrastructure["depends_on_dto"] + assert not infrastructure["depends_on_rest"] + + +def test_data_storage_dependencies(): + data_storage_path = "internal/infrastructure/data_storage" + data_storage = check_dependencies(data_storage_path) + + assert data_storage["depends_on_dto"] + assert not data_storage["depends_on_repository"] + assert not data_storage["depends_on_uow"] + assert not data_storage["depends_on_rest"] + + +def test_repository_dependencies(): + repository_path = "internal/repository" + repository = check_dependencies(repository_path) + + assert repository["depends_on_dto"] + assert repository["depends_on_infrastructure"] + assert not repository["depends_on_uow"] + assert not repository["depends_on_worker"] + assert not repository["depends_on_rest"] + + +def test_worker_dependencies(): + worker_path = "internal/worker" + worker = check_dependencies(worker_path) + + assert worker["depends_on_dto"] + assert worker["depends_on_infrastructure"] + assert not worker["depends_on_uow"] + assert not worker["depends_on_repository"] + assert not worker["depends_on_rest"] + + +def test_dto_dependencies(): + dto_path = "internal/dto" + dto = check_dependencies(dto_path) + + assert dto["depends_on_domain"] + assert not dto["depends_on_usecase"] + assert not dto["depends_on_infrastructure"] + assert not dto["depends_on_worker"] + assert not dto["depends_on_uow"] + assert not dto["depends_on_repository"] + assert not dto["depends_on_rest"] From e78796754d57beeec7e789c3358d547e2cfa50f4 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Wed, 2 Oct 2024 20:30:11 +0000 Subject: [PATCH 151/153] feat(tests): set up the correct system of fixtures with access to storage systems --- tests/conftest.py | 214 ++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 205 insertions(+), 9 
deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index d8143501..f6d0bd4f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,6 @@ import pytest -from sqlalchemy import create_engine +import shutil +from sqlalchemy import create_engine, NullPool from sqlalchemy.orm import sessionmaker from sqlalchemy_utils import database_exists, create_database import logging @@ -7,17 +8,38 @@ from internal.infrastructure.data_storage.relational.model import ORMBaseModel from internal.infrastructure.data_storage import settings, ContextMaker from internal.infrastructure.data_storage.flat import get_flat_context_maker +from internal.repository.flat import FileRepository +from internal.repository.relational.file import ( + FileMetadataRepository, + DatasetRepository, +) +from internal.repository.relational.task import TaskRepository +from internal.worker.celery import ProfilingTaskWorker +from internal.usecase.file import ( + SaveFile, + SaveDataset, + CheckContentType, + RetrieveDataset, +) +from internal.usecase.task import RetrieveTask, SetTask, ProfileTask, UpdateTaskInfo +from internal.uow import UnitOfWork + +from internal.rest.http.file.di import ( + get_save_file_use_case, + get_save_dataset_use_case, + get_check_content_type_use_case, + get_retrieve_dataset_use_case, +) +from internal.rest.http.task.di import get_set_task_use_case, get_retrieve_task_use_case # https://stackoverflow.com/questions/61582142/test-pydantic-settings-in-fastapi # Maybe should be overriden by env vars for testing only settings.postgres_db = "desbordante-test" test_engine = create_engine(settings.postgres_dsn.unicode_string()) - - -@pytest.fixture -def tmp_upload_dir(tmp_path): - return tmp_path +test_engine_without_pool = create_engine( + settings.postgres_dsn.unicode_string(), poolclass=NullPool +) @pytest.fixture(scope="session", autouse=True) @@ -29,7 +51,16 @@ def prepare_postgres(): ORMBaseModel.metadata.create_all(bind=test_engine) 
-@pytest.fixture(scope="function") +@pytest.fixture(scope="session") +def tmp_upload_dir(tmp_path_factory): + temp_dir = tmp_path_factory.mktemp("uploads") + + yield temp_dir + + shutil.rmtree(temp_dir) + + +@pytest.fixture(scope="session") def postgres_context_maker(): return sessionmaker(bind=test_engine) @@ -39,17 +70,29 @@ def postgres_context(postgres_context_maker): return postgres_context_maker() +@pytest.fixture(scope="session") +def postgres_context_maker_without_pool(): + return sessionmaker(bind=test_engine_without_pool) + + @pytest.fixture(scope="function") +def postgres_context_without_pool(postgres_context_maker_without_pool): + return postgres_context_maker_without_pool() + + +@pytest.fixture(scope="session") def flat_context_maker(tmp_upload_dir): return get_flat_context_maker(uploaded_files_dir_path=tmp_upload_dir) -@pytest.fixture +@pytest.fixture(scope="function") def flat_context(flat_context_maker): return flat_context_maker() -@pytest.fixture(scope="function") +@pytest.fixture( + scope="session", +) def context_maker(postgres_context_maker, flat_context_maker): context_maker = ContextMaker( postgres_context_maker=postgres_context_maker, @@ -67,8 +110,161 @@ def context(context_maker): context.close() +@pytest.fixture(scope="session") +def context_maker_without_pool(postgres_context_maker_without_pool, flat_context_maker): + context_maker = ContextMaker( + postgres_context_maker=postgres_context_maker_without_pool, + flat_context_maker=flat_context_maker, + ) + return context_maker + + +@pytest.fixture(scope="function") +def context_without_pool(context_maker_without_pool): + context = context_maker_without_pool() + + yield context + + context.close() + + @pytest.fixture(autouse=True) def clean_tables(postgres_context): for table in reversed(ORMBaseModel.metadata.sorted_tables): postgres_context.execute(table.delete()) postgres_context.commit() + + +@pytest.fixture(autouse=True) +def clear_tmp_upload_dir(tmp_upload_dir): + yield + for item in 
tmp_upload_dir.iterdir(): + if item.is_file(): + item.unlink() + else: + shutil.rmtree(item) + + +@pytest.fixture(scope="session") +def unit_of_work(context_maker): + return UnitOfWork(context_maker) + + +@pytest.fixture(scope="session") +def unit_of_work_without_pool(context_maker_without_pool): + return UnitOfWork(context_maker_without_pool) + + +@pytest.fixture(scope="session") +def file_repo(): + return FileRepository() + + +@pytest.fixture(scope="session") +def file_metadata_repo(): + return FileMetadataRepository() + + +@pytest.fixture(scope="session") +def dataset_repo(): + return DatasetRepository() + + +@pytest.fixture(scope="session") +def task_repo(): + return TaskRepository() + + +@pytest.fixture(scope="session") +def check_content_type_use_case(): + return CheckContentType() + + +@pytest.fixture(scope="session") +def save_file_use_case(unit_of_work, file_repo, file_metadata_repo): + return SaveFile( + unit_of_work=unit_of_work, + file_repo=file_repo, + file_metadata_repo=file_metadata_repo, + ) + + +@pytest.fixture(scope="session") +def save_dataset_use_case(unit_of_work, dataset_repo): + return SaveDataset( + unit_of_work=unit_of_work, + dataset_repo=dataset_repo, + ) + + +@pytest.fixture(scope="session") +def retrieve_dataset_use_case(unit_of_work, dataset_repo): + return RetrieveDataset( + unit_of_work=unit_of_work, + dataset_repo=dataset_repo, + ) + + +@pytest.fixture(scope="session") +def profiling_task_worker(): + return ProfilingTaskWorker() + + +@pytest.fixture(scope="session") +def retrieve_task_use_case(unit_of_work, task_repo): + return RetrieveTask(unit_of_work=unit_of_work, task_repo=task_repo) + + +@pytest.fixture(scope="session") +def set_task_use_case(unit_of_work, task_repo, dataset_repo, profiling_task_worker): + return SetTask( + unit_of_work=unit_of_work, + task_repo=task_repo, + dataset_repo=dataset_repo, + profiling_task_worker=profiling_task_worker, + ) + + +@pytest.fixture(scope="session") +def 
profile_task_use_case(unit_of_work_without_pool, dataset_repo, file_repo): + return ProfileTask( + unit_of_work=unit_of_work_without_pool, + dataset_repo=task_repo, + file_repo=file_repo, + ) + + +@pytest.fixture(scope="session") +def update_task_info_use_case(unit_of_work_without_pool, task_repo): + return UpdateTaskInfo( + unit_of_work=unit_of_work_without_pool, + task_repo=task_repo, + ) + + +@pytest.fixture(scope="session") +def client( + save_file_use_case, + save_dataset_use_case, + check_content_type_use_case, + set_task_use_case, + retrieve_dataset_use_case, + retrieve_task_use_case, +): + from fastapi.testclient import TestClient + from internal import app + + app.dependency_overrides[get_save_file_use_case] = lambda: save_file_use_case + app.dependency_overrides[get_save_dataset_use_case] = lambda: save_dataset_use_case + app.dependency_overrides[get_check_content_type_use_case] = ( + lambda: check_content_type_use_case + ) + app.dependency_overrides[get_retrieve_dataset_use_case] = ( + lambda: retrieve_dataset_use_case + ) + app.dependency_overrides[get_set_task_use_case] = lambda: set_task_use_case + app.dependency_overrides[get_retrieve_task_use_case] = ( + lambda: retrieve_task_use_case + ) + + return TestClient(app) From 00d24e89be6dc30455910f8ed82377d1a9768f38 Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Wed, 2 Oct 2024 20:47:12 +0000 Subject: [PATCH 152/153] feat(tests): add integration tests for all api usage scenarios --- internal/rest/http/file/di.py | 8 ++- tests/conftest.py | 2 +- tests/datasets/university.txt | 9 +++ tests/integration/common_requests.py | 36 ++++++++++++ tests/integration/test_ping.py | 8 +++ .../integration/test_retrieve_dataset_info.py | 40 +++++++++++++ tests/integration/test_retrieve_task_info.py | 58 +++++++++++++++++++ tests/integration/test_set_profiling_task.py | 42 ++++++++++++++ tests/integration/test_upload_dataset.py | 58 +++++++++++++++++++ 9 files changed, 
258 insertions(+), 3 deletions(-) create mode 100644 tests/datasets/university.txt create mode 100644 tests/integration/common_requests.py create mode 100644 tests/integration/test_ping.py create mode 100644 tests/integration/test_retrieve_dataset_info.py create mode 100644 tests/integration/test_retrieve_task_info.py create mode 100644 tests/integration/test_set_profiling_task.py create mode 100644 tests/integration/test_upload_dataset.py diff --git a/internal/rest/http/file/di.py b/internal/rest/http/file/di.py index 99911789..53126195 100644 --- a/internal/rest/http/file/di.py +++ b/internal/rest/http/file/di.py @@ -7,8 +7,12 @@ get_dataset_repo, ) from internal.uow import UnitOfWork -from internal.usecase.file import SaveFile, SaveDataset, CheckContentType -from internal.usecase.file.retrieve_dataset import RetrieveDataset +from internal.usecase.file import ( + SaveFile, + SaveDataset, + CheckContentType, + RetrieveDataset, +) from internal.usecase.file.save_dataset import DatasetRepo as SaveDatasetRepo from internal.usecase.file.retrieve_dataset import DatasetRepo as RetrieveDatasetRepo from internal.usecase.file.save_file import FileRepo, FileMetadataRepo diff --git a/tests/conftest.py b/tests/conftest.py index f6d0bd4f..bf2dcca4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -229,7 +229,7 @@ def set_task_use_case(unit_of_work, task_repo, dataset_repo, profiling_task_work def profile_task_use_case(unit_of_work_without_pool, dataset_repo, file_repo): return ProfileTask( unit_of_work=unit_of_work_without_pool, - dataset_repo=task_repo, + dataset_repo=dataset_repo, file_repo=file_repo, ) diff --git a/tests/datasets/university.txt b/tests/datasets/university.txt new file mode 100644 index 00000000..c3afc981 --- /dev/null +++ b/tests/datasets/university.txt @@ -0,0 +1,9 @@ +Course,Classroom,Professor,Semester +Math,512,Dr. Smith,Fall +Physics,406,Dr. Green,Fall +English,208,Prof. Turner,Fall +History,209,Prof. Davis,Fall +Math,512,Dr. 
Smith,Spring +Physics,503,Dr. Gray,Spring +English,116,Prof. Turner,Spring +Biology,209,Prof. Light,Spring diff --git a/tests/integration/common_requests.py b/tests/integration/common_requests.py new file mode 100644 index 00000000..50128fe5 --- /dev/null +++ b/tests/integration/common_requests.py @@ -0,0 +1,36 @@ +from fastapi.testclient import TestClient + + +def upload_csv_dataset( + client: TestClient, + file_name, + mime_type, + separator, + header, +): + file_path = f"tests/datasets/{file_name}" + + with open(file_path, "rb") as file: + form_data = { + "separator": separator, + "header": header, + "file": (file_name, file, mime_type), + } + + response = client.post( + "api/file/csv", + files={"file": form_data["file"]}, + data={ + "separator": form_data["separator"], + "header": form_data["header"], + }, + ) + + return response + + +def set_task(client: TestClient, dataset_id, config): + response = client.post( + "api/task/set", params={"dataset_id": dataset_id}, json=config.model_dump() + ) + return response diff --git a/tests/integration/test_ping.py b/tests/integration/test_ping.py new file mode 100644 index 00000000..6adbd88f --- /dev/null +++ b/tests/integration/test_ping.py @@ -0,0 +1,8 @@ +from fastapi.testclient import TestClient + + +def test_retrieve_task(client: TestClient): + response = client.get("api/common/ping") + + assert response.status_code == 200 + assert response.json() == "Pong!" 
diff --git a/tests/integration/test_retrieve_dataset_info.py b/tests/integration/test_retrieve_dataset_info.py
new file mode 100644
index 00000000..8717b3c3
--- /dev/null
+++ b/tests/integration/test_retrieve_dataset_info.py
@@ -0,0 +1,40 @@
+from fastapi.testclient import TestClient
+from uuid import UUID, uuid4
+
+from internal.infrastructure.data_storage.relational.model.file import DatasetORM
+from tests.integration.common_requests import upload_csv_dataset
+
+
+def test_retrieve_dataset(client: TestClient, context):
+    file_name = "university_fd.csv"
+    file_path = f"tests/datasets/{file_name}"
+    mime_type = "text/csv"
+    separator = ","
+    header = [0]
+
+    response = upload_csv_dataset(client, file_name, mime_type, separator, header)
+    assert response.status_code == 200
+
+    dataset_id = UUID(response.json())
+
+    response = client.post(f"api/file/dataset/{dataset_id}")
+
+    assert response.status_code == 200
+
+    response_data = response.json()
+    assert response_data["id"] == str(dataset_id)
+
+    dataset = context.postgres_context.get(DatasetORM, dataset_id)
+    assert dataset is not None
+    assert response_data["file_id"] == str(dataset.file_id)
+    assert response_data["separator"] == dataset.separator
+    assert response_data["header"] == dataset.header
+
+
+def test_retrieve_non_existent_dataset(client: TestClient):
+    dataset_id = uuid4()  # non-existent dataset
+
+    response = client.post(f"api/file/dataset/{dataset_id}")
+
+    assert response.status_code == 404
+    assert response.json()["detail"] == "Dataset not found"
diff --git a/tests/integration/test_retrieve_task_info.py b/tests/integration/test_retrieve_task_info.py
new file mode 100644
index 00000000..0b0be869
--- /dev/null
+++ b/tests/integration/test_retrieve_task_info.py
@@ -0,0 +1,58 @@
+import time
+from fastapi.testclient import TestClient
+from uuid import UUID, uuid4
+
+from internal.domain.task.value_objects import FdTaskConfig, PrimitiveName
+from internal.domain.task.value_objects.fd.algo_name import
FdAlgoName
+from internal.domain.task.value_objects.fd.algo_config import AidConfig
+from internal.infrastructure.data_storage.relational.model.task import TaskORM
+from tests.integration.common_requests import upload_csv_dataset, set_task
+
+
+def test_retrieve_task(client: TestClient, context):
+    dataset_response = upload_csv_dataset(
+        client, "university_fd.csv", "text/csv", ".", [0]
+    )
+    assert dataset_response.status_code == 200
+    dataset_id = UUID(dataset_response.json())
+
+    algo_config = AidConfig(algo_name=FdAlgoName.Aid, is_null_equal_null=True)
+    task_config = FdTaskConfig(primitive_name=PrimitiveName.fd, config=algo_config)
+
+    task_response = set_task(client, dataset_id, task_config)
+    assert task_response.status_code == 200
+    task_id = UUID(task_response.json())
+
+    time.sleep(1)
+
+    response = client.get(f"api/task/{task_id}")
+    assert response.status_code == 200
+
+    response_data = response.json()
+    assert response_data["dataset_id"] == str(dataset_id)
+
+    task = context.postgres_context.get(TaskORM, task_id)
+    assert task is not None
+    assert response_data["dataset_id"] == str(task.dataset_id)
+    assert response_data["status"] == task.status
+    assert response_data["config"] == task.config
+    assert response_data["result"] == task.result
+    assert response_data["raised_exception_name"] == task.raised_exception_name
+    assert response_data["failure_reason"] == task.failure_reason
+    assert response_data["traceback"] == task.traceback
+
+    if response_data["status"] == "completed":
+        assert response_data["result"] is not None
+    if response_data["status"] == "failure":
+        assert response_data["raised_exception_name"] is not None
+        assert response_data["failure_reason"] is not None
+        assert response_data["traceback"] is not None
+
+
+def test_retrieve_non_existent_task(client: TestClient):
+    task_id = uuid4()  # non-existent task
+
+    response = client.get(f"api/task/{task_id}")
+
+    assert response.status_code == 404
+    assert response.json()["detail"] == "Task
not found" diff --git a/tests/integration/test_set_profiling_task.py b/tests/integration/test_set_profiling_task.py new file mode 100644 index 00000000..568121a8 --- /dev/null +++ b/tests/integration/test_set_profiling_task.py @@ -0,0 +1,42 @@ +from fastapi.testclient import TestClient +from uuid import UUID, uuid4 + +from internal.domain.task.value_objects import FdTaskConfig, PrimitiveName +from internal.domain.task.value_objects.fd.algo_name import FdAlgoName +from internal.domain.task.value_objects.fd.algo_config import AidConfig +from internal.infrastructure.data_storage.relational.model.task import TaskORM +from tests.integration.common_requests import upload_csv_dataset, set_task + + +def test_set_profiling_task(client: TestClient, context): + dataset_response = upload_csv_dataset( + client, "university_fd.csv", "text/csv", ".", [0] + ) + assert dataset_response.status_code == 200 + dataset_id = UUID(dataset_response.json()) + + algo_config = AidConfig(algo_name=FdAlgoName.Aid, is_null_equal_null=True) + task_config = FdTaskConfig(primitive_name=PrimitiveName.fd, config=algo_config) + + response = set_task(client, dataset_id, task_config) + + assert response.status_code == 200 + + task_id = UUID(response.json()) + + task_data = context.postgres_context.get(TaskORM, task_id) + + assert task_data is not None + assert task_data.config == task_config.model_dump() + assert task_data.dataset_id == dataset_id + + +def test_set_profiling_task_with_non_existent_dataset(client: TestClient): + dataset_id = uuid4() + algo_config = AidConfig(algo_name=FdAlgoName.Aid, is_null_equal_null=True) + task_config = FdTaskConfig(primitive_name=PrimitiveName.fd, config=algo_config) + + response = set_task(client, dataset_id, task_config) + + assert response.status_code == 404 + assert response.json()["detail"] == "Dataset not found" diff --git a/tests/integration/test_upload_dataset.py b/tests/integration/test_upload_dataset.py new file mode 100644 index 00000000..97c2ead6 --- 
/dev/null +++ b/tests/integration/test_upload_dataset.py @@ -0,0 +1,58 @@ +import os +import pytest +from fastapi.testclient import TestClient + +from uuid import UUID + +from internal.infrastructure.data_storage.relational.model.file import DatasetORM +from tests.integration.common_requests import upload_csv_dataset + + +@pytest.mark.asyncio +async def test_upload_csv_dataset(client: TestClient, context, tmp_upload_dir): + file_name = "university_fd.csv" + file_path = f"tests/datasets/{file_name}" + mime_type = "text/csv" + separator = "," + header = [0] + + response = upload_csv_dataset(client, file_name, mime_type, separator, header) + + assert response.status_code == 200 + + dataset_id = UUID(response.json()) + + data = context.postgres_context.get(DatasetORM, dataset_id) + assert data is not None + assert data.id == dataset_id + assert data.separator == separator + assert data.header == header + assert data.related_tasks == [] + + file = data.file_metadata + assert file.original_file_name == "university_fd.csv" + assert file.mime_type == mime_type + + saved_file_path = os.path.join(tmp_upload_dir, str(file.file_name)) + assert os.path.exists(saved_file_path) + + with open(saved_file_path, "rb") as saved_file, open( + file_path, "rb" + ) as original_file: + saved_file_content = saved_file.read() + original_file_content = original_file.read() + assert saved_file_content == original_file_content + + +@pytest.mark.asyncio +async def test_upload_csv_dataset_with_incorrect_mime_type(client: TestClient): + file_name = "university.txt" + file_path = f"tests/datasets/{file_name}" + mime_type = "text/plain" + separator = "," + header = [0] + + response = upload_csv_dataset(client, file_name, mime_type, separator, header) + + assert response.status_code == 400 + assert response.json()["detail"] == "File is not CSV" From b19ed8499e9b7462af2dbdfcdd57a088e3da2c4d Mon Sep 17 00:00:00 2001 From: Rafik Nurmuhametov <113212617+raf-nr@users.noreply.github.com> Date: Wed, 2 Oct 
2024 22:13:47 +0000 Subject: [PATCH 153/153] chore: format code, and some documentation and readme files --- Makefile | 2 +- README.md | 2 +- internal/README.md | 22 +++++++++++++++++++ .../domain/task/value_objects/afd/__init__.py | 4 ++-- .../task/value_objects/afd/exception.py | 1 - .../domain/task/value_objects/fd/__init__.py | 4 ++-- .../domain/task/value_objects/fd/exception.py | 1 - internal/dto/repository/exception.py | 9 ++++++++ internal/dto/repository/file/file.py | 1 - internal/infrastructure/README.md | 7 ++++++ .../celery/task/profiling_task.py | 4 ---- .../infrastructure/data_storage/README.md | 7 +++++- .../infrastructure/data_storage/__init__.py | 5 ++--- .../infrastructure/data_storage/context.py | 2 -- .../data_storage/flat/context.py | 3 --- internal/repository/README.md | 3 +++ internal/repository/flat/file.py | 2 -- internal/repository/relational/crud.py | 4 ---- .../repository/relational/file/dataset.py | 2 -- .../relational/file/file_metadata.py | 1 - internal/repository/relational/task/task.py | 1 - internal/rest/http/di.py | 2 -- internal/rest/http/exception.py | 1 - internal/rest/http/file/retrieve_dataset.py | 1 - internal/rest/http/file/upload_csv_dataset.py | 1 - internal/rest/http/task/di.py | 2 -- internal/rest/http/task/retrieve_task.py | 1 - internal/rest/http/task/set_task.py | 1 - internal/uow/README.md | 9 ++++---- internal/uow/uow.py | 3 --- internal/usecase/README.md | 2 +- internal/usecase/file/check_content_type.py | 1 - internal/usecase/file/retrieve_dataset.py | 3 --- internal/usecase/file/save_dataset.py | 3 --- internal/usecase/file/save_file.py | 4 ---- internal/usecase/task/profile_task.py | 4 ---- internal/usecase/task/retrieve_task.py | 2 -- internal/usecase/task/set_task.py | 6 ----- internal/usecase/task/update_task_info.py | 4 ---- .../worker/celery/profiling_task_worker.py | 2 -- .../integration/test_retrieve_dataset_info.py | 1 - tests/integration/test_upload_dataset.py | 1 - tests/unit/context/test_flat.py | 4 
++-- 43 files changed, 63 insertions(+), 82 deletions(-) create mode 100644 internal/README.md create mode 100644 internal/infrastructure/README.md create mode 100644 internal/repository/README.md diff --git a/Makefile b/Makefile index 72546ef0..7d066d7d 100644 --- a/Makefile +++ b/Makefile @@ -58,7 +58,7 @@ lint: ## Reformat code format: - poetry run ruff format tests app & poetry run ruff check --fix & poetry run black tests internal + poetry run ruff format tests internal & poetry run ruff check --fix & poetry run black tests internal ## Run all tests in project test: diff --git a/README.md b/README.md index fab93e8d..6c4e1790 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ Execute `make` to see all available rules with documentation 1. Activate virtual environment: `source .venv/bin/activate` 2. Don't forget to change values in .env 3. Run **development-only** containers: `make up` -4. Run celery worker in watch mode: `make worker` +4. Run celery worker in watch mode: `make celery-worker` 5. Run application in watch mode: `make app` ## Docs diff --git a/internal/README.md b/internal/README.md new file mode 100644 index 00000000..d18aaa3b --- /dev/null +++ b/internal/README.md @@ -0,0 +1,22 @@ +# Internal Module + +## Description + +This application is built upon the principles of Clean Architecture. Clean Architecture emphasizes separation of concerns, making the codebase easier to maintain, test, and scale. The architecture is structured to allow flexibility and independence from frameworks, which enhances the portability of the code. + +For more information about Clean Architecture, you may refer to the following resources: +- [Clean Architecture: A Craftsman's Guide to Software Structure and Design by Robert C. 
Martin](https://www.oreilly.com/library/view/clean-architecture-a/9780134494272/) +- [Clean Architecture](https://github.com/preslavmihaylov/booknotes/tree/master/architecture/clean-architecture) + +## Structure + +The internal module is organized into the following components: + +- **Domain**: Contains the core business logic and entities. This is where the application's fundamental rules are defined. +- **Use Case**: Represents application-specific business rules and orchestrates the flow of data between the domain and external layers. +- **Unit of Work (UoW)**: Manages transactional boundaries and ensures that a series of operations can be committed or rolled back as a single unit. +- **Repository**: Abstracts the data access layer, providing methods for retrieving and storing entities. +- **Worker**: Provides abstractions for running background tasks and managing asynchronous operations. +- **DTO (Data Transfer Object)**: Handles the data transfer between use cases and the external world, ensuring that only the necessary data is exposed and passed around. + +This structure facilitates maintainability and scalability by enforcing clear boundaries and responsibilities within the application. 
diff --git a/internal/domain/task/value_objects/afd/__init__.py b/internal/domain/task/value_objects/afd/__init__.py index a4d631ea..a6cc1a36 100644 --- a/internal/domain/task/value_objects/afd/__init__.py +++ b/internal/domain/task/value_objects/afd/__init__.py @@ -9,9 +9,9 @@ ) from internal.domain.task.value_objects.afd.algo_name import AfdAlgoName # noqa: F401 from internal.domain.task.value_objects.primitive_name import PrimitiveName -from internal.domain.task.value_objects.afd.exception import ( +from internal.domain.task.value_objects.afd.exception import ( # noqa: F401 IncorrectAFDAlgorithmName, -) # noqa: F401 +) class BaseAfdTaskModel(BaseModel): diff --git a/internal/domain/task/value_objects/afd/exception.py b/internal/domain/task/value_objects/afd/exception.py index aa682005..4624a448 100644 --- a/internal/domain/task/value_objects/afd/exception.py +++ b/internal/domain/task/value_objects/afd/exception.py @@ -1,4 +1,3 @@ class IncorrectAFDAlgorithmName(Exception): - def __init__(self, message: str): super().__init__(f"{message} is incorrect afd algorithm name") diff --git a/internal/domain/task/value_objects/fd/__init__.py b/internal/domain/task/value_objects/fd/__init__.py index 9a77b3ed..8333c13a 100644 --- a/internal/domain/task/value_objects/fd/__init__.py +++ b/internal/domain/task/value_objects/fd/__init__.py @@ -9,9 +9,9 @@ FdModel, ) from internal.domain.task.value_objects.fd.algo_name import FdAlgoName # noqa: F401 -from internal.domain.task.value_objects.fd.exception import ( +from internal.domain.task.value_objects.fd.exception import ( # noqa: F401 IncorrectFDAlgorithmName, -) # noqa: F401 +) class BaseFdTaskModel(BaseModel): diff --git a/internal/domain/task/value_objects/fd/exception.py b/internal/domain/task/value_objects/fd/exception.py index 65576f28..d7f47dc9 100644 --- a/internal/domain/task/value_objects/fd/exception.py +++ b/internal/domain/task/value_objects/fd/exception.py @@ -1,4 +1,3 @@ class 
IncorrectFDAlgorithmName(Exception): - def __init__(self, message: str): super().__init__(f"{message} is incorrect fd algorithm name") diff --git a/internal/dto/repository/exception.py b/internal/dto/repository/exception.py index 27c82a2d..53277077 100644 --- a/internal/dto/repository/exception.py +++ b/internal/dto/repository/exception.py @@ -1,3 +1,12 @@ class ModelNotFoundException(Exception): + """ + Exception raised when a storage model is not found in some data storage. + + This exception may be thrown only by the repository. + """ + def __init__(self, message: str): + """ + Initializes an instance of ModelNotFoundException with a default message. + """ super().__init__(message) diff --git a/internal/dto/repository/file/file.py b/internal/dto/repository/file/file.py index 5eba009b..e8cfebcb 100644 --- a/internal/dto/repository/file/file.py +++ b/internal/dto/repository/file/file.py @@ -11,7 +11,6 @@ class File(Protocol): - filename: str content_type: str diff --git a/internal/infrastructure/README.md b/internal/infrastructure/README.md new file mode 100644 index 00000000..b9e610f8 --- /dev/null +++ b/internal/infrastructure/README.md @@ -0,0 +1,7 @@ +# infrastructure module +This module implements an infrastructure layer from a pure architecture. + +## submodules +`background_task` - implementation of tasks execution in the background. Provides functionality for specific worker implementations. + +`data_storage` - provides various tools and functions for managing data stores and transactions in them - settings, connections, contexts, migrations, etc. 
diff --git a/internal/infrastructure/background_task/celery/task/profiling_task.py b/internal/infrastructure/background_task/celery/task/profiling_task.py index 981e87ec..adf5adf3 100644 --- a/internal/infrastructure/background_task/celery/task/profiling_task.py +++ b/internal/infrastructure/background_task/celery/task/profiling_task.py @@ -28,7 +28,6 @@ def profiling_task( dataset_id: UUID, config: OneOfTaskConfig, ) -> Any: - profile_task = get_profile_task_use_case() result = profile_task(dataset_id=dataset_id, config=config) @@ -40,7 +39,6 @@ def task_prerun_notifier( kwargs, **_, ) -> None: - update_task_info = get_update_task_info_use_case() db_task_id: UUID = kwargs["task_id"] @@ -53,7 +51,6 @@ def task_postrun_notifier( retval: OneOfTaskResult, **_, ): - update_task_info = get_update_task_info_use_case() db_task_id: UUID = kwargs["task_id"] @@ -71,7 +68,6 @@ def task_failure_notifier( traceback, **_, ): - # TODO: test all possible exceptions task_failure_reason = TaskFailureReason.OTHER if isinstance(exception, (TimeLimitExceeded, SoftTimeLimitExceeded)): diff --git a/internal/infrastructure/data_storage/README.md b/internal/infrastructure/data_storage/README.md index 025c43fe..8f502773 100644 --- a/internal/infrastructure/data_storage/README.md +++ b/internal/infrastructure/data_storage/README.md @@ -5,12 +5,17 @@ This module is responsible for managing the configuration and settings related t ### settings `settings.py` contains all the settings for working with the data store. Through them you can get the URL to connect to the database, file paths, and so on. +### context +This module(`data_storage`) and all its submodules that represent some kind of storage must contain an implementation of the context - a “bridge” between the repositories and the data storage system. The context accumulates data within itself and provides transactions + +`context.py` contains a unique context that encapsulates the logic of all other contexts. 
+ ### relational The `flat` module contains the logic for interacting with local file storages. [Read more.](flat/README.md) ### relational The `relational` module contains the logic for interacting with relational databases using SQLAlchemy. [Read more.](relational/README.md) - ## Extensibility If you need to add a new database or other data storage, simply create the appropriate module with the implementation in this module, and also write all the necessary settings in `settings.py`. +You should also implement a context for your repository and extend a unique context for it. diff --git a/internal/infrastructure/data_storage/__init__.py b/internal/infrastructure/data_storage/__init__.py index 75dcc1e6..999cb494 100644 --- a/internal/infrastructure/data_storage/__init__.py +++ b/internal/infrastructure/data_storage/__init__.py @@ -2,12 +2,11 @@ settings = get_settings() - -from internal.infrastructure.data_storage.context import ( +from internal.infrastructure.data_storage.context import ( # noqa: F401, E402 Context, ContextMaker, get_context, get_context_without_pool, get_context_maker, get_context_maker_without_pool, -) # noqa: F401 +) diff --git a/internal/infrastructure/data_storage/context.py b/internal/infrastructure/data_storage/context.py index 244a8184..83b5878f 100644 --- a/internal/infrastructure/data_storage/context.py +++ b/internal/infrastructure/data_storage/context.py @@ -23,7 +23,6 @@ class Context: - def __init__( self, postgres_context: RelationalContextType, flat_context: FlatContext ): @@ -76,7 +75,6 @@ def execute(self, *args) -> Any: class ContextMaker: - def __init__( self, *, diff --git a/internal/infrastructure/data_storage/flat/context.py b/internal/infrastructure/data_storage/flat/context.py index c0d9ca6b..f2b484fb 100644 --- a/internal/infrastructure/data_storage/flat/context.py +++ b/internal/infrastructure/data_storage/flat/context.py @@ -13,7 +13,6 @@ class FlatAddModel: - def __init__(self, file: File, file_name: str): 
self.file_name = file_name self.file = file @@ -24,7 +23,6 @@ class FlatDeleteModel(BaseModel): class FlatContext: - def __init__(self, upload_directory_path: Path): self._upload_directory_path = upload_directory_path self._is_closed = True @@ -83,7 +81,6 @@ def delete( class FlatContextMaker: - def __init__( self, *, uploaded_files_dir_path: Path = settings.uploaded_files_dir_path ): diff --git a/internal/repository/README.md b/internal/repository/README.md new file mode 100644 index 00000000..6d4fac5d --- /dev/null +++ b/internal/repository/README.md @@ -0,0 +1,3 @@ +# repository module +This module contains implementations of all repositories. +To implement your repository, create a module here for the repository that the repository works with (or use an existing one), and then implement the repository WITHOUT inheriting from interfaces. It is important that in the repository implementation all operations occur through the universal context from the `internal.infrastructure.data_storage` directory. 
diff --git a/internal/repository/flat/file.py b/internal/repository/flat/file.py index a591d1d1..6d1a6c6b 100644 --- a/internal/repository/flat/file.py +++ b/internal/repository/flat/file.py @@ -13,7 +13,6 @@ class FileRepository: - async def create( self, file: File, @@ -33,7 +32,6 @@ def find( file_info: CSVFileFindSchema, context: Context, ) -> CSVFileResponseSchema: - path_to_file = Path( context.flat_context.upload_directory_path, str(file_info.file_name) ) diff --git a/internal/repository/relational/crud.py b/internal/repository/relational/crud.py index 65cb0fc4..44020e8d 100644 --- a/internal/repository/relational/crud.py +++ b/internal/repository/relational/crud.py @@ -20,11 +20,9 @@ class CRUD[ FindSchema: BaseFindSchema, ResponseSchema: BaseResponseSchema, ]: - def __init__( self, orm_model: Type[ORMModel], response_schema: Type[ResponseSchema] ) -> None: - self._orm_model: Type[ORMModel] = orm_model self._response_schema: Type[ResponseSchema] = response_schema @@ -56,7 +54,6 @@ def find_or_create( create_schema: CreateSchema, context: Context, ) -> ResponseSchema: - db_model_instance = self._find(find_schema, context) if not db_model_instance: db_model_instance = self.create(create_schema, context) @@ -69,7 +66,6 @@ def update( fields_to_update_if_none: set[str] | None, context: Context, ) -> ResponseSchema: - db_model_instance = self._find(find_schema, context) if not db_model_instance: raise ModelNotFoundException( diff --git a/internal/repository/relational/file/dataset.py b/internal/repository/relational/file/dataset.py index 84fb2b0d..0dda7527 100644 --- a/internal/repository/relational/file/dataset.py +++ b/internal/repository/relational/file/dataset.py @@ -23,7 +23,6 @@ class DatasetRepository( DatasetResponseSchema, ] ): - def __init__(self): super().__init__(orm_model=DatasetORM, response_schema=DatasetResponseSchema) @@ -32,7 +31,6 @@ def find_with_file_metadata( dataset_info: DatasetFindSchema, context: Context, ) -> 
tuple[DatasetResponseSchema, FileMetadataResponseSchema]: - dataset_find_dict = dataset_info.model_dump() stmt = ( select(DatasetORM) diff --git a/internal/repository/relational/file/file_metadata.py b/internal/repository/relational/file/file_metadata.py index 81df318c..a16b6d0c 100644 --- a/internal/repository/relational/file/file_metadata.py +++ b/internal/repository/relational/file/file_metadata.py @@ -17,7 +17,6 @@ class FileMetadataRepository( FileMetadataResponseSchema, ] ): - def __init__(self): super().__init__( orm_model=FileMetadataORM, response_schema=FileMetadataResponseSchema diff --git a/internal/repository/relational/task/task.py b/internal/repository/relational/task/task.py index 1b85b0c5..f0d9dd23 100644 --- a/internal/repository/relational/task/task.py +++ b/internal/repository/relational/task/task.py @@ -13,6 +13,5 @@ class TaskRepository( TaskORM, TaskCreateSchema, TaskUpdateSchema, TaskFindSchema, TaskResponseSchema ] ): - def __init__(self): super().__init__(orm_model=TaskORM, response_schema=TaskResponseSchema) diff --git a/internal/rest/http/di.py b/internal/rest/http/di.py index a034ac55..b7a5a6d4 100644 --- a/internal/rest/http/di.py +++ b/internal/rest/http/di.py @@ -14,14 +14,12 @@ def get_unit_of_work(context_maker=Depends(get_context_maker)) -> UnitOfWork: - return UnitOfWork(context_maker) def get_unit_of_work_without_pool( context_maker=Depends(get_context_maker_without_pool), ) -> UnitOfWork: - return UnitOfWork(context_maker) diff --git a/internal/rest/http/exception.py b/internal/rest/http/exception.py index 20c5557a..4e1622f8 100644 --- a/internal/rest/http/exception.py +++ b/internal/rest/http/exception.py @@ -10,7 +10,6 @@ def add_exception_handlers(app: FastAPI): - @app.exception_handler(IncorrectFileFormatException) def incorrect_file_format_exception( request: Request, exc: IncorrectFileFormatException diff --git a/internal/rest/http/file/retrieve_dataset.py b/internal/rest/http/file/retrieve_dataset.py index 
69b340ec..25ef0b5f 100644 --- a/internal/rest/http/file/retrieve_dataset.py +++ b/internal/rest/http/file/retrieve_dataset.py @@ -21,7 +21,6 @@ def retrieve_dataset( dataset_id: UUID, retrieve_dataset_use_case: RetrieveDataset = Depends(get_retrieve_dataset_use_case), ) -> ResponseSchema: - dataset = retrieve_dataset_use_case(dataset_id=dataset_id) return ResponseSchema( diff --git a/internal/rest/http/file/upload_csv_dataset.py b/internal/rest/http/file/upload_csv_dataset.py index 3a1db24f..bf64333e 100644 --- a/internal/rest/http/file/upload_csv_dataset.py +++ b/internal/rest/http/file/upload_csv_dataset.py @@ -32,7 +32,6 @@ async def upload_csv_dataset( save_file: SaveFile = Depends(get_save_file_use_case), save_dataset: SaveDataset = Depends(get_save_dataset_use_case), ) -> UUID: - adapted_file = UploadFileAdapter(file) check_content_type(upload_file=adapted_file) diff --git a/internal/rest/http/task/di.py b/internal/rest/http/task/di.py index 0138f6ae..d458ddb6 100644 --- a/internal/rest/http/task/di.py +++ b/internal/rest/http/task/di.py @@ -19,7 +19,6 @@ def get_retrieve_task_use_case( unit_of_work: UnitOfWork = Depends(get_unit_of_work), task_repo: RetrieveTaskRepo = Depends(get_task_repo), ) -> RetrieveTask: - return RetrieveTask( unit_of_work=unit_of_work, task_repo=task_repo, @@ -32,7 +31,6 @@ def get_set_task_use_case( dataset_repo: SetDatasetRepo = Depends(get_dataset_repo), profiling_task_worker: ProfilingTaskWorker = Depends(get_profiling_task_worker), ) -> SetTask: - return SetTask( unit_of_work=unit_of_work, task_repo=task_repo, diff --git a/internal/rest/http/task/retrieve_task.py b/internal/rest/http/task/retrieve_task.py index 4e03d1dc..85808844 100644 --- a/internal/rest/http/task/retrieve_task.py +++ b/internal/rest/http/task/retrieve_task.py @@ -30,7 +30,6 @@ def retrieve_task( task_id: UUID, retrieve_task_use_case: RetrieveTask = Depends(get_retrieve_task_use_case), ) -> ResponseSchema: - task = retrieve_task_use_case(task_id=task_id) return 
ResponseSchema( diff --git a/internal/rest/http/task/set_task.py b/internal/rest/http/task/set_task.py index 0f61fe45..e0b98369 100644 --- a/internal/rest/http/task/set_task.py +++ b/internal/rest/http/task/set_task.py @@ -15,7 +15,6 @@ def set_task( config: OneOfTaskConfig, set_task_use_case: SetTask = Depends(get_set_task_use_case), ) -> UUID: - task_id = set_task_use_case( dataset_id=dataset_id, config=config, diff --git a/internal/uow/README.md b/internal/uow/README.md index 38537345..44ad732a 100644 --- a/internal/uow/README.md +++ b/internal/uow/README.md @@ -5,23 +5,24 @@ This module implements the Unit of Work (UoW) pattern, which is designed to mana The Unit of Work pattern manages transactional operations within a business process. It groups multiple changes to a data store into a single logical transaction, ensuring that either all operations succeed or none do. This is particularly useful for preventing partial updates, ensuring data integrity, and managing rollbacks in case of errors. ## Implementation -The Unit Of Work class works with a DataStorageContext interface, which defines essential methods like commit, flush, rollback, and close. This allows different types of data storage (e.g., relational databases, file systems) to be plugged in while adhering to a unified transaction control mechanism. +The Unit Of Work class works with a DataStorageContext interface (it is assumed that the generic context from `internal.infrastructure.data_storage` will be used), which defines essential methods like commit, flush, rollback, and close. This allows different types of data storage (e.g., relational databases, file systems) to be plugged in while adhering to a unified transaction control mechanism. -To use UoW in your use case, you need to implement the DataStorageContext interface for your data store (if not already done), and you also need to have a repository implementation that supports working with your DataStorageContext. 
+To use UoW in your use case, you need to implement the DataStorageContext interface for your data store (if not already done), and then inject your context into the universal context from the infrastructure module. ### Example ```python from typing import Protocol, Type from uuid import UUID, uuid4 +from internal.infrastructure.data_storage import get_context_maker from sqlalchemy.orm import Session from internal.uow import UnitOfWork, DataStorageContext class DatasetRepo(Protocol): def create(self, file_id: UUID, context: DataStorageContext) -> None: ... -def create_uow(context_maker: Type[Session]) -> UnitOfWork: - return UnitOfWork(context_maker=context_maker) +def create_uow() -> UnitOfWork: + return UnitOfWork(context_maker=get_context_maker()) def create_two_datasets( uow: UnitOfWork, diff --git a/internal/uow/uow.py b/internal/uow/uow.py index 1a1e8656..b611f524 100644 --- a/internal/uow/uow.py +++ b/internal/uow/uow.py @@ -3,7 +3,6 @@ @runtime_checkable class DataStorageContext(Protocol): - def commit(self) -> None: ... def flush(self) -> None: ... @@ -14,12 +13,10 @@ def close(self) -> None: ... class DataStorageContextMaker(Protocol): - def __call__(self) -> DataStorageContext: ... class UnitOfWork: - def __init__(self, context_maker: DataStorageContextMaker): self._context_maker: DataStorageContextMaker = context_maker self._context: DataStorageContext | None = None diff --git a/internal/usecase/README.md b/internal/usecase/README.md index fca57df2..21676beb 100644 --- a/internal/usecase/README.md +++ b/internal/usecase/README.md @@ -20,7 +20,7 @@ All repository interfaces must be created using Python’s `Protocol` to ensure Implement the use case. The use case should manage domain entities directly but interact with repositories, services, and external components strictly through interfaces. ### 4. Implement data storage context -Implement interface `DataStorageContext` for your data storage. 
Place it in the `internal.infrastructure.data_storage` module. +Implement the `DataStorageContext` interface for your data storage and then add it to the universal context from the `internal.infrastructure.data_storage` module. ### 4. Implement the repository If the repository isn't implemented, you will need to provide a concrete implementation for the repository interface. Place this in the `internal.repository` module. diff --git a/internal/usecase/file/check_content_type.py b/internal/usecase/file/check_content_type.py index 0b43ebc9..4a6d2a6a 100644 --- a/internal/usecase/file/check_content_type.py +++ b/internal/usecase/file/check_content_type.py @@ -8,7 +8,6 @@ class File(Protocol): class CheckContentType: - def __call__(self, *, upload_file: File) -> None: if ( upload_file.content_type != "text/csv" diff --git a/internal/usecase/file/retrieve_dataset.py b/internal/usecase/file/retrieve_dataset.py index b252864c..fd89299d 100644 --- a/internal/usecase/file/retrieve_dataset.py +++ b/internal/usecase/file/retrieve_dataset.py @@ -8,7 +8,6 @@ class DatasetRepo(Protocol): - def find( self, dataset_info: DatasetFindSchema, context: DataStorageContext ) -> DatasetResponseSchema | None: ... @@ -22,13 +21,11 @@ class RetrieveDatasetUseCaseResult(BaseModel): class RetrieveDataset: - def __init__( self, unit_of_work: UnitOfWork, dataset_repo: DatasetRepo, ): - self.unit_of_work = unit_of_work self.dataset_repo = dataset_repo diff --git a/internal/usecase/file/save_dataset.py b/internal/usecase/file/save_dataset.py index 476e57fc..ce214ee3 100644 --- a/internal/usecase/file/save_dataset.py +++ b/internal/usecase/file/save_dataset.py @@ -6,14 +6,12 @@ class DatasetRepo(Protocol): - def create( self, dataset_info: DatasetCreateSchema, context: DataStorageContext ) -> DatasetResponseSchema: ... 
class SaveDataset: - def __init__(self, unit_of_work: UnitOfWork, dataset_repo: DatasetRepo): self.unit_of_work = unit_of_work self.dataset_repo = dataset_repo @@ -25,7 +23,6 @@ def __call__( separator: str, header: list[int], ) -> UUID: - dataset_create_schema = DatasetCreateSchema( file_id=file_id, separator=separator, header=header ) diff --git a/internal/usecase/file/save_file.py b/internal/usecase/file/save_file.py index 60be650a..a12f2263 100644 --- a/internal/usecase/file/save_file.py +++ b/internal/usecase/file/save_file.py @@ -18,14 +18,12 @@ class FileRepo(Protocol): - async def create( self, file: File, file_info: FileCreateSchema, context: DataStorageContext ) -> None: ... class FileMetadataRepo(Protocol): - def create( self, file_metadata: FileMetadataCreateSchema, context: DataStorageContext ) -> FileMetadataResponseSchema: ... @@ -41,14 +39,12 @@ class SaveFileUseCaseResult(BaseModel): class SaveFile: - def __init__( self, unit_of_work: UnitOfWork, file_repo: FileRepo, file_metadata_repo: FileMetadataRepo, ): - self.unit_of_work = unit_of_work self.file_repo = file_repo self.file_metadata_repo = file_metadata_repo diff --git a/internal/usecase/task/profile_task.py b/internal/usecase/task/profile_task.py index e260d4a2..f820e616 100644 --- a/internal/usecase/task/profile_task.py +++ b/internal/usecase/task/profile_task.py @@ -23,21 +23,18 @@ class DatasetRepo(Protocol): - def find_with_file_metadata( self, dataset_info: DatasetFindSchema, context: DataStorageContext ) -> tuple[DatasetResponseSchema, FileMetadataResponseSchema]: ... class FileRepo(Protocol): - def find( self, file_info: CSVFileFindSchema, context: DataStorageContext ) -> CSVFileResponseSchema: ... 
class ProfileTask: - def __init__( self, unit_of_work: UnitOfWork, @@ -49,7 +46,6 @@ def __init__( self.dataset_repo = dataset_repo def __call__(self, *, dataset_id: UUID, config: OneOfTaskConfig) -> OneOfTaskResult: - with self.unit_of_work as context: try: dataset, file_metadata = self.dataset_repo.find_with_file_metadata( diff --git a/internal/usecase/task/retrieve_task.py b/internal/usecase/task/retrieve_task.py index 3317efd8..501ac39f 100644 --- a/internal/usecase/task/retrieve_task.py +++ b/internal/usecase/task/retrieve_task.py @@ -16,7 +16,6 @@ class TaskRepo(Protocol): - def find( self, task_info: TaskFindSchema, context: DataStorageContext ) -> TaskResponseSchema | None: ... @@ -38,7 +37,6 @@ class RetrieveTaskUseCaseResult(BaseModel): class RetrieveTask: - def __init__(self, unit_of_work: UnitOfWork, task_repo: TaskRepo): self.unit_of_work = unit_of_work self.task_repo = task_repo diff --git a/internal/usecase/task/set_task.py b/internal/usecase/task/set_task.py index a8c13800..47138b65 100644 --- a/internal/usecase/task/set_task.py +++ b/internal/usecase/task/set_task.py @@ -10,26 +10,22 @@ class DatasetRepo(Protocol): - def find( self, dataset_info: DatasetFindSchema, context: DataStorageContext ) -> DatasetResponseSchema | None: ... class TaskRepo(Protocol): - def create( self, task_info: TaskCreateSchema, context: DataStorageContext ) -> TaskResponseSchema: ... class ProfilingTaskWorker(Protocol): - def set(self, task_info: ProfilingTaskCreateSchema) -> None: ... 
class SetTask: - def __init__( self, unit_of_work: UnitOfWork, @@ -37,7 +33,6 @@ def __init__( task_repo: TaskRepo, profiling_task_worker: ProfilingTaskWorker, ): - self.unit_of_work = unit_of_work self.dataset_repo = dataset_repo self.task_repo = task_repo @@ -49,7 +44,6 @@ def __call__( dataset_id: UUID, config: OneOfTaskConfig, ) -> UUID: - dataset_find_schema = DatasetFindSchema(id=dataset_id) task_create_schema = TaskCreateSchema( status=TaskStatus.CREATED, diff --git a/internal/usecase/task/update_task_info.py b/internal/usecase/task/update_task_info.py index e8e9e10a..24d752fd 100644 --- a/internal/usecase/task/update_task_info.py +++ b/internal/usecase/task/update_task_info.py @@ -16,7 +16,6 @@ class TaskRepo(Protocol): - def update( self, find_schema: TaskFindSchema, @@ -27,13 +26,11 @@ def update( class UpdateTaskInfo: - def __init__( self, unit_of_work: UnitOfWork, task_repo: TaskRepo, ): - self.unit_of_work = unit_of_work self.task_repo = task_repo @@ -48,7 +45,6 @@ def __call__( failure_reason: str | None = None, traceback: str | None = None, ) -> None: - task_find_schema = TaskFindSchema(id=task_id) data_to_update = TaskUpdateSchema( status=task_status, diff --git a/internal/worker/celery/profiling_task_worker.py b/internal/worker/celery/profiling_task_worker.py index a33e7951..3107d22c 100644 --- a/internal/worker/celery/profiling_task_worker.py +++ b/internal/worker/celery/profiling_task_worker.py @@ -3,9 +3,7 @@ class ProfilingTaskWorker: - def set(self, task_info: ProfilingTaskCreateSchema) -> None: - profiling_task.delay( task_id=task_info.task_id, dataset_id=task_info.dataset_id, diff --git a/tests/integration/test_retrieve_dataset_info.py b/tests/integration/test_retrieve_dataset_info.py index 8717b3c3..27ce05c7 100644 --- a/tests/integration/test_retrieve_dataset_info.py +++ b/tests/integration/test_retrieve_dataset_info.py @@ -7,7 +7,6 @@ def test_retrieve_dataset(client: TestClient, context): file_name = "university_fd.csv" - file_path = 
f"tests/datasets/{file_name}" mime_type = "text/csv" separator = "," header = [0] diff --git a/tests/integration/test_upload_dataset.py b/tests/integration/test_upload_dataset.py index 97c2ead6..3f7aab22 100644 --- a/tests/integration/test_upload_dataset.py +++ b/tests/integration/test_upload_dataset.py @@ -47,7 +47,6 @@ async def test_upload_csv_dataset(client: TestClient, context, tmp_upload_dir): @pytest.mark.asyncio async def test_upload_csv_dataset_with_incorrect_mime_type(client: TestClient): file_name = "university.txt" - file_path = f"tests/datasets/{file_name}" mime_type = "text/plain" separator = "," header = [0] diff --git a/tests/unit/context/test_flat.py b/tests/unit/context/test_flat.py index 1232b802..1142d32d 100644 --- a/tests/unit/context/test_flat.py +++ b/tests/unit/context/test_flat.py @@ -95,7 +95,7 @@ async def test_rollback_clears_files(flat_context, mock_file, file_name): @pytest.mark.asyncio async def test_close_without_files(flat_context): flat_context.close() - assert flat_context._is_closed == True + assert flat_context._is_closed assert flat_context._to_add == [] assert flat_context._added == [] @@ -109,6 +109,6 @@ async def test_close_with_rollback(flat_context, mock_file, file_name): assert added_file_path.exists() flat_context.close() assert not added_file_path.exists() - assert flat_context._is_closed == True + assert flat_context._is_closed assert flat_context._to_add == [] assert flat_context._added == []