From 149be5e6f00ef97c37d1e2e2a44c00da07319670 Mon Sep 17 00:00:00 2001 From: Lariel Fernandes <89975469+lariel-fernandes@users.noreply.github.com> Date: Sat, 7 Sep 2024 20:57:00 +0200 Subject: [PATCH] docs: api-ref (#8) --- docs/api/_static/style.css | 3 + docs/api/conf.py | 14 + docs/api/rst/artschema/framework.rst | 12 + docs/api/rst/artschema/index.rst | 6 +- docs/api/rst/artschema/specs.rst | 8 +- docs/api/rst/artschema/tags.rst | 1 + docs/api/rst/mlflow/traits.rst | 1 - docs/api/rst/mlflow/utils.rst | 2 +- pyproject.toml | 4 +- src/mlopus/artschema/__init__.py | 20 +- src/mlopus/artschema/catalog.py | 44 ++- src/mlopus/artschema/framework.py | 174 +++++++++--- src/mlopus/artschema/helpers.py | 144 ++++++++-- src/mlopus/artschema/specs.py | 151 +++++++++-- src/mlopus/artschema/tags.py | 67 +++-- src/mlopus/lineage.py | 62 +++-- src/mlopus/mlflow/__init__.py | 19 +- src/mlopus/mlflow/api/base.py | 385 +++++++++++++++++++++++---- src/mlopus/mlflow/api/contract.py | 2 +- src/mlopus/mlflow/api/exp.py | 32 ++- src/mlopus/mlflow/api/model.py | 43 ++- src/mlopus/mlflow/api/mv.py | 60 ++++- src/mlopus/mlflow/api/run.py | 132 +++++++-- src/mlopus/mlflow/traits.py | 8 +- src/mlopus/mlflow/utils.py | 39 ++- uv.lock | 283 +++++++++----------- 26 files changed, 1305 insertions(+), 411 deletions(-) create mode 100644 docs/api/_static/style.css diff --git a/docs/api/_static/style.css b/docs/api/_static/style.css new file mode 100644 index 0000000..b07bdb1 --- /dev/null +++ b/docs/api/_static/style.css @@ -0,0 +1,3 @@ +.wy-nav-content { + max-width: none; +} diff --git a/docs/api/conf.py b/docs/api/conf.py index f351d78..3153d59 100644 --- a/docs/api/conf.py +++ b/docs/api/conf.py @@ -29,6 +29,8 @@ "sphinx.ext.viewcode", "sphinx_autodoc_typehints", "sphinxcontrib.autodoc_pydantic", + "sphinx_toolbox.more_autodoc.typevars", + "sphinx_paramlinks", ] templates_path = ["_templates"] @@ -45,17 +47,28 @@ # -- Options for autodoc -------------------------------------------- # 
https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#configuration +autodoc_typehints = "signature" autodoc_member_order = "groupwise" +autodoc_default_options = { + "autoclass_content": "class", + "show-inheritance": True, +} # -- Options for pydantic_autodoc -------------------------------------------- # https://autodoc-pydantic.readthedocs.io/en/stable/users/configuration.html autodoc_pydantic_model_show_json = False +autodoc_pydantic_field_list_validators = False autodoc_pydantic_model_member_order = "groupwise" autodoc_pydantic_model_show_field_summary = False autodoc_pydantic_model_show_config_summary = False autodoc_pydantic_model_show_validator_summary = False +# -- Options for sphinx-paramlinks ------------------------------------------- +# https://github.com/sqlalchemyorg/sphinx-paramlinks + +paramlinks_hyperlink_param = "name_and_symbol" + # -- Event handlers ---------------------------------------------------------- @@ -77,4 +90,5 @@ def skip_member_handler(app, objtype, membername, member, skip, options): # noq def setup(app): + app.add_css_file("style.css") app.connect("autodoc-skip-member", skip_member_handler) diff --git a/docs/api/rst/artschema/framework.rst b/docs/api/rst/artschema/framework.rst index 9e667c8..4b4a661 100644 --- a/docs/api/rst/artschema/framework.rst +++ b/docs/api/rst/artschema/framework.rst @@ -2,6 +2,18 @@ Framework ========= +.. autotypevar:: mlopus.artschema.framework.A + :no-value: + :no-type: + +.. autotypevar:: mlopus.artschema.framework.D + :no-value: + :no-type: + +.. autotypevar:: mlopus.artschema.framework.L + :no-value: + :no-type: + .. autopydantic_model:: mlopus.artschema.Dumper :private-members: _dump, _verify diff --git a/docs/api/rst/artschema/index.rst b/docs/api/rst/artschema/index.rst index 856be9e..626e89c 100644 --- a/docs/api/rst/artschema/index.rst +++ b/docs/api/rst/artschema/index.rst @@ -5,14 +5,10 @@ Artifact Schemas .. automodule:: mlopus.artschema .. 
toctree:: - :maxdepth: 2 + :maxdepth: 1 :caption: Contents: framework - -.. toctree:: - :maxdepth: 1 - tags helpers specs diff --git a/docs/api/rst/artschema/specs.rst b/docs/api/rst/artschema/specs.rst index 5739164..13bd689 100644 --- a/docs/api/rst/artschema/specs.rst +++ b/docs/api/rst/artschema/specs.rst @@ -4,12 +4,16 @@ Specs & Catalog .. automodule:: mlopus.artschema.specs +.. autopydantic_model:: mlopus.artschema.RunArtifact + :exclude-members: cache, export, load, log, apply_defaults + +.. autopydantic_model:: mlopus.artschema.ModelVersionArtifact + :exclude-members: cache, export, load, log, apply_defaults + .. autopydantic_model:: mlopus.artschema.LogArtifactSpec :exclude-members: with_defaults - :show-inheritance: .. autopydantic_model:: mlopus.artschema.LoadArtifactSpec :exclude-members: with_defaults - :show-inheritance: .. autopydantic_model:: mlopus.artschema.ArtifactsCatalog diff --git a/docs/api/rst/artschema/tags.rst b/docs/api/rst/artschema/tags.rst index 49c5034..531dff8 100644 --- a/docs/api/rst/artschema/tags.rst +++ b/docs/api/rst/artschema/tags.rst @@ -3,6 +3,7 @@ Tags ==== .. autopydantic_model:: mlopus.artschema.Tags + :exclude-members: get_schema .. autopydantic_model:: mlopus.artschema.ClassSpec :exclude-members: parse_class diff --git a/docs/api/rst/mlflow/traits.rst b/docs/api/rst/mlflow/traits.rst index 7d76f3b..f65c1d4 100644 --- a/docs/api/rst/mlflow/traits.rst +++ b/docs/api/rst/mlflow/traits.rst @@ -7,4 +7,3 @@ Traits .. autopydantic_model:: mlopus.mlflow.MlflowRunMixin .. autopydantic_model:: mlopus.mlflow.MlflowRunManager - :show-inheritance: diff --git a/docs/api/rst/mlflow/utils.rst b/docs/api/rst/mlflow/utils.rst index 5b58348..1e7fb69 100644 --- a/docs/api/rst/mlflow/utils.rst +++ b/docs/api/rst/mlflow/utils.rst @@ -3,5 +3,5 @@ Utilities ========= .. autofunction:: mlopus.mlflow.list_api_plugins -.. autofunction:: mlopus.mlflow.api_conf_schema .. autofunction:: mlopus.mlflow.get_api +.. 
autofunction:: mlopus.mlflow.api_conf_schema diff --git a/pyproject.toml b/pyproject.toml index 5bc5fc7..f467996 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -59,9 +59,11 @@ dev-dependencies = [ "sphinx <7", "autodoc-pydantic <2", "sphinx-rtd-theme ~=2.0", + "sphinx-paramlinks ~=0.6", "enum-tools[sphinx] ~=0.12", "sphinx-code-include ~=1.4", "sphinx-autodoc-typehints ~=1.23", + "sphinx-toolbox>=3.8.0", ] # === Commitizen ================================================================================== @@ -140,4 +142,4 @@ select = [ "TID", "SIM", ] -ignore = [] +ignore = ["E501"] diff --git a/src/mlopus/artschema/__init__.py b/src/mlopus/artschema/__init__.py index ec8eb5e..c10588a 100644 --- a/src/mlopus/artschema/__init__.py +++ b/src/mlopus/artschema/__init__.py @@ -2,17 +2,23 @@ reusable dumpers/loaders for custom model/dataset classes. Schemas can be used standalone or registered in the tags of MLflow entities -such as experiments, runs, models and model versions, which allows loading -artifacts from these entities. - -Then, artifacts associated to those entities can be saved/loaded/verified -just by providing the alias of a previously registered schema. +such as experiments, runs, models and model versions. In the second case, +artifacts associated to those entities can be saved/loaded/verified +just by providing the alias of a registered schema. 
""" from .catalog import ArtifactsCatalog from .framework import Dumper, Loader, Schema from .helpers import load_artifact, get_schema, log_model_version, log_run_artifact, get_schemas -from .specs import parse_load_specs, LoadArtifactSpec, parse_logart_specs, LogArtifactSpec +from .specs import ( + parse_load_specs, + LoadArtifactSpec, + parse_logart_specs, + LogArtifactSpec, + RunArtifact, + ModelVersionArtifact, +) + from .tags import Tags, ClassSpec __all__ = [ @@ -31,4 +37,6 @@ "parse_load_specs", "LogArtifactSpec", "parse_logart_specs", + "RunArtifact", + "ModelVersionArtifact", ] diff --git a/src/mlopus/artschema/catalog.py b/src/mlopus/artschema/catalog.py index 0116e3c..ecb5a1a 100644 --- a/src/mlopus/artschema/catalog.py +++ b/src/mlopus/artschema/catalog.py @@ -15,20 +15,33 @@ class ArtifactsCatalog(pydantic.BaseModel): Useful for type-safe loading/downloading/exporting artifacts based on parsed application settings. + Example settings: + + .. code-block:: yaml + + foo: + schema: package.module:Schema # Schema specified explicitly by fully qualified class name + subject: + run_id: 12345678 + path_in_run: foo + bar: + schema: default # Schema obtained by alias from model version tags or parent model tags + subject: + model_name: foo + model_version: 3 + Example usage: .. code-block:: python + # Load the YAML settings above + artifact_specs: dict = ... 
+ + # Declare an artifact catalog class ArtifactsCatalog(mlopus.artschema.ArtifactsCatalog): foo: FooArtifact bar: BarArtifact - # Configure artifact sources - artifact_specs = { - "foo": {"subject": {"model_name": "foo", "model_version": "3"}}, # specs for a model artifact - "bar": {"subject": {"run_id": "1234567", "path_in_run": "bar"}}, # specs for a run artifact - } - # Cache all artifacts and metadata and verify their files using the specified schemas ArtifactsCatalog.download(mlflow_api, artifact_specs) @@ -37,6 +50,10 @@ class ArtifactsCatalog(mlopus.artschema.ArtifactsCatalog): artifacts_catalog.foo # `FooArtifact` artifacts_catalog.bar # `BarArtifact` + + In the example above, `artifact_specs` is implicitly parsed into a mapping of `str` to :class:`LoadArtifactSpec`, + while the :attr:`~LoadArtifactSpec.subject` values of `foo` and `bar` are parsed into + :class:`~mlopus.artschema.RunArtifact` and :class:`~mlopus.artschema.ModelVersionArtifact`, respectively. """ @classmethod @@ -48,6 +65,9 @@ def load( ) -> "ArtifactsCatalog": """Load artifacts from specs using their respective schemas. + See also: + - :meth:`LoadArtifactSpec.load` + :param mlflow_api: :param artifact_specs: """ @@ -63,9 +83,13 @@ def download( ) -> Dict[str, Path]: """Cache artifacts and metadata and verify the files against the schemas. + See also: + - :meth:`LoadArtifactSpec.download` + :param mlflow_api: :param artifact_specs: - :param verify: Use schemas for verification after download. + :param verify: | Use schemas for verification after download. + | See :meth:`~mlopus.artschema.Dumper.verify`. """ paths = {} @@ -87,10 +111,14 @@ def export( ) -> Dict[str, Path]: """Export artifacts and metadata caches while preserving cache structure. + See also: + - :meth:`LoadArtifactSpec.export` + :param mlflow_api: :param artifact_specs: :param target: Cache export target path. - :param verify: Use schemas for verification after export. 
+ :param verify: | Use schemas for verification after export. + | See :meth:`~mlopus.artschema.Dumper.verify`. """ paths = {} diff --git a/src/mlopus/artschema/framework.py b/src/mlopus/artschema/framework.py index 6a0f7d0..a46e571 100644 --- a/src/mlopus/artschema/framework.py +++ b/src/mlopus/artschema/framework.py @@ -8,7 +8,8 @@ logger = logging.getLogger(__name__) -A = TypeVar("A", bound=object) # Type of artifact +A = TypeVar("A", bound=object) +"""Type of artifact""" class Dumper(pydantic.BaseModel, ABC, Generic[A]): @@ -24,30 +25,67 @@ class Config(pydantic.BaseModel.Config): @abstractmethod def _dump(self, path: Path, artifact: A) -> None: - """Save artifact to `path` as file or dir.""" + """Save artifact to :paramref:`path` as file or dir. + + :param path: When this method is called, it is guaranteed that this + path doesn't exist yet and that its parent is a dir. + + :param artifact: An instance of :attr:`.Artifact`. + """ @abstractmethod def _verify(self, path: Path) -> None: - """Verify the `path` where the artifact was dumped.""" + """Verify the :paramref:`path`. + + :param path: Path where an instance of :attr:`.Artifact` is + supposed to have been dumped, downloaded or placed. + + :raises AssertionError: Unless :paramref:`path` is a file + or dir in the expected structure. + """ # ======================================================================================================= # === Public methods ==================================================================================== - def dump(self, path: Path, artifact: A, overwrite: bool = False) -> None: - """Save artifact to `path` as file or dir.""" + def dump(self, path: Path, artifact: A | dict, overwrite: bool = False) -> None: + """Save artifact to :paramref:`path` as file or dir. + + If possible, also saves a file with this dumper's conf. + + :param path: Target path. + :param artifact: + - An instance of :attr:`.Artifact`. 
+ - A `dict` that can be parsed into an :attr:`.Artifact` (in case :attr:`.Artifact` is a Pydantic model) + + :param overwrite: Overwrite the :paramref:`path` if exists. + """ self._dump(artifact=self._pre_dump(artifact), path=(path := paths.ensure_only_parents(path, force=overwrite))) self.verify(path) self.maybe_save_conf(path, strict=True) def verify(self, path: Path) -> None: - """Verify the `path` where the artifact was dumped.""" + """Verify the :paramref:`path`. + + :param path: Path where an instance of :attr:`.Artifact` is + supposed to have been dumped, downloaded or placed. + + :raises AssertionError: Unless :paramref:`path` is a file + or dir in the expected structure. + """ if path.exists(): self._verify(path) else: raise FileNotFoundError(path) def maybe_save_conf(self, path: Path, strict: bool): - """If path is a dir, save a file with dumper conf.""" + """If :paramref:`path` is a dir, save a file with this dumper's conf. + + :param path: + :param strict: + :raises FileNotFoundError: If :paramref:`path` doesn't exist. + :raises FileExistsError: If :paramref:`strict` is `True` and a dumper + conf file already exists in :paramref:`path`. 
+ """ if path.is_file(): logger.warning("Artifact dump is not a directory, dumper conf file will not be saved.") elif path.is_dir(): @@ -63,7 +101,7 @@ def maybe_save_conf(self, path: Path, strict: bool): # === Private methods =================================================================================== def _pre_dump(self, artifact: A | dict) -> A: - if isinstance(artifact, dict) and (model := pydantic.as_model_cls(self._artifact_type)): + if isinstance(artifact, dict) and (model := pydantic.as_model_cls(self.Artifact)): artifact = model.parse_obj(artifact) return artifact @@ -77,13 +115,16 @@ def _serialize_conf(self) -> str: # === Type param inference ============================================================================== @property - def _artifact_type(self) -> Type[A]: - """Artifact type used by this schema.""" + def Artifact(self) -> Type[A]: # noqa + """Artifact type used by this dumper. + + :return: Type of :attr:`~mlopus.artschema.framework.A` + """ return self._get_artifact_type() @classmethod def _get_artifact_type(cls) -> Type[A]: - """Infer artifact type used by this schema.""" + """Infer artifact type used by this dumper.""" base = typing_utils.find_base(cls, lambda b: typing_utils.safe_issubclass(b, Dumper)) return typing_utils.get_type_param(base, object, pos=0, strict=True) @@ -98,7 +139,8 @@ def _verify(self, path: Path) -> None: pass -D = TypeVar("D", bound=Dumper) # Type of dumper +D = TypeVar("D", bound=Dumper) +"""Type of :class:`Dumper`""" class Loader(pydantic.BaseModel, ABC, Generic[A, D]): @@ -109,13 +151,35 @@ class Loader(pydantic.BaseModel, ABC, Generic[A, D]): @abstractmethod def _load(self, path: Path, dumper: D) -> A | dict: - """Load artifact from `path`.""" + """Load artifact from :paramref:`path`. + + :param path: Path to artifact file or dir. 
+ :param dumper: + - If :paramref:`path` is a dir containing a dumper conf file, this param will be an instance + of :attr:`.Dumper` equivalent to the one that was originally used to save the artifact. + - Otherwise, it will be a :attr:`.Dumper` initialized with empty params. + + :return: + - An instance of :attr:`.Artifact`. + - A `dict` that can be parsed into an :attr:`.Artifact` (in case :attr:`.Artifact` is a Pydantic model) + """ # ======================================================================================================= # === Public methods ==================================================================================== def load(self, path: Path, dry_run: bool = False) -> A | Path: - """Load artifact from `path`.""" + """Load artifact from :paramref:`path`. + + As a side effect, this will use a :attr:`.Dumper` to :meth:`~Dumper.verify` the :paramref:`path`. + If :paramref:`path` is a dir containing a dumper conf file, the used :attr:`.Dumper` is equivalent to the one + that was originally used to save the artifact. Otherwise, it's a :attr:`.Dumper` initialized with empty params. + + :param path: Path to artifact file or dir. + :param dry_run: Just verify the artifact path. + :return: + - If :paramref:`dry_run` is `True`, the same :paramref:`path`. + - Otherwise, an instance of :attr:`.Artifact`. 
+ """ (dumper := self._load_dumper(path)).verify(path) if dry_run: @@ -127,22 +191,22 @@ def load(self, path: Path, dry_run: bool = False) -> A | Path: # === Private methods =================================================================================== def _post_load(self, artifact: A | dict) -> A: - if isinstance(artifact, dict) and (model := pydantic.as_model_cls(self._artifact_type)): + if isinstance(artifact, dict) and (model := pydantic.as_model_cls(self.Artifact)): artifact = model.parse_obj(artifact) return artifact def _load_dumper(self, path: Path) -> D: - if (dumper_conf_file := path / self._dumper_type.Config.dumper_conf_file).exists(): + if (dumper_conf_file := path / self.Dumper.Config.dumper_conf_file).exists(): dumper_conf = json_utils.loads(dumper_conf_file.read_text()) else: dumper_conf = {} try: - return self._dumper_type.parse_obj(dumper_conf) + return self.Dumper.parse_obj(dumper_conf) except pydantic.ValidationError as exc: logger.error( "Could not parse dumper with type '%s' (an anonymous pydantic class will be used instead): %s", - *(self._dumper_type, exc), + *(self.Dumper, exc), ) return pydantic.create_obj_from_data(name="AnonymousDumper", data=dumper_conf, __base__=_DummyDumper) @@ -150,13 +214,19 @@ def _load_dumper(self, path: Path) -> D: # === Type param inference ============================================================================== @property - def _artifact_type(self) -> Type[A]: - """Artifact type used by this schema.""" + def Artifact(self) -> Type[A]: # noqa + """Artifact type used by this loader. + + :return: Type of :attr:`~mlopus.artschema.framework.A` + """ return self._get_artifact_type() @property - def _dumper_type(self) -> Type[D]: - """Dumper class used by this schema.""" + def Dumper(self) -> Type[D]: # noqa + """Dumper type used by this loader. 
+ + :return: Type of :attr:`~mlopus.artschema.framework.D` + """ return self._get_dumper_type() @classmethod @@ -179,12 +249,15 @@ def _load(self, path: Path, dumper: _DummyDumper) -> object | dict: return path -L = TypeVar("L", bound=Loader) # Type of loader +L = TypeVar("L", bound=Loader) +"""Type of :class:`Loader`""" class Schema(pydantic.BaseModel, Generic[A, D, L]): """Base class for artifact schemas. + Serves for putting together the types of :class:`Artifact`, :class:`Dumper` and :class:`Loader`. + Example: .. code-block:: python @@ -240,10 +313,26 @@ class Schema(mlopus.artschema.Schema[Artifact, Dumper, Loader]): def get_dumper( self, artifact: A | dict | Path, dumper: D | dict | None = None, **dumper_kwargs ) -> Callable[[Path], None] | Path: - """Get dumper callback for artifact data. + """Get a dumper callback. + + :param artifact: + + - An instance of :attr:`.Artifact` + - A `Path` to a file or dir containing a pre-dumped :attr:`.Artifact` + - A `dict` that can be parsed into an :attr:`.Artifact` (in case :attr:`.Artifact` is a Pydantic model) + + :param dumper: Custom :attr:`.Dumper` configuration. Defaults to an empty `dict`. + + - An instance of :attr:`.Dumper` + - A `dict` that can be parsed into a :attr:`.Dumper` + + :param dumper_kwargs: | Keyword arguments for instantiating a :attr:`.Dumper`. + | Incompatible with the :paramref:`dumper` param. - - If artifact is a `Path`, it will be just verified and returned as it is. - - If artifact is a `dict` and the target type is a pydantic model, it will be parsed before being dumped. + :return: + + - If :paramref:`artifact` is a `Path`: The same `Path` after being verified by the configured :attr:`.Dumper`. + - Otherwise: A callback that accepts a `Path` and uses the configured :attr:`.Dumper` to dump the provided :attr:`.Artifact` on it. """ assert dumper is None or not dumper_kwargs, "`dumper` and `dumper_kwargs` are not compatible." 
@@ -260,10 +349,22 @@ def get_dumper( def get_loader( self, loader: L | dict | None = None, dry_run: bool = False, **loader_kwargs ) -> Callable[[Path], A | Path]: - """Get loader callback for artifact file(s). + """Get a loader callback. + + :param loader: Custom :attr:`.Loader` configuration. Defaults to an empty `dict`. + + - An instance of :attr:`.Loader` + - A `dict` that can be parsed into a :attr:`.Loader` + + :param loader_kwargs: | Keyword arguments for instantiating a :attr:`.Loader`. + | Incompatible with the :paramref:`loader` param. + + :param dry_run: | See :paramref:`Loader.load.dry_run`. + + :return: - - If `dry_run=True`, the callback will just verify the given `Path` and return it as it is. - - If the loaded artifact is a `dict` and the target type is a pydantic model, the callback will parse it. + - If :paramref:`dry_run` is `True`: A callback that accepts a `Path`, verifies it and returns it. + - Otherwise: A callback that accepts a `Path` and uses the configured :attr:`.Loader` to load and return an :attr:`.Artifact` """ assert loader is None or not loader_kwargs, "`loader` and `loader_kwargs` are not compatible." @@ -277,17 +378,26 @@ def get_loader( @property def Artifact(self) -> Type[A]: # noqa - """Artifact type used by this schema.""" + """Artifact type used by this schema. + + :return: Type of :attr:`~mlopus.artschema.framework.A` + """ return self._get_artifact_type() @property def Dumper(self) -> Type[D]: # noqa - """Dumper class used by this schema.""" + """:class:`Dumper` type used by this schema. + + :return: Type of :attr:`~mlopus.artschema.framework.D` + """ return self._get_dumper_type() @property def Loader(self) -> Type[L]: # noqa - """Loader class used by this schema.""" + """:class:`Loader` type used by this schema. 
+ + :return: Type of :attr:`~mlopus.artschema.framework.L` + """ return self._get_loader_type() @classmethod diff --git a/src/mlopus/artschema/helpers.py b/src/mlopus/artschema/helpers.py index 10dca09..f626117 100644 --- a/src/mlopus/artschema/helpers.py +++ b/src/mlopus/artschema/helpers.py @@ -2,34 +2,44 @@ import logging from pathlib import Path -from typing import TypeVar, Type, Tuple, Mapping +from typing import Type, Tuple, Mapping -from mlopus.mlflow.api.common.schema import BaseEntity +import mlopus from mlopus.mlflow.api.contract import RunIdentifier from mlopus.mlflow.api.model import ModelApi from mlopus.mlflow.api.mv import ModelVersionApi from mlopus.mlflow.api.run import RunApi from mlopus.utils import import_utils, typing_utils, dicts - -from .framework import Dumper, Loader, Schema +from .framework import Schema, A, D, L from .tags import Tags, ClassSpec, DEFAULT_ALIAS logger = logging.getLogger(__name__) -T = TypeVar("T", bound=BaseEntity) # Type of entity - -A = TypeVar("A", bound=object) # Type of artifact -D = TypeVar("D", bound=Dumper) # Type of Dumper -L = TypeVar("L", bound=Loader) # Type of Loader +T = ( + mlopus.mlflow.schema.Experiment + | mlopus.mlflow.schema.Run + | mlopus.mlflow.schema.Model + | mlopus.mlflow.schema.ModelVersion +) def get_schemas(subject: T) -> Tags: - """Based on tags, find the artifact schemas for the specified experiment, run, registered model or model version.""" + """Parse artifact schema tags from :paramref:`subject`. + + See also: :meth:`mlopus.artschema.Tags.parse_subject` + + :param subject: | Experiment, run, model or model version. + """ return Tags.parse_subject(subject) def get_schema(subject: T, alias: str | None = None) -> ClassSpec: - """Based on tags, infer artifact schema for the specified experiment, run, registered model or model version.""" + """Get artifact schema class specification from :paramref:`subject`. + + :param subject: | Experiment, run, model or model version. 
+ :param alias: | Alias of a previously registered schema for this :paramref:`subject`. + | Defaults to `default`. + """ return Tags.parse_subject(subject).get_schema(alias) @@ -45,7 +55,36 @@ def log_run_artifact( allow_duplication: bool | None = None, use_cache: bool | None = None, ) -> None: - """Log run artifact using schema (either provided or inferred from tags).""" + """Publish run artifact using schema. + + :param artifact: | See :paramref:`Schema.get_dumper.artifact` + + :param run: | Run API object. + + :param path_in_run: | See :paramref:`mlopus.mlflow.BaseMlflowApi.log_run_artifact.path_in_run` + + :param schema: + - Type or instance of :class:`Schema` + - Fully qualified name of a :class:`Schema` class (e.g.: `package.module:Class`) + - Alias of a schema previously registered for this run or its parent experiment + (see :class:`mlopus.artschema.Tags`). + + :param dumper_conf: | See :paramref:`Schema.get_dumper.dumper` + + :param skip_reqs_check: | If :paramref:`schema` is specified by alias, ignore the registered package requirement. + | See :meth:`mlopus.artschema.ClassSpec.load` + + :param auto_register: | After a successful :paramref:`artifact` publish, register the used :paramref:`schema` in the :paramref:`run` tags. + | If a non-empty `dict` is passed, it is used as keyword arguments for :meth:`Tags.using`. + | If the :paramref:`schema` was specified by alias, that alias is used by default. 
+ + :param keep_the_source: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.keep_the_source` + (the `source` in this case is a callback, unless :paramref:`artifact` is a `Path`) + + :param allow_duplication: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.allow_duplication` + + :param use_cache: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.use_cache` + """ schema, alias = resolve_schema_and_alias(schema, run, skip_reqs_check) run.log_artifact( @@ -66,18 +105,68 @@ def log_model_version( artifact: A | dict | Path, model: ModelApi, run: RunIdentifier, + path_in_run: str | None = None, schema: Schema[A, D, L] | Type[Schema[A, D, L]] | str | None = None, dumper_conf: D | dict | None = None, skip_reqs_check: bool = False, auto_register: bool | dict = False, - path_in_run: str | None = None, keep_the_source: bool | None = None, allow_duplication: bool | None = None, use_cache: bool | None = None, version: str | None = None, tags: Mapping | None = None, ) -> ModelVersionApi: - """Log artifact as model version using schema (either provided or inferred from tags).""" + """Log artifact as model version using schema. + + Example: + + .. code-block:: python + + mlflow = mlopus.mlflow.get_api() + + version = mlopus.artschema.log_model_version( + my_artifact, + schema=MySchema, + run=mlflow.start_run(...), + model=mlflow.get_or_create_model(...), + auto_register={"aliased_as": "foobar"} # register `MySchema` as `foobar` + ) + + mlopus.artschema.load_artifact(version, schema="foobar") + + :param artifact: | See :paramref:`Schema.get_dumper.artifact` + + :param model: | Model API object. + + :param run: | Run API object. 
+ + :param path_in_run: | See :paramref:`mlopus.mlflow.BaseMlflowApi.log_model_version.path_in_run` + + :param schema: + - Type or instance of :class:`Schema` + - Fully qualified name of a :class:`Schema` class (e.g.: `package.module:Class`) + - Alias of a schema previously registered for this run or its parent experiment + (see :class:`mlopus.artschema.Tags`). + + :param dumper_conf: | See :paramref:`Schema.get_dumper.dumper` + + :param skip_reqs_check: | If :paramref:`schema` is specified by alias, ignore the registered package requirement. + | See :meth:`mlopus.artschema.ClassSpec.load` + + :param auto_register: | After a successful :paramref:`artifact` publish, register the used :paramref:`schema` in the new model version tags. + | See also :paramref:`log_run_artifact.auto_register` + + :param keep_the_source: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.keep_the_source` + (the `source` in this case is a callback, unless :paramref:`artifact` is a `Path`) + + :param allow_duplication: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.allow_duplication` + + :param use_cache: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.use_cache` + + :param version: | See :paramref:`mlopus.mlflow.BaseMlflowApi.log_model_version.version` + + :param tags: | See :paramref:`mlopus.mlflow.BaseMlflowApi.log_model_version.tags` + """ schema, alias = resolve_schema_and_alias(schema, model, skip_reqs_check) mv = model.log_version( @@ -106,8 +195,31 @@ def load_artifact( loader_conf: L | dict | None = None, skip_reqs_check: bool = False, dry_run: bool = False, -) -> A: - """Load artifact of run or model version using schema (either provided or inferred from tags).""" +) -> A | Path: + """Load artifact of run or model version using schema. + + :param subject: | Run or model version with API handle. 
+ + :param path_in_run: | See :paramref:`mlopus.mlflow.BaseMlflowApi.load_run_artifact.path_in_run` + | If :paramref:`subject` is a model version, defaults to model name. + + :param schema: + - Type or instance of :class:`Schema` + - Fully qualified name of a :class:`Schema` class (e.g.: `package.module:Class`) + - Alias of a schema previously registered for this run/model version or its parent experiment/model + (see :class:`mlopus.artschema.Tags`). + + :param loader_conf: | See :paramref:`Schema.get_loader.loader` + + :param skip_reqs_check: | If :paramref:`schema` is specified by alias, ignore the registered package requirement. + | See :meth:`mlopus.artschema.ClassSpec.load` + + :param dry_run: | See :paramref:`~mlopus.artschema.Loader.load.dry_run` + + :return: + - If :paramref:`dry_run` is `True`: A `Path` to the cached artifact, after being verified. + - Otherwise: An instance of :attr:`~mlopus.artschema.Schema.Artifact` + """ kwargs = {} if isinstance(subject, RunApi): diff --git a/src/mlopus/artschema/specs.py b/src/mlopus/artschema/specs.py index 5b66e2c..0582efb 100644 --- a/src/mlopus/artschema/specs.py +++ b/src/mlopus/artschema/specs.py @@ -14,19 +14,15 @@ from typing import Dict, Any, Generic, Type, Mapping, Tuple from typing import TypeVar +from mlopus.lineage import _LineageArg, _ModelLineageArg, _RunLineageArg from mlopus.mlflow.api.entity import EntityApi from mlopus.mlflow.api.mv import ModelVersionApi from mlopus.mlflow.api.run import RunApi from mlopus.mlflow.traits import MlflowApiMixin from mlopus.utils import pydantic -from mlopus.lineage import _LineageArg, _ModelLineageArg, _RunLineageArg -from .framework import Dumper, Loader, Schema, _DummySchema +from .framework import Schema, _DummySchema, A, D, L from .helpers import load_artifact, log_model_version, log_run_artifact -A = TypeVar("A", bound=object) # Type of artifact -D = TypeVar("D", bound=Dumper) # Type of Dumper -L = TypeVar("L", bound=Loader) # Type of Loader - T = TypeVar("T", 
bound=EntityApi) # Type of entity API LA = TypeVar("LA", bound=_LineageArg) # Type of lineage argument @@ -59,13 +55,25 @@ def apply_defaults(self, **defaults): class ModelVersionArtifact(ArtifactSubject[ModelVersionApi, _ModelLineageArg]): """Specification of a model version artifact.""" - model_name: str - model_version: str | None = None # Required when loading an existing model version + model_name: str = pydantic.Field(description="Parent model name.") - # Params bellow are only used when logging a new model version! - run_id: str = None - tags: Mapping | None = None - path_in_run: str | None = None + model_version: str | None = pydantic.Field( + default=None, description="Required when loading, optional when publishing." + ) + + run_id: str = pydantic.Field(default=None, description="Parent run ID. Required when publishing.") + + tags: Mapping | None = pydantic.Field( + default=None, + description="Optional, only used when publishing. See :attr:`mlopus.mlflow.schema.ModelVersion.tags`.", + ) + + path_in_run: str | None = pydantic.Field( + default=None, + description=( + "Only used when publishing. See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_model_version.path_in_run`." 
+ ), + ) @property def _version_api(self) -> ModelVersionApi: @@ -148,23 +156,70 @@ def _parse_subject(subj: ModelVersionArtifact | RunArtifact | dict) -> ArtifactS class LoadArtifactSpec(MlflowApiMixin, Generic[T, LA]): """Specification for loading an artifact.""" - schema_: Schema[A, D, L] | Type[Schema[A, D, L]] | str | None = pydantic.Field(_DummySchema, alias="schema") - loader_conf: Dict[str, Any] | None = None - skip_reqs_check: bool = False - subject: ArtifactSubject[T, LA] + schema_: Schema[A, D, L] | Type[Schema[A, D, L]] | str | None = pydantic.Field( + alias="schema", + default_factory=_DummySchema, + description="See :paramref:`load_artifact.schema`", + ) + + loader_conf: Dict[str, Any] | None = pydantic.Field( + default=None, + description="See :paramref:`Schema.get_loader.loader`", + ) + + skip_reqs_check: bool = pydantic.Field( + default=False, + description="See :paramref:`load_artifact.skip_reqs_check`", + ) + + subject: ArtifactSubject[T, LA] = pydantic.Field( + description=( + "Instance (or `dict` to be parsed into instance) of :class:`RunArtifact` or :class:`ModelVersionArtifact`. " + "See also: :paramref:`load_artifact.subject`." + ) + ) _parse_subject = pydantic.validator("subject", pre=True, allow_reuse=True)(_parse_subject) def download(self) -> Path: - """Cache subject metadata and artifact.""" + """Cache subject metadata and artifact. + + See also: + - :meth:`mlopus.mlflow.RunApi.cache_meta` + - :meth:`mlopus.mlflow.RunApi.cache_artifact` + - :meth:`mlopus.mlflow.ModelVersionApi.cache_meta` + - :meth:`mlopus.mlflow.ModelVersionApi.cache_artifact` + + :return: A `Path` to the cached artifact. + """ return self.subject.using(self.mlflow_api).cache() def export(self, target: Path) -> Path: - """Export subject metadata and artifact cache.""" + """Export subject metadata and artifact cache. 
+ + See also: + - :meth:`mlopus.mlflow.RunApi.export_meta` + - :meth:`mlopus.mlflow.RunApi.export_artifact` + - :meth:`mlopus.mlflow.ModelVersionApi.export_meta` + - :meth:`mlopus.mlflow.ModelVersionApi.export_artifact` + + :param target: Target cache export `Path`. + """ return self.subject.using(self.mlflow_api).export(target) def load(self, schema: Schema[A, D, L] | Type[Schema[A, D, L]] | str | None = None, dry_run: bool = False) -> A: - """Load artifact.""" + """Load artifact. + + See also: + - :meth:`mlopus.mlflow.RunApi.load_artifact` + - :meth:`mlopus.mlflow.ModelVersionApi.load_artifact` + + :param schema: | Override :attr:`schema_` + :param dry_run: | See :paramref:`~mlopus.artschema.Loader.load.dry_run` + :return: + - If :paramref:`dry_run` is `True`: A `Path` to the cached artifact. + - Otherwise: An instance of :attr:`Schema.Artifact` + """ return self._load(schema, dry_run)[1] def _load( @@ -194,20 +249,60 @@ class LogArtifactSpec(MlflowApiMixin, Generic[T, LA]): """Specification for logging an artifact.""" schema_: Schema[A, D, L] | Type[Schema[A, D, L]] | str | None = pydantic.Field( - alias="schema", default_factory=_DummySchema + alias="schema", + default_factory=_DummySchema, + description="See :paramref:`load_artifact.schema`", + ) + + dumper_conf: Dict[str, Any] | None = pydantic.Field( + default=None, + description="See :paramref:`Schema.get_dumper.dumper`", + ) + + skip_reqs_check: bool = pydantic.Field( + default=False, + description="See :paramref:`load_artifact.skip_reqs_check`", + ) + + auto_register: bool | Dict[str, Any] = pydantic.Field( + default=False, + description="See :paramref:`log_run_artifact.auto_register` and :paramref:`log_model_version.auto_register`", + ) + + keep_the_source: bool | None = pydantic.Field( + default=None, + description="See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.keep_the_source`", + ) + + allow_duplication: bool | None = pydantic.Field( + default=None, + description="See 
:paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.allow_duplication`", + ) + + use_cache: bool | None = pydantic.Field( + default=None, + description="See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.use_cache`", + ) + + subject: ArtifactSubject[T, LA] = pydantic.Field( + description=( + "Instance (or `dict` to be parsed into instance) of :class:`RunArtifact` or :class:`ModelVersionArtifact`. " + "See also: :paramref:`load_artifact.subject`." + ) ) - dumper_conf: Dict[str, Any] | None = None - skip_reqs_check: bool = False - auto_register: bool | Dict[str, Any] = False - keep_the_source: bool | None = None - allow_duplication: bool | None = None - use_cache: bool | None = None - subject: ArtifactSubject[T, LA] _parse_subject = pydantic.validator("subject", pre=True, allow_reuse=True)(_parse_subject) def log(self, artifact: A | dict | Path, schema: Schema[A, D, L] | Type[Schema[A, D, L]] | str | None = None) -> T: - """Log artifact.""" + """Log artifact. + + See also: + - :meth:`mlopus.mlflow.RunApi.log_artifact` + - :meth:`mlopus.mlflow.ModelApi.log_version` + + :param schema: | Override :attr:`schema_` + :param artifact: | See :paramref:`Schema.get_dumper.artifact` + """ return self._log(artifact, schema)[1] def _log( diff --git a/src/mlopus/artschema/tags.py b/src/mlopus/artschema/tags.py index d99392c..25ef640 100644 --- a/src/mlopus/artschema/tags.py +++ b/src/mlopus/artschema/tags.py @@ -4,8 +4,7 @@ import importlib_metadata -from mlopus.mlflow import schema -from mlopus.mlflow.api.entity import EntityApi +import mlopus.mlflow from mlopus.utils import pydantic, packaging, dicts, import_utils, typing_utils from .framework import Schema @@ -15,15 +14,16 @@ DEFAULT_CONSTRAINT = ">=" # Default constraint for package version -A = TypeVar("A") # Any type +T = TypeVar("T") # Any type -# Entity types -E = schema.Experiment -R = schema.Run -M = schema.Model -V = schema.ModelVersion +Entity = ( + mlopus.mlflow.schema.Experiment + | 
mlopus.mlflow.schema.Run + | mlopus.mlflow.schema.Model + | mlopus.mlflow.schema.ModelVersion +) -T = TypeVar("T", bound=EntityApi) # Type of Entity API +API = mlopus.mlflow.ExpApi | mlopus.mlflow.RunApi | mlopus.mlflow.ModelApi | mlopus.mlflow.ModelVersionApi def _maybe_warn_editable_dist(cls: str, dist: importlib_metadata.Distribution): @@ -69,8 +69,13 @@ class ClassSpec(pydantic.BaseModel, pydantic.MappingMixin): cls: str pkg: PkgSpec - def load(self, type_: Type[A] | None = None, skip_reqs_check: bool = False) -> Type[A]: - """Load class by fully qualified name, optionally doing package requirement checks.""" + def load(self, type_: Type[T] | None = None, skip_reqs_check: bool = False) -> Type[T]: + """Load class by fully qualified name. + + :param type_: If specified, the loaded class must inherit this type. + :param skip_reqs_check: Ignore package requirements for the loaded class. + :return: Loaded class as subclass of :paramref:`type_` + """ if not skip_reqs_check: self.pkg.check_requirement() self.pkg.check_extras() @@ -155,7 +160,9 @@ class Tags(pydantic.BaseModel, pydantic.MappingMixin): # Package details are inferred automatically unless specified when calling `using(...)`. """ - schemas: Dict[str, ClassSpec] = pydantic.Field(default_factory=dict) + schemas: Dict[str, ClassSpec] = pydantic.Field( + default_factory=dict, description="Mapping of alias to artifact schema class specification." + ) @pydantic.validate_arguments def using( @@ -167,14 +174,28 @@ def using( with_constraint: packaging.VersionConstraint = DEFAULT_CONSTRAINT, and_extras: Sequence[str] | None = None, ) -> "Tags": - """Add aliased artifact schema to this `Tags` object.""" + """Add aliased artifact schema to this `Tags` object. + + If not specified, the params :paramref:`from_package` and :paramref:`at_version` are inferred from + the metadata of the provided :paramref:`cls` and the packages installed in the current environment. + + :param cls: Artifact schema class. 
+ :param aliased_as: Schema alias. Defaults to `default`. + :param from_package: Required package for this schema. + :param at_version: Required package version. + :param with_constraint: Version requirement constraint. + :param and_extras: Required package extras for using the schema. + """ if (alias := aliased_as or DEFAULT_ALIAS) in self.schemas: raise ValueError(f"Found duplicated schema alias: {alias}") self.schemas[alias] = ClassSpec.parse_class(cls, from_package, at_version, with_constraint, and_extras) return self - def register(self, subject: T): - """Register these artifact schema tags for the specified subject.""" + def register(self, subject: API): + """Register these artifact schema tags for the specified :paramref:`subject`. + + :param subject: | Experiment, run, model or model version with API handle. + """ logger.info("Registering artifact schemas for %s\n%s", subject, self.json(indent=4)) subject.set_tags(self) @@ -185,17 +206,23 @@ def get_schema(self, alias: str | None = None) -> ClassSpec: return cls_spec @classmethod - def parse_subject(cls, subject: E | R | M | V) -> "Tags": - """Parse experiment, run, registered model, model version or tags dict into artifact schema tags.""" + def parse_subject(cls, subject: Entity) -> "Tags": + """Parse artifact schema tags from :paramref:`subject`. + + - Runs inherit schemas of their parent experiment. + - Model versions inherit schemas of their parent model. + + :param subject: | Experiment, run, model or model version. 
+ """ key = "schemas" match subject: - case M() | E(): + case mlopus.mlflow.schema.Model() | mlopus.mlflow.schema.Experiment(): tags = subject.tags.get(key, {}) - case R(): + case mlopus.mlflow.schema.Run(): # Merge schema tags from run and parent exp (run takes precedence) tags = dicts.deep_merge(subject.exp.tags.get(key, {}), subject.tags.get(key, {})) - case V(): + case mlopus.mlflow.schema.ModelVersion(): # Merge schema tags from model version and parent model (version takes precedence) tags = dicts.deep_merge(subject.model.tags.get(key, {}), subject.tags.get(key, {})) case _: diff --git a/src/mlopus/lineage.py b/src/mlopus/lineage.py index 94145fb..714db8c 100644 --- a/src/mlopus/lineage.py +++ b/src/mlopus/lineage.py @@ -42,11 +42,17 @@ class Config: repr_empty: bool = False - runs: Dict[str, Set[str]] = pydantic.Field({}, arg_type=_RunLineageArg) - """Mapping of `run_id` -> `[path_in_run]`""" - - models: Dict[str, Set[str]] = pydantic.Field({}, arg_type=_ModelLineageArg) - """Mapping of `model_name` -> `[versions]`""" + runs: Dict[str, Set[str]] = pydantic.Field( + default={}, + arg_type=_RunLineageArg, + description="Mapping of `run_id` -> `[path_in_run]`", + ) + + models: Dict[str, Set[str]] = pydantic.Field( + default={}, + arg_type=_ModelLineageArg, + description="Mapping of `model_name` -> `[versions]`", + ) @property def runs_by_path(self) -> Dict[str, Set[str]]: @@ -85,11 +91,15 @@ class Config: repr_empty: bool = False - inputs: Inputs = None - """Run inputs.""" + inputs: Inputs = pydantic.Field( + default=None, + description="Run inputs.", + ) - outputs: Outputs = None - """Run outputs.""" + outputs: Outputs = pydantic.Field( + default=None, + description="Run outputs.", + ) def with_input(self, arg: _LineageArg) -> "_LineageTags": """Add input model or run artifact.""" @@ -102,19 +112,35 @@ def with_output(self, arg: _LineageArg) -> "_LineageTags": return self def with_input_model(self, name: str, version: str | None = None) -> "_LineageTags": - 
"""Add input model.""" + """Add input model. + + :param name: Model name. + :param version: Model version. + """ return self.with_input(_ModelLineageArg(name, version)) def with_input_artifact(self, run_id: str, path_in_run: str | None = None) -> "_LineageTags": - """Add input run artifact.""" + """Add input run artifact. + + :param run_id: Run ID. + :param path_in_run: Plain relative path inside run artifacts (e.g.: `a/b/c`) + """ return self.with_input(_RunLineageArg(run_id, path_in_run)) def with_output_model(self, name: str, version: str | None = None) -> "_LineageTags": - """Add output model.""" + """Add output model. + + :param name: Model name. + :param version: Model version. + """ return self.with_output(_ModelLineageArg(name, version)) def with_output_artifact(self, run_id: str, path_in_run: str | None = None) -> "_LineageTags": - """Add output run artifact.""" + """Add output run artifact. + + :param run_id: Run ID. + :param path_in_run: Plain relative path inside run artifacts (e.g.: `a/b/c`) + """ return self.with_output(_RunLineageArg(run_id, path_in_run)) @@ -141,7 +167,10 @@ class Lineage(_LineageTags): @classmethod def of(cls, run: RunApi) -> "Lineage": - """Parse lineage tags from experiment run with API handle.""" + """Parse lineage tags from experiment run with API handle. + + :param run: Run metadata with API handle. + """ return cls(run=run, **run.tags.get("lineage", {})) def register(self) -> "Lineage": @@ -192,5 +221,8 @@ def render(self) -> Dict[str, str | Set[str]]: def of(run: RunApi) -> Lineage: - """Parse lineage tags from run API.""" + """Parse lineage tags from run API. + + :param run: Run metadata with API handle. + """ return Lineage.of(run) diff --git a/src/mlopus/mlflow/__init__.py b/src/mlopus/mlflow/__init__.py index 03cedad..43faa2d 100644 --- a/src/mlopus/mlflow/__init__.py +++ b/src/mlopus/mlflow/__init__.py @@ -1,18 +1,11 @@ -"""This module offers tools for configuring and using standardized, plugin-based MLflow APIs. 
+"""This module is based on the interface :class:`~mlopus.mlflow.BaseMlflowApi`, +which may be implemented in order to work with different MLflow backends/providers +in the scope of experiment tracking and model registry. -Plugins must implement the interface `mlopus.mlflow.BaseMlflowApi` -and be included in the entry-points group `mlopus.mlflow_api_providers`. +Built-in implementations can be found under the module :mod:`mlopus.mlflow.providers` +and are also available under the plugin group `mlopus.mlflow_api_providers`. -While each plugin may offer access to a different MLflow-like backend/provider, -all plugins are meant to be thread-safe and independent of env vars/global vars, -so multiple API instances can coexist in the same program if necessary. - -The default plugin, aliased in the entry-points as `mlflow`, -handles communication to open-source MLflow servers (assuming -no artifacts proxy and server-managed SQL database). - -Another built-in alternative is the minimal-dependency `generic` plugin, -which works exclusively with the local cache and does not implement any client-server communication. +Third-party implementations may also be added to that group in order to expand funcionality. """ from . 
import providers diff --git a/src/mlopus/mlflow/api/base.py b/src/mlopus/mlflow/api/base.py index 20c16b1..1d54a56 100644 --- a/src/mlopus/mlflow/api/base.py +++ b/src/mlopus/mlflow/api/base.py @@ -28,14 +28,19 @@ T = TypeVar("T", bound=schema.BaseEntity) # Identifier types -ExpIdentifier = contract.ExpIdentifier -RunIdentifier = contract.RunIdentifier -ModelIdentifier = contract.ModelIdentifier -ModelVersionIdentifier = contract.ModelVersionIdentifier +ExpIdentifier = contract.ExpIdentifier | ExpApi +RunIdentifier = contract.RunIdentifier | RunApi +ModelIdentifier = contract.ModelIdentifier | ModelApi +ModelVersionIdentifier = contract.ModelVersionIdentifier | ModelVersionApi class BaseMlflowApi(contract.MlflowApiContract, ABC, frozen=True): - """Base class for API clients that use "MLflow-like" backends for experiment tracking and model registry.""" + """Base class for API clients that use "MLflow-like" backends for experiment tracking and model registry. + + Important: + Implementations of this interface are meant to be thread-safe and independent of env vars/globals, + so multiple API instances can coexist in the same program if necessary. + """ cache_dir: Path = pydantic.Field( default=None, @@ -717,44 +722,80 @@ def clean_temp_artifacts(self): @pydantic.validate_arguments def clean_cached_run_artifact(self, run: RunIdentifier, path_in_run: str = ""): - """Clean cached artifact for specified run.""" + """Clean cached artifact for specified run. + + :param run: Run ID or object. + :param path_in_run: Plain relative path inside run artifacts (e.g.: `a/b/c`) + """ self._clean_run_artifact(self._coerce_run_id(run), path_in_run) @pydantic.validate_arguments def clean_cached_model_artifact(self, model_version: ModelVersionIdentifier): - """Clean cached artifact for specified model version.""" + """Clean cached artifact for specified model version. + + :param model_version: Model version object or `(name, version)` tuple. 
+ """ mv = self._coerce_mv(model_version) self.clean_cached_run_artifact(mv.run, mv.path_in_run) @pydantic.validate_arguments def list_run_artifacts(self, run: RunIdentifier, path_in_run: str = "") -> transfer.LsResult: - """List run artifacts in repo.""" + """List run artifacts in repo. + + :param run: Run ID or object. + :param path_in_run: Plain relative path inside run artifacts (e.g.: `a/b/c`) + """ return self._list_run_artifacts(run, path_in_run) @pydantic.validate_arguments - def list_model_artifact(self, model_version: ModelVersionIdentifier) -> transfer.LsResult: - """List model version artifacts in repo.""" - return self.list_run_artifacts((mv := self._coerce_mv(model_version)).run, mv.path_in_run) + def list_model_artifact(self, model_version: ModelVersionIdentifier, path_suffix: str = "") -> transfer.LsResult: + """List model version artifacts in repo. + + :param model_version: Model version object or `(name, version)` tuple. + :param path_suffix: Plain relative path inside model artifact dir (e.g.: `a/b/c`). + """ + return self.list_run_artifacts( + run=(mv := self._coerce_mv(model_version)).run, + path_in_run=mv.path_in_run + "/" + path_suffix.strip("/"), + ) @pydantic.validate_arguments def cache_run_artifact(self, run: RunIdentifier, path_in_run: str = "") -> Path: - """Pull run artifact from MLflow server to local cache.""" + """Pull run artifact from MLflow server to local cache. + + :param run: Run ID or object. + :param path_in_run: Plain relative path inside run artifacts (e.g.: `a/b/c`) + """ return self._pull_run_artifact(run, path_in_run) @pydantic.validate_arguments def cache_model_artifact(self, model_version: ModelVersionIdentifier) -> Path: - """Pull model version artifact from MLflow server to local cache.""" + """Pull model version artifact from MLflow server to local cache. + + :param model_version: Model version object or `(name, version)` tuple. 
+ """ mv = self._coerce_mv(model_version) return self.cache_run_artifact(mv.run, mv.path_in_run) @pydantic.validate_arguments def get_run_artifact(self, run: RunIdentifier, path_in_run: str = "") -> Path: - """Get local path to run artifact.""" + """Get local path to run artifact. + + Triggers a cache pull on a cache miss or if :attr:`always_pull_artifacts`. + + :param run: Run ID or object. + :param path_in_run: Plain relative path inside run artifacts (e.g.: `a/b/c`) + """ return self._get_run_artifact(self._coerce_run_id(run), path_in_run) @pydantic.validate_arguments def get_model_artifact(self, model_version: ModelVersionIdentifier) -> Path: - """Get local path to model artifact.""" + """Get local path to model artifact. + + Triggers a cache pull on a cache miss or if :attr:`always_pull_artifacts`. + + :param model_version: Model version object or `(name, version)` tuple. + """ mv = self._coerce_mv(model_version) return self.get_run_artifact(mv.run, mv.path_in_run) @@ -767,7 +808,16 @@ def place_run_artifact( overwrite: bool = False, link: bool = True, ): - """Place run artifact on target path.""" + """Place run artifact on target path. + + Triggers a cache pull on a cache miss or if :attr:`always_pull_artifacts`. + + :param run: Run ID or object. + :param target: Target path. + :param path_in_run: Plain relative path inside run artifacts (e.g.: `a/b/c`) + :param overwrite: Overwrite target path if exists. + :param link: Use symbolic link instead of copy. + """ self._place_run_artifact(self._coerce_run_id(run), path_in_run, target, link, overwrite) @pydantic.validate_arguments @@ -778,7 +828,15 @@ def place_model_artifact( overwrite: bool = False, link: bool = True, ): - """Place model version artifact on target path.""" + """Place model version artifact on target path. + + Triggers a cache pull on a cache miss or if :attr:`always_pull_artifacts`. + + :param model_version: Model version object or `(name, version)` tuple. + :param target: Target path. 
+ :param overwrite: Overwrite target path if exists. + :param link: Use symbolic link instead of copy. + """ mv = self._coerce_mv(model_version) self.place_run_artifact(mv.run, target, mv.path_in_run, overwrite, link) @@ -789,7 +847,14 @@ def export_run_artifact( target: Path, path_in_run: str = "", ) -> Path: - """Export run artifact cache to target path.""" + """Export run artifact cache to target path while keeping the original cache structure. + + The target path can then be used as cache dir by the `generic` MLflow API in offline mode. + + :param run: Run ID or object. + :param target: Cache export path. + :param path_in_run: Plain relative path inside run artifacts (e.g.: `a/b/c`) + """ if paths.is_sub_dir(target, self.cache_dir) or paths.is_sub_dir(self.cache_dir, target): raise paths.IllegalPath(f"Cannot export cache to itself, its subdirs or parents: {target}") cache = self._get_run_artifact(run, path_in_run) @@ -804,18 +869,37 @@ def export_model_artifact( model_version: ModelVersionIdentifier, target: Path, ) -> Path: - """Export model version artifact cache to target path.""" + """Export model version artifact cache to target path while keeping the original cache structure. + + The target path can then be used as cache dir by the `generic` MLflow API in offline mode. + + :param model_version: Model version object or `(name, version)` tuple. + :param target: Cache export path. + """ mv = self._coerce_mv(model_version) return self.export_run_artifact(mv.run, target, mv.path_in_run) @pydantic.validate_arguments def load_run_artifact(self, run: RunIdentifier, loader: Callable[[Path], A], path_in_run: str = "") -> A: - """Load run artifact.""" + """Load run artifact. + + Triggers a cache pull on a cache miss or if :attr:`always_pull_artifacts`. + + :param run: Run ID or object. + :param loader: Loader callback. 
+ :param path_in_run: Plain relative path inside run artifacts (e.g.: `a/b/c`) + """ return loader(self._get_run_artifact(self._coerce_run_id(run), path_in_run)) @pydantic.validate_arguments def load_model_artifact(self, model_version: ModelVersionIdentifier, loader: Callable[[Path], A]) -> A: - """Load model artifact.""" + """Load model version artifact. + + Triggers a cache pull on a cache miss or if :attr:`always_pull_artifacts`. + + :param model_version: Model version object or `(name, version)` tuple. + :param loader: Loader callback. + """ mv = self._coerce_mv(model_version) logger.info("Loading model: %s v%s", mv.model.name, mv.version) return self.load_run_artifact(mv.run, loader, mv.path_in_run) @@ -831,7 +915,33 @@ def log_run_artifact( allow_duplication: bool | None = None, use_cache: bool | None = None, ): - """Publish artifact file or dir to experiment run.""" + """Publish artifact file or dir to experiment run. + + The flags :paramref:`keep_the_source`, :paramref:`allow_duplication` and :paramref:`use_cache` are + experimental and may conflict with one another. It is recommended to leave them unspecified, so this + method will do a best-effort to use cache if it makes sense to, keep the source files if it makes + sense to (possibly as a symbolic link) and avoid duplicated disk usage when possible. + + :param run: | Run ID or object. + + :param source: | Path to artifact file or dir, or a dumper callback. + | If it's a callback and the upload is interrupted, the temporary artifact is kept. + + :param path_in_run: Plain relative path inside run artifacts (e.g.: `a/b/c`) + + - If `source` is a `Path`: Defaults to file or dir name. + - If `source` is a callback: No default available. + + :param keep_the_source: + - If `source` is a `Path`: Keep that file or dir (defaults to `True`). + - If `source` is a callback: Keep the temporary artifact, even after a successful upload (defaults to `False`). 
+ + :param allow_duplication: | If `False`, a `source` file or dir may be replaced with a symbolic link to the local cache in order to avoid duplicated disk usage. + | Defaults to `True` if :paramref:`keep_the_source` is `True` and the run artifacts repo is local. + + :param use_cache: | If `True`, keep artifact in local cache after publishing. + | Defaults to `True` if the run artifacts repo is remote. + """ tmp = None if using_dumper := callable(source): @@ -925,7 +1035,31 @@ def log_model_version( version: str | None = None, tags: Mapping | None = None, ) -> ModelVersionApi: - """Publish artifact file or dir as model version inside the specified experiment run.""" + """Publish artifact file or dir as model version inside the specified experiment run. + + :param model: | Model name or object. + + :param run: | Run ID or object. + + :param source: | See :paramref:`log_run_artifact.source` + + :param path_in_run: | Plain relative path inside run artifacts (e.g.: `a/b/c`). + | Defaults to model name. + + :param keep_the_source: | See :paramref:`log_run_artifact.keep_the_source` + + :param allow_duplication: | See :paramref:`log_run_artifact.allow_duplication` + + :param use_cache: | See :paramref:`log_run_artifact.use_cache` + + :param version: | Arbitrary model version + | (not supported by all backends). + + :param tags: | Model version tags. + | See :class:`schema.ModelVersion.tags` + + :return: New model version metadata with API handle. + """ logger.info("Logging version of model '%s'", model_name := self._coerce_model_name(model)) path_in_run = path_in_run or patterns.encode_model_name(model_name) self.log_run_artifact(run, source, path_in_run, keep_the_source, allow_duplication, use_cache) @@ -933,115 +1067,191 @@ def log_model_version( @pydantic.validate_arguments def get_exp_url(self, exp: ExpIdentifier) -> str: - """Get Experiment URL.""" + """Get Experiment URL. + + :param exp: Exp ID or object. 
+ """ return str(self._impl_get_exp_url(self._coerce_exp_id(exp))) @pydantic.validate_arguments def get_run_url(self, run: RunIdentifier, exp: ExpIdentifier | None = None) -> str: - """Get Run URL.""" + """Get Run URL. + + :param run: Run ID or object. + :param exp: Exp ID or object. + + Caveats: + - :paramref:`exp` must be specified on :attr:`~BaseMlflowApi.offline_mode` + if :paramref:`run` is an ID and the run metadata is not in cache. + """ exp = self._coerce_run(run).exp if exp is None else exp return str(self._impl_get_run_url(self._coerce_run_id(run), self._coerce_exp_id(exp))) @pydantic.validate_arguments def get_model_url(self, model: ModelIdentifier) -> str: - """Get URL to registered model.""" + """Get URL to registered model. + + :param model: Model name or object. + """ return str(self._impl_get_model_url(self._coerce_model_name(model))) @pydantic.validate_arguments def get_model_version_url(self, model_version: ModelVersionIdentifier) -> str: - """Get model version URL.""" + """Get model version URL. + + :param model_version: Model version object or `(name, version)` tuple. + """ return str(self._impl_get_mv_url(*self._coerce_mv_tuple(model_version))) @pydantic.validate_arguments def get_exp(self, exp: ExpIdentifier, **cache_opts: bool) -> ExpApi: - """Get Experiment API by ID.""" + """Get Experiment API by ID. + + :param exp: Exp ID or object. + """ return ExpApi(**self._get_exp(self._coerce_exp_id(exp), **cache_opts)).using(self) @pydantic.validate_arguments def get_run(self, run: RunIdentifier, **cache_opts: bool) -> RunApi: - """Get Run API by ID.""" + """Get Run API by ID. + + :param run: Run ID or object. + """ return RunApi(**self._get_run(self._coerce_run_id(run), **cache_opts)).using(self) @pydantic.validate_arguments def get_model(self, model: ModelIdentifier, **cache_opts: bool) -> ModelApi: - """Get Model API by name.""" + """Get Model API by name. + + :param model: Model name or object. 
+ """ return ModelApi(**self._get_model(self._coerce_model_name(model), **cache_opts)).using(self) @pydantic.validate_arguments def get_model_version(self, model_version: ModelVersionIdentifier, **cache_opts: bool) -> ModelVersionApi: - """Get ModelVersion API by name and version.""" + """Get ModelVersion API by name and version. + + :param model_version: Model version object or `(name, version)` tuple. + """ return ModelVersionApi(**self._get_mv(self._coerce_mv_tuple(model_version), **cache_opts)).using(self) @pydantic.validate_arguments def find_exps(self, query: mongo.Query | None = None, sorting: mongo.Sorting | None = None) -> Iterator[ExpApi]: - """Search experiments with query in MongoDB query language.""" + """Search experiments with query in MongoDB query language. + + :param query: Query in MongoDB query language. + :param sorting: Sorting criteria (e.g.: `[("asc_field", 1), ("desc_field", -1)]`). + """ return (ExpApi(**x).using(self) for x in self._find_experiments(query or {}, sorting or [])) @pydantic.validate_arguments def find_runs(self, query: mongo.Query | None = None, sorting: mongo.Sorting | None = None) -> Iterator[RunApi]: - """Search runs with query in MongoDB query language.""" + """Search runs with query in MongoDB query language. + + :param query: Query in MongoDB query language. + :param sorting: Sorting criteria (e.g.: `[("asc_field", 1), ("desc_field", -1)]`). + """ return (RunApi(**x).using(self) for x in self._find_runs(query or {}, sorting or [])) @pydantic.validate_arguments def find_models(self, query: mongo.Query | None = None, sorting: mongo.Sorting | None = None) -> Iterator[ModelApi]: - """Search registered models with query in MongoDB query language.""" + """Search registered models with query in MongoDB query language. + + :param query: Query in MongoDB query language. + :param sorting: Sorting criteria (e.g.: `[("asc_field", 1), ("desc_field", -1)]`). 
+ """ return (ModelApi(**x).using(self) for x in self._find_models(query or {}, sorting or [])) @pydantic.validate_arguments def find_model_versions( self, query: mongo.Query | None = None, sorting: mongo.Sorting | None = None ) -> Iterator[ModelVersionApi]: - """Search model versions with query in MongoDB query language.""" + """Search model versions with query in MongoDB query language. + + :param query: Query in MongoDB query language. + :param sorting: Sorting criteria (e.g.: `[("asc_field", 1), ("desc_field", -1)]`). + """ return (ModelVersionApi(**x).using(self) for x in self._find_mv(query or {}, sorting or [])) @pydantic.validate_arguments def find_child_runs(self, parent: RunIdentifier) -> Iterator[RunApi]: - """Find child runs.""" + """Find child runs. + + :param parent: Run ID or object. + """ return (RunApi(**x).using(self) for x in self._impl_find_child_runs(self._coerce_run(parent))) @pydantic.validate_arguments def cache_exp_meta(self, exp: ExpIdentifier) -> ExpApi: - """Get latest Experiment metadata and save to local cache.""" + """Get latest Experiment metadata and save to local cache. + + :param exp: Experiment ID or object. + """ return self.get_exp(exp, force_cache_refresh=True) @pydantic.validate_arguments def cache_run_meta(self, run: RunIdentifier) -> RunApi: - """Get latest Run metadata and save to local cache.""" + """Get latest Run metadata and save to local cache. + + :param run: Run ID or object. + """ return self.get_run(run, force_cache_refresh=True) @pydantic.validate_arguments def cache_model_meta(self, model: ModelIdentifier) -> ModelApi: - """Get latest Model metadata and save to local cache.""" + """Get latest Model metadata and save to local cache. + + :param model: Model name or object. 
+ """ return self.get_model(model, force_cache_refresh=True) @pydantic.validate_arguments def cache_model_version_meta(self, model_version: ModelVersionIdentifier) -> ModelVersionApi: - """Get latest model version metadata and save to local cache.""" + """Get latest model version metadata and save to local cache. + + :param model_version: Model version object or `(name, version)` tuple. + """ return self.get_model_version(model_version, force_cache_refresh=True) @pydantic.validate_arguments def export_exp_meta(self, exp: ExpIdentifier, target: Path) -> ExpApi: - """Export experiment metadata cache to target.""" + """Export experiment metadata cache to target. + + :param exp: Experiment ID or object. + :param target: Cache export path. + """ self._export_meta(exp := self.get_exp(id_ := self._coerce_exp_id(exp)), self._get_exp_cache(id_), target) return exp @pydantic.validate_arguments def export_run_meta(self, run: RunIdentifier, target: Path) -> RunApi: - """Export run metadata cache to target.""" + """Export run metadata cache to target. + + :param run: Run ID or object. + :param target: Cache export path. + """ self._export_meta(run := self.get_run(id_ := self._coerce_run_id(run)), self._get_run_cache(id_), target) self.export_exp_meta(run.exp, target) return run @pydantic.validate_arguments def export_model_meta(self, model: ModelIdentifier, target: Path) -> ModelApi: - """Export model metadata cache to target.""" + """Export model metadata cache to target. + + :param model: Model name or object. + :param target: Cache export path. + """ name = self._coerce_model_name(model) self._export_meta(model := self.get_model(name), self._get_model_cache(name), target) return model @pydantic.validate_arguments def export_model_version_meta(self, mv: ModelVersionIdentifier, target: Path) -> ModelVersionApi: - """Export model version metadata cache to target.""" + """Export model version metadata cache to target. 
+ + :param mv: Model version object or `(name, version)` tuple. + :param target: Cache export path. + """ tup = self._coerce_mv_tuple(mv) self._export_meta(mv := self.get_model_version(tup), self._get_mv_cache(*tup), target) self.export_model_meta(mv.model, target) @@ -1049,12 +1259,19 @@ def export_model_version_meta(self, mv: ModelVersionIdentifier, target: Path) -> @pydantic.validate_arguments def create_exp(self, name: str, tags: Mapping | None = None) -> ExpApi: - """Create Experiment and return its API.""" + """Create Experiment and return its API. + + :param name: See :attr:`schema.Experiment.name`. + :param tags: See :attr:`schema.Experiment.tags`. + """ return ExpApi(**self._create_exp(name, tags or {})).using(self) @pydantic.validate_arguments def get_or_create_exp(self, name: str) -> ExpApi: - """Get or create Experiment and return its API.""" + """Get or create Experiment and return its API. + + :param name: See :attr:`schema.Experiment.name`. + """ for exp in self._find_experiments({"name": name}, []): break else: @@ -1064,12 +1281,19 @@ def get_or_create_exp(self, name: str) -> ExpApi: @pydantic.validate_arguments def create_model(self, name: str, tags: Mapping | None = None) -> ModelApi: - """Create registered model and return its API.""" + """Create registered model and return its API. + + :param name: See :attr:`schema.Model.name`. + :param tags: See :attr:`schema.Model.tags`. + """ return ModelApi(**self._create_model(name, tags or {})).using(self) @pydantic.validate_arguments def get_or_create_model(self, name: str) -> ModelApi: - """Get or create registered Model and return its API.""" + """Get or create registered Model and return its API. + + :param name: See :attr:`schema.Model.name`. 
+ """ for model in self._find_models({"name": name}, []): break else: @@ -1086,7 +1310,14 @@ def create_run( repo: str | urls.Url | None = None, parent: RunIdentifier | None = None, ) -> RunApi: - """Declare a new experiment run to be used later.""" + """Declare a new experiment run to be used later. + + :param exp: Experiment ID or object. + :param name: See :attr:`schema.Run.name`. + :param tags: See :attr:`schema.Run.tags`. + :param repo: (Experimental) Cloud storage URL to be used as alternative run artifacts repository. + :param parent: Parent run ID or object. + """ return RunApi(**self._create_run(exp, name, repo, tags or {}, schema.RunStatus.SCHEDULED, parent)).using(self) @pydantic.validate_arguments @@ -1098,18 +1329,32 @@ def start_run( repo: str | urls.Url | None = None, parent: RunIdentifier | None = None, ) -> RunApi: - """Start a new experiment run.""" + """Start a new experiment run. + + :param exp: Experiment ID or object. + :param name: See :attr:`schema.Run.name`. + :param tags: See :attr:`schema.Run.tags`. + :param repo: (Experimental) Cloud storage URL to be used as alternative run artifacts repository. + :param parent: Parent run ID or object. + """ return RunApi(**self._create_run(exp, name, repo, tags or {}, schema.RunStatus.RUNNING, parent)).using(self) @pydantic.validate_arguments def resume_run(self, run: RunIdentifier) -> RunApi: - """Resume a previous experiment run.""" + """Resume a previous experiment run. + + :param run: Run ID or object. + """ self._set_run_status(run_id := self._coerce_run_id(run), schema.RunStatus.RUNNING) return self.get_run(run_id) @pydantic.validate_arguments def end_run(self, run: RunIdentifier, succeeded: bool = True) -> RunApi: - """End experiment run.""" + """End experiment run. + + :param run: Run ID or object. + :param succeeded: Whether the run was successful. 
+ """ status = schema.RunStatus.FINISHED if succeeded else schema.RunStatus.FAILED self._set_run_status(run_id := self._coerce_run_id(run), status) self._set_run_end_time(run_id, datetime.now()) @@ -1117,30 +1362,54 @@ def end_run(self, run: RunIdentifier, succeeded: bool = True) -> RunApi: @pydantic.validate_arguments def set_tags_on_exp(self, exp: ExpIdentifier, tags: Mapping): - """Set tags on experiment.""" + """Set tags on experiment. + + :param exp: Experiment ID or object. + :param tags: See :attr:`schema.Experiment.tags`. + """ self._update_exp_tags(exp, tags) @pydantic.validate_arguments def set_tags_on_run(self, run: RunIdentifier, tags: Mapping): - """Set tags on experiment run.""" + """Set tags on experiment run. + + :param run: Run ID or object. + :param tags: See :attr:`schema.Run.tags`. + """ self._update_run_tags(run, tags) @pydantic.validate_arguments def set_tags_on_model(self, model: ModelIdentifier, tags: Mapping): - """Set tags on registered model.""" + """Set tags on registered model. + + :param model: Model name or object. + :param tags: See :attr:`schema.Model.tags`. + """ self._update_model_tags(model, tags) @pydantic.validate_arguments def set_tags_on_model_version(self, model_version: ModelVersionIdentifier, tags: Mapping): - """Set tags on model version.""" + """Set tags on model version. + + :param model_version: Model version object or `(name, version)` tuple. + :param tags: See :attr:`schema.Model.tags`. + """ self._update_mv_tags(model_version, tags) @pydantic.validate_arguments def log_params(self, run: RunIdentifier, params: Mapping): - """Log params to experiment run.""" + """Log params to experiment run. + + :param run: Run ID or object. + :param params: See :attr:`schema.Run.params`. + """ self._log_run_params(run, params) @pydantic.validate_arguments def log_metrics(self, run: RunIdentifier, metrics: Mapping): - """Log metrics to experiment run.""" + """Log metrics to experiment run. + + :param run: Run ID or object. 
+ :param metrics: See :attr:`schema.Run.metrics`. + """ self._log_run_metrics(run, metrics) diff --git a/src/mlopus/mlflow/api/contract.py b/src/mlopus/mlflow/api/contract.py index 9a7844a..b0df077 100644 --- a/src/mlopus/mlflow/api/contract.py +++ b/src/mlopus/mlflow/api/contract.py @@ -44,7 +44,7 @@ def list_run_artifacts(self, run: RunIdentifier, path_in_run: str = "") -> trans """List run artifacts in repo.""" @abstractmethod - def list_model_artifact(self, model_version: ModelVersionIdentifier) -> transfer.LsResult: + def list_model_artifact(self, model_version: ModelVersionIdentifier, path_suffix: str = "") -> transfer.LsResult: """List model version artifacts in repo.""" @abstractmethod diff --git a/src/mlopus/mlflow/api/exp.py b/src/mlopus/mlflow/api/exp.py index db8f482..0790440 100644 --- a/src/mlopus/mlflow/api/exp.py +++ b/src/mlopus/mlflow/api/exp.py @@ -24,7 +24,11 @@ def url(self) -> str: @pydantic.validate_arguments def find_runs(self, query: mongo.Query | None = None, sorting: mongo.Sorting | None = None) -> Iterator[RunApi]: - """Search runs belonging to this experiment with query in MongoDB query language.""" + """Search runs belonging to this experiment with query in MongoDB query language. + + :param query: Query in MongoDB query language. + :param sorting: Sorting criteria (e.g.: `[("asc_field", 1), ("desc_field", -1)]`). + """ results = self.api.find_runs(dicts.set_reserved_key(query, key="exp.id", val=self.id), sorting) return typing.cast(Iterator[RunApi], results) @@ -33,7 +37,10 @@ def cache_meta(self) -> "ExpApi": return self._use_values_from(self.api.cache_exp_meta(self)) def export_meta(self, target: Path) -> "ExpApi": - """Export metadata cache for this experiment.""" + """Export experiment metadata cache to target. + + :param target: Cache export path. 
+ """ return self._use_values_from(self.api.export_exp_meta(self, target)) @pydantic.validate_arguments @@ -44,7 +51,13 @@ def create_run( repo: str | urls.Url | None = None, parent: RunIdentifier | None = None, ) -> RunApi: - """Declare a new run in this experiment to be used later.""" + """Declare a new run in this experiment to be used later. + + :param name: See :attr:`schema.Run.name`. + :param tags: See :attr:`schema.Run.tags`. + :param repo: See :paramref:`~mlopus.mlflow.BaseMlflowApi.create_run.repo`. + :param parent: Parent run ID or object. + """ return typing.cast(RunApi, self.api.create_run(self, name, tags, repo, parent)) @pydantic.validate_arguments @@ -55,11 +68,20 @@ def start_run( repo: str | urls.Url | None = None, parent: RunIdentifier | None = None, ) -> RunApi: - """Start a new run in this experiment.""" + """Start a new run in this experiment. + + :param name: See :attr:`schema.Run.name`. + :param tags: See :attr:`schema.Run.tags`. + :param repo: See :paramref:`~mlopus.mlflow.BaseMlflowApi.start_run.repo`. + :param parent: Parent run ID or object. + """ return typing.cast(RunApi, self.api.start_run(self, name, tags, repo, parent)) @decorators.require_update def set_tags(self, tags: Mapping) -> "ExpApi": - """Set tags on this experiment.""" + """Set tags on this experiment. + + :param tags: See :attr:`schema.Experiment.tags`. + """ self.api.set_tags_on_exp(self, tags) return self diff --git a/src/mlopus/mlflow/api/model.py b/src/mlopus/mlflow/api/model.py index fb39001..8f35359 100644 --- a/src/mlopus/mlflow/api/model.py +++ b/src/mlopus/mlflow/api/model.py @@ -29,25 +29,38 @@ def cache_meta(self) -> "ModelApi": return self._use_values_from(self.api.cache_model_meta(self)) def export_meta(self, target: Path) -> "ModelApi": - """Export metadata cache for this model.""" + """Export model metadata cache to target. + + :param target: Cache export path. 
+ """ return self._use_values_from(self.api.export_model_meta(self, target)) @decorators.require_update def set_tags(self, tags: Mapping) -> "ModelApi": - """Set tags on this model.""" + """Set tags on this model. + + :param tags: See :attr:`schema.Model.tags`. + """ self.api.set_tags_on_model(self, tags) return self @pydantic.validate_arguments def get_version(self, version: str) -> ModelVersionApi: - """Get metadata of specified version of this model.""" + """Get ModelVersion API by version identifier. + + :param version: Version identifier. + """ return typing.cast(ModelVersionApi, self.api.get_model_version((self.name, version))) @pydantic.validate_arguments def find_versions( self, query: mongo.Query | None = None, sorting: mongo.Sorting | None = None ) -> Iterator[ModelVersionApi]: - """Search versions of this model with query in MongoDB query language.""" + """Search versions of this model with query in MongoDB query language. + + :param query: Query in MongoDB query language. + :param sorting: Sorting criteria (e.g.: `[("asc_field", 1), ("desc_field", -1)]`). + """ results = self.api.find_model_versions(dicts.set_reserved_key(query, key="model.name", val=self.name), sorting) return typing.cast(Iterator[ModelVersionApi], results) @@ -63,7 +76,27 @@ def log_version( version: str | None = None, tags: Mapping | None = None, ) -> ModelVersionApi: - """Publish artifact file or dir as version of this model inside the specified experiment run.""" + """Publish artifact file or dir as model version inside the specified experiment run. + + :param run: | Run ID or object. 
+ + :param source: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.source` + + :param path_in_run: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_model_version.path_in_run` + + :param keep_the_source: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.keep_the_source` + + :param allow_duplication: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.allow_duplication` + + :param use_cache: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.use_cache` + + :param version: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_model_version.version` + + :param tags: | Model version tags. + | See :class:`schema.ModelVersion.tags` + + :return: New model version metadata with API handle. + """ from .run import RunApi mv = self.api.log_model_version( diff --git a/src/mlopus/mlflow/api/mv.py b/src/mlopus/mlflow/api/mv.py index 2b10739..fb6c9b8 100644 --- a/src/mlopus/mlflow/api/mv.py +++ b/src/mlopus/mlflow/api/mv.py @@ -34,13 +34,22 @@ def url(self) -> str: """Get model version URL.""" return self.api.get_model_version_url(self) + @pydantic.validate_arguments + def clean_cached_artifact(self) -> "ModelVersionApi": + """Clean cached artifact for this model version.""" + self.api.clean_cached_model_artifact(self) + return self + def cache(self): """Cache metadata and artifact for this model version.""" self.cache_meta() self.cache_artifact() def export(self, target: Path): - """Export metadata and artifact cache of this model version to target path.""" + """Export metadata and artifact cache of this model version to target path. + + :param target: Cache export path. + """ self.export_meta(target) self.export_artifact(target) @@ -49,12 +58,18 @@ def cache_meta(self) -> "ModelVersionApi": return self._use_values_from(self.api.cache_model_version_meta(self)) def export_meta(self, target: Path) -> "ModelVersionApi": - """Export metadata cache for this model version.""" + """Export model version metadata cache to target. 
+ + :param target: Cache export path. + """ return self._use_values_from(self.api.export_model_version_meta(self, target)) @decorators.require_update def set_tags(self, tags: Mapping) -> "ModelVersionApi": - """Set tags on this model version.""" + """Set tags on this model version. + + :param tags: See :attr:`schema.ModelVersion.tags`. + """ self.api.set_tags_on_model_version(self, tags) return self @@ -64,24 +79,51 @@ def cache_artifact(self) -> Path: @pydantic.validate_arguments def export_artifact(self, target: Path) -> Path: - """Export model version artifact cache to target path.""" + """Export model version artifact cache to target. + + See also: + - :meth:`mlopus.mlflow.BaseMlflowApi.export_model_artifact` + + :param target: Cache export path. + """ return self.api.export_model_artifact(self, target) @pydantic.validate_arguments def list_artifacts(self, path_suffix: str = "") -> transfer.LsResult: - """List artifacts in this model version.""" - return self.api.list_run_artifacts(self.run, self.path_in_run + "/" + path_suffix.strip("/")) + """List artifacts in this model version. + + :param path_suffix: Plain relative path inside model artifact dir (e.g.: `a/b/c`). + """ + return self.api.list_model_artifact(self, path_suffix) def get_artifact(self) -> Path: - """Get local path to artifact of this model version.""" + """Get local path to model artifact. + + See also: + - :meth:`mlopus.mlflow.BaseMlflowApi.get_model_artifact` + """ return self.api.get_model_artifact(self) @pydantic.validate_arguments def place_artifact(self, target: Path, overwrite: bool = False, link: bool = True): - """Place artifact of this model version on target path.""" + """Place model version artifact on target path. + + See also: + - :meth:`mlopus.mlflow.BaseMlflowApi.place_model_artifact` + + :param target: Target path. + :param overwrite: Overwrite target path if exists. + :param link: Use symbolic link instead of copy.
+ """ self.api.place_model_artifact(self, target, overwrite, link) @pydantic.validate_arguments def load_artifact(self, loader: Callable[[Path], A]) -> A: - """Load artifact of this model version.""" + """Load model version artifact. + + See also: + - :meth:`mlopus.mlflow.BaseMlflowApi.load_model_artifact` + + :param loader: Loader callback. + """ return self.api.load_model_artifact(self, loader) diff --git a/src/mlopus/mlflow/api/run.py b/src/mlopus/mlflow/api/run.py index a160760..063d25f 100644 --- a/src/mlopus/mlflow/api/run.py +++ b/src/mlopus/mlflow/api/run.py @@ -11,6 +11,8 @@ M = schema.ModelVersion +ModelIdentifier = contract.ModelIdentifier + class RunApi(schema.Run, entity.EntityApi): """Run metadata with MLflow API handle.""" @@ -46,41 +48,79 @@ def url(self) -> str: @pydantic.validate_arguments def clean_cached_artifact(self, path_in_run: str = "") -> "RunApi": - """Clean cached artifact for this run.""" + """Clean cached artifact for this run. + + :param path_in_run: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.clean_cached_run_artifact.path_in_run` + """ self.api.clean_cached_run_artifact(self, path_in_run) return self @pydantic.validate_arguments def list_artifacts(self, path_in_run: str = "") -> transfer.LsResult: - """List artifacts in this run's artifact repo.""" + """List artifacts in this run's artifact repo. + + :param path_in_run: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.list_run_artifacts.path_in_run` + """ return self.api.list_run_artifacts(self, path_in_run) @pydantic.validate_arguments def cache_artifact(self, path_in_run: str = "") -> Path: - """Pull run artifact from MLflow server to local cache.""" + """Pull run artifact from MLflow server to local cache. 
+ + :param path_in_run: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.cache_run_artifact.path_in_run` + """ return self.api.cache_run_artifact(self, path_in_run) @pydantic.validate_arguments def export_artifact(self, target: Path, path_in_run: str = "") -> Path: - """Export run artifact cache to target path.""" + """Export run artifact cache to target. + + See also: + - :meth:`mlopus.mlflow.BaseMlflowApi.export_run_artifact` + + :param target: Cache export path. + :param path_in_run: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.export_run_artifact.path_in_run` + """ return self.api.export_run_artifact(self, target, path_in_run) @pydantic.validate_arguments def get_artifact(self, path_in_run: str = "") -> Path: - """Get local path to run artifact.""" + """Get local path to run artifact. + + See also: + - :meth:`mlopus.mlflow.BaseMlflowApi.get_run_artifact` + + :param path_in_run: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.get_run_artifact.path_in_run` + """ return self.api.get_run_artifact(self, path_in_run) @pydantic.validate_arguments def place_artifact( self, target: Path, path_in_run: str = "", overwrite: bool = False, link: bool = True ) -> "RunApi": - """Place run artifact on target path.""" + """Place run artifact on target path. + + See also: + - :meth:`mlopus.mlflow.BaseMlflowApi.place_run_artifact` + + :param target: Target path. + :param path_in_run: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.place_run_artifact.path_in_run` + :param overwrite: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.place_run_artifact.overwrite` + :param link: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.place_run_artifact.link` + """ self.api.place_run_artifact(self, target, path_in_run, overwrite, link) return self @pydantic.validate_arguments def load_artifact(self, loader: Callable[[Path], A], path_in_run: str = "") -> A: - """Load run artifact.""" + """Load run artifact. 
+ + See also: + - :meth:`mlopus.mlflow.BaseMlflowApi.load_run_artifact` + + :param loader: Loader callback. + :param path_in_run: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.load_run_artifact.path_in_run` + """ return self.api.load_run_artifact(self, loader, path_in_run) @pydantic.validate_arguments @@ -92,14 +132,24 @@ def log_artifact( allow_duplication: bool | None = None, use_cache: bool | None = None, ) -> "RunApi": - """Publish artifact file or dir to this experiment run.""" + """Publish artifact file or dir to this experiment run. + + See also: + - :meth:`mlopus.mlflow.BaseMlflowApi.log_run_artifact` + + :param source: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.source` + :param path_in_run: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.path_in_run` + :param keep_the_source: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.keep_the_source` + :param allow_duplication: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.allow_duplication` + :param use_cache: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.use_cache` + """ self.api.log_run_artifact(self, source, path_in_run, keep_the_source, allow_duplication, use_cache) return self @pydantic.validate_arguments def log_model_version( self, - name: str, + model: ModelIdentifier, source: Path | Callable[[Path], None], path_in_run: str | None = None, keep_the_source: bool | None = None, @@ -108,9 +158,29 @@ def log_model_version( version: str | None = None, tags: Mapping | None = None, ) -> ModelVersionApi: - """Publish artifact file or dir as model version inside this experiment run.""" + """Publish artifact file or dir as model version inside this experiment run. + + :param model: | Model name or object. 
+ + :param source: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.source` + + :param path_in_run: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_model_version.path_in_run` + + :param keep_the_source: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.keep_the_source` + + :param allow_duplication: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.allow_duplication` + + :param use_cache: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_run_artifact.use_cache` + + :param version: | See :paramref:`~mlopus.mlflow.BaseMlflowApi.log_model_version.version` + + :param tags: | Model version tags. + | See :attr:`schema.ModelVersion.tags` + + :return: New model version metadata with API handle. + """ mv = self.api.log_model_version( - name, self, source, path_in_run, keep_the_source, allow_duplication, use_cache, version, tags + model, self, source, path_in_run, keep_the_source, allow_duplication, use_cache, version, tags ) return typing.cast(ModelVersionApi, mv) @@ -118,7 +188,11 @@ def log_model_version( def find_model_versions( self, query: mongo.Query | None = None, sorting: mongo.Sorting | None = None ) -> Iterator[ModelVersionApi]: - """Search model versions belonging to this run with query in MongoDB query language.""" + """Search model versions belonging to this run with query in MongoDB query language. + + :param query: Query in MongoDB query language. + :param sorting: Sorting criteria (e.g.: `[("asc_field", 1), ("desc_field", -1)]`). + """ results = self.api.find_model_versions(dicts.set_reserved_key(query, key="run.id", val=self.id), sorting) return typing.cast(Iterator[ModelVersionApi], results) @@ -127,7 +201,10 @@ def log_model_version( def cache_meta(self) -> "RunApi": return self._use_values_from(self.api.cache_run_meta(self)) def export_meta(self, target: Path) -> "RunApi": - """Export metadata cache for this run.""" + """Export metadata cache for this run. + + :param target: Cache export path.
+ """ return self._use_values_from(self.api.export_run_meta(self, target)) @pydantic.validate_arguments @@ -137,7 +214,12 @@ def create_child( tags: Mapping | None = None, repo: str | urls.Url | None = None, ) -> "RunApi": - """Declare a new child run to be used later.""" + """Declare a new child run to be used later. + + :param name: See :attr:`schema.Run.name`. + :param tags: See :attr:`schema.Run.tags`. + :param repo: See :paramref:`~mlopus.mlflow.BaseMlflowApi.create_run.repo`. + """ return typing.cast(RunApi, self.api.create_run(self.exp, name, tags, repo, self)) @pydantic.validate_arguments @@ -147,7 +229,12 @@ def start_child( tags: Mapping | None = None, repo: str | urls.Url | None = None, ) -> "RunApi": - """Start a new child run.""" + """Start a new child run. + + :param name: See :attr:`schema.Run.name`. + :param tags: See :attr:`schema.Run.tags`. + :param repo: See :paramref:`~mlopus.mlflow.BaseMlflowApi.create_run.repo`. + """ return typing.cast(RunApi, self.api.start_run(self.exp, name, tags, repo, self)) @property @@ -166,18 +253,27 @@ def end_run(self, succeeded: bool = True) -> "RunApi": @decorators.require_update def set_tags(self, tags: Mapping) -> "RunApi": - """Set tags on this run.""" + """Set tags on this run. + + :param tags: See :attr:`schema.Run.tags`. + """ self.api.set_tags_on_run(self, tags) return self @decorators.require_update def log_params(self, params: Mapping) -> "RunApi": - """Log params to this run.""" + """Log params to this run. + + :param params: See :attr:`schema.Run.params`. + """ self.api.log_params(self, params) return self @decorators.require_update def log_metrics(self, metrics: Mapping) -> "RunApi": - """Log metrics to this experiment run.""" + """Log metrics to this experiment run. + + :param metrics: See :attr:`schema.Run.metrics`. 
+ """ self.api.log_metrics(self, metrics) return self diff --git a/src/mlopus/mlflow/traits.py b/src/mlopus/mlflow/traits.py index 0cb3a58..e412ae9 100644 --- a/src/mlopus/mlflow/traits.py +++ b/src/mlopus/mlflow/traits.py @@ -31,7 +31,13 @@ class Foo(MlflowMixinApi): foo.mlflow_api # BaseMlflowApi """ - mlflow_api: BaseMlflowApi = pydantic.Field(exclude=True, default=None) + mlflow_api: BaseMlflowApi = pydantic.Field( + exclude=True, + default=None, + description=( + "Instance of :class:`BaseMlflowApi` or a `dict` of keyword arguments for :func:`mlopus.mlflow.get_api`." + ), + ) @pydantic.validator("mlflow_api", pre=True) # noqa @classmethod diff --git a/src/mlopus/mlflow/utils.py b/src/mlopus/mlflow/utils.py index b499044..1cd296f 100644 --- a/src/mlopus/mlflow/utils.py +++ b/src/mlopus/mlflow/utils.py @@ -3,42 +3,59 @@ from mlopus.utils import import_utils, dicts from .api.base import BaseMlflowApi -A = TypeVar("A", bound=BaseMlflowApi) +API = TypeVar("API", bound=BaseMlflowApi) +"""Type of :class:`BaseMlflowApi`""" PLUGIN_GROUP = "mlopus.mlflow_api_providers" def list_api_plugins() -> List[import_utils.EntryPoint]: - """Get list of all API plugins available in this environment.""" + """List all API plugins available in this environment.""" return import_utils.list_plugins(PLUGIN_GROUP) def get_api( plugin: str | None = None, - cls: Type[A] | str | None = None, + cls: Type[API] | str | None = None, conf: Dict[str, Any] | None = None, -) -> BaseMlflowApi | A: +) -> BaseMlflowApi | API: """Load MLflow API class or plugin with specified configuration. - The default API class is `mlopus.mlflow.providers.mlflow.MlflowApi`, - which manages communication with an open source MLflow server - (assuming no artifacts proxy and database is server-managed). + The default API class is :class:`~mlopus.mlflow.providers.mlflow.MlflowApi` + (registered under the plugin name `mlflow`). + + :param plugin: | Plugin name from group `mlopus.mlflow_api_providers`. 
+ | Incompatible with :paramref:`cls`. + + :param cls: | A type that implements :class:`BaseMlflowApi` or a fully qualified class name of such a type + (e.g.: `package.module:Class`). + | Incompatible with :paramref:`plugin`. + + :param conf: | A `dict` of keyword arguments for the resolved API class. + | See :func:`api_conf_schema` + + :return: API instance. """ return _get_api_cls(plugin, cls).parse_obj(conf or {}) def api_conf_schema( plugin: str | None = None, - cls: Type[A] | str | None = None, + cls: Type[API] | str | None = None, ) -> dicts.AnyDict: - """Get configuration schema for MLflow API class or plugin.""" + """Get configuration schema for MLflow API class or plugin. + + :param plugin: | See :paramref:`get_api.plugin`. + + :param cls: | See :paramref:`get_api.cls`. + """ return _get_api_cls(plugin, cls).schema() def _get_api_cls( plugin: str | None = None, - cls: Type[A] | str | None = None, -) -> Type[A]: + cls: Type[API] | str | None = None, +) -> Type[API]: assert None in (plugin, cls), "`plugin` and `cls` are mutually excluding." 
if isinstance(cls, str): diff --git a/uv.lock b/uv.lock index d7f48cd..8c43a19 100644 --- a/uv.lock +++ b/uv.lock @@ -296,11 +296,11 @@ wheels = [ [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c2/02/a95f2b11e207f68bc64d7aae9666fed2e2b3f307748d5123dffb72a1bbea/certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b", size = 164065 } +sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1c/d5/c84e1a17bf61d4df64ca866a1c9a913874b4e9bdc131ec689a0ad013fb36/certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90", size = 162960 }, + { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 }, ] [[package]] @@ -570,15 +570,15 @@ wheels = [ [[package]] name = "databricks-sdk" -version = "0.30.0" +version = "0.31.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-auth" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a5/fa/ad5f7e9862a409614e2d4999306219c7f6a8286c78eca1c54c7029bcf899/databricks_sdk-0.30.0.tar.gz", hash = "sha256:37c7a12939da09dbdcb6ceba4fcad5f484a63508366225f797429ae4ee557b21", size = 555634 } +sdist = { url = "https://files.pythonhosted.org/packages/64/a8/dbd21e5adab96552dc17898ea1fd0c379714dae57f6071918b4d61365bc7/databricks_sdk-0.31.1.tar.gz", hash = 
"sha256:8609e655d0e5ecb15c2a8a6468e737f8dcb4f28c33239388de3ab386b921d790", size = 562352 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ad/14/005d6904ffbf477e67913f6480b22522e583ae25160d1fce30d15c1dfa94/databricks_sdk-0.30.0-py3-none-any.whl", hash = "sha256:c3f954c73cdd703815acfa31a2e8442ee85aa3ca3ba8d52f05e85ebce29233d7", size = 538816 }, + { url = "https://files.pythonhosted.org/packages/e5/a2/c5b10237e7d12d63bbc12968b4dc8b7d80249588c8f1a626754dd7e5db52/databricks_sdk-0.31.1-py3-none-any.whl", hash = "sha256:9ab286f87ae1cc98a00ef7d207e40661f4d14a464071425ad169d235919b35f6", size = 545492 }, ] [[package]] @@ -690,15 +690,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e2/14/c8a7d861262139688fa465d2e27ff7113764d6fa03b15b9c7b666729ea2e/dynaconf-3.2.6-py2.py3-none-any.whl", hash = "sha256:3911c740d717df4576ed55f616c7cbad6e06bc8ef23ffca444b6e2a12fb1c34c", size = 231063 }, ] -[[package]] -name = "entrypoints" -version = "0.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ea/8d/a7121ffe5f402dc015277d2d31eb82d2187334503a011c18f2e78ecbb9b2/entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4", size = 13974 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/35/a8/365059bbcd4572cbc41de17fd5b682be5868b218c3c5479071865cab9078/entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f", size = 5294 }, -] - [[package]] name = "enum-tools" version = "0.12.0" @@ -730,11 +721,11 @@ wheels = [ [[package]] name = "executing" -version = "2.0.1" +version = "2.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/08/41/85d2d28466fca93737592b7f3cc456d1cfd6bcd401beceeba17e8e792b50/executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147", size = 836501 } +sdist = { 
url = "https://files.pythonhosted.org/packages/8c/e3/7d45f492c2c4a0e8e0fad57d081a7c8a0286cdd86372b070cca1ec0caa1e/executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab", size = 977485 } wheels = [ - { url = "https://files.pythonhosted.org/packages/80/03/6ea8b1b2a5ab40a7a60dc464d3daa7aa546e0a74d74a9f8ff551ea7905db/executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc", size = 24922 }, + { url = "https://files.pythonhosted.org/packages/b5/fd/afcd0496feca3276f509df3dbd5dae726fcc756f1a08d9e25abe1733f962/executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf", size = 25805 }, ] [[package]] @@ -848,7 +839,7 @@ wheels = [ [[package]] name = "httpx" -version = "0.27.0" +version = "0.27.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -857,9 +848,9 @@ dependencies = [ { name = "idna" }, { name = "sniffio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5c/2d/3da5bdf4408b8b2800061c339f240c1802f2e82d55e50bd39c5a881f47f0/httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5", size = 126413 } +sdist = { url = "https://files.pythonhosted.org/packages/78/82/08f8c936781f67d9e6b9eeb8a0c8b4e406136ea4c3d1f89a5db71d42e0e6/httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2", size = 144189 } wheels = [ - { url = "https://files.pythonhosted.org/packages/41/7b/ddacf6dcebb42466abd03f368782142baa82e08fc0c1f8eaa05b4bae87d5/httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5", size = 75590 }, + { url = "https://files.pythonhosted.org/packages/56/95/9377bcb415797e44274b51d46e3249eba641711cf3348050f76ee7b15ffc/httpx-0.27.2-py3-none-any.whl", hash = 
"sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0", size = 76395 }, ] [[package]] @@ -882,14 +873,14 @@ wheels = [ [[package]] name = "importlib-metadata" -version = "7.2.1" +version = "8.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "zipp" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/76/72/33d1bb4be61f1327d3cd76fc41e2d001a6b748a0648d944c646643f123fe/importlib_metadata-7.2.1.tar.gz", hash = "sha256:509ecb2ab77071db5137c655e24ceb3eee66e7bbc6574165d0d114d9fc4bbe68", size = 52834 } +sdist = { url = "https://files.pythonhosted.org/packages/c0/bd/fa8ce65b0a7d4b6d143ec23b0f5fd3f7ab80121078c465bc02baeaab22dc/importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5", size = 54320 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/28/7daa5f782f5e2cbbec00556bf23ca106023470ebab3ae1040ee778269af1/importlib_metadata-7.2.1-py3-none-any.whl", hash = "sha256:ffef94b0b66046dd8ea2d619b701fe978d9264d38f3998bc4c27ec3b146a87c8", size = 25037 }, + { url = "https://files.pythonhosted.org/packages/c0/14/362d31bf1076b21e1bcdcb0dc61944822ff263937b804a79231df2774d28/importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1", size = 26269 }, ] [[package]] @@ -936,7 +927,7 @@ wheels = [ [[package]] name = "ipython" -version = "8.26.0" +version = "8.27.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -951,9 +942,9 @@ dependencies = [ { name = "traitlets" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7e/f4/dc45805e5c3e327a626139c023b296bafa4537e602a61055d377704ca54c/ipython-8.26.0.tar.gz", hash = "sha256:1cec0fbba8404af13facebe83d04436a7434c7400e59f47acf467c64abd0956c", size = 5493422 } +sdist = { url = 
"https://files.pythonhosted.org/packages/57/24/d4fabaca03c8804bf0b8d994c8ae3a20e57e9330d277fb43d83e558dec5e/ipython-8.27.0.tar.gz", hash = "sha256:0b99a2dc9f15fd68692e898e5568725c6d49c527d36a9fb5960ffbdeaa82ff7e", size = 5494984 } wheels = [ - { url = "https://files.pythonhosted.org/packages/73/48/4d2818054671bb272d1b12ca65748a4145dc602a463683b5c21b260becee/ipython-8.26.0-py3-none-any.whl", hash = "sha256:e6b347c27bdf9c32ee9d31ae85defc525755a1869f14057e900675b9e8d6e6ff", size = 817939 }, + { url = "https://files.pythonhosted.org/packages/a8/a2/6c725958e6f135d8e5de081e69841bb2c1d84b3fc259d02eb092b8fc203a/ipython-8.27.0-py3-none-any.whl", hash = "sha256:f68b3cb8bde357a5d7adc9598d57e22a45dfbea19eb6b98286fa3b288c9cd55c", size = 818986 }, ] [[package]] @@ -1067,19 +1058,19 @@ wheels = [ [[package]] name = "jupyter" -version = "1.0.0" +version = "1.1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ipykernel" }, { name = "ipywidgets" }, { name = "jupyter-console" }, + { name = "jupyterlab" }, { name = "nbconvert" }, { name = "notebook" }, - { name = "qtconsole" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c9/a9/371d0b8fe37dd231cf4b2cff0a9f0f25e98f3a73c3771742444be27f2944/jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f", size = 12916 } +sdist = { url = "https://files.pythonhosted.org/packages/58/f3/af28ea964ab8bc1e472dba2e82627d36d470c51f5cd38c37502eeffaa25e/jupyter-1.1.1.tar.gz", hash = "sha256:d55467bceabdea49d7e3624af7e33d59c37fff53ed3a350e1ac957bed731de7a", size = 5714959 } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/df/0f5dd132200728a86190397e1ea87cd76244e42d39ec5e88efd25b2abd7e/jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78", size = 2736 }, + { url = 
"https://files.pythonhosted.org/packages/38/64/285f20a31679bf547b75602702f7800e74dbabae36ef324f716c02804753/jupyter-1.1.1-py2.py3-none-any.whl", hash = "sha256:7a59533c22af65439b24bbe60373a4e95af8f16ac65a6c00820ad378e3f7cc83", size = 2657 }, ] [[package]] @@ -1206,7 +1197,7 @@ wheels = [ [[package]] name = "jupyterlab" -version = "4.2.4" +version = "4.2.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "async-lru" }, @@ -1224,9 +1215,9 @@ dependencies = [ { name = "tornado" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/75/5f/32cc1280b7608f28ba76db1956672950ece63d7631b38a5677a35af5897e/jupyterlab-4.2.4.tar.gz", hash = "sha256:343a979fb9582fd08c8511823e320703281cd072a0049bcdafdc7afeda7f2537", size = 21506914 } +sdist = { url = "https://files.pythonhosted.org/packages/4a/78/ba006df6edaa561fe40be26c35e9da3f9316f071167cd7cc1a1a25bd2664/jupyterlab-4.2.5.tar.gz", hash = "sha256:ae7f3a1b8cb88b4f55009ce79fa7c06f99d70cd63601ee4aa91815d054f46f75", size = 21508698 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/b2/ba6fba3f52f785ba9740a7954e0d4477828f7395ee9f2f4707db5835a833/jupyterlab-4.2.4-py3-none-any.whl", hash = "sha256:807a7ec73637744f879e112060d4b9d9ebe028033b7a429b2d1f4fc523d00245", size = 11641369 }, + { url = "https://files.pythonhosted.org/packages/fd/3f/24a0f0ce60959cfd9756a3291cd3a5581e51cbd6f7b4aa121f5bba5320e3/jupyterlab-4.2.5-py3-none-any.whl", hash = "sha256:73b6e0775d41a9fee7ee756c80f58a6bed4040869ccc21411dc559818874d321", size = 11641981 }, ] [[package]] @@ -1393,33 +1384,31 @@ wheels = [ [[package]] name = "mlflow-skinny" -version = "2.15.1" +version = "2.16.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cachetools" }, { name = "click" }, { name = "cloudpickle" }, { name = "databricks-sdk" }, - { name = "entrypoints" }, { name = "gitpython" }, { name = "importlib-metadata" }, { name = "opentelemetry-api" }, { name = "opentelemetry-sdk" }, 
{ name = "packaging" }, { name = "protobuf" }, - { name = "pytz" }, { name = "pyyaml" }, { name = "requests" }, { name = "sqlparse" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fa/dd/b0406075c04572fa934ef2115b2f3b5529b24b1c1dd335bac0fd5c3194c8/mlflow_skinny-2.15.1.tar.gz", hash = "sha256:302f49757ffc8bdfc517b06f5252a02634203fec5e5ce95ad876a36af8403907", size = 5171535 } +sdist = { url = "https://files.pythonhosted.org/packages/c6/c8/22a6eb1793e01ecc9fd4f494d92bea977e628629329c4e8cc2889f265a75/mlflow_skinny-2.16.0.tar.gz", hash = "sha256:9b823173063743783b4e7b6c52bdadcc7d9dab48eb883ac454c0d56609df6b2d", size = 5249386 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/a1/3812743e5dd83317d0469a46d737f0ab5c084fecfecc03a1ac8a7e7ec0d8/mlflow_skinny-2.15.1-py3-none-any.whl", hash = "sha256:a48c6f56106b104dc7221bad91af75a150b927d15210a41928cc8ecba086470a", size = 5497955 }, + { url = "https://files.pythonhosted.org/packages/2b/13/95c8ead52e2ec19b8130868b39bd6d72fa26b4321a0bb47cdc0ff9be6d06/mlflow_skinny-2.16.0-py3-none-any.whl", hash = "sha256:c55541f50efd0f6637377b10e8a654847a3fcd815b8680a95f02e0ca6bd7700c", size = 5581420 }, ] [[package]] name = "mlopus" -version = "0.2.1" +version = "0.3.0" source = { editable = "." 
} dependencies = [ { name = "dacite" }, @@ -1458,7 +1447,9 @@ dev = [ { name = "sphinx" }, { name = "sphinx-autodoc-typehints" }, { name = "sphinx-code-include" }, + { name = "sphinx-paramlinks" }, { name = "sphinx-rtd-theme" }, + { name = "sphinx-toolbox" }, { name = "tzlocal" }, ] @@ -1492,7 +1483,9 @@ dev = [ { name = "sphinx", specifier = "<7" }, { name = "sphinx-autodoc-typehints", specifier = "~=1.23" }, { name = "sphinx-code-include", specifier = "~=1.4" }, + { name = "sphinx-paramlinks", specifier = "~=0.6" }, { name = "sphinx-rtd-theme", specifier = "~=2.0" }, + { name = "sphinx-toolbox", specifier = ">=3.8.0" }, { name = "tzlocal", specifier = "~=5.2" }, ] @@ -1635,7 +1628,7 @@ wheels = [ [[package]] name = "notebook" -version = "7.2.1" +version = "7.2.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jupyter-server" }, @@ -1644,9 +1637,9 @@ dependencies = [ { name = "notebook-shim" }, { name = "tornado" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ee/dc/479d01290c6e8f27d0afd7f8f57d2292bd86afa8ab9565a3a326a6d38854/notebook-7.2.1.tar.gz", hash = "sha256:4287b6da59740b32173d01d641f763d292f49c30e7a51b89c46ba8473126341e", size = 4946671 } +sdist = { url = "https://files.pythonhosted.org/packages/0f/33/30b83c1c84e368087059bde1269549612584924db156bff53654e165a498/notebook-7.2.2.tar.gz", hash = "sha256:2ef07d4220421623ad3fe88118d687bc0450055570cdd160814a59cf3a1c516e", size = 4948876 } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/b4/b0cdaf52c35a3a40633136bee5152d6670acb555c698d23a3458dca65781/notebook-7.2.1-py3-none-any.whl", hash = "sha256:f45489a3995746f2195a137e0773e2130960b51c9ac3ce257dbc2705aab3a6ca", size = 5036402 }, + { url = "https://files.pythonhosted.org/packages/46/77/53732fbf48196af9e51c2a61833471021c1d77d335d57b96ee3588c0c53d/notebook-7.2.2-py3-none-any.whl", hash = "sha256:c89264081f671bc02eec0ed470a627ed791b9156cad9285226b31611d3e9fe1c", size = 5037123 }, ] [[package]] @@ -1663,61 
+1656,62 @@ wheels = [ [[package]] name = "numpy" -version = "2.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/a4/f8188c4f3e07f7737683588210c073478abcb542048cf4ab6fedad0b458a/numpy-2.1.0.tar.gz", hash = "sha256:7dc90da0081f7e1da49ec4e398ede6a8e9cc4f5ebe5f9e06b443ed889ee9aaa2", size = 18868922 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e4/6c/87c885569ebe002f9c5f5de8eda8a3622360143d61e6174610f67c695ad3/numpy-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6326ab99b52fafdcdeccf602d6286191a79fe2fda0ae90573c5814cd2b0bc1b8", size = 21149295 }, - { url = "https://files.pythonhosted.org/packages/0a/d6/8d9c9a94c44ae456dbfc5f2ef719aebab6cce38064b815e98efd4e4a4141/numpy-2.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0937e54c09f7a9a68da6889362ddd2ff584c02d015ec92672c099b61555f8911", size = 13756742 }, - { url = "https://files.pythonhosted.org/packages/ec/f5/1c7d0baa22edd3e51301c2fb74b61295c737ca254345f45d9211b2f3cb6b/numpy-2.1.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:30014b234f07b5fec20f4146f69e13cfb1e33ee9a18a1879a0142fbb00d47673", size = 5352245 }, - { url = "https://files.pythonhosted.org/packages/de/ea/3e277e9971af78479c5ef318cc477718f5b541b6d1529ae494700a90347b/numpy-2.1.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:899da829b362ade41e1e7eccad2cf274035e1cb36ba73034946fccd4afd8606b", size = 6885239 }, - { url = "https://files.pythonhosted.org/packages/5d/f4/30f3b75be994a390a366bb5284ac29217edd27a6e6749196ad08d366290d/numpy-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08801848a40aea24ce16c2ecde3b756f9ad756586fb2d13210939eb69b023f5b", size = 13975963 }, - { url = "https://files.pythonhosted.org/packages/f3/55/2921109f337368848375d8d987e267ba8d1a00d51d5915dc3bcca740d381/numpy-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:398049e237d1aae53d82a416dade04defed1a47f87d18d5bd615b6e7d7e41d1f", size = 16325024 }, - { url = "https://files.pythonhosted.org/packages/fc/d1/d2fe0a6edb2a19a0da37f10cfe63ee50eb22f0874986ffb44936081e6f3b/numpy-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0abb3916a35d9090088a748636b2c06dc9a6542f99cd476979fb156a18192b84", size = 16701102 }, - { url = "https://files.pythonhosted.org/packages/28/4a/018e83dd0fa5f32730b67ff0ac35207f13bee8b870f96aa33c496545b9e6/numpy-2.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10e2350aea18d04832319aac0f887d5fcec1b36abd485d14f173e3e900b83e33", size = 14474060 }, - { url = "https://files.pythonhosted.org/packages/33/94/e1c65ebb0caa410afdeb83ed44778f22b92bd70855285bb168df37022d8c/numpy-2.1.0-cp310-cp310-win32.whl", hash = "sha256:f6b26e6c3b98adb648243670fddc8cab6ae17473f9dc58c51574af3e64d61211", size = 6533851 }, - { url = "https://files.pythonhosted.org/packages/97/fc/961ce4fe1b3295b30ff85a0bc6da13302b870643ed9a79c034fb8469e333/numpy-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:f505264735ee074250a9c78247ee8618292091d9d1fcc023290e9ac67e8f1afa", size = 12863722 }, - { url = "https://files.pythonhosted.org/packages/3e/98/466ac2a77706699ca0141ea197e4f221d2b232051052f8f794a628a489ec/numpy-2.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:76368c788ccb4f4782cf9c842b316140142b4cbf22ff8db82724e82fe1205dce", size = 21153408 }, - { url = "https://files.pythonhosted.org/packages/d5/43/4ff735420b31cd454e4b3acdd0ba7570b453aede6fa16cf7a11cc8780d1b/numpy-2.1.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:f8e93a01a35be08d31ae33021e5268f157a2d60ebd643cfc15de6ab8e4722eb1", size = 5350253 }, - { url = "https://files.pythonhosted.org/packages/ec/a0/1c1b9d935d7196c4a847b76c8a8d012c986ddbc78ef159cc4c0393148062/numpy-2.1.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9523f8b46485db6939bd069b28b642fec86c30909cea90ef550373787f79530e", size = 6889274 }, - { url = 
"https://files.pythonhosted.org/packages/d0/d2/4838d8c3b7ac69947ffd686ba3376cb603ea3618305ae3b8547b821df218/numpy-2.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54139e0eb219f52f60656d163cbe67c31ede51d13236c950145473504fa208cb", size = 13982862 }, - { url = "https://files.pythonhosted.org/packages/7b/93/831b4c5b4355210827b3de34f539297e1833c39a68c26a8b454d8cf9f5ed/numpy-2.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5ebbf9fbdabed208d4ecd2e1dfd2c0741af2f876e7ae522c2537d404ca895c3", size = 16336222 }, - { url = "https://files.pythonhosted.org/packages/db/44/7d2f454309a620f1afdde44dffa469fece331b84e7a5bd2dba3f0f465489/numpy-2.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:378cb4f24c7d93066ee4103204f73ed046eb88f9ad5bb2275bb9fa0f6a02bd36", size = 16708990 }, - { url = "https://files.pythonhosted.org/packages/65/6b/46f69972a25e3b682b7a65cb525efa3650cd62e237180c2ecff7a6177173/numpy-2.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8f699a709120b220dfe173f79c73cb2a2cab2c0b88dd59d7b49407d032b8ebd", size = 14487554 }, - { url = "https://files.pythonhosted.org/packages/3f/bc/4b128b3ac152e64e3d117931167bc2289dab47204762ad65011b681d75e7/numpy-2.1.0-cp311-cp311-win32.whl", hash = "sha256:ffbd6faeb190aaf2b5e9024bac9622d2ee549b7ec89ef3a9373fa35313d44e0e", size = 6531834 }, - { url = "https://files.pythonhosted.org/packages/7b/5e/093592740805fe401ce49a627cc8a3f034dac62b34d68ab69db3c56bd662/numpy-2.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0af3a5987f59d9c529c022c8c2a64805b339b7ef506509fba7d0556649b9714b", size = 12869011 }, - { url = "https://files.pythonhosted.org/packages/eb/f5/a06a231cbeea4aff841ff744a12e4bf4d4407f2c753d13ce4563aa126c90/numpy-2.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fe76d75b345dc045acdbc006adcb197cc680754afd6c259de60d358d60c93736", size = 20882951 }, - { url = 
"https://files.pythonhosted.org/packages/70/1d/4ad38e3a1840f72c29595c06b103ecd9119f260e897ff7e88a74adb0ca14/numpy-2.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f358ea9e47eb3c2d6eba121ab512dfff38a88db719c38d1e67349af210bc7529", size = 13491878 }, - { url = "https://files.pythonhosted.org/packages/b4/3b/569055d01ed80634d6be6ceef8fb28eb0866e4f98c2d97667dcf9fae3e22/numpy-2.1.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:dd94ce596bda40a9618324547cfaaf6650b1a24f5390350142499aa4e34e53d1", size = 5087346 }, - { url = "https://files.pythonhosted.org/packages/24/37/212dd6fbd298c467b80d4d6217b2bc902b520e96a967b59f72603bf1142f/numpy-2.1.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:b47c551c6724960479cefd7353656498b86e7232429e3a41ab83be4da1b109e8", size = 6618269 }, - { url = "https://files.pythonhosted.org/packages/33/4d/435c143c06e16c8bfccbfd9af252b0a8ac7897e0c0e36e539d75a75e91b4/numpy-2.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0756a179afa766ad7cb6f036de622e8a8f16ffdd55aa31f296c870b5679d745", size = 13695244 }, - { url = "https://files.pythonhosted.org/packages/48/3e/bf807eb050abc23adc556f34fcf931ca2d67ad8dfc9c17fcd9332c01347f/numpy-2.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24003ba8ff22ea29a8c306e61d316ac74111cebf942afbf692df65509a05f111", size = 16040181 }, - { url = "https://files.pythonhosted.org/packages/cd/a9/40dc96b5d43076836d82d1e84a3a4a6a4c2925a53ec0b7f31271434ff02c/numpy-2.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b34fa5e3b5d6dc7e0a4243fa0f81367027cb6f4a7215a17852979634b5544ee0", size = 16407920 }, - { url = "https://files.pythonhosted.org/packages/cc/77/39e44cf0a6eb0f93b18ffb00f1964b2c471b1df5605aee486c221b06a8e4/numpy-2.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c4f982715e65036c34897eb598d64aef15150c447be2cfc6643ec7a11af06574", size = 14170943 }, - { url = 
"https://files.pythonhosted.org/packages/54/02/f0a3c2ec1622dc4346bd126e2578948c7192b3838c893a3d215738fb367b/numpy-2.1.0-cp312-cp312-win32.whl", hash = "sha256:c4cd94dfefbefec3f8b544f61286584292d740e6e9d4677769bc76b8f41deb02", size = 6235947 }, - { url = "https://files.pythonhosted.org/packages/8c/bf/d9d214a9dff020ad1663f1536f45d34e052e4c7f630c46cd363e785e3231/numpy-2.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0cdef204199278f5c461a0bed6ed2e052998276e6d8ab2963d5b5c39a0500bc", size = 12566546 }, - { url = "https://files.pythonhosted.org/packages/c3/16/6b536e1b67624178e3631a3fa60c9c1b5ee7cda2fa9492c4f2de01bfcb06/numpy-2.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8ab81ccd753859ab89e67199b9da62c543850f819993761c1e94a75a814ed667", size = 20833354 }, - { url = "https://files.pythonhosted.org/packages/52/87/130e95aa8a6383fc3de4fdaf7adc629289b79b88548fb6e35e9d924697d7/numpy-2.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:442596f01913656d579309edcd179a2a2f9977d9a14ff41d042475280fc7f34e", size = 13506169 }, - { url = "https://files.pythonhosted.org/packages/d9/c2/0fcf68c67681f9ad9d76156b4606f60b48748ead76d4ba19b90aecd4b626/numpy-2.1.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:848c6b5cad9898e4b9ef251b6f934fa34630371f2e916261070a4eb9092ffd33", size = 5072908 }, - { url = "https://files.pythonhosted.org/packages/72/40/e21bbbfae665ef5fa1dfd7eae1c5dc93ba9d3b36e39d2d38789dd8c22d56/numpy-2.1.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:54c6a63e9d81efe64bfb7bcb0ec64332a87d0b87575f6009c8ba67ea6374770b", size = 6604906 }, - { url = "https://files.pythonhosted.org/packages/0e/ce/848967516bf8dd4f769886a883a4852dbc62e9b63b1137d2b9900f595222/numpy-2.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:652e92fc409e278abdd61e9505649e3938f6d04ce7ef1953f2ec598a50e7c195", size = 13690864 }, - { url = 
"https://files.pythonhosted.org/packages/15/72/2cebe04758e1123f625ed3221cb3c48602175ad619dd9b47de69689b4656/numpy-2.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ab32eb9170bf8ffcbb14f11613f4a0b108d3ffee0832457c5d4808233ba8977", size = 16036272 }, - { url = "https://files.pythonhosted.org/packages/a7/b7/ae34ced7864b551e0ea01ce4e7acbe7ddf5946afb623dea39760b19bc8b0/numpy-2.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:8fb49a0ba4d8f41198ae2d52118b050fd34dace4b8f3fb0ee34e23eb4ae775b1", size = 16408978 }, - { url = "https://files.pythonhosted.org/packages/4d/22/c9d696b87c5ce25e857d7745fe4f090373a2daf8c26f5e15b32b5db7bff7/numpy-2.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:44e44973262dc3ae79e9063a1284a73e09d01b894b534a769732ccd46c28cc62", size = 14168398 }, - { url = "https://files.pythonhosted.org/packages/9e/8b/63f74dccf86d4832d593bdbe06544f4a0a1b7e18e86e0db1e8231bf47c49/numpy-2.1.0-cp313-cp313-win32.whl", hash = "sha256:ab83adc099ec62e044b1fbb3a05499fa1e99f6d53a1dde102b2d85eff66ed324", size = 6232743 }, - { url = "https://files.pythonhosted.org/packages/23/4b/e30a3132478c69df3e3e587fa87dcbf2660455daec92d8d52e7028a92554/numpy-2.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:de844aaa4815b78f6023832590d77da0e3b6805c644c33ce94a1e449f16d6ab5", size = 12560212 }, - { url = "https://files.pythonhosted.org/packages/5a/1b/40e881a3a272c4861de1e43a3e7ee1559988dd12187463726d3b395a8874/numpy-2.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:343e3e152bf5a087511cd325e3b7ecfd5b92d369e80e74c12cd87826e263ec06", size = 20840821 }, - { url = "https://files.pythonhosted.org/packages/d0/8e/5b7c08f9238f6cc18037f6fd92f83feaa8c19e9decb6bd075cad81f71fae/numpy-2.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f07fa2f15dabe91259828ce7d71b5ca9e2eb7c8c26baa822c825ce43552f4883", size = 13500478 }, - { url = 
"https://files.pythonhosted.org/packages/65/32/bf9df25ef50761fcb3e089c745d2e195b35cc6506d032f12bb5cc28f6c43/numpy-2.1.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:5474dad8c86ee9ba9bb776f4b99ef2d41b3b8f4e0d199d4f7304728ed34d0300", size = 5095825 }, - { url = "https://files.pythonhosted.org/packages/50/34/d18c95bc5981ea3bb8e6f896aad12159a37dcc67b22cd9464fe3899612f7/numpy-2.1.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:1f817c71683fd1bb5cff1529a1d085a57f02ccd2ebc5cd2c566f9a01118e3b7d", size = 6611470 }, - { url = "https://files.pythonhosted.org/packages/b4/4f/27d56e9f6222419951bfeef54bc0a71dc40c0ebeb248e1aa85655da6fa11/numpy-2.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a3336fbfa0d38d3deacd3fe7f3d07e13597f29c13abf4d15c3b6dc2291cbbdd", size = 13647061 }, - { url = "https://files.pythonhosted.org/packages/f9/e0/ae6e12a157c4ab415b380d0f3596cb9090a0c4acf48cd8cd7bc6d6b93d24/numpy-2.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a894c51fd8c4e834f00ac742abad73fc485df1062f1b875661a3c1e1fb1c2f6", size = 16006479 }, - { url = "https://files.pythonhosted.org/packages/ab/da/b746668c7303bd73af262208abbfa8b1c86be12e9eccb0d3021ed8a58873/numpy-2.1.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:9156ca1f79fc4acc226696e95bfcc2b486f165a6a59ebe22b2c1f82ab190384a", size = 16383064 }, - { url = "https://files.pythonhosted.org/packages/f4/51/c0dcadea0c281be5db32b29f7b977b17bdb53b7dbfcbc3b4f49288de8696/numpy-2.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:624884b572dff8ca8f60fab591413f077471de64e376b17d291b19f56504b2bb", size = 14135556 }, - { url = "https://files.pythonhosted.org/packages/c2/5b/de7ef3b3700ff1da66828f782e0c69732fb42aedbcf7f4a1a19ef6fc7e74/numpy-2.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:15ef8b2177eeb7e37dd5ef4016f30b7659c57c2c0b57a779f1d537ff33a72c7b", size = 20980535 }, - { url = 
"https://files.pythonhosted.org/packages/92/ed/88a08b5b66bd37234a901f68b4df2beb1dc01d8a955e071991fd0ee9b4fe/numpy-2.1.0-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:e5f0642cdf4636198a4990de7a71b693d824c56a757862230454629cf62e323d", size = 6748666 }, - { url = "https://files.pythonhosted.org/packages/61/bb/ba8edcb7f6478b656b1cb94331adb700c8bc06d51c3519fc647fd37dad24/numpy-2.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15976718c004466406342789f31b6673776360f3b1e3c575f25302d7e789575", size = 16139681 }, - { url = "https://files.pythonhosted.org/packages/92/19/0a05f78c3557ad3ecb0da85e3eb63cb1527a7ea31a521d11a4f08f753f59/numpy-2.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6c1de77ded79fef664d5098a66810d4d27ca0224e9051906e634b3f7ead134c2", size = 12788122 }, +version = "2.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/59/5f/9003bb3e632f2b58f5e3a3378902dcc73c5518070736c6740fe52454e8e1/numpy-2.1.1.tar.gz", hash = "sha256:d0cf7d55b1051387807405b3898efafa862997b4cba8aa5dbe657be794afeafd", size = 18874860 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/37/e3de47233b3ba458b1021a6f95029198b2f68a83eb886a862640b6ec3e9a/numpy-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8a0e34993b510fc19b9a2ce7f31cb8e94ecf6e924a40c0c9dd4f62d0aac47d9", size = 21150738 }, + { url = "https://files.pythonhosted.org/packages/69/30/f41c9b6dab4e1ec56b40d1daa81ce9f9f8d26da6d02af18768a883676bd5/numpy-2.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7dd86dfaf7c900c0bbdcb8b16e2f6ddf1eb1fe39c6c8cca6e94844ed3152a8fd", size = 13758247 }, + { url = "https://files.pythonhosted.org/packages/e1/30/d2f71d3419ada3b3735e2ce9cea7dfe22c268ac9fbb24e0b5ac5fc222633/numpy-2.1.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:5889dd24f03ca5a5b1e8a90a33b5a0846d8977565e4ae003a63d22ecddf6782f", size = 5353756 }, + { url = 
"https://files.pythonhosted.org/packages/84/64/879bd6877488441cfaa578c96bdc4b43710d7e3ae4f8260fbd04821da395/numpy-2.1.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:59ca673ad11d4b84ceb385290ed0ebe60266e356641428c845b39cd9df6713ab", size = 6886809 }, + { url = "https://files.pythonhosted.org/packages/cd/c4/869f8db87f5c9df86b93ca42036f58911ff162dd091a41e617977ab50d1f/numpy-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13ce49a34c44b6de5241f0b38b07e44c1b2dcacd9e36c30f9c2fcb1bb5135db7", size = 13977367 }, + { url = "https://files.pythonhosted.org/packages/7d/4b/a509d346fffede6120cc17610cc500819417ee9c3da7f08d9aaf15cab2a3/numpy-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:913cc1d311060b1d409e609947fa1b9753701dac96e6581b58afc36b7ee35af6", size = 16326516 }, + { url = "https://files.pythonhosted.org/packages/4a/0c/fdba41b2ddeb7a052f84d85fb17d5e168af0e8034b3a2d6e369b7cc2966f/numpy-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:caf5d284ddea7462c32b8d4a6b8af030b6c9fd5332afb70e7414d7fdded4bfd0", size = 16702642 }, + { url = "https://files.pythonhosted.org/packages/bf/8d/a8da065a46515efdbcf81a92535b816ea17194ce5b767df1f13815c32179/numpy-2.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:57eb525e7c2a8fdee02d731f647146ff54ea8c973364f3b850069ffb42799647", size = 14475522 }, + { url = "https://files.pythonhosted.org/packages/b9/d2/5b7cf5851af48c35a73b85750b41f9b622760ee11659665a688e6b3f7cb7/numpy-2.1.1-cp310-cp310-win32.whl", hash = "sha256:9a8e06c7a980869ea67bbf551283bbed2856915f0a792dc32dd0f9dd2fb56728", size = 6535211 }, + { url = "https://files.pythonhosted.org/packages/e5/6a/b1f7d73fec1942ded4b474a78c3fdd11c4fad5232143f41dd7e6ae166080/numpy-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:d10c39947a2d351d6d466b4ae83dad4c37cd6c3cdd6d5d0fa797da56f710a6ae", size = 12865289 }, + { url = 
"https://files.pythonhosted.org/packages/f7/86/2c01070424a42b286ea0271203682c3d3e81e10ce695545b35768307b383/numpy-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0d07841fd284718feffe7dd17a63a2e6c78679b2d386d3e82f44f0108c905550", size = 21154850 }, + { url = "https://files.pythonhosted.org/packages/ef/4e/d3426d9e620a18bbb979f28e4dc7f9a2c35eb7cf726ffcb33545ebdd3e6a/numpy-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b5613cfeb1adfe791e8e681128f5f49f22f3fcaa942255a6124d58ca59d9528f", size = 13789477 }, + { url = "https://files.pythonhosted.org/packages/c6/6e/fb6b1b2da9f4c757f55b202f10b6af0fe4fee87ace6e830228a12ab8ae5d/numpy-2.1.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0b8cc2715a84b7c3b161f9ebbd942740aaed913584cae9cdc7f8ad5ad41943d0", size = 5351769 }, + { url = "https://files.pythonhosted.org/packages/58/9a/07c8a9dc7254f3265ae014e33768d1cfd8eb73ee6cf215f4ec3b497e4255/numpy-2.1.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:b49742cdb85f1f81e4dc1b39dcf328244f4d8d1ded95dea725b316bd2cf18c95", size = 6890872 }, + { url = "https://files.pythonhosted.org/packages/08/4e/3b50fa3b1e045793056ed5a1fc6f89dd897ff9cb00900ca6377fe552d442/numpy-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8d5f8a8e3bc87334f025194c6193e408903d21ebaeb10952264943a985066ca", size = 13984256 }, + { url = "https://files.pythonhosted.org/packages/d9/37/108d692f7e2544b9ae972c7bfa06c26717871c273ccec86470bc3132b04d/numpy-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d51fc141ddbe3f919e91a096ec739f49d686df8af254b2053ba21a910ae518bf", size = 16337778 }, + { url = "https://files.pythonhosted.org/packages/95/2d/df81a1be3be6d3a92fd12dfd6c26a0dc026b276136ec1056562342a484a2/numpy-2.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:98ce7fb5b8063cfdd86596b9c762bf2b5e35a2cdd7e967494ab78a1fa7f8b86e", size = 16710448 }, + { url = 
"https://files.pythonhosted.org/packages/8f/34/4b2e604c5c44bd64b6c85e89d88871b41e60233b3ddf97419b37ae5b0c72/numpy-2.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:24c2ad697bd8593887b019817ddd9974a7f429c14a5469d7fad413f28340a6d2", size = 14489002 }, + { url = "https://files.pythonhosted.org/packages/9f/0d/67c04b6bfefd0abbe7f60f7e4f11e3aca15d688faec1d1df089966105a9a/numpy-2.1.1-cp311-cp311-win32.whl", hash = "sha256:397bc5ce62d3fb73f304bec332171535c187e0643e176a6e9421a6e3eacef06d", size = 6533215 }, + { url = "https://files.pythonhosted.org/packages/94/7a/4c00332a3ca79702bbc86228afd0e84e6f91b47222ec8cdf00677dd16481/numpy-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:ae8ce252404cdd4de56dcfce8b11eac3c594a9c16c231d081fb705cf23bd4d9e", size = 12870550 }, + { url = "https://files.pythonhosted.org/packages/36/11/c573ef66c004f991989c2c6218229d9003164525549409aec5ec9afc0285/numpy-2.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c803b7934a7f59563db459292e6aa078bb38b7ab1446ca38dd138646a38203e", size = 20884403 }, + { url = "https://files.pythonhosted.org/packages/6b/6c/a9fbef5fd2f9685212af2a9e47485cde9357c3e303e079ccf85127516f2d/numpy-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6435c48250c12f001920f0751fe50c0348f5f240852cfddc5e2f97e007544cbe", size = 13493375 }, + { url = "https://files.pythonhosted.org/packages/34/f2/1316a6b08ad4c161d793abe81ff7181e9ae2e357a5b06352a383b9f8e800/numpy-2.1.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:3269c9eb8745e8d975980b3a7411a98976824e1fdef11f0aacf76147f662b15f", size = 5088823 }, + { url = "https://files.pythonhosted.org/packages/be/15/fabf78a6d4a10c250e87daf1cd901af05e71501380532ac508879cc46a7e/numpy-2.1.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:fac6e277a41163d27dfab5f4ec1f7a83fac94e170665a4a50191b545721c6521", size = 6619825 }, + { url = 
"https://files.pythonhosted.org/packages/9f/8a/76ddef3e621541ddd6984bc24d256a4e3422d036790cbbe449e6cad439ee/numpy-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcd8f556cdc8cfe35e70efb92463082b7f43dd7e547eb071ffc36abc0ca4699b", size = 13696705 }, + { url = "https://files.pythonhosted.org/packages/cb/22/2b840d297183916a95847c11f82ae11e248fa98113490b2357f774651e1d/numpy-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b9cd92c8f8e7b313b80e93cedc12c0112088541dcedd9197b5dee3738c1201", size = 16041649 }, + { url = "https://files.pythonhosted.org/packages/c7/e8/6f4825d8f576cfd5e4d6515b9eec22bd618868bdafc8a8c08b446dcb65f0/numpy-2.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:afd9c680df4de71cd58582b51e88a61feed4abcc7530bcd3d48483f20fc76f2a", size = 16409358 }, + { url = "https://files.pythonhosted.org/packages/bf/f8/5edf1105b0dc24fd66fc3e9e7f3bca3d920cde571caaa4375ec1566073c3/numpy-2.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8661c94e3aad18e1ea17a11f60f843a4933ccaf1a25a7c6a9182af70610b2313", size = 14172488 }, + { url = "https://files.pythonhosted.org/packages/f4/c2/dddca3e69a024d2f249a5b68698328163cbdafb7e65fbf6d36373bbabf12/numpy-2.1.1-cp312-cp312-win32.whl", hash = "sha256:950802d17a33c07cba7fd7c3dcfa7d64705509206be1606f196d179e539111ed", size = 6237195 }, + { url = "https://files.pythonhosted.org/packages/b7/98/5640a09daa3abf0caeaefa6e7bf0d10c0aa28a77c84e507d6a716e0e23df/numpy-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:3fc5eabfc720db95d68e6646e88f8b399bfedd235994016351b1d9e062c4b270", size = 12568082 }, + { url = "https://files.pythonhosted.org/packages/6b/9e/8bc6f133bc6d359ccc9ec051853aded45504d217685191f31f46d36b7065/numpy-2.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:046356b19d7ad1890c751b99acad5e82dc4a02232013bd9a9a712fddf8eb60f5", size = 20834810 }, + { url = 
"https://files.pythonhosted.org/packages/32/1b/429519a2fa28681814c511574017d35f3aab7136d554cc65f4c1526dfbf5/numpy-2.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6e5a9cb2be39350ae6c8f79410744e80154df658d5bea06e06e0ac5bb75480d5", size = 13507739 }, + { url = "https://files.pythonhosted.org/packages/25/18/c732d7dd9896d11e4afcd487ac65e62f9fa0495563b7614eb850765361fa/numpy-2.1.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:d4c57b68c8ef5e1ebf47238e99bf27657511ec3f071c465f6b1bccbef12d4136", size = 5074465 }, + { url = "https://files.pythonhosted.org/packages/3e/37/838b7ae9262c370ab25312bab365492016f11810ffc03ebebbd54670b669/numpy-2.1.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:8ae0fd135e0b157365ac7cc31fff27f07a5572bdfc38f9c2d43b2aff416cc8b0", size = 6606418 }, + { url = "https://files.pythonhosted.org/packages/8b/b9/7ff3bfb71e316a5b43a124c4b7a5881ab12f3c32636014bef1f757f19dbd/numpy-2.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981707f6b31b59c0c24bcda52e5605f9701cb46da4b86c2e8023656ad3e833cb", size = 13692464 }, + { url = "https://files.pythonhosted.org/packages/42/78/75bcf16e6737cd196ff7ecf0e1fd3f953293a34dff4fd93fb488e8308536/numpy-2.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ca4b53e1e0b279142113b8c5eb7d7a877e967c306edc34f3b58e9be12fda8df", size = 16037763 }, + { url = "https://files.pythonhosted.org/packages/23/99/36bf5ffe034d06df307bc783e25cf164775863166dcd878879559fe0379f/numpy-2.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e097507396c0be4e547ff15b13dc3866f45f3680f789c1a1301b07dadd3fbc78", size = 16410374 }, + { url = "https://files.pythonhosted.org/packages/7f/16/04c5dab564887d4cd31a9ed30e51467fa70d52a4425f5a9bd1eed5b3d34c/numpy-2.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7506387e191fe8cdb267f912469a3cccc538ab108471291636a96a54e599556", size = 14169873 }, + { url = 
"https://files.pythonhosted.org/packages/09/e0/d1b5adbf1731886c4186c59a9fa208585df9452a43a2b60e79af7c649717/numpy-2.1.1-cp313-cp313-win32.whl", hash = "sha256:251105b7c42abe40e3a689881e1793370cc9724ad50d64b30b358bbb3a97553b", size = 6234118 }, + { url = "https://files.pythonhosted.org/packages/d0/9c/2391ee6e9ebe77232ddcab29d92662b545e99d78c3eb3b4e26d59b9ca1ca/numpy-2.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:f212d4f46b67ff604d11fff7cc62d36b3e8714edf68e44e9760e19be38c03eb0", size = 12561742 }, + { url = "https://files.pythonhosted.org/packages/38/0e/c4f754f9e73f9bb520e8bf418c646f2c4f70c5d5f2bc561e90f884593193/numpy-2.1.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:920b0911bb2e4414c50e55bd658baeb78281a47feeb064ab40c2b66ecba85553", size = 20858403 }, + { url = "https://files.pythonhosted.org/packages/32/fc/d69092b9171efa0cb8079577e71ce0cac0e08f917d33f6e99c916ed51d44/numpy-2.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:bab7c09454460a487e631ffc0c42057e3d8f2a9ddccd1e60c7bb8ed774992480", size = 13519851 }, + { url = "https://files.pythonhosted.org/packages/14/2a/d7cf2cd9f15b23f623075546ea64a2c367cab703338ca22aaaecf7e704df/numpy-2.1.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:cea427d1350f3fd0d2818ce7350095c1a2ee33e30961d2f0fef48576ddbbe90f", size = 5115444 }, + { url = "https://files.pythonhosted.org/packages/8e/00/e87b2cb4afcecca3b678deefb8fa53005d7054f3b5c39596e5554e5d98f8/numpy-2.1.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:e30356d530528a42eeba51420ae8bf6c6c09559051887196599d96ee5f536468", size = 6628903 }, + { url = "https://files.pythonhosted.org/packages/ab/9d/337ae8721b3beec48c3413d71f2d44b2defbf3c6f7a85184fc18b7b61f4a/numpy-2.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8dfa9e94fc127c40979c3eacbae1e61fda4fe71d84869cc129e2721973231ef", size = 13665945 }, + { url = 
"https://files.pythonhosted.org/packages/c0/90/ee8668e84c5d5cc080ef3beb622c016adf19ca3aa51afe9dbdcc6a9baf59/numpy-2.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910b47a6d0635ec1bd53b88f86120a52bf56dcc27b51f18c7b4a2e2224c29f0f", size = 16023473 }, + { url = "https://files.pythonhosted.org/packages/38/a0/57c24b2131879183051dc698fbb53fd43b77c3fa85b6e6311014f2bc2973/numpy-2.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:13cc11c00000848702322af4de0147ced365c81d66053a67c2e962a485b3717c", size = 16400624 }, + { url = "https://files.pythonhosted.org/packages/bb/4c/14a41eb5c9548c6cee6af0936eabfd985c69230ffa2f2598321431a9aa0a/numpy-2.1.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53e27293b3a2b661c03f79aa51c3987492bd4641ef933e366e0f9f6c9bf257ec", size = 14155072 }, + { url = "https://files.pythonhosted.org/packages/94/9a/d6a5d138b53ccdc002fdf07f0d1a960326c510e66cbfff7180c88d37c482/numpy-2.1.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7be6a07520b88214ea85d8ac8b7d6d8a1839b0b5cb87412ac9f49fa934eb15d5", size = 20982055 }, + { url = "https://files.pythonhosted.org/packages/40/b5/78d8b5481aeef6d2aad3724c6aa5398045d2657038dfe54c055cae1fcf75/numpy-2.1.1-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:52ac2e48f5ad847cd43c4755520a2317f3380213493b9d8a4c5e37f3b87df504", size = 6750222 }, + { url = "https://files.pythonhosted.org/packages/eb/9a/59a548ad57df8c432bfac4556504a9fae5c082ffea53d108fcf7ce2956e4/numpy-2.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50a95ca3560a6058d6ea91d4629a83a897ee27c00630aed9d933dff191f170cd", size = 16141236 }, + { url = "https://files.pythonhosted.org/packages/02/31/3cbba87e998748b2e33ca5bc6fcc5662c867037f980918e302aebdf139a2/numpy-2.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:99f4a9ee60eed1385a86e82288971a51e71df052ed0b2900ed30bc840c0f2e39", size = 12789681 }, ] [[package]] @@ -1735,42 +1729,42 @@ wheels = [ 
[[package]] name = "opentelemetry-api" -version = "1.26.0" +version = "1.27.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "deprecated" }, { name = "importlib-metadata" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/48/d4/e9a0ddef6eed086c96e8265d864a46da099611b7be153b0cfb63fd47e1b4/opentelemetry_api-1.26.0.tar.gz", hash = "sha256:2bd639e4bed5b18486fef0b5a520aaffde5a18fc225e808a1ac4df363f43a1ce", size = 60904 } +sdist = { url = "https://files.pythonhosted.org/packages/c9/83/93114b6de85a98963aec218a51509a52ed3f8de918fe91eb0f7299805c3f/opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342", size = 62693 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/a7/6322d1d7a1fb926e8b99208c27730f21217da2f1e0e11dab48a78a0427a4/opentelemetry_api-1.26.0-py3-none-any.whl", hash = "sha256:7d7ea33adf2ceda2dd680b18b1677e4152000b37ca76e679da71ff103b943064", size = 61533 }, + { url = "https://files.pythonhosted.org/packages/fb/1f/737dcdbc9fea2fa96c1b392ae47275165a7c641663fbb08a8d252968eed2/opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7", size = 63970 }, ] [[package]] name = "opentelemetry-sdk" -version = "1.26.0" +version = "1.27.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-semantic-conventions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d3/85/8ca0d5ebfe708287b091dffcd15553b74bbfe4532f8dd42662b78b2e0cab/opentelemetry_sdk-1.26.0.tar.gz", hash = "sha256:c90d2868f8805619535c05562d699e2f4fb1f00dbd55a86dcefca4da6fa02f85", size = 143139 } +sdist = { url = "https://files.pythonhosted.org/packages/0d/9a/82a6ac0f06590f3d72241a587cb8b0b751bd98728e896cc4cbd4847248e6/opentelemetry_sdk-1.27.0.tar.gz", hash = 
"sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f", size = 145019 } wheels = [ - { url = "https://files.pythonhosted.org/packages/92/f1/a9b550d0f9c049653dd2eab45cecf8fe4baa9795ed143d87834056ffabaf/opentelemetry_sdk-1.26.0-py3-none-any.whl", hash = "sha256:feb5056a84a88670c041ea0ded9921fca559efec03905dddeb3885525e0af897", size = 109475 }, + { url = "https://files.pythonhosted.org/packages/c1/bd/a6602e71e315055d63b2ff07172bd2d012b4cba2d4e00735d74ba42fc4d6/opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d", size = 110505 }, ] [[package]] name = "opentelemetry-semantic-conventions" -version = "0.47b0" +version = "0.48b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "deprecated" }, { name = "opentelemetry-api" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/93/85/edef14d10ad00ddd9fffb20e4d3d938f4c5c1247e11a175066fe2b4a72f8/opentelemetry_semantic_conventions-0.47b0.tar.gz", hash = "sha256:a8d57999bbe3495ffd4d510de26a97dadc1dace53e0275001b2c1b2f67992a7e", size = 83994 } +sdist = { url = "https://files.pythonhosted.org/packages/0a/89/1724ad69f7411772446067cdfa73b598694c8c91f7f8c922e344d96d81f9/opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a", size = 89445 } wheels = [ - { url = "https://files.pythonhosted.org/packages/00/c2/ca5cef8e4cd8eec5a95deed95ec3f6005e499fd9d17ca08731ced03a6921/opentelemetry_semantic_conventions-0.47b0-py3-none-any.whl", hash = "sha256:4ff9d595b85a59c1c1413f02bba320ce7ea6bf9e2ead2b0913c4395c7bbc1063", size = 138027 }, + { url = "https://files.pythonhosted.org/packages/b7/7a/4f0063dbb0b6c971568291a8bc19a4ca70d3c185db2d956230dd67429dfc/opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f", size = 149685 }, ] [[package]] @@ -1919,16 +1913,16 @@ 
wheels = [ [[package]] name = "protobuf" -version = "5.27.3" +version = "5.28.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1b/61/0671db2ab2aee7c92d6c1b617c39b30a4cd973950118da56d77e7f397a9d/protobuf-5.27.3.tar.gz", hash = "sha256:82460903e640f2b7e34ee81a947fdaad89de796d324bcbc38ff5430bcdead82c", size = 401665 } +sdist = { url = "https://files.pythonhosted.org/packages/5f/d7/331ee1f3b798c34d2257c79d5426ecbe95d46d2b40ba808a29da6947f6d8/protobuf-5.28.0.tar.gz", hash = "sha256:dde74af0fa774fa98892209992295adbfb91da3fa98c8f67a88afe8f5a349add", size = 422388 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cd/6d/a8a4b84aaf83d6cb552247e1da2fa07140bbda894bbaf0d5afddec4699df/protobuf-5.27.3-cp310-abi3-win32.whl", hash = "sha256:dcb307cd4ef8fec0cf52cb9105a03d06fbb5275ce6d84a6ae33bc6cf84e0a07b", size = 405837 }, - { url = "https://files.pythonhosted.org/packages/a4/30/cb5395acd5f65edc0dee77bdd134fe556c52fade2ad3ea9ac2676d01effe/protobuf-5.27.3-cp310-abi3-win_amd64.whl", hash = "sha256:16ddf3f8c6c41e1e803da7abea17b1793a97ef079a912e42351eabb19b2cffe7", size = 426928 }, - { url = "https://files.pythonhosted.org/packages/ca/bc/bceb11aa96dd0b2ae7002d2f46870fbdef7649a0c28420f0abb831ee3294/protobuf-5.27.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:68248c60d53f6168f565a8c76dc58ba4fa2ade31c2d1ebdae6d80f969cdc2d4f", size = 412255 }, - { url = "https://files.pythonhosted.org/packages/d9/83/a610396958a5b735e988880a0fdb5cde80b95111eb550849a5bd5cff735d/protobuf-5.27.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:b8a994fb3d1c11156e7d1e427186662b64694a62b55936b2b9348f0a7c6625ce", size = 307150 }, - { url = "https://files.pythonhosted.org/packages/4c/98/db690e43e2f28495c8fc7c997003cbd59a6db342914b404e216c9b6791f0/protobuf-5.27.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:a55c48f2a2092d8e213bd143474df33a6ae751b781dd1d1f4d953c128a415b25", size = 309266 }, - { url = 
"https://files.pythonhosted.org/packages/e1/94/d77bd282d3d53155147166c2bbd156f540009b0d7be24330f76286668b90/protobuf-5.27.3-py3-none-any.whl", hash = "sha256:8572c6533e544ebf6899c360e91d6bcbbee2549251643d32c52cf8a5de295ba5", size = 164778 }, + { url = "https://files.pythonhosted.org/packages/66/34/fc43138c93316839080324cb066f35224b75dae56b9f0fdd9d47c988ee9a/protobuf-5.28.0-cp310-abi3-win32.whl", hash = "sha256:66c3edeedb774a3508ae70d87b3a19786445fe9a068dd3585e0cefa8a77b83d0", size = 419672 }, + { url = "https://files.pythonhosted.org/packages/de/f7/e7e03be7e7307123f6467080f283e484de7e892db54dd9a46f057d08c9ee/protobuf-5.28.0-cp310-abi3-win_amd64.whl", hash = "sha256:6d7cc9e60f976cf3e873acb9a40fed04afb5d224608ed5c1a105db4a3f09c5b6", size = 431486 }, + { url = "https://files.pythonhosted.org/packages/ce/ec/34f67d6a3398aa360524d90f75a8c648c99c807b2f1001f5ab16355c1d12/protobuf-5.28.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:532627e8fdd825cf8767a2d2b94d77e874d5ddb0adefb04b237f7cc296748681", size = 414744 }, + { url = "https://files.pythonhosted.org/packages/fe/79/636415c84eed9835fed83183db73fd6ea7ba76a85cae321ff2eaad722e85/protobuf-5.28.0-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:018db9056b9d75eb93d12a9d35120f97a84d9a919bcab11ed56ad2d399d6e8dd", size = 316527 }, + { url = "https://files.pythonhosted.org/packages/19/15/da43113361db20f2d521bc38d92549edbe06856aeec085c420b2b8af5751/protobuf-5.28.0-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:6206afcb2d90181ae8722798dcb56dc76675ab67458ac24c0dd7d75d632ac9bd", size = 316615 }, + { url = "https://files.pythonhosted.org/packages/e3/b2/4df9958122a0377e571972c71692420bafd623d1df3ce506d88c2aba7e12/protobuf-5.28.0-py3-none-any.whl", hash = "sha256:510ed78cd0980f6d3218099e874714cdf0d8a95582e7b059b06cabad855ed0a0", size = 169574 }, ] [[package]] @@ -2364,37 +2358,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/ec/d2/3b2ab40f455a256cb6672186bea95cd97b459ce4594050132d71e76f0d6f/pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c", size = 550762 }, ] -[[package]] -name = "qtconsole" -version = "5.5.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "ipykernel" }, - { name = "jupyter-client" }, - { name = "jupyter-core" }, - { name = "packaging" }, - { name = "pygments" }, - { name = "pyzmq" }, - { name = "qtpy" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/78/c7/d49db5355490fa5672e835eb464203ec8122b1ee693dc6495500dcfbeac3/qtconsole-5.5.2.tar.gz", hash = "sha256:6b5fb11274b297463706af84dcbbd5c92273b1f619e6d25d08874b0a88516989", size = 439219 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f2/3f/de5e5eb44900c1ed1c1567bc505e3b6e6f4c01cf29e558bf2f8cee29af5b/qtconsole-5.5.2-py3-none-any.whl", hash = "sha256:42d745f3d05d36240244a04e1e1ec2a86d5d9b6edb16dbdef582ccb629e87e0b", size = 123401 }, -] - -[[package]] -name = "qtpy" -version = "2.4.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "packaging" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/eb/9a/7ce646daefb2f85bf5b9c8ac461508b58fa5dcad6d40db476187fafd0148/QtPy-2.4.1.tar.gz", hash = "sha256:a5a15ffd519550a1361bdc56ffc07fda56a6af7292f17c7b395d4083af632987", size = 65492 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/a9/2146d5117ad8a81185331e0809a6b48933c10171f5bac253c6df9fce991c/QtPy-2.4.1-py3-none-any.whl", hash = "sha256:1c1d8c4fa2c884ae742b069151b0abe15b3f70491f3972698c683b8e38de839b", size = 93500 }, -] - [[package]] name = "rclone-python" version = "0.1.12" @@ -2458,15 +2421,15 @@ wheels = [ [[package]] name = "rich" -version = "13.7.1" +version = "13.8.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = 
"markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b3/01/c954e134dc440ab5f96952fe52b4fdc64225530320a910473c1fe270d9aa/rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432", size = 221248 } +sdist = { url = "https://files.pythonhosted.org/packages/cf/60/5959113cae0ce512cf246a6871c623117330105a0d5f59b4e26138f2c9cc/rich-13.8.0.tar.gz", hash = "sha256:a5ac1f1cd448ade0d59cc3356f7db7a7ccda2c8cbae9c7a90c28ff463d3e91f4", size = 222072 } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/67/a37f6214d0e9fe57f6ae54b2956d550ca8365857f42a1ce0392bb21d9410/rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222", size = 240681 }, + { url = "https://files.pythonhosted.org/packages/c7/d9/c2a126eeae791e90ea099d05cb0515feea3688474b978343f3cdcfe04523/rich-13.8.0-py3-none-any.whl", hash = "sha256:2e85306a063b9492dffc86278197a60cbece75bcb766022f3436f567cae11bdc", size = 241597 }, ] [[package]] @@ -2626,11 +2589,11 @@ sdist = { url = "https://files.pythonhosted.org/packages/ac/b7/1af07a98390aba07d [[package]] name = "setuptools" -version = "73.0.1" +version = "74.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8d/37/f4d4ce9bc15e61edba3179f9b0f763fc6d439474d28511b11f0d95bab7a2/setuptools-73.0.1.tar.gz", hash = "sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193", size = 2526506 } +sdist = { url = "https://files.pythonhosted.org/packages/27/cb/e754933c1ca726b0d99980612dc9da2886e76c83968c246cfb50f491a96b/setuptools-74.1.1.tar.gz", hash = "sha256:2353af060c06388be1cecbf5953dcdb1f38362f87a2356c480b6b4d5fcfc8847", size = 1357738 } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/6a/0270e295bf30c37567736b7fca10167640898214ff911273af37ddb95770/setuptools-73.0.1-py3-none-any.whl", hash = 
"sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e", size = 2346588 }, + { url = "https://files.pythonhosted.org/packages/48/f3/e30ee63caefa90716afdffd7d9ae959cd8d0dbd2d0a0eb9fe1d73ddf806b/setuptools-74.1.1-py3-none-any.whl", hash = "sha256:fc91b5f89e392ef5b77fe143b17e32f65d3024744fba66dc3afe07201684d766", size = 1263655 }, ] [[package]] @@ -2745,6 +2708,16 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6f/42/2fd09d672eaaa937d6893d8b747d07943f97a6e5e30653aee6ebd339b704/sphinx_jinja2_compat-0.3.0-py3-none-any.whl", hash = "sha256:b1e4006d8e1ea31013fa9946d1b075b0c8d2a42c6e3425e63542c1e9f8be9084", size = 7883 }, ] +[[package]] +name = "sphinx-paramlinks" +version = "0.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ae/21/62d3a58ff7bd02bbb9245a63d1f0d2e0455522a11a78951d16088569fca8/sphinx-paramlinks-0.6.0.tar.gz", hash = "sha256:746a0816860aa3fff5d8d746efcbec4deead421f152687411db1d613d29f915e", size = 12363 } + [[package]] name = "sphinx-prompt" version = "1.6.0" @@ -3145,9 +3118,9 @@ wheels = [ [[package]] name = "zipp" -version = "3.20.0" +version = "3.20.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0e/af/9f2de5bd32549a1b705af7a7c054af3878816a1267cb389c03cc4f342a51/zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31", size = 23244 } +sdist = { url = "https://files.pythonhosted.org/packages/d3/8b/1239a3ef43a0d0ebdca623fb6413bc7702c321400c5fdd574f0b7aa0fbb4/zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b", size = 23848 } wheels = [ - { url = "https://files.pythonhosted.org/packages/da/cc/b9958af9f9c86b51f846d8487440af495ecf19b16e426fce1ed0b0796175/zipp-3.20.0-py3-none-any.whl", hash = 
"sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d", size = 9432 }, + { url = "https://files.pythonhosted.org/packages/07/9e/c96f7a4cd0bf5625bb409b7e61e99b1130dc63a98cb8b24aeabae62d43e8/zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064", size = 8988 }, ]