From 01e998cabe4a4972d5b45f7936219d924706c75c Mon Sep 17 00:00:00 2001 From: Jothi Prakash Date: Fri, 27 Dec 2024 11:47:57 +0530 Subject: [PATCH] PySQL Connector split into connector and sqlalchemy (#444) * Modified the gitignore file to not have .idea file * [PECO-1803] Splitting the PySql connector into the core and the non-core part (#417) * Implemented ColumnQueue to test the fetchall without pyarrow Removed token removed token * order of fields in row corrected * Changed the folder structure and tested that the basic setup works * Refactored the code to make the connector work * Basic Setup of connector, core and sqlalchemy is working * Basic integration of core, connect and sqlalchemy is working * Setup working dynamic change from ColumnQueue to ArrowQueue * Refactored the test code and moved it to the respective folders * Added the unit test for column_queue Fixed __version__ Fix * venv_main added to git ignore * Added code for merging columnar table * Merging code for columnar * Fixed the retry_close session test issue with logging * Fixed the databricks_sqlalchemy tests and introduced pytest.ini for the sqla_testing * Added pyarrow_test mark on pytest * Fixed databricks.sqlalchemy to databricks_sqlalchemy imports * Added poetry.lock * Added dist folder * Changed the pyproject.toml * Minor Fix * Added the pyarrow skip tag on unit tests and verified they work * Fixed the Decimal and timestamp conversion issue in the non-arrow pipeline * Removed files that were not required and reformatted * Fixed test_retry error * Changed the folder structure to src / databricks * Moved the columnar non-arrow flow to another PR * Moved the README to the root * removed columnQueue instance * Removed databricks_sqlalchemy dependency in core * Changed the pysql_supports_arrow predicate, introduced changes in the pyproject.toml * Ran the black formatter with the original version * Extra .py removed from all the __init__.py files names * Undo formatting check * Check * Check * Check * Check * Check * Check * Check * Check * Check * Check * Check * Check * Check * Check * BIG UPDATE * Refactor code * Refactor * Fixed versioning * Minor refactoring * Minor refactoring * Changed the folder structure such that sqlalchemy has no reference here * Fixed README.md and CONTRIBUTING.md * Added manual publish * On push trigger added * Manually setting the publish step * Changed versioning in pyproject.toml * Bumped up the version to 4.0.0.b3 and also changed the structure to have pyarrow as optional * Removed the sqlalchemy tests from the integration.yml file * [PECO-1803] Print warning message if pyarrow is not installed (#468) Print warning message if pyarrow is not installed Signed-off-by: Jacky Hu * [PECO-1803] Remove sqlalchemy and update README.md (#469) Remove sqlalchemy and update README.md Signed-off-by: Jacky Hu * Removed all sqlalchemy related stuff * generated the lock file * Fixed failing tests * removed poetry.lock * Updated the lock file * Fixed poetry numpy 2.2.2 issue * Workflow fixes --------- Signed-off-by: Jacky Hu Co-authored-by: Jacky Hu --- .github/workflows/code-quality-checks.yml | 51 ++ .github/workflows/integration.yml | 2 - .github/workflows/publish-manual.yml | 78 ++ .gitignore | 2 +- CHANGELOG.md | 5 + CONTRIBUTING.md | 3 - README.md | 23 +- examples/sqlalchemy.py | 174 ---- poetry.lock | 801 ++++++------ pyproject.toml | 21 +- src/databricks/sql/client.py | 7 + .../sqlalchemy/README.sqlalchemy.md | 203 ----- src/databricks/sqlalchemy/README.tests.md | 44 - src/databricks/sqlalchemy/__init__.py
| 4 - src/databricks/sqlalchemy/_ddl.py | 100 --- src/databricks/sqlalchemy/_parse.py | 385 --------- src/databricks/sqlalchemy/_types.py | 323 ------- src/databricks/sqlalchemy/base.py | 436 ---------- src/databricks/sqlalchemy/py.typed | 0 src/databricks/sqlalchemy/requirements.py | 249 ------ src/databricks/sqlalchemy/setup.cfg | 4 - src/databricks/sqlalchemy/test/_extra.py | 70 -- src/databricks/sqlalchemy/test/_future.py | 331 -------- src/databricks/sqlalchemy/test/_regression.py | 311 ------- .../sqlalchemy/test/_unsupported.py | 450 ---------- src/databricks/sqlalchemy/test/conftest.py | 13 - .../overrides/_componentreflectiontest.py | 189 ----- .../sqlalchemy/test/overrides/_ctetest.py | 33 - src/databricks/sqlalchemy/test/test_suite.py | 13 - .../sqlalchemy/test_local/__init__.py | 5 - .../sqlalchemy/test_local/conftest.py | 44 - .../sqlalchemy/test_local/e2e/MOCK_DATA.xlsx | Bin 59837 -> 0 bytes .../sqlalchemy/test_local/e2e/test_basic.py | 543 ------------ .../sqlalchemy/test_local/test_ddl.py | 96 --- .../sqlalchemy/test_local/test_parsing.py | 160 ---- .../sqlalchemy/test_local/test_types.py | 161 ---- tests/unit/test_arrow_queue.py | 10 +- tests/unit/test_cloud_fetch_queue.py | 8 +- tests/unit/test_fetches.py | 8 +- tests/unit/test_fetches_bench.py | 8 +- tests/unit/test_thrift_backend.py | 10 +- 41 files changed, 467 insertions(+), 4911 deletions(-) create mode 100644 .github/workflows/publish-manual.yml delete mode 100644 examples/sqlalchemy.py mode change 100755 => 100644 poetry.lock delete mode 100644 src/databricks/sqlalchemy/README.sqlalchemy.md delete mode 100644 src/databricks/sqlalchemy/README.tests.md delete mode 100644 src/databricks/sqlalchemy/__init__.py delete mode 100644 src/databricks/sqlalchemy/_ddl.py delete mode 100644 src/databricks/sqlalchemy/_parse.py delete mode 100644 src/databricks/sqlalchemy/_types.py delete mode 100644 src/databricks/sqlalchemy/base.py delete mode 100755 src/databricks/sqlalchemy/py.typed delete mode 100644 src/databricks/sqlalchemy/requirements.py delete mode 100644 src/databricks/sqlalchemy/setup.cfg delete mode 100644 src/databricks/sqlalchemy/test/_extra.py delete mode 100644 src/databricks/sqlalchemy/test/_future.py delete mode 100644 src/databricks/sqlalchemy/test/_regression.py delete mode 100644 src/databricks/sqlalchemy/test/_unsupported.py delete mode 100644 src/databricks/sqlalchemy/test/conftest.py delete mode 100644 src/databricks/sqlalchemy/test/overrides/_componentreflectiontest.py delete mode 100644 src/databricks/sqlalchemy/test/overrides/_ctetest.py delete mode 100644 src/databricks/sqlalchemy/test/test_suite.py delete mode 100644 src/databricks/sqlalchemy/test_local/__init__.py delete mode 100644 src/databricks/sqlalchemy/test_local/conftest.py delete mode 100644 src/databricks/sqlalchemy/test_local/e2e/MOCK_DATA.xlsx delete mode 100644 src/databricks/sqlalchemy/test_local/e2e/test_basic.py delete mode 100644 src/databricks/sqlalchemy/test_local/test_ddl.py delete mode 100644 src/databricks/sqlalchemy/test_local/test_parsing.py delete mode 100644 src/databricks/sqlalchemy/test_local/test_types.py diff --git a/.github/workflows/code-quality-checks.yml b/.github/workflows/code-quality-checks.yml index 80ac94a7..6a349233 100644 --- a/.github/workflows/code-quality-checks.yml +++ b/.github/workflows/code-quality-checks.yml @@ -58,6 +58,57 @@ jobs: #---------------------------------------------- - name: Run tests run: poetry run python -m pytest tests/unit + run-unit-tests-with-arrow: + runs-on: ubuntu-latest + 
strategy: + matrix: + python-version: [ 3.8, 3.9, "3.10", "3.11" ] + steps: + #---------------------------------------------- + # check-out repo and set-up python + #---------------------------------------------- + - name: Check out repository + uses: actions/checkout@v2 + - name: Set up python ${{ matrix.python-version }} + id: setup-python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + #---------------------------------------------- + # ----- install & configure poetry ----- + #---------------------------------------------- + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + virtualenvs-create: true + virtualenvs-in-project: true + installer-parallel: true + + #---------------------------------------------- + # load cached venv if cache exists + #---------------------------------------------- + - name: Load cached venv + id: cached-poetry-dependencies + uses: actions/cache@v2 + with: + path: .venv-pyarrow + key: venv-pyarrow-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ github.event.repository.name }}-${{ hashFiles('**/poetry.lock') }} + #---------------------------------------------- + # install dependencies if cache does not exist + #---------------------------------------------- + - name: Install dependencies + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: poetry install --no-interaction --no-root + #---------------------------------------------- + # install your root project, if required + #---------------------------------------------- + - name: Install library + run: poetry install --no-interaction --all-extras + #---------------------------------------------- + # run test suite + #---------------------------------------------- + - name: Run tests + run: poetry run python -m pytest tests/unit check-linting: runs-on: ubuntu-latest strategy: diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index f28c22a8..aef7b7f2 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -55,5 +55,3 @@ jobs: #---------------------------------------------- - name: Run e2e tests run: poetry run python -m pytest tests/e2e - - name: Run SQL Alchemy tests - run: poetry run python -m pytest src/databricks/sqlalchemy/test_local diff --git a/.github/workflows/publish-manual.yml b/.github/workflows/publish-manual.yml new file mode 100644 index 00000000..ecad71a2 --- /dev/null +++ b/.github/workflows/publish-manual.yml @@ -0,0 +1,78 @@ +name: Publish to PyPI Manual [Production] + +# Allow manual triggering of the workflow +on: + workflow_dispatch: {} + +jobs: + publish: + name: Publish + runs-on: ubuntu-latest + + steps: + #---------------------------------------------- + # Step 1: Check out the repository code + #---------------------------------------------- + - name: Check out repository + uses: actions/checkout@v2 # Check out the repository to access the code + + #---------------------------------------------- + # Step 2: Set up Python environment + #---------------------------------------------- + - name: Set up python + id: setup-python + uses: actions/setup-python@v2 + with: + python-version: 3.9 # Specify the Python version to be used + + #---------------------------------------------- + # Step 3: Install and configure Poetry + #---------------------------------------------- + - name: Install Poetry + uses: snok/install-poetry@v1 # Install Poetry, the Python package manager + with: + virtualenvs-create: true + virtualenvs-in-project: 
true + installer-parallel: true + +# #---------------------------------------------- +# # Step 4: Load cached virtual environment (if available) +# #---------------------------------------------- +# - name: Load cached venv +# id: cached-poetry-dependencies +# uses: actions/cache@v2 +# with: +# path: .venv # Path to the virtual environment +# key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ github.event.repository.name }}-${{ hashFiles('**/poetry.lock') }} +# # Cache key is generated based on OS, Python version, repo name, and the `poetry.lock` file hash + +# #---------------------------------------------- +# # Step 5: Install dependencies if the cache is not found +# #---------------------------------------------- +# - name: Install dependencies +# if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' # Only run if the cache was not hit +# run: poetry install --no-interaction --no-root # Install dependencies without interaction + +# #---------------------------------------------- +# # Step 6: Update the version to the manually provided version +# #---------------------------------------------- +# - name: Update pyproject.toml with the specified version +# run: poetry version ${{ github.event.inputs.version }} # Use the version provided by the user input + + #---------------------------------------------- + # Step 7: Build and publish the first package to PyPI + #---------------------------------------------- + - name: Build and publish databricks sql connector to PyPI + working-directory: ./databricks_sql_connector + run: | + poetry build + poetry publish -u __token__ -p ${{ secrets.PROD_PYPI_TOKEN }} # Publish with PyPI token + #---------------------------------------------- + # Step 8: Build and publish the second package to PyPI + #---------------------------------------------- + + - name: Build and publish databricks sql connector core to PyPI + working-directory: ./databricks_sql_connector_core + run: | + poetry build + poetry publish -u __token__ -p ${{ secrets.PROD_PYPI_TOKEN }} # Publish with PyPI token \ No newline at end of file diff --git a/.gitignore b/.gitignore index a1fe5bbd..2ae38dbc 100644 --- a/.gitignore +++ b/.gitignore @@ -195,7 +195,7 @@ cython_debug/ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ +.idea/ # End of https://www.toptal.com/developers/gitignore/api/python,macos diff --git a/CHANGELOG.md b/CHANGELOG.md index d426b97e..88c4979d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,10 @@ # Release History +# 4.0.0 (TBD) + +- Split the connector into two separate packages: `databricks-sql-connector` and `databricks-sqlalchemy`. The `databricks-sql-connector` package contains the core functionality of the connector, while the `databricks-sqlalchemy` package contains the SQLAlchemy dialect for the connector. +- The `pyarrow` dependency is now optional in `databricks-sql-connector`.
Users who need Arrow must install `pyarrow` explicitly. + # 3.7.0 (2024-12-23) - Fix: Incorrect number of rows fetched in inline results when fetching results with FETCH_NEXT orientation (databricks/databricks-sql-python#479 by @jprakash-db) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ce0968d4..0cb25876 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -144,9 +144,6 @@ The `PySQLStagingIngestionTestSuite` namespace requires a cluster running DBR ve The suites marked `[not documented]` require additional configuration which will be documented at a later time. -#### SQLAlchemy dialect tests - -See README.tests.md for details. ### Code formatting diff --git a/README.md b/README.md index 54d4b178..a4c5a130 100644 --- a/README.md +++ b/README.md @@ -3,9 +3,9 @@ [![PyPI](https://img.shields.io/pypi/v/databricks-sql-connector?style=flat-square)](https://pypi.org/project/databricks-sql-connector/) [![Downloads](https://pepy.tech/badge/databricks-sql-connector)](https://pepy.tech/project/databricks-sql-connector) -The Databricks SQL Connector for Python allows you to develop Python applications that connect to Databricks clusters and SQL warehouses. It is a Thrift-based client with no dependencies on ODBC or JDBC. It conforms to the [Python DB API 2.0 specification](https://www.python.org/dev/peps/pep-0249/) and exposes a [SQLAlchemy](https://www.sqlalchemy.org/) dialect for use with tools like `pandas` and `alembic` which use SQLAlchemy to execute DDL. Use `pip install databricks-sql-connector[sqlalchemy]` to install with SQLAlchemy's dependencies. `pip install databricks-sql-connector[alembic]` will install alembic's dependencies. +The Databricks SQL Connector for Python allows you to develop Python applications that connect to Databricks clusters and SQL warehouses. It is a Thrift-based client with no dependencies on ODBC or JDBC. It conforms to the [Python DB API 2.0 specification](https://www.python.org/dev/peps/pep-0249/). -This connector uses Arrow as the data-exchange format, and supports APIs to directly fetch Arrow tables. Arrow tables are wrapped in the `ArrowQueue` class to provide a natural API to get several rows at a time. +This connector uses Arrow as the data-exchange format, and supports APIs (e.g. `fetchmany_arrow`) to directly fetch Arrow tables. Arrow tables are wrapped in the `ArrowQueue` class to provide a natural API to get several rows at a time. [PyArrow](https://arrow.apache.org/docs/python/index.html) is required to use these APIs; install it via `pip install pyarrow` or `pip install databricks-sql-connector[pyarrow]`. You are welcome to file an issue here for general use cases. You can also contact Databricks Support [here](help.databricks.com). @@ -22,7 +22,12 @@ For the latest documentation, see ## Quickstart -Install the library with `pip install databricks-sql-connector` +### Installing the core library +Install using `pip install databricks-sql-connector` + +### Installing the core library with PyArrow +Install using `pip install databricks-sql-connector[pyarrow]` + ```bash export DATABRICKS_HOST=********.databricks.com @@ -60,6 +65,18 @@ or to a Databricks Runtime interactive cluster (e.g. /sql/protocolv1/o/123456789 > to authenticate the target Databricks user account and needs to open the browser for authentication. So it > can only run on the user's machine. +## SQLAlchemy +Starting from `databricks-sql-connector` version 4.0.0, SQLAlchemy support has been extracted to a new library, `databricks-sqlalchemy`.
+ +- GitHub repository: [databricks-sqlalchemy GitHub](https://github.com/databricks/databricks-sqlalchemy) +- PyPI: [databricks-sqlalchemy PyPI](https://pypi.org/project/databricks-sqlalchemy/) + +### Quick SQLAlchemy guide +Users can now choose between the SQLAlchemy v1 and SQLAlchemy v2 dialects with the connector core: + +- Install the latest SQLAlchemy v1 using `pip install databricks-sqlalchemy~=1.0` +- Install SQLAlchemy v2 using `pip install databricks-sqlalchemy` + ## Contributing diff --git a/examples/sqlalchemy.py b/examples/sqlalchemy.py deleted file mode 100644 index 7492dc5a..00000000 --- a/examples/sqlalchemy.py +++ /dev/null @@ -1,174 +0,0 @@ -""" -databricks-sql-connector includes a SQLAlchemy 2.0 dialect compatible with Databricks SQL. To install -its dependencies you can run `pip install databricks-sql-connector[sqlalchemy]`. - -The expected connection string format which you can pass to create_engine() is: - -databricks://token:dapi***@***.cloud.databricks.com?http_path=/sql/***&catalog=**&schema=** - -Our dialect implements the majority of SQLAlchemy 2.0's API. Because of the extent of SQLAlchemy's -capabilities it isn't feasible to provide examples of every usage in a single script, so we only -provide a basic one here. Learn more about usage in README.sqlalchemy.md in this repo. -""" - -# fmt: off - -import os -from datetime import date, datetime, time, timedelta, timezone -from decimal import Decimal -from uuid import UUID - -# By convention, backend-specific SQLA types are defined in uppercase -# This dialect exposes Databricks SQL's TIMESTAMP and TINYINT types -# as these are not covered by the generic, camelcase types shown below -from databricks.sqlalchemy import TIMESTAMP, TINYINT - -# Beside the CamelCase types shown below, line comments reflect -# the underlying Databricks SQL / Delta table type -from sqlalchemy import ( - BigInteger, # BIGINT - Boolean, # BOOLEAN - Column, - Date, # DATE - DateTime, # TIMESTAMP_NTZ - Integer, # INTEGER - Numeric, # DECIMAL - String, # STRING - Time, # STRING - Uuid, # STRING - create_engine, - select, -) -from sqlalchemy.orm import DeclarativeBase, Session - -host = os.getenv("DATABRICKS_SERVER_HOSTNAME") -http_path = os.getenv("DATABRICKS_HTTP_PATH") -access_token = os.getenv("DATABRICKS_TOKEN") -catalog = os.getenv("DATABRICKS_CATALOG") -schema = os.getenv("DATABRICKS_SCHEMA") - - -# Extra arguments are passed untouched to databricks-sql-connector -# See src/databricks/sql/thrift_backend.py for complete list -extra_connect_args = { - "_tls_verify_hostname": True, - "_user_agent_entry": "PySQL Example Script", -} - - -engine = create_engine( - f"databricks://token:{access_token}@{host}?http_path={http_path}&catalog={catalog}&schema={schema}", - connect_args=extra_connect_args, echo=True, -) - - -class Base(DeclarativeBase): - pass - - -# This object gives a usage example for each supported type -# for more details on these, see README.sqlalchemy.md -class SampleObject(Base): - __tablename__ = "pysql_sqlalchemy_example_table" - - bigint_col = Column(BigInteger, primary_key=True) - string_col = Column(String) - tinyint_col = Column(TINYINT) - int_col = Column(Integer) - numeric_col = Column(Numeric(10, 2)) - boolean_col = Column(Boolean) - date_col = Column(Date) - datetime_col = Column(TIMESTAMP) - datetime_col_ntz = Column(DateTime) - time_col = Column(Time) - uuid_col = Column(Uuid) - -# This generates a CREATE TABLE statement against the catalog and schema -# specified in the connection string
-Base.metadata.create_all(engine) - -# Output SQL is: -# CREATE TABLE pysql_sqlalchemy_example_table ( -# bigint_col BIGINT NOT NULL, -# string_col STRING, -# tinyint_col SMALLINT, -# int_col INT, -# numeric_col DECIMAL(10, 2), -# boolean_col BOOLEAN, -# date_col DATE, -# datetime_col TIMESTAMP, -# datetime_col_ntz TIMESTAMP_NTZ, -# time_col STRING, -# uuid_col STRING, -# PRIMARY KEY (bigint_col) -# ) USING DELTA - -# The code that follows will INSERT a record using SQLAlchemy ORM containing these values -# and then SELECT it back out. The output is compared to the input to demonstrate that -# all type information is preserved. -sample_object = { - "bigint_col": 1234567890123456789, - "string_col": "foo", - "tinyint_col": -100, - "int_col": 5280, - "numeric_col": Decimal("525600.01"), - "boolean_col": True, - "date_col": date(2020, 12, 25), - "datetime_col": datetime( - 1991, 8, 3, 21, 30, 5, tzinfo=timezone(timedelta(hours=-8)) - ), - "datetime_col_ntz": datetime(1990, 12, 4, 6, 33, 41), - "time_col": time(23, 59, 59), - "uuid_col": UUID(int=255), -} -sa_obj = SampleObject(**sample_object) - -session = Session(engine) -session.add(sa_obj) -session.commit() - -# Output SQL is: -# INSERT INTO -# pysql_sqlalchemy_example_table ( -# bigint_col, -# string_col, -# tinyint_col, -# int_col, -# numeric_col, -# boolean_col, -# date_col, -# datetime_col, -# datetime_col_ntz, -# time_col, -# uuid_col -# ) -# VALUES -# ( -# :bigint_col, -# :string_col, -# :tinyint_col, -# :int_col, -# :numeric_col, -# :boolean_col, -# :date_col, -# :datetime_col, -# :datetime_col_ntz, -# :time_col, -# :uuid_col -# ) - -# Here we build a SELECT query using ORM -stmt = select(SampleObject).where(SampleObject.int_col == 5280) - -# Then fetch one result with session.scalar() -result = session.scalar(stmt) - -# Finally, we read out the input data and compare it to the output -compare = {key: getattr(result, key) for key in sample_object.keys()} -assert compare == sample_object - -# Then we drop the demonstration table -Base.metadata.drop_all(engine) - -# Output SQL is: -# DROP TABLE pysql_sqlalchemy_example_table diff --git a/poetry.lock b/poetry.lock old mode 100755 new mode 100644 index 576adbd3..2b63f135 --- a/poetry.lock +++ b/poetry.lock @@ -1,35 +1,14 @@ # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. -[[package]] -name = "alembic" -version = "1.13.2" -description = "A database migration tool for SQLAlchemy." -optional = true -python-versions = ">=3.8" -files = [ - {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, - {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, -] - -[package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.9\""} -importlib-resources = {version = "*", markers = "python_version < \"3.9\""} -Mako = "*" -SQLAlchemy = ">=1.3.0" -typing-extensions = ">=4" - -[package.extras] -tz = ["backports.zoneinfo"] - [[package]] name = "astroid" -version = "3.2.2" +version = "3.2.4" description = "An abstract syntax tree for Python with inference support." 
optional = false python-versions = ">=3.8.0" files = [ - {file = "astroid-3.2.2-py3-none-any.whl", hash = "sha256:e8a0083b4bb28fcffb6207a3bfc9e5d0a68be951dd7e336d5dcf639c682388c0"}, - {file = "astroid-3.2.2.tar.gz", hash = "sha256:8ead48e31b92b2e217b6c9733a21afafe479d52d6e164dd25fb1a770c7c3cf94"}, + {file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"}, + {file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"}, ] [package.dependencies] @@ -72,112 +51,127 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2024.6.2" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, - {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = 
"charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = 
"charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = 
"charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = 
"sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = 
"charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = 
"charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] @@ -207,13 +201,13 @@ files = [ [[package]] name = "dill" -version = "0.3.8" +version = "0.3.9" description = "serialize all of Python" optional = false python-versions = ">=3.8" files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, ] [package.extras] @@ -222,147 +216,42 @@ profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "et-xmlfile" -version = "1.1.0" +version = "2.0.0" description = "An implementation of lxml.xmlfile for the standard library" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, - {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, + {file = "et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa"}, + {file = "et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54"}, ] [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport 
of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] test = ["pytest (>=6)"] -[[package]] -name = "greenlet" -version = "3.0.3" -description = "Lightweight in-process concurrent programming" -optional = true -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - 
{file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - [[package]] name = "idna" -version = "3.7" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "importlib-metadata" -version = "8.0.0" -description = "Read metadata from Python packages" -optional = true -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = 
"sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, - {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "importlib-resources" -version = "6.4.0" -description = "Read resources from Python packages" -optional = true -python-versions = ">=3.8" +python-versions = ">=3.6" files = [ - {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, - {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] [[package]] name = "iniconfig" @@ -439,94 +328,6 @@ docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] flake8 = ["flake8"] tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] -[[package]] -name = "mako" -version = "1.3.5" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -optional = true -python-versions = ">=3.8" -files = [ - {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, - {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, -] - -[package.dependencies] -MarkupSafe = ">=0.9.2" - -[package.extras] -babel = ["Babel"] -lingua = ["lingua"] -testing = ["pytest"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = true -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - [[package]] name = "mccabe" version = "0.7.0" @@ -540,47 +341,53 @@ files = [ [[package]] name = "mypy" -version = "1.10.1" +version = "1.13.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, - {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, - {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"}, - {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"}, - {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"}, - {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"}, - {file = 
"mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"}, - {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"}, - {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"}, - {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"}, - {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"}, - {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"}, - {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"}, - {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"}, - {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"}, - {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"}, - {file = "mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"}, - {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"}, - {file = "mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"}, - {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"}, - {file = "mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"}, - {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"}, - {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"}, - {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"}, - {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"}, - {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"}, - {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = 
"mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] @@ -710,13 +517,13 @@ et-xmlfile = "*" [[package]] name = "packaging" -version = "24.1" +version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -799,19 +606,19 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -830,65 +637,68 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pyarrow" -version = "16.1.0" +version = "17.0.0" description = "Python library for Apache Arrow" -optional = false +optional = true python-versions = ">=3.8" files = [ - {file = "pyarrow-16.1.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:17e23b9a65a70cc733d8b738baa6ad3722298fa0c81d88f63ff94bf25eaa77b9"}, - {file = "pyarrow-16.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4740cc41e2ba5d641071d0ab5e9ef9b5e6e8c7611351a5cb7c1d175eaf43674a"}, - {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98100e0268d04e0eec47b73f20b39c45b4006f3c4233719c3848aa27a03c1aef"}, - {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f68f409e7b283c085f2da014f9ef81e885d90dcd733bd648cfba3ef265961848"}, - {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:a8914cd176f448e09746037b0c6b3a9d7688cef451ec5735094055116857580c"}, - {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:48be160782c0556156d91adbdd5a4a7e719f8d407cb46ae3bb4eaee09b3111bd"}, - {file = "pyarrow-16.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9cf389d444b0f41d9fe1444b70650fea31e9d52cfcb5f818b7888b91b586efff"}, - {file = "pyarrow-16.1.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:d0ebea336b535b37eee9eee31761813086d33ed06de9ab6fc6aaa0bace7b250c"}, - {file = "pyarrow-16.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e73cfc4a99e796727919c5541c65bb88b973377501e39b9842ea71401ca6c1c"}, - {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf9251264247ecfe93e5f5a0cd43b8ae834f1e61d1abca22da55b20c788417f6"}, - {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddf5aace92d520d3d2a20031d8b0ec27b4395cab9f74e07cc95edf42a5cc0147"}, - {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:25233642583bf658f629eb230b9bb79d9af4d9f9229890b3c878699c82f7d11e"}, - {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a33a64576fddfbec0a44112eaf844c20853647ca833e9a647bfae0582b2ff94b"}, - {file = "pyarrow-16.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:185d121b50836379fe012753cf15c4ba9638bda9645183ab36246923875f8d1b"}, - {file = 
"pyarrow-16.1.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:2e51ca1d6ed7f2e9d5c3c83decf27b0d17bb207a7dea986e8dc3e24f80ff7d6f"}, - {file = "pyarrow-16.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06ebccb6f8cb7357de85f60d5da50e83507954af617d7b05f48af1621d331c9a"}, - {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b04707f1979815f5e49824ce52d1dceb46e2f12909a48a6a753fe7cafbc44a0c"}, - {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d32000693deff8dc5df444b032b5985a48592c0697cb6e3071a5d59888714e2"}, - {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:8785bb10d5d6fd5e15d718ee1d1f914fe768bf8b4d1e5e9bf253de8a26cb1628"}, - {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e1369af39587b794873b8a307cc6623a3b1194e69399af0efd05bb202195a5a7"}, - {file = "pyarrow-16.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:febde33305f1498f6df85e8020bca496d0e9ebf2093bab9e0f65e2b4ae2b3444"}, - {file = "pyarrow-16.1.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b5f5705ab977947a43ac83b52ade3b881eb6e95fcc02d76f501d549a210ba77f"}, - {file = "pyarrow-16.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0d27bf89dfc2576f6206e9cd6cf7a107c9c06dc13d53bbc25b0bd4556f19cf5f"}, - {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d07de3ee730647a600037bc1d7b7994067ed64d0eba797ac74b2bc77384f4c2"}, - {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbef391b63f708e103df99fbaa3acf9f671d77a183a07546ba2f2c297b361e83"}, - {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19741c4dbbbc986d38856ee7ddfdd6a00fc3b0fc2d928795b95410d38bb97d15"}, - {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f2c5fb249caa17b94e2b9278b36a05ce03d3180e6da0c4c3b3ce5b2788f30eed"}, - {file = "pyarrow-16.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:e6b6d3cd35fbb93b70ade1336022cc1147b95ec6af7d36906ca7fe432eb09710"}, - {file = "pyarrow-16.1.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:18da9b76a36a954665ccca8aa6bd9f46c1145f79c0bb8f4f244f5f8e799bca55"}, - {file = "pyarrow-16.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:99f7549779b6e434467d2aa43ab2b7224dd9e41bdde486020bae198978c9e05e"}, - {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f07fdffe4fd5b15f5ec15c8b64584868d063bc22b86b46c9695624ca3505b7b4"}, - {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddfe389a08ea374972bd4065d5f25d14e36b43ebc22fc75f7b951f24378bf0b5"}, - {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3b20bd67c94b3a2ea0a749d2a5712fc845a69cb5d52e78e6449bbd295611f3aa"}, - {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ba8ac20693c0bb0bf4b238751d4409e62852004a8cf031c73b0e0962b03e45e3"}, - {file = "pyarrow-16.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:31a1851751433d89a986616015841977e0a188662fcffd1a5677453f1df2de0a"}, - {file = "pyarrow-16.1.0.tar.gz", hash = "sha256:15fbb22ea96d11f0b5768504a3f961edab25eaf4197c341720c4a387f6c60315"}, + {file = "pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07"}, + {file = "pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da1e060b3876faa11cee287839f9cc7cdc00649f475714b8680a05fd9071d545"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c06d4624c0ad6674364bb46ef38c3132768139ddec1c56582dbac54f2663e2"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:fa3c246cc58cb5a4a5cb407a18f193354ea47dd0648194e6265bd24177982fe8"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f7ae2de664e0b158d1607699a16a488de3d008ba99b3a7aa5de1cbc13574d047"}, + {file = "pyarrow-17.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5984f416552eea15fd9cee03da53542bf4cddaef5afecefb9aa8d1010c335087"}, + {file = "pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977"}, + {file = "pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4"}, + {file = "pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03"}, + {file = "pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22"}, + {file = "pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b"}, + {file = "pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7"}, + {file = "pyarrow-17.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:af5ff82a04b2171415f1410cff7ebb79861afc5dae50be73ce06d6e870615204"}, + {file = "pyarrow-17.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:edca18eaca89cd6382dfbcff3dd2d87633433043650c07375d095cd3517561d8"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c7916bff914ac5d4a8fe25b7a25e432ff921e72f6f2b7547d1e325c1ad9d155"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f553ca691b9e94b202ff741bdd40f6ccb70cdd5fbf65c187af132f1317de6145"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0cdb0e627c86c373205a2f94a510ac4376fdc523f8bb36beab2e7f204416163c"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d7d192305d9d8bc9082d10f361fc70a73590a4c65cf31c3e6926cd72b76bc35c"}, + {file = "pyarrow-17.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:02dae06ce212d8b3244dd3e7d12d9c4d3046945a5933d28026598e9dbbda1fca"}, + {file = "pyarrow-17.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:13d7a460b412f31e4c0efa1148e1d29bdf18ad1411eb6757d38f8fbdcc8645fb"}, + {file = "pyarrow-17.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b564a51fbccfab5a04a80453e5ac6c9954a9c5ef2890d1bcf63741909c3f8df"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32503827abbc5aadedfa235f5ece8c4f8f8b0a3cf01066bc8d29de7539532687"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a155acc7f154b9ffcc85497509bcd0d43efb80d6f733b0dc3bb14e281f131c8b"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:dec8d129254d0188a49f8a1fc99e0560dc1b85f60af729f47de4046015f9b0a5"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a48ddf5c3c6a6c505904545c25a4ae13646ae1f8ba703c4df4a1bfe4f4006bda"}, + {file = "pyarrow-17.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:42bf93249a083aca230ba7e2786c5f673507fa97bbd9725a1e2754715151a204"}, + {file = "pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28"}, ] [package.dependencies] numpy = ">=1.16.6" +[package.extras] +test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] + [[package]] name = "pylint" -version = "3.2.5" +version = "3.2.7" description = "python code static checker" optional = false python-versions = ">=3.8.0" files = [ - {file = "pylint-3.2.5-py3-none-any.whl", hash = "sha256:32cd6c042b5004b8e857d727708720c54a676d1e22917cf1a2df9b4d4868abd6"}, - {file = "pylint-3.2.5.tar.gz", hash = "sha256:e9b7171e242dcc6ebd0aaa7540481d1a72860748a0a7816b8fe6cf6c80a6fe7e"}, + {file = "pylint-3.2.7-py3-none-any.whl", hash = "sha256:02f4aedeac91be69fb3b4bea997ce580a4ac68ce58b89eaefeaf06749df73f4b"}, + {file = "pylint-3.2.7.tar.gz", hash = "sha256:1b7a721b575eaeaa7d39db076b6e7743c993ea44f57979127c517c6c572c803e"}, ] [package.dependencies] -astroid = ">=3.2.2,<=3.3.0-dev0" +astroid = ">=3.2.4,<=3.3.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -973,13 +783,13 @@ cli = ["click (>=5.0)"] [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] @@ -1005,102 +815,15 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" 
description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sqlalchemy" -version = "2.0.31" -description = "Database Abstraction Library" -optional = true -python-versions = ">=3.7" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2a213c1b699d3f5768a7272de720387ae0122f1becf0901ed6eaa1abd1baf6c"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9fea3d0884e82d1e33226935dac990b967bef21315cbcc894605db3441347443"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ad7f221d8a69d32d197e5968d798217a4feebe30144986af71ada8c548e9fa"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2bee229715b6366f86a95d497c347c22ddffa2c7c96143b59a2aa5cc9eebbc"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cd5b94d4819c0c89280b7c6109c7b788a576084bf0a480ae17c227b0bc41e109"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:750900a471d39a7eeba57580b11983030517a1f512c2cb287d5ad0fcf3aebd58"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-win32.whl", hash = "sha256:7bd112be780928c7f493c1a192cd8c5fc2a2a7b52b790bc5a84203fb4381c6be"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:5a48ac4d359f058474fadc2115f78a5cdac9988d4f99eae44917f36aa1476327"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f68470edd70c3ac3b6cd5c2a22a8daf18415203ca1b036aaeb9b0fb6f54e8298"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e2c38c2a4c5c634fe6c3c58a789712719fa1bf9b9d6ff5ebfce9a9e5b89c1ca"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd15026f77420eb2b324dcb93551ad9c5f22fab2c150c286ef1dc1160f110203"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2196208432deebdfe3b22185d46b08f00ac9d7b01284e168c212919891289396"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:352b2770097f41bff6029b280c0e03b217c2dcaddc40726f8f53ed58d8a85da4"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56d51ae825d20d604583f82c9527d285e9e6d14f9a5516463d9705dab20c3740"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-win32.whl", hash = "sha256:6e2622844551945db81c26a02f27d94145b561f9d4b0c39ce7bfd2fda5776dac"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-win_amd64.whl", hash = "sha256:ccaf1b0c90435b6e430f5dd30a5aede4764942a695552eb3a4ab74ed63c5b8d3"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3b74570d99126992d4b0f91fb87c586a574a5872651185de8297c6f90055ae42"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f77c4f042ad493cb8595e2f503c7a4fe44cd7bd59c7582fd6d78d7e7b8ec52c"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd1591329333daf94467e699e11015d9c944f44c94d2091f4ac493ced0119449"}, - {file = 
"SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74afabeeff415e35525bf7a4ecdab015f00e06456166a2eba7590e49f8db940e"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b9c01990d9015df2c6f818aa8f4297d42ee71c9502026bb074e713d496e26b67"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66f63278db425838b3c2b1c596654b31939427016ba030e951b292e32b99553e"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-win32.whl", hash = "sha256:0b0f658414ee4e4b8cbcd4a9bb0fd743c5eeb81fc858ca517217a8013d282c96"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-win_amd64.whl", hash = "sha256:fa4b1af3e619b5b0b435e333f3967612db06351217c58bfb50cee5f003db2a5a"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f43e93057cf52a227eda401251c72b6fbe4756f35fa6bfebb5d73b86881e59b0"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d337bf94052856d1b330d5fcad44582a30c532a2463776e1651bd3294ee7e58b"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c06fb43a51ccdff3b4006aafee9fcf15f63f23c580675f7734245ceb6b6a9e05"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:b6e22630e89f0e8c12332b2b4c282cb01cf4da0d26795b7eae16702a608e7ca1"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:79a40771363c5e9f3a77f0e28b3302801db08040928146e6808b5b7a40749c88"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-win32.whl", hash = "sha256:501ff052229cb79dd4c49c402f6cb03b5a40ae4771efc8bb2bfac9f6c3d3508f"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-win_amd64.whl", hash = "sha256:597fec37c382a5442ffd471f66ce12d07d91b281fd474289356b1a0041bdf31d"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dc6d69f8829712a4fd799d2ac8d79bdeff651c2301b081fd5d3fe697bd5b4ab9"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23b9fbb2f5dd9e630db70fbe47d963c7779e9c81830869bd7d137c2dc1ad05fb"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21c97efcbb9f255d5c12a96ae14da873233597dfd00a3a0c4ce5b3e5e79704"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26a6a9837589c42b16693cf7bf836f5d42218f44d198f9343dd71d3164ceeeac"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc251477eae03c20fae8db9c1c23ea2ebc47331bcd73927cdcaecd02af98d3c3"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2fd17e3bb8058359fa61248c52c7b09a97cf3c820e54207a50af529876451808"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-win32.whl", hash = "sha256:c76c81c52e1e08f12f4b6a07af2b96b9b15ea67ccdd40ae17019f1c373faa227"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-win_amd64.whl", hash = "sha256:4b600e9a212ed59355813becbcf282cfda5c93678e15c25a0ef896b354423238"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b6cf796d9fcc9b37011d3f9936189b3c8074a02a4ed0c0fbbc126772c31a6d4"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78fe11dbe37d92667c2c6e74379f75746dc947ee505555a0197cfba9a6d4f1a4"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc47dc6185a83c8100b37acda27658fe4dbd33b7d5e7324111f6521008ab4fe"}, - {file = 
"SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a41514c1a779e2aa9a19f67aaadeb5cbddf0b2b508843fcd7bafdf4c6864005"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:afb6dde6c11ea4525318e279cd93c8734b795ac8bb5dda0eedd9ebaca7fa23f1"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3f9faef422cfbb8fd53716cd14ba95e2ef655400235c3dfad1b5f467ba179c8c"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-win32.whl", hash = "sha256:fc6b14e8602f59c6ba893980bea96571dd0ed83d8ebb9c4479d9ed5425d562e9"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-win_amd64.whl", hash = "sha256:3cb8a66b167b033ec72c3812ffc8441d4e9f5f78f5e31e54dcd4c90a4ca5bebc"}, - {file = "SQLAlchemy-2.0.31-py3-none-any.whl", hash = "sha256:69f3e3c08867a8e4856e92d7afb618b95cdee18e0bc1647b77599722c9a28911"}, - {file = "SQLAlchemy-2.0.31.tar.gz", hash = "sha256:b607489dd4a54de56984a0c7656247504bd5523d9d0ba799aef59d4add009484"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} -typing-extensions = ">=4.6.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3_binary"] - [[package]] name = "thrift" version = "0.20.0" @@ -1121,24 +844,54 @@ twisted = ["twisted"] [[package]] name = "tomli" -version = "2.0.1" +version = "2.2.1" description = "A lil' TOML parser" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] name = "tomlkit" -version = "0.12.5" +version = "0.13.2" description = "Style preserving TOML library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, - {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] [[package]] @@ -1154,24 +907,24 @@ files = [ [[package]] name = "tzdata" -version = "2024.1" +version = "2024.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -1180,26 +933,10 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] -[[package]] -name = "zipp" -version = "3.19.2" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = true -python-versions = ">=3.8" -files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, -] - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - [extras] -alembic = ["alembic", "sqlalchemy"] -sqlalchemy = ["sqlalchemy"] +pyarrow = ["pyarrow"] [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "9d8a91369fc79f9ca9f7502e2ed284b66531c954ae59a723e465a76073966998" +content-hash = "43ea4a4ca7c8403d2b2033b783fe57743e100354986c723ef1f202cde2ac8881" diff --git a/pyproject.toml b/pyproject.toml index dc13f283..168fa9fb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "databricks-sql-connector" -version = "3.7.0" +version = "4.0.0" description = "Databricks SQL Connector for Python" authors = ["Databricks "] license = "Apache-2.0" @@ -14,23 +14,19 @@ thrift = ">=0.16.0,<0.21.0" pandas = [ { version = ">=1.2.5,<2.3.0", python = ">=3.8" } ] -pyarrow = ">=14.0.1" - lz4 = "^4.0.2" requests = "^2.18.1" oauthlib = "^3.1.0" numpy = [ - { version = ">=1.16.6", python = ">=3.8,<3.11" }, - { version = ">=1.23.4", python = ">=3.11" }, + { version = "^1.16.6", python = ">=3.8,<3.11" }, + { version = "^1.23.4", python = ">=3.11" }, ] -sqlalchemy = { version = ">=2.0.21", optional = true } openpyxl = "^3.0.10" -alembic = { version = "^1.0.11", optional = true } urllib3 = ">=1.26" +pyarrow = { version = ">=14.0.1", optional=true } [tool.poetry.extras] -sqlalchemy = ["sqlalchemy"] -alembic = ["sqlalchemy", "alembic"] +pyarrow = ["pyarrow"] [tool.poetry.dev-dependencies] pytest = "^7.1.2" @@ -43,9 +39,6 @@ pytest-dotenv = "^0.5.2" "Homepage" = "https://github.com/databricks/databricks-sql-python" "Bug Tracker" = "https://github.com/databricks/databricks-sql-python/issues" -[tool.poetry.plugins."sqlalchemy.dialects"] -"databricks" = "databricks.sqlalchemy:DatabricksDialect" - [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" @@ -62,5 +55,5 @@ markers = {"reviewed" = "Test case has been reviewed by Databricks"} minversion = "6.0" log_cli = "false" log_cli_level = "INFO" -testpaths = ["tests", "src/databricks/sqlalchemy/test_local"] -env_files = ["test.env"] +testpaths = 
["tests"] +env_files = ["test.env"] \ No newline at end of file diff --git a/src/databricks/sql/client.py b/src/databricks/sql/client.py index aefed1ef..dca286ef 100755 --- a/src/databricks/sql/client.py +++ b/src/databricks/sql/client.py @@ -54,6 +54,13 @@ logger = logging.getLogger(__name__) +if pyarrow is None: + logger.warning( + "[WARN] pyarrow is not installed by default since databricks-sql-connector 4.0.0," + "any arrow specific api (e.g. fetchmany_arrow) and cloud fetch will be disabled." + "If you need these features, please run pip install pyarrow or pip install databricks-sql-connector[pyarrow] to install" + ) + DEFAULT_RESULT_BUFFER_SIZE_BYTES = 104857600 DEFAULT_ARRAY_SIZE = 100000 diff --git a/src/databricks/sqlalchemy/README.sqlalchemy.md b/src/databricks/sqlalchemy/README.sqlalchemy.md deleted file mode 100644 index 8aa51973..00000000 --- a/src/databricks/sqlalchemy/README.sqlalchemy.md +++ /dev/null @@ -1,203 +0,0 @@ -## Databricks dialect for SQLALchemy 2.0 - -The Databricks dialect for SQLAlchemy serves as bridge between [SQLAlchemy](https://www.sqlalchemy.org/) and the Databricks SQL Python driver. The dialect is included with `databricks-sql-connector==3.0.0` and above. A working example demonstrating usage can be found in `examples/sqlalchemy.py`. - -## Usage with SQLAlchemy <= 2.0 -A SQLAlchemy 1.4 compatible dialect was first released in connector [version 2.4](https://github.com/databricks/databricks-sql-python/releases/tag/v2.4.0). Support for SQLAlchemy 1.4 was dropped from the dialect as part of `databricks-sql-connector==3.0.0`. To continue using the dialect with SQLAlchemy 1.x, you can use `databricks-sql-connector^2.4.0`. - - -## Installation - -To install the dialect and its dependencies: - -```shell -pip install databricks-sql-connector[sqlalchemy] -``` - -If you also plan to use `alembic` you can alternatively run: - -```shell -pip install databricks-sql-connector[alembic] -``` - -## Connection String - -Every SQLAlchemy application that connects to a database needs to use an [Engine](https://docs.sqlalchemy.org/en/20/tutorial/engine.html#tutorial-engine), which you can create by passing a connection string to `create_engine`. The connection string must include these components: - -1. Host -2. HTTP Path for a compute resource -3. API access token -4. Initial catalog for the connection -5. Initial schema for the connection - -**Note: Our dialect is built and tested on workspaces with Unity Catalog enabled. Support for the `hive_metastore` catalog is untested.** - -For example: - -```python -import os -from sqlalchemy import create_engine - -host = os.getenv("DATABRICKS_SERVER_HOSTNAME") -http_path = os.getenv("DATABRICKS_HTTP_PATH") -access_token = os.getenv("DATABRICKS_TOKEN") -catalog = os.getenv("DATABRICKS_CATALOG") -schema = os.getenv("DATABRICKS_SCHEMA") - -engine = create_engine( - f"databricks://token:{access_token}@{host}?http_path={http_path}&catalog={catalog}&schema={schema}" - ) -``` - -## Types - -The [SQLAlchemy type hierarchy](https://docs.sqlalchemy.org/en/20/core/type_basics.html) contains backend-agnostic type implementations (represented in CamelCase) and backend-specific types (represented in UPPERCASE). The majority of SQLAlchemy's [CamelCase](https://docs.sqlalchemy.org/en/20/core/type_basics.html#the-camelcase-datatypes) types are supported. This means that a SQLAlchemy application using these types should "just work" with Databricks. 
- -|SQLAlchemy Type|Databricks SQL Type| -|-|-| -[`BigInteger`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.BigInteger)| [`BIGINT`](https://docs.databricks.com/en/sql/language-manual/data-types/bigint-type.html) -[`LargeBinary`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.LargeBinary)| (not supported)| -[`Boolean`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Boolean)| [`BOOLEAN`](https://docs.databricks.com/en/sql/language-manual/data-types/boolean-type.html) -[`Date`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Date)| [`DATE`](https://docs.databricks.com/en/sql/language-manual/data-types/date-type.html) -[`DateTime`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.DateTime)| [`TIMESTAMP_NTZ`](https://docs.databricks.com/en/sql/language-manual/data-types/timestamp-ntz-type.html)| -[`Double`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Double)| [`DOUBLE`](https://docs.databricks.com/en/sql/language-manual/data-types/double-type.html) -[`Enum`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Enum)| (not supported)| -[`Float`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Float)| [`FLOAT`](https://docs.databricks.com/en/sql/language-manual/data-types/float-type.html) -[`Integer`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Integer)| [`INT`](https://docs.databricks.com/en/sql/language-manual/data-types/int-type.html) -[`Numeric`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Numeric)| [`DECIMAL`](https://docs.databricks.com/en/sql/language-manual/data-types/decimal-type.html)| -[`PickleType`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.PickleType)| (not supported)| -[`SmallInteger`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.SmallInteger)| [`SMALLINT`](https://docs.databricks.com/en/sql/language-manual/data-types/smallint-type.html) -[`String`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.String)| [`STRING`](https://docs.databricks.com/en/sql/language-manual/data-types/string-type.html)| -[`Text`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Text)| [`STRING`](https://docs.databricks.com/en/sql/language-manual/data-types/string-type.html)| -[`Time`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Time)| [`STRING`](https://docs.databricks.com/en/sql/language-manual/data-types/string-type.html)| -[`Unicode`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Unicode)| [`STRING`](https://docs.databricks.com/en/sql/language-manual/data-types/string-type.html)| -[`UnicodeText`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.UnicodeText)| [`STRING`](https://docs.databricks.com/en/sql/language-manual/data-types/string-type.html)| -[`Uuid`](https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.Uuid)| [`STRING`](https://docs.databricks.com/en/sql/language-manual/data-types/string-type.html) - -In addition, the dialect exposes three UPPERCASE SQLAlchemy types which are specific to Databricks: - -- [`databricks.sqlalchemy.TINYINT`](https://docs.databricks.com/en/sql/language-manual/data-types/tinyint-type.html) -- 
[`databricks.sqlalchemy.TIMESTAMP`](https://docs.databricks.com/en/sql/language-manual/data-types/timestamp-type.html) -- [`databricks.sqlalchemy.TIMESTAMP_NTZ`](https://docs.databricks.com/en/sql/language-manual/data-types/timestamp-ntz-type.html) - - -### `LargeBinary()` and `PickleType()` - -Databricks Runtime doesn't currently support binding of binary values in SQL queries, which is a prerequisite for this functionality in SQLAlchemy. - -### `Enum()` and `CHECK` constraints - -Support for `CHECK` constraints is not implemented in this dialect. Support is planned for a future release. - -SQLAlchemy's `Enum()` type depends on `CHECK` constraints and is therefore not yet supported. - -### `DateTime()`, `TIMESTAMP_NTZ()`, and `TIMESTAMP()` - -Databricks Runtime provides two datetime-like types: `TIMESTAMP` which is always timezone-aware and `TIMESTAMP_NTZ` which is timezone-agnostic. Both types can be imported from `databricks.sqlalchemy` and used in your models. - -The SQLAlchemy documentation indicates that `DateTime()` is not timezone-aware by default. So our dialect maps this type to `TIMESTAMP_NTZ()`. In practice, you should never need to use `TIMESTAMP_NTZ()` directly. Just use `DateTime()`. - -If you need your field to be timezone-aware, you can import `TIMESTAMP()` and use it instead. - -_Note that SQLAlchemy documentation suggests that you can declare a `DateTime()` with `timezone=True` on supported backends. However, if you do this with the Databricks dialect, the `timezone` argument will be ignored._ - -```python -from sqlalchemy import Column, DateTime -from databricks.sqlalchemy import TIMESTAMP - -class SomeModel(Base): - some_date_without_timezone = Column(DateTime()) - some_date_with_timezone = Column(TIMESTAMP()) -``` - -### `String()`, `Text()`, `Unicode()`, and `UnicodeText()` - -Databricks Runtime doesn't support length limitations for `STRING` fields. Therefore `String()` or `String(1)` or `String(255)` will all produce identical DDL. Since `Text()`, `Unicode()`, and `UnicodeText()` all use the same underlying type in Databricks SQL, they will generate equivalent DDL. - -### `Time()` - -Databricks Runtime doesn't have a native time-like data type. To implement this type in SQLAlchemy, our dialect stores SQLAlchemy `Time()` values in a `STRING` field. Unlike `DateTime` above, this type can optionally support timezone awareness (since the dialect is in complete control of the strings that we write to the Delta table). - -```python -from sqlalchemy import Column, Time - -class SomeModel(Base): - time_tz = Column(Time(timezone=True)) - time_ntz = Column(Time()) -``` - - -# Usage Notes - -## `Identity()` and `autoincrement` - -Identity and generated value support is currently limited in this dialect. - -When defining models, SQLAlchemy types can accept an [`autoincrement`](https://docs.sqlalchemy.org/en/20/core/metadata.html#sqlalchemy.schema.Column.params.autoincrement) argument. In our dialect, this argument is currently ignored. To create an auto-incrementing field in your model you can pass in an explicit [`Identity()`](https://docs.sqlalchemy.org/en/20/core/defaults.html#identity-ddl) instead. - -Furthermore, in Databricks Runtime, only `BIGINT` fields can be configured to auto-increment. So in SQLAlchemy, you must use the `BigInteger()` type. - -```python -from sqlalchemy import BigInteger, Column, Identity, String - -class SomeModel(Base): - id = Column(BigInteger, Identity()) - value = Column(String()) -``` - -When calling `Base.metadata.create_all()`, the executed DDL will include `GENERATED ALWAYS AS IDENTITY` for the `id` column.
This is useful when using SQLAlchemy to generate tables. However, as of this writing, `Identity()` constructs are not captured when SQLAlchemy reflects a table's metadata (support for this is planned). - -## Parameters - -`databricks-sql-connector` supports two approaches to parameterizing SQL queries: native and inline. Our SQLAlchemy 2.0 dialect always uses the native approach and is therefore limited to DBR 14.2 and above. If you are writing parameterized queries to be executed by SQLAlchemy, you must use the "named" paramstyle (`:param`). Read more about parameterization in `docs/parameters.md`. - -## Usage with pandas - -Use [`pandas.DataFrame.to_sql`](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_sql.html) and [`pandas.read_sql`](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.read_sql.html#pandas.read_sql) to write and read from Databricks SQL. These methods both accept a SQLAlchemy connection to interact with Databricks. - -### Read from Databricks SQL into pandas -```python -from sqlalchemy import create_engine -import pandas as pd - -engine = create_engine("databricks://token:dapi***@***.cloud.databricks.com?http_path=***&catalog=main&schema=test") -with engine.connect() as conn: - # This will read the contents of `main.test.some_table` - df = pd.read_sql("some_table", conn) -``` - -### Write to Databricks SQL from pandas - -```python -from sqlalchemy import create_engine -import pandas as pd - -engine = create_engine("databricks://token:dapi***@***.cloud.databricks.com?http_path=***&catalog=main&schema=test") -squares = [(i, i * i) for i in range(100)] -df = pd.DataFrame(data=squares,columns=['x','x_squared']) - -with engine.connect() as conn: - # This will write the contents of `df` to `main.test.squares` - df.to_sql('squares',conn) -``` - -## [`PrimaryKey()`](https://docs.sqlalchemy.org/en/20/core/constraints.html#sqlalchemy.schema.PrimaryKeyConstraint) and [`ForeignKey()`](https://docs.sqlalchemy.org/en/20/core/constraints.html#defining-foreign-keys) - -Unity Catalog workspaces in Databricks support PRIMARY KEY and FOREIGN KEY constraints. _Note that Databricks Runtime does not enforce the integrity of FOREIGN KEY constraints_. You can establish a primary key by setting `primary_key=True` when defining a column. - -When building `ForeignKey` or `ForeignKeyConstraint` objects, you must specify a `name` for the constraint. - -If your model definition requires a self-referential FOREIGN KEY constraint, you must include `use_alter=True` when defining the relationship. - -```python -from sqlalchemy import Table, Column, ForeignKey, BigInteger, String - -users = Table( - "users", - metadata_obj, - Column("id", BigInteger, primary_key=True), - Column("name", String(), nullable=False), - Column("email", String()), - Column("manager_id", ForeignKey("users.id", name="fk_users_manager_id_x_users_id", use_alter=True)) -) -``` diff --git a/src/databricks/sqlalchemy/README.tests.md b/src/databricks/sqlalchemy/README.tests.md deleted file mode 100644 index 3ed92aba..00000000 --- a/src/databricks/sqlalchemy/README.tests.md +++ /dev/null @@ -1,44 +0,0 @@ -## SQLAlchemy Dialect Compliance Test Suite with Databricks - -The contents of the `test/` directory follow the SQLAlchemy developers' [guidance] for running the reusable dialect compliance test suite. 
Since not every test in the suite is applicable to every dialect, two options are provided to skip tests (both mechanisms are sketched at the end of this section): - -- Any test can be skipped by subclassing its parent class, re-declaring the test case and adding a `pytest.mark.skip` directive. -- Any test that is decorated with a `@requires` decorator can be skipped by marking the indicated requirement as `.closed()` in `requirements.py`. - -We prefer to skip test cases directly with the first method wherever possible. We only mark requirements as `closed()` if there is no easier option to avoid a test failure. This principally occurs in test cases where the same test in the suite is parametrized, and some parameter combinations are conditionally skipped depending on `requirements.py`. If we skip the entire test method, then we skip _all_ permutations, not just the combinations we don't support. - -## Regression, Unsupported, and Future test cases - -We maintain three files of test cases that we import from the SQLAlchemy source code: - -* **`_regression.py`** contains all the test cases that we expect to pass for our dialect. Each one is marked with `pytest.mark.reviewed` to indicate that we've evaluated it for relevance. This file only contains base class declarations. -* **`_unsupported.py`** contains test cases that fail because of missing features in Databricks. We mark them as skipped with a `SkipReason` enumeration. If Databricks comes to support these features, those tests or entire classes can be moved to `_regression.py`. -* **`_future.py`** contains test cases that fail because of missing features in the dialect itself, but which _are_ supported by Databricks generally. We mark them as skipped with a `FutureFeature` enumeration. These are features that have not been prioritised or whose absence does not violate our acceptance criteria. All of these test cases will eventually move to `_regression.py`. - -In some cases, only certain tests in a class should be skipped with a `SkipReason` or `FutureFeature` justification. In those cases, we import the class into `_regression.py`, then import it from there into one or both of `_future.py` and `_unsupported.py`. If a class needs to be "touched" by regression, unsupported, and future, the class will be imported in that order. If an entire class should be skipped, then we do not import it into `_regression.py` at all. - -We maintain `_extra.py` with test cases that depend on SQLAlchemy's reusable dialect test fixtures but which are specific to Databricks (e.g. `TinyIntegerTest`). - -## Running the reusable dialect tests - -``` -poetry shell -cd src/databricks/sqlalchemy/test -python -m pytest test_suite.py --dburi \ - "databricks://token:$access_token@$host?http_path=$http_path&catalog=$catalog&schema=$schema" -``` - -Whatever schema you pass in the `dburi` argument should be empty. Some tests also require the presence of an empty schema named `test_schema`. Note that we plan to implement our own `provision.py` which SQLAlchemy can automatically use to create an empty schema for testing. But for now this is a manual process. - -You can run only reviewed tests by appending `-m "reviewed"` to the test runner invocation. - -You can run only the unreviewed tests by appending `-m "not reviewed"` instead. - -Note that because these tests depend on SQLAlchemy's custom pytest plugin, they are not discoverable by IDE-based test runners like VSCode or PyCharm and must be invoked from a CLI.
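To make the first skip mechanism described above concrete, here is a minimal sketch of re-declaring a suite test case; the class and test names are illustrative assumptions rather than the dialect's actual skip list:

```python
import pytest
from sqlalchemy.testing.suite import DateTimeTest as _DateTimeTest


class DateTimeTest(_DateTimeTest):
    # Re-declare the inherited test and mark it skipped for this dialect
    @pytest.mark.skip(reason="Databricks does not support this operation")
    def test_null_bound_comparison(self):
        pass
```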
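And a sketch of the second mechanism, closing a requirement in `requirements.py` (assuming SQLAlchemy's standard `SuiteRequirements` API; the specific requirement shown is illustrative):

```python
from sqlalchemy.testing import exclusions
from sqlalchemy.testing.requirements import SuiteRequirements


class Requirements(SuiteRequirements):
    @property
    def time_microseconds(self):
        # Closing the requirement skips every test (and parametrization)
        # the suite guards with @testing.requires.time_microseconds.
        return exclusions.closed()
```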
- -## Running local unit and e2e tests - -Apart from the SQLAlchemy reusable suite, we maintain our own unit and e2e tests under the `test_local/` directory. These can be invoked from VSCode or PyCharm since they don't depend on a custom pytest plugin. Due to pytest's configuration lookup order, the `pytest.ini` file that is required for running the reusable dialect tests conflicts with the default pytest configuration used by VSCode and PyCharm and overrides the settings in `pyproject.toml`. To run these local tests, delete or rename `pytest.ini`. - - -[guidance]: https://github.com/sqlalchemy/sqlalchemy/blob/rel_2_0_22/README.dialects.rst diff --git a/src/databricks/sqlalchemy/__init__.py b/src/databricks/sqlalchemy/__init__.py deleted file mode 100644 index 2a17ac3e..00000000 --- a/src/databricks/sqlalchemy/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from databricks.sqlalchemy.base import DatabricksDialect -from databricks.sqlalchemy._types import TINYINT, TIMESTAMP, TIMESTAMP_NTZ - -__all__ = ["TINYINT", "TIMESTAMP", "TIMESTAMP_NTZ"] diff --git a/src/databricks/sqlalchemy/_ddl.py b/src/databricks/sqlalchemy/_ddl.py deleted file mode 100644 index d5d0bf87..00000000 --- a/src/databricks/sqlalchemy/_ddl.py +++ /dev/null @@ -1,100 +0,0 @@ -import re -from sqlalchemy.sql import compiler, sqltypes -import logging - -logger = logging.getLogger(__name__) - - -class DatabricksIdentifierPreparer(compiler.IdentifierPreparer): - """https://docs.databricks.com/en/sql/language-manual/sql-ref-identifiers.html""" - - legal_characters = re.compile(r"^[A-Z0-9_]+$", re.I) - - def __init__(self, dialect): - super().__init__(dialect, initial_quote="`") - - -class DatabricksDDLCompiler(compiler.DDLCompiler): - def post_create_table(self, table): - post = [" USING DELTA"] - if table.comment: - comment = self.sql_compiler.render_literal_value( - table.comment, sqltypes.String() - ) - post.append("COMMENT " + comment) - - post.append("TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'enabled')") - return "\n".join(post) - - def visit_unique_constraint(self, constraint, **kw): - logger.warning("Databricks does not support unique constraints") - pass - - def visit_check_constraint(self, constraint, **kw): - logger.warning("This dialect does not support check constraints") - pass - - def visit_identity_column(self, identity, **kw): - """When configuring an Identity() with Databricks, only the always option is supported. - All other options are ignored. - - Note: IDENTITY columns must always be defined as BIGINT. An exception will be raised if INT is used. - - https://www.databricks.com/blog/2022/08/08/identity-columns-to-generate-surrogate-keys-are-now-available-in-a-lakehouse-near-you.html - """ - text = "GENERATED %s AS IDENTITY" % ( - "ALWAYS" if identity.always else "BY DEFAULT", - ) - return text - - def visit_set_column_comment(self, create, **kw): - return "ALTER TABLE %s ALTER COLUMN %s COMMENT %s" % ( - self.preparer.format_table(create.element.table), - self.preparer.format_column(create.element), - self.sql_compiler.render_literal_value( - create.element.comment, sqltypes.String() - ), - ) - - def visit_drop_column_comment(self, create, **kw): - return "ALTER TABLE %s ALTER COLUMN %s COMMENT ''" % ( - self.preparer.format_table(create.element.table), - self.preparer.format_column(create.element), - ) - - def get_column_specification(self, column, **kwargs): - """ - Emit a log message if a user attempts to set autoincrement=True on a column. - See comments in test_suite.py.
We may implement implicit IDENTITY using this - feature in the future, similar to the Microsoft SQL Server dialect. - """ - if column is column.table._autoincrement_column or column.autoincrement is True: - logger.warning( - "Databricks dialect ignores SQLAlchemy's autoincrement semantics. Use explicit Identity() instead." - ) - - colspec = super().get_column_specification(column, **kwargs) - if column.comment is not None: - literal = self.sql_compiler.render_literal_value( - column.comment, sqltypes.STRINGTYPE - ) - colspec += " COMMENT " + literal - - return colspec - - -class DatabricksStatementCompiler(compiler.SQLCompiler): - def limit_clause(self, select, **kw): - """Identical to the default implementation of SQLCompiler.limit_clause except it writes LIMIT ALL instead of LIMIT -1, - since Databricks SQL doesn't support the latter. - - https://docs.databricks.com/en/sql/language-manual/sql-ref-syntax-qry-select-limit.html - """ - text = "" - if select._limit_clause is not None: - text += "\n LIMIT " + self.process(select._limit_clause, **kw) - if select._offset_clause is not None: - if select._limit_clause is None: - text += "\n LIMIT ALL" - text += " OFFSET " + self.process(select._offset_clause, **kw) - return text diff --git a/src/databricks/sqlalchemy/_parse.py b/src/databricks/sqlalchemy/_parse.py deleted file mode 100644 index 6d38e1e6..00000000 --- a/src/databricks/sqlalchemy/_parse.py +++ /dev/null @@ -1,385 +0,0 @@ -from typing import List, Optional, Dict -import re - -import sqlalchemy -from sqlalchemy.engine import CursorResult -from sqlalchemy.engine.interfaces import ReflectedColumn - -from databricks.sqlalchemy import _types as type_overrides - -""" -This module contains helper functions that can parse the contents -of metadata and exceptions received from DBR. These are mostly just -wrappers around regexes. 
-""" - - -class DatabricksSqlAlchemyParseException(Exception): - pass - - -def _match_table_not_found_string(message: str) -> bool: - """Return True if the message contains a substring indicating that a table was not found""" - - DBR_LTE_12_NOT_FOUND_STRING = "Table or view not found" - DBR_GT_12_NOT_FOUND_STRING = "TABLE_OR_VIEW_NOT_FOUND" - return any( - [ - DBR_LTE_12_NOT_FOUND_STRING in message, - DBR_GT_12_NOT_FOUND_STRING in message, - ] - ) - - -def _describe_table_extended_result_to_dict_list( - result: CursorResult, -) -> List[Dict[str, str]]: - """Transform the CursorResult of DESCRIBE TABLE EXTENDED into a list of Dictionaries""" - - rows_to_return = [] - for row in result.all(): - this_row = {"col_name": row.col_name, "data_type": row.data_type} - rows_to_return.append(this_row) - - return rows_to_return - - -def extract_identifiers_from_string(input_str: str) -> List[str]: - """For a string input resembling (`a`, `b`, `c`) return a list of identifiers ['a', 'b', 'c']""" - - # This matches the valid character list contained in DatabricksIdentifierPreparer - pattern = re.compile(r"`([A-Za-z0-9_]+)`") - matches = pattern.findall(input_str) - return [i for i in matches] - - -def extract_identifier_groups_from_string(input_str: str) -> List[str]: - """For a string input resembling : - - FOREIGN KEY (`pname`, `pid`, `pattr`) REFERENCES `main`.`pysql_sqlalchemy`.`tb1` (`name`, `id`, `attr`) - - Return ['(`pname`, `pid`, `pattr`)', '(`name`, `id`, `attr`)'] - """ - pattern = re.compile(r"\([`A-Za-z0-9_,\s]*\)") - matches = pattern.findall(input_str) - return [i for i in matches] - - -def extract_three_level_identifier_from_constraint_string(input_str: str) -> dict: - """For a string input resembling : - FOREIGN KEY (`parent_user_id`) REFERENCES `main`.`pysql_dialect_compliance`.`users` (`user_id`) - - Return a dict like - { - "catalog": "main", - "schema": "pysql_dialect_compliance", - "table": "users" - } - - Raise a DatabricksSqlAlchemyParseException if a 3L namespace isn't found - """ - pat = re.compile(r"REFERENCES\s+(.*?)\s*\(") - matches = pat.findall(input_str) - - if not matches: - raise DatabricksSqlAlchemyParseException( - "3L namespace not found in constraint string" - ) - - first_match = matches[0] - parts = first_match.split(".") - - def strip_backticks(input: str): - return input.replace("`", "") - - try: - return { - "catalog": strip_backticks(parts[0]), - "schema": strip_backticks(parts[1]), - "table": strip_backticks(parts[2]), - } - except IndexError: - raise DatabricksSqlAlchemyParseException( - "Incomplete 3L namespace found in constraint string: " + ".".join(parts) - ) - - -def _parse_fk_from_constraint_string(constraint_str: str) -> dict: - """Build a dictionary of foreign key constraint information from a constraint string. - - For example: - - ``` - FOREIGN KEY (`pname`, `pid`, `pattr`) REFERENCES `main`.`pysql_dialect_compliance`.`tb1` (`name`, `id`, `attr`) - ``` - - Return a dictionary like: - - ``` - { - "constrained_columns": ["pname", "pid", "pattr"], - "referred_table": "tb1", - "referred_schema": "pysql_dialect_compliance", - "referred_columns": ["name", "id", "attr"] - } - ``` - - Note that the constraint name doesn't appear in the constraint string so it will not - be present in the output of this function. 
- """ - - referred_table_dict = extract_three_level_identifier_from_constraint_string( - constraint_str - ) - referred_table = referred_table_dict["table"] - referred_schema = referred_table_dict["schema"] - - # _extracted is a tuple of two lists of identifiers - # we assume the first immediately follows "FOREIGN KEY" and the second - # immediately follows REFERENCES $tableName - _extracted = extract_identifier_groups_from_string(constraint_str) - constrained_columns_str, referred_columns_str = ( - _extracted[0], - _extracted[1], - ) - - constrained_columns = extract_identifiers_from_string(constrained_columns_str) - referred_columns = extract_identifiers_from_string(referred_columns_str) - - return { - "constrained_columns": constrained_columns, - "referred_table": referred_table, - "referred_columns": referred_columns, - "referred_schema": referred_schema, - } - - -def build_fk_dict( - fk_name: str, fk_constraint_string: str, schema_name: Optional[str] -) -> dict: - """ - Given a foriegn key name and a foreign key constraint string, return a dictionary - with the following keys: - - name - the name of the foreign key constraint - constrained_columns - a list of column names that make up the foreign key - referred_table - the name of the table that the foreign key references - referred_columns - a list of column names that are referenced by the foreign key - referred_schema - the name of the schema that the foreign key references. - - referred schema will be None if the schema_name argument is None. - This is required by SQLAlchey's ComponentReflectionTest::test_get_foreign_keys - """ - - # The foreign key name is not contained in the constraint string so we - # need to add it manually - base_fk_dict = _parse_fk_from_constraint_string(fk_constraint_string) - - if not schema_name: - schema_override_dict = dict(referred_schema=None) - else: - schema_override_dict = {} - - # mypy doesn't like this method of conditionally adding a key to a dictionary - # while keeping everything immutable - complete_foreign_key_dict = { - "name": fk_name, - **base_fk_dict, - **schema_override_dict, # type: ignore - } - - return complete_foreign_key_dict - - -def _parse_pk_columns_from_constraint_string(constraint_str: str) -> List[str]: - """Build a list of constrained columns from a constraint string returned by DESCRIBE TABLE EXTENDED - - For example: - - PRIMARY KEY (`id`, `name`, `email_address`) - - Returns a list like - - ["id", "name", "email_address"] - """ - - _extracted = extract_identifiers_from_string(constraint_str) - - return _extracted - - -def build_pk_dict(pk_name: str, pk_constraint_string: str) -> dict: - """Given a primary key name and a primary key constraint string, return a dictionary - with the following keys: - - constrained_columns - A list of string column names that make up the primary key - - name - The name of the primary key constraint - """ - - constrained_columns = _parse_pk_columns_from_constraint_string(pk_constraint_string) - - return {"constrained_columns": constrained_columns, "name": pk_name} - - -def match_dte_rows_by_value(dte_output: List[Dict[str, str]], match: str) -> List[dict]: - """Return a list of dictionaries containing only the col_name:data_type pairs where the `data_type` - value contains the match argument. - - Today, DESCRIBE TABLE EXTENDED doesn't give a deterministic name to the fields - a constraint will be found in its output. So we cycle through its output looking - for a match. This is brittle. 
We could optionally make two roundtrips: the first - would query information_schema for the name of the constraint on this table, and - a second to DESCRIBE TABLE EXTENDED, at which point we would know the name of the - constraint. But for now we instead assume that Python list comprehension is faster - than a network roundtrip. - """ - - output_rows = [] - - for row_dict in dte_output: - if match in row_dict["data_type"]: - output_rows.append(row_dict) - - return output_rows - - -def match_dte_rows_by_key(dte_output: List[Dict[str, str]], match: str) -> List[dict]: - """Return a list of dictionaries containing only the col_name:data_type pairs where the `col_name` - value contains the match argument. - """ - - output_rows = [] - - for row_dict in dte_output: - if match in row_dict["col_name"]: - output_rows.append(row_dict) - - return output_rows - - -def get_fk_strings_from_dte_output(dte_output: List[Dict[str, str]]) -> List[dict]: - """If the DESCRIBE TABLE EXTENDED output contains foreign key constraints, return a list of dictionaries, - one dictionary per defined constraint. - """ - - output = match_dte_rows_by_value(dte_output, "FOREIGN KEY") - - return output - - -def get_pk_strings_from_dte_output( - dte_output: List[Dict[str, str]] -) -> Optional[List[dict]]: - """If the DESCRIBE TABLE EXTENDED output contains primary key constraints, return a list of dictionaries, - one dictionary per defined constraint. - - Returns an empty list if no primary key constraints are found. - """ - - output = match_dte_rows_by_value(dte_output, "PRIMARY KEY") - - return output - - -def get_comment_from_dte_output(dte_output: List[Dict[str, str]]) -> Optional[str]: - """Returns the value of the first "Comment" col_name data in dte_output""" - output = match_dte_rows_by_key(dte_output, "Comment") - if not output: - return None - else: - return output[0]["data_type"] - - -# The keys of this dictionary are the values we expect to see in a -# TGetColumnsRequest's .TYPE_NAME attribute. -# These are enumerated in ttypes.py as class TTypeId. -# TODO: confirm that all types in TTypeId are included here. -GET_COLUMNS_TYPE_MAP = { - "boolean": sqlalchemy.types.Boolean, - "smallint": sqlalchemy.types.SmallInteger, - "tinyint": type_overrides.TINYINT, - "int": sqlalchemy.types.Integer, - "bigint": sqlalchemy.types.BigInteger, - "float": sqlalchemy.types.Float, - "double": sqlalchemy.types.Float, - "string": sqlalchemy.types.String, - "varchar": sqlalchemy.types.String, - "char": sqlalchemy.types.String, - "binary": sqlalchemy.types.String, - "array": sqlalchemy.types.String, - "map": sqlalchemy.types.String, - "struct": sqlalchemy.types.String, - "uniontype": sqlalchemy.types.String, - "decimal": sqlalchemy.types.Numeric, - "timestamp": type_overrides.TIMESTAMP, - "timestamp_ntz": type_overrides.TIMESTAMP_NTZ, - "date": sqlalchemy.types.Date, -} - - -def parse_numeric_type_precision_and_scale(type_name_str): - """Return an instantiated sqlalchemy Numeric() type that preserves the precision and scale indicated - in the output from TGetColumnsRequest. - - type_name_str - The value of TGetColumnsReq.TYPE_NAME.
- - If type_name_str is "DECIMAL(18,5)" returns sqlalchemy.types.Numeric(18,5) - """ - - pattern = re.compile(r"DECIMAL\((\d+,\d+)\)") - match = re.search(pattern, type_name_str) - precision_and_scale = match.group(1) - precision, scale = tuple(precision_and_scale.split(",")) - - return sqlalchemy.types.Numeric(int(precision), int(scale)) - - -def parse_column_info_from_tgetcolumnsresponse(thrift_resp_row) -> ReflectedColumn: - """Returns a dictionary of the ReflectedColumn schema parsed from - a single row of the result of a TGetColumnsRequest thrift RPC - """ - - pat = re.compile(r"^\w+") - - # This method assumes a valid TYPE_NAME field in the response. - # TODO: add error handling in case TGetColumnsResponse format changes - - _raw_col_type = re.search(pat, thrift_resp_row.TYPE_NAME).group(0).lower() # type: ignore - _col_type = GET_COLUMNS_TYPE_MAP[_raw_col_type] - - if _raw_col_type == "decimal": - final_col_type = parse_numeric_type_precision_and_scale( - thrift_resp_row.TYPE_NAME - ) - else: - final_col_type = _col_type - - # See comments about autoincrement in test_suite.py - # Since Databricks SQL doesn't currently support inline AUTOINCREMENT declarations, - # the autoincrement must be manually declared with an Identity() construct in SQLAlchemy. - # Other dialects can perform this extra Identity() step automatically. But that is not - # implemented in the Databricks dialect right now. So autoincrement is currently always False. - # It's not clear what IS_AUTO_INCREMENT in the thrift response actually reflects or whether - # it ever returns a `YES`. - - # Per the guidance in SQLAlchemy's docstrings, we prefer to not even include an autoincrement - # key in this dictionary. - this_column = { - "name": thrift_resp_row.COLUMN_NAME, - "type": final_col_type, - "nullable": bool(thrift_resp_row.NULLABLE), - "default": thrift_resp_row.COLUMN_DEF, - "comment": thrift_resp_row.REMARKS or None, - } - - # TODO: figure out how to return sqlalchemy.interfaces in a way that mypy respects - return this_column # type: ignore diff --git a/src/databricks/sqlalchemy/_types.py b/src/databricks/sqlalchemy/_types.py deleted file mode 100644 index 5fc14a70..00000000 --- a/src/databricks/sqlalchemy/_types.py +++ /dev/null @@ -1,323 +0,0 @@ -from datetime import datetime, time, timezone -from itertools import product -from typing import Any, Union, Optional - -import sqlalchemy -from sqlalchemy.engine.interfaces import Dialect -from sqlalchemy.ext.compiler import compiles - -from databricks.sql.utils import ParamEscaper - - -def process_literal_param_hack(value: Any): - """This method is supposed to accept a Python type and return a string representation of that type. - But due to some weirdness in the way SQLAlchemy's literal rendering works, we have to return - the value itself because, by the time it reaches our custom type code, it's already been converted - into a string. - - TimeTest - DateTimeTest - DateTimeTZTest - - This dynamic only seems to affect the literal rendering of datetime and time objects. - - All three fail without this hack in place. I'm not sure why. But it works.
- """ - return value - - -@compiles(sqlalchemy.types.Enum, "databricks") -@compiles(sqlalchemy.types.String, "databricks") -@compiles(sqlalchemy.types.Text, "databricks") -@compiles(sqlalchemy.types.Time, "databricks") -@compiles(sqlalchemy.types.Unicode, "databricks") -@compiles(sqlalchemy.types.UnicodeText, "databricks") -@compiles(sqlalchemy.types.Uuid, "databricks") -def compile_string_databricks(type_, compiler, **kw): - """ - We override the default compilation for Enum(), String(), Text(), and Time() because SQLAlchemy - defaults to incompatible / abnormal compiled names - - Enum -> VARCHAR - String -> VARCHAR[LENGTH] - Text -> VARCHAR[LENGTH] - Time -> TIME - Unicode -> VARCHAR[LENGTH] - UnicodeText -> TEXT - Uuid -> CHAR[32] - - But all of these types will be compiled to STRING in Databricks SQL - """ - return "STRING" - - -@compiles(sqlalchemy.types.Integer, "databricks") -def compile_integer_databricks(type_, compiler, **kw): - """ - We need to override the default Integer compilation rendering because Databricks uses "INT" instead of "INTEGER" - """ - return "INT" - - -@compiles(sqlalchemy.types.LargeBinary, "databricks") -def compile_binary_databricks(type_, compiler, **kw): - """ - We need to override the default LargeBinary compilation rendering because Databricks uses "BINARY" instead of "BLOB" - """ - return "BINARY" - - -@compiles(sqlalchemy.types.Numeric, "databricks") -def compile_numeric_databricks(type_, compiler, **kw): - """ - We need to override the default Numeric compilation rendering because Databricks uses "DECIMAL" instead of "NUMERIC" - - The built-in visit_DECIMAL behaviour captures the precision and scale. Here we're just mapping calls to compile Numeric - to the SQLAlchemy Decimal() implementation - """ - return compiler.visit_DECIMAL(type_, **kw) - - -@compiles(sqlalchemy.types.DateTime, "databricks") -def compile_datetime_databricks(type_, compiler, **kw): - """ - We need to override the default DateTime compilation rendering because Databricks uses "TIMESTAMP_NTZ" instead of "DATETIME" - """ - return "TIMESTAMP_NTZ" - - -@compiles(sqlalchemy.types.ARRAY, "databricks") -def compile_array_databricks(type_, compiler, **kw): - """ - SQLAlchemy's default ARRAY can't compile as it's only implemented for Postgresql. - The Postgres implementation works for Databricks SQL, so we duplicate that here. - - :type_: - This is an instance of sqlalchemy.types.ARRAY which always includes an item_type attribute - which is itself an instance of TypeEngine - - https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.ARRAY - """ - - inner = compiler.process(type_.item_type, **kw) - - return f"ARRAY<{inner}>" - - -class TIMESTAMP_NTZ(sqlalchemy.types.TypeDecorator): - """Represents values comprising values of fields year, month, day, hour, minute, and second. - All operations are performed without taking any time zone into account. - - Our dialect maps sqlalchemy.types.DateTime() to this type, which means that all DateTime() - objects are stored without tzinfo. To read and write timezone-aware datetimes use - databricks.sql.TIMESTAMP instead. 
- - https://docs.databricks.com/en/sql/language-manual/data-types/timestamp-ntz-type.html - """ - - impl = sqlalchemy.types.DateTime - - cache_ok = True - - def process_result_value(self, value: Union[None, datetime], dialect): - if value is None: - return None - return value.replace(tzinfo=None) - - -class TIMESTAMP(sqlalchemy.types.TypeDecorator): - """Represents values comprising values of fields year, month, day, hour, minute, and second, - with the session local time-zone. - - Our dialect maps sqlalchemy.types.DateTime() to TIMESTAMP_NTZ, which means that all DateTime() - objects are stored without tzinfo. To read and write timezone-aware datetimes use - this type instead. - - ```python - # This won't work - `Column(sqlalchemy.DateTime(timezone=True))` - - # But this does - `Column(TIMESTAMP)` - ``` - - https://docs.databricks.com/en/sql/language-manual/data-types/timestamp-type.html - """ - - impl = sqlalchemy.types.DateTime - - cache_ok = True - - def process_result_value(self, value: Union[None, datetime], dialect): - if value is None: - return None - - if not value.tzinfo: - return value.replace(tzinfo=timezone.utc) - return value - - def process_bind_param( - self, value: Union[datetime, None], dialect - ) -> Optional[datetime]: - """pysql can pass datetime.datetime() objects directly to DBR""" - return value - - def process_literal_param( - self, value: Union[datetime, None], dialect: Dialect - ) -> str: - """ """ - return process_literal_param_hack(value) - - -@compiles(TIMESTAMP, "databricks") -def compile_timestamp_databricks(type_, compiler, **kw): - """ - Render "TIMESTAMP" for this dialect-specific timezone-aware type. Plain DateTime() columns - are rendered as "TIMESTAMP_NTZ" by compile_datetime_databricks above. - """ - return "TIMESTAMP" - - -class DatabricksTimeType(sqlalchemy.types.TypeDecorator): - """Databricks has no native TIME type. So we store it as a string.""" - - impl = sqlalchemy.types.Time - cache_ok = True - - BASE_FMT = "%H:%M:%S" - MICROSEC_PART = ".%f" - TIMEZONE_PART = "%z" - - def _generate_fmt_string(self, ms: bool, tz: bool) -> str: - """Return a format string for datetime.strptime() that includes or excludes microseconds and timezone.""" - _ = lambda x, y: x if y else "" - return f"{self.BASE_FMT}{_(self.MICROSEC_PART,ms)}{_(self.TIMEZONE_PART,tz)}" - - @property - def allowed_fmt_strings(self): - """Time strings can be read with or without microseconds and with or without a timezone.""" - - if not hasattr(self, "_allowed_fmt_strings"): - ms_switch = tz_switch = [True, False] - self._allowed_fmt_strings = [ - self._generate_fmt_string(x, y) - for x, y in product(ms_switch, tz_switch) - ] - - return self._allowed_fmt_strings - - def _parse_result_string(self, value: str) -> time: - """Parse a string into a time object.
Try all allowed formats until one works.""" - for fmt in self.allowed_fmt_strings: - try: - # We use timetz() here because we want to preserve the timezone information - # Calling .time() will strip the timezone information - return datetime.strptime(value, fmt).timetz() - except ValueError: - pass - - raise ValueError(f"Could not parse time string {value}") - - def _determine_fmt_string(self, value: time) -> str: - """Determine which format string to use to render a time object as a string.""" - ms_bool = value.microsecond > 0 - tz_bool = value.tzinfo is not None - return self._generate_fmt_string(ms_bool, tz_bool) - - def process_bind_param(self, value: Union[time, None], dialect) -> Union[None, str]: - """Values sent to the database are converted to %H:%M:%S strings.""" - if value is None: - return None - fmt_string = self._determine_fmt_string(value) - return value.strftime(fmt_string) - - # mypy doesn't like this workaround because TypeEngine wants process_literal_param to return a string - def process_literal_param(self, value, dialect) -> time: # type: ignore - """ """ - return process_literal_param_hack(value) - - def process_result_value( - self, value: Union[None, str], dialect - ) -> Union[time, None]: - """Values received from the database are parsed into datetime.time() objects""" - if value is None: - return None - - return self._parse_result_string(value) - - -class DatabricksStringType(sqlalchemy.types.TypeDecorator): - """We have to implement our own String() type because SQLAlchemy's default implementation - wants to escape single-quotes with a doubled single-quote. Databricks uses a backslash for - escaping of literal strings. And SQLAlchemy's default escaping breaks Databricks SQL. - """ - - impl = sqlalchemy.types.String - cache_ok = True - pe = ParamEscaper() - - def process_literal_param(self, value, dialect) -> str: - """SQLAlchemy's default string escaping for backslashes doesn't work for Databricks. The logic here - implements the same behaviour as our legacy inline escaping logic. - """ - - return self.pe.escape_string(value) - - def literal_processor(self, dialect): - """We manually override this method to prevent further processing of the string literal beyond - what happens in the process_literal_param() method. - - The SQLAlchemy docs _specifically_ say to not override this method. - - It appears that any processing that happens from TypeEngine.process_literal_param happens _before_ - and _in addition to_ whatever the class's impl.literal_processor() method does. The String.literal_processor() - method performs a string replacement that doubles any single-quote in the contained string. This raises a syntax - error in Databricks. And it's not necessary because ParamEscaper() already implements all the escaping we need. - - We should consider opening an issue on the SQLAlchemy project to see if I'm using it wrong. - - See type_api.py::TypeEngine.literal_processor: - - ```python - def process(value: Any) -> str: - return fixed_impl_processor( - fixed_process_literal_param(value, dialect) - ) - ``` - - That call to fixed_impl_processor wraps the result of fixed_process_literal_param (which is the - process_literal_param defined in our Databricks dialect). - - https://docs.sqlalchemy.org/en/20/core/custom_types.html#sqlalchemy.types.TypeDecorator.literal_processor - """ - - def process(value): - """This is a copy of the default String.literal_processor() method but stripping away - its double-escaping behaviour for single-quotes.
- """ - - _step1 = self.process_literal_param(value, dialect="databricks") - if dialect.identifier_preparer._double_percents: - _step2 = _step1.replace("%", "%%") - else: - _step2 = _step1 - - return "%s" % _step2 - - return process - - -class TINYINT(sqlalchemy.types.TypeDecorator): - """Represents 1-byte signed integers - - Acts like a sqlalchemy SmallInteger() in Python but writes to a TINYINT field in Databricks - - https://docs.databricks.com/en/sql/language-manual/data-types/tinyint-type.html - """ - - impl = sqlalchemy.types.SmallInteger - cache_ok = True - - -@compiles(TINYINT, "databricks") -def compile_tinyint(type_, compiler, **kw): - return "TINYINT" diff --git a/src/databricks/sqlalchemy/base.py b/src/databricks/sqlalchemy/base.py deleted file mode 100644 index 9148de7f..00000000 --- a/src/databricks/sqlalchemy/base.py +++ /dev/null @@ -1,436 +0,0 @@ -from typing import Any, List, Optional, Dict, Union - -import databricks.sqlalchemy._ddl as dialect_ddl_impl -import databricks.sqlalchemy._types as dialect_type_impl -from databricks import sql -from databricks.sqlalchemy._parse import ( - _describe_table_extended_result_to_dict_list, - _match_table_not_found_string, - build_fk_dict, - build_pk_dict, - get_fk_strings_from_dte_output, - get_pk_strings_from_dte_output, - get_comment_from_dte_output, - parse_column_info_from_tgetcolumnsresponse, -) - -import sqlalchemy -from sqlalchemy import DDL, event -from sqlalchemy.engine import Connection, Engine, default, reflection -from sqlalchemy.engine.interfaces import ( - ReflectedForeignKeyConstraint, - ReflectedPrimaryKeyConstraint, - ReflectedColumn, - ReflectedTableComment, -) -from sqlalchemy.engine.reflection import ReflectionDefaults -from sqlalchemy.exc import DatabaseError, SQLAlchemyError - -try: - import alembic -except ImportError: - pass -else: - from alembic.ddl import DefaultImpl - - class DatabricksImpl(DefaultImpl): - __dialect__ = "databricks" - - -import logging - -logger = logging.getLogger(__name__) - - -class DatabricksDialect(default.DefaultDialect): - """This dialect implements only those methods required to pass our e2e tests""" - - # See sqlalchemy.engine.interfaces for descriptions of each of these properties - name: str = "databricks" - driver: str = "databricks" - default_schema_name: str = "default" - preparer = dialect_ddl_impl.DatabricksIdentifierPreparer # type: ignore - ddl_compiler = dialect_ddl_impl.DatabricksDDLCompiler - statement_compiler = dialect_ddl_impl.DatabricksStatementCompiler - supports_statement_cache: bool = True - supports_multivalues_insert: bool = True - supports_native_decimal: bool = True - supports_sane_rowcount: bool = False - non_native_boolean_check_constraint: bool = False - supports_identity_columns: bool = True - supports_schemas: bool = True - default_paramstyle: str = "named" - div_is_floordiv: bool = False - supports_default_values: bool = False - supports_server_side_cursors: bool = False - supports_sequences: bool = False - supports_native_boolean: bool = True - - colspecs = { - sqlalchemy.types.DateTime: dialect_type_impl.TIMESTAMP_NTZ, - sqlalchemy.types.Time: dialect_type_impl.DatabricksTimeType, - sqlalchemy.types.String: dialect_type_impl.DatabricksStringType, - } - - # SQLAlchemy requires that a table with no primary key - # constraint return a dictionary that looks like this. - EMPTY_PK: Dict[str, Any] = {"constrained_columns": [], "name": None} - - # SQLAlchemy requires that a table with no foreign keys - # defined return an empty list. Same for indexes. 
- EMPTY_FK: List - EMPTY_INDEX: List - EMPTY_FK = EMPTY_INDEX = [] - - @classmethod - def import_dbapi(cls): - return sql - - def _force_paramstyle_to_native_mode(self): - """This method can be removed after databricks-sql-connector wholly switches to NATIVE ParamApproach. - - This is a hack to trick SQLAlchemy into using a different paramstyle - than the one declared by this module in src/databricks/sql/__init__.py - - This method is called _after_ the dialect has been initialised, which is important because otherwise - our users would need to include a `paramstyle` argument in their SQLAlchemy connection string. - - This dialect is written to support NATIVE queries. Although the INLINE approach can technically work, - the same behaviour can be achieved within SQLAlchemy itself using its literal_processor methods. - """ - - self.paramstyle = self.default_paramstyle - - def create_connect_args(self, url): - # TODO: can schema be provided after HOST? - # Expected URI format is: databricks+thrift://token:dapi***@***.cloud.databricks.com?http_path=/sql/*** - - kwargs = { - "server_hostname": url.host, - "access_token": url.password, - "http_path": url.query.get("http_path"), - "catalog": url.query.get("catalog"), - "schema": url.query.get("schema"), - "use_inline_params": False, - } - - self.schema = kwargs["schema"] - self.catalog = kwargs["catalog"] - - self._force_paramstyle_to_native_mode() - - return [], kwargs - - def get_columns( - self, connection, table_name, schema=None, **kwargs - ) -> List[ReflectedColumn]: - """Return information about columns in `table_name`.""" - - with self.get_connection_cursor(connection) as cur: - resp = cur.columns( - catalog_name=self.catalog, - schema_name=schema or self.schema, - table_name=table_name, - ).fetchall() - - if not resp: - # TGetColumnsRequest will not raise an exception if passed a table that doesn't exist - # But Databricks supports tables with no columns. So if the result is an empty list, - # we need to check if the table exists (and raise an exception if not) or simply return - # an empty list. - self._describe_table_extended( - connection, - table_name, - self.catalog, - schema or self.schema, - expect_result=False, - ) - return resp - columns = [] - for col in resp: - row_dict = parse_column_info_from_tgetcolumnsresponse(col) - columns.append(row_dict) - - return columns - - def _describe_table_extended( - self, - connection: Connection, - table_name: str, - catalog_name: Optional[str] = None, - schema_name: Optional[str] = None, - expect_result=True, - ) -> Union[List[Dict[str, str]], None]: - """Run DESCRIBE TABLE EXTENDED on a table and return a list of dictionaries of the result. - - This method is the fastest way to check for the presence of a table in a schema. - - If expect_result is False, this method returns None as the output dict isn't required. - - Raises NoSuchTableError if the table is not present in the schema. - """ - - _target_catalog = catalog_name or self.catalog - _target_schema = schema_name or self.schema - _target = f"`{_target_catalog}`.`{_target_schema}`.`{table_name}`" - - # sql injection risk? 
- # DESCRIBE TABLE EXTENDED in DBR doesn't support parameterised inputs :( - stmt = DDL(f"DESCRIBE TABLE EXTENDED {_target}") - - try: - result = connection.execute(stmt) - except DatabaseError as e: - if _match_table_not_found_string(str(e)): - raise sqlalchemy.exc.NoSuchTableError( - f"No such table {table_name}" - ) from e - raise e - - if not expect_result: - return None - - fmt_result = _describe_table_extended_result_to_dict_list(result) - return fmt_result - - @reflection.cache - def get_pk_constraint( - self, - connection, - table_name: str, - schema: Optional[str] = None, - **kw: Any, - ) -> ReflectedPrimaryKeyConstraint: - """Fetch information about the primary key constraint on table_name. - - Returns a dictionary with these keys: - constrained_columns - a list of column names that make up the primary key. Results is an empty list - if no PRIMARY KEY is defined. - - name - the name of the primary key constraint - """ - - result = self._describe_table_extended( - connection=connection, - table_name=table_name, - schema_name=schema, - ) - - # Type ignore is because mypy knows that self._describe_table_extended *can* - # return None (even though it never will since expect_result defaults to True) - raw_pk_constraints: List = get_pk_strings_from_dte_output(result) # type: ignore - if not any(raw_pk_constraints): - return self.EMPTY_PK # type: ignore - - if len(raw_pk_constraints) > 1: - logger.warning( - "Found more than one primary key constraint in DESCRIBE TABLE EXTENDED output. " - "This is unexpected. Please report this as a bug. " - "Only the first primary key constraint will be returned." - ) - - first_pk_constraint = raw_pk_constraints[0] - pk_name = first_pk_constraint.get("col_name") - pk_constraint_string = first_pk_constraint.get("data_type") - - # TODO: figure out how to return sqlalchemy.interfaces in a way that mypy respects - return build_pk_dict(pk_name, pk_constraint_string) # type: ignore - - def get_foreign_keys( - self, connection, table_name, schema=None, **kw - ) -> List[ReflectedForeignKeyConstraint]: - """Return information about foreign_keys in `table_name`.""" - - result = self._describe_table_extended( - connection=connection, - table_name=table_name, - schema_name=schema, - ) - - # Type ignore is because mypy knows that self._describe_table_extended *can* - # return None (even though it never will since expect_result defaults to True) - raw_fk_constraints: List = get_fk_strings_from_dte_output(result) # type: ignore - - if not any(raw_fk_constraints): - return self.EMPTY_FK - - fk_constraints = [] - for constraint_dict in raw_fk_constraints: - fk_name = constraint_dict.get("col_name") - fk_constraint_string = constraint_dict.get("data_type") - this_constraint_dict = build_fk_dict( - fk_name, fk_constraint_string, schema_name=schema - ) - fk_constraints.append(this_constraint_dict) - - # TODO: figure out how to return sqlalchemy.interfaces in a way that mypy respects - return fk_constraints # type: ignore - - def get_indexes(self, connection, table_name, schema=None, **kw): - """SQLAlchemy requires this method. 
Databricks doesn't support indexes.""" - return self.EMPTY_INDEX - - @reflection.cache - def get_table_names(self, connection: Connection, schema=None, **kwargs): - """Return a list of tables in the current schema.""" - - _target_catalog = self.catalog - _target_schema = schema or self.schema - _target = f"`{_target_catalog}`.`{_target_schema}`" - - stmt = DDL(f"SHOW TABLES FROM {_target}") - - tables_result = connection.execute(stmt).all() - views_result = self.get_view_names(connection=connection, schema=schema) - - # In Databricks, SHOW TABLES FROM returns both tables and views. - # Potential optimisation: rewrite this to instead query information_schema - tables_minus_views = [ - row.tableName for row in tables_result if row.tableName not in views_result - ] - - return tables_minus_views - - @reflection.cache - def get_view_names( - self, - connection, - schema=None, - only_materialized=False, - only_temp=False, - **kwargs, - ) -> List[str]: - """Returns a list of string view names contained in the schema, if any.""" - - _target_catalog = self.catalog - _target_schema = schema or self.schema - _target = f"`{_target_catalog}`.`{_target_schema}`" - - stmt = DDL(f"SHOW VIEWS FROM {_target}") - result = connection.execute(stmt).all() - - return [ - row.viewName - for row in result - if (not only_materialized or row.isMaterialized) - and (not only_temp or row.isTemporary) - ] - - @reflection.cache - def get_materialized_view_names( - self, connection: Connection, schema: Optional[str] = None, **kw: Any - ) -> List[str]: - """A wrapper around get_view_names that fetches only the names of materialized views""" - return self.get_view_names(connection, schema, only_materialized=True) - - @reflection.cache - def get_temp_view_names( - self, connection: Connection, schema: Optional[str] = None, **kw: Any - ) -> List[str]: - """A wrapper around get_view_names that fetches only the names of temporary views""" - return self.get_view_names(connection, schema, only_temp=True) - - def do_rollback(self, dbapi_connection): - # Databricks SQL Does not support transactions - pass - - @reflection.cache - def has_table( - self, connection, table_name, schema=None, catalog=None, **kwargs - ) -> bool: - """For internal dialect use, check the existence of a particular table - or view in the database. 
- """ - - try: - self._describe_table_extended( - connection=connection, - table_name=table_name, - catalog_name=catalog, - schema_name=schema, - ) - return True - except sqlalchemy.exc.NoSuchTableError as e: - return False - - def get_connection_cursor(self, connection): - """Added for backwards compatibility with 1.3.x""" - if hasattr(connection, "_dbapi_connection"): - return connection._dbapi_connection.dbapi_connection.cursor() - elif hasattr(connection, "raw_connection"): - return connection.raw_connection().cursor() - elif hasattr(connection, "connection"): - return connection.connection.cursor() - - raise SQLAlchemyError( - "Databricks dialect can't obtain a cursor context manager from the dbapi" - ) - - @reflection.cache - def get_schema_names(self, connection, **kw): - """Return a list of all schema names available in the database.""" - stmt = DDL("SHOW SCHEMAS") - result = connection.execute(stmt) - schema_list = [row[0] for row in result] - return schema_list - - @reflection.cache - def get_table_comment( - self, - connection: Connection, - table_name: str, - schema: Optional[str] = None, - **kw: Any, - ) -> ReflectedTableComment: - result = self._describe_table_extended( - connection=connection, - table_name=table_name, - schema_name=schema, - ) - - if result is None: - return ReflectionDefaults.table_comment() - - comment = get_comment_from_dte_output(result) - - if comment: - return dict(text=comment) - else: - return ReflectionDefaults.table_comment() - - -@event.listens_for(Engine, "do_connect") -def receive_do_connect(dialect, conn_rec, cargs, cparams): - """Helpful for DS on traffic from clients using SQLAlchemy in particular""" - - # Ignore connect invocations that don't use our dialect - if not dialect.name == "databricks": - return - - ua = cparams.get("_user_agent_entry", "") - - def add_sqla_tag_if_not_present(val: str): - if not val: - output = "sqlalchemy" - - if val and "sqlalchemy" in val: - output = val - - else: - output = f"sqlalchemy + {val}" - - return output - - cparams["_user_agent_entry"] = add_sqla_tag_if_not_present(ua) - - if sqlalchemy.__version__.startswith("1.3"): - # SQLAlchemy 1.3.x fails to parse the http_path, catalog, and schema from our connection string - # These should be passed in as connect_args when building the Engine - - if "schema" in cparams: - dialect.schema = cparams["schema"] - - if "catalog" in cparams: - dialect.catalog = cparams["catalog"] diff --git a/src/databricks/sqlalchemy/py.typed b/src/databricks/sqlalchemy/py.typed deleted file mode 100755 index e69de29b..00000000 diff --git a/src/databricks/sqlalchemy/requirements.py b/src/databricks/sqlalchemy/requirements.py deleted file mode 100644 index 5c70c029..00000000 --- a/src/databricks/sqlalchemy/requirements.py +++ /dev/null @@ -1,249 +0,0 @@ -""" -The complete list of requirements is provided by SQLAlchemy here: - -https://github.com/sqlalchemy/sqlalchemy/blob/main/lib/sqlalchemy/testing/requirements.py - -When SQLAlchemy skips a test because a requirement is closed() it gives a generic skip message. -To make these failures more actionable, we only define requirements in this file that we wish to -force to be open(). If a test should be skipped on Databricks, it will be specifically marked skip -in test_suite.py with a Databricks-specific reason. - -See the special note about the array_type exclusion below. -See special note about has_temp_table exclusion below. 
-""" - -import sqlalchemy.testing.requirements -import sqlalchemy.testing.exclusions - - -class Requirements(sqlalchemy.testing.requirements.SuiteRequirements): - @property - def date_historic(self): - """target dialect supports representation of Python - datetime.datetime() objects with historic (pre 1970) values.""" - - return sqlalchemy.testing.exclusions.open() - - @property - def datetime_historic(self): - """target dialect supports representation of Python - datetime.datetime() objects with historic (pre 1970) values.""" - - return sqlalchemy.testing.exclusions.open() - - @property - def datetime_literals(self): - """target dialect supports rendering of a date, time, or datetime as a - literal string, e.g. via the TypeEngine.literal_processor() method. - - """ - - return sqlalchemy.testing.exclusions.open() - - @property - def timestamp_microseconds(self): - """target dialect supports representation of Python - datetime.datetime() with microsecond objects but only - if TIMESTAMP is used.""" - - return sqlalchemy.testing.exclusions.open() - - @property - def time_microseconds(self): - """target dialect supports representation of Python - datetime.time() with microsecond objects. - - This requirement declaration isn't needed but I've included it here for completeness. - Since Databricks doesn't have a TIME type, SQLAlchemy will compile Time() columns - as STRING Databricks data types. And we use a custom time type to render those strings - between str() and time.time() representations. Therefore we can store _any_ precision - that SQLAlchemy needs. The time_microseconds requirement defaults to ON for all dialects - except mssql, mysql, mariadb, and oracle. - """ - - return sqlalchemy.testing.exclusions.open() - - @property - def infinity_floats(self): - """The Float type can persist and load float('inf'), float('-inf').""" - - return sqlalchemy.testing.exclusions.open() - - @property - def precision_numerics_retains_significant_digits(self): - """A precision numeric type will return empty significant digits, - i.e. a value such as 10.000 will come back in Decimal form with - the .000 maintained.""" - - return sqlalchemy.testing.exclusions.open() - - @property - def precision_numerics_many_significant_digits(self): - """target backend supports values with many digits on both sides, - such as 319438950232418390.273596, 87673.594069654243 - - """ - return sqlalchemy.testing.exclusions.open() - - @property - def array_type(self): - """While Databricks does support ARRAY types, pysql cannot bind them. So - we cannot use them with SQLAlchemy - - Due to a bug in SQLAlchemy, we _must_ define this exclusion as closed() here or else the - test runner will crash the pytest process due to an AttributeError - """ - - # TODO: Implement array type using inline? - return sqlalchemy.testing.exclusions.closed() - - @property - def table_ddl_if_exists(self): - """target platform supports IF NOT EXISTS / IF EXISTS for tables.""" - - return sqlalchemy.testing.exclusions.open() - - @property - def identity_columns(self): - """If a backend supports GENERATED { ALWAYS | BY DEFAULT } - AS IDENTITY""" - return sqlalchemy.testing.exclusions.open() - - @property - def identity_columns_standard(self): - """If a backend supports GENERATED { ALWAYS | BY DEFAULT } - AS IDENTITY with a standard syntax. - This is mainly to exclude MSSql. 
- """ - return sqlalchemy.testing.exclusions.open() - - @property - def has_temp_table(self): - """target dialect supports checking a single temp table name - - unfortunately this is not the same as temp_table_names - - SQLAlchemy's HasTableTest is not normalised in such a way that temp table tests - are separate from temp view and normal table tests. If those tests were split out, - we would just add detailed skip markers in test_suite.py. But since we'd like to - run the HasTableTest group for the features we support, we must set this exclusinon - to closed(). - - It would be ideal if there were a separate requirement for has_temp_view. Without it, - we're in a bind. - """ - return sqlalchemy.testing.exclusions.closed() - - @property - def temporary_views(self): - """target database supports temporary views""" - return sqlalchemy.testing.exclusions.open() - - @property - def views(self): - """Target database must support VIEWs.""" - - return sqlalchemy.testing.exclusions.open() - - @property - def temporary_tables(self): - """target database supports temporary tables - - ComponentReflection test is intricate and simply cannot function without this exclusion being defined here. - This happens because we cannot skip individual combinations used in ComponentReflection test. - """ - return sqlalchemy.testing.exclusions.closed() - - @property - def table_reflection(self): - """target database has general support for table reflection""" - return sqlalchemy.testing.exclusions.open() - - @property - def comment_reflection(self): - """Indicates if the database support table comment reflection""" - return sqlalchemy.testing.exclusions.open() - - @property - def comment_reflection_full_unicode(self): - """Indicates if the database support table comment reflection in the - full unicode range, including emoji etc. - """ - return sqlalchemy.testing.exclusions.open() - - @property - def temp_table_reflection(self): - """ComponentReflection test is intricate and simply cannot function without this exclusion being defined here. - This happens because we cannot skip individual combinations used in ComponentReflection test. - """ - return sqlalchemy.testing.exclusions.closed() - - @property - def index_reflection(self): - """ComponentReflection test is intricate and simply cannot function without this exclusion being defined here. - This happens because we cannot skip individual combinations used in ComponentReflection test. - """ - return sqlalchemy.testing.exclusions.closed() - - @property - def unique_constraint_reflection(self): - """ComponentReflection test is intricate and simply cannot function without this exclusion being defined here. - This happens because we cannot skip individual combinations used in ComponentReflection test. - - Databricks doesn't support UNIQUE constraints. - """ - return sqlalchemy.testing.exclusions.closed() - - @property - def reflects_pk_names(self): - """Target driver reflects the name of primary key constraints.""" - - return sqlalchemy.testing.exclusions.open() - - @property - def datetime_implicit_bound(self): - """target dialect when given a datetime object will bind it such - that the database server knows the object is a date, and not - a plain string. 
- """ - - return sqlalchemy.testing.exclusions.open() - - @property - def tuple_in(self): - return sqlalchemy.testing.exclusions.open() - - @property - def ctes(self): - return sqlalchemy.testing.exclusions.open() - - @property - def ctes_with_update_delete(self): - return sqlalchemy.testing.exclusions.open() - - @property - def delete_from(self): - """Target must support DELETE FROM..FROM or DELETE..USING syntax""" - return sqlalchemy.testing.exclusions.open() - - @property - def table_value_constructor(self): - return sqlalchemy.testing.exclusions.open() - - @property - def reflect_tables_no_columns(self): - return sqlalchemy.testing.exclusions.open() - - @property - def denormalized_names(self): - """Target database must have 'denormalized', i.e. - UPPERCASE as case insensitive names.""" - - return sqlalchemy.testing.exclusions.open() - - @property - def time_timezone(self): - """target dialect supports representation of Python - datetime.time() with tzinfo with Time(timezone=True).""" - - return sqlalchemy.testing.exclusions.open() diff --git a/src/databricks/sqlalchemy/setup.cfg b/src/databricks/sqlalchemy/setup.cfg deleted file mode 100644 index ab89d17d..00000000 --- a/src/databricks/sqlalchemy/setup.cfg +++ /dev/null @@ -1,4 +0,0 @@ - -[sqla_testing] -requirement_cls=databricks.sqlalchemy.requirements:Requirements -profile_file=profiles.txt diff --git a/src/databricks/sqlalchemy/test/_extra.py b/src/databricks/sqlalchemy/test/_extra.py deleted file mode 100644 index 2f3e7a7d..00000000 --- a/src/databricks/sqlalchemy/test/_extra.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Additional tests authored by Databricks that use SQLAlchemy's test fixtures -""" - -import datetime - -from sqlalchemy.testing.suite.test_types import ( - _LiteralRoundTripFixture, - fixtures, - testing, - eq_, - select, - Table, - Column, - config, - _DateFixture, - literal, -) -from databricks.sqlalchemy import TINYINT, TIMESTAMP - - -class TinyIntegerTest(_LiteralRoundTripFixture, fixtures.TestBase): - __backend__ = True - - def test_literal(self, literal_round_trip): - literal_round_trip(TINYINT, [5], [5]) - - @testing.fixture - def integer_round_trip(self, metadata, connection): - def run(datatype, data): - int_table = Table( - "tiny_integer_table", - metadata, - Column( - "id", - TINYINT, - primary_key=True, - test_needs_autoincrement=False, - ), - Column("tiny_integer_data", datatype), - ) - - metadata.create_all(config.db) - - connection.execute(int_table.insert(), {"id": 1, "integer_data": data}) - - row = connection.execute(select(int_table.c.integer_data)).first() - - eq_(row, (data,)) - - assert isinstance(row[0], int) - - return run - - -class DateTimeTZTestCustom(_DateFixture, fixtures.TablesTest): - """This test confirms that when a user uses the TIMESTAMP - type to store a datetime object, it retains its timezone - """ - - __backend__ = True - datatype = TIMESTAMP - data = datetime.datetime(2012, 10, 15, 12, 57, 18, tzinfo=datetime.timezone.utc) - - @testing.requires.datetime_implicit_bound - def test_select_direct(self, connection): - - # We need to pass the TIMESTAMP type to the literal function - # so that the value is processed correctly. 
- result = connection.scalar(select(literal(self.data, TIMESTAMP))) - eq_(result, self.data) diff --git a/src/databricks/sqlalchemy/test/_future.py b/src/databricks/sqlalchemy/test/_future.py deleted file mode 100644 index 6e470f60..00000000 --- a/src/databricks/sqlalchemy/test/_future.py +++ /dev/null @@ -1,331 +0,0 @@ -# type: ignore - -from enum import Enum - -import pytest -from databricks.sqlalchemy.test._regression import ( - ExpandingBoundInTest, - IdentityAutoincrementTest, - LikeFunctionsTest, - NormalizedNameTest, -) -from databricks.sqlalchemy.test._unsupported import ( - ComponentReflectionTest, - ComponentReflectionTestExtra, - CTETest, - InsertBehaviorTest, -) -from sqlalchemy.testing.suite import ( - ArrayTest, - BinaryTest, - BizarroCharacterFKResolutionTest, - CollateTest, - ComputedColumnTest, - ComputedReflectionTest, - DifficultParametersTest, - FutureWeCanSetDefaultSchemaWEventsTest, - IdentityColumnTest, - IdentityReflectionTest, - JSONLegacyStringCastIndexTest, - JSONTest, - NativeUUIDTest, - QuotedNameArgumentTest, - RowCountTest, - SimpleUpdateDeleteTest, - WeCanSetDefaultSchemaWEventsTest, -) - - -class FutureFeature(Enum): - ARRAY = "ARRAY column type handling" - BINARY = "BINARY column type handling" - CHECK = "CHECK constraint handling" - COLLATE = "COLLATE DDL generation" - CTE_FEAT = "required CTE features" - EMPTY_INSERT = "empty INSERT support" - FK_OPTS = "foreign key option checking" - GENERATED_COLUMNS = "Delta computed / generated columns support" - IDENTITY = "identity reflection" - JSON = "JSON column type handling" - MULTI_PK = "get_multi_pk_constraint method" - PROVISION = "event-driven engine configuration" - REGEXP = "_visit_regexp" - SANE_ROWCOUNT = "sane_rowcount support" - TBL_OPTS = "get_table_options method" - TEST_DESIGN = "required test-fixture overrides" - TUPLE_LITERAL = "tuple-like IN markers completely" - UUID = "native Uuid() type" - VIEW_DEF = "get_view_definition method" - - -def render_future_feature(rsn: FutureFeature, extra=False) -> str: - postfix = " More detail in _future.py" if extra else "" - return f"[FUTURE][{rsn.name}]: This dialect doesn't implement {rsn.value}.{postfix}" - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.BINARY)) -class BinaryTest(BinaryTest): - """Databricks doesn't support binding of BINARY type values. When DBR supports this, we can implement - in this dialect. - """ - - pass - - -class ExpandingBoundInTest(ExpandingBoundInTest): - @pytest.mark.skip(render_future_feature(FutureFeature.TUPLE_LITERAL)) - def test_empty_heterogeneous_tuples_bindparam(self): - pass - - @pytest.mark.skip(render_future_feature(FutureFeature.TUPLE_LITERAL)) - def test_empty_heterogeneous_tuples_direct(self): - pass - - @pytest.mark.skip(render_future_feature(FutureFeature.TUPLE_LITERAL)) - def test_empty_homogeneous_tuples_bindparam(self): - pass - - @pytest.mark.skip(render_future_feature(FutureFeature.TUPLE_LITERAL)) - def test_empty_homogeneous_tuples_direct(self): - pass - - -class NormalizedNameTest(NormalizedNameTest): - @pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN, True)) - def test_get_table_names(self): - """I'm not clear how this test can ever pass given that it's assertion looks like this: - - ```python - eq_(tablenames[0].upper(), tablenames[0].lower()) - eq_(tablenames[1].upper(), tablenames[1].lower()) - ``` - - It's forcibly calling .upper() and .lower() on the same string and expecting them to be equal. 
- """ - pass - - -class CTETest(CTETest): - @pytest.mark.skip(render_future_feature(FutureFeature.CTE_FEAT, True)) - def test_delete_from_round_trip(self): - """Databricks dialect doesn't implement multiple-table criteria within DELETE""" - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN, True)) -class IdentityColumnTest(IdentityColumnTest): - """Identity works. Test needs rewrite for Databricks. See comments in test_suite.py - - The setup for these tests tries to create a table with a DELTA IDENTITY column but has two problems: - 1. It uses an Integer() type for the column. Whereas DELTA IDENTITY columns must be BIGINT. - 2. It tries to set the start == 42, which Databricks doesn't support - - I can get the tests to _run_ by patching the table fixture to use BigInteger(). But it asserts that the - identity of two rows are 42 and 43, which is not possible since they will be rows 1 and 2 instead. - - I'm satisified through manual testing that our implementation of visit_identity_column works but a better test is needed. - """ - - pass - - -class IdentityAutoincrementTest(IdentityAutoincrementTest): - @pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN, True)) - def test_autoincrement_with_identity(self): - """This test has the same issue as IdentityColumnTest.test_select_all in that it creates a table with identity - using an Integer() rather than a BigInteger(). If I override this behaviour to use a BigInteger() instead, the - test passes. - """ - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN)) -class BizarroCharacterFKResolutionTest(BizarroCharacterFKResolutionTest): - """Some of the combinations in this test pass. Others fail. Given the esoteric nature of these failures, - we have opted to defer implementing fixes to a later time, guided by customer feedback. Passage of - these tests is not an acceptance criteria for our dialect. - """ - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN)) -class DifficultParametersTest(DifficultParametersTest): - """Some of the combinations in this test pass. Others fail. Given the esoteric nature of these failures, - we have opted to defer implementing fixes to a later time, guided by customer feedback. Passage of - these tests is not an acceptance criteria for our dialect. - """ - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.IDENTITY, True)) -class IdentityReflectionTest(IdentityReflectionTest): - """It's not clear _how_ to implement this for SQLAlchemy. Columns created with GENERATED ALWAYS AS IDENTITY - are not specially demarked in the output of TGetColumnsResponse or DESCRIBE TABLE EXTENDED. - - We could theoretically parse this from the contents of `SHOW CREATE TABLE` but that feels like a hack. 
- """ - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.JSON)) -class JSONTest(JSONTest): - """Databricks supports JSON path expressions in queries it's just not implemented in this dialect.""" - - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.JSON)) -class JSONLegacyStringCastIndexTest(JSONLegacyStringCastIndexTest): - """Same comment applies as JSONTest""" - - pass - - -class LikeFunctionsTest(LikeFunctionsTest): - @pytest.mark.skip(render_future_feature(FutureFeature.REGEXP)) - def test_not_regexp_match(self): - """The defaul dialect doesn't implement _visit_regexp methods so we don't get them automatically.""" - pass - - @pytest.mark.skip(render_future_feature(FutureFeature.REGEXP)) - def test_regexp_match(self): - """The defaul dialect doesn't implement _visit_regexp methods so we don't get them automatically.""" - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.COLLATE)) -class CollateTest(CollateTest): - """This is supported in Databricks. Not implemented here.""" - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.UUID, True)) -class NativeUUIDTest(NativeUUIDTest): - """Type implementation will be straightforward. Since Databricks doesn't have a native UUID type we can use - a STRING field, create a custom TypeDecorator for sqlalchemy.types.Uuid and add it to the dialect's colspecs. - - Then mark requirements.uuid_data_type as open() so this test can run. - """ - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.SANE_ROWCOUNT)) -class RowCountTest(RowCountTest): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.SANE_ROWCOUNT)) -class SimpleUpdateDeleteTest(SimpleUpdateDeleteTest): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.PROVISION, True)) -class WeCanSetDefaultSchemaWEventsTest(WeCanSetDefaultSchemaWEventsTest): - """provision.py allows us to define event listeners that emit DDL for things like setting up a test schema - or, in this case, changing the default schema for the connection after it's been built. This would override - the schema defined in the sqlalchemy connection string. This support is possible but is not implemented - in the dialect. Deferred for now. - """ - - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.PROVISION, True)) -class FutureWeCanSetDefaultSchemaWEventsTest(FutureWeCanSetDefaultSchemaWEventsTest): - """provision.py allows us to define event listeners that emit DDL for things like setting up a test schema - or, in this case, changing the default schema for the connection after it's been built. This would override - the schema defined in the sqlalchemy connection string. This support is possible but is not implemented - in the dialect. Deferred for now. - """ - - pass - - -class ComponentReflectionTest(ComponentReflectionTest): - @pytest.mark.skip(reason=render_future_feature(FutureFeature.TBL_OPTS, True)) - def test_multi_get_table_options_tables(self): - """It's not clear what the expected ouput from this method would even _be_. 
Requires research.""" - pass - - @pytest.mark.skip(render_future_feature(FutureFeature.VIEW_DEF)) - def test_get_view_definition(self): - pass - - @pytest.mark.skip(render_future_feature(FutureFeature.VIEW_DEF)) - def test_get_view_definition_does_not_exist(self): - pass - - @pytest.mark.skip(render_future_feature(FutureFeature.MULTI_PK)) - def test_get_multi_pk_constraint(self): - pass - - @pytest.mark.skip(render_future_feature(FutureFeature.CHECK)) - def test_get_multi_check_constraints(self): - pass - - -class ComponentReflectionTestExtra(ComponentReflectionTestExtra): - @pytest.mark.skip(render_future_feature(FutureFeature.CHECK)) - def test_get_check_constraints(self): - pass - - @pytest.mark.skip(render_future_feature(FutureFeature.FK_OPTS)) - def test_get_foreign_key_options(self): - """It's not clear from the test code what the expected output is here. Further research required.""" - pass - - -class InsertBehaviorTest(InsertBehaviorTest): - @pytest.mark.skip(render_future_feature(FutureFeature.EMPTY_INSERT, True)) - def test_empty_insert(self): - """Empty inserts are possible using DEFAULT VALUES on Databricks. To implement it, we need - to hook into the SQLCompiler to render a no-op column list. With SQLAlchemy's default implementation - the request fails with a syntax error. - """ - pass - - @pytest.mark.skip(render_future_feature(FutureFeature.EMPTY_INSERT, True)) - def test_empty_insert_multiple(self): - """Empty inserts are possible using DEFAULT VALUES on Databricks. To implement it, we need - to hook into the SQLCompiler to render a no-op column list. With SQLAlchemy's default implementation - the request fails with a syntax error. - """ - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.ARRAY)) -class ArrayTest(ArrayTest): - """While Databricks supports ARRAY types, DBR cannot handle bound parameters of this type. - This makes them unusable to SQLAlchemy without some workaround. Potentially we could inline - the values of these parameters (which risks sql injection). - """ - - -@pytest.mark.reviewed -@pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN, True)) -class QuotedNameArgumentTest(QuotedNameArgumentTest): - """These tests are challenging. The whole test setup depends on a table with a name like `quote ' one` - which will never work on Databricks because table names can't contain spaces. But QuotedNameArgumentTest - also checks the behaviour of the DDL identifier preparation process. We need to override some of the IdentifierPreparer - methods because these are the ultimate control for whether or not CHECK and UNIQUE constraints are emitted.
- """ - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_future_feature(FutureFeature.GENERATED_COLUMNS)) -class ComputedColumnTest(ComputedColumnTest): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_future_feature(FutureFeature.GENERATED_COLUMNS)) -class ComputedReflectionTest(ComputedReflectionTest): - pass diff --git a/src/databricks/sqlalchemy/test/_regression.py b/src/databricks/sqlalchemy/test/_regression.py deleted file mode 100644 index 4dbc5ec2..00000000 --- a/src/databricks/sqlalchemy/test/_regression.py +++ /dev/null @@ -1,311 +0,0 @@ -# type: ignore - -import pytest -from sqlalchemy.testing.suite import ( - ArgSignatureTest, - BooleanTest, - CastTypeDecoratorTest, - ComponentReflectionTestExtra, - CompositeKeyReflectionTest, - CompoundSelectTest, - DateHistoricTest, - DateTest, - DateTimeCoercedToDateTimeTest, - DateTimeHistoricTest, - DateTimeMicrosecondsTest, - DateTimeTest, - DeprecatedCompoundSelectTest, - DistinctOnTest, - EscapingTest, - ExistsTest, - ExpandingBoundInTest, - FetchLimitOffsetTest, - FutureTableDDLTest, - HasTableTest, - IdentityAutoincrementTest, - InsertBehaviorTest, - IntegerTest, - IsOrIsNotDistinctFromTest, - JoinTest, - LikeFunctionsTest, - NormalizedNameTest, - NumericTest, - OrderByLabelTest, - PingTest, - PostCompileParamsTest, - ReturningGuardsTest, - RowFetchTest, - SameNamedSchemaTableTest, - StringTest, - TableDDLTest, - TableNoColumnsTest, - TextTest, - TimeMicrosecondsTest, - TimestampMicrosecondsTest, - TimeTest, - TimeTZTest, - TrueDivTest, - UnicodeTextTest, - UnicodeVarcharTest, - UuidTest, - ValuesExpressionTest, -) - -from databricks.sqlalchemy.test.overrides._ctetest import CTETest -from databricks.sqlalchemy.test.overrides._componentreflectiontest import ( - ComponentReflectionTest, -) - - -@pytest.mark.reviewed -class NumericTest(NumericTest): - pass - - -@pytest.mark.reviewed -class HasTableTest(HasTableTest): - pass - - -@pytest.mark.reviewed -class ComponentReflectionTestExtra(ComponentReflectionTestExtra): - pass - - -@pytest.mark.reviewed -class InsertBehaviorTest(InsertBehaviorTest): - pass - - -@pytest.mark.reviewed -class ComponentReflectionTest(ComponentReflectionTest): - """This test requires two schemas be present in the target Databricks workspace: - - The schema set in --dburi - - A second schema named "test_schema" - - Note that test_get_multi_foreign keys is flaky because DBR does not guarantee the order of data returned in DESCRIBE TABLE EXTENDED - - _Most_ of these tests pass if we manually override the bad test setup. 
- """ - - pass - - -@pytest.mark.reviewed -class TableDDLTest(TableDDLTest): - pass - - -@pytest.mark.reviewed -class FutureTableDDLTest(FutureTableDDLTest): - pass - - -@pytest.mark.reviewed -class FetchLimitOffsetTest(FetchLimitOffsetTest): - pass - - -@pytest.mark.reviewed -class UuidTest(UuidTest): - pass - - -@pytest.mark.reviewed -class ValuesExpressionTest(ValuesExpressionTest): - pass - - -@pytest.mark.reviewed -class BooleanTest(BooleanTest): - pass - - -@pytest.mark.reviewed -class PostCompileParamsTest(PostCompileParamsTest): - pass - - -@pytest.mark.reviewed -class TimeMicrosecondsTest(TimeMicrosecondsTest): - pass - - -@pytest.mark.reviewed -class TextTest(TextTest): - pass - - -@pytest.mark.reviewed -class StringTest(StringTest): - pass - - -@pytest.mark.reviewed -class DateTimeMicrosecondsTest(DateTimeMicrosecondsTest): - pass - - -@pytest.mark.reviewed -class TimestampMicrosecondsTest(TimestampMicrosecondsTest): - pass - - -@pytest.mark.reviewed -class DateTimeCoercedToDateTimeTest(DateTimeCoercedToDateTimeTest): - pass - - -@pytest.mark.reviewed -class TimeTest(TimeTest): - pass - - -@pytest.mark.reviewed -class DateTimeTest(DateTimeTest): - pass - - -@pytest.mark.reviewed -class DateTimeHistoricTest(DateTimeHistoricTest): - pass - - -@pytest.mark.reviewed -class DateTest(DateTest): - pass - - -@pytest.mark.reviewed -class DateHistoricTest(DateHistoricTest): - pass - - -@pytest.mark.reviewed -class RowFetchTest(RowFetchTest): - pass - - -@pytest.mark.reviewed -class CompositeKeyReflectionTest(CompositeKeyReflectionTest): - pass - - -@pytest.mark.reviewed -class TrueDivTest(TrueDivTest): - pass - - -@pytest.mark.reviewed -class ArgSignatureTest(ArgSignatureTest): - pass - - -@pytest.mark.reviewed -class CompoundSelectTest(CompoundSelectTest): - pass - - -@pytest.mark.reviewed -class DeprecatedCompoundSelectTest(DeprecatedCompoundSelectTest): - pass - - -@pytest.mark.reviewed -class CastTypeDecoratorTest(CastTypeDecoratorTest): - pass - - -@pytest.mark.reviewed -class DistinctOnTest(DistinctOnTest): - pass - - -@pytest.mark.reviewed -class EscapingTest(EscapingTest): - pass - - -@pytest.mark.reviewed -class ExistsTest(ExistsTest): - pass - - -@pytest.mark.reviewed -class IntegerTest(IntegerTest): - pass - - -@pytest.mark.reviewed -class IsOrIsNotDistinctFromTest(IsOrIsNotDistinctFromTest): - pass - - -@pytest.mark.reviewed -class JoinTest(JoinTest): - pass - - -@pytest.mark.reviewed -class OrderByLabelTest(OrderByLabelTest): - pass - - -@pytest.mark.reviewed -class PingTest(PingTest): - pass - - -@pytest.mark.reviewed -class ReturningGuardsTest(ReturningGuardsTest): - pass - - -@pytest.mark.reviewed -class SameNamedSchemaTableTest(SameNamedSchemaTableTest): - pass - - -@pytest.mark.reviewed -class UnicodeTextTest(UnicodeTextTest): - pass - - -@pytest.mark.reviewed -class UnicodeVarcharTest(UnicodeVarcharTest): - pass - - -@pytest.mark.reviewed -class TableNoColumnsTest(TableNoColumnsTest): - pass - - -@pytest.mark.reviewed -class ExpandingBoundInTest(ExpandingBoundInTest): - pass - - -@pytest.mark.reviewed -class CTETest(CTETest): - pass - - -@pytest.mark.reviewed -class NormalizedNameTest(NormalizedNameTest): - pass - - -@pytest.mark.reviewed -class IdentityAutoincrementTest(IdentityAutoincrementTest): - pass - - -@pytest.mark.reviewed -class LikeFunctionsTest(LikeFunctionsTest): - pass - - -@pytest.mark.reviewed -class TimeTZTest(TimeTZTest): - pass diff --git a/src/databricks/sqlalchemy/test/_unsupported.py b/src/databricks/sqlalchemy/test/_unsupported.py deleted file 
mode 100644 index c1f81205..00000000 --- a/src/databricks/sqlalchemy/test/_unsupported.py +++ /dev/null @@ -1,450 +0,0 @@ -# type: ignore - -from enum import Enum - -import pytest -from databricks.sqlalchemy.test._regression import ( - ComponentReflectionTest, - ComponentReflectionTestExtra, - CTETest, - FetchLimitOffsetTest, - FutureTableDDLTest, - HasTableTest, - InsertBehaviorTest, - NumericTest, - TableDDLTest, - UuidTest, -) - -# These are test suites that are fully skipped with a SkipReason -from sqlalchemy.testing.suite import ( - AutocommitIsolationTest, - DateTimeTZTest, - ExceptionTest, - HasIndexTest, - HasSequenceTest, - HasSequenceTestEmpty, - IsolationLevelTest, - LastrowidTest, - LongNameBlowoutTest, - PercentSchemaNamesTest, - ReturningTest, - SequenceCompilerTest, - SequenceTest, - ServerSideCursorsTest, - UnicodeSchemaTest, -) - - -class SkipReason(Enum): - AUTO_INC = "implicit AUTO_INCREMENT" - CTE_FEAT = "required CTE features" - CURSORS = "server-side cursors" - DECIMAL_FEAT = "required decimal features" - ENFORCE_KEYS = "enforcing primary or foreign key constraints" - FETCH = "fetch clauses" - IDENTIFIER_LENGTH = "identifiers > 255 characters" - IMPL_FLOAT_PREC = "required implicit float precision" - IMPLICIT_ORDER = "deterministic return order if ORDER BY is not present" - INDEXES = "SQL INDEXes" - RETURNING = "INSERT ... RETURNING syntax" - SEQUENCES = "SQL SEQUENCES" - STRING_FEAT = "required STRING type features" - SYMBOL_CHARSET = "symbols expected by test" - TEMP_TBL = "temporary tables" - TIMEZONE_OPT = "timezone-optional TIMESTAMP fields" - TRANSACTIONS = "transactions" - UNIQUE = "UNIQUE constraints" - - -def render_skip_reason(rsn: SkipReason, setup_error=False, extra=False) -> str: - prefix = "[BADSETUP]" if setup_error else "" - postfix = " More detail in _unsupported.py" if extra else "" - return f"[UNSUPPORTED]{prefix}[{rsn.name}]: Databricks does not support {rsn.value}.{postfix}" - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_skip_reason(SkipReason.ENFORCE_KEYS)) -class ExceptionTest(ExceptionTest): - """Per Databricks documentation, primary and foreign key constraints are informational only - and are not enforced.
- - https://docs.databricks.com/api/workspace/tableconstraints - """ - - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_skip_reason(SkipReason.IDENTIFIER_LENGTH)) -class LongNameBlowoutTest(LongNameBlowoutTest): - """These tests all include assertions that the tested name > 255 characters""" - - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_skip_reason(SkipReason.SEQUENCES)) -class HasSequenceTest(HasSequenceTest): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_skip_reason(SkipReason.SEQUENCES)) -class HasSequenceTestEmpty(HasSequenceTestEmpty): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_skip_reason(SkipReason.INDEXES)) -class HasIndexTest(HasIndexTest): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_skip_reason(SkipReason.SYMBOL_CHARSET)) -class UnicodeSchemaTest(UnicodeSchemaTest): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_skip_reason(SkipReason.CURSORS)) -class ServerSideCursorsTest(ServerSideCursorsTest): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_skip_reason(SkipReason.SYMBOL_CHARSET)) -class PercentSchemaNamesTest(PercentSchemaNamesTest): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_skip_reason(SkipReason.TRANSACTIONS)) -class IsolationLevelTest(IsolationLevelTest): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_skip_reason(SkipReason.TRANSACTIONS)) -class AutocommitIsolationTest(AutocommitIsolationTest): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_skip_reason(SkipReason.RETURNING)) -class ReturningTest(ReturningTest): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_skip_reason(SkipReason.SEQUENCES)) -class SequenceTest(SequenceTest): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(reason=render_skip_reason(SkipReason.SEQUENCES)) -class SequenceCompilerTest(SequenceCompilerTest): - pass - - -class FetchLimitOffsetTest(FetchLimitOffsetTest): - @pytest.mark.flaky - @pytest.mark.skip(reason=render_skip_reason(SkipReason.IMPLICIT_ORDER, extra=True)) - def test_limit_render_multiple_times(self): - """This test depends on the order that records are inserted into the table. It's passing criteria requires that - a record inserted with id=1 is the first record returned when no ORDER BY clause is specified. But Databricks occasionally - INSERTS in a different order, which makes this test seem to fail. The test is flaky, but the underlying functionality - (can multiple LIMIT clauses be rendered) is not broken. - - Unclear if this is a bug in Databricks, Delta, or some race-condition in the test itself. 
- """ - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_bound_fetch_offset(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_fetch_offset_no_order(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_fetch_offset_nobinds(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_simple_fetch(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_simple_fetch_offset(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_simple_fetch_percent(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_simple_fetch_percent_ties(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_simple_fetch_ties(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_expr_fetch_offset(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_fetch_offset_percent(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_fetch_offset_percent_ties(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_fetch_offset_ties(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH)) - def test_fetch_offset_ties_exact_number(self): - pass - - -class UuidTest(UuidTest): - @pytest.mark.skip(reason=render_skip_reason(SkipReason.RETURNING)) - def test_uuid_returning(self): - pass - - -class FutureTableDDLTest(FutureTableDDLTest): - @pytest.mark.skip(render_skip_reason(SkipReason.INDEXES)) - def test_create_index_if_not_exists(self): - """We could use requirements.index_reflection and requirements.index_ddl_if_exists - here to disable this but prefer a more meaningful skip message - """ - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.INDEXES)) - def test_drop_index_if_exists(self): - """We could use requirements.index_reflection and requirements.index_ddl_if_exists - here to disable this but prefer a more meaningful skip message - """ - pass - - -class TableDDLTest(TableDDLTest): - @pytest.mark.skip(reason=render_skip_reason(SkipReason.INDEXES)) - def test_create_index_if_not_exists(self, connection): - """We could use requirements.index_reflection and requirements.index_ddl_if_exists - here to disable this but prefer a more meaningful skip message - """ - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.INDEXES)) - def test_drop_index_if_exists(self, connection): - """We could use requirements.index_reflection and requirements.index_ddl_if_exists - here to disable this but prefer a more meaningful skip message - """ - pass - - -class ComponentReflectionTest(ComponentReflectionTest): - """This test requires two schemas be present in the target Databricks workspace: - - The schema set in --dburi - - A second schema named "test_schema" - - Note that test_get_multi_foreign keys is flaky because DBR does not guarantee the order of data returned in DESCRIBE TABLE EXTENDED - """ - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.UNIQUE)) - def test_get_multi_unique_constraints(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL, True, True)) - def test_get_temp_view_names(self): - """While Databricks supports temporary views, this test creates a temp view aimed at a temp table. 
- Databricks doesn't support temp tables. So the test can never pass. - """ - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL)) - def test_get_temp_table_columns(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL)) - def test_get_temp_table_indexes(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL)) - def test_get_temp_table_names(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL)) - def test_get_temp_table_unique_constraints(self): - pass - - @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL)) - def test_reflect_table_temp_table(self): - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.INDEXES)) - def test_get_indexes(self): - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.INDEXES)) - def test_multi_indexes(self): - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.INDEXES)) - def get_noncol_index(self): - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.UNIQUE)) - def test_get_unique_constraints(self): - pass - - -class NumericTest(NumericTest): - @pytest.mark.skip(render_skip_reason(SkipReason.DECIMAL_FEAT)) - def test_enotation_decimal(self): - """This test automatically runs if requirements.precision_numerics_enotation_large is open()""" - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.DECIMAL_FEAT)) - def test_enotation_decimal_large(self): - """This test automatically runs if requirements.precision_numerics_enotation_large is open()""" - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.IMPL_FLOAT_PREC, extra=True)) - def test_float_coerce_round_trip(self): - """ - This automatically runs if requirements.literal_float_coercion is open() - - Without additional work, Databricks returns 15.75629997253418 when you SELECT 15.7563. - This is a potential area where we could override the Float literal processor to add a CAST. - Will leave to a PM to decide if we should do so. - """ - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.IMPL_FLOAT_PREC, extra=True)) - def test_float_custom_scale(self): - """This test automatically runs if requirements.precision_generic_float_type is open()""" - pass - - -class HasTableTest(HasTableTest): - """Databricks does not support temporary tables.""" - - @pytest.mark.skip(render_skip_reason(SkipReason.TEMP_TBL)) - def test_has_table_temp_table(self): - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.TEMP_TBL, True, True)) - def test_has_table_temp_view(self): - """Databricks supports temporary views but this test depends on requirements.has_temp_table, which we - explicitly close so that we can run other tests in this group. See the comment under has_temp_table in - requirements.py for details. - - From what I can see, there is no way to run this test since it will fail during setup if we mark has_temp_table - open(). It _might_ be possible to hijack this behaviour by implementing temp_table_keyword_args in our own - provision.py. Doing so would mean creating a real table during this class setup instead of a temp table. Then - we could just skip the temp table tests but run the temp view tests. But this test fixture doesn't cleanup its - temp tables and has no hook to do so. - - It would be ideal for SQLAlchemy to define a separate requirements.has_temp_views. 
- """ - pass - - -class ComponentReflectionTestExtra(ComponentReflectionTestExtra): - @pytest.mark.skip(render_skip_reason(SkipReason.INDEXES)) - def test_reflect_covering_index(self): - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.INDEXES)) - def test_reflect_expression_based_indexes(self): - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.STRING_FEAT, extra=True)) - def test_varchar_reflection(self): - """Databricks doesn't enforce string length limitations like STRING(255).""" - pass - - -class InsertBehaviorTest(InsertBehaviorTest): - @pytest.mark.skip(render_skip_reason(SkipReason.AUTO_INC, True, True)) - def test_autoclose_on_insert(self): - """The setup for this test creates a column with implicit autoincrement enabled. - This dialect does not implement implicit autoincrement - users must declare Identity() explicitly. - """ - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.AUTO_INC, True, True)) - def test_insert_from_select_autoinc(self): - """Implicit autoincrement is not implemented in this dialect.""" - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.AUTO_INC, True, True)) - def test_insert_from_select_autoinc_no_rows(self): - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.RETURNING)) - def test_autoclose_on_insert_implicit_returning(self): - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_skip_reason(SkipReason.AUTO_INC, extra=True)) -class LastrowidTest(LastrowidTest): - """SQLAlchemy docs describe that a column without an explicit Identity() may implicitly create one if autoincrement=True. - That is what this method tests. Databricks supports auto-incrementing IDENTITY columns but they must be explicitly - declared. This limitation is present in our dialect as well. Which means that SQLAlchemy's autoincrement setting of a column - is ignored. We emit a logging.WARN message if you try it. - - In the future we could handle this autoincrement by implicitly calling the visit_identity_column() method of our DDLCompiler - when autoincrement=True. There is an example of this in the Microsoft SQL Server dialect: MSSDDLCompiler.get_column_specification - - For now, if you need to create a SQLAlchemy column with an auto-incrementing identity, you must set this explicitly in your column - definition by passing an Identity() to the column constructor. - """ - - pass - - -class CTETest(CTETest): - """During the teardown for this test block, it tries to drop a constraint that it never named which raises - a compilation error. This could point to poor constraint reflection but our other constraint reflection - tests pass. Requires investigation. - """ - - @pytest.mark.skip(render_skip_reason(SkipReason.CTE_FEAT, extra=True)) - def test_select_recursive_round_trip(self): - pass - - @pytest.mark.skip(render_skip_reason(SkipReason.CTE_FEAT, extra=True)) - def test_delete_scalar_subq_round_trip(self): - """Error received is [UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY.MUST_AGGREGATE_CORRELATED_SCALAR_SUBQUERY] - - This suggests a limitation of the platform. But a workaround may be possible if customers require it. - """ - pass - - -@pytest.mark.reviewed -@pytest.mark.skip(render_skip_reason(SkipReason.TIMEZONE_OPT, True)) -class DateTimeTZTest(DateTimeTZTest): - """Test whether the sqlalchemy.DateTime() type can _optionally_ include timezone info. - This dialect maps DateTime() → TIMESTAMP, which _always_ includes tzinfo. - - Users can use databricks.sqlalchemy.TIMESTAMP_NTZ for a tzinfo-less timestamp. 
The SQLA docs - acknowledge this is expected for some dialects. - - https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.DateTime - """ - - pass diff --git a/src/databricks/sqlalchemy/test/conftest.py b/src/databricks/sqlalchemy/test/conftest.py deleted file mode 100644 index ea43e8d3..00000000 --- a/src/databricks/sqlalchemy/test/conftest.py +++ /dev/null @@ -1,13 +0,0 @@ -from sqlalchemy.dialects import registry -import pytest - -registry.register("databricks", "databricks.sqlalchemy", "DatabricksDialect") -# sqlalchemy's dialect-testing machinery wants an entry like this. -# This seems to be based around dialects maybe having multiple drivers -# and wanting to test driver-specific URLs, but doesn't seem to make -# much sense for dialects with only one driver. -registry.register("databricks.databricks", "databricks.sqlalchemy", "DatabricksDialect") - -pytest.register_assert_rewrite("sqlalchemy.testing.assertions") - -from sqlalchemy.testing.plugin.pytestplugin import * diff --git a/src/databricks/sqlalchemy/test/overrides/_componentreflectiontest.py b/src/databricks/sqlalchemy/test/overrides/_componentreflectiontest.py deleted file mode 100644 index a1f58fa6..00000000 --- a/src/databricks/sqlalchemy/test/overrides/_componentreflectiontest.py +++ /dev/null @@ -1,189 +0,0 @@ -"""The default test setup uses self-referential foreign keys and indexes for a test table. -We override to remove these assumptions. - -Note that test_multi_foreign_keys currently does not pass for all combinations due to -an ordering issue. The dialect returns the expected information. But this test makes assertions -on the order of the returned results. We can't guarantee that order at the moment. - -The test fixture actually tries to sort the outputs, but this sort isn't working. Will need -to follow up on this later. -""" -import sqlalchemy as sa -from sqlalchemy.testing import config -from sqlalchemy.testing.schema import Column -from sqlalchemy.testing.schema import Table -from sqlalchemy import ForeignKey -from sqlalchemy import Index -from sqlalchemy import testing - -from sqlalchemy.testing.suite.test_reflection import ComponentReflectionTest - - -class ComponentReflectionTest(ComponentReflectionTest): # type: ignore - @classmethod - def define_reflected_tables(cls, metadata, schema): - if schema: - schema_prefix = schema + "."
- else: - schema_prefix = "" - - if testing.requires.self_referential_foreign_keys.enabled: - parent_id_args = ( - ForeignKey( - "%susers.user_id" % schema_prefix, name="user_id_fk", use_alter=True - ), - ) - else: - parent_id_args = () - users = Table( - "users", - metadata, - Column("user_id", sa.INT, primary_key=True), - Column("test1", sa.CHAR(5), nullable=False), - Column("test2", sa.Float(), nullable=False), - Column("parent_user_id", sa.Integer, *parent_id_args), - sa.CheckConstraint( - "test2 > 0", - name="zz_test2_gt_zero", - comment="users check constraint", - ), - sa.CheckConstraint("test2 <= 1000"), - schema=schema, - test_needs_fk=True, - ) - - Table( - "dingalings", - metadata, - Column("dingaling_id", sa.Integer, primary_key=True), - Column( - "address_id", - sa.Integer, - ForeignKey( - "%semail_addresses.address_id" % schema_prefix, - name="zz_email_add_id_fg", - comment="di fk comment", - ), - ), - Column( - "id_user", - sa.Integer, - ForeignKey("%susers.user_id" % schema_prefix), - ), - Column("data", sa.String(30), unique=True), - sa.CheckConstraint( - "address_id > 0 AND address_id < 1000", - name="address_id_gt_zero", - ), - sa.UniqueConstraint( - "address_id", - "dingaling_id", - name="zz_dingalings_multiple", - comment="di unique comment", - ), - schema=schema, - test_needs_fk=True, - ) - Table( - "email_addresses", - metadata, - Column("address_id", sa.Integer), - Column("remote_user_id", sa.Integer, ForeignKey(users.c.user_id)), - Column("email_address", sa.String(20)), - sa.PrimaryKeyConstraint( - "address_id", name="email_ad_pk", comment="ea pk comment" - ), - schema=schema, - test_needs_fk=True, - ) - Table( - "comment_test", - metadata, - Column("id", sa.Integer, primary_key=True, comment="id comment"), - Column("data", sa.String(20), comment="data % comment"), - Column( - "d2", - sa.String(20), - comment=r"""Comment types type speedily ' " \ '' Fun!""", - ), - Column("d3", sa.String(42), comment="Comment\nwith\rescapes"), - schema=schema, - comment=r"""the test % ' " \ table comment""", - ) - Table( - "no_constraints", - metadata, - Column("data", sa.String(20)), - schema=schema, - comment="no\nconstraints\rhas\fescaped\vcomment", - ) - - if testing.requires.cross_schema_fk_reflection.enabled: - if schema is None: - Table( - "local_table", - metadata, - Column("id", sa.Integer, primary_key=True), - Column("data", sa.String(20)), - Column( - "remote_id", - ForeignKey("%s.remote_table_2.id" % testing.config.test_schema), - ), - test_needs_fk=True, - schema=config.db.dialect.default_schema_name, - ) - else: - Table( - "remote_table", - metadata, - Column("id", sa.Integer, primary_key=True), - Column( - "local_id", - ForeignKey( - "%s.local_table.id" % config.db.dialect.default_schema_name - ), - ), - Column("data", sa.String(20)), - schema=schema, - test_needs_fk=True, - ) - Table( - "remote_table_2", - metadata, - Column("id", sa.Integer, primary_key=True), - Column("data", sa.String(20)), - schema=schema, - test_needs_fk=True, - ) - - if testing.requires.index_reflection.enabled: - Index("users_t_idx", users.c.test1, users.c.test2, unique=True) - Index("users_all_idx", users.c.user_id, users.c.test2, users.c.test1) - - if not schema: - # test_needs_fk is at the moment to force MySQL InnoDB - noncol_idx_test_nopk = Table( - "noncol_idx_test_nopk", - metadata, - Column("q", sa.String(5)), - test_needs_fk=True, - ) - - noncol_idx_test_pk = Table( - "noncol_idx_test_pk", - metadata, - Column("id", sa.Integer, primary_key=True), - Column("q", sa.String(5)), - 
-            test_needs_fk=True,
-        )
-
-        if (
-            testing.requires.indexes_with_ascdesc.enabled
-            and testing.requires.reflect_indexes_with_ascdesc.enabled
-        ):
-            Index("noncol_idx_nopk", noncol_idx_test_nopk.c.q.desc())
-            Index("noncol_idx_pk", noncol_idx_test_pk.c.q.desc())
-
-        if testing.requires.view_column_reflection.enabled:
-            cls.define_views(metadata, schema)
-        if not schema and testing.requires.temp_table_reflection.enabled:
-            cls.define_temp_tables(metadata)
diff --git a/src/databricks/sqlalchemy/test/overrides/_ctetest.py b/src/databricks/sqlalchemy/test/overrides/_ctetest.py
deleted file mode 100644
index 3cdae036..00000000
--- a/src/databricks/sqlalchemy/test/overrides/_ctetest.py
+++ /dev/null
@@ -1,33 +0,0 @@
-"""The default test setup uses a self-referential foreign key. With our dialect this requires
-`use_alter=True` and the fk constraint to be named, so we override this to make the test pass.
-"""
-
-from sqlalchemy.testing.suite import CTETest
-
-from sqlalchemy.testing.schema import Column
-from sqlalchemy.testing.schema import Table
-from sqlalchemy import ForeignKey
-from sqlalchemy import Integer
-from sqlalchemy import String
-
-
-class CTETest(CTETest):  # type: ignore
-    @classmethod
-    def define_tables(cls, metadata):
-        Table(
-            "some_table",
-            metadata,
-            Column("id", Integer, primary_key=True),
-            Column("data", String(50)),
-            Column(
-                "parent_id", ForeignKey("some_table.id", name="fk_test", use_alter=True)
-            ),
-        )
-
-        Table(
-            "some_other_table",
-            metadata,
-            Column("id", Integer, primary_key=True),
-            Column("data", String(50)),
-            Column("parent_id", Integer),
-        )
diff --git a/src/databricks/sqlalchemy/test/test_suite.py b/src/databricks/sqlalchemy/test/test_suite.py
deleted file mode 100644
index 2b40a432..00000000
--- a/src/databricks/sqlalchemy/test/test_suite.py
+++ /dev/null
@@ -1,13 +0,0 @@
-"""
-The order of these imports is important: test cases are imported first from SQLAlchemy,
-then overridden by our local skip markers in _regression, _unsupported, and _future.
-"""
-
-
-# type: ignore
-# fmt: off
-from sqlalchemy.testing.suite import *
-from databricks.sqlalchemy.test._regression import *
-from databricks.sqlalchemy.test._unsupported import *
-from databricks.sqlalchemy.test._future import *
-from databricks.sqlalchemy.test._extra import TinyIntegerTest, DateTimeTZTestCustom
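test_suite.py works because Python rebinds a name to whatever was imported last. A minimal sketch of what each override module does; the upstream class and test names here are illustrative examples, not part of the deleted suite:

import pytest
from sqlalchemy.testing.suite import FetchLimitOffsetTest as _FetchLimitOffsetTest


class FetchLimitOffsetTest(_FetchLimitOffsetTest):
    # Redeclaring the class under its upstream name means the star-imports in
    # test_suite.py leave this subclass bound last, so pytest collects it
    # instead of the unmodified upstream case.
    @pytest.mark.skip(reason="illustrative skip marker")
    def test_simple_limit(self):
        pass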
-""" diff --git a/src/databricks/sqlalchemy/test_local/conftest.py b/src/databricks/sqlalchemy/test_local/conftest.py deleted file mode 100644 index c8b350be..00000000 --- a/src/databricks/sqlalchemy/test_local/conftest.py +++ /dev/null @@ -1,44 +0,0 @@ -import os -import pytest - - -@pytest.fixture(scope="session") -def host(): - return os.getenv("DATABRICKS_SERVER_HOSTNAME") - - -@pytest.fixture(scope="session") -def http_path(): - return os.getenv("DATABRICKS_HTTP_PATH") - - -@pytest.fixture(scope="session") -def access_token(): - return os.getenv("DATABRICKS_TOKEN") - - -@pytest.fixture(scope="session") -def ingestion_user(): - return os.getenv("DATABRICKS_USER") - - -@pytest.fixture(scope="session") -def catalog(): - return os.getenv("DATABRICKS_CATALOG") - - -@pytest.fixture(scope="session") -def schema(): - return os.getenv("DATABRICKS_SCHEMA", "default") - - -@pytest.fixture(scope="session", autouse=True) -def connection_details(host, http_path, access_token, ingestion_user, catalog, schema): - return { - "host": host, - "http_path": http_path, - "access_token": access_token, - "ingestion_user": ingestion_user, - "catalog": catalog, - "schema": schema, - } diff --git a/src/databricks/sqlalchemy/test_local/e2e/MOCK_DATA.xlsx b/src/databricks/sqlalchemy/test_local/e2e/MOCK_DATA.xlsx deleted file mode 100644 index e080689a9d978891664c1848474f64401a453165..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 59837 zcmZU)WmH^2w=LR0kl+L-xCaUD5FA2qPjIJ$HZ<UI-7eob@4b7^ zeLrgNQ9st$YtFT5R@JK7O0sYtZ~y=R5&)2td1TCMIkWiQ6NLo;P~PAA_NG=~maqSL zmc@1|bhDxco(W2BKc(1wN6I#BCeK^NWWX@^)DW@M6N8=ABJ>B&2#AC(pmqE9vfjD; z^2zK*ELKPw?x+A>an}S7NIid5kx8Gs;LohMR4rc~r!`Yi)2CzU5P5 zq9gLw@!{u%vjHedB{*t(g;0{a0=(=9g6toWb1&0Kx zDwRkk^|+7ggny>=KNvV0%Iz6|0RTS20|03647gaafE`?|Ou=7WtgZ6ZmMvCTF+I;T zkvv6Aty4tshm~Lh#jueF#r5kfU9Bg2zuQ+{-4M!@GmDES%wUvEE`rCxrHm8D##ICz z#auYQ6q9tP-*v}eI-mOgJ1rsf5A@bD#=Wbo!~ciC=(6SeUB;e1R3OKt!cP5Ad3H#^ zfjjpWl}rOEf5*I=l^HoacD42Qhi(o*i7!)}1To&o`|hwaj?(#&W0Ew#rZi10uQlL% zKLRYvKe%niZj#mSee&yTrS+Mrw1gh+w2;G)%Mc`PEXzU#bO8|ijWv`K%Y`f&uMET@ zwa?5xlI&uvVZ(@1VLKc`D{Y05C~6Mu^;k}ts=k;YmA@1!wWPg_<_pdjLt|1zVaGc? zAYJ?mnNidgNbO60v(~3U^@CAR8EjFu;8{eIM(o1kK>{Gf4o~G9e4_Bl>1`n%jRFB( z<)Rs5WR9Nr{=p~TlbJ3>o8(U>Ydo1(8B3nA>F_V!VY;~2%R!_b1l!NNkhn@mk5MCB zSH!vQevMl%zt_9opuCe4#nt;gS2*8qaGe~6RU zxsQ==)n0cjeJrKSueee%Yn#lfyh;floR%9Z9xoC8XUL6RJp|F;L%#dou--#%Vr#5q zZwmsm7~9&L{wM6#G0KYFDf*k)5f89q~^DjC7J7Pk1E$674=vWf+`L^)>YUjcF(E?%} z-1S3*Ml)6hgR;I2#}nHv5}(A&?S26ekLu>6^OWXMN}35_W&p?TeWCf%T0w8>Z0RKH zlzU|4#UB*pM{29_DKc?KO?M5_@&NZg$(v|XER1|7?caOD{6EPv1cCm;U3z2{au+L( zq;q?Yi7hsX#E;aky^GmRiQ9E(Nq1tGOgCo{NKFEIV>_S}o2xscfFxo=#@qN$@L3c` zAe&xC-bA{|gWXSGL`)Ei2fXFVekO@Co3f+av#Apr(_%{s+h^D%aLSK~DDG~;{389! 
zj>Cu^Q{?AdW3Mx^JF_os3JIKdX1m9_rg*1%={TC`I2h?T{OLHp8~F@RBYsgND^0Br zoXzg$Wkzn(#c9gEwWO6$Eco3vq)qfKJvR55vd!&@{p$M##QPbv_ZgI$zPWGv4hy%y zd9QX_be65;J=^xs=&6L1Z-wt$>iS;0w5Cbm*9~|T?L2+&vtl4c+U(E47~Ej*@ZiJ4 zYMJ4o1FtEOh>KCS3`KvE!Yws>+`dtnf0E`eM>+kcW?~~p&^x~R_jrxf`Bc^U^wjw# zdJZwZ^j`a&T&i>Z#o_4HK1bx}`VTbOh%R?xm)6;w-xZj)@p!}ev+u8XeZU}o1<;HcmqVi+jHmUHp9_CKW5a3x!@fgI*dDa4MB=fADlxb5XllBI_bWbZCA0@JiZQcm(6`{a^zEXBPO=uAf z#R3`Kk44R}UMV^$6XNr9nj$&5QSh%aBF{lsN5rvB~@?^p^u;2DK%<3)g zaXgw7D{vM0#eYq@%4PUI7GbPnr>bJ7r(#Dm__V{*Zr9q0`N3ilWEs6sfYI`F?&v~e z9E-j=leSf`O( zjVhj!$0aM`X!1QTW9DX;HzvXz(}lpOOIPF`wBOKHUz(~Yp5b=4;vhqB?BR5Lb;lH{ zdyI*G$7#u|H}3Dp>&D_ts^Uy~;!Gwrhc3F#bn}Y7tUjldxPaE`m8rd1SdvK5 zw_MhjUke54QsFW-ei*jv8T0W8o%B196+4@s0GQC&j@_YPVv@8n+Ck z$_|cSz`PwZnWgcSwc73Fv8q0&ne4@xcH2K7s=bhf?+{-3(I>jWeF_YutD#nt+e!Yf z$#LsJ5NvU^k(SHon}8LpQ)sD5x*C~@FD z%@En2uEX@}1mvOOEFA6i4Kox%!&aiK--fRLa-&@QZZ9~z!${;XoE+eZ6oHCz=r87lt&(!hwk0AO#N> z;B7c?7Y+UQv?q_gMx$)$QZ(NB3&$x5DJZ_XAs_gbF}V0)w=ZUKOfdZ?i(tJd$ch3Cv6 zYZ_df4Oxx?l)TA7cdvj5S3u-DQhEWreD!Key6<2WSLu{9JJZ1%o*}i%_c`hzoE&k< za_gihOW-{?Z^2odg0~3ht}uuo3`)1yH26rtKg{w<9z0Wjt>F`LQtc;G5O<#sT8veZ z7$&5zJUQfXxE3KY#Txtgbu$O*ItMC%165!)5N8zH9<1NV>Z(h^`u02yjct4>eFk*j z)ntA94fW~Sm2TDLohQaB2C6CsdMXC)Z6#}|y7E^-;K^E|^Yfkv^TGY+q5+?ME(?5ER|Le=jUD>Nz zI|@*<<_t+9Tj>w4^EYZtsi}T8CTq|kYk-wCm}pk~B=rK&XuQ?VjG%y8SxxLe8(4`) zq5PjSyD3|Pc7G~bzne!*!0DTocJ%&bGy^?31C#byAFkmiw$V7>lZ*|q1RUNEf@S4H z!{i?I9SeP9a`zz?oNsbztANs=n}Y1%kY7$m=0dRS_wT_S=wClo&~x{ z86@?9?Cnk;OqNQ%+c=Ypw9ti}H9GsVa&~4v^I!tQ;0V(oYi_~Ixzq-F`75(JrNsMA zr(U~Jw7}7szGzx`{)eMZbK*;zWfI3*9TR0{r}spe-Bf{Ns=y9aAXXJP(RZPQsG|Qw zWC7P@`ZLbbZguK-a;D|K*~6FZ9)EEI-u~6_gN0>9WHhPVIS>H>@!5m+?LlNEHJMJg z=NPvc{D&ug{!$hbMSdX$a<24vzDjccDjU?L(&6$J4oB6@eC%1skU;2y_;f(~Iv_Gd zr`8_smMAB0vU%)!Sk|JXF@{1Rlo=@0&d|Y2*DKy;Bw2Hf@!PpAo&&1M2F0^ODRiqH zT9rtS`!0{?h*@)Or>zDKV_!K@;kN^VPu2CNLRWY>E-V^<_q}zoa@|+LC~BhDE0P@| zk{!^=4$49H8I?U(Tq^Bwx%g9AJLlgF>U>z z8rLL>Y;|t^WpLt_Q4LV5X1eA6wa)M%Rp3i}1qvU5!lP06>i5C03?}eC$&rfTEfhY< zWS&HUP`Yj$@Z(2#xtMIW$W~tD6D;(h%^sQLtZQ|U-RQCEXGx5OHilXo1J=estfsOm zWdm+fx8JODkM%~D(Fr|R&Z}f+!za2=(&(FoI%iLWn0eM(s9~tpFkm$z;IwIr9y21b z1^`@)zd8T26)6`-@_Xh;M^8ge%3Es9-yzwK=p|F2jBTl}huAD|7iXI*kn~yDVW{me zU^@)N?9>}Z(cDN1P^v`ps<{wO*98)kb_)5@ zMn)6wTqI{he78~5R0ef}6=8w%vn8|e<5fp7fMJi2yi{WzV}J4Ps{4Nj*KuQR+x>7y_53LrP~a!*HRmw;F_i9$d2z)N_e9=s=y5KpZIQbN&_ znO-)j$i0wAp?vb&`H=$ok#K97!-VvY67=Keh0o>xyzbm)4E&HgzxG_Q7V&MsOFZdQ z1wfK2RcbY5OGKbzIr3vJd46tWaW46%hcn#lmsghPP3H#by-56IO*~=R5Gy}jj3;bG zh4A?xe0TgVh5K$@$uUaxKE+HO?&T+3M-5Ij&$gVjOXLwWQ4$4JFmW1kaS zpSUyOP>2R^yT;hcinN40y%0M}$uW2GOMpL;#X( ztF{skPuI`p5)r#s;mZ=?%Yyc0na^JZNr(XeM|!&J>)akN@%fR(m$JnBj~vNB7R8t) z^P}dJj||Ke-YgN`ENE|*hZ99apBWNL=q{t#?|5wrXf)&@bm^&?c%M9*^MPH@$TYS} zAe9uCyy2M5OFEZR5yTP^#DWfDnGb_@626kp2SP5iRQc&Q8BKkSeTL@0^Rj7OmC@Im zx<5Fp-^5L|~K}R~GBR6e!MxN8_Fl!)Njp?VQKs?DaaZl|0KW4UM9t4J zd)czM?A4{bW!SN9@!s4&5fWPn{fTW0N{|heS_p=3?mT-MeUylbnuwA2*Crv793|+j zc)FrY1c7rU2B!_~!XeFSUcnC?`GeoWJ)OcN$jHSmt{w5iLNhKam8GlaP_dh`_2I;o z2aCg~+Tn$JWN`^R&>N0VE8TvhneQ$xan!5jQtw)QculZ3t)dNETx4^vJLn&iz=&Z*#^s*b`y1667?A$yd{|%07)hkXS!WM{);U93xEB^~__7&_|r*cbkc* zOyZw&`~$sBL=(sTZ=HSJJe_Ub+?@Z4_^P6S1*XG zm(_EBcL>7bzYQeO-dMPU2oxX+q5p>f5aquH^1p}hpHnz_FC2y-g5rsd?;l`3qFsqv z`~@SR5QrDze-!%HfyDn;o$GaB?K-mnKvX3;fbAaw$NnFI|5ud0HaW%L&eX)u6Qb=* Ki0m!^!2bbc3S|iZ diff --git a/src/databricks/sqlalchemy/test_local/e2e/test_basic.py b/src/databricks/sqlalchemy/test_local/e2e/test_basic.py deleted file mode 100644 index ce0b5d89..00000000 --- a/src/databricks/sqlalchemy/test_local/e2e/test_basic.py +++ /dev/null @@ -1,543 +0,0 @@ -import datetime -import decimal -from 
typing import Tuple, Union, List -from unittest import skipIf - -import pytest -from sqlalchemy import ( - Column, - MetaData, - Table, - Text, - create_engine, - insert, - select, - text, -) -from sqlalchemy.engine import Engine -from sqlalchemy.engine.reflection import Inspector -from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column -from sqlalchemy.schema import DropColumnComment, SetColumnComment -from sqlalchemy.types import BOOLEAN, DECIMAL, Date, Integer, String - -try: - from sqlalchemy.orm import declarative_base -except ImportError: - from sqlalchemy.ext.declarative import declarative_base - - -USER_AGENT_TOKEN = "PySQL e2e Tests" - - -def sqlalchemy_1_3(): - import sqlalchemy - - return sqlalchemy.__version__.startswith("1.3") - - -def version_agnostic_select(object_to_select, *args, **kwargs): - """ - SQLAlchemy==1.3.x requires arguments to select() to be a Python list - - https://docs.sqlalchemy.org/en/20/changelog/migration_14.html#orm-query-is-internally-unified-with-select-update-delete-2-0-style-execution-available - """ - - if sqlalchemy_1_3(): - return select([object_to_select], *args, **kwargs) - else: - return select(object_to_select, *args, **kwargs) - - -def version_agnostic_connect_arguments(connection_details) -> Tuple[str, dict]: - HOST = connection_details["host"] - HTTP_PATH = connection_details["http_path"] - ACCESS_TOKEN = connection_details["access_token"] - CATALOG = connection_details["catalog"] - SCHEMA = connection_details["schema"] - - ua_connect_args = {"_user_agent_entry": USER_AGENT_TOKEN} - - if sqlalchemy_1_3(): - conn_string = f"databricks://token:{ACCESS_TOKEN}@{HOST}" - connect_args = { - **ua_connect_args, - "http_path": HTTP_PATH, - "server_hostname": HOST, - "catalog": CATALOG, - "schema": SCHEMA, - } - - return conn_string, connect_args - else: - return ( - f"databricks://token:{ACCESS_TOKEN}@{HOST}?http_path={HTTP_PATH}&catalog={CATALOG}&schema={SCHEMA}", - ua_connect_args, - ) - - -@pytest.fixture -def db_engine(connection_details) -> Engine: - conn_string, connect_args = version_agnostic_connect_arguments(connection_details) - return create_engine(conn_string, connect_args=connect_args) - - -def run_query(db_engine: Engine, query: Union[str, Text]): - if not isinstance(query, Text): - _query = text(query) # type: ignore - else: - _query = query # type: ignore - with db_engine.begin() as conn: - return conn.execute(_query).fetchall() - - -@pytest.fixture -def samples_engine(connection_details) -> Engine: - details = connection_details.copy() - details["catalog"] = "samples" - details["schema"] = "nyctaxi" - conn_string, connect_args = version_agnostic_connect_arguments(details) - return create_engine(conn_string, connect_args=connect_args) - - -@pytest.fixture() -def base(db_engine): - return declarative_base() - - -@pytest.fixture() -def session(db_engine): - return Session(db_engine) - - -@pytest.fixture() -def metadata_obj(db_engine): - return MetaData() - - -def test_can_connect(db_engine): - simple_query = "SELECT 1" - result = run_query(db_engine, simple_query) - assert len(result) == 1 - - -def test_connect_args(db_engine): - """Verify that extra connect args passed to sqlalchemy.create_engine are passed to DBAPI - - This will most commonly happen when partners supply a user agent entry - """ - - conn = db_engine.connect() - connection_headers = conn.connection.thrift_backend._transport._headers - user_agent = connection_headers["User-Agent"] - - expected = f"(sqlalchemy + {USER_AGENT_TOKEN})" - assert expected 
-
-
-@pytest.mark.skipif(sqlalchemy_1_3(), reason="Pandas requires SQLAlchemy >= 1.4")
-@pytest.mark.skip(
-    reason="DBR is currently limited to 256 parameters per call to .execute(). Test cannot pass."
-)
-def test_pandas_upload(db_engine, metadata_obj):
-    import pandas as pd
-
-    SCHEMA = "default"
-    try:
-        df = pd.read_excel(
-            "src/databricks/sqlalchemy/test_local/e2e/demo_data/MOCK_DATA.xlsx"
-        )
-        df.to_sql(
-            "mock_data",
-            db_engine,
-            schema=SCHEMA,
-            index=False,
-            method="multi",
-            if_exists="replace",
-        )
-
-        df_after = pd.read_sql_table("mock_data", db_engine, schema=SCHEMA)
-        assert len(df) == len(df_after)
-    except Exception as e:
-        raise e
-    finally:
-        db_engine.execute("DROP TABLE mock_data")
-
-
-def test_create_table_not_null(db_engine, metadata_obj: MetaData):
-    table_name = "PySQLTest_{}".format(datetime.datetime.utcnow().strftime("%s"))
-
-    SampleTable = Table(
-        table_name,
-        metadata_obj,
-        Column("name", String(255)),
-        Column("episodes", Integer),
-        Column("some_bool", BOOLEAN, nullable=False),
-    )
-
-    metadata_obj.create_all(db_engine)
-
-    columns = db_engine.dialect.get_columns(
-        connection=db_engine.connect(), table_name=table_name
-    )
-
-    name_column_description = columns[0]
-    some_bool_column_description = columns[2]
-
-    assert name_column_description.get("nullable") is True
-    assert some_bool_column_description.get("nullable") is False
-
-    metadata_obj.drop_all(db_engine)
-
-
-def test_column_comment(db_engine, metadata_obj: MetaData):
-    table_name = "PySQLTest_{}".format(datetime.datetime.utcnow().strftime("%s"))
-
-    column = Column("name", String(255), comment="some comment")
-    SampleTable = Table(table_name, metadata_obj, column)
-
-    metadata_obj.create_all(db_engine)
-    connection = db_engine.connect()
-
-    columns = db_engine.dialect.get_columns(
-        connection=connection, table_name=table_name
-    )
-
-    assert columns[0].get("comment") == "some comment"
-
-    column.comment = "other comment"
-    connection.execute(SetColumnComment(column))
-
-    columns = db_engine.dialect.get_columns(
-        connection=connection, table_name=table_name
-    )
-
-    assert columns[0].get("comment") == "other comment"
-
-    connection.execute(DropColumnComment(column))
-
-    columns = db_engine.dialect.get_columns(
-        connection=connection, table_name=table_name
-    )
-
-    assert columns[0].get("comment") is None
-
-    metadata_obj.drop_all(db_engine)
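The two tests above consume `dialect.get_columns` records directly. A minimal sketch of that shape when fetched by hand (the engine URL is a placeholder, and only the keys asserted above are shown):

```python
from sqlalchemy import create_engine

# Placeholder DSN; real values come from the connection_details fixture.
engine = create_engine(
    "databricks://token:dapi-example@example.cloud.databricks.com"
    "?http_path=/sql/1.0/endpoints/example&catalog=main&schema=default"
)

with engine.connect() as conn:
    columns = engine.dialect.get_columns(connection=conn, table_name="some_table")
    for col in columns:
        # Each record is a dict; the tests above read "nullable" and "comment".
        print(col["name"], col.get("nullable"), col.get("comment"))
```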
-
-
-def test_bulk_insert_with_core(db_engine, metadata_obj, session):
-    import random
-
-    # Maximum number of parameters is 256. 256/4 == 64
-    num_to_insert = 64
-
-    table_name = "PySQLTest_{}".format(datetime.datetime.utcnow().strftime("%s"))
-
-    names = ["Bim", "Miki", "Sarah", "Ira"]
-
-    SampleTable = Table(
-        table_name, metadata_obj, Column("name", String(255)), Column("number", Integer)
-    )
-
-    rows = [
-        {"name": names[i % len(names)], "number": random.choice(range(64))}
-        for i in range(num_to_insert)
-    ]
-
-    metadata_obj.create_all(db_engine)
-    with db_engine.begin() as conn:
-        conn.execute(insert(SampleTable).values(rows))
-
-    with db_engine.begin() as conn:
-        rows = conn.execute(version_agnostic_select(SampleTable)).fetchall()
-
-    assert len(rows) == num_to_insert
-
-
-def test_create_insert_drop_table_core(base, db_engine, metadata_obj: MetaData):
-    """Create a table with Core, insert one record, read it back, then drop the table."""
-
-    SampleTable = Table(
-        "PySQLTest_{}".format(datetime.datetime.utcnow().strftime("%s")),
-        metadata_obj,
-        Column("name", String(255)),
-        Column("episodes", Integer),
-        Column("some_bool", BOOLEAN),
-        Column("dollars", DECIMAL(10, 2)),
-    )
-
-    metadata_obj.create_all(db_engine)
-
-    insert_stmt = insert(SampleTable).values(
-        name="Bim Adewunmi", episodes=6, some_bool=True, dollars=decimal.Decimal(125)
-    )
-
-    with db_engine.connect() as conn:
-        conn.execute(insert_stmt)
-
-    select_stmt = version_agnostic_select(SampleTable)
-    with db_engine.begin() as conn:
-        resp = conn.execute(select_stmt)
-
-    result = resp.fetchall()
-
-    assert len(result) == 1
-
-    metadata_obj.drop_all(db_engine)
-
-
-# ORM tests follow this tutorial:
-# https://docs.sqlalchemy.org/en/14/orm/quickstart.html
-
-
-@skipIf(False, "Unity catalog must be supported")
-def test_create_insert_drop_table_orm(db_engine):
-    """ORM classes built on the declarative base class must have a primary key.
-    This is restricted to Unity Catalog.
-    """
-
-    class Base(DeclarativeBase):
-        pass
-
-    class SampleObject(Base):
-        __tablename__ = "PySQLTest_{}".format(datetime.datetime.utcnow().strftime("%s"))
-
-        name: Mapped[str] = mapped_column(String(255), primary_key=True)
-        episodes: Mapped[int] = mapped_column(Integer)
-        some_bool: Mapped[bool] = mapped_column(BOOLEAN)
-
-    Base.metadata.create_all(db_engine)
-
-    sample_object_1 = SampleObject(name="Bim Adewunmi", episodes=6, some_bool=True)
-    sample_object_2 = SampleObject(name="Miki Meek", episodes=12, some_bool=False)
-
-    session = Session(db_engine)
-    session.add(sample_object_1)
-    session.add(sample_object_2)
-    session.flush()
-
-    stmt = version_agnostic_select(SampleObject).where(
-        SampleObject.name.in_(["Bim Adewunmi", "Miki Meek"])
-    )
-
-    if sqlalchemy_1_3():
-        output = [i for i in session.execute(stmt)]
-    else:
-        output = [i for i in session.scalars(stmt)]
-
-    assert len(output) == 2
-
-    Base.metadata.drop_all(db_engine)
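The Core and ORM tests above both route their reads through `version_agnostic_select` because SQLAlchemy 1.3 and 1.4+ disagree on the `select()` signature. The difference in isolation, with throwaway column expressions and no engine required:

```python
from sqlalchemy import column, select

# SQLAlchemy 1.4+ / 2.0: column expressions are passed positionally.
modern = select(column("name"), column("episodes"))

# SQLAlchemy 1.3 required a single list instead, i.e.:
#   legacy = select([column("name"), column("episodes")])
# which is exactly the branch that version_agnostic_select() preserves.
print(modern)
```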
-
-
-def test_dialect_type_mappings(db_engine, metadata_obj: MetaData):
-    """Confirms that we get back the same values we declared in a model and inserted using Core"""
-
-    class Base(DeclarativeBase):
-        pass
-
-    SampleTable = Table(
-        "PySQLTest_{}".format(datetime.datetime.utcnow().strftime("%s")),
-        metadata_obj,
-        Column("string_example", String(255)),
-        Column("integer_example", Integer),
-        Column("boolean_example", BOOLEAN),
-        Column("decimal_example", DECIMAL(10, 2)),
-        Column("date_example", Date),
-    )
-
-    string_example = ""
-    integer_example = 100
-    boolean_example = True
-    decimal_example = decimal.Decimal(125)
-    date_example = datetime.date(2013, 1, 1)
-
-    metadata_obj.create_all(db_engine)
-
-    insert_stmt = insert(SampleTable).values(
-        string_example=string_example,
-        integer_example=integer_example,
-        boolean_example=boolean_example,
-        decimal_example=decimal_example,
-        date_example=date_example,
-    )
-
-    with db_engine.connect() as conn:
-        conn.execute(insert_stmt)
-
-    select_stmt = version_agnostic_select(SampleTable)
-    with db_engine.begin() as conn:
-        resp = conn.execute(select_stmt)
-
-    result = resp.fetchall()
-    this_row = result[0]
-
-    assert this_row.string_example == string_example
-    assert this_row.integer_example == integer_example
-    assert this_row.boolean_example == boolean_example
-    assert this_row.decimal_example == decimal_example
-    assert this_row.date_example == date_example
-
-    metadata_obj.drop_all(db_engine)
-
-
-def test_inspector_smoke_test(samples_engine: Engine):
-    """It does not appear that 3L namespace is supported here"""
-
-    schema, table = "nyctaxi", "trips"
-
-    try:
-        inspector = Inspector.from_engine(samples_engine)
-    except Exception as e:
-        assert False, f"Could not build inspector: {e}"
-
-    # Expect six columns
-    columns = inspector.get_columns(table, schema=schema)
-
-    # Expect zero views, but the method should still return successfully
-    views = inspector.get_view_names(schema=schema)
-
-    assert (
-        len(columns) == 6
-    ), "Dialect did not find the expected number of columns in samples.nyctaxi.trips"
-    assert len(views) == 0, "Views could not be fetched"
-
-
-@pytest.mark.skip(reason="engine.table_names has been removed in sqlalchemy version 2")
-def test_get_table_names_smoke_test(samples_engine: Engine):
-    with samples_engine.connect() as conn:
-        _names = samples_engine.table_names(schema="nyctaxi", connection=conn)  # type: ignore
-        assert _names is not None, "get_table_names did not succeed"
-
-
-def test_has_table_across_schemas(
-    db_engine: Engine, samples_engine: Engine, catalog: str, schema: str
-):
-    """For this test to pass these conditions must be met:
-    - Table samples.nyctaxi.trips must exist
-    - Table samples.tpch.customer must exist
-    - The `catalog` and `schema` environment variables must be set and valid
-    """
-
-    with samples_engine.connect() as conn:
-        # 1) Check for table within schema declared at engine creation time
-        assert samples_engine.dialect.has_table(connection=conn, table_name="trips")
-
-        # 2) Check for table within another schema in the same catalog
-        assert samples_engine.dialect.has_table(
-            connection=conn, table_name="customer", schema="tpch"
-        )
-
-    # 3) Check for a table within a different catalog
-    # Create a table in a different catalog
-    with db_engine.connect() as conn:
-        conn.execute(text("CREATE TABLE test_has_table (numbers_are_cool INT);"))
-
-        try:
-            # Verify that this table is not found in the samples catalog
-            assert not samples_engine.dialect.has_table(
-                connection=conn, table_name="test_has_table"
-            )
-            # Verify that this table is found in a separate catalog
-            assert samples_engine.dialect.has_table(
-                connection=conn,
-                table_name="test_has_table",
-                schema=schema,
-                catalog=catalog,
-            )
-        finally:
-            conn.execute(text("DROP TABLE test_has_table;"))
-
-
-def test_user_agent_adjustment(db_engine):
-    # If .connect() is called multiple times on an engine, don't keep pre-pending the user agent
-    # https://github.com/databricks/databricks-sql-python/issues/192
-    c1 = db_engine.connect()
-    c2 = db_engine.connect()
-
-    def get_conn_user_agent(conn):
-        return conn.connection.dbapi_connection.thrift_backend._transport._headers.get(
-            "User-Agent"
-        )
-
-    ua1 = get_conn_user_agent(c1)
-    ua2 = get_conn_user_agent(c2)
-    same_ua = ua1 == ua2
-
-    c1.close()
-    c2.close()
-
-    assert same_ua, f"User agents didn't match \n {ua1} \n {ua2}"
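For reference, the `_user_agent_entry` behaviour pinned by the test above comes from passing the entry through `connect_args`. A sketch with placeholder credentials:

```python
from sqlalchemy import create_engine

# Placeholder DSN; catalog and schema as in the fixtures above.
engine = create_engine(
    "databricks://token:dapi-example@example.cloud.databricks.com"
    "?http_path=/sql/1.0/endpoints/example&catalog=main&schema=default",
    connect_args={"_user_agent_entry": "ExamplePartner"},
)

# Per the test above (and issue #192), both connections should report the
# same User-Agent header rather than prepending the entry a second time.
c1 = engine.connect()
c2 = engine.connect()
```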
-
-
-@pytest.fixture
-def sample_table(metadata_obj: MetaData, db_engine: Engine):
-    """This fixture creates a sample table and cleans it up after the test is complete."""
-    from databricks.sqlalchemy._parse import GET_COLUMNS_TYPE_MAP
-
-    table_name = "PySQLTest_{}".format(datetime.datetime.utcnow().strftime("%s"))
-
-    args: List[Column] = [
-        Column(colname, coltype) for colname, coltype in GET_COLUMNS_TYPE_MAP.items()
-    ]
-
-    SampleTable = Table(table_name, metadata_obj, *args)
-
-    metadata_obj.create_all(db_engine)
-
-    yield table_name
-
-    metadata_obj.drop_all(db_engine)
-
-
-def test_get_columns(db_engine, sample_table: str):
-    """Created after PECO-1297 and Github Issue #295 to verify that get_columns behaves as it should for all known SQLAlchemy types"""
-
-    inspector = Inspector.from_engine(db_engine)
-
-    # this raises an exception if `parse_column_info_from_tgetcolumnsresponse` fails a lookup
-    columns = inspector.get_columns(sample_table)
-
-    assert True
-
-
-class TestCommentReflection:
-    @pytest.fixture(scope="class")
-    def engine(self, connection_details: dict):
-        HOST = connection_details["host"]
-        HTTP_PATH = connection_details["http_path"]
-        ACCESS_TOKEN = connection_details["access_token"]
-        CATALOG = connection_details["catalog"]
-        SCHEMA = connection_details["schema"]
-
-        connection_string = f"databricks://token:{ACCESS_TOKEN}@{HOST}?http_path={HTTP_PATH}&catalog={CATALOG}&schema={SCHEMA}"
-        connect_args = {"_user_agent_entry": USER_AGENT_TOKEN}
-
-        engine = create_engine(connection_string, connect_args=connect_args)
-        return engine
-
-    @pytest.fixture
-    def inspector(self, engine: Engine) -> Inspector:
-        return Inspector.from_engine(engine)
-
-    @pytest.fixture(scope="class")
-    def table(self, engine):
-        md = MetaData()
-        tbl = Table(
-            "foo",
-            md,
-            Column("bar", String, comment="column comment"),
-            comment="table comment",
-        )
-        md.create_all(bind=engine)
-
-        yield tbl
-
-        md.drop_all(bind=engine)
-
-    def test_table_comment_reflection(self, inspector: Inspector, table: Table):
-        comment = inspector.get_table_comment(table.name)
-        assert comment == {"text": "table comment"}
-
-    def test_column_comment(self, inspector: Inspector, table: Table):
-        result = inspector.get_columns(table.name)[0].get("comment")
-        assert result == "column comment"
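The comment reflection exercised by `TestCommentReflection`, reduced to direct `Inspector` calls (a sketch using the same placeholder engine as earlier):

```python
from sqlalchemy import create_engine
from sqlalchemy.engine.reflection import Inspector

engine = create_engine(
    "databricks://token:dapi-example@example.cloud.databricks.com"
    "?http_path=/sql/1.0/endpoints/example&catalog=main&schema=default"
)
inspector = Inspector.from_engine(engine)

# Mirrors the two assertions in TestCommentReflection above.
print(inspector.get_table_comment("foo"))              # {"text": "table comment"}
print(inspector.get_columns("foo")[0].get("comment"))  # "column comment"
```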
diff --git a/src/databricks/sqlalchemy/test_local/test_ddl.py b/src/databricks/sqlalchemy/test_local/test_ddl.py
deleted file mode 100644
index f596dffa..00000000
--- a/src/databricks/sqlalchemy/test_local/test_ddl.py
+++ /dev/null
@@ -1,96 +0,0 @@
-import pytest
-from sqlalchemy import Column, MetaData, String, Table, create_engine
-from sqlalchemy.schema import (
-    CreateTable,
-    DropColumnComment,
-    DropTableComment,
-    SetColumnComment,
-    SetTableComment,
-)
-
-
-class DDLTestBase:
-    engine = create_engine(
-        "databricks://token:****@****?http_path=****&catalog=****&schema=****"
-    )
-
-    def compile(self, stmt):
-        return str(stmt.compile(bind=self.engine))
-
-
-class TestColumnCommentDDL(DDLTestBase):
-    @pytest.fixture
-    def metadata(self) -> MetaData:
-        """Assemble a metadata object with one table containing one column."""
-        metadata = MetaData()
-
-        column = Column("foo", String, comment="bar")
-        table = Table("foobar", metadata, column)
-
-        return metadata
-
-    @pytest.fixture
-    def table(self, metadata) -> Table:
-        return metadata.tables.get("foobar")
-
-    @pytest.fixture
-    def column(self, table) -> Column:
-        return table.columns[0]
-
-    def test_create_table_with_column_comment(self, table):
-        stmt = CreateTable(table)
-        output = self.compile(stmt)
-
-        # output is a CREATE TABLE statement
-        assert "foo STRING COMMENT 'bar'" in output
-
-    def test_alter_table_add_column_comment(self, column):
-        stmt = SetColumnComment(column)
-        output = self.compile(stmt)
-        assert output == "ALTER TABLE foobar ALTER COLUMN foo COMMENT 'bar'"
-
-    def test_alter_table_drop_column_comment(self, column):
-        stmt = DropColumnComment(column)
-        output = self.compile(stmt)
-        assert output == "ALTER TABLE foobar ALTER COLUMN foo COMMENT ''"
-
-
-class TestTableCommentDDL(DDLTestBase):
-    @pytest.fixture
-    def metadata(self) -> MetaData:
-        """Assemble a metadata object with two tables: one with a comment and one without."""
-        metadata = MetaData()
-
-        col1 = Column("foo", String)
-        col2 = Column("foo", String)
-        tbl_w_comment = Table("martin", metadata, col1, comment="foobar")
-        tbl_wo_comment = Table("prs", metadata, col2)
-
-        return metadata
-
-    @pytest.fixture
-    def table_with_comment(self, metadata) -> Table:
-        return metadata.tables.get("martin")
-
-    @pytest.fixture
-    def table_without_comment(self, metadata) -> Table:
-        return metadata.tables.get("prs")
-
-    def test_create_table_with_comment(self, table_with_comment):
-        stmt = CreateTable(table_with_comment)
-        output = self.compile(stmt)
-        assert "USING DELTA" in output
-        assert "COMMENT 'foobar'" in output
-
-    def test_alter_table_add_comment(self, table_without_comment: Table):
-        table_without_comment.comment = "wireless mechanical keyboard"
-        stmt = SetTableComment(table_without_comment)
-        output = self.compile(stmt)
-
-        assert output == "COMMENT ON TABLE prs IS 'wireless mechanical keyboard'"
-
-    def test_alter_table_drop_comment(self, table_with_comment):
-        """The syntax for COMMENT ON is here: https://docs.databricks.com/en/sql/language-manual/sql-ref-syntax-ddl-comment.html"""
-        stmt = DropTableComment(table_with_comment)
-        output = self.compile(stmt)
-        assert output == "COMMENT ON TABLE martin IS NULL"
diff --git a/src/databricks/sqlalchemy/test_local/test_parsing.py b/src/databricks/sqlalchemy/test_local/test_parsing.py
deleted file mode 100644
index c8ab443d..00000000
--- a/src/databricks/sqlalchemy/test_local/test_parsing.py
+++ /dev/null
@@ -1,160 +0,0 @@
-import pytest
-from databricks.sqlalchemy._parse import (
-    extract_identifiers_from_string,
-    extract_identifier_groups_from_string,
-    extract_three_level_identifier_from_constraint_string,
-    build_fk_dict,
-    build_pk_dict,
-    match_dte_rows_by_value,
-    get_comment_from_dte_output,
-    DatabricksSqlAlchemyParseException,
-)
-
-
-# These are outputs from DESCRIBE TABLE EXTENDED
-@pytest.mark.parametrize(
-    "input, expected",
-    [
-        ("PRIMARY KEY (`pk1`, `pk2`)", ["pk1", "pk2"]),
-        ("PRIMARY KEY (`a`, `b`, `c`)", ["a", "b", "c"]),
-        ("PRIMARY KEY (`name`, `id`, `attr`)", ["name", "id", "attr"]),
-    ],
-)
-def test_extract_identifiers(input, expected):
-    assert (
-        extract_identifiers_from_string(input) == expected
-    ), "Failed to extract identifiers from string"
-
-
-@pytest.mark.parametrize(
-    "input, expected",
-    [
-        (
-            "FOREIGN KEY (`pname`, `pid`, `pattr`) REFERENCES `main`.`pysql_sqlalchemy`.`tb1` (`name`, `id`, `attr`)",
-            [
-                "(`pname`, `pid`, `pattr`)",
-                "(`name`, `id`, `attr`)",
-            ],
-        )
-    ],
-)
-def test_extract_identifier_batches(input, expected):
-    assert (
-        extract_identifier_groups_from_string(input) == expected
-    ), "Failed to extract identifier groups from string"
-
-
-def test_extract_3l_namespace_from_constraint_string():
-    input = "FOREIGN KEY (`parent_user_id`) REFERENCES `main`.`pysql_dialect_compliance`.`users` (`user_id`)"
-    expected = {
-        "catalog": "main",
-        "schema": "pysql_dialect_compliance",
-        "table": "users",
-    }
-
-    assert (
-        extract_three_level_identifier_from_constraint_string(input) == expected
-    ), "Failed to extract 3L namespace from constraint string"
-
-
-def test_extract_3l_namespace_from_bad_constraint_string():
-    input = "FOREIGN KEY (`parent_user_id`) REFERENCES `pysql_dialect_compliance`.`users` (`user_id`)"
-
-    with pytest.raises(DatabricksSqlAlchemyParseException):
-        extract_three_level_identifier_from_constraint_string(input)
-
-
-@pytest.mark.parametrize("tschema", [None, "some_schema"])
-def test_build_fk_dict(tschema):
-    fk_constraint_string = "FOREIGN KEY (`parent_user_id`) REFERENCES `main`.`some_schema`.`users` (`user_id`)"
-
-    result = build_fk_dict("some_fk_name", fk_constraint_string, schema_name=tschema)
-
-    assert result == {
-        "name": "some_fk_name",
-        "constrained_columns": ["parent_user_id"],
-        "referred_schema": tschema,
-        "referred_table": "users",
-        "referred_columns": ["user_id"],
-    }
-
-
-def test_build_pk_dict():
-    pk_constraint_string = "PRIMARY KEY (`id`, `name`, `email_address`)"
-    pk_name = "pk1"
-
-    result = build_pk_dict(pk_name, pk_constraint_string)
-
-    assert result == {
-        "constrained_columns": ["id", "name", "email_address"],
-        "name": "pk1",
-    }
-
-
-# This is a real example of the output from DESCRIBE TABLE EXTENDED as of 15 October 2023
-RAW_SAMPLE_DTE_OUTPUT = [
-    ["id", "int"],
-    ["name", "string"],
-    ["", ""],
-    ["# Detailed Table Information", ""],
-    ["Catalog", "main"],
-    ["Database", "pysql_sqlalchemy"],
-    ["Table", "exampleexampleexample"],
-    ["Created Time", "Sun Oct 15 21:12:54 UTC 2023"],
-    ["Last Access", "UNKNOWN"],
-    ["Created By", "Spark "],
-    ["Type", "MANAGED"],
-    ["Location", "s3://us-west-2-****-/19a85dee-****/tables/ccb7***"],
-    ["Provider", "delta"],
-    ["Comment", "some comment"],
-    ["Owner", "some.user@example.com"],
-    ["Is_managed_location", "true"],
-    ["Predictive Optimization", "ENABLE (inherited from CATALOG main)"],
-    [
-        "Table Properties",
-        "[delta.checkpoint.writeStatsAsJson=false,delta.checkpoint.writeStatsAsStruct=true,delta.minReaderVersion=1,delta.minWriterVersion=2]",
-    ],
-    ["", ""],
-    ["# Constraints", ""],
-    ["exampleexampleexample_pk", "PRIMARY KEY (`id`)"],
-    [
-        "exampleexampleexample_fk",
-        "FOREIGN KEY (`parent_user_id`) REFERENCES `main`.`pysql_dialect_compliance`.`users` (`user_id`)",
-    ],
-]
-
-FMT_SAMPLE_DT_OUTPUT = [
-    {"col_name": i[0], "data_type": i[1]} for i in RAW_SAMPLE_DTE_OUTPUT
-]
-
-
-@pytest.mark.parametrize(
-    "match, output",
-    [
-        (
-            "PRIMARY KEY",
-            [
-                {
-                    "col_name": "exampleexampleexample_pk",
-                    "data_type": "PRIMARY KEY (`id`)",
-                }
-            ],
-        ),
-        (
-            "FOREIGN KEY",
-            [
-                {
-                    "col_name": "exampleexampleexample_fk",
-                    "data_type": "FOREIGN KEY (`parent_user_id`) REFERENCES `main`.`pysql_dialect_compliance`.`users` (`user_id`)",
-                }
-            ],
-        ),
-    ],
-)
-def test_filter_dict_by_value(match, output):
-    result = match_dte_rows_by_value(FMT_SAMPLE_DT_OUTPUT, match)
-    assert result == output
-
-
-def test_get_comment_from_dte_output():
-    assert get_comment_from_dte_output(FMT_SAMPLE_DT_OUTPUT) == "some comment"
diff --git a/src/databricks/sqlalchemy/test_local/test_types.py b/src/databricks/sqlalchemy/test_local/test_types.py
deleted file mode 100644
index b91217ed..00000000
--- a/src/databricks/sqlalchemy/test_local/test_types.py
+++ /dev/null
@@ -1,161 +0,0 @@
-import enum
-
-import pytest
-import sqlalchemy
-
-from databricks.sqlalchemy.base import DatabricksDialect
-from databricks.sqlalchemy._types import TINYINT, TIMESTAMP, TIMESTAMP_NTZ
-
-
-class DatabricksDataType(enum.Enum):
-    """https://docs.databricks.com/en/sql/language-manual/sql-ref-datatypes.html"""
-
-    BIGINT = enum.auto()
-    BINARY = enum.auto()
-    BOOLEAN = enum.auto()
-    DATE = enum.auto()
-    DECIMAL = enum.auto()
-    DOUBLE = enum.auto()
-    FLOAT = enum.auto()
-    INT = enum.auto()
-    INTERVAL = enum.auto()
-    VOID = enum.auto()
-    SMALLINT = enum.auto()
-    STRING = enum.auto()
-    TIMESTAMP = enum.auto()
-    TIMESTAMP_NTZ = enum.auto()
-    TINYINT = enum.auto()
-    ARRAY = enum.auto()
-    MAP = enum.auto()
-    STRUCT = enum.auto()
-
-
-# Defines the way that SQLAlchemy CamelCase types are compiled into Databricks SQL types.
-# Note: I wish I could define this within the TestCamelCaseTypesCompilation class, but pytest doesn't like that.
-camel_case_type_map = {
-    sqlalchemy.types.BigInteger: DatabricksDataType.BIGINT,
-    sqlalchemy.types.LargeBinary: DatabricksDataType.BINARY,
-    sqlalchemy.types.Boolean: DatabricksDataType.BOOLEAN,
-    sqlalchemy.types.Date: DatabricksDataType.DATE,
-    sqlalchemy.types.DateTime: DatabricksDataType.TIMESTAMP_NTZ,
-    sqlalchemy.types.Double: DatabricksDataType.DOUBLE,
-    sqlalchemy.types.Enum: DatabricksDataType.STRING,
-    sqlalchemy.types.Float: DatabricksDataType.FLOAT,
-    sqlalchemy.types.Integer: DatabricksDataType.INT,
-    sqlalchemy.types.Interval: DatabricksDataType.TIMESTAMP_NTZ,
-    sqlalchemy.types.Numeric: DatabricksDataType.DECIMAL,
-    sqlalchemy.types.PickleType: DatabricksDataType.BINARY,
-    sqlalchemy.types.SmallInteger: DatabricksDataType.SMALLINT,
-    sqlalchemy.types.String: DatabricksDataType.STRING,
-    sqlalchemy.types.Text: DatabricksDataType.STRING,
-    sqlalchemy.types.Time: DatabricksDataType.STRING,
-    sqlalchemy.types.Unicode: DatabricksDataType.STRING,
-    sqlalchemy.types.UnicodeText: DatabricksDataType.STRING,
-    sqlalchemy.types.Uuid: DatabricksDataType.STRING,
-}
-
-
-def dict_as_tuple_list(d: dict):
-    """Return a list of [(key, value), ...] from a dictionary."""
-    return [(key, value) for key, value in d.items()]
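What the parametrized test further below does for each entry in `camel_case_type_map`, spelled out for a single type (assumes the databricks dialect package is importable):

```python
import sqlalchemy
from databricks.sqlalchemy.base import DatabricksDialect

# A CamelCase type initialised with no arguments renders as the bare
# Databricks SQL type name, mirroring _assert_compiled_value in the test base.
rendered = sqlalchemy.types.BigInteger().compile(dialect=DatabricksDialect())
assert rendered == "BIGINT"
```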
-
-
-class CompilationTestBase:
-    dialect = DatabricksDialect()
-
-    def _assert_compiled_value(
-        self, type_: sqlalchemy.types.TypeEngine, expected: DatabricksDataType
-    ):
-        """Assert that when type_ is compiled for the databricks dialect, it renders the DatabricksDataType name.
-
-        This method initialises the type_ with no arguments.
-        """
-        compiled_result = type_().compile(dialect=self.dialect)  # type: ignore
-        assert compiled_result == expected.name
-
-    def _assert_compiled_value_explicit(
-        self, type_: sqlalchemy.types.TypeEngine, expected: str
-    ):
-        """Assert that when type_ is compiled for the databricks dialect, it renders the expected string.
-
-        This method expects an initialised type_ so that we can test how a TypeEngine created with arguments
-        is compiled.
-        """
-        compiled_result = type_.compile(dialect=self.dialect)
-        assert compiled_result == expected
-
-
-class TestCamelCaseTypesCompilation(CompilationTestBase):
-    """Per the sqlalchemy documentation[^1], the camel case members of sqlalchemy.types are
-    expected to work across all dialects. These tests verify that the types compile into valid
-    Databricks SQL type strings. For example, sqlalchemy.types.Integer() should compile as "INT".
-
-    Truly custom types like STRUCT (notice the uppercase) are not expected to work across all dialects.
-    We test these separately.
-
-    Note that these tests have to do with type **name** compilation, which is separate from actually
-    mapping values between Python and Databricks.
-
-    Note: SchemaType and MatchType are not tested because they are not used in table definitions.
-
-    [1]: https://docs.sqlalchemy.org/en/20/core/type_basics.html#generic-camelcase-types
-    """
-
-    @pytest.mark.parametrize("type_, expected", dict_as_tuple_list(camel_case_type_map))
-    def test_bare_camel_case_types_compile(self, type_, expected):
-        self._assert_compiled_value(type_, expected)
-
-    def test_numeric_renders_as_decimal_with_precision(self):
-        self._assert_compiled_value_explicit(
-            sqlalchemy.types.Numeric(10), "DECIMAL(10)"
-        )
-
-    def test_numeric_renders_as_decimal_with_precision_and_scale(self):
-        self._assert_compiled_value_explicit(
-            sqlalchemy.types.Numeric(10, 2), "DECIMAL(10, 2)"
-        )
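Types constructed with arguments keep them in the compiled string, which is why `_assert_compiled_value_explicit` takes an already-initialised type. A sketch under the same import assumptions as above:

```python
import sqlalchemy
from databricks.sqlalchemy.base import DatabricksDialect

dialect = DatabricksDialect()

# Arguments survive compilation:
print(sqlalchemy.types.Numeric(10, 2).compile(dialect=dialect))  # DECIMAL(10, 2)

# ARRAY cannot even be constructed without an item type, which is why the
# bare-type loop below has to special-case it:
print(sqlalchemy.types.ARRAY(sqlalchemy.types.String).compile(dialect=dialect))
```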
-
-
-uppercase_type_map = {
-    sqlalchemy.types.ARRAY: DatabricksDataType.ARRAY,
-    sqlalchemy.types.BIGINT: DatabricksDataType.BIGINT,
-    sqlalchemy.types.BINARY: DatabricksDataType.BINARY,
-    sqlalchemy.types.BOOLEAN: DatabricksDataType.BOOLEAN,
-    sqlalchemy.types.DATE: DatabricksDataType.DATE,
-    sqlalchemy.types.DECIMAL: DatabricksDataType.DECIMAL,
-    sqlalchemy.types.DOUBLE: DatabricksDataType.DOUBLE,
-    sqlalchemy.types.FLOAT: DatabricksDataType.FLOAT,
-    sqlalchemy.types.INT: DatabricksDataType.INT,
-    sqlalchemy.types.SMALLINT: DatabricksDataType.SMALLINT,
-    sqlalchemy.types.TIMESTAMP: DatabricksDataType.TIMESTAMP,
-    TINYINT: DatabricksDataType.TINYINT,
-    TIMESTAMP: DatabricksDataType.TIMESTAMP,
-    TIMESTAMP_NTZ: DatabricksDataType.TIMESTAMP_NTZ,
-}
-
-
-class TestUppercaseTypesCompilation(CompilationTestBase):
-    """Per the sqlalchemy documentation[^1], uppercase types are considered to be specific to some
-    database backends. These tests verify that the types compile into valid Databricks SQL type strings.
-
-    [1]: https://docs.sqlalchemy.org/en/20/core/type_basics.html#backend-specific-uppercase-datatypes
-    """
-
-    @pytest.mark.parametrize("type_, expected", dict_as_tuple_list(uppercase_type_map))
-    def test_bare_uppercase_types_compile(self, type_, expected):
-        if type_ is sqlalchemy.types.ARRAY:
-            # ARRAY cannot be initialised without passing an item definition so we test it separately
-            # I preserve it in the uppercase_type_map for clarity
-            assert True
-        else:
-            self._assert_compiled_value(type_, expected)
-
-    def test_array_string_renders_as_array_of_string(self):
-        """SQLAlchemy's ARRAY type requires an item definition. And their docs indicate that they've only tested
-        it with Postgres since that's the only first-class dialect with support for ARRAY.
-
-        https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.ARRAY
-        """
-        self._assert_compiled_value_explicit(
-            sqlalchemy.types.ARRAY(sqlalchemy.types.String), "ARRAY<STRING>"
-        )
diff --git a/tests/unit/test_arrow_queue.py b/tests/unit/test_arrow_queue.py
index b3dff45f..c6aef195 100644
--- a/tests/unit/test_arrow_queue.py
+++ b/tests/unit/test_arrow_queue.py
@@ -1,10 +1,12 @@
 import unittest
-
-import pyarrow as pa
-
+import pytest
+try:
+    import pyarrow as pa
+except ImportError:
+    pa = None
 from databricks.sql.utils import ArrowQueue
-
+@pytest.mark.skipif(pa is None, reason="PyArrow is not installed")
 class ArrowQueueSuite(unittest.TestCase):
     @staticmethod
     def make_arrow_table(batch):
diff --git a/tests/unit/test_cloud_fetch_queue.py b/tests/unit/test_cloud_fetch_queue.py
index 01d8a79b..59b6ce5c 100644
--- a/tests/unit/test_cloud_fetch_queue.py
+++ b/tests/unit/test_cloud_fetch_queue.py
@@ -1,12 +1,16 @@
-import pyarrow
+try:
+    import pyarrow
+except ImportError:
+    pyarrow = None
 import unittest
+import pytest
 from unittest.mock import MagicMock, patch
 
 from databricks.sql.thrift_api.TCLIService.ttypes import TSparkArrowResultLink
 import databricks.sql.utils as utils
 from databricks.sql.types import SSLOptions
-
+@pytest.mark.skipif(pyarrow is None, reason="PyArrow is not installed")
 class CloudFetchQueueSuite(unittest.TestCase):
     def create_result_link(
         self,
diff --git a/tests/unit/test_fetches.py b/tests/unit/test_fetches.py
index 89cedcfa..2af679e3 100644
--- a/tests/unit/test_fetches.py
+++ b/tests/unit/test_fetches.py
@@ -1,12 +1,16 @@
 import unittest
+import pytest
 from unittest.mock import Mock
 
-import pyarrow as pa
+try:
+    import pyarrow as pa
+except ImportError:
+    pa = None
 
 import databricks.sql.client as client
 from databricks.sql.utils import ExecuteResponse, ArrowQueue
-
+@pytest.mark.skipif(pa is None, reason="PyArrow is not installed")
 class FetchTests(unittest.TestCase):
     """
     Unit tests for checking the fetch logic.
diff --git a/tests/unit/test_fetches_bench.py b/tests/unit/test_fetches_bench.py
index 9382c3b3..6c5698b3 100644
--- a/tests/unit/test_fetches_bench.py
+++ b/tests/unit/test_fetches_bench.py
@@ -1,7 +1,9 @@
 import unittest
 from unittest.mock import Mock
-
-import pyarrow as pa
+try:
+    import pyarrow as pa
+except ImportError:
+    pa = None
 import uuid
 import time
 import pytest
@@ -9,7 +11,7 @@
 import databricks.sql.client as client
 from databricks.sql.utils import ExecuteResponse, ArrowQueue
 
-
+@pytest.mark.skipif(pa is None, reason="PyArrow is not installed")
 class FetchBenchmarkTests(unittest.TestCase):
     """
     Micro benchmark test for Arrow result handling.
diff --git a/tests/unit/test_thrift_backend.py b/tests/unit/test_thrift_backend.py
index 293467af..135f4229 100644
--- a/tests/unit/test_thrift_backend.py
+++ b/tests/unit/test_thrift_backend.py
@@ -2,12 +2,14 @@
 from decimal import Decimal
 import itertools
 import unittest
+import pytest
 from unittest.mock import patch, MagicMock, Mock
 from ssl import CERT_NONE, CERT_REQUIRED
 from urllib3 import HTTPSConnectionPool
-
-import pyarrow
-
+try:
+    import pyarrow
+except ImportError:
+    pyarrow = None
 import databricks.sql
 from databricks.sql import utils
 from databricks.sql.types import SSLOptions
@@ -26,7 +28,7 @@ def retry_policy_factory():
         "_retry_delay_default": (float, 5, 1, 60),
     }
 
-
+@pytest.mark.skipif(pyarrow is None, reason="PyArrow is not installed")
 class ThriftBackendTestSuite(unittest.TestCase):
     okay_status = ttypes.TStatus(statusCode=ttypes.TStatusCode.SUCCESS_STATUS)