From 52fdd8956f1dc3eb7fc2f27291db3955944de1c6 Mon Sep 17 00:00:00 2001
From: Phillip Cloud <417981+cpcloud@users.noreply.github.com>
Date: Wed, 11 Sep 2024 16:03:23 -0400
Subject: [PATCH] ci: test examples (#10098)

---
 .github/workflows/ibis-backends-cloud.yml       | 2 +-
 .github/workflows/ibis-backends.yml             | 6 +++---
 ibis/backends/datafusion/__init__.py            | 3 ++-
 ibis/backends/datafusion/tests/test_register.py | 3 ++-
 4 files changed, 8 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/ibis-backends-cloud.yml b/.github/workflows/ibis-backends-cloud.yml
index 7984abaf7849..00e5f8031475 100644
--- a/.github/workflows/ibis-backends-cloud.yml
+++ b/.github/workflows/ibis-backends-cloud.yml
@@ -107,7 +107,7 @@ jobs:
         run: poetry add snowflake-snowpark-python --python="==${{ steps.install_python.outputs.python-version }}"
 
       - name: install ibis
-        run: poetry install --without dev --without docs --extras "${{ join(matrix.backend.extras, ' ') }}"
+        run: poetry install --without dev --without docs --extras "${{ join(matrix.backend.extras, ' ') }} examples"
 
       - uses: extractions/setup-just@v2
         env:
diff --git a/.github/workflows/ibis-backends.yml b/.github/workflows/ibis-backends.yml
index 4a030a23891e..c1639326fac6 100644
--- a/.github/workflows/ibis-backends.yml
+++ b/.github/workflows/ibis-backends.yml
@@ -468,7 +468,7 @@ jobs:
         run: pip install 'poetry==1.8.3'
 
       - name: install ibis
-        run: poetry install --without dev --without docs --extras "${{ join(matrix.backend.extras, ' ') }}"
+        run: poetry install --without dev --without docs --extras "${{ join(matrix.backend.extras, ' ') }} examples"
 
       - name: install deps for broken avro-python setup
         if: matrix.backend.name == 'flink'
@@ -663,7 +663,7 @@ jobs:
         run: poetry lock --no-update
 
       - name: install ibis
-        run: poetry install --without dev --without docs --extras "${{ join(matrix.backend.extras, ' ') }}"
+        run: poetry install --without dev --without docs --extras "${{ join(matrix.backend.extras, ' ') }} examples"
 
       - name: run tests
         run: just ci-check -m ${{ matrix.backend.name }} --numprocesses auto --dist=loadgroup
@@ -749,7 +749,7 @@ jobs:
         run: poetry lock --no-update
 
       - name: install ibis
-        run: poetry install --without dev --without docs --extras pyspark
+        run: poetry install --without dev --without docs --extras "pyspark examples"
 
       - name: install delta-spark
         if: matrix.pyspark-version == '3.5'
diff --git a/ibis/backends/datafusion/__init__.py b/ibis/backends/datafusion/__init__.py
index 0c3aa5fcb6a3..ae00eecbdc94 100644
--- a/ibis/backends/datafusion/__init__.py
+++ b/ibis/backends/datafusion/__init__.py
@@ -9,6 +9,7 @@
 
 import datafusion as df
 import pyarrow as pa
+import pyarrow.dataset as ds
 import pyarrow_hotfix  # noqa: F401
 import sqlglot as sg
 import sqlglot.expressions as sge
@@ -372,7 +373,7 @@ def _register(
             self.con.deregister_table(table_name)
             self.con.register_record_batches(table_name, [[source]])
             return self.table(table_name)
-        elif isinstance(source, pa.dataset.Dataset):
+        elif isinstance(source, ds.Dataset):
             self.con.deregister_table(table_name)
             self.con.register_dataset(table_name, source)
             return self.table(table_name)
diff --git a/ibis/backends/datafusion/tests/test_register.py b/ibis/backends/datafusion/tests/test_register.py
index 16a82973c7fa..e846b6fb4ae9 100644
--- a/ibis/backends/datafusion/tests/test_register.py
+++ b/ibis/backends/datafusion/tests/test_register.py
@@ -2,7 +2,6 @@
 
 import pandas as pd
 import pyarrow as pa
-import pyarrow.dataset as ds
 import pytest
 
 import ibis
@@ -45,6 +44,8 @@ def test_register_batches(conn):
 
 
 def test_register_dataset(conn):
+    import pyarrow.dataset as ds
+
     tab = pa.table({"x": [1, 2, 3]})
     dataset = ds.InMemoryDataset(tab)
     with pytest.warns(FutureWarning, match="v9.1"):
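
Not part of the patch above: a minimal sketch of how the DataFusion registration
path touched by this change might be exercised end to end. It assumes ibis is
installed with the datafusion extra, that ibis.datafusion.connect() and
Backend.register() behave as in the test (register() emits a FutureWarning as of
v9.1), and the table name "in_memory" is invented for illustration.

    import pyarrow as pa
    import pyarrow.dataset as ds

    import ibis

    con = ibis.datafusion.connect()        # in-memory DataFusion context
    tab = pa.table({"x": [1, 2, 3]})
    dataset = ds.InMemoryDataset(tab)      # a pyarrow.dataset.Dataset, not a Table

    # register() routes a ds.Dataset through con.register_dataset(), i.e. the
    # isinstance(source, ds.Dataset) branch changed in _register above.
    t = con.register(dataset, table_name="in_memory")
    print(t.x.sum().execute())             # expected: 6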