ci: test examples (#10098)
cpcloud authored Sep 11, 2024
1 parent d48a575 commit 52fdd89
Showing 4 changed files with 8 additions and 6 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ibis-backends-cloud.yml
@@ -107,7 +107,7 @@ jobs:
        run: poetry add snowflake-snowpark-python --python="==${{ steps.install_python.outputs.python-version }}"

      - name: install ibis
-       run: poetry install --without dev --without docs --extras "${{ join(matrix.backend.extras, ' ') }}"
+       run: poetry install --without dev --without docs --extras "${{ join(matrix.backend.extras, ' ') }} examples"

      - uses: extractions/setup-just@v2
        env:
6 changes: 3 additions & 3 deletions .github/workflows/ibis-backends.yml
@@ -468,7 +468,7 @@ jobs:
        run: pip install 'poetry==1.8.3'

      - name: install ibis
-       run: poetry install --without dev --without docs --extras "${{ join(matrix.backend.extras, ' ') }}"
+       run: poetry install --without dev --without docs --extras "${{ join(matrix.backend.extras, ' ') }} examples"

      - name: install deps for broken avro-python setup
        if: matrix.backend.name == 'flink'
@@ -663,7 +663,7 @@ jobs:
        run: poetry lock --no-update

      - name: install ibis
-       run: poetry install --without dev --without docs --extras "${{ join(matrix.backend.extras, ' ') }}"
+       run: poetry install --without dev --without docs --extras "${{ join(matrix.backend.extras, ' ') }} examples"

      - name: run tests
        run: just ci-check -m ${{ matrix.backend.name }} --numprocesses auto --dist=loadgroup
@@ -749,7 +749,7 @@ jobs:
        run: poetry lock --no-update

      - name: install ibis
-       run: poetry install --without dev --without docs --extras pyspark
+       run: poetry install --without dev --without docs --extras "pyspark examples"

      - name: install delta-spark
        if: matrix.pyspark-version == '3.5'
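The substance of all four workflow edits is the same: the literal token examples is appended inside the quoted --extras argument, so each backend CI job also installs the examples extra alongside its backend-specific extras before the tests run. Below is a minimal Python sketch of how the ${{ join(matrix.backend.extras, ' ') }} examples expression is expected to render; the sample extras list is hypothetical, not taken from the actual matrix definitions.

def render_extras_flag(extras):
    # GitHub's join(list, ' ') concatenates the matrix entries with spaces;
    # the workflow text then appends " examples" inside the same quotes.
    return " ".join(extras) + " examples"

extras = ["duckdb", "geospatial"]  # hypothetical backend.extras value
flag = render_extras_flag(extras)
print(f'poetry install --without dev --without docs --extras "{flag}"')
# poetry install --without dev --without docs --extras "duckdb geospatial examples"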
3 changes: 2 additions & 1 deletion ibis/backends/datafusion/__init__.py
@@ -9,6 +9,7 @@

import datafusion as df
import pyarrow as pa
+import pyarrow.dataset as ds
import pyarrow_hotfix  # noqa: F401
import sqlglot as sg
import sqlglot.expressions as sge
@@ -372,7 +373,7 @@ def _register(
            self.con.deregister_table(table_name)
            self.con.register_record_batches(table_name, [[source]])
            return self.table(table_name)
-        elif isinstance(source, pa.dataset.Dataset):
+        elif isinstance(source, ds.Dataset):
            self.con.deregister_table(table_name)
            self.con.register_dataset(table_name, source)
            return self.table(table_name)
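The isinstance target changes from pa.dataset.Dataset to ds.Dataset, paired with the new top-level import pyarrow.dataset as ds: pyarrow.dataset is a submodule, so the attribute pa.dataset is only bound after something has imported that submodule, and the explicit import makes the check reliable on its own. A small self-contained sketch of the same dispatch pattern follows; describe_source is an illustrative stand-in, not the backend's actual _register method.

import pyarrow as pa
import pyarrow.dataset as ds  # explicit import; bare pa.dataset may not be bound otherwise

def describe_source(source):
    # Illustrative dispatch mirroring the isinstance checks; the real method
    # registers the object with the DataFusion session context instead.
    if isinstance(source, pa.RecordBatch):
        return "record batch"
    elif isinstance(source, ds.Dataset):
        return "dataset"
    elif isinstance(source, pa.Table):
        return "table"
    raise TypeError(f"unsupported source: {type(source)}")

tab = pa.table({"x": [1, 2, 3]})
print(describe_source(tab))                      # table
print(describe_source(ds.InMemoryDataset(tab)))  # dataset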
3 changes: 2 additions & 1 deletion ibis/backends/datafusion/tests/test_register.py
@@ -2,7 +2,6 @@

import pandas as pd
import pyarrow as pa
-import pyarrow.dataset as ds
import pytest

import ibis
@@ -45,6 +44,8 @@ def test_register_batches(conn):


def test_register_dataset(conn):
+    import pyarrow.dataset as ds
+
    tab = pa.table({"x": [1, 2, 3]})
    dataset = ds.InMemoryDataset(tab)
    with pytest.warns(FutureWarning, match="v9.1"):
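The test file mirrors the backend change from the other side: the module-level import pyarrow.dataset as ds is dropped and reintroduced locally inside test_register_dataset, so the dataset submodule is only loaded by the one test that actually builds an InMemoryDataset. A minimal sketch of that local-import pattern and of the object the test constructs; the actual registration call sits in the truncated part of the hunk and is left out here.

import pyarrow as pa

def build_in_memory_dataset():
    # Local import, matching the test: pyarrow.dataset is only loaded
    # when this function runs, not when the module is imported.
    import pyarrow.dataset as ds

    tab = pa.table({"x": [1, 2, 3]})
    return ds.InMemoryDataset(tab)

dataset = build_in_memory_dataset()
print(dataset.to_table())  # the dataset scans back to the original table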
