# Workflow file for run: "Add support for Hive (Spark) backends" (#110)
---
name: CI

# Run on every push to main and on all pull requests.
on:
  push:
    branches:
      - main
  pull_request:

env:
  # The single matrix leg that uploads coverage (see the codecov step's `if`).
  DEFAULT_PYTHON: "3.12"
  DEFAULT_OS: ubuntu-latest
jobs:
  pytest:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        # Versions are quoted so YAML keeps them strings (3.10-style float trap).
        python-version: ["3.11", "3.12"]
        os: [ubuntu-latest, windows-latest]
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install ".[dev,spark]"
      # Spark/Hive setup needs bash, wget, and the Spark *.sh launch scripts,
      # none of which work under the default pwsh shell on windows-latest —
      # run it only on the Linux matrix leg.
      - name: Install and start Spark Thrift server
        if: runner.os == 'Linux'
        run: |
          # Keep the version in one place. NOTE(review): dlcdn.apache.org only
          # hosts current releases, so this URL rots when 3.5.4 is superseded.
          SPARK_VERSION=3.5.4
          wget "https://dlcdn.apache.org/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop3.tgz"
          tar -xzf "spark-${SPARK_VERSION}-bin-hadoop3.tgz"
          # These exports only live for this step, but the daemons started
          # below keep running on the runner for the pytest step.
          export SPARK_HOME="$(pwd)/spark-${SPARK_VERSION}-bin-hadoop3"
          export PATH="$SPARK_HOME/sbin:$PATH"
          start-master.sh
          start-worker.sh "spark://$(hostname):7077"
          start-thriftserver.sh --master="spark://$(hostname):7077"
      - name: Run pytest with coverage
        # bash explicitly: the `export`/`if` syntax below is not pwsh, and
        # bash is available on both Linux and Windows runners.
        shell: bash
        run: |
          # The Hive Thrift server only exists on the Linux leg (see step
          # above), so only advertise it there. NOTE(review): assumes the
          # test suite skips Hive cases when CHRONIFY_HIVE_URL is unset —
          # confirm against the tests.
          if [ "$RUNNER_OS" == "Linux" ]; then
            export CHRONIFY_HIVE_URL=hive://localhost:10000/default
          fi
          pytest -v --cov --cov-report=xml
      - name: codecov
        uses: codecov/[email protected]
        # Upload coverage exactly once, from the default OS/Python leg.
        if: ${{ matrix.os == env.DEFAULT_OS && matrix.python-version == env.DEFAULT_PYTHON }}
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          name: chronify-tests
          fail_ci_if_error: false
          verbose: true
mypy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: 3.12
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install ".[dev]"
mypy
ruff:
runs-on: ubuntu-latest
name: "ruff"
steps:
- uses: actions/checkout@v4
- uses: chartboost/ruff-action@v1
with:
src: "./src"