ADD: use case tests
matbun committed Oct 17, 2023
1 parent 4bdde4e commit ecb617e
Showing 7 changed files with 119 additions and 21 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/workflows-dt.yml
@@ -27,5 +27,5 @@ jobs:

- name: Run pytest for workflows
shell: bash -l {0}
run: micromamba run -p ./.venv-pytorch pytest -v ./tests/ -m "not distributed"
run: micromamba run -p ./.venv-pytorch pytest -v ./tests/ -m "not hpc"

3 changes: 2 additions & 1 deletion pyproject.toml
@@ -70,5 +70,6 @@ itwinai = "itwinai.cli:app"
[tool.pytest.ini_options]
markers = [
"integration: integration tests (deselect with '-m \"not integration\"')",
"distributed: test distributed ML on multiple GPUs/nodes (deselect with '-m \"not distributed\"')",
"hpc: require HPC resources (multiple GPUs/nodes). (deselect with '-m \"not hpc\"')",
"functional: functional tests. (deselect with '-m \"not functional\"')",
]
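
As a quick aside (not part of the diff): markers declared here only take effect once tests opt in with @pytest.mark.<name>, and the workflow change above relies on deselecting them with -m. A minimal sketch, with hypothetical test names:

import pytest


@pytest.mark.hpc
def test_needs_multi_gpu():
    """Runs only on HPC resources; CI deselects it via pytest -m "not hpc"."""


@pytest.mark.functional
def test_pipeline_end_to_end():
    """Slow end-to-end run; deselect locally with pytest -m "not functional"."""
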
4 changes: 2 additions & 2 deletions tests/backend/torch/test_distribtued_training.py
@@ -4,7 +4,7 @@
import pytest


@pytest.mark.distributed
@pytest.mark.hpc
def test_distributed_decorator():
"""Test function decorator. Needs torchrun cmd."""
cmd = ("micromamba run -p ./ai/.venv-pytorch "
@@ -14,7 +14,7 @@ def test_distributed_decorator():
subprocess.run(cmd.split(), check=True)


@pytest.mark.distributed
@pytest.mark.hpc
def test_distributed_trainer():
"""Test vanilla torch distributed trainer. Needs torchrun cmd."""
cmd = ("micromamba run -p ./ai/.venv-pytorch "
37 changes: 37 additions & 0 deletions tests/use-cases/conftest.py
@@ -0,0 +1,37 @@
import os
import pytest
import subprocess

pytest.TORCH_PREFIX = './.venv-pytorch'
pytest.TF_PREFIX = './.venv-tf'

FNAMES = [
'pipeline.yaml',
'startscript',
'train.py',
]


@pytest.fixture
def check_folder_structure():
"""
Verify that the use case folder complies with some predefined
structure.
"""
def _check_structure(root: str):
for fname in FNAMES:
fpath = os.path.join(root, fname)
            assert os.path.isfile(fpath), f"'{fname}' is missing in '{root}'"
return _check_structure


@pytest.fixture
def install_requirements():
"""Install requirements.txt, if present in root folder."""
def _install_reqs(root: str, env_prefix: str):
req_path = os.path.join(root, 'requirements.txt')
if os.path.isfile(req_path):
cmd = (f"micromamba run -p {env_prefix} "
f"pip install -r {req_path}")
subprocess.run(cmd.split(), check=True)
return _install_reqs
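
A short note on the pattern (my reading of the code above, not stated in the commit): both fixtures return inner functions, so tests can call them with arguments, and the environment prefixes are attached to the pytest module so every test file in this directory can read them without extra imports. A minimal, hypothetical consumer would look like the sketch below (EXAMPLE_PATH is invented for illustration; the real consumers are the test modules that follow):

import subprocess

import pytest

EXAMPLE_PATH = "use-cases/example"  # hypothetical use-case folder


def test_structure_example(check_folder_structure):
    # pytest injects the fixture by name; calling it runs the assertions.
    check_folder_structure(EXAMPLE_PATH)


@pytest.mark.functional
def test_example_training(install_requirements):
    # Installs use-cases/example/requirements.txt into the torch env, if present,
    # then runs the trainer end-to-end through micromamba.
    install_requirements(EXAMPLE_PATH, pytest.TORCH_PREFIX)
    cmd = (f"micromamba run -p {pytest.TORCH_PREFIX} python "
           f"{EXAMPLE_PATH}/train.py -p {EXAMPLE_PATH}/pipeline.yaml")
    subprocess.run(cmd.split(), check=True)
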
21 changes: 17 additions & 4 deletions tests/use-cases/test_cyclones.py
@@ -8,10 +8,23 @@
import pytest
import subprocess

# TODO: add tests for use case folder format:
# - structure
# - naming convention
# - file exist
CYCLONES_PATH = "use-cases/cyclones"


def test_structure_cyclones(check_folder_structure):
"""Test cyclones folder structure."""
check_folder_structure(CYCLONES_PATH)


@pytest.mark.functional
def test_cyclones_train_tf(install_requirements):
    """
    Test the Cyclones tensorflow trainer by running it end-to-end.
    """
install_requirements(CYCLONES_PATH, pytest.TF_PREFIX)
cmd = (f"micromamba run -p {pytest.TF_PREFIX} python "
f"{CYCLONES_PATH}/train.py -p {CYCLONES_PATH}/pipeline.yaml")
subprocess.run(cmd.split(), check=True)


@pytest.mark.skip(reason="workflow changed")
59 changes: 53 additions & 6 deletions tests/use-cases/test_mnist.py
@@ -8,15 +8,62 @@
import pytest
import subprocess

# TODO: add tests for use case folder format:
# - structure
# - naming convention
# - file exist
TORCH_PATH = "use-cases/mnist/torch"
LIGHTNING_PATH = "use-cases/mnist/torch-lightning"
TF_PATH = "use-cases/mnist/tensorflow"


@pytest.mark.skip(reason="workflow changed")
def test_structure_mnist_torch(check_folder_structure):
"""Test MNIST folder structure for torch native trainer."""
check_folder_structure(TORCH_PATH)


def test_structure_mnist_lightning(check_folder_structure):
"""Test MNIST folder structure for torch lightning trainer."""
check_folder_structure(LIGHTNING_PATH)


def test_structure_mnist_tf(check_folder_structure):
"""Test MNIST folder structure for tensorflow trainer."""
check_folder_structure(TF_PATH)


@pytest.mark.functional
def test_mnist_train_torch(install_requirements):
"""
Test MNIST torch native trainer by running it end-to-end.
"""
install_requirements(TORCH_PATH, pytest.TORCH_PREFIX)
cmd = (f"micromamba run -p {pytest.TORCH_PREFIX} python "
f"{TORCH_PATH}/train.py -p {TORCH_PATH}/pipeline.yaml")
subprocess.run(cmd.split(), check=True)


@pytest.mark.functional
def test_mnist_train_lightning(install_requirements):
"""
Test MNIST torch lightning trainer by running it end-to-end.
"""
    install_requirements(LIGHTNING_PATH, pytest.TORCH_PREFIX)
cmd = (f"micromamba run -p {pytest.TORCH_PREFIX} python "
f"{LIGHTNING_PATH}/train.py -p {LIGHTNING_PATH}/pipeline.yaml")
subprocess.run(cmd.split(), check=True)


@pytest.mark.functional
def test_mnist_train_tf(install_requirements):
"""
Test MNIST tensorflow trainer by running it end-to-end.
"""
install_requirements(TF_PATH, pytest.TF_PREFIX)
cmd = (f"micromamba run -p {pytest.TF_PREFIX} python "
f"{TF_PATH}/train.py -p {TF_PATH}/pipeline.yaml")
subprocess.run(cmd.split(), check=True)


@pytest.mark.skip(reason="workflow changed. Left as example")
@pytest.mark.integration
def test_mnist_train():
def test_mnist_train_legacy():
"""
Test MNIST training workflow(s) by running it end-to-end.
"""
14 changes: 7 additions & 7 deletions use-cases/cyclones/lib/utils.py
@@ -4,7 +4,7 @@

from .macros import Network
from itwinai.models.tensorflow.cyclones_vgg import (
custom_VGG_V1, custom_VGG_V2, custom_VGG_V3, ModelV5
custom_VGG_V1, custom_VGG_V2, custom_VGG_V3 # , ModelV5
)


@@ -61,12 +61,12 @@ def get_network_config(network, **kwargs):
patch_size=kwargs['patch_size'], channels=kwargs['channels'],
activation=kwargs['activation'], regularizer=kwargs['regularizer'])

elif network == Network.MODEL_V5.value:
print('Using Model V5')
model = ModelV5(
patch_size=kwargs['patch_size'], channels=kwargs['channels'],
last_activation=kwargs['activation'],
kernel_size=kwargs['kernel_size'])
# elif network == Network.MODEL_V5.value:
# print('Using Model V5')
# model = ModelV5(
# patch_size=kwargs['patch_size'], channels=kwargs['channels'],
# last_activation=kwargs['activation'],
# kernel_size=kwargs['kernel_size'])

return model
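
A hedged observation on this last change (an assumption on my part, not something the commit addresses): with the MODEL_V5 branch commented out, a configuration that still requests MODEL_V5 falls through every branch and reaches return model with model never assigned, which surfaces as an UnboundLocalError. A final guard would make that failure explicit; sketched below as a standalone, hypothetical helper, since the full body of get_network_config is not shown here:

def ensure_network_supported(network, supported):
    """Hypothetical guard: fail loudly when a disabled or unknown network is requested."""
    # `supported` would be the set of values actually handled by get_network_config.
    if network not in supported:
        raise ValueError(
            f"Unsupported network {network!r}; note that MODEL_V5 is currently disabled."
        )
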

