diff --git a/.ci/workchains.py b/.ci/workchains.py index 110334f0ae..5504813a10 100644 --- a/.ci/workchains.py +++ b/.ci/workchains.py @@ -7,6 +7,7 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### +# pylint: disable=invalid-name from aiida.common import AttributeDict from aiida.engine import calcfunction, workfunction, WorkChain, ToContext, append_, while_, ExitCode from aiida.engine import BaseRestartWorkChain, process_handler, ProcessHandlerReport diff --git a/.coveragerc b/.coveragerc index a90fc09bc8..b27dfc7b30 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,6 +1,5 @@ [run] source = aiida -omit = aiida/test*.py,aiida/*/test*.py,aiida/*/*/test*.py,aiida/*/*/*/test*.py,aiida/*/*/*/*/test*.py,aiida/*/*/*/*/*/test*.py,aiida/*/migrations/*.py,aiida/*/migrations/versions/*.py [html] directory = .ci/coverage/html diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000000..e908ba2fc9 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,10 @@ +# All files related to dependency management are owned by the +# currently active dependency manager (DM) to trigger an automatic review +# request from the DM upon changes. Please see AEP-002 for details: +# https://github.com/aiidateam/AEP/tree/master/002_dependency_management +setup.* @aiidateam/dependency-manager +environment.yml @aiidateam/dependency-manager +requirements*.txt @aiidateam/dependency-manager +pyproject.toml @aiidateam/dependency-manager +utils/dependency_management.py @aiidateam/dependency-manager +.github/workflows/dm.yml @aiidateam/dependency-manager diff --git a/.github/config/profile.yaml b/.github/config/profile.yaml index c5e0dfa3c5..e58ab2821d 100644 --- a/.github/config/profile.yaml +++ b/.github/config/profile.yaml @@ -11,4 +11,4 @@ db_port: 5432 db_name: PLACEHOLDER_DATABASE_NAME db_username: postgres db_password: '' -repository: PLACEHOLDER_REPOSITORY \ No newline at end of file +repository: PLACEHOLDER_REPOSITORY diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4ffd8c5940..10a1251707 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,35 +1,16 @@ -name: aiida-core +name: continuous-integration on: [push, pull_request] jobs: - conda: - - runs-on: ubuntu-latest - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v1 - - - name: Set up Python 3.7 - uses: actions/setup-python@v1 - with: - python-version: 3.7 - - - name: Conda install - env: - PYTHON_VERSION: 3.7 - run: - .github/workflows/conda.sh - docs: runs-on: ubuntu-latest timeout-minutes: 30 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - name: Set up Python 3.7 uses: actions/setup-python@v1 @@ -60,7 +41,7 @@ jobs: timeout-minutes: 30 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - name: Set up Python 3.7 uses: actions/setup-python@v1 @@ -83,6 +64,41 @@ jobs: run: pre-commit run --all-files || ( git status --short ; git diff ; exit 1 ) + check-requirements: + + runs-on: ubuntu-latest + timeout-minutes: 5 + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python 3.8 + uses: actions/setup-python@v1 + with: + python-version: 3.8 + + - name: Install dm-script dependencies + run: pip install packaging==20.3 click~=7.0 pyyaml~=5.1 toml + + - name: Check requirements files + run: python ./utils/dependency_management.py check-requirements DEFAULT + + - name: Create commit comment + if: 
failure() + uses: peter-evans/commit-comment@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + path: setup.json + body: | + It appears that at least one of the environments defined in the requirements files + ('requirements/*.txt') is not meeting the dependencies specified in the 'setup.json' file. + These files define the environment for continuous integration tests, so it is important that they are updated. + + If this commit is part of a pull request, you can automatically update the requirements by + commenting with '/update-requirements'. + + Click [here](https://github.com/aiidateam/aiida-core/wiki/AiiDA-Dependency-Management) for more information. + tests: runs-on: ubuntu-latest @@ -91,18 +107,30 @@ jobs: strategy: fail-fast: false matrix: - python-version: [3.5, 3.8] backend: ['django', 'sqlalchemy'] + python-version: [3.5, 3.8] + + services: + postgres: + image: postgres:10 + env: + POSTGRES_DB: test_${{ matrix.backend }} + POSTGRES_PASSWORD: '' + POSTGRES_HOST_AUTH_METHOD: trust + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + rabbitmq: + image: rabbitmq:latest + ports: + - 5672:5672 steps: - - uses: actions/checkout@v1 - - uses: CasperWA/postgresql-action@v1.2 - with: - postgresql version: '10' - postgresql db: test_${{ matrix.backend }} - postgresql user: postgres - postgresql password: '' - postgresql auth: trust + - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 @@ -111,20 +139,25 @@ jobs: - name: Install system dependencies run: | - wget -O - "https://github.com/rabbitmq/signing-keys/releases/download/2.0/rabbitmq-release-signing-key.asc" | sudo apt-key add - - echo 'deb https://dl.bintray.com/rabbitmq-erlang/debian bionic erlang' | sudo tee -a /etc/apt/sources.list.d/bintray.rabbitmq.list - echo 'deb https://dl.bintray.com/rabbitmq/debian bionic main' | sudo tee -a /etc/apt/sources.list.d/bintray.rabbitmq.list sudo rm -f /etc/apt/sources.list.d/dotnetdev.list /etc/apt/sources.list.d/microsoft-prod.list sudo apt update - sudo apt install postgresql-10 rabbitmq-server graphviz - sudo systemctl status rabbitmq-server.service + sudo apt install postgresql-10 graphviz - - name: Install python dependencies + - name: Upgrade pip run: | pip install --upgrade pip - pip install numpy==1.17.4 - pip install -e .[atomic_tools,docs,notebook,rest,testing] + pip --version + + - name: upgrade setuptools [py35] + if: matrix.python-version == 3.5 + run: pip install -I setuptools==38.2.0 # Minimally required version for Python 3.5. + + - name: Install aiida-core + run: | + pip install -r requirements/requirements-py-${{ matrix.python-version }}.txt + pip install --no-deps -e . 
reentry scan + pip freeze - name: Setup environment env: @@ -138,13 +171,22 @@ run: .github/workflows/tests.sh + - name: Upload coverage report + if: matrix.python-version == 3.5 && github.repository == 'aiidateam/aiida-core' + uses: codecov/codecov-action@v1 + with: + name: aiida-pytests-py3.5-${{ matrix.backend }} + flags: ${{ matrix.backend }} + file: ./coverage.xml + fail_ci_if_error: true + verdi: runs-on: ubuntu-latest timeout-minutes: 30 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - name: Set up Python 3.7 uses: actions/setup-python@v1 @@ -166,7 +208,7 @@ timeout-minutes: 30 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - name: Install docker run: | diff --git a/.github/workflows/conda.sh b/.github/workflows/conda.sh deleted file mode 100755 index 5ad1b6628b..0000000000 --- a/.github/workflows/conda.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env bash -set -ev - -wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; -bash miniconda.sh -b -p $HOME/miniconda -export PATH="$HOME/miniconda/bin:$PATH" -hash -r -conda config --set always_yes yes --set changeps1 no - -conda update -q conda -conda info -a -conda env create -f environment.yml -n test-environment diff --git a/.github/workflows/test-install.yml b/.github/workflows/test-install.yml new file mode 100644 index 0000000000..1f06daae6f --- /dev/null +++ b/.github/workflows/test-install.yml @@ -0,0 +1,157 @@ +name: test-install + +on: + pull_request: + paths: + - 'setup.*' + - 'environment.yml' + - '**/requirements*.txt' + - 'pyproject.toml' + - 'utils/dependency_management.py' + - '.github/workflows/test-install.yml' + schedule: + - cron: '30 02 * * *' # nightly build + +jobs: + + validate-dependency-specification: + # Note: The specification is also validated by the pre-commit hook. + + runs-on: ubuntu-latest + timeout-minutes: 5 + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python 3.7 + uses: actions/setup-python@v1 + with: + python-version: 3.7 + + - name: Install dm-script dependencies + run: pip install packaging==20.3 click~=7.0 pyyaml~=5.1 toml + + - name: Validate + run: python ./utils/dependency_management.py validate-all + + install-with-pip: + + runs-on: ubuntu-latest + timeout-minutes: 5 + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python 3.7 + uses: actions/setup-python@v1 + with: + python-version: 3.7 + + - name: Pip install + run: | + python -m pip install -e . + python -m pip freeze + + - name: Test importing aiida + run: + python -c "import aiida" + + install-with-conda: + + runs-on: ubuntu-latest + name: install-with-conda + + timeout-minutes: 5 + + steps: + - uses: actions/checkout@v2 + + - name: Setup Conda + uses: s-weigand/setup-conda@v1 + with: + update-conda: true + python-version: 3.7 + - run: conda --version + - run: python --version + - run: which python + + - name: Create conda environment + run: | + conda env create -f environment.yml -n test-environment + source activate test-environment + python -m pip install --no-deps -e . 
+ + - name: Test importing aiida + run: | + source activate test-environment + python -c "import aiida" + + tests: + + needs: [install-with-pip, install-with-conda] + runs-on: ubuntu-latest + timeout-minutes: 30 + + strategy: + fail-fast: false + matrix: + python-version: [3.5, 3.6, 3.7, 3.8] + backend: ['django', 'sqlalchemy'] + + services: + postgres: + image: postgres:10 + env: + POSTGRES_DB: test_${{ matrix.backend }} + POSTGRES_PASSWORD: '' + POSTGRES_HOST_AUTH_METHOD: trust + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + rabbitmq: + image: rabbitmq:latest + ports: + - 5672:5672 + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install system dependencies + run: | + sudo rm -f /etc/apt/sources.list.d/dotnetdev.list /etc/apt/sources.list.d/microsoft-prod.list + sudo apt update + sudo apt install postgresql-10 graphviz + + - run: pip install --upgrade pip + + - name: upgrade setuptools [py35] + if: matrix.python-version == 3.5 + run: pip install -I setuptools==38.2.0 + + - name: Install aiida-core + run: | + pip install -e .[atomic_tools,docs,notebook,rest,testing] + reentry scan + + - run: pip freeze + + - name: Setup AiiDA environment + env: + AIIDA_TEST_BACKEND: ${{ matrix.backend }} + run: + .github/workflows/setup.sh + + - name: Run test suite + env: + AIIDA_TEST_BACKEND: ${{ matrix.backend }} + run: + .github/workflows/tests.sh diff --git a/.github/workflows/tests.sh b/.github/workflows/tests.sh index 3d639e8f88..3aff3b28f9 100755 --- a/.github/workflows/tests.sh +++ b/.github/workflows/tests.sh @@ -3,16 +3,29 @@ set -ev # Make sure the folder containing the workchains is in the python path before the daemon is started export PYTHONPATH="${PYTHONPATH}:${GITHUB_WORKSPACE}/.ci" -# show timings of tests -export PYTEST_ADDOPTS=" --durations=0" +# pytest options: +# - report timings of tests +# - pytest-cov configuration taken from top-level .coveragerc +# - coverage is reported as XML and in terminal, +# including the numbers/ranges of lines which are not covered +# - coverage results of multiple tests are collected +# - coverage is reported on files in aiida/ +export PYTEST_ADDOPTS="${PYTEST_ADDOPTS} --durations=0 --cov-config=${GITHUB_WORKSPACE}/.coveragerc --cov-report xml --cov-report term-missing --cov-append --cov=aiida" + +# daemon tests verdi daemon start 4 verdi -p test_${AIIDA_TEST_BACKEND} run .ci/test_daemon.py verdi daemon stop -AIIDA_TEST_PROFILE=test_$AIIDA_TEST_BACKEND pytest tests -AIIDA_TEST_PROFILE=test_$AIIDA_TEST_BACKEND pytest .ci/pytest +# tests for the testing infrastructure pytest --noconftest .ci/test_test_manager.py pytest --noconftest .ci/test_profile_manager.py python .ci/test_plugin_testcase.py # uses custom unittest test runner -python .ci/test_plugin_testcase.py +# Until the `.ci/pytest` tests are moved within `tests` we have to run them separately and pass in the path to the +# `conftest.py` explicitly, because otherwise it won't be able to find the fixtures it provides +AIIDA_TEST_PROFILE=test_$AIIDA_TEST_BACKEND pytest tests/conftest.py .ci/pytest + +# main aiida-core tests +AIIDA_TEST_PROFILE=test_$AIIDA_TEST_BACKEND pytest tests diff --git a/.github/workflows/update-requirements.yml b/.github/workflows/update-requirements.yml new file mode 100644 index 0000000000..ebc0b22a5c --- /dev/null +++ b/.github/workflows/update-requirements.yml @@ -0,0 +1,126 @@ +name: update-requirements + +on: + repository_dispatch: + types: 
[update-requirements-command] + +jobs: + + tests: + + runs-on: ubuntu-latest + timeout-minutes: 30 + + strategy: + fail-fast: false + matrix: + backend: ['django', 'sqlalchemy'] + python-version: [3.5, 3.6, 3.7, 3.8] + + services: + postgres: + image: postgres:10 + env: + POSTGRES_DB: test_${{ matrix.backend }} + POSTGRES_PASSWORD: '' + POSTGRES_HOST_AUTH_METHOD: trust + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + rabbitmq: + image: rabbitmq:latest + ports: + - 5672:5672 + + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ github.event.client_payload.head_ref }} + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install system dependencies + run: | + sudo rm -f /etc/apt/sources.list.d/dotnetdev.list /etc/apt/sources.list.d/microsoft-prod.list + sudo apt update + sudo apt install postgresql-10 graphviz + + - run: pip install --upgrade pip + + - name: upgrade setuptools [py35] + if: matrix.python-version == 3.5 + run: pip install -I setuptools==38.2.0 + + - name: Install aiida-core + run: | + pip install -e .[atomic_tools,docs,notebook,rest,testing] + reentry scan + + - name: Setup environment + env: + AIIDA_TEST_BACKEND: ${{ matrix.backend }} + run: + .github/workflows/setup.sh + + - name: Run test suite + env: + AIIDA_TEST_BACKEND: ${{ matrix.backend }} + run: + .github/workflows/tests.sh + + - name: Freeze test environment + run: pip freeze | sed '1d' | tee requirements-py-${{ matrix.python-version }}.txt + + # Add python-version specific requirements file to the requirements.txt artifact. + # Will be used in the next job to create a PR in case they are different from the current version. + - uses: actions/upload-artifact@v1 + if: matrix.backend == 'django' # The requirements are identical between backends. + with: + name: requirements.txt + path: requirements-py-${{ matrix.python-version }}.txt + + update-requirements: + + needs: tests + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ github.event.client_payload.head_ref }} + + - name: Download requirements.txt files + uses: actions/download-artifact@v1 + with: + name: requirements.txt + path: requirements + + - name: Commit requirements files + run: | + git config --local user.email "action@github.com" + git config --local user.name "GitHub Action" + git add requirements/* + git commit -m "Update requirements files." -a || echo "Nothing to update." + + - name: Push changes + uses: ad-m/github-push-action@v0.5.0 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + branch: ${{ github.event.client_payload.head_ref }} + + - name: Create Pull Request (since update via push failed) + if: failure() && github.repository == 'aiidateam/aiida-core' + uses: peter-evans/create-pull-request@v2 + with: + commit-message: "Update requirements files." 
+ token: ${{ secrets.GITHUB_TOKEN }} + title: "Update requirements.txt" + team-reviewers: dependency-manager + branch: "dm/update-requirements.txt" diff --git a/.gitignore b/.gitignore index 9d225c3ef0..1983db653d 100644 --- a/.gitignore +++ b/.gitignore @@ -20,6 +20,7 @@ .cache .pytest_cache .coverage +coverage.xml # Files created by RPN tests .ci/polish/polish_workchains/polish* diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 06e50b4c5f..57a52b649b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -53,7 +53,6 @@ aiida/common/datastructures.py| aiida/engine/daemon/execmanager.py| aiida/engine/processes/calcjobs/tasks.py| - aiida/orm/autogroup.py| aiida/orm/querybuilder.py| aiida/orm/nodes/data/array/bands.py| aiida/orm/nodes/data/array/projection.py| @@ -66,7 +65,6 @@ aiida/parsers/plugins/arithmetic/add.py| aiida/parsers/plugins/templatereplacer/doubler.py| aiida/parsers/plugins/templatereplacer/__init__.py| - aiida/plugins/entry_point.py| aiida/plugins/entry.py| aiida/plugins/info.py| aiida/plugins/registry.py| @@ -110,39 +108,52 @@ description: "This hook runs Prospector: https://github.com/landscapeio/prospector" entry: prospector + - id: dm-generate-all + name: Update all requirements files + entry: python ./utils/dependency_management.py generate-all + language: system + files: >- + (?x)^( + setup.py| + setup.json| + utils/dependency_management.py + )$ + pass_filenames: false + - id: rtd-requirements - name: Requirements for RTD - entry: python ./docs/update_req_for_rtd.py --pre-commit + name: Validate docs/requirements_for_rtd.txt + entry: python ./utils/dependency_management.py validate-rtd-reqs language: system files: >- (?x)^( setup.json| setup.py| + utils/dependency_management.py| docs/requirements_for_rtd.txt| - docs/update_req_for_rtd.py| )$ pass_filenames: false - id: pyproject - name: Validating pyproject.toml - entry: python ./utils/validate_consistency.py toml + name: Validate pyproject.toml + entry: python ./utils/dependency_management.py validate-pyproject-toml language: system files: >- (?x)^( setup.json| setup.py| - utils/validate_consistency.py| + utils/dependency_management.py| + pyproject.toml )$ pass_filenames: false - - id: conda - name: Validating environment.yml - entry: python ./utils/validate_consistency.py conda + - id: dependencies + name: Validate environment.yml + entry: python ./utils/dependency_management.py validate-environment-yml language: system files: >- (?x)^( setup.json| setup.py| - utils/validate_consistency.py| + utils/dependency_management.py| environment.yml| )$ pass_filenames: false @@ -154,6 +165,7 @@ files: >- (?x)^( aiida/cmdline/commands/.*| + aiida/cmdline/params/.*| utils/validate_consistency.py| )$ pass_filenames: false diff --git a/.pylintrc b/.pylintrc index 9e3adfb075..8fc7a6a3c3 100644 --- a/.pylintrc +++ b/.pylintrc @@ -50,7 +50,7 @@ confidence= # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" -disable=bad-continuation,locally-disabled,useless-suppression,django-not-available,bad-option-value,logging-format-interpolation,no-else-raise,import-outside-toplevel +disable=bad-continuation,locally-disabled,useless-suppression,django-not-available,bad-option-value,logging-format-interpolation,no-else-raise,import-outside-toplevel,cyclic-import # Enable the message, report, category or checker with the given id(s). 
You can + either give multiple identifier separated by comma (,) or put this option diff --git a/AUTHORS.txt b/AUTHORS.txt index af2a1eb8ae..ac97a4387b 100644 --- a/AUTHORS.txt +++ b/AUTHORS.txt @@ -52,6 +52,7 @@ and the following people for code contributions, bug fixes, improvements of the * Marco Dorigo * Y.-W. Fang * Marco Gibertini +* Davide Grassano * Daniel Hollas * Eric Hontz * Jianxing Huang diff --git a/CHANGELOG.md b/CHANGELOG.md index b9ca0d93b2..9933ae460c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,48 @@ # Changelog +## v1.2.0 + +### Features +- `ExitCode`: make the exit message parameterizable through templates [[#3824]](https://github.com/aiidateam/aiida-core/pull/3824) +- `GroupPath`: a utility to work with virtual `Group` hierarchies [[#3613]](https://github.com/aiidateam/aiida-core/pull/3613) +- Make `Group` subclassable through entry points [[#3882]](https://github.com/aiidateam/aiida-core/pull/3882)[[#3903]](https://github.com/aiidateam/aiida-core/pull/3903)[[#3926]](https://github.com/aiidateam/aiida-core/pull/3926) +- Add auto-complete support for `CodeParamType` and `GroupParamType` [[#3926]](https://github.com/aiidateam/aiida-core/pull/3926) +- Add export archive migration for `Group` type strings [[#3912]](https://github.com/aiidateam/aiida-core/pull/3912) +- Add the `-v/--version` option to `verdi export migrate` [[#3910]](https://github.com/aiidateam/aiida-core/pull/3910) +- Add the `-l/--limit` option to `verdi group show` [[#3857]](https://github.com/aiidateam/aiida-core/pull/3857) +- Add the `--order-by/--order-direction` options to `verdi group list` [[#3858]](https://github.com/aiidateam/aiida-core/pull/3858) +- Add `prepend_text` and `append_text` to `aiida_local_code_factory` pytest fixture [[#3831]](https://github.com/aiidateam/aiida-core/pull/3831) +- REST API: make it easier to call `run_api` in wsgi scripts [[#3875]](https://github.com/aiidateam/aiida-core/pull/3875) +- Plot bands with only one kpoint [[#3798]](https://github.com/aiidateam/aiida-core/pull/3798) + +### Bug fixes +- Improved validation for CLI parameters [[#3894]](https://github.com/aiidateam/aiida-core/pull/3894) +- Ensure unicity when creating instances of `Autogroup` [[#3650]](https://github.com/aiidateam/aiida-core/pull/3650) +- Prevent nodes without registered entry points from being stored [[#3886]](https://github.com/aiidateam/aiida-core/pull/3886) +- Fix the `RotatingFileHandler` configuration of the daemon logger [[#3891]](https://github.com/aiidateam/aiida-core/pull/3891) +- Ensure log messages are not duplicated in daemon log file [[#3890]](https://github.com/aiidateam/aiida-core/pull/3890) +- Convert argument to `str` in `aiida.common.escaping.escape_for_bash` [[#3873]](https://github.com/aiidateam/aiida-core/pull/3873) +- Remove the return statement of `RemoteData.getfile()` [[#3742]](https://github.com/aiidateam/aiida-core/pull/3742) +- Support for `BandsData` nodes without `StructureData` ancestors [[#3817]](https://github.com/aiidateam/aiida-core/pull/3817) + +### Deprecations +- Deprecate `--group-type` option in favor of `--type-string` for `verdi group list` [[#3926]](https://github.com/aiidateam/aiida-core/pull/3926) + +### Documentation +- Docs: link to documentation of other libraries via `intersphinx` mapping [[#3876]](https://github.com/aiidateam/aiida-core/pull/3876) +- Docs: remove extra `advanced_plotting` from install instructions [[#3860]](https://github.com/aiidateam/aiida-core/pull/3860) +- Docs: consistent use of "plugin" vs "plugin package" 
terminology [[#3799]](https://github.com/aiidateam/aiida-core/pull/3799) + +### Developers +- Deduplicate code for tests of archive migration code [[#3924]](https://github.com/aiidateam/aiida-core/pull/3924) +- CI: use GitHub Actions services for PostgreSQL and RabbitMQ [[#3901]](https://github.com/aiidateam/aiida-core/pull/3901) +- Move `aiida.manage.external.pgsu` to external package `pgsu` [[#3892]](https://github.com/aiidateam/aiida-core/pull/3892) +- Cleanup the top-level directory of the repository [[#3738]](https://github.com/aiidateam/aiida-core/pull/3738) +- Remove unused `orm.implementation.utils` module [[#3877]](https://github.com/aiidateam/aiida-core/pull/3877) +- Revise dependency management workflow [[#3771]](https://github.com/aiidateam/aiida-core/pull/3771) +- Re-add support for Coverage reports through codecov.io [[#3618]](https://github.com/aiidateam/aiida-core/pull/3618) + + ## v1.1.1 ### Changes diff --git a/README.md b/README.md index 41aa785a5c..179a9f03e7 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ AiiDA (www.aiida.net) is a workflow manager for computational science with a str |-----|----------------------------------------------------------------------------| |Latest release| [![PyPI version](https://badge.fury.io/py/aiida-core.svg)](https://badge.fury.io/py/aiida-core) [![conda-forge](https://img.shields.io/conda/vn/conda-forge/aiida-core.svg?style=flat)](https://anaconda.org/conda-forge/aiida-core) [![PyPI pyversions](https://img.shields.io/pypi/pyversions/aiida-core.svg)](https://pypi.python.org/pypi/aiida-core/) | |Getting help| [![Docs status](https://readthedocs.org/projects/aiida-core/badge)](http://aiida-core.readthedocs.io/) [![Google Group](https://img.shields.io/badge/-Google%20Group-lightgrey.svg)](https://groups.google.com/forum/#!forum/aiidausers) -|Build status| [![Build Status](https://travis-ci.org/aiidateam/aiida-core.svg?branch=develop)](https://travis-ci.org/aiidateam/aiida-core) [![Coverage Status](https://coveralls.io/repos/github/aiidateam/aiida-core/badge.svg?branch=develop)](https://coveralls.io/github/aiidateam/aiida-core?branch=develop) | +|Build status| [![Build Status](https://github.com/aiidateam/aiida-core/workflows/continuous-integration/badge.svg)](https://github.com/aiidateam/aiida-core/actions) [![Coverage Status](https://codecov.io/gh/aiidateam/aiida-core/branch/develop/graph/badge.svg)](https://codecov.io/gh/aiidateam/aiida-core) | |Activity| [![PyPI-downloads](https://img.shields.io/pypi/dm/aiida-core.svg?style=flat)](https://pypistats.org/packages/aiida-core) [![Commit Activity](https://img.shields.io/github/commit-activity/m/aiidateam/aiida-core.svg)](https://github.com/aiidateam/aiida-core/pulse) |Community| [![Affiliated with NumFOCUS](https://img.shields.io/badge/NumFOCUS-affiliated%20project-orange.svg?style=flat&colorA=E1523D&colorB=007D8A)](https://numfocus.org/sponsored-projects/affiliated-projects) [![Twitter](https://img.shields.io/twitter/follow/aiidateam.svg?style=social&label=Follow)](https://twitter.com/aiidateam)
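The `GroupPath` utility advertised in the changelog above treats `/`-delimited group labels as a virtual folder hierarchy. A minimal sketch of its use, assuming a few stored groups exist and using only the attributes exercised by the `verdi group path ls` implementation later in this diff (a path is "virtual" when no stored `Group` carries exactly that label):

```python
from aiida.tools.groups.paths import GroupPath

root = GroupPath('')  # root of the virtual hierarchy over all group labels
for child in sorted(root.walk()):  # recursively visit every sub-path
    if not child.is_virtual:
        # only paths that correspond to an actual stored Group
        print(child.path, child.get_group().description)
```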
diff --git a/aiida/__init__.py b/aiida/__init__.py index a6a582127c..bbaea6a112 100644 --- a/aiida/__init__.py +++ b/aiida/__init__.py @@ -32,7 +32,7 @@ 'For further information please visit http://www.aiida.net/. All rights reserved.' ) __license__ = 'MIT license, see LICENSE.txt file.' -__version__ = '1.1.1' +__version__ = '1.2.0' __authors__ = 'The AiiDA team.' __paper__ = ( 'G. Pizzi, A. Cepellotti, R. Sabatini, N. Marzari, and B. Kozinsky,' diff --git a/aiida/backends/djsite/db/migrations/0044_dbgroup_type_string.py b/aiida/backends/djsite/db/migrations/0044_dbgroup_type_string.py new file mode 100644 index 0000000000..8c577ce397 --- /dev/null +++ b/aiida/backends/djsite/db/migrations/0044_dbgroup_type_string.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,too-few-public-methods +"""Migration after the `Group` class became pluggable and so the group `type_string` changed.""" + +# pylint: disable=no-name-in-module,import-error +from django.db import migrations +from aiida.backends.djsite.db.migrations import upgrade_schema_version + +REVISION = '1.0.44' +DOWN_REVISION = '1.0.43' + +forward_sql = [ + """UPDATE db_dbgroup SET type_string = 'core' WHERE type_string = 'user';""", + """UPDATE db_dbgroup SET type_string = 'core.upf' WHERE type_string = 'data.upf';""", + """UPDATE db_dbgroup SET type_string = 'core.import' WHERE type_string = 'auto.import';""", + """UPDATE db_dbgroup SET type_string = 'core.auto' WHERE type_string = 'auto.run';""", +] + +reverse_sql = [ + """UPDATE db_dbgroup SET type_string = 'user' WHERE type_string = 'core';""", + """UPDATE db_dbgroup SET type_string = 'data.upf' WHERE type_string = 'core.upf';""", + """UPDATE db_dbgroup SET type_string = 'auto.import' WHERE type_string = 'core.import';""", + """UPDATE db_dbgroup SET type_string = 'auto.run' WHERE type_string = 'core.auto';""", +] + + +class Migration(migrations.Migration): + """Migration after the update of group `type_string`""" + dependencies = [ + ('db', '0043_default_link_label'), + ] + + operations = [ + migrations.RunSQL(sql='\n'.join(forward_sql), reverse_sql='\n'.join(reverse_sql)), + upgrade_schema_version(REVISION, DOWN_REVISION), + ]
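The remapping above reflects that group type strings now double as entry point names, so `Group` subclasses can be resolved through the plugin system. A rough sketch, assuming the `GroupFactory` and `UpfFamily` names used elsewhere in this diff (the family label is illustrative, and instantiation follows the generic `Group(label=...)` pattern):

```python
from aiida.plugins import GroupFactory

UpfFamily = GroupFactory('core.upf')  # resolve the class registered for the 'core.upf' type string
family = UpfFamily(label='my-upf-family').store()  # illustrative label
print(family.type_string)  # -> 'core.upf'
```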
diff --git a/aiida/backends/djsite/db/migrations/__init__.py b/aiida/backends/djsite/db/migrations/__init__.py index a832b4e5f7..41ee2b3d2c 100644 --- a/aiida/backends/djsite/db/migrations/__init__.py +++ b/aiida/backends/djsite/db/migrations/__init__.py @@ -21,7 +21,7 @@ class DeserializationException(AiidaException): pass -LATEST_MIGRATION = '0043_default_link_label' +LATEST_MIGRATION = '0044_dbgroup_type_string' def _update_schema_version(version, apps, _): diff --git a/aiida/backends/djsite/queries.py b/aiida/backends/djsite/queries.py index 209d646306..ff8121c7ab 100644 --- a/aiida/backends/djsite/queries.py +++ b/aiida/backends/djsite/queries.py @@ -108,43 +108,6 @@ def query_group(q_object, args): if args.group_pk is not None: q_object.add(Q(dbgroups__pk__in=args.group_pk), Q.AND) - @staticmethod - def _extract_formula(struc_pk, args, deser_data): - """Extract formula.""" - from aiida.orm.nodes.data.structure import (get_formula, get_symbols_string) - - if struc_pk is not None: - # Exclude structures by the elements - if args.element is not None: - all_kinds = [k['symbols'] for k in deser_data[struc_pk]['kinds']] - all_symbols = [item for sublist in all_kinds for item in sublist] - if not any([s in args.element for s in all_symbols]): - return None - if args.element_only is not None: - all_kinds = [k['symbols'] for k in deser_data[struc_pk]['kinds']] - all_symbols = [item for sublist in all_kinds for item in sublist] - if not all([s in all_symbols for s in args.element_only]): - return None - - # build the formula - symbol_dict = { - k['name']: get_symbols_string(k['symbols'], k['weights']) for k in deser_data[struc_pk]['kinds'] - } - try: - symbol_list = [symbol_dict[s['kind_name']] for s in deser_data[struc_pk]['sites']] - formula = get_formula(symbol_list, mode=args.formulamode) - # If for some reason there is no kind with the name - # referenced by the site - except KeyError: - formula = '<<NOT FOUND>>' - # cycle if we imposed the filter on elements - if args.element is not None or args.element_only is not None: - return None - else: - formula = '<<NOT FOUND>>' - - return formula - def get_bands_and_parents_structure(self, args): """Returns bands and closest parent structure.""" from django.db.models import Q @@ -175,14 +138,24 @@ def get_bands_and_parents_structure(self, args): # get the closest structures (WITHOUT DbPath) structure_dict = get_closest_parents(pks, Q(node_type='data.structure.StructureData.'), chunk_size=1) - struc_pks = [structure_dict[pk] for pk in pks] + struc_pks = [structure_dict.get(pk) for pk in pks] # query for the attributes needed for the structure formula res_attr = models.DbNode.objects.filter(id__in=struc_pks).values_list('id', 'attributes') + res_attr = {rattr[0]: rattr[1] for rattr in res_attr} # prepare the printout for (b_id_lbl_date, struc_pk) in zip(this_chunk, struc_pks): - formula = self._extract_formula(struc_pk, args, {rattr[0]: rattr[1] for rattr in res_attr}) + if struc_pk is not None: + strct = res_attr[struc_pk] + akinds, asites = strct['kinds'], strct['sites'] + formula = self._extract_formula(akinds, asites, args) + else: + if args.element is not None or args.element_only is not None: + formula = None + else: + formula = '<<NOT FOUND>>' + if formula is None: continue entry_list.append([ diff --git a/aiida/backends/general/abstractqueries.py b/aiida/backends/general/abstractqueries.py index bf8a74bca1..1851eca2c6 100644 --- a/aiida/backends/general/abstractqueries.py +++ b/aiida/backends/general/abstractqueries.py @@ -120,8 +120,19 @@ def get_statistics_dict(dataset): return statistics @staticmethod - def _extract_formula(args, akinds, asites): - """Extract formula from the structure object.""" + def _extract_formula(akinds, asites, args): + """ + Extract formula from the structure object. + + :param akinds: list of kinds, e.g. [{'mass': 55.845, 'name': 'Fe', 'symbols': ['Fe'], 'weights': [1.0]}, + {'mass': 15.9994, 'name': 'O', 'symbols': ['O'], 'weights': [1.0]}] + :param asites: list of structure sites e.g. 
[{'position': [0.0, 0.0, 0.0], 'kind_name': 'Fe'}, + {'position': [2.0, 2.0, 2.0], 'kind_name': 'O'}] + :param args: a namespace with parsed command line parameters, here only 'element' and 'element_only' are used + :type args: dict + + :return: a string with formula if the formula is found + """ from aiida.orm.nodes.data.structure import (get_formula, get_symbols_string) if args.element is not None: @@ -136,7 +147,7 @@ def _extract_formula(args, akinds, asites): # We want only the StructureData that have attributes if akinds is None or asites is None: - return None + return '<<NOT FOUND>>' symbol_dict = {} for k in akinds:
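For reference, the helper's core logic applied to the example `kinds`/`sites` from the docstring above amounts to the following standalone sketch (`mode='hill'` stands in for the `args.formulamode` value passed on the command line):

```python
from aiida.orm.nodes.data.structure import get_formula, get_symbols_string

akinds = [{'mass': 55.845, 'name': 'Fe', 'symbols': ['Fe'], 'weights': [1.0]},
          {'mass': 15.9994, 'name': 'O', 'symbols': ['O'], 'weights': [1.0]}]
asites = [{'position': [0.0, 0.0, 0.0], 'kind_name': 'Fe'},
          {'position': [2.0, 2.0, 2.0], 'kind_name': 'O'}]

# map each kind name to its symbol string, then resolve every site to its symbol
symbol_dict = {k['name']: get_symbols_string(k['symbols'], k['weights']) for k in akinds}
symbol_list = [symbol_dict[s['kind_name']] for s in asites]
print(get_formula(symbol_list, mode='hill'))  # -> 'FeO'
```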
@@ -161,7 +172,9 @@ def get_bands_and_parents_structure(self, args): :returns: A list of sublists, each latter containing (in order): - pk as string, formula as string, creation date, bandsdata-label""" + pk as string, formula as string, creation date, bandsdata-label + """ + # pylint: disable=too-many-locals import datetime from aiida.common import timezone @@ -173,22 +186,23 @@ def get_bands_and_parents_structure(self, args): else: q_build.append(orm.User, tag='creator') - bdata_filters = {} - if args.past_days is not None: - bdata_filters.update({'ctime': {'>=': timezone.now() - datetime.timedelta(days=args.past_days)}}) - - q_build.append( - orm.BandsData, tag='bdata', with_user='creator', filters=bdata_filters, project=['id', 'label', 'ctime'] - ) - group_filters = {} if args.group_name is not None: group_filters.update({'name': {'in': args.group_name}}) if args.group_pk is not None: group_filters.update({'id': {'in': args.group_pk}}) - if group_filters: - q_build.append(orm.Group, tag='group', filters=group_filters, with_node='bdata') + + q_build.append(orm.Group, tag='group', filters=group_filters, with_user='creator') + + bdata_filters = {} + if args.past_days is not None: + bdata_filters.update({'ctime': {'>=': timezone.now() - datetime.timedelta(days=args.past_days)}}) + + q_build.append( + orm.BandsData, tag='bdata', with_group='group', filters=bdata_filters, project=['id', 'label', 'ctime'] + ) + bands_list_data = q_build.all() q_build.append( orm.StructureData, @@ -200,12 +214,15 @@ q_build.order_by({orm.StructureData: {'ctime': 'desc'}}) - list_data = q_build.distinct() + structure_dict = dict() + list_data = q_build.distinct().all() + for bid, _, _, _, akinds, asites in list_data: + structure_dict[bid] = (akinds, asites) entry_list = [] already_visited_bdata = set() - for [bid, blabel, bdate, _, akinds, asites] in list_data.all(): + for [bid, blabel, bdate] in bands_list_data: # We process only one StructureData per BandsData. # We want to process the closest StructureData to @@ -217,7 +234,17 @@ if already_visited_bdata.__contains__(bid): continue already_visited_bdata.add(bid) - formula = self._extract_formula(args, akinds, asites) + strct = structure_dict.get(bid, None) + + if strct is not None: + akinds, asites = strct + formula = self._extract_formula(akinds, asites, args) + else: + if args.element is not None or args.element_only is not None: + formula = None + else: + formula = '<<NOT FOUND>>' + if formula is None: continue entry_list.append([str(bid), str(formula), bdate.strftime('%d %b %Y'), blabel]) diff --git a/aiida/backends/sqlalchemy/migrations/versions/bf591f31dd12_dbgroup_type_string.py b/aiida/backends/sqlalchemy/migrations/versions/bf591f31dd12_dbgroup_type_string.py new file mode 100644 index 0000000000..626b561c12 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/bf591f31dd12_dbgroup_type_string.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Migration after the `Group` class became pluggable and so the group `type_string` changed. + +Revision ID: bf591f31dd12 +Revises: 118349c10896 +Create Date: 2020-03-31 10:00:52.609146 + +""" +# pylint: disable=no-name-in-module,import-error,invalid-name,no-member +from alembic import op +from sqlalchemy.sql import text + +forward_sql = [ + """UPDATE db_dbgroup SET type_string = 'core' WHERE type_string = 'user';""", + """UPDATE db_dbgroup SET type_string = 'core.upf' WHERE type_string = 'data.upf';""", + """UPDATE db_dbgroup SET type_string = 'core.import' WHERE type_string = 'auto.import';""", + """UPDATE db_dbgroup SET type_string = 'core.auto' WHERE type_string = 'auto.run';""", +] + +reverse_sql = [ + """UPDATE db_dbgroup SET type_string = 'user' WHERE type_string = 'core';""", + """UPDATE db_dbgroup SET type_string = 'data.upf' WHERE type_string = 'core.upf';""", + """UPDATE db_dbgroup SET type_string = 'auto.import' WHERE type_string = 'core.import';""", + """UPDATE db_dbgroup SET type_string = 'auto.run' WHERE type_string = 'core.auto';""", +] + +# revision identifiers, used by Alembic. 
+revision = 'bf591f31dd12' +down_revision = '118349c10896' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + conn = op.get_bind() + statement = text('\n'.join(forward_sql)) + conn.execute(statement) + + +def downgrade(): + """Migrations for the downgrade.""" + conn = op.get_bind() + statement = text('\n'.join(reverse_sql)) + conn.execute(statement) diff --git a/aiida/backends/testbase.py b/aiida/backends/testbase.py index de855eec4b..ed18f27566 100644 --- a/aiida/backends/testbase.py +++ b/aiida/backends/testbase.py @@ -99,7 +99,11 @@ def tearDown(self): def reset_database(self): """Reset the database to the default state deleting any content currently stored""" + from aiida.orm import autogroup + self.clean_db() + if autogroup.CURRENT_AUTOGROUP is not None: + autogroup.CURRENT_AUTOGROUP.clear_group_cache() self.insert_data() @classmethod @@ -109,7 +113,10 @@ def insert_data(cls): inserts default data into the database (which is for the moment a default computer). """ + from aiida.orm import User + cls.create_user() + User.objects.reset() cls.create_computer() @classmethod @@ -180,7 +187,11 @@ def user_email(cls): # pylint: disable=no-self-argument def tearDownClass(cls, *args, **kwargs): # pylint: disable=arguments-differ # Double check for double security to avoid to run the tearDown # if this is not a test profile + from aiida.orm import autogroup + check_if_tests_can_run() + if autogroup.CURRENT_AUTOGROUP is not None: + autogroup.CURRENT_AUTOGROUP.clear_group_cache() cls.clean_db() cls.clean_repository() cls.__backend_instance.tearDownClass_method(*args, **kwargs) diff --git a/aiida/cmdline/commands/cmd_data/cmd_upf.py b/aiida/cmdline/commands/cmd_data/cmd_upf.py index 78f79b0d9e..745f4af7a2 100644 --- a/aiida/cmdline/commands/cmd_data/cmd_upf.py +++ b/aiida/cmdline/commands/cmd_data/cmd_upf.py @@ -64,22 +64,13 @@ def upf_listfamilies(elements, with_description): """ from aiida import orm from aiida.plugins import DataFactory - from aiida.orm.nodes.data.upf import UPFGROUP_TYPE UpfData = DataFactory('upf') # pylint: disable=invalid-name query = orm.QueryBuilder() query.append(UpfData, tag='upfdata') if elements is not None: query.add_filter(UpfData, {'attributes.element': {'in': elements}}) - query.append( - orm.Group, - with_node='upfdata', - tag='group', - project=['label', 'description'], - filters={'type_string': { - '==': UPFGROUP_TYPE - }} - ) + query.append(orm.UpfFamily, with_node='upfdata', tag='group', project=['label', 'description']) query.distinct() if query.count() > 0: diff --git a/aiida/cmdline/commands/cmd_export.py b/aiida/cmdline/commands/cmd_export.py index 4e4b8f0066..651d25ca1a 100644 --- a/aiida/cmdline/commands/cmd_export.py +++ b/aiida/cmdline/commands/cmd_export.py @@ -145,17 +145,26 @@ def create( @options.ARCHIVE_FORMAT() @options.FORCE(help='overwrite output file if it already exists') @options.SILENT() -def migrate(input_file, output_file, force, silent, archive_format): +@click.option( + '-v', + '--version', + type=click.STRING, + required=False, + metavar='VERSION', + help='Specify an exact archive version to migrate to. By default the most recent version is taken.' +) +def migrate(input_file, output_file, force, silent, archive_format, version): # pylint: disable=too-many-locals,too-many-statements,too-many-branches - """ - Migrate an old export archive file to the most recent format. 
- """ + """Migrate an export archive to a more recent format version.""" import tarfile import zipfile from aiida.common import json from aiida.common.folders import SandboxFolder - from aiida.tools.importexport import migration, extract_zip, extract_tar + from aiida.tools.importexport import EXPORT_VERSION, migration, extract_zip, extract_tar, ArchiveMigrationError + + if version is None: + version = EXPORT_VERSION if os.path.exists(output_file) and not force: echo.echo_critical('the output file already exists') @@ -178,7 +187,10 @@ def migrate(input_file, output_file, force, silent, archive_format): echo.echo_critical('export archive does not contain the required file {}'.format(fhandle.filename)) old_version = migration.verify_metadata_version(metadata) - new_version = migration.migrate_recursively(metadata, data, folder) + try: + new_version = migration.migrate_recursively(metadata, data, folder, version) + except ArchiveMigrationError as exception: + echo.echo_critical(exception) with open(folder.get_abs_path('data.json'), 'wb') as fhandle: json.dump(data, fhandle, indent=4) diff --git a/aiida/cmdline/commands/cmd_group.py b/aiida/cmdline/commands/cmd_group.py index dff712033a..11ca95e4bf 100644 --- a/aiida/cmdline/commands/cmd_group.py +++ b/aiida/cmdline/commands/cmd_group.py @@ -8,12 +8,11 @@ # For further information please visit http://www.aiida.net # ########################################################################### """`verdi group` commands""" - import click from aiida.common.exceptions import UniquenessError from aiida.cmdline.commands.cmd_verdi import verdi -from aiida.cmdline.params import options, arguments, types +from aiida.cmdline.params import options, arguments from aiida.cmdline.utils import echo from aiida.cmdline.utils.decorators import with_dbenv @@ -123,6 +122,7 @@ def group_description(group, description): @verdi_group.command('show') @options.RAW(help='Show only a space-separated list of PKs of the calculations in the group') +@options.LIMIT() @click.option( '-u', '--uuid', @@ -132,18 +132,23 @@ def group_description(group, description): ) @arguments.GROUP() @with_dbenv() -def group_show(group, raw, uuid): +def group_show(group, raw, limit, uuid): """Show information for a given group.""" from tabulate import tabulate from aiida.common.utils import str_timedelta from aiida.common import timezone + if limit: + node_iterator = group.nodes[:limit] + else: + node_iterator = group.nodes + if raw: if uuid: - echo.echo(' '.join(str(_.uuid) for _ in group.nodes)) + echo.echo(' '.join(str(_.uuid) for _ in node_iterator)) else: - echo.echo(' '.join(str(_.pk) for _ in group.nodes)) + echo.echo(' '.join(str(_.pk) for _ in node_iterator)) else: type_string = group.type_string desc = group.description @@ -172,86 +177,90 @@ def group_show(group, raw, uuid): echo.echo(tabulate(table, headers=header)) -@with_dbenv() -def valid_group_type_strings(): - from aiida.orm import GroupTypeString - return tuple(i.value for i in GroupTypeString) - - -@with_dbenv() -def user_defined_group(): - from aiida.orm import GroupTypeString - return GroupTypeString.USER.value - - @verdi_group.command('list') -@options.ALL_USERS(help='Show groups for all users, rather than only for the current user') -@click.option( - '-u', - '--user', - 'user_email', - type=click.STRING, - help='Add a filter to show only groups belonging to a specific user' -) -@click.option('-a', '--all-types', is_flag=True, default=False, help='Show groups of all types') +@options.ALL_USERS(help='Show groups for all 
users, rather than only for the current user.') +@options.USER(help='Add a filter to show only groups belonging to a specific user') +@options.ALL(help='Show groups of all types.') @click.option( '-t', '--type', 'group_type', - type=types.LazyChoice(valid_group_type_strings), - default=user_defined_group, + default=None, help='Show groups of a specific type, instead of user-defined groups. Start with semicolon if you want to ' - 'specify aiida-internal type' + 'specify aiida-internal type. [deprecated: use `--type-string` instead. Will be removed in 2.0.0]' ) +@options.TYPE_STRING() @click.option( '-d', '--with-description', 'with_description', is_flag=True, default=False, help='Show also the group description.' ) -@click.option('-C', '--count', is_flag=True, default=False, help='Show also the number of nodes in the group') -@options.PAST_DAYS(help='add a filter to show only groups created in the past N days', default=None) +@click.option('-C', '--count', is_flag=True, default=False, help='Show also the number of nodes in the group.') +@options.PAST_DAYS(help='Add a filter to show only groups created in the past N days.', default=None) @click.option( '-s', '--startswith', type=click.STRING, default=None, - help='add a filter to show only groups for which the name begins with STRING' + help='Add a filter to show only groups for which the label begins with STRING.' ) @click.option( '-e', '--endswith', type=click.STRING, default=None, - help='add a filter to show only groups for which the name ends with STRING' + help='Add a filter to show only groups for which the label ends with STRING.' ) @click.option( '-c', '--contains', type=click.STRING, default=None, - help='add a filter to show only groups for which the name contains STRING' + help='Add a filter to show only groups for which the label contains STRING.' 
) -@options.NODE(help='Show only the groups that contain the node') +@options.ORDER_BY(type=click.Choice(['id', 'label', 'ctime']), default='id') +@options.ORDER_DIRECTION() +@options.NODE(help='Show only the groups that contain the node.') @with_dbenv() def group_list( - all_users, user_email, all_types, group_type, with_description, count, past_days, startswith, endswith, contains, - node + all_users, user, all_entries, group_type, type_string, with_description, count, past_days, startswith, endswith, + contains, order_by, order_dir, node ): """Show a list of existing groups.""" - # pylint: disable=too-many-branches,too-many-arguments, too-many-locals + # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements import datetime - from aiida.common.escaping import escape_for_sql_like - from aiida.common import timezone - from aiida.orm import Group - from aiida.orm import QueryBuilder - from aiida.orm import User + import warnings from aiida import orm + from aiida.common import timezone + from aiida.common.escaping import escape_for_sql_like + from aiida.common.warnings import AiidaDeprecationWarning from tabulate import tabulate - query = QueryBuilder() + builder = orm.QueryBuilder() filters = {} - # Specify group types - if not all_types: - filters = {'type_string': {'==': group_type}} + if group_type is not None: + warnings.warn('`--group-type` is deprecated, use `--type-string` instead', AiidaDeprecationWarning) # pylint: disable=no-member + + if type_string is not None: + raise click.BadOptionUsage('group-type', 'cannot use `--group-type` and `--type-string` at the same time.') + else: + type_string = group_type + + # Have to specify the default for `type_string` here instead of directly in the option otherwise it will always + # raise above if the user specifies just the `--group-type` option. Once that option is removed, the default can + # be moved to the option itself. 
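Stripped of its option handling, the listing logic that follows assembles a `QueryBuilder` query along these lines (a sketch with illustrative filter values; the real code adds the label, past-days and node filters conditionally):

```python
from aiida import orm

builder = orm.QueryBuilder()
builder.append(orm.Group, filters={'type_string': 'core'}, tag='group', project='*')
# by default, only groups belonging to the current user are shown
builder.append(orm.User, filters={'email': {'==': 'aiida@localhost'}}, with_group='group')
builder.order_by({orm.Group: {'label': 'asc'}})
for (group,) in builder.all():
    print(group.pk, group.label)
```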
+ if type_string is None: + type_string = 'core' + + if not all_entries: + if '%' in type_string or '_' in type_string: + filters['type_string'] = {'like': type_string} + else: + filters['type_string'] = type_string # Creation time if past_days: @@ -266,26 +275,25 @@ def group_list( if contains: filters['or'].append({'label': {'like': '%{}%'.format(escape_for_sql_like(contains))}}) - query.append(Group, filters=filters, tag='group', project='*') + builder.append(orm.Group, filters=filters, tag='group', project='*') # Query groups that belong to specific user - if user_email: - user = user_email + if user: + user_email = user.email else: # By default: only groups of this user - user = orm.User.objects.get_default().email + user_email = orm.User.objects.get_default().email # Query groups that belong to all users if not all_users: - query.append(User, filters={'email': {'==': user}}, with_group='group') + builder.append(orm.User, filters={'email': {'==': user_email}}, with_group='group') # Query groups that contain a particular node if node: - from aiida.orm import Node - query.append(Node, filters={'id': {'==': node.id}}, with_group='group') + builder.append(orm.Node, filters={'id': {'==': node.id}}, with_group='group') - query.order_by({Group: {'id': 'asc'}}) - result = query.all() + builder.order_by({orm.Group: {order_by: order_dir}}) + result = builder.all() projection_lambdas = { 'pk': lambda group: str(group.pk), @@ -311,9 +319,13 @@ def group_list( for group in result: table.append([projection_lambdas[field](group[0]) for field in projection_fields]) - if not all_types: - echo.echo_info('If you want to see the groups of all types, please add -a/--all-types option') - echo.echo(tabulate(table, headers=projection_header)) + if not all_entries: + echo.echo_info('to show groups of all types, use the `-a/--all` option.') + + if not table: + echo.echo_info('no groups found matching the specified criteria.') + else: + echo.echo(tabulate(table, headers=projection_header)) @verdi_group.command('create') @@ -322,9 +334,8 @@ def group_list( def group_create(group_label): """Create an empty group with a given name.""" from aiida import orm - from aiida.orm import GroupTypeString - group, created = orm.Group.objects.get_or_create(label=group_label, type_string=GroupTypeString.USER.value) + group, created = orm.Group.objects.get_or_create(label=group_label) if created: echo.echo_success("Group created with PK = {} and name '{}'".format(group.id, group.label)) @@ -343,7 +354,7 @@ def group_copy(source_group, destination_group): Note that the destination group may not exist.""" from aiida import orm - dest_group, created = orm.Group.objects.get_or_create(label=destination_group, type_string=source_group.type_string) + dest_group, created = orm.Group.objects.get_or_create(label=destination_group) # Issue warning if destination group is not empty and get user confirmation to continue if not created and not dest_group.is_empty: @@ -353,3 +364,70 @@ def group_copy(source_group, destination_group): # Copy nodes dest_group.add_nodes(list(source_group.nodes)) echo.echo_success('Nodes copied from group<{}> to group<{}>'.format(source_group.label, dest_group.label)) + + +@verdi_group.group('path') +def verdi_group_path(): + """Inspect groups of nodes, with delimited label paths.""" + + +@verdi_group_path.command('ls') +@click.argument('path', type=click.STRING, required=False) +@options.TYPE_STRING(default='core', help='Filter to only include groups of this type string.') +@click.option('-R', '--recursive', 
is_flag=True, default=False, help='Recursively list sub-paths encountered.') +@click.option('-l', '--long', 'as_table', is_flag=True, default=False, help='List as a table, with sub-group count.') +@click.option( + '-d', + '--with-description', + 'with_description', + is_flag=True, + default=False, + help='Show also the group description.' +) +@click.option( + '--no-virtual', + 'no_virtual', + is_flag=True, + default=False, + help='Only show paths that fully correspond to an existing group.' +) +@click.option('--no-warn', is_flag=True, default=False, help='Do not issue a warning if any paths are invalid.') +@with_dbenv() +def group_path_ls(path, type_string, recursive, as_table, no_virtual, with_description, no_warn): + # pylint: disable=too-many-arguments,too-many-branches + """Show a list of existing group paths.""" + from aiida.plugins import GroupFactory + from aiida.tools.groups.paths import GroupPath, InvalidPath + + try: + path = GroupPath(path or '', cls=GroupFactory(type_string), warn_invalid_child=not no_warn) + except InvalidPath as err: + echo.echo_critical(str(err)) + + if recursive: + children = path.walk() + else: + children = path.children + + if as_table or with_description: + from tabulate import tabulate + headers = ['Path', 'Sub-Groups'] + if with_description: + headers.append('Description') + rows = [] + for child in sorted(children): + if no_virtual and child.is_virtual: + continue + row = [ + child.path if child.is_virtual else click.style(child.path, bold=True), + len([c for c in child.walk() if not c.is_virtual]) + ] + if with_description: + row.append('-' if child.is_virtual else child.get_group().description) + rows.append(row) + echo.echo(tabulate(rows, headers=headers)) + else: + for child in sorted(children): + if no_virtual and child.is_virtual: + continue + echo.echo(child.path, bold=not child.is_virtual) diff --git a/aiida/cmdline/commands/cmd_plugin.py b/aiida/cmdline/commands/cmd_plugin.py index f09c064950..3232441379 100644 --- a/aiida/cmdline/commands/cmd_plugin.py +++ b/aiida/cmdline/commands/cmd_plugin.py @@ -13,7 +13,7 @@ from aiida.cmdline.commands.cmd_verdi import verdi from aiida.cmdline.utils import decorators, echo -from aiida.plugins.entry_point import entry_point_group_to_module_path_map +from aiida.plugins.entry_point import ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP @verdi.group('plugin') @@ -22,7 +22,7 @@ def verdi_plugin(): @verdi_plugin.command('list') -@click.argument('entry_point_group', type=click.Choice(entry_point_group_to_module_path_map.keys()), required=False) +@click.argument('entry_point_group', type=click.Choice(ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP.keys()), required=False) @click.argument('entry_point', type=click.STRING, required=False) @decorators.with_dbenv() def plugin_list(entry_point_group, entry_point): @@ -34,7 +34,7 @@ def plugin_list(entry_point_group, entry_point): if entry_point_group is None: echo.echo_info('Available entry point groups:') - for group in sorted(entry_point_group_to_module_path_map.keys()): + for group in sorted(ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP.keys()): echo.echo('* {}'.format(group)) echo.echo('') diff --git a/aiida/cmdline/commands/cmd_restapi.py b/aiida/cmdline/commands/cmd_restapi.py index a6b8c9adf4..9cbde54473 100644 --- a/aiida/cmdline/commands/cmd_restapi.py +++ b/aiida/cmdline/commands/cmd_restapi.py @@ -12,60 +12,54 @@ Main advantage of doing this by means of a verdi command is that different profiles can be selected at hook-up (-p flag). 
""" -import os import click -import aiida.restapi from aiida.cmdline.commands.cmd_verdi import verdi from aiida.cmdline.params.options import HOSTNAME, PORT - -CONFIG_DIR = os.path.join(os.path.split(os.path.abspath(aiida.restapi.__file__))[0], 'common') +from aiida.restapi.common import config @verdi.command('restapi') -@HOSTNAME(default='127.0.0.1') -@PORT(default=5000) +@HOSTNAME(default=config.CLI_DEFAULTS['HOST_NAME']) +@PORT(default=config.CLI_DEFAULTS['PORT']) @click.option( '-c', '--config-dir', type=click.Path(exists=True), - default=CONFIG_DIR, - help='the path of the configuration directory' + default=config.CLI_DEFAULTS['CONFIG_DIR'], + help='Path to the configuration directory' ) -@click.option('--debug', 'debug', is_flag=True, default=False, help='run app in debug mode') +@click.option('--debug', 'debug', is_flag=True, default=config.APP_CONFIG['DEBUG'], help='Enable debugging') @click.option( '--wsgi-profile', - 'wsgi_profile', is_flag=True, - default=False, - help='to use WSGI profiler middleware for finding bottlenecks in web application' + default=config.CLI_DEFAULTS['WSGI_PROFILE'], + help='Whether to enable WSGI profiler middleware for finding bottlenecks' +) +@click.option( + '--hookup/--no-hookup', + 'hookup', + is_flag=True, + default=config.CLI_DEFAULTS['HOOKUP_APP'], + help='Hookup app to flask server' ) -@click.option('--hookup/--no-hookup', 'hookup', is_flag=True, default=True, help='to hookup app') def restapi(hostname, port, config_dir, debug, wsgi_profile, hookup): """ Run the AiiDA REST API server. Example Usage: - \b - verdi -p restapi --hostname 127.0.0.5 --port 6789 --config-dir - --debug --wsgi-profile --hookup + verdi -p restapi --hostname 127.0.0.5 --port 6789 """ - from aiida.restapi.api import App, AiidaApi from aiida.restapi.run_api import run_api - # Construct parameter dictionary - kwargs = dict( - prog_name='verdi-restapi', + # Invoke the runner + run_api( hostname=hostname, port=port, config=config_dir, debug=debug, wsgi_profile=wsgi_profile, hookup=hookup, - catch_internal_server=True ) - - # Invoke the runner - run_api(App, AiidaApi, **kwargs) diff --git a/aiida/cmdline/commands/cmd_run.py b/aiida/cmdline/commands/cmd_run.py index 5a43cad6f5..d46b6f984c 100644 --- a/aiida/cmdline/commands/cmd_run.py +++ b/aiida/cmdline/commands/cmd_run.py @@ -10,13 +10,16 @@ """`verdi run` command.""" import contextlib import os +import functools import sys +import warnings import click from aiida.cmdline.commands.cmd_verdi import verdi from aiida.cmdline.params.options.multivalue import MultipleValueOption from aiida.cmdline.utils import decorators, echo +from aiida.common.warnings import AiidaDeprecationWarning @contextlib.contextmanager @@ -37,31 +40,56 @@ def update_environment(argv): sys.path = _path +def validate_entrypoint_string(ctx, param, value): # pylint: disable=unused-argument,invalid-name + """Validate that `value` is a valid entrypoint string.""" + from aiida.orm import autogroup + + try: + autogroup.Autogroup.validate(value) + except Exception as exc: + raise click.BadParameter(str(exc) + ' ({})'.format(value)) + + return value + + @verdi.command('run', context_settings=dict(ignore_unknown_options=True,)) @click.argument('scriptname', type=click.STRING) @click.argument('varargs', nargs=-1, type=click.UNPROCESSED) -@click.option('-g', '--group', is_flag=True, default=True, show_default=True, help='Enables the autogrouping') -@click.option('-n', '--group-name', type=click.STRING, required=False, help='Specify the name of the auto group') 
-@click.option('-e', '--exclude', cls=MultipleValueOption, default=[], help='Exclude these classes from auto grouping')
+@click.option('--auto-group', is_flag=True, help='Enables the autogrouping')
+@click.option(
+    '-l',
+    '--auto-group-label-prefix',
+    type=click.STRING,
+    required=False,
+    help='Specify the prefix of the label of the auto group (numbers might be automatically '
+    'appended to generate unique names per run).'
+)
@click.option(
-    '-i', '--include', cls=MultipleValueOption, default=['all'], help='Include these classes from auto grouping'
+    '-n',
+    '--group-name',
+    type=click.STRING,
+    required=False,
+    help='Specify the name of the auto group [DEPRECATED, USE --auto-group-label-prefix instead]. '
+    'This also enables auto-grouping.'
)
@click.option(
-    '-E',
-    '--excludesubclasses',
+    '-e',
+    '--exclude',
    cls=MultipleValueOption,
-    default=[],
-    help='Exclude these classes and their sub classes from auto grouping'
+    default=None,
+    help='Exclude these classes from auto grouping (use full entrypoint strings).',
+    callback=functools.partial(validate_entrypoint_string)
)
@click.option(
-    '-I',
-    '--includesubclasses',
+    '-i',
+    '--include',
    cls=MultipleValueOption,
-    default=[],
-    help='Include these classes and their sub classes from auto grouping'
+    default=None,
+    help='Include these classes in auto grouping (use full entrypoint strings or "all").',
+    callback=validate_entrypoint_string
)
@decorators.with_dbenv()
-def run(scriptname, varargs, group, group_name, exclude, excludesubclasses, include, includesubclasses):
+def run(scriptname, varargs, auto_group, auto_group_label_prefix, group_name, exclude, include):
    # pylint: disable=too-many-arguments,exec-used
    """Execute scripts with preloaded AiiDA environment."""
    from aiida.cmdline.utils.shell import DEFAULT_MODULES_LIST
@@ -80,22 +108,27 @@ def run(scriptname, varargs, group, group_name, exclude, excludesubclasses, incl
    for app_mod, model_name, alias in DEFAULT_MODULES_LIST:
        globals_dict['{}'.format(alias)] = getattr(__import__(app_mod, {}, {}, model_name), model_name)

-    if group:
-        automatic_group_name = group_name
-        if automatic_group_name is None:
-            from aiida.common import timezone
-
-            automatic_group_name = 'Verdi autogroup on ' + timezone.now().strftime('%Y-%m-%d %H:%M:%S')
+    if group_name:
+        warnings.warn('--group-name is deprecated, use `--auto-group-label-prefix` instead', AiidaDeprecationWarning)  # pylint: disable=no-member
+        if auto_group_label_prefix:
+            raise click.BadParameter(
+                'You cannot specify both --group-name and --auto-group-label-prefix; '
+                'use --auto-group-label-prefix only'
+            )
+        auto_group_label_prefix = group_name
+        # To have the old behavior, with auto-group enabled.
+        auto_group = True
+
+    if auto_group:
        aiida_verdilib_autogroup = autogroup.Autogroup()
+        # Set the ``group_label_prefix`` if defined, otherwise a default prefix will be used
+        if auto_group_label_prefix is not None:
+            aiida_verdilib_autogroup.set_group_label_prefix(auto_group_label_prefix)
        aiida_verdilib_autogroup.set_exclude(exclude)
        aiida_verdilib_autogroup.set_include(include)
-        aiida_verdilib_autogroup.set_exclude_with_subclasses(excludesubclasses)
-        aiida_verdilib_autogroup.set_include_with_subclasses(includesubclasses)
-        aiida_verdilib_autogroup.set_group_name(automatic_group_name)

        # Note: this is also set in the exec environment!
This is the intended behavior
-        autogroup.current_autogroup = aiida_verdilib_autogroup
+        autogroup.CURRENT_AUTOGROUP = aiida_verdilib_autogroup

    # Initialize the variable here, otherwise we get UnboundLocalError in the finally clause if it fails to open
    handle = None
@@ -117,5 +150,6 @@ def run(scriptname, varargs, group, group_name, exclude, excludesubclasses, incl
        # Re-raise the exception to have the error code properly returned at the end
        raise
    finally:
+        autogroup.CURRENT_AUTOGROUP = None
        if handle:
            handle.close()
diff --git a/aiida/cmdline/commands/cmd_setup.py b/aiida/cmdline/commands/cmd_setup.py
index 4edc23426b..fbfbf8b23c 100644
--- a/aiida/cmdline/commands/cmd_setup.py
+++ b/aiida/cmdline/commands/cmd_setup.py
@@ -136,22 +136,15 @@ def quicksetup(
        echo.echo_critical('failed to determine the PostgreSQL setup')

    try:
-        create = True
-        if not postgres.dbuser_exists(db_username):
-            postgres.create_dbuser(db_username, db_password)
-        else:
-            db_name, create = postgres.check_db_name(db_name)
-
-        if create:
-            postgres.create_db(db_username, db_name)
+        db_username, db_name = postgres.create_dbuser_db_safe(dbname=db_name, dbuser=db_username, dbpass=db_password)
    except Exception as exception:
        echo.echo_error(
            '\n'.join([
                'Oops! quicksetup was unable to create the AiiDA database for you.',
-                'For AiiDA to work, please either create the database yourself as follows:',
-                manual_setup_instructions(dbuser=su_db_username, dbname=su_db_name), '',
-                'Alternatively, give your (operating system) user permission to create postgresql databases' +
-                'and run quicksetup again.', ''
+                'See `verdi quicksetup -h` for how to specify non-standard parameters for the postgresql connection.\n'
+                'Alternatively, create the AiiDA database yourself: ',
+                manual_setup_instructions(dbuser=su_db_username,
+                                          dbname=su_db_name), '', 'and then use `verdi setup` instead', ''
            ])
        )
        raise exception
@@ -169,8 +162,8 @@ def quicksetup(
        'db_backend': db_backend,
        'db_name': db_name,
        # from now on we connect as the AiiDA DB user, which may be forbidden when going via sockets
-        'db_host': db_host or 'localhost',
-        'db_port': db_port,
+        'db_host': postgres.host_for_psycopg2,
+        'db_port': postgres.port_for_psycopg2,
        'db_username': db_username,
        'db_password': db_password,
        'repository': repository,
diff --git a/aiida/cmdline/params/arguments/__init__.py b/aiida/cmdline/params/arguments/__init__.py
index 2ceb521725..71bb8c2544 100644
--- a/aiida/cmdline/params/arguments/__init__.py
+++ b/aiida/cmdline/params/arguments/__init__.py
@@ -18,7 +18,7 @@
__all__ = (
    'PROFILE', 'PROFILES', 'CALCULATION', 'CALCULATIONS', 'CODE', 'CODES', 'COMPUTER', 'COMPUTERS', 'DATUM', 'DATA',
    'GROUP', 'GROUPS', 'NODE', 'NODES', 'PROCESS', 'PROCESSES', 'WORKFLOW', 'WORKFLOWS', 'INPUT_FILE', 'OUTPUT_FILE',
-    'LABEL', 'USER', 'PROFILE_NAME', 'CONFIG_OPTION'
+    'LABEL', 'USER', 'CONFIG_OPTION'
)
@@ -62,10 +62,8 @@
OUTPUT_FILE = OverridableArgument('output_file', metavar='OUTPUT_FILE', type=click.Path())

-LABEL = OverridableArgument('label')
+LABEL = OverridableArgument('label', type=click.STRING)

USER = OverridableArgument('user', metavar='USER', type=types.UserParamType())

-PROFILE_NAME = OverridableArgument('profile_name', type=click.STRING)
-
CONFIG_OPTION = OverridableArgument('option', type=types.ConfigOptionParamType())
diff --git a/aiida/cmdline/params/options/__init__.py b/aiida/cmdline/params/options/__init__.py
index c2352ac70a..708930028f 100644
--- a/aiida/cmdline/params/options/__init__.py
+++ b/aiida/cmdline/params/options/__init__.py
@@ -10,6 +10,8
@@ """Module with pre-defined reusable commandline options that can be used as `click` decorators.""" import click +# Note: importing from aiida.manage.postgres leads to circular imports +from pgsu import DEFAULT_DSN as DEFAULT_DBINFO # pylint: disable=no-name-in-module from aiida.backends import BACKEND_DJANGO, BACKEND_SQLA from ...utils import defaults, echo @@ -23,12 +25,12 @@ 'graph_traversal_rules', 'PROFILE', 'CALCULATION', 'CALCULATIONS', 'CODE', 'CODES', 'COMPUTER', 'COMPUTERS', 'DATUM', 'DATA', 'GROUP', 'GROUPS', 'NODE', 'NODES', 'FORCE', 'SILENT', 'VISUALIZATION_FORMAT', 'INPUT_FORMAT', 'EXPORT_FORMAT', 'ARCHIVE_FORMAT', 'NON_INTERACTIVE', 'DRY_RUN', 'USER_EMAIL', 'USER_FIRST_NAME', 'USER_LAST_NAME', - 'USER_INSTITUTION', 'BACKEND', 'DB_HOST', 'DB_PORT', 'DB_USERNAME', 'DB_PASSWORD', 'DB_NAME', 'REPOSITORY_PATH', - 'PROFILE_ONLY_CONFIG', 'PROFILE_SET_DEFAULT', 'PREPEND_TEXT', 'APPEND_TEXT', 'LABEL', 'DESCRIPTION', 'INPUT_PLUGIN', - 'CALC_JOB_STATE', 'PROCESS_STATE', 'EXIT_STATUS', 'FAILED', 'LIMIT', 'PROJECT', 'ORDER_BY', 'PAST_DAYS', - 'OLDER_THAN', 'ALL', 'ALL_STATES', 'ALL_USERS', 'GROUP_CLEAR', 'RAW', 'HOSTNAME', 'TRANSPORT', 'SCHEDULER', 'USER', - 'PORT', 'FREQUENCY', 'VERBOSE', 'TIMEOUT', 'FORMULA_MODE', 'TRAJECTORY_INDEX', 'WITH_ELEMENTS', - 'WITH_ELEMENTS_EXCLUSIVE' + 'USER_INSTITUTION', 'DB_BACKEND', 'DB_ENGINE', 'DB_HOST', 'DB_PORT', 'DB_USERNAME', 'DB_PASSWORD', 'DB_NAME', + 'REPOSITORY_PATH', 'PROFILE_ONLY_CONFIG', 'PROFILE_SET_DEFAULT', 'PREPEND_TEXT', 'APPEND_TEXT', 'LABEL', + 'DESCRIPTION', 'INPUT_PLUGIN', 'CALC_JOB_STATE', 'PROCESS_STATE', 'PROCESS_LABEL', 'TYPE_STRING', 'EXIT_STATUS', + 'FAILED', 'LIMIT', 'PROJECT', 'ORDER_BY', 'PAST_DAYS', 'OLDER_THAN', 'ALL', 'ALL_STATES', 'ALL_USERS', + 'GROUP_CLEAR', 'RAW', 'HOSTNAME', 'TRANSPORT', 'SCHEDULER', 'USER', 'PORT', 'FREQUENCY', 'VERBOSE', 'TIMEOUT', + 'FORMULA_MODE', 'TRAJECTORY_INDEX', 'WITH_ELEMENTS', 'WITH_ELEMENTS_EXCLUSIVE' ) TRAVERSAL_RULE_HELP_STRING = { @@ -210,41 +212,65 @@ def decorator(command): USER_EMAIL = OverridableOption( '--email', - type=click.STRING, - prompt='Email Address (identifies your data when sharing)', - help='Email address that will be associated with your data and will be exported along with it, ' - 'should you choose to share any of your work.' + 'email', + type=types.EmailType(), + help='Email address associated with the data you generate. The email address is exported along with the data, ' + 'when sharing it.' ) USER_FIRST_NAME = OverridableOption( - '--first-name', type=click.STRING, prompt='First name', help='First name of the user.' + '--first-name', type=types.NonEmptyStringParamType(), help='First name of the user.' ) -USER_LAST_NAME = OverridableOption('--last-name', type=click.STRING, prompt='Last name', help='Last name of the user.') +USER_LAST_NAME = OverridableOption('--last-name', type=types.NonEmptyStringParamType(), help='Last name of the user.') USER_INSTITUTION = OverridableOption( - '--institution', type=click.STRING, prompt='Institution', help='Institution name of the user.' + '--institution', type=types.NonEmptyStringParamType(), help='Institution of the user.' +) + +DB_ENGINE = OverridableOption( + '--db-engine', + help='Engine to use to connect to the database.', + default='postgresql_psycopg2', + type=click.Choice(['postgresql_psycopg2']) ) -BACKEND = OverridableOption( - '--backend', +DB_BACKEND = OverridableOption( + '--db-backend', type=click.Choice([BACKEND_DJANGO, BACKEND_SQLA]), default=BACKEND_DJANGO, help='Database backend to use.' 
) -DB_HOST = OverridableOption('--db-host', type=click.STRING, help='Database server host.') +DB_HOST = OverridableOption( + '--db-host', + type=types.HostnameType(), + help='Database server host. Leave empty for "peer" authentication.', + default=DEFAULT_DBINFO['host'] +) -DB_PORT = OverridableOption('--db-port', type=click.INT, help='Database server port.') +DB_PORT = OverridableOption( + '--db-port', + type=click.INT, + help='Database server port.', + default=DEFAULT_DBINFO['port'], +) -DB_USERNAME = OverridableOption('--db-username', type=click.STRING, help='Database user name.') +DB_USERNAME = OverridableOption( + '--db-username', type=types.NonEmptyStringParamType(), help='Name of the database user.' +) -DB_PASSWORD = OverridableOption('--db-password', type=click.STRING, help='Database user password.') +DB_PASSWORD = OverridableOption( + '--db-password', + type=click.STRING, + help='Password of the database user.', + hide_input=True, +) -DB_NAME = OverridableOption('--db-name', type=click.STRING, help='Database name.') +DB_NAME = OverridableOption('--db-name', type=types.NonEmptyStringParamType(), help='Database name.') REPOSITORY_PATH = OverridableOption( - '--repository', type=click.Path(file_okay=False), help='Absolute path for the file system repository.' + '--repository', type=click.Path(file_okay=False), help='Absolute path to the file repository.' ) PROFILE_ONLY_CONFIG = OverridableOption( @@ -307,6 +333,16 @@ def decorator(command): help='Only include entries whose process label matches this filter.' ) +TYPE_STRING = OverridableOption( + '-T', + '--type-string', + 'type_string', + type=click.STRING, + required=False, + help='Only include entries whose type string matches this filter. Can include `_` to match a single arbitrary ' + 'character or `%` to match any number of characters.' +) + EXIT_STATUS = OverridableOption( '-E', '--exit-status', 'exit_status', type=click.INT, help='Only include entries with this exit status.' ) @@ -389,7 +425,7 @@ def decorator(command): help='Display only raw query results, without any headers or footers.' ) -HOSTNAME = OverridableOption('-H', '--hostname', help='Hostname.') +HOSTNAME = OverridableOption('-H', '--hostname', type=types.HostnameType(), help='Hostname.') TRANSPORT = OverridableOption( '-T', '--transport', type=types.PluginParamType(group='transports'), required=True, help='Transport type.' @@ -459,7 +495,11 @@ def decorator(command): help='Only select objects containing only these and no other elements.' ) -CONFIG_FILE = ConfigFileOption('--config', help='Load option values from configuration file in yaml format.') +CONFIG_FILE = ConfigFileOption( + '--config', + type=click.Path(exists=True, dir_okay=False), + help='Load option values from configuration file in yaml format.' 
+) IDENTIFIER = OverridableOption( '-i', diff --git a/aiida/cmdline/params/options/commands/computer.py b/aiida/cmdline/params/options/commands/computer.py index 1209524de5..5a39fc99b1 100644 --- a/aiida/cmdline/params/options/commands/computer.py +++ b/aiida/cmdline/params/options/commands/computer.py @@ -45,15 +45,13 @@ def should_call_default_mpiprocs_per_machine(ctx): # pylint: disable=invalid-na return job_resource_cls.accepts_default_mpiprocs_per_machine() -LABEL = options.LABEL.clone( - prompt='Computer label', cls=InteractiveOption, required=True, type=types.NonEmptyStringParamType() -) +LABEL = options.LABEL.clone(prompt='Computer label', cls=InteractiveOption, required=True) HOSTNAME = options.HOSTNAME.clone( prompt='Hostname', cls=InteractiveOption, required=True, - help='The fully qualified hostname of this computer; for local transports, use localhost.' + help='The fully qualified hostname of the computer; use "localhost" for local transports.', ) DESCRIPTION = options.DESCRIPTION.clone( diff --git a/aiida/cmdline/params/options/commands/setup.py b/aiida/cmdline/params/options/commands/setup.py index 781aa97be9..3fffab2102 100644 --- a/aiida/cmdline/params/options/commands/setup.py +++ b/aiida/cmdline/params/options/commands/setup.py @@ -15,7 +15,7 @@ import click -from aiida.backends import BACKEND_DJANGO, BACKEND_SQLA +from aiida.backends import BACKEND_DJANGO from aiida.cmdline.params import options, types from aiida.manage.configuration import get_config, get_config_option, Profile from aiida.manage.external.postgres import DEFAULT_DBINFO @@ -157,102 +157,58 @@ def get_quicksetup_password(ctx, param, value): # pylint: disable=unused-argume cls=options.interactive.InteractiveOption ) -SETUP_USER_EMAIL = options.OverridableOption( - '--email', - 'email', - prompt='User email', - help='Email address that serves as the user name and a way to identify data created by it.', +SETUP_USER_EMAIL = options.USER_EMAIL.clone( + prompt='Email Address (for sharing data)', default=get_config_option('user.email'), required_fn=lambda x: get_config_option('user.email') is None, required=True, cls=options.interactive.InteractiveOption ) -SETUP_USER_FIRST_NAME = options.OverridableOption( - '--first-name', - 'first_name', +SETUP_USER_FIRST_NAME = options.USER_FIRST_NAME.clone( prompt='First name', - help='First name of the user.', - type=click.STRING, default=get_config_option('user.first_name'), required_fn=lambda x: get_config_option('user.first_name') is None, required=True, cls=options.interactive.InteractiveOption ) -SETUP_USER_LAST_NAME = options.OverridableOption( - '--last-name', - 'last_name', +SETUP_USER_LAST_NAME = options.USER_LAST_NAME.clone( prompt='Last name', - help='Last name of the user.', - type=click.STRING, default=get_config_option('user.last_name'), required_fn=lambda x: get_config_option('user.last_name') is None, required=True, cls=options.interactive.InteractiveOption ) -SETUP_USER_INSTITUTION = options.OverridableOption( - '--institution', - 'institution', +SETUP_USER_INSTITUTION = options.USER_INSTITUTION.clone( prompt='Institution', - help='Institution of the user.', - type=click.STRING, default=get_config_option('user.institution'), required_fn=lambda x: get_config_option('user.institution') is None, required=True, cls=options.interactive.InteractiveOption ) -SETUP_USER_PASSWORD = options.OverridableOption( - '--password', - 'password', - prompt='Password', - help='Optional password to connect to REST API.', - hide_input=True, - type=click.STRING, - 
default=PASSWORD_UNCHANGED, - confirmation_prompt=True, - cls=options.interactive.InteractiveOption -) +QUICKSETUP_DATABASE_ENGINE = options.DB_ENGINE -QUICKSETUP_DATABASE_ENGINE = options.OverridableOption( - '--db-engine', - help='Engine to use to connect to the database.', - default='postgresql_psycopg2', - type=click.Choice(['postgresql_psycopg2']) -) +QUICKSETUP_DATABASE_BACKEND = options.DB_BACKEND -QUICKSETUP_DATABASE_BACKEND = options.OverridableOption( - '--db-backend', - help='Backend type to use to map the database.', - default=BACKEND_DJANGO, - type=click.Choice([BACKEND_DJANGO, BACKEND_SQLA]) -) - -QUICKSETUP_DATABASE_HOSTNAME = options.OverridableOption( - '--db-host', help='Hostname to connect to the database.', default=DEFAULT_DBINFO['host'], type=click.STRING -) +QUICKSETUP_DATABASE_HOSTNAME = options.DB_HOST -QUICKSETUP_DATABASE_PORT = options.OverridableOption( - '--db-port', help='Port to connect to the database.', default=DEFAULT_DBINFO['port'], type=click.INT -) +QUICKSETUP_DATABASE_PORT = options.DB_PORT QUICKSETUP_DATABASE_NAME = options.OverridableOption( - '--db-name', help='Name of the database to create.', type=click.STRING, callback=get_quicksetup_database_name + '--db-name', + help='Name of the database to create.', + type=types.NonEmptyStringParamType(), + callback=get_quicksetup_database_name ) -QUICKSETUP_DATABASE_USERNAME = options.OverridableOption( - '--db-username', help='Name of the database user to create.', type=click.STRING, callback=get_quicksetup_username +QUICKSETUP_DATABASE_USERNAME = options.DB_USERNAME.clone( + help='Name of the database user to create.', callback=get_quicksetup_username ) -QUICKSETUP_DATABASE_PASSWORD = options.OverridableOption( - '--db-password', - help='Password to connect to the database.', - type=click.STRING, - hide_input=True, - callback=get_quicksetup_password -) +QUICKSETUP_DATABASE_PASSWORD = options.DB_PASSWORD.clone(callback=get_quicksetup_password) QUICKSETUP_SUPERUSER_DATABASE_USERNAME = options.OverridableOption( '--su-db-username', help='User name of the database super user.', type=click.STRING, default=DEFAULT_DBINFO['user'] @@ -270,13 +226,10 @@ def get_quicksetup_password(ctx, param, value): # pylint: disable=unused-argume help='Password to connect as the database superuser.', type=click.STRING, hide_input=True, - default=DEFAULT_DBINFO['password'] + default=DEFAULT_DBINFO['password'], ) -QUICKSETUP_REPOSITORY_URI = options.OverridableOption( - '--repository', - help='Absolute path for the file system repository.', - type=click.Path(file_okay=False), +QUICKSETUP_REPOSITORY_URI = options.REPOSITORY_PATH.clone( callback=get_quicksetup_repository_uri # Cannot use `default` because `ctx` is needed to determine the default ) @@ -293,14 +246,14 @@ def get_quicksetup_password(ctx, param, value): # pylint: disable=unused-argume ) SETUP_DATABASE_HOSTNAME = QUICKSETUP_DATABASE_HOSTNAME.clone( - prompt='Database hostname', - contextual_default=functools.partial(get_profile_attribute_default, ('database_hostname', 'localhost')), + prompt='Database host', + contextual_default=functools.partial(get_profile_attribute_default, ('database_hostname', DEFAULT_DBINFO['host'])), cls=options.interactive.InteractiveOption ) SETUP_DATABASE_PORT = QUICKSETUP_DATABASE_PORT.clone( prompt='Database port', - contextual_default=functools.partial(get_profile_attribute_default, ('database_port', 5432)), + contextual_default=functools.partial(get_profile_attribute_default, ('database_port', DEFAULT_DBINFO['port'])), 
cls=options.interactive.InteractiveOption ) diff --git a/aiida/cmdline/params/options/interactive.py b/aiida/cmdline/params/options/interactive.py index 4c0101175d..8006d1b6fb 100644 --- a/aiida/cmdline/params/options/interactive.py +++ b/aiida/cmdline/params/options/interactive.py @@ -21,8 +21,8 @@ class InteractiveOption(ConditionalOption): """ - Intercepts certain keyword arguments to circumvent :mod:`click`'s prompting - behaviour and define a more feature-rich one + Prompts for input, intercepting certain keyword arguments to replace :mod:`click`'s prompting + behaviour with a more feature-rich one. .. note:: This class has a parameter ``required_fn`` that can be passed to its ``__init__`` (inherited from the superclass :py:class:`~aiida.cmdline.params.options.conditional.ConditionalOption`) and a @@ -106,7 +106,7 @@ def get_default(self, ctx): return None def _get_default(self, ctx): - """provides the functionality of :func:`click.Option.get_default`""" + """provides the functionality of :meth:`click.Option.get_default`""" if self._contextual_default is not None: default = self._contextual_default(ctx) else: @@ -185,8 +185,9 @@ def safely_convert(self, value, param, ctx): successful = False if value is self.CHARACTER_IGNORE_DEFAULT: - # The ignore default character signifies that the user wants to "not" set the value, so we return `None` - return True, None + # The ignore default character signifies that the user wants to "not" set the value. + # Replace value by an empty string for further processing (e.g. if a non-empty value is required). + value = '' try: value = self.type.convert(value, param, ctx) diff --git a/aiida/cmdline/params/types/__init__.py b/aiida/cmdline/params/types/__init__.py index f2849e7933..3b44d31358 100644 --- a/aiida/cmdline/params/types/__init__.py +++ b/aiida/cmdline/params/types/__init__.py @@ -20,7 +20,7 @@ from .multiple import MultipleValueParamType from .node import NodeParamType from .process import ProcessParamType -from .nonemptystring import NonEmptyStringParamType +from .strings import (NonEmptyStringParamType, EmailType, HostnameType, EntryPointType, LabelStringType) from .path import AbsolutePathParamType, ImportPath from .plugin import PluginParamType from .profile import ProfileParamType diff --git a/aiida/cmdline/params/types/choice.py b/aiida/cmdline/params/types/choice.py index 47cc63b06f..b1ccce62e0 100644 --- a/aiida/cmdline/params/types/choice.py +++ b/aiida/cmdline/params/types/choice.py @@ -43,7 +43,6 @@ def _click_choice(self): """ if self.__click_choice is None: self.__click_choice = click.Choice(self._get_choices()) - # self._get_choices = None return self.__click_choice @property diff --git a/aiida/cmdline/params/types/code.py b/aiida/cmdline/params/types/code.py index 1266d96b6c..da1c6753bc 100644 --- a/aiida/cmdline/params/types/code.py +++ b/aiida/cmdline/params/types/code.py @@ -8,8 +8,9 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Module to define the custom click type for code.""" - import click + +from aiida.cmdline.utils import decorators from .identifier import IdentifierParamType @@ -40,6 +41,14 @@ def orm_class_loader(self): from aiida.orm.utils.loaders import CodeEntityLoader return CodeEntityLoader + @decorators.with_dbenv() + def complete(self, ctx, incomplete): # pylint: disable=unused-argument + """Return possible completions based on an incomplete value. 
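+
+        Matches are drawn from the labels of existing codes, as queried through the
+        entity loader below; each completion is returned with an empty description.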
+ + :returns: list of tuples of valid entry points (matching incomplete) and a description + """ + return [(option, '') for option, in self.orm_class_loader.get_options(incomplete, project='label')] + def convert(self, value, param, ctx): code = super().convert(value, param, ctx) diff --git a/aiida/cmdline/params/types/group.py b/aiida/cmdline/params/types/group.py index ef216044e7..0645ac6e65 100644 --- a/aiida/cmdline/params/types/group.py +++ b/aiida/cmdline/params/types/group.py @@ -7,13 +7,11 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -""" -Module for custom click param type group -""" - +"""Module for custom click param type group.""" import click -from aiida.cmdline.utils.decorators import with_dbenv +from aiida.common.lang import type_check +from aiida.cmdline.utils import decorators from .identifier import IdentifierParamType @@ -23,29 +21,58 @@ class GroupParamType(IdentifierParamType): name = 'Group' - def __init__(self, create_if_not_exist=False): + def __init__(self, create_if_not_exist=False, sub_classes=('aiida.groups:core',)): + """Construct the parameter type. + + The `sub_classes` argument can be used to narrow the set of subclasses of `Group` that should be matched. By + default all subclasses of `Group` will be matched, otherwise it is restricted to the subclasses that correspond + to the entry point names in the tuple of `sub_classes`. + + To prevent having to load the database environment at import time, the actual loading of the entry points is + deferred until the call to `convert` is made. This is to keep the command line autocompletion light and + responsive. The entry point strings will be validated, however, to see if they correspond to known entry points. + + :param create_if_not_exist: boolean, if True, will create the group if it does not yet exist. By default the + group created will be of class `Group`, unless another subclass is specified through `sub_classes`. Note + that in this case, only a single entry point name can be specified + :param sub_classes: a tuple of entry point strings from the `aiida.groups` entry point group. + """ + type_check(sub_classes, tuple, allow_none=True) + + if create_if_not_exist and len(sub_classes) > 1: + raise ValueError('`sub_classes` can at most contain one entry point if `create_if_not_exist=True`') + self._create_if_not_exist = create_if_not_exist - super().__init__() + super().__init__(sub_classes=sub_classes) @property def orm_class_loader(self): - """ - Return the orm entity loader class, which should be a subclass of OrmEntityLoader. This class is supposed - to be used to load the entity for a given identifier + """Return the orm entity loader class, which should be a subclass of `OrmEntityLoader`. + + This class is supposed to be used to load the entity for a given identifier. - :return: the orm entity loader class for this ParamType + :return: the orm entity loader class for this `ParamType` """ from aiida.orm.utils.loaders import GroupEntityLoader return GroupEntityLoader - @with_dbenv() + @decorators.with_dbenv() + def complete(self, ctx, incomplete): # pylint: disable=unused-argument + """Return possible completions based on an incomplete value. 
+ + :returns: list of tuples of valid entry points (matching incomplete) and a description + """ + return [(option, '') for option, in self.orm_class_loader.get_options(incomplete, project='label')] + + @decorators.with_dbenv() def convert(self, value, param, ctx): - from aiida.orm import Group, GroupTypeString try: group = super().convert(value, param, ctx) except click.BadParameter: if self._create_if_not_exist: - group = Group(label=value, type_string=GroupTypeString.USER.value) + # The particular subclass to load will be stored in `_sub_classes` as loaded by `convert` of the super. + cls = self._sub_classes[0] + group = cls(label=value) else: raise diff --git a/aiida/cmdline/params/types/identifier.py b/aiida/cmdline/params/types/identifier.py index 5fcb824e08..94deaf21a4 100644 --- a/aiida/cmdline/params/types/identifier.py +++ b/aiida/cmdline/params/types/identifier.py @@ -86,6 +86,8 @@ def convert(self, value, param, ctx): from aiida.common import exceptions from aiida.orm.utils.loaders import OrmEntityLoader + value = super().convert(value, param, ctx) + if not value: raise click.BadParameter('the value for the identifier cannot be empty') diff --git a/aiida/cmdline/params/types/plugin.py b/aiida/cmdline/params/types/plugin.py index a4a6077554..607e0c2a35 100644 --- a/aiida/cmdline/params/types/plugin.py +++ b/aiida/cmdline/params/types/plugin.py @@ -16,9 +16,10 @@ from aiida.plugins.entry_point import ENTRY_POINT_STRING_SEPARATOR, ENTRY_POINT_GROUP_PREFIX, EntryPointFormat from aiida.plugins.entry_point import format_entry_point_string, get_entry_point_string_format from aiida.plugins.entry_point import get_entry_point, get_entry_points, get_entry_point_groups +from ..types import EntryPointType -class PluginParamType(click.ParamType): +class PluginParamType(EntryPointType): """ AiiDA Plugin name parameter type. @@ -203,6 +204,8 @@ def convert(self, value, param, ctx): Convert the string value to an entry point instance, if the value can be successfully parsed into an actual entry point. Will raise click.BadParameter if validation fails. """ + value = super().convert(value, param, ctx) + if not value: raise click.BadParameter('plugin name cannot be empty') diff --git a/aiida/cmdline/params/types/profile.py b/aiida/cmdline/params/types/profile.py index b89cacdaf7..6c3902bad6 100644 --- a/aiida/cmdline/params/types/profile.py +++ b/aiida/cmdline/params/types/profile.py @@ -9,10 +9,10 @@ ########################################################################### """Profile param type for click.""" -import click +from .strings import LabelStringType -class ProfileParamType(click.ParamType): +class ProfileParamType(LabelStringType): """The profile parameter type for click.""" name = 'profile' @@ -31,6 +31,8 @@ def convert(self, value, param, ctx): from aiida.common.exceptions import MissingConfigurationError, ProfileConfigurationError from aiida.manage.configuration import get_config, load_profile, Profile + value = super().convert(value, param, ctx) + try: config = get_config(create=True) profile = config.get_profile(value) diff --git a/aiida/cmdline/params/types/strings.py b/aiida/cmdline/params/types/strings.py new file mode 100644 index 0000000000..d85c667794 --- /dev/null +++ b/aiida/cmdline/params/types/strings.py @@ -0,0 +1,124 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
#
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+"""
+Module for various text-based string validation.
+"""
+
+import re
+from click.types import StringParamType
+
+
+class NonEmptyStringParamType(StringParamType):
+    """Parameter whose values have to be string and non-empty."""
+    name = 'nonemptystring'
+
+    def convert(self, value, param, ctx):
+        newval = super().convert(value, param, ctx)
+
+        # Note: Valid :py:class:`click.ParamType`s need to pass through None unchanged
+        if newval is None:
+            return None
+
+        if not newval:  # empty string
+            self.fail('Empty string is not valid!')
+
+        return newval
+
+    def __repr__(self):
+        return 'NONEMPTYSTRING'
+
+
+class LabelStringType(NonEmptyStringParamType):
+    """Parameter accepting valid label strings.
+
+    Non-empty string, made up of word characters (includes underscores [1]), dashes, and dots.
+
+    [1] See https://docs.python.org/3/library/re.html
+    """
+    name = 'labelstring'
+
+    ALPHABET = r'\w\.\-'
+
+    def convert(self, value, param, ctx):
+        newval = super().convert(value, param, ctx)
+
+        if not re.match('^[{}]*$'.format(self.ALPHABET), newval):
+            self.fail('Please use only alphanumeric characters, dashes, underscores or dots')
+
+        return newval
+
+    def __repr__(self):
+        return 'LABELSTRING'
+
+
+HOSTNAME_REGEX = \
+r'^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])(\.([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9]))*$'
+
+
+class HostnameType(StringParamType):
+    """Parameter corresponding to a valid hostname (or empty) string.
+
+    Regex according to https://stackoverflow.com/a/3824105/1069467
+    """
+    name = 'hostname'
+
+    def convert(self, value, param, ctx):
+        newval = super().convert(value, param, ctx)
+
+        if newval and not re.match(HOSTNAME_REGEX, newval):
+            self.fail('Please enter a valid hostname.')
+
+        return newval
+
+    def __repr__(self):
+        return 'HOSTNAME'
+
+
+class EmailType(StringParamType):
+    """Parameter whose values have to correspond to a valid email address format.
+
+    .. note:: For the moment, we do not require the domain suffix, i.e. 'aiida@localhost' is still valid.
+    """
+    name = 'email'
+
+    def convert(self, value, param, ctx):
+        newval = super().convert(value, param, ctx)
+
+        if not re.match(r'[^@]+@[^@]+(\.[^@]+){0,1}', newval):
+            self.fail('Please enter a valid email.')
+
+        return newval
+
+    def __repr__(self):
+        return 'EMAIL'
+
+
+class EntryPointType(NonEmptyStringParamType):
+    """Parameter whose values have to be valid Python entry point strings.
+
+    See https://packaging.python.org/specifications/entry-points/
+    """
+    name = 'entrypoint'
+
+    def convert(self, value, param, ctx):
+        newval = super().convert(value, param, ctx)
+
+        if not re.match(r'^[\w.-]+$', newval):
+            self.fail(
+                'Please enter a valid entry point string: Use only letters, numbers, underscores, dots and dashes.'
+ ) + + return newval + + def __repr__(self): + return 'ENTRYPOINT' diff --git a/aiida/common/escaping.py b/aiida/common/escaping.py index fd6fa6b2c9..64e17b9744 100644 --- a/aiida/common/escaping.py +++ b/aiida/common/escaping.py @@ -38,6 +38,8 @@ def escape_for_bash(str_to_escape): if str_to_escape is None: return '' + str_to_escape = str(str_to_escape) + escaped_quotes = str_to_escape.replace("'", """'"'"'""") return "'{}'".format(escaped_quotes) diff --git a/aiida/common/log.py b/aiida/common/log.py index 7180d20376..9f208072ed 100644 --- a/aiida/common/log.py +++ b/aiida/common/log.py @@ -175,11 +175,17 @@ def configure_logging(with_orm=False, daemon=False, daemon_log_file=None): 'class': 'logging.handlers.RotatingFileHandler', 'filename': daemon_log_file, 'encoding': 'utf8', - 'maxBytes': 100000, + 'maxBytes': 10000000, # 10 MB + 'backupCount': 10, } for logger in config.get('loggers', {}).values(): logger.setdefault('handlers', []).append(daemon_handler_name) + try: + # Remove the `console` stdout stream handler to prevent messages being duplicated in the daemon log file + logger['handlers'].remove('console') + except ValueError: + pass # Add the `DbLogHandler` if `with_orm` is `True` if with_orm: diff --git a/aiida/engine/processes/calcjobs/calcjob.py b/aiida/engine/processes/calcjobs/calcjob.py index 0e6b234ef0..9f3d2d765f 100644 --- a/aiida/engine/processes/calcjobs/calcjob.py +++ b/aiida/engine/processes/calcjobs/calcjob.py @@ -64,7 +64,7 @@ def validate_calc_job(inputs, ctx): ) -def validate_parser(parser_name, ctx): +def validate_parser(parser_name, ctx): # pylint: disable=unused-argument """Validate the parser. :raises InputValidationError: if the parser name does not correspond to a loadable `Parser` class. @@ -78,7 +78,7 @@ def validate_parser(parser_name, ctx): raise exceptions.InputValidationError('invalid parser specified: {}'.format(exception)) -def validate_resources(resources, ctx): +def validate_resources(resources, ctx): # pylint: disable=unused-argument """Validate the resources. :raises InputValidationError: if `num_machines` is not specified or is not an integer. diff --git a/aiida/engine/processes/calcjobs/tasks.py b/aiida/engine/processes/calcjobs/tasks.py index 7138dcd440..c9056449a8 100644 --- a/aiida/engine/processes/calcjobs/tasks.py +++ b/aiida/engine/processes/calcjobs/tasks.py @@ -321,6 +321,9 @@ class Waiting(plumpy.Waiting): """The waiting state for the `CalcJob` process.""" def __init__(self, process, done_callback, msg=None, data=None): + """ + :param :class:`~plumpy.base.state_machine.StateMachine` process: The process this state belongs to + """ super().__init__(process, done_callback, msg, data) self._task = None self._killing = None diff --git a/aiida/engine/processes/exit_code.py b/aiida/engine/processes/exit_code.py index 2178518be6..de660ed5dd 100644 --- a/aiida/engine/processes/exit_code.py +++ b/aiida/engine/processes/exit_code.py @@ -8,49 +8,69 @@ # For further information please visit http://www.aiida.net # ########################################################################### """A namedtuple and namespace for ExitCodes that can be used to exit from Processes.""" - from collections import namedtuple - from aiida.common.extendeddicts import AttributeDict __all__ = ('ExitCode', 'ExitCodesNamespace') -ExitCode = namedtuple('ExitCode', ['status', 'message', 'invalidates_cache']) -ExitCode.__new__.__defaults__ = (0, None, False) -""" -A namedtuple to define an exit code for a :class:`~aiida.engine.processes.process.Process`. 
-When this namedtuple is returned from a Process._run() call, it will be interpreted that the Process
-should be terminated and that the exit status and message of the namedtuple should be set to the
-corresponding attributes of the node.

+class ExitCode(namedtuple('ExitCode', ['status', 'message', 'invalidates_cache'])):
+    """A simple data class to define an exit code for a :class:`~aiida.engine.processes.process.Process`.

-:param status: positive integer exit status, where a non-zero value indicated the process failed, default is `0`
-:type status: int
+    When an instance of this class is returned from a `Process._run()` call, it will be interpreted that the `Process`
+    should be terminated and that the exit status and message of the namedtuple should be set to the corresponding
+    attributes of the node.

-:param message: optional message with more details about the failure mode
-:type message: str
+    .. note:: this class explicitly sub-classes a namedtuple to not break backwards compatibility and to have it behave
+        exactly as a tuple.

-:param invalidates_cache: optional flag, indicating that a process should not be used in caching
-:type invalidates_cache: bool
-"""
+    :param status: positive integer exit status, where a non-zero value indicates the process failed, default is `0`
+    :type status: int

+    :param message: optional message with more details about the failure mode
+    :type message: str

-class ExitCodesNamespace(AttributeDict):
+    :param invalidates_cache: optional flag, indicating that a process should not be used in caching
+    :type invalidates_cache: bool
    """
-    A namespace of ExitCode tuples that can be accessed through getattr as well as getitem.
-    Additionally, the collection can be called with an identifier, that can either reference
-    the integer `status` of the ExitCode that needs to be retrieved or the key in the collection
+
+    def format(self, **kwargs):
+        """Create a clone of this exit code where the template message is replaced by the keyword arguments.
+
+        :param kwargs: replacement parameters for the template message
+        :return: `ExitCode`
+        """
+        try:
+            message = self.message.format(**kwargs)
+        except KeyError:
+            template = 'insufficient or incorrect format parameters `{}` for the message template `{}`.'
+            raise ValueError(template.format(kwargs, self.message))
+
+        return ExitCode(self.status, message, self.invalidates_cache)
+
+    def __eq__(self, other):
+        return all(getattr(self, attr) == getattr(other, attr) for attr in ['status', 'message', 'invalidates_cache'])
+
+
+# Set the defaults for the `ExitCode` attributes
+ExitCode.__new__.__defaults__ = (0, None, False)
+
+
+class ExitCodesNamespace(AttributeDict):
+    """A namespace of `ExitCode` instances that can be accessed through getattr as well as getitem.
+
+    Additionally, the collection can be called with an identifier that can either reference the integer `status` of the
+    `ExitCode` that needs to be retrieved or the key in the collection.
    """

    def __call__(self, identifier):
-        """
-        Return a specific exit code identified by either its exit status or label
+        """Return a specific exit code identified by either its exit status or label.

-        :param identifier: the identifier of the exit code.
If the type is integer, it will be interpreted as
-            the exit code status, otherwise it be interpreted as the exit code label
+        :param identifier: the identifier of the exit code. If the type is integer, it will be interpreted as the exit
+            code status, otherwise it will be interpreted as the exit code label
        :type identifier: str

-        :returns: an ExitCode named tuple
+        :returns: an `ExitCode` instance
        :rtype: :class:`aiida.engine.ExitCode`

        :raises ValueError: if no exit code with the given label is defined for this process
diff --git a/aiida/engine/processes/process_spec.py b/aiida/engine/processes/process_spec.py
index da9d562303..370789ed94 100644
--- a/aiida/engine/processes/process_spec.py
+++ b/aiida/engine/processes/process_spec.py
@@ -65,10 +65,10 @@ def exit_code(self, status, label, message, invalidates_cache=False):
            raise ValueError('status should be a positive integer, received {}'.format(type(status)))

        if not isinstance(label, str):
-            raise TypeError('label should be of basestring type and not of {}'.format(type(label)))
+            raise TypeError('label should be of str type and not of {}'.format(type(label)))

        if not isinstance(message, str):
-            raise TypeError('message should be of basestring type and not of {}'.format(type(message)))
+            raise TypeError('message should be of str type and not of {}'.format(type(message)))

        if not isinstance(invalidates_cache, bool):
            raise TypeError('invalidates_cache should be of type bool and not of {}'.format(type(invalidates_cache)))
diff --git a/aiida/engine/processes/workchains/utils.py b/aiida/engine/processes/workchains/utils.py
index 9869aa3a36..45f2158e8b 100644
--- a/aiida/engine/processes/workchains/utils.py
+++ b/aiida/engine/processes/workchains/utils.py
@@ -11,6 +11,7 @@
from collections import namedtuple
from functools import partial
from inspect import getfullargspec
+from types import FunctionType  # pylint: disable=no-name-in-module

from wrapt import decorator

from ..exit_code import ExitCode
@@ -68,6 +69,9 @@ def process_handler(wrapped=None, *, priority=0, exit_codes=None, enabled=True):
    if wrapped is None:
        return partial(process_handler, priority=priority, exit_codes=exit_codes, enabled=enabled)

+    if not isinstance(wrapped, FunctionType):
+        raise TypeError('first argument can only be an instance method, use keywords for decorator arguments.')
+
    if not isinstance(priority, int):
        raise TypeError('the `priority` keyword should be an integer.')
diff --git a/aiida/manage/caching.py b/aiida/manage/caching.py
index d8079fd747..9b7f1d427d 100644
--- a/aiida/manage/caching.py
+++ b/aiida/manage/caching.py
@@ -22,7 +22,7 @@
from aiida.common import exceptions
from aiida.common.lang import type_check

-from aiida.plugins.entry_point import ENTRY_POINT_STRING_SEPARATOR, entry_point_group_to_module_path_map
+from aiida.plugins.entry_point import ENTRY_POINT_STRING_SEPARATOR, ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP

__all__ = ('get_use_cache', 'enable_caching', 'disable_caching')

@@ -248,7 +248,7 @@ def _validate_identifier_pattern(*, identifier):
    1.

-    where `group_name` is one of the keys in `entry_point_group_to_module_path_map`
+    where `group_name` is one of the keys in `ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP`
    and `tail` can be anything _except_ `ENTRY_POINT_STRING_SEPARATOR`.

    2.
a fully qualified Python name @@ -276,7 +276,7 @@ def _validate_identifier_pattern(*, identifier): group_pattern, _ = identifier.split(ENTRY_POINT_STRING_SEPARATOR) if not any( _match_wildcard(string=group_name, pattern=group_pattern) - for group_name in entry_point_group_to_module_path_map + for group_name in ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP ): raise ValueError( common_error_msg + "Group name pattern '{}' does not match any of the AiiDA entry point group names.". @@ -290,7 +290,7 @@ def _validate_identifier_pattern(*, identifier): # aiida.* or aiida.calculations* if '*' in identifier: group_part, _ = identifier.split('*', 1) - if any(group_name.startswith(group_part) for group_name in entry_point_group_to_module_path_map): + if any(group_name.startswith(group_part) for group_name in ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP): return # Finally, check if it could be a fully qualified Python name for identifier_part in identifier.split('.'): diff --git a/aiida/manage/external/pgsu.py b/aiida/manage/external/pgsu.py index 025972e8a3..05c58e2a97 100644 --- a/aiida/manage/external/pgsu.py +++ b/aiida/manage/external/pgsu.py @@ -13,344 +13,11 @@ separate package that can then be tested on multiple OS / postgres setups. Therefore, **please keep this module entirely AiiDA-agnostic**. """ - -try: - import subprocess32 as subprocess -except ImportError: - import subprocess - -from enum import IntEnum -import click - -DEFAULT_DBINFO = { - 'host': 'localhost', - 'port': 5432, - 'user': 'postgres', - 'password': None, - 'database': 'template1', -} - - -class PostgresConnectionMode(IntEnum): - """Describe mode of connecting to postgres.""" - - DISCONNECTED = 0 - PSYCOPG = 1 - PSQL = 2 - - -class PGSU: - """ - Connect to an existing PostgreSQL cluster as the `postgres` superuser and execute SQL commands. - - Tries to use psycopg2 with a fallback to psql subcommands (using ``sudo su`` to run as postgres user). - - Simple Example:: - - postgres = PGSU() - postgres.execute("CREATE USER testuser PASSWORD 'testpw'") - - Complex Example:: - - postgres = PGSU(interactive=True, dbinfo={'port': 5433}) - postgres.execute("CREATE USER testuser PASSWORD 'testpw'") - - Note: In postgresql - * you cannot drop databases you are currently connected to - * 'template0' is the unmodifiable template database (which you cannot connect to) - * 'template1' is the modifiable template database (which you can connect to) - """ - - def __init__(self, interactive=False, quiet=True, dbinfo=None, determine_setup=True): - """Store postgres connection info. - - :param interactive: use True for verdi commands - :param quiet: use False to show warnings/exceptions - :param dbinfo: psycopg dictionary containing keys like 'host', 'user', 'port', 'database' - :param determine_setup: Whether to determine setup upon instantiation. - You may set this to False and use the 'determine_setup()' method instead. - """ - self.interactive = interactive - self.quiet = quiet - self.connection_mode = PostgresConnectionMode.DISCONNECTED - - self.setup_fail_callback = prompt_db_info if interactive else None - self.setup_fail_counter = 0 - self.setup_max_tries = 1 - - self.dbinfo = DEFAULT_DBINFO.copy() - if dbinfo is not None: - self.dbinfo.update(dbinfo) - - if determine_setup: - self.determine_setup() - - def execute(self, command, **kwargs): - """Execute postgres command using determined connection mode. - - :param command: A psql command line as a str - :param kwargs: will be forwarded to _execute_... 
function - """ - # Use self.dbinfo as default kwargs, update with provided kwargs - kw_copy = self.dbinfo.copy() - kw_copy.update(kwargs) - - if self.connection_mode == PostgresConnectionMode.PSYCOPG: # pylint: disable=no-else-return - return _execute_psyco(command, **kw_copy) - elif self.connection_mode == PostgresConnectionMode.PSQL: - return _execute_psql(command, **kw_copy) - - raise ValueError('Could not connect to postgres.') - - def set_setup_fail_callback(self, callback): - """ - Set a callback to be called when setup cannot be determined automatically - - :param callback: a callable with signature ``callback(interactive, dbinfo)`` - that returns a ``dbinfo`` dictionary. - """ - self.setup_fail_callback = callback - - def determine_setup(self): - """Determine how to connect as the postgres superuser. - - Depending on how postgres is set up, psycopg2 can be used to create dbs and db users, - otherwise a subprocess has to be used that executes psql as an os user with appropriate permissions. - - Note: We aim to connect as a superuser (typically 'postgres') with privileges to manipulate (create/drop) - databases and database users. - - :returns success: True, if connection could be established. - :rtype success: bool - """ - # find out if we run as a postgres superuser or can connect as postgres - # This will work on OSX in some setups but not in the default Debian one - dbinfo = self.dbinfo.copy() - - for pg_user in set([dbinfo.get('user'), None]): - dbinfo['user'] = pg_user - if _try_connect_psycopg(**dbinfo): - self.dbinfo = dbinfo - self.connection_mode = PostgresConnectionMode.PSYCOPG - return True - - # This will work for the default Debian postgres setup, assuming that sudo is available to the user - # Check if the user can find the sudo command - if _sudo_exists(): - if _try_subcmd(interactive=self.interactive, quiet=self.quiet, **dbinfo): - self.dbinfo = dbinfo - self.connection_mode = PostgresConnectionMode.PSQL - return True - elif not self.quiet: - click.echo('Warning: Could not find `sudo` for connecting to the database.') - - self.setup_fail_counter += 1 - return self._no_setup_detected() - - def _no_setup_detected(self): - """Print a warning message and calls the failed setup callback - - :returns: False, if no successful try. 
- """ - message = '\n'.join([ - 'Warning: Unable to autodetect postgres setup - do you know how to access it?', - ]) - - if not self.quiet: - click.echo(message) - - if self.setup_fail_callback and self.setup_fail_counter <= self.setup_max_tries: - self.dbinfo = self.setup_fail_callback(self.interactive, self.dbinfo) - return self.determine_setup() - - return False - - @property - def is_connected(self): - return self.connection_mode in (PostgresConnectionMode.PSYCOPG, PostgresConnectionMode.PSQL) - - -def prompt_db_info(interactive, dbinfo): - """ - Prompt interactively for postgres database connection details - - Can be used as a setup fail callback for :py:class:`PGSU` - - :return: dictionary with the following keys: host, port, database, user - """ - if not interactive: - return DEFAULT_DBINFO - - access = False - while not access: - dbinfo_new = {} - dbinfo_new['host'] = click.prompt('postgres host', default=dbinfo.get('host'), type=str) - dbinfo_new['port'] = click.prompt('postgres port', default=dbinfo.get('port'), type=int) - dbinfo_new['user'] = click.prompt('postgres super user', default=dbinfo.get('user'), type=str) - dbinfo_new['database'] = click.prompt('database', default=dbinfo.get('database'), type=str) - click.echo('') - click.echo('Trying to access postgres ...') - if _try_connect_psycopg(**dbinfo_new): - access = True - else: - dbinfo_new['password'] = click.prompt( - 'postgres password of {}'.format(dbinfo_new['user']), hide_input=True, type=str, default='' - ) - if not dbinfo_new.get('password'): - dbinfo_new.pop('password') - return dbinfo_new - - -def _try_connect_psycopg(**kwargs): - """ - try to start a psycopg2 connection. - - :return: True if successful, False otherwise - """ - from psycopg2 import connect - success = False - try: - conn = connect(**kwargs) - success = True - conn.close() - except Exception: # pylint: disable=broad-except - pass - return success - - -def _sudo_exists(): - """ - Check that the sudo command can be found - - :return: True if successful, False otherwise - """ - try: - subprocess.check_output(['sudo', '-V']) - except subprocess.CalledProcessError: - return False - except OSError: - return False - - return True - - -def _try_subcmd(**kwargs): - """ - try to run psql in a subprocess. - - :return: True if successful, False otherwise - """ - success = False - try: - kwargs['stderr'] = subprocess.STDOUT - _execute_psql(r'\q', **kwargs) - success = True - except subprocess.CalledProcessError: - pass - return success - - -def _execute_psyco(command, **kwargs): - """ - executes a postgres commandline through psycopg2 - - :param command: A psql command line as a str - :param kwargs: will be forwarded to psycopg2.connect - """ - import psycopg2 - - # Note: Ubuntu 18.04 uses "peer" as the default postgres configuration - # which allows connections only when the unix user matches the database user. - # This restriction no longer applies for IPv4/v6-based connection, - # when specifying host=localhost. - if kwargs.get('host') is None: - kwargs['host'] = 'localhost' - - output = None - with psycopg2.connect(**kwargs) as conn: - conn.autocommit = True - with conn.cursor() as cursor: - cursor.execute(command) - if cursor.description is not None: - output = cursor.fetchall() - - # see http://initd.org/psycopg/docs/usage.html#with-statement - conn.close() - return output - - -def _execute_psql(command, user='postgres', quiet=True, interactive=False, **kwargs): - """ - Executes an SQL command via ``psql`` as another system user in a subprocess. 
- - Tries to "become" the user specified in ``kwargs`` (i.e. interpreted as UNIX system user) - and run psql in a subprocess. - - :param command: A psql command line as a str - :param quiet: If True, don't print warnings. - :param interactive: If False, `sudo` won't ask for a password and fail if one is required. - :param kwargs: connection details to forward to psql, signature as in psycopg2.connect - """ - option_str = '' - - database = kwargs.pop('database', None) - if database: - option_str += '-d {}'.format(database) - # to do: Forward password to psql; ignore host only when the password is None. # pylint: disable=fixme - kwargs.pop('password', None) - - host = kwargs.pop('host', 'localhost') - if host and host != 'localhost': - option_str += ' -h {}'.format(host) - elif not quiet: - click.echo( - "Warning: Found host 'localhost' but dropping '-h localhost' option for psql " + - 'since this may cause psql to switch to password-based authentication.' - ) - - port = kwargs.pop('port', None) - if port: - option_str += ' -p {}'.format(port) - - user = kwargs.pop('user', 'postgres') - - # Build command line - sudo_cmd = ['sudo'] - if not interactive: - sudo_cmd += ['-n'] - su_cmd = ['su', user, '-c'] - - psql_cmd = ['psql {opt} -tc {cmd}'.format(cmd=escape_for_bash(command), opt=option_str)] - sudo_su_psql = sudo_cmd + su_cmd + psql_cmd - result = subprocess.check_output(sudo_su_psql, **kwargs) - result = result.decode('utf-8').strip().split('\n') - result = [i for i in result if i] - - return result - - -def escape_for_bash(str_to_escape): - """ - This function takes any string and escapes it in a way that - bash will interpret it as a single string. - - Explanation: - - At the end, in the return statement, the string is put within single - quotes. Therefore, the only thing that I have to escape in bash is the - single quote character. To do this, I substitute every single - quote ' with '"'"' which means: - - First single quote: exit from the enclosing single quotes - - Second, third and fourth character: "'" is a single quote character, - escaped by double quotes - - Last single quote: reopen the single quote to continue the string - - Finally, note that for python I have to enclose the string '"'"' - within triple quotes to make it work, getting finally: the complicated - string found below. - """ - escaped_quotes = str_to_escape.replace("'", """'"'"'""") - return "'{}'".format(escaped_quotes) +import warnings +from pgsu import PGSU, PostgresConnectionMode, DEFAULT_DSN as DEFAULT_DBINFO # pylint: disable=unused-import,no-name-in-module +from aiida.common.warnings import AiidaDeprecationWarning + +warnings.warn( # pylint: disable=no-member + '`aiida.manage.external.pgsu` is now available in the separate `pgsu` package. 
' + 'This module will be removed entirely in AiiDA 2.0.0', AiidaDeprecationWarning +) diff --git a/aiida/manage/external/postgres.py b/aiida/manage/external/postgres.py index 0c6e9b1c5a..680b62e088 100644 --- a/aiida/manage/external/postgres.py +++ b/aiida/manage/external/postgres.py @@ -21,7 +21,7 @@ import click from aiida.cmdline.utils import echo -from .pgsu import PGSU, PostgresConnectionMode, DEFAULT_DBINFO +from pgsu import PGSU, PostgresConnectionMode, DEFAULT_DSN as DEFAULT_DBINFO # pylint: disable=no-name-in-module _CREATE_USER_COMMAND = 'CREATE USER "{}" WITH PASSWORD \'{}\'' _DROP_USER_COMMAND = 'DROP USER "{}"' @@ -32,20 +32,20 @@ ) _DROP_DB_COMMAND = 'DROP DATABASE "{}"' _GRANT_PRIV_COMMAND = 'GRANT ALL PRIVILEGES ON DATABASE "{}" TO "{}"' -_GET_USERS_COMMAND = "SELECT usename FROM pg_user WHERE usename='{}'" +_USER_EXISTS_COMMAND = "SELECT usename FROM pg_user WHERE usename='{}'" _CHECK_DB_EXISTS_COMMAND = "SELECT datname FROM pg_database WHERE datname='{}'" _COPY_DB_COMMAND = 'CREATE DATABASE "{}" WITH TEMPLATE "{}" OWNER "{}"' class Postgres(PGSU): """ - Adds convenience functions to pgsu.Postgres. + Adds convenience functions to :py:class:`pgsu.PGSU`. - Provides conenience functions for + Provides convenience functions for * creating/dropping users * creating/dropping databases - etc. See pgsu.Postgres for implementation details. + etc. Example:: @@ -55,6 +55,10 @@ class Postgres(PGSU): postgres.create_db('username', 'dbname') """ + def __init__(self, dbinfo=None, **kwargs): + """See documentation of :py:meth:`pgsu.PGSU.__init__`.""" + super().__init__(dsn=dbinfo, **kwargs) + @classmethod def from_profile(cls, profile, **kwargs): """Create Postgres instance with dbinfo from AiiDA profile data. @@ -63,7 +67,7 @@ def from_profile(cls, profile, **kwargs): database superuser. :param profile: AiiDA profile instance - :param kwargs: keyword arguments forwarded to Postgres constructor + :param kwargs: keyword arguments forwarded to PGSU constructor :returns: Postgres instance pre-populated with data from AiiDA profile """ @@ -77,23 +81,25 @@ def from_profile(cls, profile, **kwargs): return Postgres(dbinfo=dbinfo, **kwargs) - def check_db_name(self, dbname): - """Looks up if a database with the name exists, prompts for using or creating a differently named one.""" - create = True - while create and self.db_exists(dbname): - echo.echo_info('database {} already exists!'.format(dbname)) - if not click.confirm('Use it (make sure it is not used by another profile)?'): - dbname = click.prompt('new name', type=str, default=dbname) - else: - create = False - return dbname, create + ### DB user functions ### + + def dbuser_exists(self, dbuser): + """ + Find out if postgres user with name dbuser exists + + :param str dbuser: database user to check for + :return: (bool) True if user exists, False otherwise + """ + return bool(self.execute(_USER_EXISTS_COMMAND.format(dbuser))) def create_dbuser(self, dbuser, dbpass): """ Create a database user in postgres - :param dbuser: (str), Name of the user to be created. - :param dbpass: (str), Password the user should be given. + :param str dbuser: Name of the user to be created. + :param str dbpass: Password the user should be given. 
+ :raises: psycopg2.errors.DuplicateObject if user already exists and + self.connection_mode == PostgresConnectionMode.PSYCOPG """ self.execute(_CREATE_USER_COMMAND.format(dbuser, dbpass)) @@ -101,25 +107,42 @@ def drop_dbuser(self, dbuser): """ Drop a database user in postgres - :param dbuser: (str), Name of the user to be dropped. + :param str dbuser: Name of the user to be dropped. """ self.execute(_DROP_USER_COMMAND.format(dbuser)) - def dbuser_exists(self, dbuser): + def check_dbuser(self, dbuser): + """Looks up if a given user already exists, prompts for using or creating a differently named one. + + :param str dbuser: Name of the user to be created or reused. + :returns: tuple (dbuser, created) """ - Find out if postgres user with name dbuser exists + create = True + while create and self.dbuser_exists(dbuser): + echo.echo_info('Database user "{}" already exists!'.format(dbuser)) + if not click.confirm('Use it? '): + dbuser = click.prompt('New database user name: ', type=str, default=dbuser) + else: + create = False + return dbuser, create - :param dbuser: (str) database user to check for - :return: (bool) True if user exists, False otherwise + ### DB functions ### + + def db_exists(self, dbname): """ - return bool(self.execute(_GET_USERS_COMMAND.format(dbuser))) + Check whether a postgres database with dbname exists + + :param str dbname: Name of the database to check for + :return: (bool), True if database exists, False otherwise + """ + return bool(self.execute(_CHECK_DB_EXISTS_COMMAND.format(dbname))) def create_db(self, dbuser, dbname): """ Create a database in postgres - :param dbuser: (str), Name of the user which should own the db. - :param dbname: (str), Name of the database. + :param str dbuser: Name of the user which should own the db. + :param str dbname: Name of the database. """ self.execute(_CREATE_DB_COMMAND.format(dbname, dbuser)) self.execute(_GRANT_PRIV_COMMAND.format(dbname, dbuser)) @@ -128,28 +151,70 @@ def drop_db(self, dbname): """ Drop a database in postgres - :param dbname: (str), Name of the database. + :param str dbname: Name of the database. """ self.execute(_DROP_DB_COMMAND.format(dbname)) def copy_db(self, src_db, dest_db, dbuser): self.execute(_COPY_DB_COMMAND.format(dest_db, src_db, dbuser)) - def db_exists(self, dbname): + def check_db(self, dbname): + """Looks up if a database with the name exists, prompts for using or creating a differently named one. + + :param str dbname: Name of the database to be created or reused. + :returns: tuple (dbname, created) """ - Check wether a postgres database with dbname exists + create = True + while create and self.db_exists(dbname): + echo.echo_info('database {} already exists!'.format(dbname)) + if not click.confirm('Use it (make sure it is not used by another profile)?'): + dbname = click.prompt('new name', type=str, default=dbname) + else: + create = False + return dbname, create - :param dbname: Name of the database to check for - :return: (bool), True if database exists, False otherwise + def create_dbuser_db_safe(self, dbname, dbuser, dbpass): + """Create DB and user + grant privileges. + + Prompts when reusing existing users / databases.
""" - return bool(self.execute(_CHECK_DB_EXISTS_COMMAND.format(dbname))) + dbuser, create = self.check_dbuser(dbuser=dbuser) + if create: + self.create_dbuser(dbuser=dbuser, dbpass=dbpass) + + dbname, create = self.check_db(dbname=dbname) + if create: + self.create_db(dbuser, dbname) + + return dbuser, dbname + + @property + def host_for_psycopg2(self): + """Return correct host for psycopg2 connection (as required by regular AiiDA operation).""" + host = self.dsn.get('host') + if self.connection_mode == PostgresConnectionMode.PSQL: + # If "sudo su postgres" was needed to create the DB, we are likely on Ubuntu, where + # the same will *not* work for arbitrary database users => enforce TCP/IP connection + host = host or 'localhost' + + return host + + @property + def port_for_psycopg2(self): + """Return port for psycopg2 connection (as required by regular AiiDA operation).""" + return self.dsn.get('port') + + @property + def dbinfo(self): + """Alias for Postgres.dsn.""" + return self.dsn.copy() def manual_setup_instructions(dbuser, dbname): """Create a message with instructions for manually creating a database""" dbpass = '' instructions = '\n'.join([ - 'Please run the following commands as the user for PostgreSQL (Ubuntu: $sudo su postgres):', + 'Run the following commands as a UNIX user with access to PostgreSQL (Ubuntu: $ sudo su postgres):', '', '\t$ psql template1', '\t==> ' + _CREATE_USER_COMMAND.format(dbuser, dbpass), diff --git a/aiida/manage/tests/__init__.py b/aiida/manage/tests/__init__.py index 9ff04e39c8..cc4e2e5fbf 100644 --- a/aiida/manage/tests/__init__.py +++ b/aiida/manage/tests/__init__.py @@ -252,10 +252,11 @@ def __init__(self, backend=BACKEND_DJANGO, pgtest=None): # pylint: disable=supe self.postgres = None self._profile = None self._has_test_db = False - self._backup = {} - self._backup['config'] = configuration.CONFIG - self._backup['config_dir'] = settings.AIIDA_CONFIG_FOLDER - self._backup['profile'] = configuration.PROFILE + self._backup = { + 'config': configuration.CONFIG, + 'config_dir': settings.AIIDA_CONFIG_FOLDER, + 'profile': configuration.PROFILE, + } @property def profile_dictionary(self): @@ -264,10 +265,10 @@ def profile_dictionary(self): Used to set up AiiDA profile from self.profile_info dictionary. 
""" dictionary = { - 'database_engine': self.profile_info['database_engine'], - 'database_backend': self.profile_info['database_backend'], - 'database_port': self.dbinfo.get('port'), - 'database_hostname': self.dbinfo.get('host'), + 'database_engine': self.profile_info.get('database_engine'), + 'database_backend': self.profile_info.get('database_backend'), + 'database_port': self.profile_info.get('database_port'), + 'database_hostname': self.profile_info.get('database_hostname'), 'database_name': self.profile_info.get('database_name'), 'database_username': self.profile_info.get('database_username'), 'database_password': self.profile_info.get('database_password'), @@ -297,9 +298,12 @@ def create_aiida_db(self): if self.pg_cluster is None: self.create_db_cluster() self.postgres = Postgres(interactive=False, quiet=True, dbinfo=self.dbinfo) - self.dbinfo = self.postgres.dbinfo.copy() + # note: not using postgres.create_dbuser_db_safe here since we don't want prompts self.postgres.create_dbuser(self.profile_info['database_username'], self.profile_info['database_password']) self.postgres.create_db(self.profile_info['database_username'], self.profile_info['database_name']) + self.dbinfo = self.postgres.dbinfo + self.profile_info['database_hostname'] = self.postgres.host_for_psycopg2 + self.profile_info['database_port'] = self.postgres.port_for_psycopg2 self._has_test_db = True def create_profile(self): diff --git a/aiida/manage/tests/pytest_fixtures.py b/aiida/manage/tests/pytest_fixtures.py index 5e79c30bdf..310b07e944 100644 --- a/aiida/manage/tests/pytest_fixtures.py +++ b/aiida/manage/tests/pytest_fixtures.py @@ -7,6 +7,7 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### +# pylint: disable=redefined-outer-name,unused-argument """ Collection of pytest fixtures using the TestManager for easy testing of AiiDA plugins. @@ -16,8 +17,8 @@ * aiida_local_code_factory """ -import tempfile import shutil +import tempfile import pytest from aiida.manage.tests import test_manager, get_test_backend_name, get_test_profile_name @@ -30,14 +31,13 @@ def aiida_profile(): Note: scope='session' limits this fixture to run once per session. Thanks to ``autouse=True``, you don't actually need to depend on it explicitly - it will activate as soon as you import it in your ``conftest.py``. """ - # create new TestManager instance - with test_manager(backend=get_test_backend_name(), profile_name=get_test_profile_name()) as test_mgr: - yield test_mgr - # here, the TestManager instance has already been destroyed + with test_manager(backend=get_test_backend_name(), profile_name=get_test_profile_name()) as manager: + yield manager + # Leaving the context manager will automatically cause the `TestManager` instance to be destroyed @pytest.fixture(scope='function') -def clear_database(clear_database_after_test): # pylint: disable=redefined-outer-name,unused-argument +def clear_database(clear_database_after_test): """Alias for 'clear_database_after_test'. Clears the database after each test. Use of the explicit @@ -46,18 +46,15 @@ def clear_database(clear_database_after_test): # pylint: disable=redefined-oute @pytest.fixture(scope='function') -def clear_database_after_test(aiida_profile): # pylint: disable=redefined-outer-name - """Clear the database after each test. 
- """ +def clear_database_after_test(aiida_profile): + """Clear the database after the test.""" yield - # after the test function has completed, reset the database aiida_profile.reset_db() @pytest.fixture(scope='function') -def clear_database_before_test(aiida_profile): # pylint: disable=redefined-outer-name - """Clear the database before each test. - """ +def clear_database_before_test(aiida_profile): + """Clear the database before the test.""" aiida_profile.reset_db() yield @@ -81,7 +78,7 @@ def temp_dir(): @pytest.fixture(scope='function') -def aiida_localhost(temp_dir): # pylint: disable=redefined-outer-name +def aiida_localhost(temp_dir): """Get an AiiDA computer for localhost. Usage:: @@ -118,7 +115,7 @@ def test_1(aiida_localhost): @pytest.fixture(scope='function') -def aiida_local_code_factory(aiida_localhost): # pylint: disable=redefined-outer-name +def aiida_local_code_factory(aiida_localhost): """Get an AiiDA code on localhost. Searches in the PATH for a given executable and creates an AiiDA code with provided entry point. @@ -126,39 +123,56 @@ def aiida_local_code_factory(aiida_localhost): # pylint: disable=redefined-oute Usage:: def test_1(aiida_local_code_factory): - code = aiida_local_code_factory('pw.x', 'quantumespresso.pw') + code = aiida_local_code_factory('quantumespresso.pw', '/usr/bin/pw.x') # use code for testing ... - :return: A function get_code(executable, entry_point) that returns the Code node. + :return: A function get_code(entry_point, executable) that returns the `Code` node. :rtype: object """ - def get_code(entry_point, executable, computer=aiida_localhost): + def get_code(entry_point, executable, computer=aiida_localhost, label=None, prepend_text=None, append_text=None): """Get local code. + Sets up code for given entry point on given computer. :param entry_point: Entry point of calculation plugin :param executable: name of executable; will be searched for in local system PATH. :param computer: (local) AiiDA computer - :return: The code node + :param prepend_text: a string of code that will be put in the scheduler script before the execution of the code. + :param append_text: a string of code that will be put in the scheduler script after the execution of the code. + :return: the `Code` either retrieved from the database or created if it did not yet exist. 
:rtype: :py:class:`aiida.orm.Code` """ - from aiida.orm import Code + from aiida.common import exceptions + from aiida.orm import Code, Computer, QueryBuilder - codes = Code.objects.find(filters={'label': executable}) # pylint: disable=no-member - if codes: - return codes[0] + if label is None: + label = executable - executable_path = shutil.which(executable) + builder = QueryBuilder().append(Computer, filters={'uuid': computer.uuid}, tag='computer') + builder.append(Code, filters={'label': label, 'attributes.input_plugin': entry_point}, with_computer='computer') + try: + code = builder.one()[0] + except (exceptions.MultipleObjectsError, exceptions.NotExistent): + code = None + else: + return code + + executable_path = shutil.which(executable) if not executable_path: raise ValueError('The executable "{}" was not found in the $PATH.'.format(executable)) - code = Code( - input_plugin_name=entry_point, - remote_computer_exec=[computer, executable_path], - ) - code.label = executable + code = Code(input_plugin_name=entry_point, remote_computer_exec=[computer, executable_path]) + code.label = label + code.description = label + + if prepend_text is not None: + code.set_prepend_text(prepend_text) + + if append_text is not None: + code.set_append_text(append_text) + return code.store() return get_code diff --git a/aiida/orm/autogroup.py b/aiida/orm/autogroup.py index ed4551a3ad..06e83185e3 100644 --- a/aiida/orm/autogroup.py +++ b/aiida/orm/autogroup.py @@ -7,173 +7,269 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### - +"""Module to manage the autogrouping functionality by ``verdi run``.""" +import re +import warnings from aiida.common import exceptions, timezone -from aiida.orm import GroupTypeString +from aiida.common.escaping import escape_for_sql_like, get_regex_pattern_from_sql +from aiida.common.warnings import AiidaDeprecationWarning +from aiida.orm import AutoGroup +from aiida.plugins.entry_point import get_entry_point_string_from_class +CURRENT_AUTOGROUP = None -current_autogroup = None -VERDIAUTOGROUP_TYPE = GroupTypeString.VERDIAUTOGROUP_TYPE.value +class Autogroup: + """Class to create a new `AutoGroup` instance that will, while active, automatically contain all nodes being stored. -# TODO: make the Autogroup usable to the user, and not only to the verdi run + The autogrouping is checked by the `Node.store()` method which, if `CURRENT_AUTOGROUP is not None`, calls + `Autogroup.is_to_be_grouped` to decide whether to put the current node being stored in the current + `AutoGroup` instance. -class Autogroup: - """ - An object used for the autogrouping of objects. - The autogrouping is checked by the Node.store() method. - In the store(), the Node will check if current_autogroup is != None. - If so, it will call Autogroup.is_to_be_grouped, and decide whether to put it in a group. - Such autogroups are going to be of the VERDIAUTOGROUP_TYPE. - - The exclude/include lists, can have values 'all' if you want to include/exclude all classes. - Otherwise, they are lists of strings like: calculation.quantumespresso.pw, data.array.kpoints, ... - i.e.: a string identifying the base class, than the path to the class as in Calculation/Data -Factories + The exclude/include lists are lists of strings like: + ``aiida.data:int``, ``aiida.calculation:quantumespresso.pw``, + ``aiida.data:array.%``, ...
+ i.e.: a string identifying the base class, followed by a colon and the path to the class + as accepted by CalculationFactory/DataFactory. + Each string can contain one or more wildcard characters ``%``; + in this case this is used in a ``like`` comparison with the QueryBuilder. + Note that in this case you have to remember that ``_`` means "any character" + in the QueryBuilder, and you need to escape it if you mean a literal underscore. + + Only one of the two (between exclude and include) can be set. + If none of the two is set, everything is included. """ - def _validate(self, param, is_exact=True): - """ - Used internally to verify the sanity of exclude, include lists - """ - from aiida.plugins import CalculationFactory, DataFactory - - for i in param: - if not any([i.startswith('calculation'), - i.startswith('code'), - i.startswith('data'), - i == 'all', - ]): - raise exceptions.ValidationError('Module not recognized, allow prefixes ' - ' are: calculation, code or data') - the_param = [i + '.' for i in param] - - factorydict = {'calculation': locals()['CalculationFactory'], - 'data': locals()['DataFactory']} - - for i in the_param: - base, module = i.split('.', 1) - if base == 'code': - if module: - raise exceptions.ValidationError('Cannot have subclasses for codes') - elif base == 'all': - continue - else: - if is_exact: - try: - factorydict[base](module.rstrip('.')) - except exceptions.EntryPointError: - raise exceptions.ValidationError('Cannot find the class to be excluded') - return the_param + def __init__(self): + """Initialize with defaults.""" + self._exclude = None + self._include = None + + now = timezone.now() + default_label_prefix = 'Verdi autogroup on ' + now.strftime('%Y-%m-%d %H:%M:%S') + self._group_label_prefix = default_label_prefix + self._group_label = None  # Actual group label, set by `get_or_create_group` + + @staticmethod + def validate(strings): + """Validate the list of strings passed to set_include and set_exclude.""" + if strings is None: + return + valid_prefixes = set(['aiida.node', 'aiida.calculations', 'aiida.workflows', 'aiida.data']) + for string in strings: + pieces = string.split(':') + if len(pieces) != 2: + raise exceptions.ValidationError( + "'{}' is not a valid include/exclude filter, must contain two parts split by a colon". + format(string) + ) + if pieces[0] not in valid_prefixes: + raise exceptions.ValidationError( + "'{}' has an invalid prefix, must be among: {}".format(string, sorted(valid_prefixes)) + ) def get_exclude(self): - """Return the list of classes to exclude from autogrouping.""" - try: - return self.exclude - except AttributeError: - return [] + """Return the list of classes to exclude from autogrouping. - def get_exclude_with_subclasses(self): - """ - Return the list of classes to exclude from autogrouping. - Will also exclude their derived subclasses - """ - try: - return self.exclude_with_subclasses - except AttributeError: - return [] + Returns ``None`` if no exclusion list has been set.""" + return self._exclude def get_include(self): - """Return the list of classes to include in the autogrouping.""" - try: - return self.include - except AttributeError: - return [] - - def get_include_with_subclasses(self): """Return the list of classes to include in the autogrouping.
- Will also include their derived subclasses.""" - try: - return self.include_with_subclasses - except AttributeError: - return [] + + Returns ``None`` if no inclusion list has been set.""" + return self._include + + def get_group_label_prefix(self): + """Get the prefix of the label of the group. + If no group label prefix was set, it will set a default one by itself.""" + return self._group_label_prefix def get_group_name(self): - """Get the name of the group. - If no group name was set, it will set a default one by itself.""" - try: - return self.group_name - except AttributeError: - now = timezone.now() - gname = 'Verdi autogroup on ' + now.strftime('%Y-%m-%d %H:%M:%S') - self.set_group_name(gname) - return self.group_name + """Get the label of the group. + If no group label was set, it will set a default one by itself. - def set_exclude(self, exclude): - """Return the list of classes to exclude from autogrouping.""" - the_exclude_classes = self._validate(exclude) - if self.get_include() is not None: - if 'all.' in self.get_include(): - if 'all.' in the_exclude_classes: - raise exceptions.ValidationError('Cannot exclude and include all classes') - self.exclude = the_exclude_classes - - def set_exclude_with_subclasses(self, exclude): + .. deprecated:: 1.2.0 + Will be removed in `v2.0.0`, use :py:meth:`.get_group_label_prefix` instead. """ - Set the list of classes to exclude from autogrouping. - Will also exclude their derived subclasses + warnings.warn('function is deprecated, use `get_group_label_prefix` instead', AiidaDeprecationWarning) # pylint: disable=no-member + return self.get_group_label_prefix() + + def set_exclude(self, exclude): + """Set the list of classes to exclude in the autogrouping. + + :param exclude: a list of valid entry point strings (might contain '%' to be used as + string to be matched using SQL's ``LIKE`` pattern-making logic), or ``None`` + to specify no exclude list. """ - the_exclude_classes = self._validate(exclude, is_exact=False) - self.exclude_with_subclasses = the_exclude_classes + if isinstance(exclude, str): + exclude = [exclude] + self.validate(exclude) + if exclude is not None and self.get_include() is not None: + # It's ok to set None, both as a default, or to 'undo' the exclude list + raise exceptions.ValidationError('Cannot both specify exclude and include') + self._exclude = exclude def set_include(self, include): - """ - Set the list of classes to include in the autogrouping. - """ - the_include_classes = self._validate(include) - if self.get_exclude() is not None: - if 'all.' in self.get_exclude(): - if 'all.' in the_include_classes: - raise exceptions.ValidationError('Cannot exclude and include all classes') + """Set the list of classes to include in the autogrouping. - self.include = the_include_classes + :param include: a list of valid entry point strings (might contain '%' to be used as + string to be matched using SQL's ``LIKE`` pattern-making logic), or ``None`` + to specify no include list. + """ + if isinstance(include, str): + include = [include] + self.validate(include) + if include is not None and self.get_exclude() is not None: + # It's ok to set None, both as a default, or to 'undo' the include list + raise exceptions.ValidationError('Cannot both specify exclude and include') + self._include = include - def set_include_with_subclasses(self, include): + def set_group_label_prefix(self, label_prefix): """ - Set the list of classes to include in the autogrouping. - Will also include their derived subclasses.
+ Set the prefix of the label of the group to be created """ - the_include_classes = self._validate(include, is_exact=False) - self.include_with_subclasses = the_include_classes + if not isinstance(label_prefix, str): + raise exceptions.ValidationError('group label must be a string') + self._group_label_prefix = label_prefix def set_group_name(self, gname): + """Set the name of the group. + + .. deprecated:: 1.2.0 + Will be removed in `v2.0.0`, use :py:meth:`.set_group_label_prefix` instead. """ - Set the name of the group to be created + warnings.warn('function is deprecated, use `set_group_label_prefix` instead', AiidaDeprecationWarning) # pylint: disable=no-member + return self.set_group_label_prefix(label_prefix=gname) + + @staticmethod + def _matches(string, filter_string): + """Check if 'string' matches the 'filter_string' (used for include and exclude filters). + + If 'filter_string' does not contain any % sign, perform an exact match. + Otherwise, match with a SQL-like query, where % means any character sequence, + and _ means a single character (these characters can be escaped with a backslash). + + :param string: the string to match. + :param filter_string: the filter string. """ - if not isinstance(gname, str): - raise exceptions.ValidationError('group name must be a string') - self.group_name = gname + if '%' in filter_string: + regex_filter = get_regex_pattern_from_sql(filter_string) + return re.match(regex_filter, string) is not None + return string == filter_string - def is_to_be_grouped(self, the_class): + def is_to_be_grouped(self, node): """ - Return whether the given class has to be included in the autogroup according to include/exclude list + Return whether the given node has to be included in the autogroup according to include/exclude list - :return (bool): True if the_class is to be included in the autogroup + :return (bool): True if ``node`` is to be included in the autogroup """ + # strings, including possibly 'all' include = self.get_include() - include_ws = self.get_include_with_subclasses() - if (('all.' in include) or - (the_class._plugin_type_string in include) or - any([the_class._plugin_type_string.startswith(i) for i in include_ws]) - ): - exclude = self.get_exclude() - exclude_ws = self.get_exclude_with_subclasses() - if ((not 'all.' in exclude) or - (the_class._plugin_type_string in exclude) or - any([the_class._plugin_type_string.startswith(i) for i in exclude_ws]) - ): - return True - else: - return False + exclude = self.get_exclude() + if include is None and exclude is None: + # Include all classes by default if nothing is explicitly specified.
+ return True + + # We should never be here, anyway - this should be caught by the `set_include/exclude` methods + assert include is None or exclude is None, "You cannot specify both an 'include' and an 'exclude' list" + + entry_point_string = node.process_type + # If there is no `process_type` we are dealing with a `Data` node so we get the entry point from the class + if not entry_point_string: + entry_point_string = get_entry_point_string_from_class(node.__class__.__module__, node.__class__.__name__) + if include is not None: + # As soon as a filter string matches, we include the class + return any(self._matches(entry_point_string, filter_string) for filter_string in include) + # If we are here, exclude is not None + # include *only* if *none* of the filters match (that is, exclude as + # soon as any of the filters matches) + return not any(self._matches(entry_point_string, filter_string) for filter_string in exclude) + + def clear_group_cache(self): + """Clear the cache of the group name. + + This is mostly used by tests when they reset the database. + """ + self._group_label = None + + def get_or_create_group(self): + """Return the current `AutoGroup`, or create one if None has been set yet. + + This function implements a somewhat complex logic that is however needed + to make sure that, even if `verdi run` is called at the same time multiple + times, e.g. in a for loop in bash, there is never the risk that two ``verdi run`` + Unix processes try to create the same group, with the same label, ending + up in a crash of the code (see PR #3650). + + Here, instead, we make sure that if this concurrency issue happens, + one of the two will get an IntegrityError from the DB, and then recover + trying to create a group with a different label (with a numeric suffix appended), + until it manages to create it. + """ + from aiida.orm import QueryBuilder + + # When this function is called, if it is the first time, just generate + # a new group name (later on, after this ``if`` block). + # In that case, we will later cache in ``self._group_label`` the group label, + # so the group with the same name can be returned quickly in future + # calls of this method. + if self._group_label is not None: + builder = QueryBuilder().append(AutoGroup, filters={'label': self._group_label}) + results = [res[0] for res in builder.iterall()] + if results: + # If it is not empty, it should have only one result due to the uniqueness constraints + assert len(results) == 1, 'I got more than one autogroup with the same label!' + return results[0] + # There are no results: probably the group has been deleted.
+ # I continue as if it was not cached + self._group_label = None + + label_prefix = self.get_group_label_prefix() + # Try to do a preliminary QB query to avoid doing too many try/except + # if many of the prefix_NUMBER groups already exist + queryb = QueryBuilder().append( + AutoGroup, + filters={ + 'or': [{ + 'label': { + '==': label_prefix + } + }, { + 'label': { + 'like': escape_for_sql_like(label_prefix + '_') + '%' + } + }] + }, + project='label' + ) + existing_group_labels = [res[0][len(label_prefix):] for res in queryb.all()] + existing_group_ints = [] + for label in existing_group_labels: + if label == '': + # This is just the prefix without a name - corresponds to counter = 0 + existing_group_ints.append(0) + elif label.startswith('_'): + try: + existing_group_ints.append(int(label[1:])) + except ValueError: + # It's not an integer, so it will never collide - just ignore it + pass + + if not existing_group_ints: + counter = 0 else: - return False + counter = max(existing_group_ints) + 1 + + while True: + try: + label = label_prefix if counter == 0 else '{}_{}'.format(label_prefix, counter) + group = AutoGroup(label=label).store() + self._group_label = group.label + except exceptions.IntegrityError: + counter += 1 + else: + break + + return group diff --git a/aiida/orm/convert.py b/aiida/orm/convert.py index 197253cffd..d6b577773b 100644 --- a/aiida/orm/convert.py +++ b/aiida/orm/convert.py @@ -61,8 +61,9 @@ def _(backend_entity): @get_orm_entity.register(BackendGroup) def _(backend_entity): - from . import groups - return groups.Group.from_backend_entity(backend_entity) + from .groups import load_group_class + group_class = load_group_class(backend_entity.type_string) + return group_class.from_backend_entity(backend_entity) @get_orm_entity.register(BackendComputer) diff --git a/aiida/orm/groups.py b/aiida/orm/groups.py index cb7b4af801..7d43cbd4be 100644 --- a/aiida/orm/groups.py +++ b/aiida/orm/groups.py @@ -8,7 +8,7 @@ # For further information please visit http://www.aiida.net # ########################################################################### """ AiiDA Group entities""" - +from abc import ABCMeta from enum import Enum import warnings @@ -21,19 +21,63 @@ from . import entities from . import users -__all__ = ('Group', 'GroupTypeString') +__all__ = ('Group', 'GroupTypeString', 'AutoGroup', 'ImportGroup', 'UpfFamily') + + +def load_group_class(type_string): + """Load the sub class of `Group` that corresponds to the given `type_string`. + + .. note:: will fall back on `aiida.orm.groups.Group` if `type_string` cannot be resolved to a loadable entry point.
+ + :param type_string: the entry point name of the `Group` sub class + :return: sub class of `Group` registered through an entry point + """ + from aiida.common.exceptions import EntryPointError + from aiida.plugins.entry_point import load_entry_point + + try: + group_class = load_entry_point('aiida.groups', type_string) + except EntryPointError: + message = 'could not load entry point `{}`, falling back onto `Group` base class.'.format(type_string) + warnings.warn(message) # pylint: disable=no-member + group_class = Group + + return group_class + + +class GroupMeta(ABCMeta): + """Meta class for `aiida.orm.groups.Group` to automatically set the `type_string` attribute.""" + + def __new__(mcs, name, bases, namespace, **kwargs): + from aiida.plugins.entry_point import get_entry_point_from_class + + newcls = ABCMeta.__new__(mcs, name, bases, namespace, **kwargs) # pylint: disable=too-many-function-args + + entry_point_group, entry_point = get_entry_point_from_class(namespace['__module__'], name) + + if entry_point_group is None or entry_point_group != 'aiida.groups': + newcls._type_string = None + message = 'no registered entry point for `{}` so its instances will not be storable.'.format(name) + warnings.warn(message) # pylint: disable=no-member + else: + newcls._type_string = entry_point.name # pylint: disable=protected-access + + return newcls class GroupTypeString(Enum): - """A simple enum of allowed group type strings.""" + """A simple enum of allowed group type strings. + .. deprecated:: 1.2.0 + This enum is deprecated and will be removed in `v2.0.0`. + """ UPFGROUP_TYPE = 'data.upf' IMPORTGROUP_TYPE = 'auto.import' VERDIAUTOGROUP_TYPE = 'auto.run' USER = 'user' -class Group(entities.Entity): +class Group(entities.Entity, metaclass=GroupMeta): """An AiiDA ORM implementation of group of nodes.""" class Collection(entities.Collection): @@ -54,21 +98,10 @@ def get_or_create(self, label=None, **kwargs): if not label: raise ValueError('Group label must be provided') - filters = {'label': label} - - if 'type_string' in kwargs: - if not isinstance(kwargs['type_string'], str): - raise exceptions.ValidationError( - 'type_string must be {}, you provided an object of type ' - '{}'.format(str, type(kwargs['type_string'])) - ) - - filters['type_string'] = kwargs['type_string'] - - res = self.find(filters=filters) + res = self.find(filters={'label': label}) if not res: - return Group(label, backend=self.backend, **kwargs).store(), True + return self.entity_type(label, backend=self.backend, **kwargs).store(), True if len(res) > 1: raise exceptions.MultipleObjectsError('More than one groups found in the database') @@ -83,12 +116,15 @@ def delete(self, id): # pylint: disable=invalid-name, redefined-builtin """ self._backend.groups.delete(id) - def __init__(self, label=None, user=None, description='', type_string=GroupTypeString.USER.value, backend=None): + def __init__(self, label=None, user=None, description='', type_string=None, backend=None): """ Create a new group. Either pass a dbgroup parameter, to reload a group from the DB (and then, no further parameters are allowed), or pass the parameters for the Group creation. + .. deprecated:: 1.2.0 + The parameter `type_string` will be removed in `v2.0.0` and is now determined automatically. 
:param label: The group label, required on creation :type label: str @@ -105,12 +141,16 @@ def __init__(self, label=None, user=None, description='', type_string=GroupTypeS if not label: raise ValueError('Group label must be provided') - # Check that chosen type_string is allowed - if not isinstance(type_string, str): - raise exceptions.ValidationError( - 'type_string must be {}, you provided an object of type ' - '{}'.format(str, type(type_string)) - ) + if type_string is not None: + message = '`type_string` is deprecated because it is determined automatically' + warnings.warn(message) # pylint: disable=no-member + + # If `type_string` is explicitly defined, override automatically determined `self._type_string`. This is + # necessary for backwards compatibility. + if type_string is not None: + self._type_string = type_string + + type_string = self._type_string backend = backend or get_manager().get_backend() user = user or users.User.objects(backend).get_default() @@ -130,6 +170,13 @@ def __str__(self): return '"{}" [user-defined], of user {}'.format(self.label, self.user.email) + def store(self): + """Verify that the group is allowed to be stored, which is the case as long as `type_string` is set.""" + if self._type_string is None: + raise exceptions.StoringNotAllowed('`type_string` is `None` so the group cannot be stored.') + + return super().store() + @property def label(self): """ @@ -295,11 +342,7 @@ def get(cls, **kwargs): filters = {} if 'type_string' in kwargs: - if not isinstance(kwargs['type_string'], str): - raise exceptions.ValidationError( - 'type_string must be {}, you provided an object of type ' - '{}'.format(str, type(kwargs['type_string'])) - ) + type_check(kwargs['type_string'], str) query = QueryBuilder() for key, val in kwargs.items(): @@ -382,3 +425,15 @@ def get_schema(): 'type': 'unicode' } } + + +class AutoGroup(Group): + """Group to be used to contain selected nodes generated while `aiida.orm.autogroup.CURRENT_AUTOGROUP` is set.""" + + +class ImportGroup(Group): + """Group to be used to contain all nodes from an export archive that has been imported.""" + + +class UpfFamily(Group): + """Group that represents a pseudo potential family containing `UpfData` nodes.""" diff --git a/aiida/orm/implementation/groups.py b/aiida/orm/implementation/groups.py index 74349e25e6..f39314060f 100644 --- a/aiida/orm/implementation/groups.py +++ b/aiida/orm/implementation/groups.py @@ -101,7 +101,7 @@ def get_or_create(cls, *args, **kwargs): :return: (group, created) where group is the group (new or existing, in any case already stored) and created is a boolean saying """ - res = cls.query(name=kwargs.get('name'), type_string=kwargs.get('type_string')) + res = cls.query(name=kwargs.get('name')) if not res: return cls.create(*args, **kwargs), True diff --git a/aiida/orm/implementation/logs.py b/aiida/orm/implementation/logs.py index ad50d27913..5924d0d228 100644 --- a/aiida/orm/implementation/logs.py +++ b/aiida/orm/implementation/logs.py @@ -45,7 +45,7 @@ def loggername(self): The name of the logger that created this entry :return: The entry loggername - :rtype: basestring + :rtype: str """ @abc.abstractproperty @@ -54,7 +54,7 @@ def levelname(self): The name of the log level :return: The entry log level name - :rtype: basestring + :rtype: str """ @abc.abstractproperty @@ -72,7 +72,7 @@ def message(self): Get the message corresponding to the entry :return: The entry message - :rtype: basestring + :rtype: str """ @abc.abstractproperty diff --git a/aiida/orm/implementation/utils.py
b/aiida/orm/implementation/utils.py deleted file mode 100644 index 538b496769..0000000000 --- a/aiida/orm/implementation/utils.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Utility functions for AiiDA ORM implementations.""" - -__all__ = ('get_attr',) - - -def get_attr(attrs, key): - """ Get the attribute that corresponds to the given key""" - path = key.split('.') - - dict_ = attrs - for part in path: - if part.isdigit(): - part = int(part) - # Let it raise the appropriate exception - dict_ = dict_[part] - - return dict_ diff --git a/aiida/orm/logs.py b/aiida/orm/logs.py index ca83ddfdad..909cdb0add 100644 --- a/aiida/orm/logs.py +++ b/aiida/orm/logs.py @@ -40,7 +40,7 @@ def create_entry_from_record(record): Helper function to create a log entry from a record created as by the python logging library :param record: The record created by the logging module - :type record: :class:`logging.record` + :type record: :class:`logging.LogRecord` :return: An object implementing the log entry interface :rtype: :class:`aiida.orm.logs.Log` @@ -139,16 +139,16 @@ def __init__(self, time, loggername, levelname, dbnode_id, message='', metadata= :type time: :class:`!datetime.datetime` :param loggername: name of logger - :type loggername: basestring + :type loggername: str :param levelname: name of log level - :type levelname: basestring + :type levelname: str :param dbnode_id: id of database node :type dbnode_id: int :param message: log message - :type message: basestring + :type message: str :param metadata: metadata :type metadata: dict @@ -194,7 +194,7 @@ def loggername(self): The name of the logger that created this entry :return: The entry loggername - :rtype: basestring + :rtype: str """ return self._backend_entity.loggername @@ -204,7 +204,7 @@ def levelname(self): The name of the log level :return: The entry log level name - :rtype: basestring + :rtype: str """ return self._backend_entity.levelname @@ -224,7 +224,7 @@ def message(self): Get the message corresponding to the entry :return: The entry message - :rtype: basestring + :rtype: str """ return self._backend_entity.message diff --git a/aiida/orm/nodes/data/array/bands.py b/aiida/orm/nodes/data/array/bands.py index b636f00097..4c78eac4ea 100644 --- a/aiida/orm/nodes/data/array/bands.py +++ b/aiida/orm/nodes/data/array/bands.py @@ -825,7 +825,7 @@ def _prepare_mpl_singlefile(self, *args, **kwargs): s_header = matplotlib_header_template.substitute() s_import = matplotlib_import_data_inline_template.substitute(all_data_json=json.dumps(all_data, indent=2)) - s_body = matplotlib_body_template.substitute() + s_body = self._get_mpl_body_template(all_data['paths']) s_footer = matplotlib_footer_template_show.substitute() s = s_header + s_import + s_body + s_footer @@ -854,114 +854,13 @@ def _prepare_mpl_withjson(self, main_file_name='', *args, **kwargs): s_header = matplotlib_header_template.substitute() s_import = matplotlib_import_data_fromfile_template.substitute(json_fname=json_fname) - s_body = matplotlib_body_template.substitute() + s_body = 
self._get_mpl_body_template(all_data['paths']) s_footer = matplotlib_footer_template_show.substitute() s = s_header + s_import + s_body + s_footer return s.encode('utf-8'), ext_files - def _prepare_gnuplot(self, - main_file_name='', - title='', - comments=True, - prettify_format=None, - y_max_lim=None, - y_min_lim=None, - y_origin=0.): - """ - Prepare an gnuplot script to plot the bands, with the .dat file - returned as an independent file. - - :param main_file_name: if the user asks to write the main content on a - file, this contains the filename. This should be used to infer a - good filename for the additional files. - In this case, we remove the extension, and add '_data.dat' - :param title: if specified, add a title to the plot - :param comments: if True, print comments (if it makes sense for the given - format) - :param prettify_format: if None, use the default prettify format. Otherwise - specify a string with the prettifier to use. - """ - import os - - dat_filename = os.path.splitext(main_file_name)[0] + '_data.dat' - - if prettify_format is None: - # Default. Specified like this to allow caller functions to pass 'None' - prettify_format = 'gnuplot_seekpath' - - plot_info = self._get_bandplot_data( - cartesian=True, prettify_format=prettify_format, join_symbol='|', y_origin=y_origin) - - bands = plot_info['y'] - x = plot_info['x'] - labels = plot_info['labels'] - - num_labels = len(labels) - num_bands = bands.shape[1] - - # axis limits - if y_max_lim is None: - y_max_lim = bands.max() - if y_min_lim is None: - y_min_lim = bands.min() - x_min_lim = min(x) # this isn't a numpy array, but a list - x_max_lim = max(x) - - # first prepare the xy coordinates of the sets - raw_data, _ = self._prepare_dat_blocks(plot_info, comments=comments) - - xtics_string = ', '.join('"{}" {}'.format(label, pos) for pos, label in plot_info['labels']) - - script = [] - # Start with some useful comments - - if comments: - script.append(prepare_header_comment(self.uuid, plot_info=plot_info, comment_char='# ')) - script.append('') - - script.append(u"""## Uncomment the next two lines to write directly to PDF -## Note: You need to have gnuplot installed with pdfcairo support! -#set term pdfcairo -#set output 'out.pdf' - -### Uncomment one of the options below to change font -### For the LaTeX fonts, you can download them from here: -### https://sourceforge.net/projects/cm-unicode/ -### And then install them in your system -## LaTeX Serif font, if installed -#set termopt font "CMU Serif, 12" -## LaTeX Sans Serif font, if installed -#set termopt font "CMU Sans Serif, 12" -## Classical Times New Roman -#set termopt font "Times New Roman, 12" -""") - - # Actual logic - script.append('set termopt enhanced') # Properly deals with e.g. 
subscripts - script.append('set encoding utf8') # To deal with Greek letters - script.append('set xtics ({})'.format(xtics_string)) - script.append('set grid xtics lt 1 lc rgb "#888888"') - - script.append('unset key') - - script.append('set xrange [{}:{}]'.format(x_min_lim, x_max_lim)) - script.append('set yrange [{}:{}]'.format(y_min_lim, y_max_lim)) - - script.append('set ylabel "{}"'.format('Dispersion ({})'.format(self.units))) - - if title: - script.append('set title "{}"'.format(title.replace('"', '\"'))) - - # Plot, escaping filename - script.append('plot "{}" with l lc rgb "#000000"'.format(os.path.basename(dat_filename).replace('"', '\"'))) - - script_data = '\n'.join(script) + '\n' - extra_files = {dat_filename: raw_data} - - return script_data.encode('utf-8'), extra_files - def _prepare_mpl_pdf(self, main_file_name='', *args, **kwargs): """ Prepare a python script using matplotlib to plot the bands, with the JSON @@ -982,7 +881,7 @@ def _prepare_mpl_pdf(self, main_file_name='', *args, **kwargs): # Use the Agg backend s_header = matplotlib_header_agg_template.substitute() s_import = matplotlib_import_data_inline_template.substitute(all_data_json=json.dumps(all_data, indent=2)) - s_body = matplotlib_body_template.substitute() + s_body = self._get_mpl_body_template(all_data['paths']) # I get a temporary file name handle, filename = tempfile.mkstemp() @@ -1033,7 +932,7 @@ def _prepare_mpl_png(self, main_file_name='', *args, **kwargs): # Use the Agg backend s_header = matplotlib_header_agg_template.substitute() s_import = matplotlib_import_data_inline_template.substitute(all_data_json=json.dumps(all_data, indent=2)) - s_body = matplotlib_body_template.substitute() + s_body = self._get_mpl_body_template(all_data['paths']) # I get a temporary file name handle, filename = tempfile.mkstemp() @@ -1064,6 +963,17 @@ def _prepare_mpl_png(self, main_file_name='', *args, **kwargs): return imgdata, {} + @staticmethod + def _get_mpl_body_template(paths): + """ + :param paths: paths of k-points + """ + if len(paths) == 1: + s_body = matplotlib_body_template.substitute(plot_code=single_kp) + else: + s_body = matplotlib_body_template.substitute(plot_code=multi_kp) + return s_body + def show_mpl(self, **kwargs): """ Call a show() command for the band structure using matplotlib. @@ -1074,6 +984,113 @@ def show_mpl(self, **kwargs): """ exec(*self._exportcontent(fileformat='mpl_singlefile', main_file_name='', **kwargs)) # pylint: disable=exec-used + def _prepare_gnuplot(self, + main_file_name=None, + title='', + comments=True, + prettify_format=None, + y_max_lim=None, + y_min_lim=None, + y_origin=0.): + """ + Prepare a gnuplot script to plot the bands, with the .dat file + returned as an independent file. + + :param main_file_name: if the user asks to write the main content on a + file, this contains the filename. This should be used to infer a + good filename for the additional files. + In this case, we remove the extension, and add '_data.dat' + :param title: if specified, add a title to the plot + :param comments: if True, print comments (if it makes sense for the given + format) + :param prettify_format: if None, use the default prettify format. Otherwise + specify a string with the prettifier to use. + """ + import os + + main_file_name = main_file_name or 'band.dat' + dat_filename = os.path.splitext(main_file_name)[0] + '_data.dat' + + if prettify_format is None: + # Default.
Specified like this to allow caller functions to pass 'None' + prettify_format = 'gnuplot_seekpath' + + plot_info = self._get_bandplot_data( + cartesian=True, prettify_format=prettify_format, join_symbol='|', y_origin=y_origin) + + bands = plot_info['y'] + x = plot_info['x'] + labels = plot_info['labels'] + + num_labels = len(labels) + num_bands = bands.shape[1] + + # axis limits + if y_max_lim is None: + y_max_lim = bands.max() + if y_min_lim is None: + y_min_lim = bands.min() + x_min_lim = min(x) # this isn't a numpy array, but a list + x_max_lim = max(x) + + # first prepare the xy coordinates of the sets + raw_data, _ = self._prepare_dat_blocks(plot_info, comments=comments) + + xtics_string = ', '.join('"{}" {}'.format(label, pos) for pos, label in plot_info['labels']) + + script = [] + # Start with some useful comments + + if comments: + script.append(prepare_header_comment(self.uuid, plot_info=plot_info, comment_char='# ')) + script.append('') + + script.append(u"""## Uncomment the next two lines to write directly to PDF +## Note: You need to have gnuplot installed with pdfcairo support! +#set term pdfcairo +#set output 'out.pdf' + +### Uncomment one of the options below to change font +### For the LaTeX fonts, you can download them from here: +### https://sourceforge.net/projects/cm-unicode/ +### And then install them in your system +## LaTeX Serif font, if installed +#set termopt font "CMU Serif, 12" +## LaTeX Sans Serif font, if installed +#set termopt font "CMU Sans Serif, 12" +## Classical Times New Roman +#set termopt font "Times New Roman, 12" +""") + + # Actual logic + script.append('set termopt enhanced') # Properly deals with e.g. subscripts + script.append('set encoding utf8') # To deal with Greek letters + script.append('set xtics ({})'.format(xtics_string)) + + script.append('unset key') + + + script.append('set yrange [{}:{}]'.format(y_min_lim, y_max_lim)) + + script.append('set ylabel "{}"'.format('Dispersion ({})'.format(self.units))) + + if title: + script.append('set title "{}"'.format(title.replace('"', '\"'))) + + # Plot, escaping filename + if len(x) > 1: + script.append('set xrange [{}:{}]'.format(x_min_lim, x_max_lim)) + script.append('set grid xtics lt 1 lc rgb "#888888"') + script.append('plot "{}" with l lc rgb "#000000"'.format(os.path.basename(dat_filename).replace('"', '\"'))) + else: + script.append('set xrange [-1.0:1.0]') + script.append('plot "{}" using ($1-0.25):($2):(0.5):(0) with vectors nohead lc rgb "#000000"'.format(os.path.basename(dat_filename).replace('"', '\"'))) + + script_data = '\n'.join(script) + '\n' + extra_files = {dat_filename: raw_data} + + return script_data.encode('utf-8'), extra_files + def _prepare_agr(self, main_file_name='', comments=True, @@ -1646,6 +1663,42 @@ def _prepare_json(self, main_file_name='', comments=True): all_data_str = f.read() ''') +multi_kp = ''' +for path in paths: + if path['length'] <= 1: + # Avoid printing empty lines + continue + x = path['x'] + #for band in bands: + for band, band_type in zip(path['values'], all_data['band_type_idx']): + + # For now we support only two colors + if band_type % 2 == 0: + further_plot_options = further_plot_options1 + else: + further_plot_options = further_plot_options2 + + # Put the legend text only once + label = None + if first_band_1 and band_type % 2 == 0: + first_band_1 = False + label = all_data.get('legend_text', None) + elif first_band_2 and band_type % 2 == 1: + first_band_2 = False + label = all_data.get('legend_text2', None) + + p.plot(x, band, label=label, + 
**further_plot_options + ) ''' + +single_kp = ''' +path = paths[0] +values = path['values'] +x = [path['x'] for _ in values] +p.scatter(x, values, marker="_") +''' + matplotlib_body_template = Template('''all_data = json.loads(all_data_str) if not all_data.get('use_latex', False): @@ -1700,33 +1753,7 @@ def _prepare_json(self, main_file_name='', comments=True): first_band_1 = True first_band_2 = True -for path in paths: - if path['length'] <= 1: - # Avoid printing empty lines - continue - x = path['x'] - #for band in bands: - for band, band_type in zip(path['values'], all_data['band_type_idx']): - - # For now we support only two colors - if band_type % 2 == 0: - further_plot_options = further_plot_options1 - else: - further_plot_options = further_plot_options2 - - # Put the legend text only once - label = None - if first_band_1 and band_type % 2 == 0: - first_band_1 = False - label = all_data.get('legend_text', None) - elif first_band_2 and band_type % 2 == 1: - first_band_2 = False - label = all_data.get('legend_text2', None) - - p.plot(x, band, label=label, - **further_plot_options - ) - +${plot_code} p.set_xticks(tick_pos) p.set_xticklabels(tick_labels) diff --git a/aiida/orm/nodes/data/array/xy.py b/aiida/orm/nodes/data/array/xy.py index f652987504..a3d2674320 100644 --- a/aiida/orm/nodes/data/array/xy.py +++ b/aiida/orm/nodes/data/array/xy.py @@ -43,10 +43,10 @@ class XyData(ArrayData): def _arrayandname_validator(self, array, name, units): """ Validates that the array is a numpy.ndarray and that the name is - of type basestring. Raises InputValidationError if this not the case. + of type str. Raises InputValidationError if this is not the case. """ if not isinstance(name, str): - raise InputValidationError('The name must always be an instance of basestring.') + raise InputValidationError('The name must always be a str.') if not isinstance(array, np.ndarray): raise InputValidationError('The input array must always be a numpy array') @@ -55,7 +55,7 @@ def _arrayandname_validator(self, array, name, units): except ValueError: raise InputValidationError('The input array must only contain floats') if not isinstance(units, str): - raise InputValidationError('The units must always be an instance of basestring.') + raise InputValidationError('The units must always be a str.') def set_x(self, x_array, x_name, x_units): """ diff --git a/aiida/orm/nodes/data/remote.py b/aiida/orm/nodes/data/remote.py index 03a089e515..03e6bf6453 100644 --- a/aiida/orm/nodes/data/remote.py +++ b/aiida/orm/nodes/data/remote.py @@ -55,11 +55,10 @@ def is_empty(self): def getfile(self, relpath, destpath): """ - Connects to the remote folder and gets a string with the (full) content of the file. + Connects to the remote folder and retrieves the content of a file. - :param relpath: The relative path of the file to show. - :param destpath: A path on the local computer to get the file - :return: a string with the file content + :param relpath: The relative path of the file on the remote to retrieve. + :param destpath: The absolute path of where to store the file on the local machine. """ authinfo = self.get_authinfo() t = authinfo.get_transport() @@ -76,8 +75,6 @@ def getfile(self, relpath, destpath): else: raise - return t.listdir() - def listdir(self, relpath='.'): """ Connects to the remote folder and lists the directory content.
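A usage note, as an illustration outside the diff: after the fix above, `RemoteData.getfile` only copies the remote file to the given local path and no longer returns a directory listing; the node pk below is hypothetical.

```python
# Hypothetical usage of the corrected getfile contract: the stray
# `return t.listdir()` is gone, so the method copies the file and returns None.
from aiida import load_profile
from aiida.orm import load_node

load_profile()
remote = load_node(1234)  # assume pk 1234 is a RemoteData node
remote.getfile('aiida.out', '/tmp/aiida.out')  # copies the file, no return value
print(remote.listdir('.'))  # listing the folder is a separate, explicit call
```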
diff --git a/aiida/orm/nodes/data/upf.py b/aiida/orm/nodes/data/upf.py index d35e1b35ee..33cf9b6421 100644 --- a/aiida/orm/nodes/data/upf.py +++ b/aiida/orm/nodes/data/upf.py @@ -8,20 +8,14 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Module of `Data` sub class to represent a pseudopotential single file in UPF format and related utilities.""" - import json import re from upf_to_json import upf_to_json - -from aiida.common.lang import classproperty -from aiida.orm import GroupTypeString from .singlefile import SinglefileData __all__ = ('UpfData',) -UPFGROUP_TYPE = GroupTypeString.UPFGROUP_TYPE.value - REGEX_UPF_VERSION = re.compile(r""" \s*<UPF\s+version\s*="(?P<version>.*)"> @@ -107,9 +101,7 @@ def upload_upf_family(folder, group_label, group_description, stop_if_existing=T nfiles = len(filenames) automatic_user = orm.User.objects.get_default() - group, group_created = orm.Group.objects.get_or_create( - label=group_label, type_string=UPFGROUP_TYPE, user=automatic_user - ) + group, group_created = orm.UpfFamily.objects.get_or_create(label=group_label, user=automatic_user) if group.user.email != automatic_user.email: raise UniquenessError( @@ -312,12 +304,6 @@ def get_or_create(cls, filepath, use_first=False, store_upf=True): return (pseudos[0], False) - @classproperty - def upffamily_type_string(cls): - """Return the type string used for UPF family groups.""" - # pylint: disable=no-self-argument,no-self-use - return UPFGROUP_TYPE - def store(self, *args, **kwargs): """Store the node, reparsing the file so that the md5 and the element are correctly reset.""" # pylint: disable=arguments-differ @@ -388,11 +374,11 @@ def set_file(self, file, filename=None): def get_upf_family_names(self): """Get the list of all upf family names to which the pseudo belongs.""" - from aiida.orm import Group + from aiida.orm import UpfFamily from aiida.orm import QueryBuilder query = QueryBuilder() - query.append(Group, filters={'type_string': {'==': self.upffamily_type_string}}, tag='group', project='label') + query.append(UpfFamily, tag='group', project='label') query.append(UpfData, filters={'id': {'==': self.id}}, with_group='group') return [label for label, in query.all()] @@ -465,9 +451,9 @@ def get_upf_group(cls, group_label): :param group_label: the family group label :return: the `Group` with the given label, if it exists """ - from aiida.orm import Group + from aiida.orm import UpfFamily - return Group.get(label=group_label, type_string=cls.upffamily_type_string) + return UpfFamily.get(label=group_label) @classmethod def get_upf_groups(cls, filter_elements=None, user=None): @@ -480,12 +466,12 @@ def get_upf_groups(cls, filter_elements=None, user=None): If defined, it should be either a `User` instance or the user email. :return: list of `Group` entities of type UPF.
""" - from aiida.orm import Group + from aiida.orm import UpfFamily from aiida.orm import QueryBuilder from aiida.orm import User builder = QueryBuilder() - builder.append(Group, filters={'type_string': {'==': cls.upffamily_type_string}}, tag='group', project='*') + builder.append(UpfFamily, tag='group', project='*') if user: builder.append(User, filters={'email': {'==': user}}, with_group='group') @@ -496,7 +482,7 @@ def get_upf_groups(cls, filter_elements=None, user=None): if filter_elements is not None: builder.append(UpfData, filters={'attributes.element': {'in': filter_elements}}, with_group='group') - builder.order_by({Group: {'id': 'asc'}}) + builder.order_by({UpfFamily: {'id': 'asc'}}) return [group for group, in builder.all()] diff --git a/aiida/orm/nodes/node.py b/aiida/orm/nodes/node.py index 86f6d9ace3..f776809952 100644 --- a/aiida/orm/nodes/node.py +++ b/aiida/orm/nodes/node.py @@ -23,6 +23,7 @@ from aiida.orm.utils.links import LinkManager, LinkTriple from aiida.orm.utils.repository import Repository from aiida.orm.utils.node import AbstractNodeMeta, validate_attribute_extra_key +from aiida.orm import autogroup from ..comments import Comment from ..computers import Computer @@ -163,6 +164,20 @@ def _validate(self): # pylint: disable=no-self-use return True + def validate_storability(self): + """Verify that the current node is allowed to be stored. + + :raises `aiida.common.exceptions.StoringNotAllowed`: if the node does not match all requirements for storing + """ + from aiida.plugins.entry_point import is_registered_entry_point + + if not self._storable: + raise exceptions.StoringNotAllowed(self._unstorable_message) + + if not is_registered_entry_point(self.__module__, self.__class__.__name__, groups=('aiida.node', 'aiida.data')): + msg = 'class `{}:{}` does not have registered entry point'.format(self.__module__, self.__class__.__name__) + raise exceptions.StoringNotAllowed(msg) + @classproperty def class_node_type(cls): """Returns the node type of this node (sub) class.""" @@ -998,11 +1013,10 @@ def store(self, with_transaction=True, use_cache=None): # pylint: disable=argum 'the `use_cache` argument is deprecated and will be removed in `v2.0.0`', AiidaDeprecationWarning ) - if not self._storable: - raise exceptions.StoringNotAllowed(self._unstorable_message) - if not self.is_stored: + # Call `validate_storability` directly and not in `_validate` in case sub class forgets to call the super. + self.validate_storability() self._validate() # Verify that parents are already stored. Raises if this is not the case. 
@@ -1024,18 +1038,9 @@ def store(self, with_transaction=True, use_cache=None): # pylint: disable=argum self._store(with_transaction=with_transaction, clean=True) # Set up autogrouping used by verdi run - from aiida.orm.autogroup import current_autogroup, Autogroup, VERDIAUTOGROUP_TYPE - from aiida.orm import Group - - if current_autogroup is not None: - if not isinstance(current_autogroup, Autogroup): - raise exceptions.ValidationError('`current_autogroup` is not of type `Autogroup`') - - if current_autogroup.is_to_be_grouped(self): - group_label = current_autogroup.get_group_name() - if group_label is not None: - group = Group.objects.get_or_create(label=group_label, type_string=VERDIAUTOGROUP_TYPE)[0] - group.add_nodes(self) + if autogroup.CURRENT_AUTOGROUP is not None and autogroup.CURRENT_AUTOGROUP.is_to_be_grouped(self): + group = autogroup.CURRENT_AUTOGROUP.get_or_create_group() + group.add_nodes(self) return self diff --git a/aiida/orm/querybuilder.py b/aiida/orm/querybuilder.py index 7f622bed06..8a599522f6 100644 --- a/aiida/orm/querybuilder.py +++ b/aiida/orm/querybuilder.py @@ -49,6 +49,14 @@ _LOGGER = logging.getLogger(__name__) +# This global variable is necessary to enable the subclassing functionality for the `Group` entity. The current +# implementation of the `QueryBuilder` was written with the assumption that only `Node` was subclassable. Support for +# subclassing was added later for `Group` and is based on its `type_string`, but the current implementation does not +# allow to extend this support to the `QueryBuilder` in an elegant way. The prefix `group.` needs to be used in various +# places to make it work, but really the internals of the `QueryBuilder` should be rewritten to in principle support +# subclassing for any entity type. This workaround should then be able to be removed. +GROUP_ENTITY_TYPE_PREFIX = 'group.' + def get_querybuilder_classifiers_from_cls(cls, qb): """ @@ -83,10 +91,10 @@ def get_querybuilder_classifiers_from_cls(cls, qb): # Groups: elif issubclass(cls, qb.Group): - classifiers['ormclass_type_string'] = 'group' + classifiers['ormclass_type_string'] = GROUP_ENTITY_TYPE_PREFIX + cls._type_string ormclass = cls elif issubclass(cls, groups.Group): - classifiers['ormclass_type_string'] = 'group' + classifiers['ormclass_type_string'] = GROUP_ENTITY_TYPE_PREFIX + cls._type_string ormclass = qb.Group # Computers: @@ -164,7 +172,8 @@ def get_querybuilder_classifiers_from_type(ormclass_type_string, qb): classifiers['process_type_string'] = None classifiers['ormclass_type_string'] = ormclass_type_string.lower() - if classifiers['ormclass_type_string'] == 'group': + if classifiers['ormclass_type_string'].startswith(GROUP_ENTITY_TYPE_PREFIX): + classifiers['ormclass_type_string'] = 'group.core' ormclass = qb.Group elif classifiers['ormclass_type_string'] == 'computer': ormclass = qb.Computer @@ -179,11 +188,10 @@ def get_querybuilder_classifiers_from_type(ormclass_type_string, qb): if ormclass == qb.Node: is_valid_node_type_string(classifiers['ormclass_type_string'], raise_on_false=True) - return ormclass, classifiers -def get_type_filter(classifiers, subclassing): +def get_node_type_filter(classifiers, subclassing): """ Return filter dictionaries given a set of classifiers. @@ -199,13 +207,14 @@ def get_type_filter(classifiers, subclassing): value = classifiers['ormclass_type_string'] if not subclassing: - filter = {'==': value} + filters = {'==': value} else: # Note: the query_type_string always ends with a dot. 
This ensures that "like {str}%" matches *only* # the query type string - filter = {'like': '{}%'.format(escape_for_sql_like(get_query_type_from_type_string(value)))} + filters = {'like': '{}%'.format(escape_for_sql_like(get_query_type_from_type_string(value)))} + + return filters - return filter def get_process_type_filter(classifiers, subclassing): """ @@ -229,7 +238,7 @@ def get_process_type_filter(classifiers, subclassing): value = classifiers['process_type_string'] if not subclassing: - filter = {'==': value} + filters = {'==': value} else: if ':' in value: # if value is an entry point, do usual subclassing @@ -237,7 +246,7 @@ def get_process_type_filter(classifiers, subclassing): # Note: the process_type_string stored in the database does *not* end in a dot. # In order to avoid that querying for class 'Begin' will also find class 'BeginEnd', # we need to search separately for equality and 'like'. - filter = {'or': [ + filters = {'or': [ {'==': value}, {'like': escape_for_sql_like(get_query_string_from_process_type_string(value))}, ]} @@ -248,19 +257,46 @@ def get_process_type_filter(classifiers, subclassing): # between process classes and node classes # Note: Improve this when issue #2475 is addressed - filter = {'like': '%'} + filters = {'like': '%'} else: warnings.warn("Process type '{}' does not correspond to a registered entry. " 'This risks queries to fail once the location of the process class changes. ' "Add an entry point for '{}' to remove this warning.".format(value, value), AiidaEntryPointWarning) - filter = {'or': [ + filters = {'or': [ {'==': value}, {'like': escape_for_sql_like(get_query_string_from_process_type_string(value))}, ]} + return filters + + +def get_group_type_filter(classifiers, subclassing): + """Return filter dictionaries for `Group.type_string` given a set of classifiers. + + :param classifiers: a dictionary with classifiers (note: does *not* support lists) + :param subclassing: if True, allow for subclasses of the ormclass + + :returns: dictionary in QueryBuilder filter language to pass into {'type_string': ... } + :rtype: dict + """ + from aiida.common.escaping import escape_for_sql_like + + value = classifiers['ormclass_type_string'].lstrip(GROUP_ENTITY_TYPE_PREFIX) + + if not subclassing: + filters = {'==': value} + else: + # This is a hardcoded solution to the problem that the base class `Group` should match all subclasses, however + # its entry point string is `core` and so will only match those subclasses whose entry point also starts with + # 'core', however, this is only the case for group subclasses shipped with `aiida-core`. Any plugins from + # external packages will never be matched. Making the entry point name of `Group` an empty string is also not + # possible so we perform the switch here in code. + if value == 'core': + value = '' + filters = {'like': '{}%'.format(escape_for_sql_like(value))} - return filter + return filters class QueryBuilder: @@ -692,20 +728,16 @@ def append(self, # FILTERS ###################################### try: self._filters[tag] = {} - # So far, only Node and its subclasses need additional filters on column type - # (for other classes, the "classifi. - # This so far only is necessary for AiidaNodes not for groups. - # Now here there is the issue that for everything else, - # the query_type_string is either None (e.g. if Group was passed) - # or a list of None (if (Group, ) was passed. 
- # Here we have to only call the function _add_type_filter essentially if it makes sense to - # For now that is only nodes, and it is hardcoded. In the future (e.g. we subclass group) - # this has to be added + # Subclassing is currently only implemented for the `Node` and `Group` classes. So for those cases we need + # to construct the correct filters corresponding to the provided classes and value of `subclassing`. if ormclass == self._impl.Node: - self._add_type_filter(tag, classifiers, subclassing) + self._add_node_type_filter(tag, classifiers, subclassing) self._add_process_type_filter(tag, classifiers, subclassing) - # The order has to be first _add_type_filter and then add_filter. + elif ormclass == self._impl.Group: + self._add_group_type_filter(tag, classifiers, subclassing) + + # The order has to be first _add_node_type_filter and then add_filter. # If the user adds a query on the type column, it overwrites what I did # if the user specified a filter, add it: if filters is not None: @@ -993,23 +1025,21 @@ def _process_filters(self, filters): return processed_filters - def _add_type_filter(self, tagspec, classifiers, subclassing): + def _add_node_type_filter(self, tagspec, classifiers, subclassing): """ - Add a filter based on type. + Add a filter based on node type. :param tagspec: The tag, which has to exist already as a key in self._filters :param classifiers: a dictionary with classifiers :param subclassing: if True, allow for subclasses of the ormclass """ - tag = self._get_tag_from_specification(tagspec) - if isinstance(classifiers, list): # If a list was passed to QueryBuilder.append, this propagates to a list in the classifiers entity_type_filter = {'or': []} for c in classifiers: - entity_type_filter['or'].append(get_type_filter(c, subclassing)) + entity_type_filter['or'].append(get_node_type_filter(c, subclassing)) else: - entity_type_filter = get_type_filter(classifiers, subclassing) + entity_type_filter = get_node_type_filter(classifiers, subclassing) self.add_filter(tagspec, {'node_type': entity_type_filter}) @@ -1023,8 +1053,6 @@ def _add_process_type_filter(self, tagspec, classifiers, subclassing): Note: This function handles the case when process_type_string is None. """ - tag = self._get_tag_from_specification(tagspec) - if isinstance(classifiers, list): # If a list was passed to QueryBuilder.append, this propagates to a list in the classifiers process_type_filter = {'or': []} @@ -1040,6 +1068,23 @@ def _add_process_type_filter(self, tagspec, classifiers, subclassing): process_type_filter = get_process_type_filter(classifiers, subclassing) self.add_filter(tagspec, {'process_type': process_type_filter}) + def _add_group_type_filter(self, tagspec, classifiers, subclassing): + """ + Add a filter based on group type. 
+ + :param tagspec: The tag, which has to exist already as a key in self._filters + :param classifiers: a dictionary with classifiers + :param subclassing: if True, allow for subclasses of the ormclass + """ + if isinstance(classifiers, list): + # If a list was passed to QueryBuilder.append, this propagates to a list in the classifiers + type_string_filter = {'or': []} + for classifier in classifiers: + type_string_filter['or'].append(get_group_type_filter(classifier, subclassing)) + else: + type_string_filter = get_group_type_filter(classifiers, subclassing) + + self.add_filter(tagspec, {'type_string': type_string_filter}) def add_projection(self, tag_spec, projection_spec): r""" @@ -1678,10 +1723,14 @@ def _get_connecting_node(self, index, joining_keyword=None, joining_value=None, :param joining_value: the tag of the nodes to be joined """ # Set the calling entity - to allow for the correct join relation to be set - if self._path[index]['entity_type'] not in ['computer', 'user', 'group', 'comment', 'log']: + entity_type = self._path[index]['entity_type'] + + if isinstance(entity_type, str) and entity_type.startswith(GROUP_ENTITY_TYPE_PREFIX): + calling_entity = 'group' + elif entity_type not in ['computer', 'user', 'comment', 'log']: calling_entity = 'node' else: - calling_entity = self._path[index]['entity_type'] + calling_entity = entity_type if joining_keyword == 'direction': if joining_value > 0: diff --git a/aiida/orm/utils/loaders.py b/aiida/orm/utils/loaders.py index 5e73ff46e1..ecf08e6215 100644 --- a/aiida/orm/utils/loaders.py +++ b/aiida/orm/utils/loaders.py @@ -456,15 +456,19 @@ def _get_query_builder_label_identifier(cls, identifier, classes, operator='==', :raises ValueError: if the identifier is invalid :raises aiida.common.NotExistent: if the orm base class does not support a LABEL like identifier """ + from aiida.common.escaping import escape_for_sql_like from aiida.orm import Computer try: - label, _, machinename = identifier.partition('@') + identifier, _, machinename = identifier.partition('@') except AttributeError: raise ValueError('the identifier needs to be a string') + if operator == 'like': + identifier = escape_for_sql_like(identifier) + '%' + builder = QueryBuilder() - builder.append(cls=classes, tag='code', project=project, filters={'label': {'==': label}}) + builder.append(cls=classes, tag='code', project=project, filters={'label': {operator: identifier}}) if machinename: builder.append(Computer, filters={'name': {'==': machinename}}, with_node='code') diff --git a/aiida/orm/utils/node.py b/aiida/orm/utils/node.py index f48ec2ae16..0432964467 100644 --- a/aiida/orm/utils/node.py +++ b/aiida/orm/utils/node.py @@ -12,6 +12,7 @@ import logging import math import numbers +import warnings from collections.abc import Iterable, Mapping from aiida.common import exceptions @@ -70,7 +71,14 @@ def load_node_class(type_string): entry_point_name = strip_prefix(base_path, 'nodes.') return load_entry_point('aiida.node', entry_point_name) - raise exceptions.EntryPointError('unknown type string {}'.format(type_string)) + # At this point we really have an anomalous type string. At some point, storing nodes with unresolvable type strings + # was allowed, for example by creating a sub class in a shell and then storing an instance. Attempting to load the + # node then would fail miserably. This is now no longer allowed, but we need a fallback for existing cases, which + # should be rare. 
We fallback on `Data` and not `Node` because bare node instances are also not storable and so the + # logic of the ORM is not well defined for a loaded instance of the base `Node` class. + warnings.warn('unknown type string `{}`, falling back onto `Data` class'.format(type_string)) # pylint: disable=no-member + + return Data def get_type_string_from_class(class_module, class_name): @@ -82,13 +90,13 @@ def get_type_string_from_class(class_module, class_name): :param class_module: module of the class :param class_name: name of the class """ - from aiida.plugins.entry_point import get_entry_point_from_class, entry_point_group_to_module_path_map + from aiida.plugins.entry_point import get_entry_point_from_class, ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP group, entry_point = get_entry_point_from_class(class_module, class_name) # If we can reverse engineer an entry point group and name, we're dealing with an external class if group and entry_point: - module_base_path = entry_point_group_to_module_path_map[group] + module_base_path = ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP[group] type_string = '{}.{}.{}.'.format(module_base_path, entry_point.name, class_name) # Otherwise we are dealing with an internal class @@ -247,14 +255,10 @@ def clean_builtin(val): class AbstractNodeMeta(ABCMeta): # pylint: disable=too-few-public-methods - """ - Some python black magic to set correctly the logger also in subclasses. - """ - - # pylint: disable=arguments-differ,protected-access,too-many-function-args + """Some python black magic to set correctly the logger also in subclasses.""" - def __new__(mcs, name, bases, namespace): - newcls = ABCMeta.__new__(mcs, name, bases, namespace) + def __new__(mcs, name, bases, namespace): # pylint: disable=arguments-differ,protected-access,too-many-function-args + newcls = ABCMeta.__new__(mcs, name, bases, namespace) # pylint: disable=too-many-function-args newcls._logger = logging.getLogger('{}.{}'.format(namespace['__module__'], name)) # Set the plugin type string and query type string based on the plugin type string diff --git a/aiida/plugins/entry_point.py b/aiida/plugins/entry_point.py index 92a496927b..46e4bf3c7e 100644 --- a/aiida/plugins/entry_point.py +++ b/aiida/plugins/entry_point.py @@ -7,7 +7,7 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### - +"""Module to manage loading entrypoints.""" import enum import traceback import functools @@ -24,7 +24,6 @@ __all__ = ('load_entry_point', 'load_entry_point_from_string') - ENTRY_POINT_GROUP_PREFIX = 'aiida.' ENTRY_POINT_STRING_SEPARATOR = ':' @@ -51,10 +50,11 @@ class EntryPointFormat(enum.Enum): MINIMAL = 3 -entry_point_group_to_module_path_map = { +ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP = { 'aiida.calculations': 'aiida.orm.nodes.process.calculation.calcjob', 'aiida.cmdline.data': 'aiida.cmdline.data', 'aiida.data': 'aiida.orm.nodes.data', + 'aiida.groups': 'aiida.orm.groups', 'aiida.node': 'aiida.orm.nodes', 'aiida.parsers': 'aiida.parsers.plugins', 'aiida.schedulers': 'aiida.schedulers.plugins', @@ -65,7 +65,7 @@ class EntryPointFormat(enum.Enum): } -def validate_registered_entry_points(): +def validate_registered_entry_points(): # pylint: disable=invalid-name """Validate all registered entry points by loading them with the corresponding factory. :raises EntryPointError: if any of the registered entry points cannot be loaded. 
This can happen if: @@ -79,6 +79,7 @@ def validate_registered_entry_points(): factory_mapping = { 'aiida.calculations': factories.CalculationFactory, 'aiida.data': factories.DataFactory, + 'aiida.groups': factories.GroupFactory, 'aiida.parsers': factories.ParserFactory, 'aiida.schedulers': factories.SchedulerFactory, 'aiida.transports': factories.TransportFactory, @@ -108,12 +109,11 @@ def format_entry_point_string(group, name, fmt=EntryPointFormat.FULL): if fmt == EntryPointFormat.FULL: return '{}{}{}'.format(group, ENTRY_POINT_STRING_SEPARATOR, name) - elif fmt == EntryPointFormat.PARTIAL: + if fmt == EntryPointFormat.PARTIAL: return '{}{}{}'.format(group[len(ENTRY_POINT_GROUP_PREFIX):], ENTRY_POINT_STRING_SEPARATOR, name) - elif fmt == EntryPointFormat.MINIMAL: + if fmt == EntryPointFormat.MINIMAL: return '{}'.format(name) - else: - raise ValueError('invalid EntryPointFormat') + raise ValueError('invalid EntryPointFormat') def parse_entry_point_string(entry_point_string): @@ -146,14 +146,13 @@ def get_entry_point_string_format(entry_point_string): :rtype: EntryPointFormat """ try: - group, name = entry_point_string.split(ENTRY_POINT_STRING_SEPARATOR) + group, _ = entry_point_string.split(ENTRY_POINT_STRING_SEPARATOR) except ValueError: return EntryPointFormat.MINIMAL else: if group.startswith(ENTRY_POINT_GROUP_PREFIX): return EntryPointFormat.FULL - else: - return EntryPointFormat.PARTIAL + return EntryPointFormat.PARTIAL def get_entry_point_from_string(entry_point_string): @@ -186,6 +185,7 @@ def load_entry_point_from_string(entry_point_string): group, name = parse_entry_point_string(entry_point_string) return load_entry_point(group, name) + def load_entry_point(group, name): """ Load the class registered under the entry point for a given name and group @@ -215,7 +215,7 @@ def get_entry_point_groups(): :return: a list of valid entry point groups """ - return entry_point_group_to_module_path_map.keys() + return ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP.keys() def get_entry_point_names(group, sort=True): @@ -242,7 +242,8 @@ def get_entry_points(group): :param group: the entry point group :return: a list of entry points """ - return [ep for ep in ENTRYPOINT_MANAGER.iter_entry_points(group=group)] + return list(ENTRYPOINT_MANAGER.iter_entry_points(group=group)) + @functools.lru_cache(maxsize=None) def get_entry_point(group, name): @@ -258,10 +259,16 @@ def get_entry_point(group, name): entry_points = [ep for ep in get_entry_points(group) if ep.name == name] if not entry_points: - raise MissingEntryPointError("Entry point '{}' not found in group '{}'".format(name, group)) + raise MissingEntryPointError( + "Entry point '{}' not found in group '{}'. 
Try running `reentry scan` to update "
+            'the entry point cache.'.format(name, group)
+        )
 
     if len(entry_points) > 1:
-        raise MultipleEntryPointError("Multiple entry points '{}' found in group".format(name, group))
+        raise MultipleEntryPointError(
+            "Multiple entry points '{}' found in group '{}'. Try running `reentry scan` to "
+            'repopulate the entry point cache.'.format(name, group)
+        )
 
     return entry_points[0]
@@ -287,7 +294,7 @@ def get_entry_point_from_class(class_module, class_name):
     return None, None
 
 
-def get_entry_point_string_from_class(class_module, class_name):
+def get_entry_point_string_from_class(class_module, class_name):  # pylint: disable=invalid-name
     """
     Given the module and name of a class, attempt to obtain the corresponding entry point if it exists and return
     the entry point string which will be the entry point group and entry point
@@ -309,8 +316,7 @@
 
     if group and entry_point:
         return ENTRY_POINT_STRING_SEPARATOR.join([group, entry_point.name])
-    else:
-        return None
+    return None
 
 
 def is_valid_entry_point_string(entry_point_string):
@@ -324,9 +330,31 @@
     :return: True if the string is considered valid, False otherwise
     """
     try:
-        group, name = entry_point_string.split(ENTRY_POINT_STRING_SEPARATOR)
+        group, _ = entry_point_string.split(ENTRY_POINT_STRING_SEPARATOR)
     except (AttributeError, ValueError):
         # Either `entry_point_string` is not a string or it does not contain the separator
         return False
 
-    return group in entry_point_group_to_module_path_map
+    return group in ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP
+
+
+@functools.lru_cache(maxsize=None)
+def is_registered_entry_point(class_module, class_name, groups=None):
+    """Verify whether the class with the given module and class name is a registered entry point.
+
+    .. note:: this function only checks whether the class has a registered entry point. It explicitly does not verify
+        if the corresponding class is also importable. Use `load_entry_point` for this purpose instead.
+
+    :param class_module: the module of the class
+    :param class_name: the name of the class
+    :param groups: optionally consider only these entry point groups to look for the class
+    :return: boolean, True if the class is a registered entry point, False otherwise.
+    """
+    if groups is None:
+        groups = list(ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP.keys())
+
+    for group in groups:
+        for entry_point in ENTRYPOINT_MANAGER.iter_entry_points(group):
+            if class_module == entry_point.module_name and [class_name] == entry_point.attrs:
+                return True
+    return False
diff --git a/aiida/plugins/factories.py b/aiida/plugins/factories.py
index 6e5a9296e9..1675ac6cb6 100644
--- a/aiida/plugins/factories.py
+++ b/aiida/plugins/factories.py
@@ -14,8 +14,8 @@
 from aiida.common.exceptions import InvalidEntryPointTypeError
 
 __all__ = (
-    'BaseFactory', 'CalculationFactory', 'DataFactory', 'DbImporterFactory', 'OrbitalFactory', 'ParserFactory',
-    'SchedulerFactory', 'TransportFactory', 'WorkflowFactory'
+    'BaseFactory', 'CalculationFactory', 'DataFactory', 'DbImporterFactory', 'GroupFactory', 'OrbitalFactory',
+    'ParserFactory', 'SchedulerFactory', 'TransportFactory', 'WorkflowFactory'
 )
 
 
@@ -107,6 +107,25 @@ def DbImporterFactory(entry_point_name):
     raise_invalid_type_error(entry_point_name, entry_point_group, valid_classes)
 
 
+def GroupFactory(entry_point_name):
+    """Return the `Group` sub class registered under the given entry point.
+ + :param entry_point_name: the entry point name + :return: sub class of :py:class:`~aiida.orm.groups.Group` + :raises aiida.common.InvalidEntryPointTypeError: if the type of the loaded entry point is invalid. + """ + from aiida.orm import Group + + entry_point_group = 'aiida.groups' + entry_point = BaseFactory(entry_point_group, entry_point_name) + valid_classes = (Group,) + + if isclass(entry_point) and issubclass(entry_point, Group): + return entry_point + + raise_invalid_type_error(entry_point_name, entry_point_group, valid_classes) + + def OrbitalFactory(entry_point_name): """Return the `Orbital` sub class registered under the given entry point. diff --git a/aiida/restapi/common/config.py b/aiida/restapi/common/config.py index 2f719a2654..382e334ea4 100644 --- a/aiida/restapi/common/config.py +++ b/aiida/restapi/common/config.py @@ -8,48 +8,26 @@ # For further information please visit http://www.aiida.net # ########################################################################### """ -Constants used in rest api +Default configuration for the REST API """ +import os -## Pagination defaults -LIMIT_DEFAULT = 400 -PERPAGE_DEFAULT = 20 - -##Version prefix for all the URLs -PREFIX = '/api/v4' -VERSION = '4.0.1' -""" -Flask app configs. - -DEBUG: True/False. enables debug mode N.B. -!!!For production run use ALWAYS False!!! - -PROPAGATE_EXCEPTIONS: True/False serve REST exceptions to the client (and not a -generic 500: Internal Server Error exception) +API_CONFIG = { + 'LIMIT_DEFAULT': 400, # default records total + 'PERPAGE_DEFAULT': 20, # default records per page + 'PREFIX': '/api/v4', # prefix for all URLs + 'VERSION': '4.0.1', +} -""" APP_CONFIG = { - 'DEBUG': False, - 'PROPAGATE_EXCEPTIONS': True, + 'DEBUG': False, # use False for production + 'PROPAGATE_EXCEPTIONS': True, # serve REST exceptions to client instead of generic 500 internal server error } -""" -JSON serialization config. Leave this dictionary empty if default Flask -serializer is desired. - -Here is a list a all supported fields. If a field is not present in the -dictionary its value is assumed to be 'default'. -DATETIME_FORMAT: allowed values are 'asinput' and 'default'. +SERIALIZER_CONFIG = {'datetime_format': 'default'} # use 'asinput' or 'default' -""" -SERIALIZER_CONFIG = {'datetime_format': 'default'} -""" -Caching configuration - -memcached: backend caching system -""" CACHE_CONFIG = {'CACHE_TYPE': 'memcached'} -CACHING_TIMEOUTS = { #Caching TIMEOUTS (in seconds) +CACHING_TIMEOUTS = { # Caching timeouts in seconds 'nodes': 10, 'users': 10, 'calculations': 10, @@ -61,13 +39,12 @@ # IO tree MAX_TREE_DEPTH = 5 -""" -Aiida profile used by the REST api when no profile is specified (ex. by ---aiida-profile flag). 
-This has to be one of the profiles registered in .aiida/config.json - -In case you want to use the default stored in -.aiida/config.json, set this varibale to "default" -""" -DEFAULT_AIIDA_PROFILE = None +CLI_DEFAULTS = { + 'HOST_NAME': '127.0.0.1', + 'PORT': 5000, + 'CONFIG_DIR': os.path.dirname(os.path.abspath(__file__)), + 'WSGI_PROFILE': False, + 'HOOKUP_APP': True, + 'CATCH_INTERNAL_SERVER': False, +} diff --git a/aiida/restapi/resources.py b/aiida/restapi/resources.py index c746169681..49ccadec17 100644 --- a/aiida/restapi/resources.py +++ b/aiida/restapi/resources.py @@ -48,20 +48,20 @@ def get(self): response = {} - import aiida.restapi.common.config as conf + from aiida.restapi.common.config import API_CONFIG from aiida import __version__ if resource_type == 'info': response = {} # Add Rest API version - api_version = conf.VERSION.split('.') + api_version = API_CONFIG['VERSION'].split('.') response['API_major_version'] = api_version[0] response['API_minor_version'] = api_version[1] response['API_revision_version'] = api_version[2] # Add Rest API prefix - response['API_prefix'] = conf.PREFIX + response['API_prefix'] = API_CONFIG['PREFIX'] # Add AiiDA version response['AiiDA_version'] = __version__ diff --git a/aiida/restapi/run_api.py b/aiida/restapi/run_api.py index 6dfadff896..9c0696f2de 100755 --- a/aiida/restapi/run_api.py +++ b/aiida/restapi/run_api.py @@ -16,64 +16,64 @@ import os from flask_cors import CORS +from .common.config import CLI_DEFAULTS, APP_CONFIG, API_CONFIG +from . import api as api_classes -def run_api(flask_app, flask_api, **kwargs): +def run_api(flask_app=api_classes.App, flask_api=api_classes.AiidaApi, **kwargs): """ Takes a flask.Flask instance and runs it. - flask_app: Class inheriting from Flask app class - flask_api = flask_restful API class to be used to wrap the app - - kwargs: - List of valid parameters: - prog_name: name of the command before arguments are parsed. Useful when - api is embedded in a command, such as verdi restapi - hostname: self-explainatory - port: self-explainatory - config: directory containing the config.py file used to - configure the RESTapi - catch_internal_server: If true, catch and print all inter server errors - debug: self-explainatory - wsgi_profile:to use WSGI profiler middleware for finding bottlenecks in web application - hookup: to hookup app - All other passed parameters are ignored. 
+    :param flask_app: Class inheriting from flask app class
+    :type flask_app: :py:class:`flask.Flask`
+    :param flask_api: flask_restful API class to be used to wrap the app
+    :type flask_api: :py:class:`flask_restful.Api`
+
+    List of valid keyword arguments:
+    :param hostname: hostname to run app on (only when using built-in server)
+    :param port: port to run app on (only when using built-in server)
+    :param config: directory containing the config.py file used to configure the RESTapi
+    :param catch_internal_server: If true, catch and print all internal server errors
+    :param debug: enable debugging
+    :param wsgi_profile: use WSGI profiler middleware for finding bottlenecks in web application
+    :param hookup: If true, hook up application to built-in server - else just return it
     """
     # pylint: disable=too-many-locals
 
     # Unpack parameters
-    hostname = kwargs['hostname']
-    port = kwargs['port']
-    config = kwargs['config']
+    hostname = kwargs.pop('hostname', CLI_DEFAULTS['HOST_NAME'])
+    port = kwargs.pop('port', CLI_DEFAULTS['PORT'])
+    config = kwargs.pop('config', CLI_DEFAULTS['CONFIG_DIR'])
 
-    catch_internal_server = kwargs.pop('catch_internal_server', False)
-    debug = kwargs['debug']
-    wsgi_profile = kwargs['wsgi_profile']
-    hookup = kwargs['hookup']
+    catch_internal_server = kwargs.pop('catch_internal_server', CLI_DEFAULTS['CATCH_INTERNAL_SERVER'])
+    debug = kwargs.pop('debug', APP_CONFIG['DEBUG'])
+    wsgi_profile = kwargs.pop('wsgi_profile', CLI_DEFAULTS['WSGI_PROFILE'])
+    hookup = kwargs.pop('hookup', CLI_DEFAULTS['HOOKUP_APP'])
 
-    # Import the right configuration file
+    if kwargs:
+        raise ValueError('Unknown keyword arguments: {}'.format(kwargs))
+
+    # Import the configuration file
     spec = importlib.util.spec_from_file_location(os.path.join(config, 'config'), os.path.join(config, 'config.py'))
-    confs = importlib.util.module_from_spec(spec)
-    spec.loader.exec_module(confs)
+    config_module = importlib.util.module_from_spec(spec)
+    spec.loader.exec_module(config_module)
 
     # Instantiate an app
-    app_kwargs = dict(catch_internal_server=catch_internal_server)
-    app = flask_app(__name__, **app_kwargs)
+    app = flask_app(__name__, catch_internal_server=catch_internal_server)
 
-    # Config the app
-    app.config.update(**confs.APP_CONFIG)
+    # Apply default configuration
+    app.config.update(**config_module.APP_CONFIG)
 
-    # cors
-    cors_prefix = os.path.join(confs.PREFIX, '*')
-    CORS(app, resources={r'' + cors_prefix: {'origins': '*'}})
+    # Allow cross-origin resource sharing
+    cors_prefix = r'{}/*'.format(API_CONFIG['PREFIX'])
+    CORS(app, resources={cors_prefix: {'origins': '*'}})
 
-    # Config the serializer used by the app
-    if confs.SERIALIZER_CONFIG:
+    # Configure the serializer
+    if config_module.SERIALIZER_CONFIG:
         from aiida.restapi.common.utils import CustomJSONEncoder
         app.json_encoder = CustomJSONEncoder
 
-    # If the user selects the profiling option, then we need
-    # to do a little extra setup
+    # Set up WSGI profile if requested
     if wsgi_profile:
         from werkzeug.middleware.profiler import ProfilerMiddleware
 
@@ -81,18 +81,14 @@ def run_api(flask_app, flask_api, **kwargs):
         app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])
 
     # Instantiate an Api by associating its app
-    api_kwargs = dict(PREFIX=confs.PREFIX, PERPAGE_DEFAULT=confs.PERPAGE_DEFAULT, LIMIT_DEFAULT=confs.LIMIT_DEFAULT)
-    api = flask_api(app, **api_kwargs)
+    api = flask_api(app, **API_CONFIG)
 
-    # Check if the app has to be hooked-up or just returned
     if hookup:
-        print(' * REST API running on http://{}:{}{}'.format(hostname, port, confs.PREFIX))
+
# Run app through built-in werkzeug server + print(' * REST API running on http://{}:{}{}'.format(hostname, port, API_CONFIG['PREFIX'])) api.app.run(debug=debug, host=hostname, port=int(port), threaded=True) else: - # here we return the app, and the api with no specifications on debug - # mode, port and host. This can be handled by an external server, - # e.g. apache2, which will set the host and port. This implies that - # the user-defined configuration of the app is ineffective (it only - # affects the internal werkzeug server used by Flask). + # Return the app & api without specifying port/host to be handled by an external server (e.g. apache). + # Some of the user-defined configuration of the app is ineffective (only affects built-in server). return (app, api) diff --git a/aiida/tools/graph/age_rules.py b/aiida/tools/graph/age_rules.py index ed023aa6a7..09f373a901 100644 --- a/aiida/tools/graph/age_rules.py +++ b/aiida/tools/graph/age_rules.py @@ -77,6 +77,8 @@ def __init__(self, querybuilder, max_iterations=1, track_edges=False): super().__init__(max_iterations, track_edges=track_edges) def get_spec_from_path(queryhelp, idx): + from aiida.orm.querybuilder import GROUP_ENTITY_TYPE_PREFIX + if ( queryhelp['path'][idx]['entity_type'].startswith('node') or queryhelp['path'][idx]['entity_type'].startswith('data') or @@ -84,7 +86,7 @@ def get_spec_from_path(queryhelp, idx): queryhelp['path'][idx]['entity_type'] == '' ): result = 'nodes' - elif queryhelp['path'][idx]['entity_type'] == 'group': + elif queryhelp['path'][idx]['entity_type'].startswith(GROUP_ENTITY_TYPE_PREFIX): result = 'groups' else: raise Exception('not understood entity from ( {} )'.format(queryhelp['path'][idx]['entity_type'])) diff --git a/conftest.py b/aiida/tools/groups/__init__.py similarity index 50% rename from conftest.py rename to aiida/tools/groups/__init__.py index 1d2336820d..19e936839b 100644 --- a/conftest.py +++ b/aiida/tools/groups/__init__.py @@ -7,8 +7,14 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -"""Configuration file for pytest tests.""" - -import pytest # pylint: disable=unused-import +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=wildcard-import,undefined-variable +"""Provides tools for interacting with AiiDA Groups.""" +from .paths import * -pytest_plugins = ['aiida.manage.tests.pytest_fixtures'] # pylint: disable=invalid-name +__all__ = paths.__all__ diff --git a/aiida/tools/groups/paths.py b/aiida/tools/groups/paths.py new file mode 100644 index 0000000000..362d1da1f8 --- /dev/null +++ b/aiida/tools/groups/paths.py @@ -0,0 +1,339 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
#
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file       #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+"""Provides functionality for managing large numbers of AiiDA Groups, via label delimitation."""
+from collections import namedtuple
+from functools import total_ordering
+import re
+from typing import Any, Iterable, List, Optional  # pylint: disable=unused-import
+import warnings
+
+from aiida import orm
+from aiida.common.exceptions import NotExistent
+
+__all__ = ('GroupPath', 'InvalidPath', 'GroupNotFoundError', 'GroupNotUniqueError', 'NoGroupsInPathError')
+
+REGEX_ATTR = re.compile('^[a-zA-Z][\\_a-zA-Z0-9]*$')
+
+
+class InvalidPath(Exception):
+    """An exception to indicate that a path is not valid."""
+
+
+class GroupNotFoundError(Exception):
+    """An exception raised when a path does not have an associated group."""
+
+    def __init__(self, grouppath):
+        msg = 'No such group: {}'.format(grouppath.path)
+        super().__init__(msg)
+
+
+class GroupNotUniqueError(Exception):
+    """An exception raised when a path has multiple associated groups."""
+
+    def __init__(self, grouppath):
+        msg = 'The path is not unique: {}'.format(grouppath.path)
+        super().__init__(msg)
+
+
+class NoGroupsInPathError(Exception):
+    """An exception raised when a path does not contain any descendant groups."""
+
+    def __init__(self, grouppath):
+        msg = 'The path does not contain any descendant groups: {}'.format(grouppath.path)
+        super().__init__(msg)
+
+
+WalkNodeResult = namedtuple('WalkNodeResult', ['group_path', 'node'])
+
+
+@total_ordering
+class GroupPath:
+    """A class to provide label delimited access to groups.
+
+    See tests for usage examples.
+    """
+
+    def __init__(self, path='', cls=orm.Group, warn_invalid_child=True):
+        # type: (str, Optional[str], Optional[GroupPath])
+        """Instantiate the class.
+
+        :param path: The initial path of the group.
+        :param cls: The subclass of `Group` to operate on.
+        :param warn_invalid_child: Issue a warning, when iterating children, if a child path is invalid.
+
+        """
+        if not issubclass(cls, orm.Group):
+            raise TypeError('cls must be a subclass of Group: {}'.format(cls))
+
+        self._delimiter = '/'
+        self._cls = cls
+        self._path_string = self._validate_path(path)
+        self._path_list = self._path_string.split(self._delimiter) if path else []
+        self._warn_invalid_child = warn_invalid_child
+
+    def _validate_path(self, path):
+        """Validate the supplied path."""
+        if path == self._delimiter:
+            return ''
+        if self._delimiter * 2 in path:
+            raise InvalidPath("The path may not contain a duplicate delimiter '{}': {}".format(self._delimiter, path))
+        if (path.startswith(self._delimiter) or path.endswith(self._delimiter)):
+            raise InvalidPath("The path may not start/end with the delimiter '{}': {}".format(self._delimiter, path))
+        return path
+
+    def __repr__(self):
+        # type: () -> str
+        """Represent the instantiated class."""
+        return "{}('{}', cls='{}')".format(self.__class__.__name__, self.path, self.cls)
+
+    def __eq__(self, other):
+        # type: (Any) -> bool
+        """Compare equality of path and ``Group`` subclass to another ``GroupPath`` object."""
+        if not isinstance(other, GroupPath):
+            return NotImplemented
+        return (self.path, self.cls) == (other.path, other.cls)
+
+    def __lt__(self, other):
+        # type: (Any) -> bool
+        """Compare less-than operator of path and ``Group`` subclass to another ``GroupPath`` object."""
+        if not isinstance(other, GroupPath):
+            return NotImplemented
+        return (self.path, self.cls) < (other.path, other.cls)
+
+    @property
+    def path(self):
+        # type: () -> str
+        """Return the path string."""
+        return self._path_string
+
+    @property
+    def path_list(self):
+        # type: () -> List[str]
+        """Return a list of the path components."""
+        return self._path_list[:]
+
+    @property
+    def key(self):
+        # type: () -> str
+        """Return the final component of the path."""
+        if self._path_list:
+            return self._path_list[-1]
+        return None
+
+    @property
+    def delimiter(self):
+        # type: () -> str
+        """Return the delimiter used to split path into components."""
+        return self._delimiter
+
+    @property
+    def cls(self):
+        # type: () -> str
+        """Return the cls used to query for and instantiate a ``Group`` with."""
+        return self._cls
+
+    @property
+    def parent(self):
+        # type: () -> Optional[GroupPath]
+        """Return the parent path."""
+        if self.path_list:
+            return GroupPath(
+                self.delimiter.join(self.path_list[:-1]), cls=self.cls, warn_invalid_child=self._warn_invalid_child
+            )
+        return None
+
+    def __truediv__(self, path):
+        # type: (str) -> GroupPath
+        """Return a child ``GroupPath``, with a new path formed by appending ``path`` to the current path."""
+        if not isinstance(path, str):
+            raise TypeError('path is not a string: {}'.format(path))
+        path = self._validate_path(path)
+        child = GroupPath(
+            path=self.path + self.delimiter + path if self.path else path,
+            cls=self.cls,
+            warn_invalid_child=self._warn_invalid_child
+        )
+        return child
+
+    def __getitem__(self, path):
+        # type: (str) -> GroupPath
+        """Return a child ``GroupPath``, with a new path formed by appending ``path`` to the current path."""
+        return self.__truediv__(path)
+
+    def get_group(self):
+        # type: () -> Optional[self.cls]
+        """Return the concrete group associated with this path."""
+        try:
+            return orm.QueryBuilder().append(self.cls, subclassing=False, filters={'label': self.path}).one()[0]
+        except NotExistent:
+            return None
+
+    @property
+    def group_ids(self):
+        # type: () -> List[int]
+        """Return all the group ids associated with this GroupPath.
+
+        :returns: an empty list, if no group associated with this label,
+            or can be multiple if cls was None
+
+        This is an efficient method for checking existence,
+        which does not require the (slow) loading of the ORM entity.
+        """
+        query = orm.QueryBuilder()
+        filters = {'label': self.path}
+        query.append(self.cls, subclassing=False, filters=filters, project='id')
+        return [r[0] for r in query.all()]
+
+    @property
+    def is_virtual(self):
+        # type: () -> bool
+        """Return whether there is no concrete group associated with this path."""
+        return len(self.group_ids) == 0
+
+    def get_or_create_group(self):
+        # type: () -> (self.cls, bool)
+        """Return the concrete group associated with this path or create it, if it does not already exist."""
+        return self.cls.objects.get_or_create(label=self.path)
+
+    def delete_group(self):
+        """Delete the concrete group associated with this path.
+
+        :raises: GroupNotFoundError, GroupNotUniqueError
+        """
+        ids = self.group_ids
+        if not ids:
+            raise GroupNotFoundError(self)
+        if len(ids) > 1:
+            raise GroupNotUniqueError(self)
+        self.cls.objects.delete(ids[0])
+
+    @property
+    def children(self):
+        # type: () -> Iterable[GroupPath]
+        """Iterate through all (direct) children of this path."""
+        query = orm.QueryBuilder()
+        filters = {}
+        if self.path:
+            filters['label'] = {'like': self.path + self.delimiter + '%'}
+        query.append(self.cls, subclassing=False, filters=filters, project='label')
+        if query.count() == 0 and self.is_virtual:
+            raise NoGroupsInPathError(self)
+
+        yielded = []
+        for (label,) in query.iterall():
+            path = label.split(self._delimiter)
+            if len(path) <= len(self._path_list):
+                continue
+            path_string = self._delimiter.join(path[:len(self._path_list) + 1])
+            if (path_string not in yielded and path[:len(self._path_list)] == self._path_list):
+                yielded.append(path_string)
+                try:
+                    yield GroupPath(path=path_string, cls=self.cls, warn_invalid_child=self._warn_invalid_child)
+                except InvalidPath:
+                    if self._warn_invalid_child:
+                        warnings.warn('invalid path encountered: {}'.format(path_string))  # pylint: disable=no-member
+
+    def __iter__(self):
+        # type: () -> Iterable[GroupPath]
+        """Iterate through all (direct) children of this path."""
+        return self.children
+
+    def __len__(self):
+        # type: () -> int
+        """Return the number of children for this path."""
+        return sum(1 for _ in self.children)
+
+    def __contains__(self, key):
+        # type: (str) -> bool
+        """Return whether a child exists for this key."""
+        for child in self.children:
+            if child.path_list[-1] == key:
+                return True
+        return False
+
+    def walk(self, return_virtual=True):
+        # type: () -> Iterable[GroupPath]
+        """Recursively iterate through all children of this path."""
+        for child in self:
+            if return_virtual or not child.is_virtual:
+                yield child
+            for sub_child in child.walk(return_virtual=return_virtual):
+                if return_virtual or not sub_child.is_virtual:
+                    yield sub_child
+
+    def walk_nodes(self, filters=None, node_class=None, query_batch=None):
+        # type: () -> Iterable[WalkNodeResult]
+        """Recursively iterate through all nodes of this path and its children.
+
+        :param filters: filters to apply to the node query
+        :param node_class: return only nodes of a certain class (or list of classes)
+        :param int query_batch: The size of the batches to ask the backend to batch results in subcollections.
+            You can optimize the speed of the query by tuning this parameter.
+            Be aware though that this is only safe if no commit will take place during this transaction.
+        """
+        query = orm.QueryBuilder()
+        group_filters = {}
+        if self.path:
+            group_filters['label'] = {'or': [{'==': self.path}, {'like': self.path + self.delimiter + '%'}]}
+        query.append(self.cls, subclassing=False, filters=group_filters, project='label', tag='group')
+        query.append(
+            orm.Node if node_class is None else node_class,
+            with_group='group',
+            filters=filters,
+            project=['*'],
+        )
+        for (label, node) in query.iterall(query_batch) if query_batch else query.all():
+            yield WalkNodeResult(GroupPath(label, cls=self.cls), node)
+
+    @property
+    def browse(self):
+        """Return a ``GroupAttr`` instance, for attribute access to children."""
+        return GroupAttr(self)
+
+
+class GroupAttr:
+    """A class to provide attribute access to a ``GroupPath``'s children.
+
+    The only public attributes on this class are dynamically created from the ``GroupPath`` child keys.
+    NOTE: any child keys that do not conform to an acceptable (public) attribute string will be ignored.
+    The ``GroupPath`` can be retrieved *via* a function call, e.g.::
+
+        group_path = GroupPath()
+        group_attr = GroupAttr(group_path)
+        group_attr.a.b.c() == GroupPath("a/b/c")
+
+    """
+
+    def __init__(self, group_path):
+        # type: (GroupPath)
+        """Instantiate the ``GroupPath``, and a mapping of its children."""
+        self._group_path = group_path
+
+    def __repr__(self):
+        # type: () -> str
+        """Represent the instantiated class."""
+        return "{}('{}', type='{}')".format(self.__class__.__name__, self._group_path.path, self._group_path.cls)
+
+    def __call__(self):
+        # type: () -> GroupPath
+        """Return the ``GroupPath``."""
+        return self._group_path
+
+    def __dir__(self):
+        """Return a list of available attributes."""
+        return [c.path_list[-1] for c in self._group_path.children if REGEX_ATTR.match(c.path_list[-1])]
+
+    def __getattr__(self, attr):
+        # type: (str) -> GroupAttr
+        """Return the requested attribute name."""
+        for child in self._group_path.children:
+            if attr == child.path_list[-1]:
+                return GroupAttr(child)
+        raise AttributeError(attr)
diff --git a/aiida/tools/importexport/common/config.py b/aiida/tools/importexport/common/config.py
index 0baac376c9..5f5a8e0751 100644
--- a/aiida/tools/importexport/common/config.py
+++ b/aiida/tools/importexport/common/config.py
@@ -9,15 +9,13 @@
 ###########################################################################
 # pylint: disable=invalid-name
 """ Configuration file for AiiDA Import/Export module """
-
-from aiida.orm import Computer, Group, GroupTypeString, Node, User, Log, Comment
+from aiida.orm import Computer, Group, Node, User, Log, Comment
 
 __all__ = ('EXPORT_VERSION',)
 
 # Current export version
-EXPORT_VERSION = '0.8'
+EXPORT_VERSION = '0.9'
 
-IMPORTGROUP_TYPE = GroupTypeString.IMPORTGROUP_TYPE.value
 DUPL_SUFFIX = ' (Imported #{})'
 
 # The name of the subfolder in which the node files are stored
diff --git a/aiida/tools/importexport/dbimport/backends/django/__init__.py b/aiida/tools/importexport/dbimport/backends/django/__init__.py
index d97ad70d1d..aa463f5ffb 100644
--- a/aiida/tools/importexport/dbimport/backends/django/__init__.py
+++ b/aiida/tools/importexport/dbimport/backends/django/__init__.py
@@ -21,10 +21,10 @@
 from aiida.common.links import LinkType, validate_link_label
 from aiida.common.utils import grouper, get_object_from_string
 from aiida.orm.utils.repository import Repository
-from aiida.orm import QueryBuilder, Node, Group
+from aiida.orm import QueryBuilder, Node, Group, ImportGroup
 from aiida.tools.importexport.common import exceptions
 from
aiida.tools.importexport.common.archive import extract_tree, extract_tar, extract_zip -from aiida.tools.importexport.common.config import DUPL_SUFFIX, IMPORTGROUP_TYPE, EXPORT_VERSION, NODES_EXPORT_SUBFOLDER +from aiida.tools.importexport.common.config import DUPL_SUFFIX, EXPORT_VERSION, NODES_EXPORT_SUBFOLDER from aiida.tools.importexport.common.config import ( NODE_ENTITY_NAME, GROUP_ENTITY_NAME, COMPUTER_ENTITY_NAME, USER_ENTITY_NAME, LOG_ENTITY_NAME, COMMENT_ENTITY_NAME ) @@ -673,7 +673,7 @@ def import_data_dj( "Overflow of import groups (more than 100 import groups exists with basename '{}')" ''.format(basename) ) - group = Group(label=group_label, type_string=IMPORTGROUP_TYPE).store() + group = ImportGroup(label=group_label).store() # Add all the nodes to the new group # TODO: decide if we want to return the group label diff --git a/aiida/tools/importexport/dbimport/backends/sqla/__init__.py b/aiida/tools/importexport/dbimport/backends/sqla/__init__.py index f08de125ec..2e800b1361 100644 --- a/aiida/tools/importexport/dbimport/backends/sqla/__init__.py +++ b/aiida/tools/importexport/dbimport/backends/sqla/__init__.py @@ -20,13 +20,13 @@ from aiida.common.folders import SandboxFolder, RepositoryFolder from aiida.common.links import LinkType from aiida.common.utils import get_object_from_string -from aiida.orm import QueryBuilder, Node, Group, WorkflowNode, CalculationNode, Data +from aiida.orm import QueryBuilder, Node, Group, ImportGroup from aiida.orm.utils.links import link_triple_exists, validate_link from aiida.orm.utils.repository import Repository from aiida.tools.importexport.common import exceptions from aiida.tools.importexport.common.archive import extract_tree, extract_tar, extract_zip -from aiida.tools.importexport.common.config import DUPL_SUFFIX, IMPORTGROUP_TYPE, EXPORT_VERSION, NODES_EXPORT_SUBFOLDER +from aiida.tools.importexport.common.config import DUPL_SUFFIX, EXPORT_VERSION, NODES_EXPORT_SUBFOLDER from aiida.tools.importexport.common.config import ( NODE_ENTITY_NAME, GROUP_ENTITY_NAME, COMPUTER_ENTITY_NAME, USER_ENTITY_NAME, LOG_ENTITY_NAME, COMMENT_ENTITY_NAME ) @@ -664,7 +664,7 @@ def import_data_sqla( "Overflow of import groups (more than 100 import groups exists with basename '{}')" ''.format(basename) ) - group = Group(label=group_label, type_string=IMPORTGROUP_TYPE) + group = ImportGroup(label=group_label) session.add(group.backend_entity._dbmodel) # Adding nodes to group avoiding the SQLA ORM to increase speed diff --git a/aiida/tools/importexport/migration/__init__.py b/aiida/tools/importexport/migration/__init__.py index e5772c1f8f..402147ff7b 100644 --- a/aiida/tools/importexport/migration/__init__.py +++ b/aiida/tools/importexport/migration/__init__.py @@ -8,9 +8,9 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Migration export files from old export versions to the newest, used by `verdi export migrate` command.""" - -from aiida.cmdline.utils import echo -from aiida.tools.importexport.common.exceptions import DanglingLinkError +from aiida.common.lang import type_check +from aiida.tools.importexport import EXPORT_VERSION +from aiida.tools.importexport.common.exceptions import DanglingLinkError, ArchiveMigrationError from .utils import verify_metadata_version from .v01_to_v02 import migrate_v1_to_v2 @@ -20,6 +20,7 @@ from .v05_to_v06 import migrate_v5_to_v6 from .v06_to_v07 import migrate_v6_to_v7 from .v07_to_v08 import migrate_v7_to_v8 +from .v08_to_v09 
import migrate_v8_to_v9 __all__ = ('migrate_recursively', 'verify_metadata_version') @@ -31,37 +32,41 @@ '0.5': migrate_v5_to_v6, '0.6': migrate_v6_to_v7, '0.7': migrate_v7_to_v8, + '0.8': migrate_v8_to_v9, } -def migrate_recursively(metadata, data, folder): - """ - Recursive migration of export files from v0.1 to newest version, +def migrate_recursively(metadata, data, folder, version=EXPORT_VERSION): + """Recursive migration of export files from v0.1 to a newer version. + See specific migration functions for detailed descriptions. :param metadata: the content of an export archive metadata.json file :param data: the content of an export archive data.json file :param folder: SandboxFolder in which the archive has been unpacked (workdir) + :param version: the version to migrate to, by default the current export version """ - from aiida.tools.importexport import EXPORT_VERSION as newest_version - old_version = verify_metadata_version(metadata) + type_check(version, str) + try: - if old_version == newest_version: - echo.echo_critical('Your export file is already at the newest export version {}'.format(newest_version)) + if old_version == version: + raise ArchiveMigrationError('Your export file is already at the version {}'.format(version)) + elif old_version > version: + raise ArchiveMigrationError('Backward migrations are not supported') elif old_version in MIGRATE_FUNCTIONS: MIGRATE_FUNCTIONS[old_version](metadata, data, folder) else: - echo.echo_critical('Cannot migrate from version {}'.format(old_version)) + raise ArchiveMigrationError('Cannot migrate from version {}'.format(old_version)) except ValueError as exception: - echo.echo_critical(exception) + raise ArchiveMigrationError(exception) except DanglingLinkError: - echo.echo_critical('Export file is invalid because it contains dangling links') + raise ArchiveMigrationError('Export file is invalid because it contains dangling links') new_version = verify_metadata_version(metadata) - if new_version < newest_version: - new_version = migrate_recursively(metadata, data, folder) + if new_version < version: + new_version = migrate_recursively(metadata, data, folder, version) return new_version diff --git a/aiida/tools/importexport/migration/v03_to_v04.py b/aiida/tools/importexport/migration/v03_to_v04.py index cd7a8e32da..32745f73f7 100644 --- a/aiida/tools/importexport/migration/v03_to_v04.py +++ b/aiida/tools/importexport/migration/v03_to_v04.py @@ -432,7 +432,7 @@ def add_extras(data): data.update({'node_extras': node_extras, 'node_extras_conversion': node_extras_conversion}) -def migrate_v3_to_v4(metadata, data, folder, *args): # pylint: disable=unused-argument +def migrate_v3_to_v4(metadata, data, *args): """ Migration of export files from v0.3 to v0.4 @@ -446,6 +446,9 @@ def migrate_v3_to_v4(metadata, data, folder, *args): # pylint: disable=unused-a verify_metadata_version(metadata, old_version) update_metadata(metadata, new_version) + # The trajectory data migration requires the folder containing all the repository files of the archive + folder = args[0] + # Apply migrations in correct sequential order migration_base_data_plugin_type_string(data) migration_process_type(metadata, data) diff --git a/aiida/tools/importexport/migration/v08_to_v09.py b/aiida/tools/importexport/migration/v08_to_v09.py new file mode 100644 index 0000000000..bfe1f5ea94 --- /dev/null +++ b/aiida/tools/importexport/migration/v08_to_v09.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +########################################################################### +# 
Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file       #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+"""Migration from v0.8 to v0.9, used by `verdi export migrate` command.
+
+The migration steps are named similarly to the database migrations for Django and SQLAlchemy.
+In the description of each migration, a revision number is given, which refers to the Django migrations.
+The individual Django database migrations may be found at:
+
+    `aiida.backends.djsite.db.migrations.00XX_<migration-name>.py`
+
+Where XX are the numbers in the migrations' documentation: REV. 1.0.XX
+And migration-name is the name of the particular migration.
+The individual SQLAlchemy database migrations may be found at:
+
+    `aiida.backends.sqlalchemy.migrations.versions.<id>_<migration-name>.py`
+
+Where id is a SQLA id and migration-name is the name of the particular migration.
+"""
+# pylint: disable=invalid-name
+
+from aiida.tools.importexport.migration.utils import verify_metadata_version, update_metadata
+
+
+def migration_dbgroup_type_string(data):
+    """Apply migration 0044 - REV. 1.0.44
+
+    Rename the `type_string` columns of all `Group` instances.
+    """
+    mapping = {
+        'user': 'core',
+        'data.upf': 'core.upf',
+        'auto.import': 'core.import',
+        'auto.run': 'core.auto',
+    }
+
+    for attributes in data.get('export_data', {}).get('Group', {}).values():
+        for old, new in mapping.items():
+            if attributes['type_string'] == old:
+                attributes['type_string'] = new
+
+
+def migrate_v8_to_v9(metadata, data, *args):  # pylint: disable=unused-argument
+    """Migration of export files from v0.8 to v0.9."""
+    old_version = '0.8'
+    new_version = '0.9'
+
+    verify_metadata_version(metadata, old_version)
+    update_metadata(metadata, new_version)
+
+    # Apply migrations
+    migration_dbgroup_type_string(data)
diff --git a/aiida/tools/ipython/ipython_magics.py b/aiida/tools/ipython/ipython_magics.py
index af3d8cb395..66310c37b9 100644
--- a/aiida/tools/ipython/ipython_magics.py
+++ b/aiida/tools/ipython/ipython_magics.py
@@ -34,8 +34,8 @@
     In [2]: %aiida
 
 """
 
-from IPython import version_info
-from IPython.core import magic
+from IPython import version_info  # pylint: disable=no-name-in-module
+from IPython.core import magic  # pylint: disable=no-name-in-module,import-error
 
 from aiida.common import json
diff --git a/bin/runaiida b/bin/runaiida
deleted file mode 100755
index d5c1c951c5..0000000000
--- a/bin/runaiida
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-# Pass all parameters to 'verdi run'
-# This is useful to use in a shebang line: i.e., you can put
-# the following line as the first line in a file:
-
-#!/usr/bin/env runaiida
-
-# and the script will be run with 'verdi run' upon execution
-# (if it has the correct execution bits set, i.e. using
-# chmod +x ...)
- -# With "$@", each parameter is correctly escaped -verdi run "$@" - diff --git a/docs/requirements_for_rtd.txt b/docs/requirements_for_rtd.txt index 6ac780528c..f518609215 100644 --- a/docs/requirements_for_rtd.txt +++ b/docs/requirements_for_rtd.txt @@ -1,5 +1,5 @@ PyCifRW~=4.4 -aiida-export-migration-tests==0.8.0 +aiida-export-migration-tests==0.9.0 aldjemy~=0.9.1 alembic~=1.2 ase~=3.18 @@ -8,6 +8,7 @@ click-completion~=0.5.1 click-config-file~=0.5.0 click-spinner~=0.1.8 click~=7.0 +coverage<5.0 django~=2.2 docutils==0.15.2 ete3~=3.1 @@ -18,19 +19,21 @@ graphviz~=0.13 ipython~=7.0 jinja2~=2.10 kiwipy[rmq]~=0.5.1 -numpy~=1.17,<1.18 +numpy<1.18,~=1.17 paramiko~=2.6 pg8000~=1.13 -pgtest~=1.3,>=1.3.1 +pgsu~=0.1.0 +pgtest>=1.3.1,~=1.3 pika~=1.1 plumpy~=0.14.5 psutil~=5.6 -psycopg2-binary~=2.8,>=2.8.3 -pyblake2~=1.1; python_version<'3.6' +psycopg2-binary>=2.8.3,~=2.8 +pyblake2~=1.1; python_version < "3.6" pygments~=2.5 pymatgen>=2019.7.2 pymysql~=0.9.3 pyparsing~=2.4 +pytest-cov~=2.7 pytest-timeout~=1.3 pytest~=5.3 python-dateutil~=2.8 @@ -38,7 +41,7 @@ python-memcached~=1.59 pytz~=2019.3 pyyaml~=5.1.2 reentry~=1.3 -seekpath~=1.9,>=1.9.3 +seekpath>=1.9.3,~=1.9 simplejson~=3.16 spglib~=1.14 sphinx-rtd-theme~=0.4.3 @@ -47,7 +50,7 @@ sphinxcontrib-details-directive~=0.1.0 sphinx~=2.2 sqlalchemy-diff~=0.1.3 sqlalchemy-utils~=0.34.2 -sqlalchemy~=1.3,>=1.3.10 +sqlalchemy>=1.3.10,~=1.3 tabulate~=0.8.5 tornado<5.0 tzlocal~=2.0 diff --git a/docs/source/concepts/calculations.rst b/docs/source/concepts/calculations.rst index 390327fd93..a79126897c 100644 --- a/docs/source/concepts/calculations.rst +++ b/docs/source/concepts/calculations.rst @@ -147,7 +147,7 @@ When a calculation job is launched, the engine will take it roughly through the * **Upload**: the calculation job implementation is used to transform the input nodes into the required input files, which are uploaded to a 'working' directory on the target machine * **Submit**: to execute the calculation, a job is submitted to the scheduler of the computer on which the input `code` is configured. * **Update**: the engine will query the scheduler to check for the status of the calculation job - * **Retrieve**: once the job has finished, the engine will retrieve the output files, specified by the plugin and store them in a node attached as an output node to the calculation + * **Retrieve**: once the job has finished, the engine will retrieve the output files, specified by the calculation plugin and store them in a node attached as an output node to the calculation All of these tasks require the engine to interact with the computer, or machine, that will actually run the external code. Since the :py:class:`~aiida.orm.nodes.data.code.Code` that is used as an input for the calculation job is configured for a specific :py:class:`~aiida.orm.computers.Computer`, the engine knows exactly how to execute all these tasks. diff --git a/docs/source/conf.py b/docs/source/conf.py index aac8f853ab..e13004f880 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -49,7 +49,11 @@ # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.imgmath', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', 'IPython.sphinxext.ipython_console_highlighting', 'IPython.sphinxext.ipython_directive', 'sphinxcontrib.contentui', 'aiida.sphinxext'] +extensions = [ + 'sphinx.ext.intersphinx', 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.viewcode', 'sphinx.ext.coverage', + 'sphinx.ext.imgmath', 'sphinx.ext.ifconfig', 'sphinx.ext.todo', 'IPython.sphinxext.ipython_console_highlighting', + 'IPython.sphinxext.ipython_directive', 'sphinxcontrib.contentui', 'aiida.sphinxext' +] ipython_mplbackend = '' todo_include_todos = True @@ -115,6 +119,14 @@ # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] +intersphinx_mapping = { + 'click': ('https://click.palletsprojects.com/', None), + 'flask': ('http://flask.pocoo.org/docs/latest/', None), + 'flask_restful': ('https://flask-restful.readthedocs.io/en/latest/', None), + 'kiwipy': ('https://kiwipy.readthedocs.io/en/latest/', None), + 'plumpy': ('https://plumpy.readthedocs.io/en/latest/', None), + 'python': ('https://docs.python.org/3', None), +} # -- Options for HTML output --------------------------------------------------- @@ -361,17 +373,8 @@ def setup(app): # Allow duplicate toc entries. #epub_tocdup = True -# otherwise, readthedocs.org uses their theme by default, so no need -# to specify it - - # Warnings to ignore when using the -n (nitpicky) option -# We should ignore any python built-in exception, for instance -nitpick_ignore = [('py:class','Warning'), ('py:class', 'exceptions.Warning')] - -for line in open('nitpick-exceptions'): - if line.strip() == '' or line.startswith('#'): - continue - dtype, target = line.split(None, 1) - target = target.strip() - nitpick_ignore.append((dtype, target)) +with open('nitpick-exceptions', 'r') as handle: + nitpick_ignore = [ + tuple(line.strip().split(None, 1)) for line in handle.readlines() if line.strip() and not line.startswith('#') + ] diff --git a/docs/source/developer_guide/core/caching.rst b/docs/source/developer_guide/core/caching.rst index 29bd390038..9853889b8b 100644 --- a/docs/source/developer_guide/core/caching.rst +++ b/docs/source/developer_guide/core/caching.rst @@ -2,7 +2,7 @@ Caching: implementation details +++++++++++++++++++++++++++++++ This section covers some details of the caching mechanism which are not discussed in the :ref:`user guide `. -If you are developing a plugin and want to modify the caching behavior of your classes, we recommend you read :ref:`this section ` first. +If you are developing plugins and want to modify the caching behavior of your classes, we recommend you read :ref:`this section ` first. .. 
_devel_controlling_hashing: diff --git a/docs/source/developer_guide/design/changes.rst b/docs/source/developer_guide/design/changes.rst index adc3988dbc..227549be8e 100644 --- a/docs/source/developer_guide/design/changes.rst +++ b/docs/source/developer_guide/design/changes.rst @@ -95,4 +95,33 @@ In particular we will strive to: - if we are forced to change it anyway, deprecate a significant amount of time in advance - for backwards incompatible changes, increase the major version -For better clarity, we are :ref:`curating a list of classes and functions` (exposed at the second level) that are intended to be public and for which the above policy will be enforced \ No newline at end of file +For better clarity, we are :ref:`curating a list of classes and functions` (exposed at the second level) that are intended to be public and for which the above policy will be enforced + +Version 0.9.0 ++++++++++++++ + +The plugin system +----------------- + +The plugin system was designed with the following goals in mind. + +* **Sharing of calculations, workflows and data types**: plugins are bundled in a python package, distributed as a zip source archive, python ``egg`` or PyPI package. There is extensive documentation available for how to distribute python packages `here `_. + +* **Ease of use**: plugins are listed on the `AiiDA plugin registry `_ and can be installed with one simple command. This process is familiar to every regular python user. + +* **Decouple development and update cycles of AiiDA and plugins**: since plugins are separate python packages, they can be developed in a separate code repository and updated when the developer sees fit without a need to update AiiDA. Similarly, if AiiDA is updated, plugins may not need to release a new version. + +* **Promote modular design in AiiDA development**: separating plugins into their own python packages ensures that plugins can not (easily) access parts of the AiiDA code which are not part of the public API, enabling AiiDA development to stay agile. The same applies to plugins relying on other plugins. + +* **Low overhead for developers**: plugin developers can write their extensions the same way they would write any python code meant for distribution. + +* **Automatic AiiDA setup and testing of plugins**: installation of complete python environments consisting of many packages can be automated, provided all packages use ``setuptools`` as a distribution tool. This enables use of AiiDA in a service-based way using, e.g., docker images. At the same time it becomes possible to create automated tests for any combination of plugins, as long as the plugins provide test entry points. + + +The chosen approach to plugins has some limitations: + +* the interface for entry point objects is enforced implicitly by the way the object is used. It is the responsibility of the plugin developer to test for compliance, especially if the object is not derived from the recommended base classes provided by AiiDA. This is to be clearly communicated in the documentation for plugin developers; +* The freedom of the plugin developer to name and rename classes ends where the information in question is stored in the database as, e.g., node attributes. +* The system is designed with the possibility of plugin versioning in mind, however this is not implemented yet. +* In principle, two different plugins can give the same name to an entry point, creating ambiguity when trying to load the associated objects. 
Plugin development guidelines in the documentation will advise on how to avoid this problem, and this is addressed via the use of a centralized registry of known AiiDA plugins. +* Plugins can potentially contain malicious or otherwise dangerous code. In the registry of AiiDA plugins, we try to flag plugins that we know are safe to be used. diff --git a/docs/source/developer_guide/plugins.rst b/docs/source/developer_guide/plugins.rst index d8520a507c..46b915622a 100644 --- a/docs/source/developer_guide/plugins.rst +++ b/docs/source/developer_guide/plugins.rst @@ -12,4 +12,3 @@ Plugin development plugins/documenting plugins/plugin_tests plugins/publish - plugins/update_plugin diff --git a/docs/source/developer_guide/plugins/basics.rst b/docs/source/developer_guide/plugins/basics.rst index 31b51f5a6f..5ae649c343 100644 --- a/docs/source/developer_guide/plugins/basics.rst +++ b/docs/source/developer_guide/plugins/basics.rst @@ -4,72 +4,28 @@ Basics ====== -What a plugin Is ----------------- +Nomenclature +------------ -An AiiDA plugin is a `python package `_ that provides a set of extensions to AiiDA. +An AiiDA plugin is an extension of AiiDA, announcing itself to ``aiida-core`` by means of a new :ref:`entry point `. -AiiDA plugins can use :ref:`entry points ` in order to make the ``aiida-core`` package aware of the extensions. +AiiDA plugins can be bundled and distributed in a `python package `_ that provides a set of extensions to AiiDA. .. note:: - In the python community, the term 'package' is used rather loosely. + The python community uses the term 'package' rather loosely. Depending on context, it can refer to a collection of python modules or it may, in addition, include the files necessary for building and installing the package. .. _packages: https://docs.python.org/2/tutorial/modules.html?highlight=package#packages -Goals ------ - -The plugin system was designed with the following goals in mind. - -* **Sharing of workflows and extensions**: a workflow or extension is written as a python package, distributed as a zip source archive, python ``egg`` or PyPI package. There is extensive documentation available for how to distribute python packages `here `_. - -* **Ease of use**: plugins can be found in an online curated list of plugins and installed with one simple command. This process is familiar to every regular python user. - -* **Decouple development and update cycles of AiiDA and plugins**: since plugins are separate python packages, they can be developed in a separate code repository and updated when the developer sees fit without a need to update AiiDA. Similarly, if AiiDA is updated, plugins may not need to release a new version. - -* **Promote modular design in AiiDA development**: separating plugins into their own python packages ensures that plugins can not (easily) access parts of the AiiDA code which are not part of the public API, enabling AiiDA development to stay agile. The same applies to plugins relying on other plugins. - -* **Low overhead for developers**: plugin developers can write their extensions the same way they would write any python code meant for distribution. - -* **Automatic AiiDA setup and testing of plugins**: installation of complete python environments consisting of many packages can be automated, provided all packages use ``setuptools`` as a distribution tool. This enables use of AiiDA in a service-based way using, e.g., docker images. 
At the same time it becomes possible to create automated tests for any combination of plugins, as long as the plugins provide test entry points. - - -Design guidelines ------------------- - -* **Start simple.**: make use of existing classes like :py:class:`~aiida.orm.nodes.process.calculation.calcjob.CalcJobNode`, :py:class:`~aiida.orm.nodes.data.dict.Dict`, :py:class:`~aiida.orm.nodes.data.singlefile.SinglefileData`, ... Write only what is necessary to pass information from and to AiiDA. - -* **Don't break data provenance.**: store *at least* what is needed for full reproducibility. - -* **Parse what you want to query for.**: make a list of which information to: - - #. parse into the database for querying (:py:class:`~aiida.orm.nodes.data.dict.Dict`, ...) - #. store in files for safe-keeping (:py:class:`~aiida.orm.nodes.data.singlefile.SinglefileData`, ...) - #. leave on the remote computer (:py:class:`~aiida.orm.nodes.data.remote.RemoteData`, ...) - -* **Expose the full functionality.**: standardization is good but don't artificially limit the power of a code you are wrapping - or your users will get frustrated. If the code can do it, there should be *some* way to do it with your plugin. - - What a plugin can do -------------------- -* Add new classes to AiiDA's unified interface, including: - - - calculations - - parsers - - data types - - schedulers - - transports - - db importers - - db exporters - - subcommands to some ``verdi`` commands - +* Add a new class to AiiDA's :ref:`entry point groups `, including: calculations, parsers, workflows, data types, verdi commands, schedulers, transports and importers/exporters from external databases. This typically involves subclassing the respective base class AiiDA provides for that purpose. -* Install separate commandline and/or GUI executables -* Depend on any number of other plugins (the required versions must not clash with AiiDA's requirements) +* Install new commandline and/or GUI executables +* Depend on, and build on top of any number of other plugins (as long as their requirements do not clash) .. _plugins.maynot: @@ -93,13 +49,70 @@ We will advise on how to proceed. .. _registry: https://github.com/aiidateam/aiida-registry -Limitations ------------ - -The chosen approach to plugins has some limitations: +Design guidelines ------------------ -* In the current version the interface for entry point objects is enforced implicitly by the way the object is used. It is the responsibility of the plugin developer to test for compliance, especially if the object is not derived from the recommended base classes provided by AiiDA. This is to be clearly communicated in the documentation for plugin developers; -* The freedom of the plugin developer to name and rename classes ends where the information in question is stored in the database as, e.g., node attributes. -* The system is designed with the possibility of plugin versioning in mind, however this is not implemented yet. -* In principle, two different plugins can give the same name to an entry point, creating ambiguity when trying to load the associated objects. Plugin development guidelines in the documentation will advise on how to avoid this problem, and this is addressed via the use of a centralized registry of known AiiDA plugins. -* Plugins can potentially contain malicious or otherwise dangerous code. In the registry of AiiDA plugins, we try to flag plugins that we know are safe to be used. +Wrapping an external code ......................... 
+ +In order to wrap an external simulation code for use in AiiDA, you will need to write a calculation input plugin (subclassing the :py:class:`~aiida.engine.CalcJob` class) and an output parser plugin (subclassing the :py:class:`~aiida.parsers.Parser` class): + + * | **Start simple.** + | Make use of existing classes like :py:class:`~aiida.orm.nodes.data.dict.Dict`, :py:class:`~aiida.orm.nodes.data.singlefile.SinglefileData`, ... + | Write only what is necessary to pass information from and to AiiDA. + * | **Don't break data provenance.** + | Store *at least* what is needed for full reproducibility. + * | **Parse what you want to query for.** + | Make a list of which information to: + + #. parse into the database for querying (:py:class:`~aiida.orm.nodes.data.dict.Dict`, ...) + #. store in files for safe-keeping (:py:class:`~aiida.orm.nodes.data.singlefile.SinglefileData`, ...) + #. leave on the remote computer (:py:class:`~aiida.orm.nodes.data.remote.RemoteData`, ...) + + * | **Expose the full functionality.** + | Standardization is good but don't artificially limit the power of a code you are wrapping - or your users will get frustrated. + | If the code can do it, there should be *some* way to do it with your plugin. + + * | **Don't rely on AiiDA internals.** + | AiiDA's :ref:`public python API` includes anything that you can import via ``from aiida.module import thing``. + | Functionality at deeper nesting levels is not considered part of the public API and may change between minor AiiDA releases, forcing you to update your plugin. + +Folder structure +................ + +While it is up to you to decide the folder structure for your plugin, here is what a typical AiiDA plugin package may look like (see also the `aiida-diff`_ demo plugin):: + + aiida-mycode/ - distribution folder + aiida_mycode/ - toplevel package (from aiida_mycode import ..) + __init__.py + calculations/ + __init__.py + mycode.py - contains MycodeCalculation + parsers/ + __init__.py + mycode.py - contains MycodeParser + data/ + __init__.py + mydat.py - contains MyData (supports code specific format) + commands/ + __init__.py + mydat.py - contains visualization subcommand for MyData + workflows/ + __init__.py + mywf.py - contains a basic workflow using mycode + ... + setup.py - install script + setup.json - install configuration + ... + +A minimal plugin package instead might look like:: + + aiida-minimal/ + aiida_minimal/ + __init__.py + simpledata.py + setup.py + setup.json + + +.. _aiida-diff: https://github.com/aiidateam/aiida-diff diff --git a/docs/source/developer_guide/plugins/documenting.rst b/docs/source/developer_guide/plugins/documenting.rst index ad2b03168a..5d9baf2411 100644 --- a/docs/source/developer_guide/plugins/documenting.rst +++ b/docs/source/developer_guide/plugins/documenting.rst @@ -1,8 +1,8 @@ -==================== -Documenting a plugin -==================== +=========================== +Documenting plugin packages +=========================== -If you used the `AiiDA plugin cutter`_, your plugin already comes with a basic +If you used the `AiiDA plugin cutter`_, your plugin package already comes with a basic documentation that just needs to be adjusted to your needs. #. Install the ``docs`` extra:: @@ -29,7 +29,7 @@ documentation that just needs to be adjusted to your needs. requirements file ``docs/requirements_for_rtd.txt`` and the Python configuration file ``docs/source/conf.py`` in Admin => Advanced settings. 
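To make the wrapping guidelines above concrete, here is a minimal, hypothetical sketch of a calculation input plugin following the folder structure shown above (``aiida_mycode`` and all file names are placeholders; written against the ``aiida-core`` 1.x API and to be checked against the :py:class:`~aiida.engine.CalcJob` reference documentation)::

    # aiida_mycode/calculations/mycode.py - a sketch, not a definitive implementation
    from aiida import orm
    from aiida.common.datastructures import CalcInfo, CodeInfo
    from aiida.engine import CalcJob


    class MycodeCalculation(CalcJob):
        """Input plugin wrapping the hypothetical 'mycode' executable."""

        @classmethod
        def define(cls, spec):
            super().define(spec)
            spec.input('parameters', valid_type=orm.Dict, help='Input parameters for mycode.')
            spec.output('results', valid_type=orm.Dict, help='Parsed scalar results.')

        def prepare_for_submission(self, folder):
            # Write the raw input file from the AiiDA input node
            with folder.open('aiida.in', 'w') as handle:
                for key, value in sorted(self.inputs.parameters.get_dict().items()):
                    handle.write('{} {}\n'.format(key, value))

            codeinfo = CodeInfo()
            codeinfo.code_uuid = self.inputs.code.uuid
            codeinfo.stdin_name = 'aiida.in'
            codeinfo.stdout_name = 'aiida.out'

            calcinfo = CalcInfo()
            calcinfo.codes_info = [codeinfo]
            calcinfo.retrieve_list = ['aiida.out']  # parse what you want to query for
            return calcinfo

A matching ``MycodeParser`` (subclassing :py:class:`~aiida.parsers.Parser`) would then read ``aiida.out`` from the retrieved folder and attach the ``results`` ``Dict`` node.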
-Note: When updating the plugin to a new version, remember to update the +Note: When updating the plugin package to a new version, remember to update the version number both in ``setup.json`` and ``aiida_mycode/__init__.py``. .. _aiida plugin cutter: https://github.com/aiidateam/aiida-plugin-cutter diff --git a/docs/source/developer_guide/plugins/entry_points.rst b/docs/source/developer_guide/plugins/entry_points.rst index 13013d9826..ee636ffb50 100644 --- a/docs/source/developer_guide/plugins/entry_points.rst +++ b/docs/source/developer_guide/plugins/entry_points.rst @@ -15,59 +15,22 @@ the entry point specifications are written to a file inside the distribution's can find these entry points by distribution, group and/or name and load the data structure to which it points. -This is the way AiiDA finds and loads classes provided by plugins. +This is the way AiiDA finds plugins and loads the functionality they provide. .. _Entry points: https://setuptools.readthedocs.io/en/latest/setuptools.html#dynamic-discovery-of-services-and-plugins +.. _plugins.aiida_entry_points: + AiiDA Entry Points ------------------- -.. _aiida plugin template: https://github.com/aiidateam/aiida-plugin-template - -This document contains a list of entry point groups AiiDA uses, with an example -usage for each. -In the following, we assume the following folder structure:: - - aiida-mycode/ - distribution folder - aiida_mycode/ - toplevel package (from aiida_myplug import ..) - __init__.py - calcs/ - __init__.py - mycode.py - contains MycodeCalculation - parsers/ - __init__.py - mycode.py - contains MycodeParser - data/ - __init__.py - mydat.py - contains MyData (supports code specific format) - commands/ - __init__.py - mydat.py - contains visualization subcommand for MyData - workflows/ - __init__.py - mywf.py - contains a basic workflow using mycode - ... - setup.py - install script - setup.json - install configuration - ... - - -For a plugin that uses this folder structure, see the `aiida plugin template`_. +AiiDA defines a set of entry point groups that it will search for new functionality provided by plugins. +You can list those groups and their contents via:: -Note, however, that the folder structure inside ``aiida-mycode/`` is entirely up to you. -A very simple plugin might look like:: + verdi plugin list # list all groups + verdi plugin list aiida.calculations # show contents of one group - aiida-mysimple/ - aiida_mysimple/ - __init__.py - simpledata.py - setup.py - setup.json - - -The plugin has to tell AiiDA where to look for the classes to be used as -calculations, parsers, transports, etc. This is done inside ``setup.json`` by way -of the ``entry_points`` keyword:: +Plugin packages can add new entry points through the ``entry_points`` field in the ``setup.json`` file:: ... entry_points={ @@ -77,18 +40,16 @@ of the ``entry_points`` keyword:: ], ... -It is given as a dictionary containing entry point group names as keywords. The list for each entry point group contains entry point specifications. - -A specification in turn is given as a string and consists of two parts, a name and an import path describing where the class is to be imported from. 
The two parts are sparated by an `=` sign:: +Here, ```` can be any of the groups shown in the output of ``verdi plugin list``, and the ```` contains the entry point name and the path to the Python object it points to:: "mycode.mydat = aiida_mycode.data.mydat:MyData" -We *strongly* suggest to start the name of each entry point with the name of -the plugin, ommitting the leading 'aiida-'. -In our example this leads to entry specifications like ``"mycode. = "``, just like the above example. -Exceptions to this rule are schedulers, transports and potentially data ones. Further exceptions can be tolerated in order to provide backwards compatibility if the plugin was in use before aiida-0.9 and its modules were installed in locations which does not make it possible to follow this rule. +We *strongly* suggest to start the name of each entry point with the name of the plugin package (omitting the 'aiida-' prefix). +For a package ``aiida-mycode``, this leads to specifications like ``"mycode. = "``. +Exceptions to this rule can be tolerated if required for backwards compatibility. + +Below, we list the entry point groups defined and searched by AiiDA. -Below, a list of valid entry points recognized by AiiDA follows. ``aiida.calculations`` ---------------------- @@ -165,7 +126,7 @@ Usage:: ``aiida.workflows`` ------------------- -For AiiDA workflows. Instead of putting a workflow somewhere under the ``aiida.workflows`` package, it can now be packaged as a plugin and exposed to aiida as follows: +Package AiiDA workflows as follows: Spec:: @@ -194,8 +155,7 @@ Usage:: ``aiida.cmdline`` ----------------- -For subcommands to verdi commands like ``verdi data mydata``. -Plugin support for commands is possible due to using `click`_. +``verdi`` uses the `click`_ framework, which makes it possible to add new subcommands to existing verdi commands, such as ``verdi data mydata``. AiiDA expects each entry point to be either a ``click.Command`` or ``click.CommandGroup``. @@ -230,7 +190,7 @@ Usage: ``aiida.tools.dbexporters`` --------------------------- -If your plugin adds support for exporting to an external database, use this entry point to have aiida find the module where you define the necessary functions. +If your plugin package adds support for exporting to an external database, use this entry point to have aiida find the module where you define the necessary functions. .. Not sure how dbexporters work .. .. Spec:: @@ -244,7 +204,7 @@ If your plugin adds support for exporting to an external entr ``aiida.tools.dbimporters`` --------------------------- -If your plugin adds support for importing from an external database, use this entry point to have aiida find the module where you define the necessary functions. +If your plugin package adds support for importing from an external database, use this entry point to have aiida find the module where you define the necessary functions. .. .. Spec:: .. @@ -259,7 +219,7 @@ If your plugin adds support for importing from an external entr ``aiida.schedulers`` -------------------- -For scheduler plugins. Note that the entry point name is not prefixed by the plugin name. This is because typically a scheduler should be distributed in a plugin on its own, and only one plugin per scheduler should be necessary. +We recommend naming the plugin package after the scheduler (e.g. 
``aiida-myscheduler``), so that the entry point name can simply equal the name of the scheduler: Spec:: @@ -280,7 +240,8 @@ Usage: The scheduler is used in the familiar way by entering 'myscheduler' as th ``aiida.transports`` -------------------- -Like schedulers, transports are supposed to be distributed in a separate plugin. Therefore we will again omit the plugin's name in the entry point name. +``aiida-core`` ships with two modes of transporting files and folders to remote computers: ``ssh`` and ``local`` (stub for when the remote computer is actually the same). +We recommend naming the plugin package after the mode of transport (e.g. ``aiida-mytransport``), so that the entry point name can simply equal the name of the transport: Spec:: @@ -301,7 +262,7 @@ Usage:: from aiida.plugins import TransportFactory transport = TransportFactory('mytransport') -Jus like one would expect, when a computer is setup, ``mytransport`` can be given as the transport option. +When setting up a new computer, specify ``mytransport`` as the transport mode. .. _click: https://click.pocoo.org/6/ .. _aiida-verdi: https://github.com/DropD/aiida-verdi diff --git a/docs/source/developer_guide/plugins/plugin_tests.rst b/docs/source/developer_guide/plugins/plugin_tests.rst index a5e6c832d7..105b71e7c6 100644 --- a/docs/source/developer_guide/plugins/plugin_tests.rst +++ b/docs/source/developer_guide/plugins/plugin_tests.rst @@ -3,8 +3,23 @@ Testing AiiDA plugins ===================== -When developing a plugin it is important to write tests. -We recommend using the `pytest`_ framework, while the `unittest`_ framework is also supported. +We highly recommend writing tests for your AiiDA plugins and running continuous integration tests using free platforms like `GitHub Actions `_. + +We recommend the following folder structure for AiiDA plugin packages:: + + aiida-mycode/ - distribution folder + aiida_mycode/ - plugin package + tests/ - tests directory (possibly with subdirectories) + +.. note:: + Keeping the tests outside the plugin package keeps the distribution of your plugin package light. + +.. _ghactions: https://github.com/features/actions + +Using the pytest framework +-------------------------- + +We recommend the `pytest`_ framework for testing AiiDA plugins. One concern when running tests for AiiDA plugins is to separate the test environment from your production environment. Depending on the kind of test, each should even be run against a fresh AiiDA database. @@ -33,16 +48,10 @@ If you prefer to run tests on an existing profile, say ``test_profile``, simply In order to prevent accidental data loss, AiiDA only allows to run tests on profiles whose name starts with ``test_``. - .. _pytest: https://pytest.org .. _unittest: https://docs.python.org/library/unittest.html .. _fixture: https://docs.pytest.org/en/latest/fixture.html -Using the pytest framework -------------------------- - -We recommend the `pytest`_ framework for testing AiiDA plugins. - AiiDA's fixtures ^^^^^^^^^^^^^^^^ @@ -58,9 +67,9 @@ For example: * The :py:func:`~aiida.manage.tests.pytest_fixtures.clear_database` fixture depends on the :py:func:`~aiida.manage.tests.pytest_fixtures.aiida_profile` fixture and tells the received :py:class:`~aiida.manage.tests.TestManager` instance to reset the database. This fixture lets each test start in a fresh AiiDA environment. * The :py:func:`~aiida.manage.tests.pytest_fixtures.temp_dir` fixture returns a temporary directory for file operations and deletes it after the test is finished. - * ... 
you may want to add your own fixtures tailored for your plugin to set up specific ``Data`` nodes & more. + * ... you may want to add your own fixtures tailored for your plugins to set up specific ``Data`` nodes & more. -In order to make these fixtures available to your tests, add them to your ``conftest.py`` file at the root level of your plugin as follows:: +In order to make these fixtures available to your tests, add them to your ``conftest.py`` file at the root level of your plugin package as follows:: import pytest pytest_plugins = ['aiida.manage.tests.pytest_fixtures'] @@ -92,7 +101,7 @@ You can now start writing tests e.g. in a ``test_calculations.py`` file:: # check outputs of calculation assert result['...'] == ... -Feel free to check out the tests of the `aiida-diff`_ demo plugin. +Feel free to check out the tests of the `aiida-diff`_ demo plugin package. .. _conftest: https://docs.pytest.org/en/stable/fixture.html?highlight=conftest#conftest-py-sharing-fixture-functions .. _aiida-diff: https://github.com/aiidateam/aiida-diff/ @@ -115,7 +124,6 @@ Using the unittest framework The ``unittest`` package is included in the python standard library and is widely used despite its limitations. -It is also still used for testing ``aiida-core``. In analogy to the fixtures of ``pytest``, for ``unittest`` we provide a :py:class:`aiida.manage.tests.unittest_classes.PluginTestCase` class that your test cases can inherit from. diff --git a/docs/source/developer_guide/plugins/publish.rst b/docs/source/developer_guide/plugins/publish.rst index 017ea480b1..e3c03d6648 100644 --- a/docs/source/developer_guide/plugins/publish.rst +++ b/docs/source/developer_guide/plugins/publish.rst @@ -1,41 +1,27 @@ -=================== -Publishing a plugin -=================== +=========================== +Publishing a plugin package +=========================== .. _plugins.get_listed: 1. Choose a name ---------------- -The naming convention for AiiDA plugins is ``aiida-mycode`` for the plugin -and ``aiida_mycode`` for the corresponding python package, leading to the -following folder structure:: +The naming convention for AiiDA plugin packages is ``aiida-mycode`` for the plugin distribution on `PyPI`_ and ``aiida_mycode`` for the corresponding python package, leading to the following folder structure:: aiida-mycode/ aiida_mycode/ __init__.py -This marks your plugin as an AiiDA package and makes it easy to find on package indices like `PyPI`_. - **Note:** Python packages cannot contain dashes, thus the underscore. -2. Get Your Plugin Listed +2. Add to plugin registry ------------------------- -AiiDA plugins should be listed on the AiiDA plugin `registry`_ to -avoid name-clashes with other plugins. +AiiDA plugin packages should be listed on the AiiDA plugin `registry`_ to avoid name-clashes with other plugins. -If you wish to get your plugin listed on the official registry for AiiDA -plugins, you will provide the following keyword arguments as key-value pairs in -a ``setup.json`` or ``setup.yaml``. It is recommended to have setup.py -read the keyword arguments from that file:: - - aiida-myplugin/ - aiida_myplugin/ - ... - setup.py - setup.json # or setup.yaml +If you wish to get your plugin package listed on the official plugin registry, please provide the following keyword arguments as key-value pairs in a ``setup.json`` or ``setup.yaml`` file. 
* ``name`` * ``author`` @@ -49,18 +35,20 @@ read the keyword arguments from that file:: * ``entry_points`` * ``scripts`` (optional) -Now, fork the plugin `registry`_ repository, fill in your plugin's information -in the same fashion as the plugins already registered, and create a pull -request. The registry will allow users to discover your plugin using ``verdi -plugin search`` (note: the latter verdi command is not yet implemented in -AiiDA). +It is recommended to have your ``setup.py`` file simply read the keyword arguments from the ``setup.json``:: -3. Get Your Plugin On PyPI --------------------------- + aiida-myplugin/ + aiida_myplugin/ + ... + setup.py + setup.json # or setup.yaml + +Now, fork the plugin `registry`_ repository, fill in the information for your plugin package, and create a pull request. -For packaging and distributing AiiDA plugins, we recommend to follow existing -`guidelines for packaging python `_, -which include making the plugin available on the `python package index `_. +3. Upload to PyPI ----------------- + +For packaging and distributing AiiDA plugins, we recommend to follow existing `guidelines for packaging python `_, which include making the plugin available on the `python package index `_. This makes it possible for users to simply ``pip install aiida-myplugin``. Our suggested layout:: @@ -74,14 +62,8 @@ Our suggested layout:: setup.py installation script setup.json contains requirements, metainformation, etc -Note: In principle, ``aiida-compute`` could contain and install multiple packages. - -Incidentally a distribution can contain and install more than one package at a time. - -The most user-friendly way to distribute a package is to create such a -distribution and uploading it to `PyPI`_. Users then can simply install the -package(s) by running ``pip ``. - +Note: In principle, the ``aiida-compute`` folder could contain and install multiple python packages. +We recommend against this practice, unless there are good reasons to keep multiple packages in the same repository. .. _pypi: https://pypi.python.org .. _packaging: https://packaging.python.org/distributing/#configuring-your-project diff --git a/docs/source/developer_guide/plugins/quickstart.rst b/docs/source/developer_guide/plugins/quickstart.rst index a57f163986..16dfb97a0a 100644 --- a/docs/source/developer_guide/plugins/quickstart.rst +++ b/docs/source/developer_guide/plugins/quickstart.rst @@ -6,21 +6,21 @@ You have a code and would like to use it from AiiDA? You need a special data type, parser, scheduler, ... that is not available? Then you'll need to write an **AiiDA plugin**. -Let's get started with creating a new plugin ``aiida-mycode``. +Let's get started with creating a new plugin package ``aiida-mycode``. - 0. At least once, :ref:`install an existing aiida plugin ` to make sure this works. + 0. At least once, :ref:`install an existing aiida plugin package ` to make sure this works. 1. Check on the `aiida plugin registry `_ that your desired plugin name is still available - #. Use the `AiiDA plugin cutter `_ to jumpstart your plugin:: + #. Use the `AiiDA plugin cutter `_ to jumpstart your plugin package:: pip install cookiecutter cookiecutter https://github.com/aiidateam/aiida-plugin-cutter.git # follow instructions ... cd aiida-mycode - #. Install your new plugin:: + #. Install your new plugin package:: workon # if you have one pip install -e . 
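Putting the pieces together, a minimal ``setup.py`` for the hypothetical ``aiida-mycode`` package might register its entry points as follows (a sketch only; in practice you would read these keyword arguments from ``setup.json`` as recommended above)::

    # setup.py - a sketch; names and versions are placeholders
    from setuptools import find_packages, setup

    setup(
        name='aiida-mycode',
        version='0.1.0',
        packages=find_packages(),
        install_requires=['aiida-core>=1.0.0'],
        entry_points={
            'aiida.calculations': [
                'mycode.mycode = aiida_mycode.calculations.mycode:MycodeCalculation',
            ],
            'aiida.parsers': [
                'mycode.mycode = aiida_mycode.parsers.mycode:MycodeParser',
            ],
        },
    )

After ``pip install -e .`` and ``reentry scan``, the calculation class can be loaded with ``CalculationFactory('mycode.mycode')``.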
@@ -30,8 +30,7 @@ That's it - now you can ``import aiida_mycode`` and start developing your plugin A few things to keep in mind: * Be sure to update the `setup.json`_, in particular the license and version number - * :ref:`Get your plugin listed ` as soon as possible to - reserve your plugin name and to inform others of your ongoing development + * :ref:`Get your plugin package listed ` as soon as possible to reserve your plugin name and to inform others of your ongoing development .. _setup.json: https://github.com/aiidateam/aiida-plugin-template/blob/master/setup.json .. _registry: https://github.com/aiidateam/aiida-registry diff --git a/docs/source/developer_guide/plugins/update_plugin.rst b/docs/source/developer_guide/plugins/update_plugin.rst deleted file mode 100644 index 4685c38a62..0000000000 --- a/docs/source/developer_guide/plugins/update_plugin.rst +++ /dev/null @@ -1,101 +0,0 @@ -Updating an Existing Plugin -============================ - -This document describes the process of updating an AiiDA plugin written using -the old plugin system (pre AiiDA version 0.8) to the current plugin system. - -Once the update is complete, make sure to :ref:`get your plugin listed `. - -Folder structure ------------------ - -Old plugin system:: - - aiida/ - orm/ - calculation/ - job/ - myplugin/ - __init__.py - mycalc.py - myothercalc.py - parsers/ - plugins/ - myplugin/ - __init__.py - myparser.py - myotherparser.py - data/ - myplugin/ - __init__.py - mydata.py - tools/ - codespecific/ - myplugin/ - __init__.py - ... - -Turns into:: - - aiida-myplugin/ - aiida_myplugin/ - __init__.py - calculations/ - __init__.py - mycalc.py - myothercalc.py - parsers/ - __init__.py - myparser.py - myotherparser.py - data/ - __init__.py - mydata.py - tools/ - __init__.py - ... - -Entry points -------------- - -If you are converting a plugin from the old system to new new system, the name -of your entry points must correspond to where your plugin module was installed -inside the AiiDA package. *Otherwise, your plugin will not be backwards -compatible*. For example, if you were using a calculation as:: - - from aiida.orm.calculation.job.myplugin.mycalc import MycalcCalculation - # or - CalculationFactory('myplugin.mycalc') - -Then in ``setup.py``:: - - setup( - ..., - entry_points: { - 'aiida.calculations': [ - 'myplugin.mycalc = aiida_myplugin.calculations.mycalc:MycalcCalculation' - ], - ... - }, - ... - ) - -As you see, the name of the entry point matches the argument to the factory method. - -import statements ------------------- - -If you haven't done so already, now would be a good time to search and replace -any import statements that refer to the old locations of your modules inside -AiiDA. We recommend to change them to absolute imports from your top-level -package: - -old:: - - from aiida.tools.codespecific.myplugin.thistool import this_convenience_func - -new:: - - from aiida_myplugin.tools.thistool import this_convenience_func - - diff --git a/docs/source/get_started/index.rst b/docs/source/get_started/index.rst index b71a7f837f..7b103a1b7b 100644 --- a/docs/source/get_started/index.rst +++ b/docs/source/get_started/index.rst @@ -6,21 +6,21 @@ Install Plugins While the ``aiida-core`` package provides the workflow engine and database model, it relies on *plugins* for connecting to specific simulation codes. -Search for AiiDA plugins on the `AiiDA plugin registry `_. If a plugin for your code does not yet exist, you may need to :ref:`write one `. 
+Search for AiiDA plugin packages on the `AiiDA plugin registry `_. +If a plugin package for your code does not yet exist, you may need to :ref:`write one `. -Most plugins are hosted on the `Python Package Index `_ and can be installed as follows:: +Most plugin packages are hosted on the `Python Package Index `_ and can be installed as follows:: pip install aiida-diff # install 'aiida-diff' plugin from PyPI reentry scan -r aiida # notify aiida of new entry points -If no PyPI package is available for a plugin, you can install -the plugin directly from a source code repository, e.g.:: +If no PyPI package is available for a plugin, you can install the plugin package directly from a source code repository, e.g.:: git clone https://github.com/aiidateam/aiida-diff pip install aiida-diff # install 'aiida-diff' plugin from local folder reentry scan -r aiida # notify aiida of new entry points -After installing new plugins, **restart the daemon** using ``verdi daemon restart``. +After installing new plugin packages, update the reentry cache using ``reentry scan`` and **restart the daemon** using ``verdi daemon restart``. .. note:: The reentry cache can also be updated from python when access to the commandline is not available (e.g. in jupyter notebooks). diff --git a/docs/source/install/installation.rst b/docs/source/install/installation.rst index 4e6a20b717..6cabea822e 100644 --- a/docs/source/install/installation.rst +++ b/docs/source/install/installation.rst @@ -79,13 +79,12 @@ There are additional optional packages that you may want to install, which are g * ``ssh_kerberos``: adds support for ssh transport authentication through Kerberos * ``REST``: allows a REST server to be run locally to serve AiiDA data * ``docs``: tools to build the documentation - * ``advanced_plotting``: tools for advanced plotting * ``notebook``: jupyter notebook - to allow it to import AiiDA modules * ``testing``: python modules required to run the automatic unit tests In order to install any of these package groups, simply append them as a comma separated list in the ``pip`` install command:: - (aiida) $ pip install -e aiida-core[atomic_tools,docs,advanced_plotting] + (aiida) $ pip install -e aiida-core[atomic_tools,docs] .. note:: If you are installing the optional ``ssh_kerberos`` and you are on Ubuntu you might encounter an error related to the ``gss`` package. To fix this you need to install the ``libffi-dev`` and ``libkrb5-dev`` packages:: @@ -120,22 +119,18 @@ and set up the database manually as explained below. Database setup -------------- -AiiDA uses a database to store the nodes, node attributes and other -information, allowing the end user to perform fast queries of the results. -Currently, only `PostgreSQL`_ is allowed as a database backend. +AiiDA uses a database to store the nodes, node attributes and other information, allowing the end user to perform fast queries of the results. +Currently, the highly performant `PostgreSQL`_ database is supported as a database backend. .. _PostgreSQL: https://www.postgresql.org/downloads -To manually create the database for AiiDA, you need to run the program ``psql`` -to interact with postgres. -On most operating systems, you need to do so as the ``postgres`` user that was -created upon installing the software. +To manually create the database for AiiDA, you need to run the program ``psql`` to interact with postgres. +On most operating systems, you need to do so as the ``postgres`` user that was created upon installing the software. 
To assume the role of ``postgres`` run as root:: su - postgres -(or, equivalently, type ``sudo su - postgres``, depending on your distribution) -and launch the postgres program:: +(or, equivalently, type ``sudo su - postgres``, depending on your distribution) and launch the postgres program:: psql @@ -144,12 +139,13 @@ Create a new database user account for AiiDA by running:: CREATE USER aiida WITH PASSWORD ''; replacing ```` with a password of your choice. -Make sure to remember it, as you will need it again when you configure AiiDA to use this database through ``verdi setup``. + +You will need to provide the password again when you configure AiiDA to use this database through ``verdi setup``. If you want to change the password you just created use the command:: ALTER USER aiida PASSWORD ''; -Next we create the database itself. Keep in mind that we enforce the UTF-8 encoding and specific locales:: +Next, we create the database itself. We enforce the UTF-8 encoding and specific locales:: CREATE DATABASE aiidadb OWNER aiida ENCODING 'UTF8' LC_COLLATE='en_US.UTF-8' LC_CTYPE='en_US.UTF-8' TEMPLATE=template0; @@ -168,8 +164,8 @@ If everything worked well, you should get no error and see the prompt of the ``p If you use the same names as in the example commands above, then during the ``verdi setup`` phase the following parameters will apply to the newly created database:: Database engine: postgresql_psycopg2 - PostgreSQL host: localhost - PostgreSQL port: 5432 + Database host: localhost + Database port: 5432 AiiDA Database name: aiidadb AiiDA Database user: aiida AiiDA Database password: @@ -181,44 +177,45 @@ If you use the same names as in the example commands above, then during the ``ve instructions :ref:`here`. -Database setup using Unix sockets -+++++++++++++++++++++++++++++++++ +Database setup using 'peer' authentication +++++++++++++++++++++++++++++++++++++++++++ -Instead of using passwords to protect access to the database -(which could be used by other users on the same machine), -PostgreSQL allows password-less logins via Unix sockets. +On Ubuntu Linux, the default PostgreSQL setup is configured to use ``peer`` authentication, which allows password-less login via local Unix sockets. +In this mode, PostgreSQL compares the Unix user connecting to the socket with its own database of users and allows a connection if a matching user exists. -In this scenario PostgreSQL compares the user connecting to the socket with its -own database of users and will allow a connection if a matching user exists. +.. note:: + This is an alternative route to set up your database - the standard approach will work on Ubuntu just as well. -Assume the role of ``postgres`` by running the following as root:: +Below we are going to take advantage of the command-line utilities shipped on Ubuntu to simplify creating users and databases compared to issuing the SQL commands directly. - su - postgres +Assume the role of ``postgres``:: + + sudo su postgres -Create a database user with the **same name** as the user you are using to run AiiDA (usually your login name):: +Create a database user with the **same name** as the UNIX user who will be running AiiDA (usually your login name):: createuser replacing ```` with your username. 
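Once the database has been created (next step), a quick way to verify the peer-authenticated connection from Python is via ``psycopg2``, which is installed as a dependency of ``aiida-core``; a sketch assuming the ``aiidadb`` name from the example::

    import psycopg2

    # With 'peer' authentication there is no password: omitting host and user
    # makes psycopg2 connect through the local Unix socket as the current Unix user.
    connection = psycopg2.connect(dbname='aiidadb')
    cursor = connection.cursor()
    cursor.execute('SELECT current_user, version()')
    print(cursor.fetchone())
    connection.close()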
-Next, create the database itself making sure that your user is the owner:: +Next, create the database itself with your user as the owner:: createdb -O aiidadb -To test if the database was created successfully, you can run the following command as your user in a bash terminal:: +Exit the shell to go back to your login user. +To test if the database was created successfully, try:: psql aiidadb -Make sure to leave the host, port and password empty when specifying the parameters during the ``verdi setup`` phase -and specify your username as the *AiiDA Database user*:: +During the ``verdi setup`` phase, use ``!`` to leave host empty and specify your Unix user name as the *AiiDA Database user*.:: Database engine: postgresql_psycopg2 - PostgreSQL host: - PostgreSQL port: + Database host: ! + Database port: 5432 AiiDA Database name: aiidadb AiiDA Database user: - AiiDA Database password: + AiiDA Database password: "" Setup instructions diff --git a/docs/source/install/updating_installation.rst b/docs/source/install/updating_installation.rst index f687d46e0f..8a77d12b73 100644 --- a/docs/source/install/updating_installation.rst +++ b/docs/source/install/updating_installation.rst @@ -3,45 +3,47 @@ ************** Updating AiiDA ************** - .. _updating_instructions: -Instructions -============ - -.. warning:: +Generic update instructions +=========================== - The following instructions are how to update from ``v0.12.*`` to ``v1.0.0``. - Each version increase may come with its own necessary migrations and you should only ever update the version by one at a time. - To find the instructions for older versions, refer to the :ref:`table below`. - -1. Finish all running calculations. After migrating your database, you will not be able to resume unfinished calculations. Data of finished calculations will of course be automatically migrated. -2. Finish all running legacy workflows. The legacy workflows are completely deprecated and all data will be removed from your database, so make sure to create a backup (see point 5). -3. Enter the python environment where AiiDA is installed -4. Stop the daemon using ``verdi daemon stop`` -5. Create a backup of your :ref:`database and repository` +1. Enter the python environment where AiiDA is installed +2. Finish all running calculations. After migrating your database, you will not be able to resume unfinished calculations. Data of finished calculations will of course be automatically migrated. +3. Stop the daemon using ``verdi daemon stop`` +4. :ref:`Create a backup of your database and repository` .. warning:: - Once you have migrated your database, you can no longer go back to an older version of ``aiida-core``, unless you restore your database and repository from a backup of course. - In addition, the data migration can take quite some time depending on the size of your database, so please be patient. - Big databases of multiple millions of nodes can take up to a few hours to migrate. + Once you have migrated your database, you can no longer go back to an older version of ``aiida-core`` (unless you restore your database and repository from a backup). -6. Update your ``aiida-core`` installation +5. Update your ``aiida-core`` installation - If you have installed AiiDA through ``pip`` simply run: ``pip install --upgrade aiida-core`` - If you have installed from the git repository using ``pip install -e .``, first delete all the ``.pyc`` files (``find . -name "*.pyc" -delete``) before updating your branch. -7. 
Finally, after having upgraded the installation, migrate your database with ``verdi -p database migrate`` +6. Migrate your database with ``verdi -p database migrate``. + Depending on the size of your database and the number of migrations to perform, data migration can take time, so please be patient. After the database migration finishes, you will be able to continue working with your existing data. -However, :ref:`backwards incompatible changes` were introduced in the python API, so you probably will have to update your code and installed plugins. + +.. note:: + If your update involved a change in the major version number of ``aiida-core``, expect :ref:`backwards incompatible changes` and check whether you also need to update your installed plugin packages. +Updating from 0.12.* to 1.* +=========================== + +Besides the generic update instructions, the following applies: + + * Finish all running legacy workflows. + The legacy workflows are completely deprecated and all data will be removed from your database, so make sure to create a backup (see point 4). + * The upgrade involves several long-running migrations. Migrating databases containing millions of nodes can take a few hours. + .. _updating_backward_incompatible_changes: -Backwards incompatible changes -============================== +Breaking changes from 0.12.* to 1.* =================================== The following list covers the most important backward incompatible changes between ``aiida-core==0.12.*`` and ``aiida-core==1.0.0``. @@ -269,6 +271,7 @@ Update instructions for older versions can be found in the documentation of the * `0.5.* Django`_ * `0.4.* Django`_ + .. _0.11.*: https://aiida-core.readthedocs.io/en/v0.12.2/installation/updating.html#updating-from-0-11-to-0-12-0 .. _0.10.*: http://aiida-core.readthedocs.io/en/v0.10.0/installation/updating.html#updating-from-0-9-to-0-10-0 .. 
_0.9.*: http://aiida-core.readthedocs.io/en/v0.10.0/installation/updating.html#updating-from-0-9-to-0-10-0 diff --git a/docs/source/nitpick-exceptions b/docs/source/nitpick-exceptions index 3e5c3007a3..46dc269e12 100644 --- a/docs/source/nitpick-exceptions +++ b/docs/source/nitpick-exceptions @@ -1,243 +1,86 @@ -# built-in python exceptions -py:exc ArithmeticError -py:exc AssertionError -py:exc AttributeError -py:exc BaseException -py:exc BufferError -py:exc DeprecationWarning -py:exc EOFError -py:exc EnvironmentError -py:exc Exception -py:exc FloatingPointError -py:exc FutureWarning -py:exc GeneratorExit -py:exc IOError -py:exc ImportError -py:exc ImportWarning -py:exc IndentationError -py:exc IndexError -py:exc KeyError -py:exc KeyboardInterrupt -py:exc LookupError -py:exc MemoryError -py:exc NameError -py:exc NotImplementedError -py:exc OSError -py:exc OverflowError -py:exc PendingDeprecationWarning -py:exc ReferenceError -py:exc RuntimeError -py:exc RuntimeWarning -py:exc StandardError -py:exc StopIteration -py:exc SyntaxError -py:exc SyntaxWarning -py:exc SystemError -py:exc SystemExit -py:exc TabError -py:exc TypeError -py:exc UnboundLocalError -py:exc UnicodeDecodeError -py:exc UnicodeEncodeError -py:exc UnicodeError -py:exc UnicodeTranslateError -py:exc UnicodeWarning -py:exc UserWarning -py:exc VMSError -py:exc ValueError -py:exc Warning -py:exc WindowsError -py:exc ZeroDivisionError +### python builtins -# python builtins -py:class classmethod -py:class dict -py:class callable -py:class filter -py:class list -py:class object -py:class unittest.case.TestCase -py:class unittest.runner.TextTestRunner -py:class unittest2.case.TestCase -py:meth unittest.TestLoader.discover -py:meth copy.copy -py:class abc.ABC -py:class exceptions.Exception -py:class exceptions.ValueError -py:class exceptions.BaseException -# repeat for Python 3 -py:class Exception -py:class ValueError -py:class BaseException -py:class str -py:class bytes -py:class tuple -py:class int -py:class float -py:class bool -py:class basestring -py:class None -py:class type -py:class typing.Final -# this is required for metaclasses(?) -py:class __builtin__.bool -py:class __builtin__.float -py:class __builtin__.int -py:class __builtin__.object -py:class __builtin__.str -py:class __builtin__.dict -# ... and the same for Python 3 -py:class builtins.bool -py:class builtins.float -py:class builtins.int -py:class builtins.object -py:class builtins.str -py:class builtins.dict -py:class set +# note: there doesn't seem to be a standard way of indicating a callable in python3 +# https://stackoverflow.com/questions/23571253/how-to-define-a-callable-parameter-in-a-python-docstring +py:class callable -# python builtin objects -py:obj basestring -py:obj bool -py:obj float -py:obj int -py:obj str -py:obj string -py:obj tuple -py:obj None -py:obj bool +# For some reason, "filter" does not seem to be found +py:class filter -# python packages -# Note: These are needed, if they are provided, e.g. -# as types or rtypes without actually being imported -py:class abc.ABCMeta +py:class unittest.case.TestCase +py:class unittest.runner.TextTestRunner -py:exc click.BadParameter -py:exc click.UsageError -py:class click.ParamType -py:class click.core.Group -py:class click.core.Option -py:class click.Command -py:class click.Group -py:class click.Option -py:class click.types.ParamType -py:class click.types.Choice -py:class click.types.IntParamType -py:class click.types.StringParamType -py:class click.types.Path +# required for metaclasses(?) 
+py:class builtins.bool +py:class builtins.float +py:class builtins.int +py:class builtins.object +py:class builtins.str +py:class builtins.dict -py:class concurrent.futures._base.TimeoutError +### AiiDA -py:class distutils.version.Version +# not quite clear why necessary... +py:class WorkChainSpec -py:class docutils.parsers.rst.Directive +### python packages +# Note: These exceptions are needed if +# * the objects are referenced e.g. as param/return types types in method docstrings (without intersphinx mapping) +# * the documentation linked via intersphinx lists the objects at a different (usually lower) import hierarchy +py:class click.core.Group +py:class click.core.Option +py:class click.types.ParamType +py:class click.types.Choice +py:class click.types.IntParamType +py:class click.types.StringParamType +py:class click.types.Path +py:meth click.Option.get_default -py:class enum.Enum -py:class enum.IntEnum +py:class concurrent.futures._base.TimeoutError -py:class flask.app.Flask -py:class flask.json.JSONEncoder -py:class flask_restful.Api -py:class flask_restful.Resource +py:class docutils.parsers.rst.Directive -py:class frozenset +py:class frozenset -py:class logging.Filter -py:class logging.Handler -py:class logging.record -py:class logging.Logger -py:class logging.LoggerAdapter +py:class paramiko.proxy.ProxyCommand -py:class paramiko.proxy.ProxyCommand +# These can be removed once they are properly included in the `__all__` in `plumpy` +py:class plumpy.ports.PortNamespace +py:class plumpy.utils.AttributesDict -py:class StateMachine -py:class plumpy.futures.Future -py:class plumpy.processes.Process -py:class plumpy.process_comms.ProcessLauncher -py:class plumpy.process_spec.ProcessSpec -py:class plumpy.Port -py:class plumpy.Process -py:class plumpy.Communicator -py:class plumpy.RemoteProcessThreadController -py:class plumpy.Bundle -py:class plumpy.workchains.WorkChainSpec -py:class plumpy.WorkChainSpec -py:class plumpy.Persister -py:class plumpy.persistence.Persister -py:class plumpy.PersistenceError -py:class plumpy.ports.Port -py:class plumpy.ports.InputPort -py:class plumpy.ports.OutputPort -py:class plumpy.ports.PortNamespace -py:class plumpy.utils.AttributesDict -py:class plumpy.loaders.DefaultObjectLoader -py:class plumpy.ObjectLoader -py:class plumpy.process_states.Waiting -py:class plumpy.process_comms.RemoteProcessThreadController -py:exc plumpy.TaskRejected -py:meth plumpy.ProcessSpec.output -py:meth plumpy.process_spec.ProcessSpec.expose_inputs -py:meth plumpy.process_spec.ProcessSpec.expose_outputs +py:class topika.Connection -py:class kiwipy.futures.Future -py:class kiwipy.communications.TimeoutError -py:class kiwipy.Communicator -py:class kiwipy.rmq.communicator.RmqThreadCommunicator +py:class tornado.ioloop.IOLoop +py:class tornado.concurrent.Future -py:class topika.Connection +py:class IPython.core.magic.Magics -py:class tornado.ioloop.IOLoop -py:class tornado.concurrent.Future +py:class HTMLParser.HTMLParser +py:class html.parser.HTMLParser -py:class IPython.core.magic.Magics +py:class django.contrib.auth.base_user.AbstractBaseUser +py:class django.contrib.auth.base_user.BaseUserManager +py:class django.contrib.auth.models.AbstractBaseUser +py:class django.contrib.auth.models.BaseUserManager +py:class django.contrib.auth.models.PermissionsMixin +py:class django.core.exceptions.MultipleObjectsReturned +py:class django.core.exceptions.ObjectDoesNotExist +py:class django.db.models.base.Model +py:class django.db.models.manager.Manager +py:class 
django.db.models.query.QuerySet +py:class django.db.migrations.migration.Migration -py:class HTMLParser.HTMLParser -py:class html.parser.HTMLParser +py:class flask.app.Flask -py:class tuple +py:class sqlalchemy.ext.declarative.api.Base +py:class sqlalchemy.ext.declarative.api.Model +py:class sqlalchemy.sql.functions.FunctionElement +py:class sqlalchemy.orm.query.Query +py:class sqlalchemy.orm.util.AliasedClass +py:class sqlalchemy.orm.session.Session +py:exc sqlalchemy.orm.exc.MultipleResultsFound -py:class staticmethod - -py:class django.contrib.auth.base_user.AbstractBaseUser -py:class django.contrib.auth.base_user.BaseUserManager -py:class django.contrib.auth.models.AbstractBaseUser -py:class django.contrib.auth.models.BaseUserManager -py:class django.contrib.auth.models.PermissionsMixin -py:class django.core.exceptions.MultipleObjectsReturned -py:class django.core.exceptions.ObjectDoesNotExist -py:class django.db.models.base.Model -py:class django.db.models.manager.Manager -py:class django.db.models.query.QuerySet -py:class django.db.migrations.migration.Migration - -py:class sqlalchemy.ext.declarative.api.Base -py:class sqlalchemy.ext.declarative.api.Model -py:class sqlalchemy.sql.functions.FunctionElement -py:class sqlalchemy.orm.query.Query -py:class sqlalchemy.orm.util.AliasedClass -py:class sqlalchemy.orm.session.Session -py:exc sqlalchemy.orm.exc.MultipleResultsFound - -py:class sphinx.ext.autodoc.ClassDocumenter - -py:class collections.abc.Mapping -py:class collections.abc.MutableMapping -py:class collections.abc.MutableSequence -py:class collections.abc.Iterator -py:class collections.abc.Sized - -# backend-dependent implementation -py:class WorkChainSpec -py:class aiida.orm.nodes.Node -py:meth aiida.engine.processes.process_spec.ProcessSpec.input -py:meth aiida.engine.processes.process_spec.ProcessSpec.output -py:meth aiida.engine.processes.process_spec.ProcessSpec.outline - -# This comes from ABCMeta -py:meth aiida.orm.groups.Group.get_from_string - -py:mod click -py:class click.Choice -py:func click.Option.get_default +py:class sphinx.ext.autodoc.ClassDocumenter py:class yaml.Dumper py:class yaml.Loader @@ -245,21 +88,10 @@ py:class yaml.dumper.Dumper py:class yaml.loader.Loader py:class yaml.FullLoader py:class yaml.loader.FullLoader - py:class uuid.UUID -# typing -py:class typing.Generic -py:class typing.TypeVar - -# Python 3 complains about this because of orm.Entity.Collection inner class (no idea why) -py:class Collection - - -# psychopg2 py:class psycopg2.extensions.cursor -# Aldjemy exceptions py:class aldjemy.orm.DbNode py:class aldjemy.orm.DbLink py:class aldjemy.orm.DbComputer @@ -270,5 +102,7 @@ py:class aldjemy.orm.DbComment py:class aldjemy.orm.DbLog py:class aldjemy.orm.DbSetting -# Alembic py:class alembic.config.Config + +py:class pgsu.PGSU +py:meth pgsu.PGSU.__init__ diff --git a/docs/source/verdi/verdi_user_guide.rst b/docs/source/verdi/verdi_user_guide.rst index 8019a2031c..abcc992071 100644 --- a/docs/source/verdi/verdi_user_guide.rst +++ b/docs/source/verdi/verdi_user_guide.rst @@ -394,7 +394,7 @@ Below is a list with all available subcommands. Commands: create Export subsets of the provenance graph to file for sharing. inspect Inspect contents of an exported archive without importing it. - migrate Migrate an old export archive file to the most recent format. + migrate Migrate an export archive to a more recent format version. .. _verdi_graph: @@ -436,6 +436,7 @@ Below is a list with all available subcommands. delete Delete a group. 
description Change the description of a group. list Show a list of existing groups. + path Inspect groups of nodes, with delimited label paths. relabel Change the label of a group. remove-nodes Remove nodes from a group. show Show information for a given group. @@ -476,9 +477,11 @@ Below is a list with all available subcommands. addresses. Automatically discovered archive URLs will be downloadeded and added to ARCHIVES for importing + -G, --group GROUP Specify group to which all the import nodes will be added. If such a group does not exist, it will be created automatically. + -e, --extras-mode-existing [keep_existing|update_existing|mirror|none|ask] Specify which extras from the export archive should be imported for nodes that are @@ -491,20 +494,25 @@ Below is a list with all available subcommands. mirror: import all extras and remove any existing extras that are not present in the archive. none: do not import any extras. + -n, --extras-mode-new [import|none] Specify whether to import extras of new nodes: import: import extras. none: do not import extras. + --comment-mode [newest|overwrite] Specify the way to import Comments with identical UUIDs: newest: Only the newest Comments (based on mtime) (default).overwrite: Replace existing Comments with those from the import file. + --migration / --no-migration Force migration of export file archives, if needed. [default: True] + -n, --non-interactive Non-interactive mode: never prompt for input. + --help Show this message and exit. @@ -615,31 +623,38 @@ Below is a list with all available subcommands. Options: -n, --non-interactive Non-interactive mode: never prompt for input. + --profile PROFILE The name of the new profile. [required] - --email TEXT Email address that serves as the user name - and a way to identify data created by it. + --email EMAIL Email address associated with the data you + generate. The email address is exported + along with the data, when sharing it. [required] - --first-name TEXT First name of the user. [required] - --last-name TEXT Last name of the user. [required] - --institution TEXT Institution of the user. [required] + + --first-name NONEMPTYSTRING First name of the user. [required] + --last-name NONEMPTYSTRING Last name of the user. [required] + --institution NONEMPTYSTRING Institution of the user. [required] --db-engine [postgresql_psycopg2] Engine to use to connect to the database. --db-backend [django|sqlalchemy] - Backend type to use to map the database. - --db-host TEXT Hostname to connect to the database. - --db-port INTEGER Port to connect to the database. - --db-name TEXT Name of the database to create. - --db-username TEXT Name of the database user to create. - --db-password TEXT Password to connect to the database. + Database backend to use. + --db-host HOSTNAME Database server host. Leave empty for "peer" + authentication. + + --db-port INTEGER Database server port. + --db-name NONEMPTYSTRING Name of the database to create. + --db-username NONEMPTYSTRING Name of the database user to create. + --db-password TEXT Password of the database user. --su-db-name TEXT Name of the template database to connect to as the database superuser. + --su-db-username TEXT User name of the database super user. --su-db-password TEXT Password to connect as the database superuser. - --repository DIRECTORY Absolute path for the file system - repository. + + --repository DIRECTORY Absolute path to the file repository. --config FILE Load option values from configuration file in yaml format. + --help Show this message and exit. 
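+As a hypothetical illustration of the ``--config`` option above: the option is documented to read values from a YAML file, and one plausible sketch of such a file is shown here, assuming the keys simply mirror the long option names with underscores (all values are placeholders, not suggested defaults):
+
+.. code:: yaml
+
+    profile: my-profile
+    email: aiida@localhost
+    first_name: Ada
+    last_name: Lovelace
+    institution: EPFL
+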
@@ -660,6 +675,7 @@ Below is a list with all available subcommands. Options: -e, --entry-point PLUGIN Only include nodes that are class or sub class of the class identified by this entry point. + -f, --force Do not ask for confirmation. --help Show this message and exit. @@ -677,18 +693,18 @@ Below is a list with all available subcommands. Example Usage: - verdi -p restapi --hostname 127.0.0.5 --port 6789 --config-dir - --debug --wsgi-profile --hookup + verdi -p restapi --hostname 127.0.0.5 --port 6789 Options: - -H, --hostname TEXT Hostname. - -P, --port INTEGER Port number. - -c, --config-dir PATH the path of the configuration directory - --debug run app in debug mode - --wsgi-profile to use WSGI profiler middleware for finding - bottlenecks in web application - --hookup / --no-hookup to hookup app - --help Show this message and exit. + -H, --hostname HOSTNAME Hostname. + -P, --port INTEGER Port number. + -c, --config-dir PATH Path to the configuration directory + --debug Enable debugging + --wsgi-profile Whether to enable WSGI profiler middleware for + finding bottlenecks + + --hookup / --no-hookup Hookup app to flask server + --help Show this message and exit. .. _verdi_run: @@ -703,15 +719,23 @@ Below is a list with all available subcommands. Execute scripts with preloaded AiiDA environment. Options: - -g, --group Enables the autogrouping [default: True] - -n, --group-name TEXT Specify the name of the auto group - -e, --exclude TEXT Exclude these classes from auto grouping - -i, --include TEXT Include these classes from auto grouping - -E, --excludesubclasses TEXT Exclude these classes and their sub classes - from auto grouping - -I, --includesubclasses TEXT Include these classes and their sub classes - from auto grouping - --help Show this message and exit. + --auto-group Enables the autogrouping + -l, --auto-group-label-prefix TEXT + Specify the prefix of the label of the auto + group (numbers might be automatically + appended to generate unique names per run). + + -n, --group-name TEXT Specify the name of the auto group + [DEPRECATED, USE --auto-group-label-prefix + instead]. This also enables auto-grouping. + + -e, --exclude TEXT Exclude these classes from auto grouping + (use full entrypoint strings). + + -i, --include TEXT Include these classes from auto grouping + (use full entrypoint strings or "all"). + + --help Show this message and exit. .. _verdi_setup: @@ -728,28 +752,33 @@ Below is a list with all available subcommands. Options: -n, --non-interactive Non-interactive mode: never prompt for input. + --profile PROFILE The name of the new profile. [required] - --email TEXT Email address that serves as the user name - and a way to identify data created by it. + --email EMAIL Email address associated with the data you + generate. The email address is exported + along with the data, when sharing it. [required] - --first-name TEXT First name of the user. [required] - --last-name TEXT Last name of the user. [required] - --institution TEXT Institution of the user. [required] + + --first-name NONEMPTYSTRING First name of the user. [required] + --last-name NONEMPTYSTRING Last name of the user. [required] + --institution NONEMPTYSTRING Institution of the user. [required] --db-engine [postgresql_psycopg2] Engine to use to connect to the database. --db-backend [django|sqlalchemy] - Backend type to use to map the database. - --db-host TEXT Hostname to connect to the database. - --db-port INTEGER Port to connect to the database. - --db-name TEXT Name of the database to create. 
[required] - --db-username TEXT Name of the database user to create. - [required] - --db-password TEXT Password to connect to the database. + Database backend to use. + --db-host HOSTNAME Database server host. Leave empty for "peer" + authentication. + + --db-port INTEGER Database server port. + --db-name NONEMPTYSTRING Name of the database to create. [required] + --db-username NONEMPTYSTRING Name of the database user to create. [required] - --repository DIRECTORY Absolute path for the file system - repository. + + --db-password TEXT Password of the database user. [required] + --repository DIRECTORY Absolute path to the file repository. --config FILE Load option values from configuration file in yaml format. + --help Show this message and exit. @@ -769,9 +798,11 @@ Below is a list with all available subcommands. --no-startup When using plain Python, ignore the PYTHONSTARTUP environment variable and ~/.pythonrc.py script. + -i, --interface [ipython|bpython] Specify an interactive interpreter interface. + --help Show this message and exit. diff --git a/docs/source/working/calculations.rst b/docs/source/working/calculations.rst index 4fae92b4ba..6f55f72a55 100644 --- a/docs/source/working/calculations.rst +++ b/docs/source/working/calculations.rst @@ -115,7 +115,7 @@ Next we should define what outputs we expect the calculation to produce: Just as for the inputs, one can specify what node type each output should have. By default a defined output will be 'required', which means that if the calculation job terminates and the output has not been attached, the process will be marked as failed. To indicate that an output is optional, one can use ``required=False`` in the ``spec.output`` call. -Note that the process spec, and its :py:meth:`~aiida.engine.processes.process_spec.ProcessSpec.input` and :py:meth:`~aiida.engine.processes.process_spec.ProcessSpec.output` methods provide a lot more functionality. +Note that the process spec, and its :py:meth:`~plumpy.ProcessSpec.input` and :py:meth:`~plumpy.ProcessSpec.output` methods provide a lot more functionality. Fore more details, please refer to the section on :ref:`process specifications`. diff --git a/docs/source/working/functions.rst b/docs/source/working/functions.rst index c58f3f6e1b..93b676e235 100644 --- a/docs/source/working/functions.rst +++ b/docs/source/working/functions.rst @@ -177,7 +177,7 @@ In the case of the example above, it would look something like the following: However, in this particular example the exception is not so much an unexpected error, but one we could have considered and have seen coming, so it might be more applicable to simply mark the process as failed. To accomplish this, there is the concept of an :ref:`exit status` that can be set on the process, which is an integer that, when non-zero, marks a process in the ``Finished`` state as 'failed'. Since the exit status is set as an attribute on the process node, it also makes it very easy to query for failed processes. -To set a non-zero exit status on a calculation function to indicate it as failed, simply return an instance of the :py:class:`~aiida.engine.processes.exit_code.ExitCode` named tuple. +To set a non-zero exit status on a calculation function to indicate it as failed, simply return an instance of the :py:class:`~aiida.engine.processes.exit_code.ExitCode` class. Time for a demonstration: .. 
include:: include/snippets/processes/functions/calcfunction_exit_code.py @@ -239,7 +239,7 @@ Likewise, you should not load any existing data from the database through the AP A similar problem occurs when importing other python code. Practically, it is almost impossible to never import code into process functions, as this would force massive code duplication. However, there is still a difference between importing code from the ``aiida-core`` library or the repository in which the process function is hosted, and the importing of a local python file. -Even though for both cases there can no be guarantee of reproducibility, the former stands a better chance by far, as the version number of the plugin should be recorded. +Even though for both cases there can be no guarantee of reproducibility, the former stands a better chance by far, as the version number of the plugin package should be recorded. The rule of thumb then is to keep the importing of code to a minimum, but if you have to, make sure to make it part of a plugin package with a well-defined version number. Finally, as mentioned in the introduction, the source file of a process function is stored as a file in the repository for *each execution*. diff --git a/docs/source/working/workflows.rst b/docs/source/working/workflows.rst index 4f7166e530..a58595e766 100644 --- a/docs/source/working/workflows.rst +++ b/docs/source/working/workflows.rst @@ -109,8 +109,8 @@ Exit codes ---------- To terminate the execution of a work function and mark it as failed, one simply has to return an :ref:`exit code`. -The :py:class:`~aiida.engine.processes.exit_code.ExitCode` named tuple is constructed with an integer, to denote the desired exit status and an optional message -When such as exit code is returned, the engine will mark the node of the work function as ``Finished`` and set the exit status and message to the value of the tuple. +The :py:class:`~aiida.engine.processes.exit_code.ExitCode` class is constructed with an integer to denote the desired exit status, and an optional message. +When such an exit code is returned, the engine will mark the node of the work function as ``Finished`` and set the exit status and message to the value of the exit code. Consider the following example: .. code:: python @@ -120,7 +120,7 @@ Consider the following example: from aiida.engine import ExitCode return ExitCode(418, 'I am a teapot') -The execution of the work function will be immediately terminated as soon as the tuple is returned, and the exit status and message will be set to ``418`` and ``I am a teapot``, respectively. +The execution of the work function will be immediately terminated as soon as the exit code is returned, and the exit status and message will be set to ``418`` and ``I am a teapot``, respectively. Since no output nodes are returned, the ``WorkFunctionNode`` node will have no outputs and the value returned from the function call will be an empty dictionary. @@ -178,8 +178,8 @@ The third and final line is extremely important, as it will call the ``define`` Inputs and outputs ------------------ With those formalities out of the way, you can start defining the interesting properties of the work chain through the ``spec``. -In the example you can see how the method :py:meth:`~aiida.engine.processes.process_spec.ProcessSpec.input` is used to define multiple input ports, which document exactly which inputs the work chain expects.
-Similarly, :py:meth:`~aiida.engine.processes.process_spec.ProcessSpec.output` is called to instruct that the work chain will produce an output with the label ``result``. +In the example you can see how the method :py:meth:`~plumpy.ProcessSpec.input` is used to define multiple input ports, which document exactly which inputs the work chain expects. +Similarly, :py:meth:`~plumpy.ProcessSpec.output` is called to instruct that the work chain will produce an output with the label ``result``. These two port creation methods support a lot more functionality, such as adding help string, validation and more, all of which is documented in detail in the section on :ref:`ports and port namespace`. @@ -189,7 +189,7 @@ Outline ------- The outline is what sets the work chain apart from other processes. It is a way of defining the higher-level logic that encodes the workflow that the work chain takes. -The outline is defined in the ``define`` method through the :py:meth:`~aiida.engine.processes.process_spec.ProcessSpec.outline`. +The outline is defined in the ``define`` method through the :py:meth:`~plumpy.WorkChainSpec.outline`. It takes a sequence of instructions that the work chain will execute, each of which is implemented as a method of the work chain class. In the simple example above, the outline consists of three simple instructions: ``add``, ``multiply``, ``results``. Since these are implemented as instance methods, they are prefixed with ``cls.`` to indicate that they are in fact methods of the work chain class. @@ -483,15 +483,40 @@ In the ``inspect_calculation`` outline, we retrieve the calculation that was sub If this returns ``False``, in this example we simply fire a report message and return the exit code corresponding to the label ``ERROR_CALCULATION_FAILED``. Note that the specific exit code can be retrieved through the ``WorkChain`` property ``exit_codes``. This will return a collection of exit codes that have been defined for that ``WorkChain`` and any specific exit code can then be retrieved by accessing it as an attribute. -Returning this exit code, which will be an instance of the :py:class:`~aiida.engine.processes.exit_code.ExitCode` named tuple, will cause the work chain to be aborted and the ``exit_status`` and ``exit_message`` to be set on the node, which were defined in the spec. +Returning this exit code, which will be an instance of the :py:class:`~aiida.engine.processes.exit_code.ExitCode` class, will cause the work chain to be aborted and the ``exit_status`` and ``exit_message`` to be set on the node, which were defined in the spec. .. note:: - The notation ``self.exit_codes.ERROR_CALCULATION_FAILED`` is just syntactic sugar to retrieve the ``ExitCode`` tuple that was defined in the spec with that error label. + The notation ``self.exit_codes.ERROR_CALCULATION_FAILED`` is just syntactic sugar to retrieve the ``ExitCode`` instance that was defined in the spec with that error label. Constructing your own ``ExitCode`` directly and returning that from the outline step will have exactly the same effect in terms of aborting the work chain execution and setting the exit status and message. However, it is strongly advised to define the exit code through the spec and retrieve it through the ``self.exit_codes`` collection, as that makes it easily retrievable through the spec by the caller of the work chain. -The best part about this method of aborting a work chains execution, is that the exit status can now be used programmatically, by for example a parent work chain. 
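+To make the advice concrete, a minimal sketch of such an outline step is shown below; it assumes the exit code was declared in the spec, e.g. as ``spec.exit_code(400, 'ERROR_CALCULATION_FAILED', 'the calculation did not finish successfully')``, and the method and label names here are purely illustrative:
+
+.. code:: python
+
+    def inspect_calculation(self):
+        """Abort the work chain if the calculation stored in the context failed."""
+        if not self.ctx.calculation.is_finished_ok:
+            self.report('the submitted calculation failed')
+            return self.exit_codes.ERROR_CALCULATION_FAILED
+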
+The ``message`` attribute of an ``ExitCode`` can also be a string that contains placeholders. +This is useful when the exit code's message is generic enough to apply to a host of situations, but one would just like to parameterize the exit message. +To concretize the template message of an exit code, simply call the :meth:`~aiida.engine.processes.exit_code.ExitCode.format` method and pass the parameters as keyword arguments: + +.. code:: python + + exit_code_template = ExitCode(450, 'the parameter {parameter} is invalid.') + exit_code_concrete = exit_code_template.format(parameter='some_specific_key') + +This concept can also be applied within the scope of a process. +In the process spec, we can declare a generic exit code whose exact message should depend on one or multiple parameters: + +.. code:: python + + spec.exit_code(450, 'ERROR_INVALID_PARAMETER', 'the parameter {parameter} is invalid.') + +Through the ``self.exit_codes`` collection of a ``WorkChain``, this generic exit code can be easily customized as follows: + +.. code:: python + + def inspect_calculation(self): + return self.exit_codes.ERROR_INVALID_PARAMETER.format(parameter='some_specific_key') + +This is no different from the example before, because ``self.exit_codes.ERROR_INVALID_PARAMETER`` simply returns an instance of ``ExitCode``, which we then call ``format`` on with the substitution parameters. + +In conclusion, the best part about using exit codes to abort a work chain's execution is that the exit status can now be used programmatically, by for example a parent work chain. Imagine that a parent work chain submitted this work chain. After it has terminated its execution, the parent work chain will want to know what happened to the child work chain. As already noted in the :ref:`report` section, the report messages of the work chain should not be used. diff --git a/docs/source/working_with_aiida/groups.rst b/docs/source/working_with_aiida/groups.rst index 58eadcc024..55bd82ebd5 100644 --- a/docs/source/working_with_aiida/groups.rst +++ b/docs/source/working_with_aiida/groups.rst @@ -18,144 +18,162 @@ be performed with groups: Create a new Group ------------------ - From the command line interface:: +From the command line interface:: - verdi group create test_group + verdi group create test_group - From the python interface:: +From the python interface:: - In [1]: group = Group(label="test_group") - - In [2]: group.store() - Out[2]: + In [1]: group = Group(label="test_group") + In [2]: group.store() + Out[2]: List available Groups --------------------- - Example:: +Example:: - verdi group list + verdi group list - By default ``verdi group list`` only shows groups of the type *user*. - In case you want to show groups of another type use ``-t/--type`` option. If - you want to show groups of all types, use the ``-a/--all-types`` option. +By default ``verdi group list`` only shows groups of the type *user*. +In case you want to show groups of another type use ``-t/--type`` option. If +you want to show groups of all types, use the ``-a/--all-types`` option.
- From the command line interface:: +From the command line interface:: - verdi group list -t user + verdi group list -t user - From the python interface:: +From the python interface:: - In [1]: query = QueryBuilder() + In [1]: query = QueryBuilder() - In [2]: query.append(Group, filters={'type_string':'user'}) - Out[2]: + In [2]: query.append(Group, filters={'type_string':'user'}) + Out[2]: - In [3]: query.all() - Out[3]: - [[], - [], - []] + In [3]: query.all() + Out[3]: + [[], + [], + []] Add nodes to a Group -------------------- - Once the ``test_group`` has been created, we can add nodes to it. To add the node with ``pk=1`` to the group we need to do the following. - - From the command line interface:: - - verdi group add-nodes -G test_group 1 - Do you really want to add 1 nodes to Group? [y/N]: y - - From the python interface:: +Once the ``test_group`` has been created, we can add nodes to it. To add the node with ``pk=1`` to the group we need to do the following. - In [1]: group = Group.get(label='test_group') +From the command line interface:: - In [2]: from aiida.orm import Dict + verdi group add-nodes -G test_group 1 + Do you really want to add 1 nodes to Group? [y/N]: y - In [3]: p = Dict().store() +From the python interface:: - In [4]: p - Out[4]: + In [1]: group = Group.get(label='test_group') + In [2]: from aiida.orm import Dict + In [3]: p = Dict().store() + In [4]: p + Out[4]: + In [5]: group.add_nodes(p) - In [5]: group.add_nodes(p) Show information about a Group ------------------------------ - From the command line interface:: - - verdi group show test_group - ----------------- ---------------- - Group label test_group - Group type_string user - Group description - ----------------- ---------------- - # Nodes: - PK Type Created - ---- ------ --------------- - 1 Code 26D:21h:45m ago +From the command line interface:: + verdi group show test_group + ----------------- ---------------- + Group label test_group + Group type_string user + Group description + ----------------- ---------------- + # Nodes: + PK Type Created + ---- ------ --------------- + 1 Code 26D:21h:45m ago Remove nodes from a Group ------------------------- - From the command line interface:: +From the command line interface:: - verdi group remove-nodes -G test_group 1 - Do you really want to remove 1 nodes from Group? [y/N]: y + verdi group remove-nodes -G test_group 1 + Do you really want to remove 1 nodes from Group? [y/N]: y - From the python interface:: +From the python interface:: - In [1]: group = Group.get(label='test_group') + In [1]: group = Group.get(label='test_group') + In [2]: group.clear() - In [2]: group.clear() Rename Group ------------ - From the command line interface:: +From the command line interface:: verdi group relabel test_group old_group Success: Label changed to old_group - From the python interface:: +From the python interface:: - In [1]: group = Group.get(label='old_group') + In [1]: group = Group.get(label='old_group') + In [2]: group.label = "another_group" - In [2]: group.label = "another_group" Delete Group ------------ - From the command line interface:: +From the command line interface:: verdi group delete another_group Are you sure to delete Group? [y/N]: y Success: Group deleted. - Copy one group into another --------------------------- - This operation will copy the nodes of the source group into the destination - group. Moreover, if the destination group did not exist before, it will - be created automatically. 
+This operation will copy the nodes of the source group into the destination +group. Moreover, if the destination group did not exist before, it will +be created automatically. + +From the command line interface:: + + verdi group copy source_group dest_group + Success: Nodes copied from group to group + +From the python interface:: + + In [1]: src_group = Group.objects.get(label='source_group') + In [2]: dest_group = Group(label='destination_group').store() + In [3]: dest_group.add_nodes(list(src_group.nodes)) + + +Create a `Group` subclass +------------------------- +It is possible to create a subclass of `Group` to implement custom functionality. +To make the instances of the subclass storable and loadable, it has to be registered through an entry point in the ``aiida.groups`` entry point category. +For example, assuming we have a subclass ``SubClassGroup`` in the module ``aiida_plugin.groups.sub_class:SubClassGroup``, to register it, one has to add the following to the ``setup.py`` of the plugin package:: - From the command line interface:: + "entry_points": { + "aiida.groups": [ + "plugin.sub_class = aiida_plugin.groups.sub_class:SubClassGroup" + ] + } - verdi group copy source_group dest_group - Success: Nodes copied from group to group +Now that the subclass is properly registered, instances can be stored:: - From the python interface:: + group = SubClassGroup(label='sub-class-group') + group.store() - In [1]: src_group = Group.objects.get(label='source_group') +The ``type_string`` of the group instance corresponds to the entry point name and so in this example is ``plugin.sub_class``. +This is what AiiDA uses to load the correct class when reloading the group from the database:: - In [2]: dest_group = Group(label='destination_group').store() + group = load_group(group.pk) + assert isinstance(group, SubClassGroup) - In [3]: dest_group.add_nodes(list(src_group.nodes)) +If the entry point is not currently registered, for example because the corresponding plugin package is not installed, AiiDA will issue a warning and fall back onto the ``Group`` base class. diff --git a/docs/update_req_for_rtd.py b/docs/update_req_for_rtd.py deleted file mode 100644 index 4322a0fd4a..0000000000 --- a/docs/update_req_for_rtd.py +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -""" -Whenever the requirements in ../setup.json are updated, run -also this script to update the requirements for Read the Docs.
-""" - -import os -import json -import click - - -@click.command() -@click.option('--pre-commit', is_flag=True) -def update_req_for_rtd(pre_commit): - """Update the separate requirements file for Read the Docs""" - docs_dir = os.path.abspath(os.path.dirname(__file__)) - root_dir = os.path.join(docs_dir, os.pardir) - - with open(os.path.join(root_dir, 'setup.json'), 'r') as info: - setup_json = json.load(info) - - extras = setup_json['extras_require'] - reqs = set(extras['testing'] + extras['docs'] + extras['rest'] + extras['atomic_tools'] + - setup_json['install_requires']) - reqs_str = '\n'.join(sorted(reqs)) - - basename = 'requirements_for_rtd.txt' - - # pylint: disable=bad-continuation - with open(os.path.join(docs_dir, basename), 'w') as reqs_file: - reqs_file.write(reqs_str) - - click.echo("File '{}' written.".format(basename)) - - if pre_commit: - msg = 'Some requirements for Read the Docs have changed, {}' - local_help = 'please add the changes and commit again' - travis_help = 'please run aiida/docs/update_req_for_rtd.py locally and commit the changes it makes' - help_msg = msg.format(travis_help if os.environ.get('TRAVIS') else local_help) - click.echo(help_msg, err=True) - - -if __name__ == '__main__': - update_req_for_rtd() # pylint: disable=no-value-for-parameter diff --git a/environment.yml b/environment.yml index 249c93a130..e63cbefaa5 100644 --- a/environment.yml +++ b/environment.yml @@ -2,9 +2,8 @@ --- name: aiida channels: -- defaults - conda-forge -- etetoolkit +- defaults dependencies: - python~=3.7 - aldjemy~=0.9.1 @@ -20,19 +19,20 @@ dependencies: - ipython~=7.0 - jinja2~=2.10 - kiwipy[rmq]~=0.5.1 -- numpy~=1.17,<1.18 +- numpy<1.18,~=1.17 - paramiko~=2.6 - pika~=1.1 - plumpy~=0.14.5 +- pgsu~=0.1.0 - psutil~=5.6 -- psycopg2~=2.8,>=2.8.3 +- psycopg2>=2.8.3,~=2.8 - python-dateutil~=2.8 - pytz~=2019.3 - pyyaml~=5.1.2 - reentry~=1.3 - simplejson~=3.16 - sqlalchemy-utils~=0.34.2 -- sqlalchemy~=1.3,>=1.3.10 +- sqlalchemy>=1.3.10,~=1.3 - tabulate~=0.8.5 - tornado<5.0 - tzlocal~=2.0 diff --git a/examples/work/__init__.py b/examples/work/__init__.py deleted file mode 100644 index 2776a55f97..0000000000 --- a/examples/work/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### diff --git a/examples/work/workchain.py b/examples/work/workchain.py deleted file mode 100755 index 6894ef3876..0000000000 --- a/examples/work/workchain.py +++ /dev/null @@ -1,91 +0,0 @@ -#!/usr/bin/env runaiida -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -""" -This example illustrates in a very minimal way how a WorkChain can be defined and how it can be run. 
This mostly -illustrates how the spec of the WorkChain is defined and how functions in the outline of the spec have to be defined. -""" - -from aiida.engine import WorkChain, run -from aiida.orm import NumericType, Float, Int - - -class SumWorkChain(WorkChain): - - @classmethod - def define(cls, spec): - super().define(spec) - spec.input('a', valid_type=NumericType) - spec.input('b', valid_type=NumericType) - spec.outline( - cls.sum - ) - spec.output('sum', valid_type=NumericType) - - def sum(self): - self.out('sum', self.inputs.a + self.inputs.b) - - -class ProductWorkChain(WorkChain): - - @classmethod - def define(cls, spec): - super().define(spec) - spec.input('a', valid_type=NumericType) - spec.input('b', valid_type=NumericType) - spec.outline( - cls.product - ) - spec.output('product', valid_type=NumericType) - - def product(self): - self.out('product', self.inputs.a * self.inputs.b) - - -class SumProductWorkChain(WorkChain): - - @classmethod - def define(cls, spec): - super().define(spec) - spec.input('a', valid_type=NumericType) - spec.input('b', valid_type=NumericType) - spec.input('c', valid_type=NumericType) - spec.outline( - cls.sum, - cls.product - ) - spec.output('sumproduct', valid_type=NumericType) - - def sum(self): - self.ctx.sum = self.inputs.a + self.inputs.b - - def product(self): - self.out('sumproduct', self.ctx.sum * self.inputs.c) - - -def main(): - inputs = { - 'a': Float(3.14), - 'b': Int(4), - 'c': Int(6) - } - - results = run(SumWorkChain, **inputs) - print('Result of SumWorkChain: {}'.format(results)) - - results = run(ProductWorkChain, **inputs) - print('Result of ProductWorkChain: {}'.format(results)) - - results = run(SumProductWorkChain, **inputs) - print('Result of SumProductWorkChain: {}'.format(results)) - - -if __name__ == '__main__': - main() diff --git a/examples/work/workchain_outline.py b/examples/work/workchain_outline.py deleted file mode 100755 index d82a6cca43..0000000000 --- a/examples/work/workchain_outline.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env runaiida -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -""" -This WorkChain example is a very contrived implementation of the infamous FizzBuzz problem, that serves to illustrate -the various logical blocks that one can incorporate into the outline of the workchain's spec. 
-""" - -from aiida.engine import WorkChain, run, while_, if_ -from aiida.orm import Int - - -class OutlineWorkChain(WorkChain): - - @classmethod - def define(cls, spec): - super().define(spec) - spec.input('a', valid_type=Int) - spec.outline( - cls.setup, - while_(cls.not_finished)( - if_(cls.if_multiple_of_three_and_five)( - cls.report_fizz_buzz - ).elif_(cls.if_multiple_of_five)( - cls.report_buzz - ).elif_(cls.if_multiple_of_three)( - cls.report_fizz - ).else_( - cls.report_number - ), - cls.decrement - ) - ) - - def setup(self): - self.ctx.counter = abs(self.inputs.a.value) - - def not_finished(self): - return self.ctx.counter > 0 - - def if_multiple_of_three_and_five(self): - return (self.ctx.counter % 3 == 0 and self.ctx.counter % 5 == 0) - - def if_multiple_of_five(self): - return self.ctx.counter % 5 == 0 - - def if_multiple_of_three(self): - return self.ctx.counter % 3 == 0 - - def report_fizz_buzz(self): - print('FizzBuzz') - - def report_fizz(self): - print('Fizz') - - def report_buzz(self): - print('Buzz') - - def report_number(self): - print(self.ctx.counter) - - def decrement(self): - self.ctx.counter -= 1 - - -def main(): - run(OutlineWorkChain, a=Int(16)) - - -if __name__ == '__main__': - main() diff --git a/examples/work/workfunction.py b/examples/work/workfunction.py deleted file mode 100755 index d36a2c66c0..0000000000 --- a/examples/work/workfunction.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env runaiida -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -""" -This example implements exactly the same functionality as seen in the basic WorkChain example, except in this case it -utilizes calcfunctions instead of workchains. -""" - -from aiida.engine import calcfunction -from aiida.orm import Float, Int - - -@calcfunction -def sum(a, b): - return a + b - - -@calcfunction -def product(a, b): - return a * b - - -@calcfunction -def sumproduct(a, b, c): - return product(sum(a, b), c) - - -def main(): - a = Float(3.14) - b = Int(4) - c = Int(6) - - results = sum(a, b) - print('Result of sum: {}'.format(results)) - - results = product(a, b) - print('Result of product: {}'.format(results)) - - results = sumproduct(a, b, c) - print('Result of sumproduct: {}'.format(results)) - - -if __name__ == '__main__': - main() diff --git a/pyproject.toml b/pyproject.toml index a4e1ecf2f4..24eaa8393c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,4 +1,3 @@ [build-system] -# Minimum requirements for the build system to execute. 
-requires = ["setuptools>=40.8.0", "wheel", "reentry~=1.3"] +requires = [ "setuptools>=40.8.0", "wheel", "reentry~=1.3",] build-backend = "setuptools.build_meta:__legacy__" diff --git a/pytest.ini b/pytest.ini index de85753fa6..c5128a6476 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,4 +1,5 @@ [pytest] +addopts= --durations=50 --cov-config=.coveragerc --cov-report xml --cov=aiida testpaths = tests filterwarnings = ignore::DeprecationWarning:babel: @@ -8,3 +9,5 @@ filterwarnings = ignore::DeprecationWarning:yaml: ignore::DeprecationWarning:pymatgen: ignore::DeprecationWarning:jsonbackend: + ignore::DeprecationWarning:reentry: + ignore::DeprecationWarning:pkg_resources: diff --git a/requirements/requirements-py-3.5.txt b/requirements/requirements-py-3.5.txt new file mode 100644 index 0000000000..96a42ecec3 --- /dev/null +++ b/requirements/requirements-py-3.5.txt @@ -0,0 +1,152 @@ +aiida-export-migration-tests==0.9.0 +alabaster==0.7.12 +aldjemy==0.9.1 +alembic==1.4.1 +aniso8601==8.0.0 +ase==3.19.0 +attrs==19.3.0 +Babel==2.8.0 +backcall==0.1.0 +bcrypt==3.1.7 +bleach==3.1.4 +certifi==2019.11.28 +cffi==1.14.0 +chardet==3.0.4 +circus==0.16.1 +Click==7.0 +click-completion==0.5.2 +click-config-file==0.5.0 +click-spinner==0.1.8 +configobj==5.0.6 +coverage==4.5.4 +cryptography==2.8 +cycler==0.10.0 +decorator==4.4.2 +defusedxml==0.6.0 +Django==2.2.11 +docutils==0.15.2 +entrypoints==0.3 +ete3==3.1.1 +Flask==1.1.1 +Flask-Cors==3.0.8 +Flask-RESTful==0.3.8 +frozendict==1.2 +furl==2.1.0 +future==0.18.2 +graphviz==0.13.2 +idna==2.9 +imagesize==1.2.0 +importlib-metadata==1.5.0 +ipykernel==5.1.4 +ipython==7.9.0 +ipython-genutils==0.2.0 +ipywidgets==7.5.1 +itsdangerous==1.1.0 +jedi==0.16.0 +Jinja2==2.11.1 +jsonschema==3.2.0 +jupyter==1.0.0 +jupyter-client==6.0.0 +jupyter-console==6.1.0 +jupyter-core==4.6.3 +kiwipy==0.5.3 +kiwisolver==1.1.0 +Mako==1.1.2 +MarkupSafe==1.1.1 +matplotlib==3.0.3 +mistune==0.8.4 +monty==3.0.2 +more-itertools==8.2.0 +mpmath==1.1.0 +nbconvert==5.6.1 +nbformat==5.0.4 +networkx==2.4 +notebook==5.7.8 +numpy==1.17.5 +orderedmultidict==1.0.1 +packaging==20.3 +palettable==3.3.0 +pandas==0.25.3 +pandocfilters==1.4.2 +paramiko==2.7.1 +parso==0.6.2 +pathlib2==2.3.5 +pexpect==4.8.0 +pg8000==1.13.2 +pgtest==1.3.2 +pgsu==0.1.0 +pickleshare==0.7.5 +pika==1.1.0 +pluggy==0.13.1 +plumpy==0.14.5 +prometheus-client==0.7.1 +prompt-toolkit==2.0.10 +psutil==5.7.0 +psycopg2-binary==2.8.4 +ptyprocess==0.6.0 +py==1.8.1 +pyblake2==1.1.2 +PyCifRW==4.4.1 +pycparser==2.20 +PyDispatcher==2.0.5 +Pygments==2.6.1 +pymatgen==2019.7.2 +PyMySQL==0.9.3 +PyNaCl==1.3.0 +pyparsing==2.4.6 +pyrsistent==0.15.7 +pytest==5.3.5 +pytest-cov==2.8.1 +pytest-timeout==1.3.4 +python-dateutil==2.8.1 +python-editor==1.0.4 +python-memcached==1.59 +pytz==2019.3 +PyYAML==5.1.2 +pyzmq==19.0.0 +qtconsole==4.7.1 +QtPy==1.9.0 +reentry==1.3.1 +requests==2.23.0 +ruamel.yaml==0.16.10 +ruamel.yaml.clib==0.2.0 +scipy==1.4.1 +scramp==1.1.0 +seekpath==1.9.4 +Send2Trash==1.5.0 +shellingham==1.3.2 +shortuuid==1.0.1 +simplejson==3.17.0 +six==1.14.0 +snowballstemmer==2.0.0 +spglib==1.14.1.post0 +Sphinx==2.4.4 +sphinx-rtd-theme==0.4.3 +sphinxcontrib-applehelp==1.0.2 +sphinxcontrib-contentui==0.2.4 +sphinxcontrib-details-directive==0.1.0 +sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-htmlhelp==1.0.3 +sphinxcontrib-jsmath==1.0.1 +sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-serializinghtml==1.1.4 +SQLAlchemy==1.3.13 +sqlalchemy-diff==0.1.3 +SQLAlchemy-Utils==0.34.2 +sqlparse==0.3.1 +sympy==1.5.1 +tabulate==0.8.6 +terminado==0.8.3 +testpath==0.4.4 +topika==0.2.1 
+tornado==4.5.3 +traitlets==4.3.3 +tzlocal==2.0.0 +upf-to-json==0.9.2 +urllib3==1.25.8 +wcwidth==0.1.8 +webencodings==0.5.1 +Werkzeug==1.0.0 +widgetsnbextension==3.5.1 +wrapt==1.11.2 +zipp==1.2.0 diff --git a/requirements/requirements-py-3.6.txt b/requirements/requirements-py-3.6.txt new file mode 100644 index 0000000000..c4e942b38e --- /dev/null +++ b/requirements/requirements-py-3.6.txt @@ -0,0 +1,151 @@ +aiida-export-migration-tests==0.9.0 +alabaster==0.7.12 +aldjemy==0.9.1 +alembic==1.4.1 +aniso8601==8.0.0 +ase==3.19.0 +attrs==19.3.0 +Babel==2.8.0 +backcall==0.1.0 +bcrypt==3.1.7 +bleach==3.1.4 +certifi==2019.11.28 +cffi==1.14.0 +chardet==3.0.4 +circus==0.16.1 +Click==7.0 +click-completion==0.5.2 +click-config-file==0.5.0 +click-spinner==0.1.8 +configobj==5.0.6 +coverage==4.5.4 +cryptography==2.8 +cycler==0.10.0 +dataclasses==0.7 +decorator==4.4.2 +defusedxml==0.6.0 +Django==2.2.11 +docutils==0.15.2 +entrypoints==0.3 +ete3==3.1.1 +Flask==1.1.1 +Flask-Cors==3.0.8 +Flask-RESTful==0.3.8 +frozendict==1.2 +furl==2.1.0 +future==0.18.2 +graphviz==0.13.2 +idna==2.9 +imagesize==1.2.0 +importlib-metadata==1.5.0 +ipykernel==5.1.4 +ipython==7.13.0 +ipython-genutils==0.2.0 +ipywidgets==7.5.1 +itsdangerous==1.1.0 +jedi==0.16.0 +Jinja2==2.11.1 +jsonschema==3.2.0 +jupyter==1.0.0 +jupyter-client==6.0.0 +jupyter-console==6.1.0 +jupyter-core==4.6.3 +kiwipy==0.5.3 +kiwisolver==1.1.0 +Mako==1.1.2 +MarkupSafe==1.1.1 +matplotlib==3.2.0 +mistune==0.8.4 +monty==3.0.2 +more-itertools==8.2.0 +mpmath==1.1.0 +nbconvert==5.6.1 +nbformat==5.0.4 +networkx==2.4 +notebook==5.7.8 +numpy==1.17.5 +orderedmultidict==1.0.1 +packaging==20.3 +palettable==3.3.0 +pandas==0.25.3 +pandocfilters==1.4.2 +paramiko==2.7.1 +parso==0.6.2 +pexpect==4.8.0 +pg8000==1.13.2 +pgsu==0.1.0 +pgtest==1.3.2 +pickleshare==0.7.5 +pika==1.1.0 +pluggy==0.13.1 +plumpy==0.14.5 +prometheus-client==0.7.1 +prompt-toolkit==3.0.4 +psutil==5.7.0 +psycopg2-binary==2.8.4 +ptyprocess==0.6.0 +py==1.8.1 +PyCifRW==4.4.1 +pycparser==2.20 +PyDispatcher==2.0.5 +Pygments==2.6.1 +pymatgen==2020.3.2 +PyMySQL==0.9.3 +PyNaCl==1.3.0 +pyparsing==2.4.6 +pyrsistent==0.15.7 +pytest==5.3.5 +pytest-cov==2.8.1 +pytest-timeout==1.3.4 +python-dateutil==2.8.1 +python-editor==1.0.4 +python-memcached==1.59 +pytz==2019.3 +PyYAML==5.1.2 +pyzmq==19.0.0 +qtconsole==4.7.1 +QtPy==1.9.0 +reentry==1.3.1 +requests==2.23.0 +ruamel.yaml==0.16.10 +ruamel.yaml.clib==0.2.0 +scipy==1.4.1 +scramp==1.1.0 +seekpath==1.9.4 +Send2Trash==1.5.0 +shellingham==1.3.2 +shortuuid==1.0.1 +simplejson==3.17.0 +six==1.14.0 +snowballstemmer==2.0.0 +spglib==1.14.1.post0 +Sphinx==2.4.4 +sphinx-rtd-theme==0.4.3 +sphinxcontrib-applehelp==1.0.2 +sphinxcontrib-contentui==0.2.4 +sphinxcontrib-details-directive==0.1.0 +sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-htmlhelp==1.0.3 +sphinxcontrib-jsmath==1.0.1 +sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-serializinghtml==1.1.4 +SQLAlchemy==1.3.13 +sqlalchemy-diff==0.1.3 +SQLAlchemy-Utils==0.34.2 +sqlparse==0.3.1 +sympy==1.5.1 +tabulate==0.8.6 +terminado==0.8.3 +testpath==0.4.4 +topika==0.2.1 +tornado==4.5.3 +traitlets==4.3.3 +tzlocal==2.0.0 +upf-to-json==0.9.2 +urllib3==1.25.8 +wcwidth==0.1.8 +webencodings==0.5.1 +Werkzeug==1.0.0 +widgetsnbextension==3.5.1 +wrapt==1.11.2 +zipp==3.1.0 diff --git a/requirements/requirements-py-3.7.txt b/requirements/requirements-py-3.7.txt new file mode 100644 index 0000000000..6a5fd19a03 --- /dev/null +++ b/requirements/requirements-py-3.7.txt @@ -0,0 +1,150 @@ +aiida-export-migration-tests==0.9.0 +alabaster==0.7.12 +aldjemy==0.9.1 +alembic==1.4.1 
+aniso8601==8.0.0 +ase==3.19.0 +attrs==19.3.0 +Babel==2.8.0 +backcall==0.1.0 +bcrypt==3.1.7 +bleach==3.1.4 +certifi==2019.11.28 +cffi==1.14.0 +chardet==3.0.4 +circus==0.16.1 +Click==7.0 +click-completion==0.5.2 +click-config-file==0.5.0 +click-spinner==0.1.8 +configobj==5.0.6 +coverage==4.5.4 +cryptography==2.8 +cycler==0.10.0 +decorator==4.4.2 +defusedxml==0.6.0 +Django==2.2.11 +docutils==0.15.2 +entrypoints==0.3 +ete3==3.1.1 +Flask==1.1.1 +Flask-Cors==3.0.8 +Flask-RESTful==0.3.8 +frozendict==1.2 +furl==2.1.0 +future==0.18.2 +graphviz==0.13.2 +idna==2.9 +imagesize==1.2.0 +importlib-metadata==1.5.0 +ipykernel==5.1.4 +ipython==7.13.0 +ipython-genutils==0.2.0 +ipywidgets==7.5.1 +itsdangerous==1.1.0 +jedi==0.16.0 +Jinja2==2.11.1 +jsonschema==3.2.0 +jupyter==1.0.0 +jupyter-client==6.0.0 +jupyter-console==6.1.0 +jupyter-core==4.6.3 +kiwipy==0.5.3 +kiwisolver==1.1.0 +Mako==1.1.2 +MarkupSafe==1.1.1 +matplotlib==3.2.0 +mistune==0.8.4 +monty==3.0.2 +more-itertools==8.2.0 +mpmath==1.1.0 +nbconvert==5.6.1 +nbformat==5.0.4 +networkx==2.4 +notebook==5.7.8 +numpy==1.17.5 +orderedmultidict==1.0.1 +packaging==20.3 +palettable==3.3.0 +pandas==0.25.3 +pandocfilters==1.4.2 +paramiko==2.7.1 +parso==0.6.2 +pexpect==4.8.0 +pg8000==1.13.2 +pgsu==0.1.0 +pgtest==1.3.2 +pickleshare==0.7.5 +pika==1.1.0 +pluggy==0.13.1 +plumpy==0.14.5 +prometheus-client==0.7.1 +prompt-toolkit==3.0.4 +psutil==5.7.0 +psycopg2-binary==2.8.4 +ptyprocess==0.6.0 +py==1.8.1 +PyCifRW==4.4.1 +pycparser==2.20 +PyDispatcher==2.0.5 +Pygments==2.6.1 +pymatgen==2020.3.2 +PyMySQL==0.9.3 +PyNaCl==1.3.0 +pyparsing==2.4.6 +pyrsistent==0.15.7 +pytest==5.3.5 +pytest-cov==2.8.1 +pytest-timeout==1.3.4 +python-dateutil==2.8.1 +python-editor==1.0.4 +python-memcached==1.59 +pytz==2019.3 +PyYAML==5.1.2 +pyzmq==19.0.0 +qtconsole==4.7.1 +QtPy==1.9.0 +reentry==1.3.1 +requests==2.23.0 +ruamel.yaml==0.16.10 +ruamel.yaml.clib==0.2.0 +scipy==1.4.1 +scramp==1.1.0 +seekpath==1.9.4 +Send2Trash==1.5.0 +shellingham==1.3.2 +shortuuid==1.0.1 +simplejson==3.17.0 +six==1.14.0 +snowballstemmer==2.0.0 +spglib==1.14.1.post0 +Sphinx==2.4.4 +sphinx-rtd-theme==0.4.3 +sphinxcontrib-applehelp==1.0.2 +sphinxcontrib-contentui==0.2.4 +sphinxcontrib-details-directive==0.1.0 +sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-htmlhelp==1.0.3 +sphinxcontrib-jsmath==1.0.1 +sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-serializinghtml==1.1.4 +SQLAlchemy==1.3.13 +sqlalchemy-diff==0.1.3 +SQLAlchemy-Utils==0.34.2 +sqlparse==0.3.1 +sympy==1.5.1 +tabulate==0.8.6 +terminado==0.8.3 +testpath==0.4.4 +topika==0.2.1 +tornado==4.5.3 +traitlets==4.3.3 +tzlocal==2.0.0 +upf-to-json==0.9.2 +urllib3==1.25.8 +wcwidth==0.1.8 +webencodings==0.5.1 +Werkzeug==1.0.0 +widgetsnbextension==3.5.1 +wrapt==1.11.2 +zipp==3.1.0 diff --git a/requirements/requirements-py-3.8.txt b/requirements/requirements-py-3.8.txt new file mode 100644 index 0000000000..4603457b4b --- /dev/null +++ b/requirements/requirements-py-3.8.txt @@ -0,0 +1,148 @@ +aiida-export-migration-tests==0.9.0 +alabaster==0.7.12 +aldjemy==0.9.1 +alembic==1.4.1 +aniso8601==8.0.0 +ase==3.19.0 +attrs==19.3.0 +Babel==2.8.0 +backcall==0.1.0 +bcrypt==3.1.7 +bleach==3.1.4 +certifi==2019.11.28 +cffi==1.14.0 +chardet==3.0.4 +circus==0.16.1 +Click==7.0 +click-completion==0.5.2 +click-config-file==0.5.0 +click-spinner==0.1.8 +configobj==5.0.6 +coverage==4.5.4 +cryptography==2.8 +cycler==0.10.0 +decorator==4.4.2 +defusedxml==0.6.0 +Django==2.2.11 +docutils==0.15.2 +entrypoints==0.3 +ete3==3.1.1 +Flask==1.1.1 +Flask-Cors==3.0.8 +Flask-RESTful==0.3.8 +frozendict==1.2 +furl==2.1.0 
+future==0.18.2 +graphviz==0.13.2 +idna==2.9 +imagesize==1.2.0 +ipykernel==5.1.4 +ipython==7.13.0 +ipython-genutils==0.2.0 +ipywidgets==7.5.1 +itsdangerous==1.1.0 +jedi==0.16.0 +Jinja2==2.11.1 +jsonschema==3.2.0 +jupyter==1.0.0 +jupyter-client==6.0.0 +jupyter-console==6.1.0 +jupyter-core==4.6.3 +kiwipy==0.5.3 +kiwisolver==1.1.0 +Mako==1.1.2 +MarkupSafe==1.1.1 +matplotlib==3.2.0 +mistune==0.8.4 +monty==3.0.2 +more-itertools==8.2.0 +mpmath==1.1.0 +nbconvert==5.6.1 +nbformat==5.0.4 +networkx==2.4 +notebook==5.7.8 +numpy==1.17.5 +orderedmultidict==1.0.1 +packaging==20.3 +palettable==3.3.0 +pandas==0.25.3 +pandocfilters==1.4.2 +paramiko==2.7.1 +parso==0.6.2 +pexpect==4.8.0 +pg8000==1.13.2 +pgsu==0.1.0 +pgtest==1.3.2 +pickleshare==0.7.5 +pika==1.1.0 +pluggy==0.13.1 +plumpy==0.14.5 +prometheus-client==0.7.1 +prompt-toolkit==3.0.4 +psutil==5.7.0 +psycopg2-binary==2.8.4 +ptyprocess==0.6.0 +py==1.8.1 +PyCifRW==4.4.1 +pycparser==2.20 +PyDispatcher==2.0.5 +Pygments==2.6.1 +pymatgen==2020.3.2 +PyMySQL==0.9.3 +PyNaCl==1.3.0 +pyparsing==2.4.6 +pyrsistent==0.15.7 +pytest==5.3.5 +pytest-cov==2.8.1 +pytest-timeout==1.3.4 +python-dateutil==2.8.1 +python-editor==1.0.4 +python-memcached==1.59 +pytz==2019.3 +PyYAML==5.1.2 +pyzmq==19.0.0 +qtconsole==4.7.1 +QtPy==1.9.0 +reentry==1.3.1 +requests==2.23.0 +ruamel.yaml==0.16.10 +ruamel.yaml.clib==0.2.0 +scipy==1.4.1 +scramp==1.1.0 +seekpath==1.9.4 +Send2Trash==1.5.0 +shellingham==1.3.2 +shortuuid==1.0.1 +simplejson==3.17.0 +six==1.14.0 +snowballstemmer==2.0.0 +spglib==1.14.1.post0 +Sphinx==2.4.4 +sphinx-rtd-theme==0.4.3 +sphinxcontrib-applehelp==1.0.2 +sphinxcontrib-contentui==0.2.4 +sphinxcontrib-details-directive==0.1.0 +sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-htmlhelp==1.0.3 +sphinxcontrib-jsmath==1.0.1 +sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-serializinghtml==1.1.4 +SQLAlchemy==1.3.13 +sqlalchemy-diff==0.1.3 +SQLAlchemy-Utils==0.34.2 +sqlparse==0.3.1 +sympy==1.5.1 +tabulate==0.8.6 +terminado==0.8.3 +testpath==0.4.4 +topika==0.2.1 +tornado==4.5.3 +traitlets==4.3.3 +tzlocal==2.0.0 +upf-to-json==0.9.2 +urllib3==1.25.8 +wcwidth==0.1.8 +webencodings==0.5.1 +Werkzeug==1.0.0 +widgetsnbextension==3.5.1 +wrapt==1.11.2 diff --git a/setup.json b/setup.json index 9b1eecbebb..1160389e90 100644 --- a/setup.json +++ b/setup.json @@ -1,6 +1,6 @@ { "name": "aiida-core", - "version": "1.1.1", + "version": "1.2.0", "url": "http://www.aiida.net/", "license": "MIT License", "author": "The AiiDA team", @@ -37,6 +37,7 @@ "paramiko~=2.6", "pika~=1.1", "plumpy~=0.14.5", + "pgsu~=0.1.0", "psutil~=5.6", "psycopg2-binary~=2.8,>=2.8.3", "pyblake2~=1.1; python_version<'3.6'", @@ -87,15 +88,18 @@ "notebook<6" ], "testing": [ - "aiida-export-migration-tests==0.8.0", + "aiida-export-migration-tests==0.9.0", "pg8000~=1.13", "pgtest~=1.3,>=1.3.1", "pytest~=5.3", "pytest-timeout~=1.3", + "pytest-cov~=2.7", + "coverage<5.0", "sqlalchemy-diff~=0.1.3" ], "dev_precommit": [ "astroid==2.3.3", + "packaging==20.3", "pre-commit==1.18.3", "prospector==1.2.0", "pylint==2.4.4", @@ -109,7 +113,8 @@ "reentry_register": true, "entry_points": { "console_scripts": [ - "verdi=aiida.cmdline.commands.cmd_verdi:verdi" + "verdi=aiida.cmdline.commands.cmd_verdi:verdi", + "runaiida=aiida.cmdline.commands.cmd_run:run" ], "aiida.calculations": [ "arithmetic.add = aiida.calculations.plugins.arithmetic.add:ArithmeticAddCalculation", @@ -154,6 +159,12 @@ "structure = aiida.orm.nodes.data.structure:StructureData", "upf = aiida.orm.nodes.data.upf:UpfData" ], + "aiida.groups": [ + "core = aiida.orm.groups:Group", + 
"core.auto = aiida.orm.groups:AutoGroup", + "core.import = aiida.orm.groups:ImportGroup", + "core.upf = aiida.orm.groups:UpfFamily" + ], "aiida.node": [ "data = aiida.orm.nodes.data.data:Data", "process = aiida.orm.nodes.process.process:ProcessNode", @@ -198,8 +209,5 @@ "realhydrogen = aiida.tools.data.orbital.realhydrogen:RealhydrogenOrbital" ], "aiida.workflows": [] - }, - "scripts": [ - "bin/runaiida" - ] -} + } +} \ No newline at end of file diff --git a/setup.py b/setup.py index 8f2372edd5..0cfa973f5f 100644 --- a/setup.py +++ b/setup.py @@ -10,11 +10,26 @@ # pylint: disable=wrong-import-order """Setup script for aiida-core package.""" import json +import sys import os from utils import fastentrypoints # pylint: disable=unused-import from setuptools import setup, find_packages +if (sys.version_info.major, sys.version_info.minor) == (3, 5): + import setuptools + from distutils.version import StrictVersion + + REQUIRED_SETUPTOOLS_VERSION = StrictVersion('38.2.0') + INSTALLED_SETUPTOOLS_VERSION = StrictVersion(setuptools.__version__) + + if INSTALLED_SETUPTOOLS_VERSION < REQUIRED_SETUPTOOLS_VERSION: + raise RuntimeError( + 'The installation of AiiDA with Python version 3.5, requires setuptools>={}; your version: {}'.format( + REQUIRED_SETUPTOOLS_VERSION, INSTALLED_SETUPTOOLS_VERSION + ) + ) + if __name__ == '__main__': THIS_FOLDER = os.path.split(os.path.abspath(__file__))[0] diff --git a/tests/backends/aiida_django/migrations/test_migrations_0044_dbgroup_type_string.py b/tests/backends/aiida_django/migrations/test_migrations_0044_dbgroup_type_string.py new file mode 100644 index 0000000000..ab1b31d518 --- /dev/null +++ b/tests/backends/aiida_django/migrations/test_migrations_0044_dbgroup_type_string.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=import-error,no-name-in-module,invalid-name +"""Test migration of `type_string` after the `Group` class became pluginnable.""" + +from .test_migrations_common import TestMigrations + + +class TestGroupTypeStringMigration(TestMigrations): + """Test migration of `type_string` after the `Group` class became pluginnable.""" + + migrate_from = '0043_default_link_label' + migrate_to = '0044_dbgroup_type_string' + + def setUpBeforeMigration(self): + DbGroup = self.apps.get_model('db', 'DbGroup') + + # test user group type_string: 'user' -> 'core' + group_user = DbGroup(label='01', user_id=self.default_user.id, type_string='user') + group_user.save() + self.group_user_pk = group_user.pk + + # test data.upf group type_string: 'data.upf' -> 'core.upf' + group_data_upf = DbGroup(label='02', user_id=self.default_user.id, type_string='data.upf') + group_data_upf.save() + self.group_data_upf_pk = group_data_upf.pk + + # test auto.import group type_string: 'auto.import' -> 'core.import' + group_autoimport = DbGroup(label='03', user_id=self.default_user.id, type_string='auto.import') + group_autoimport.save() + self.group_autoimport_pk = group_autoimport.pk + + # test auto.run group type_string: 'auto.run' -> 'core.auto' + group_autorun = DbGroup(label='04', user_id=self.default_user.id, type_string='auto.run') + group_autorun.save() + self.group_autorun_pk = group_autorun.pk + + def test_group_string_update(self): + """Test that the type_string were updated correctly.""" + DbGroup = self.apps.get_model('db', 'DbGroup') + + # 'user' -> 'core' + group_user = DbGroup.objects.get(pk=self.group_user_pk) + self.assertEqual(group_user.type_string, 'core') + + # 'data.upf' -> 'core.upf' + group_data_upf = DbGroup.objects.get(pk=self.group_data_upf_pk) + self.assertEqual(group_data_upf.type_string, 'core.upf') + + # 'auto.import' -> 'core.import' + group_autoimport = DbGroup.objects.get(pk=self.group_autoimport_pk) + self.assertEqual(group_autoimport.type_string, 'core.import') + + # 'auto.run' -> 'core.auto' + group_autorun = DbGroup.objects.get(pk=self.group_autorun_pk) + self.assertEqual(group_autorun.type_string, 'core.auto') diff --git a/tests/backends/aiida_django/migrations/test_migrations_common.py b/tests/backends/aiida_django/migrations/test_migrations_common.py index f8de61f9a6..43f4f03b3d 100644 --- a/tests/backends/aiida_django/migrations/test_migrations_common.py +++ b/tests/backends/aiida_django/migrations/test_migrations_common.py @@ -38,8 +38,8 @@ def setUp(self): from aiida.backends.djsite import get_scoped_session from aiida.orm import autogroup - self.current_autogroup = autogroup.current_autogroup - autogroup.current_autogroup = None + self.current_autogroup = autogroup.CURRENT_AUTOGROUP + autogroup.CURRENT_AUTOGROUP = None assert self.migrate_from and self.migrate_to, \ "TestCase '{}' must define migrate_from and migrate_to properties".format(type(self).__name__) self.migrate_from = [(self.app, self.migrate_from)] @@ -85,7 +85,7 @@ def tearDown(self): """At the end make sure we go back to the latest schema version.""" from aiida.orm import autogroup self._revert_database_schema() - autogroup.current_autogroup = self.current_autogroup + autogroup.CURRENT_AUTOGROUP = 

     def setUpBeforeMigration(self):
         """Anything to do before running the migrations, which should be implemented in test subclasses."""
diff --git a/tests/backends/aiida_sqlalchemy/test_migrations.py b/tests/backends/aiida_sqlalchemy/test_migrations.py
index 8bdda5d145..2bb52ceecc 100644
--- a/tests/backends/aiida_sqlalchemy/test_migrations.py
+++ b/tests/backends/aiida_sqlalchemy/test_migrations.py
@@ -22,7 +22,6 @@
 from aiida.backends.sqlalchemy.models.base import Base
 from aiida.backends.sqlalchemy.utils import flag_modified
 from aiida.backends.testbase import AiidaTestCase
-from aiida.common.utils import Capturing

 from .test_utils import new_database

@@ -57,22 +56,28 @@ def setUp(self):
         super().setUp()
         from aiida.orm import autogroup

-        self.current_autogroup = autogroup.current_autogroup
-        autogroup.current_autogroup = None
+        self.current_autogroup = autogroup.CURRENT_AUTOGROUP
+        autogroup.CURRENT_AUTOGROUP = None
         assert self.migrate_from and self.migrate_to, \
             "TestCase '{}' must define migrate_from and migrate_to properties".format(type(self).__name__)

         try:
-            with Capturing():
-                self.migrate_db_down(self.migrate_from)
+            self.migrate_db_down(self.migrate_from)
             self.setUpBeforeMigration()
-            with Capturing():
-                self.migrate_db_up(self.migrate_to)
+            self._perform_actual_migration()
         except Exception:
             # Bring back the DB to the correct state if this setup part fails
             self._reset_database_and_schema()
+            autogroup.CURRENT_AUTOGROUP = self.current_autogroup
             raise

+    def _perform_actual_migration(self):
+        """Perform the actual migration (upwards, to migrate_to).
+
+        Must be called after we are properly set to be in migrate_from.
+        """
+        self.migrate_db_up(self.migrate_to)
+
     def migrate_db_up(self, destination):
         """
         Perform a migration upwards (upgrade) with alembic
@@ -99,7 +104,7 @@ def tearDown(self):
         """
         from aiida.orm import autogroup
         self._reset_database_and_schema()
-        autogroup.current_autogroup = self.current_autogroup
+        autogroup.CURRENT_AUTOGROUP = self.current_autogroup
         super().tearDown()

     def setUpBeforeMigration(self):  # pylint: disable=invalid-name
@@ -116,8 +121,7 @@ def _reset_database_and_schema(self):
         of tests.
         """
         self.reset_database()
-        with Capturing():
-            self.migrate_db_up('head')
+        self.migrate_db_up('head')

     @property
     def current_rev(self):
@@ -210,29 +214,12 @@ class TestBackwardMigrationsSQLA(TestMigrationsSQLA):
     than the migrate_to revision.
     """

-    def setUp(self):
-        """
-        Go to the migrate_from revision, apply setUpBeforeMigration, then
-        run the migration.
-        """
-        AiidaTestCase.setUp(self)  # pylint: disable=bad-super-call
-        from aiida.orm import autogroup
-
-        self.current_autogroup = autogroup.current_autogroup
-        autogroup.current_autogroup = None
-        assert self.migrate_from and self.migrate_to, \
-            "TestCase '{}' must define migrate_from and migrate_to properties".format(type(self).__name__)
+    def _perform_actual_migration(self):
+        """Perform the actual migration (downwards, to migrate_to).

-        try:
-            with Capturing():
-                self.migrate_db_down(self.migrate_from)
-            self.setUpBeforeMigration()
-            with Capturing():
-                self.migrate_db_down(self.migrate_to)
-        except Exception:
-            # Bring back the DB to the correct state if this setup part fails
-            self._reset_database_and_schema()
-            raise
+        Must be called after we are properly set to be in migrate_from.
+        """
+        self.migrate_db_down(self.migrate_to)


 class TestMigrationEngine(TestMigrationsSQLA):
@@ -1003,7 +990,7 @@ class TestDbLogUUIDAddition(TestMigrationsSQLA):
     """
     Test that the UUID column is correctly added to the DbLog table and
     that the uniqueness constraint is added without problems (if the
     migration arrives until 375c2db70663 then the
-    constraint is added properly.
+    constraint is added properly).
     """

     migrate_from = '041a79fc615f'  # 041a79fc615f_dblog_cleaning
@@ -1655,3 +1642,69 @@ def test_data_migrated(self):
             finally:
                 session.close()
+
+
+class TestGroupTypeStringMigration(TestMigrationsSQLA):
+    """Test the migration that renames the DbGroup type strings."""
+
+    migrate_from = '118349c10896'  # 118349c10896_default_link_label.py
+    migrate_to = 'bf591f31dd12'  # bf591f31dd12_dbgroup_type_string.py
+
+    def setUpBeforeMigration(self):
+        """Create the DbGroups with the old type strings."""
+        DbGroup = self.get_current_table('db_dbgroup')  # pylint: disable=invalid-name
+        DbUser = self.get_current_table('db_dbuser')  # pylint: disable=invalid-name
+
+        with self.get_session() as session:
+            try:
+                default_user = DbUser(email='{}@aiida.net'.format(self.id()))
+                session.add(default_user)
+                session.commit()
+
+                # test user group type_string: 'user' -> 'core'
+                group_user = DbGroup(label='01', user_id=default_user.id, type_string='user')
+                session.add(group_user)
+                # test data.upf group type_string: 'data.upf' -> 'core.upf'
+                group_data_upf = DbGroup(label='02', user_id=default_user.id, type_string='data.upf')
+                session.add(group_data_upf)
+                # test auto.import group type_string: 'auto.import' -> 'core.import'
+                group_autoimport = DbGroup(label='03', user_id=default_user.id, type_string='auto.import')
+                session.add(group_autoimport)
+                # test auto.run group type_string: 'auto.run' -> 'core.auto'
+                group_autorun = DbGroup(label='04', user_id=default_user.id, type_string='auto.run')
+                session.add(group_autorun)
+
+                session.commit()
+
+                # Store values for later tests
+                self.group_user_pk = group_user.id
+                self.group_data_upf_pk = group_data_upf.id
+                self.group_autoimport_pk = group_autoimport.id
+                self.group_autorun_pk = group_autorun.id
+
+            finally:
+                session.close()
+
+    def test_group_string_update(self):
+        """Test that the type strings are properly migrated."""
+        DbGroup = self.get_current_table('db_dbgroup')  # pylint: disable=invalid-name
+
+        with self.get_session() as session:
+            try:
+                # test user group type_string: 'user' -> 'core'
+                group_user = session.query(DbGroup).filter(DbGroup.id == self.group_user_pk).one()
+                self.assertEqual(group_user.type_string, 'core')
+
+                # test data.upf group type_string: 'data.upf' -> 'core.upf'
+                group_data_upf = session.query(DbGroup).filter(DbGroup.id == self.group_data_upf_pk).one()
+                self.assertEqual(group_data_upf.type_string, 'core.upf')
+
+                # test auto.import group type_string: 'auto.import' -> 'core.import'
+                group_autoimport = session.query(DbGroup).filter(DbGroup.id == self.group_autoimport_pk).one()
+                self.assertEqual(group_autoimport.type_string, 'core.import')
+
+                # test auto.run group type_string: 'auto.run' -> 'core.auto'
+                group_autorun = session.query(DbGroup).filter(DbGroup.id == self.group_autorun_pk).one()
+                self.assertEqual(group_autorun.type_string, 'core.auto')
+            finally:
+                session.close()
diff --git a/tests/cmdline/commands/test_calcjob.py b/tests/cmdline/commands/test_calcjob.py
index dc7895c5d4..2f1945d45a 100644
--- a/tests/cmdline/commands/test_calcjob.py
+++ b/tests/cmdline/commands/test_calcjob.py
@@ -98,6 +98,7 @@ def setUpClass(cls, *args, **kwargs):
         cls.arithmetic_job = calculations[0]

     def setUp(self):
+        super().setUp()
         self.cli_runner = CliRunner()

     def test_calcjob_res(self):
diff --git a/tests/cmdline/commands/test_code.py b/tests/cmdline/commands/test_code.py
index d50bfc78b3..d61c3194ee 100644
--- a/tests/cmdline/commands/test_code.py
+++ b/tests/cmdline/commands/test_code.py
@@ -7,10 +7,14 @@
 # For further information on the license, see the LICENSE.txt file       #
 # For further information please visit http://www.aiida.net               #
 ###########################################################################
+# pylint: disable=unused-argument
 """Tests for the 'verdi code' command."""
 import os
 import subprocess as sp
+from textwrap import dedent
+
 from click.testing import CliRunner
+import pytest

 from aiida.backends.testbase import AiidaTestCase
 from aiida.cmdline.commands.cmd_code import (setup_code, delete, hide, reveal, relabel, code_list, show,
                                              code_duplicate)
@@ -24,13 +28,11 @@ class TestVerdiCodeSetup(AiidaTestCase):
     @classmethod
     def setUpClass(cls, *args, **kwargs):
         super().setUpClass(*args, **kwargs)
-        orm.Computer(
+        cls.computer = orm.Computer(
             name='comp', hostname='localhost', transport_type='local', scheduler_type='direct', workdir='/tmp/aiida'
         ).store()

     def setUp(self):
-        self.comp = orm.Computer.objects.get(name='comp')
-
         self.cli_runner = CliRunner()
         self.this_folder = os.path.dirname(__file__)
         self.this_file = os.path.basename(__file__)
@@ -42,46 +44,20 @@ def test_reachable(self):
         output = sp.check_output(['verdi', 'code', 'setup', '--help'])
         self.assertIn(b'Usage:', output)

-    def test_interactive_remote(self):
-        """Test interactive remote code setup."""
-
-        from aiida.orm import Code
-        os.environ['VISUAL'] = 'sleep 1; vim -cwq'
-        os.environ['EDITOR'] = 'sleep 1; vim -cwq'
-        label = 'interactive_remote'
-        user_input = '\n'.join([label, 'description', 'arithmetic.add', 'yes', self.comp.name, '/remote/abs/path'])
-        result = self.cli_runner.invoke(setup_code, input=user_input)
-        self.assertClickResultNoException(result)
-        self.assertIsInstance(Code.get_from_string('{}@{}'.format(label, self.comp.name)), Code)
-
-    def test_interactive_upload(self):
-        """Test interactive code setup."""
-        from aiida.orm import Code
-        os.environ['VISUAL'] = 'sleep 1; vim -cwq'
-        os.environ['EDITOR'] = 'sleep 1; vim -cwq'
-        label = 'interactive_upload'
-        user_input = '\n'.join([label, 'description', 'arithmetic.add', 'no', self.this_folder, self.this_file])
-        result = self.cli_runner.invoke(setup_code, input=user_input)
-        self.assertIsNone(result.exception, result.output)
-        self.assertIsInstance(Code.get_from_string('{}'.format(label)), Code)
-
     def test_noninteractive_remote(self):
         """Test non-interactive remote code setup."""
-
-        from aiida.orm import Code
         label = 'noninteractive_remote'
         options = [
             '--non-interactive', '--label={}'.format(label), '--description=description',
-            '--input-plugin=arithmetic.add', '--on-computer', '--computer={}'.format(self.comp.name),
+            '--input-plugin=arithmetic.add', '--on-computer', '--computer={}'.format(self.computer.name),
             '--remote-abs-path=/remote/abs/path'
         ]
         result = self.cli_runner.invoke(setup_code, options)
         self.assertClickResultNoException(result)
-        self.assertIsInstance(Code.get_from_string('{}@{}'.format(label, self.comp.name)), Code)
+        self.assertIsInstance(orm.Code.get_from_string('{}@{}'.format(label, self.computer.name)), orm.Code)

     def test_noninteractive_upload(self):
         """Test non-interactive code setup."""
-        from aiida.orm import Code
         label = 'noninteractive_upload'
         options = [
             '--non-interactive', '--label={}'.format(label), '--description=description',
@@ -90,23 +66,25 @@ def test_noninteractive_upload(self):
         ]
         result = self.cli_runner.invoke(setup_code, options)
         self.assertClickResultNoException(result)
-        self.assertIsInstance(Code.get_from_string('{}'.format(label)), Code)
+        self.assertIsInstance(orm.Code.get_from_string('{}'.format(label)), orm.Code)

     def test_from_config(self):
         """Test setting up a code from a config file."""
-        from aiida.orm import Code
         import tempfile

         label = 'noninteractive_config'

         with tempfile.NamedTemporaryFile('w') as handle:
             handle.write(
-                """---
-label: {l}
-input_plugin: arithmetic.add
-computer: {c}
-remote_abs_path: /remote/abs/path
-""".format(l=label, c=self.comp.name)
+                dedent(
+                    """
+                    ---
+                    label: {label}
+                    input_plugin: arithmetic.add
+                    computer: {computer}
+                    remote_abs_path: /remote/abs/path
+                    """
+                ).format(label=label, computer=self.computer.name)
             )
             handle.flush()
             result = self.cli_runner.invoke(
@@ -115,17 +93,7 @@ def test_from_config(self):
             )

         self.assertClickResultNoException(result)
-        self.assertIsInstance(Code.get_from_string('{}'.format(label)), Code)
-
-    def test_mixed(self):
-        """Test mixed (interactive/from config) code setup."""
-        from aiida.orm import Code
-        label = 'mixed_remote'
-        options = ['--description=description', '--on-computer', '--remote-abs-path=/remote/abs/path']
-        user_input = '\n'.join([label, 'arithmetic.add', self.comp.name])
-        result = self.cli_runner.invoke(setup_code, options, input=user_input)
-        self.assertClickResultNoException(result)
-        self.assertIsInstance(Code.get_from_string('{}@{}'.format(label, self.comp.name)), Code)
+        self.assertIsInstance(orm.Code.get_from_string('{}'.format(label)), orm.Code)


 class TestVerdiCodeCommands(AiidaTestCase):
@@ -136,19 +104,17 @@ class TestVerdiCodeCommands(AiidaTestCase):
     @classmethod
     def setUpClass(cls, *args, **kwargs):
         super().setUpClass(*args, **kwargs)
-        orm.Computer(
+        cls.computer = orm.Computer(
             name='comp', hostname='localhost', transport_type='local', scheduler_type='direct', workdir='/tmp/aiida'
         ).store()

     def setUp(self):
-        self.comp = orm.Computer.objects.get(name='comp')
-
         try:
             code = orm.Code.get_from_string('code')
         except NotExistent:
             code = orm.Code(
                 input_plugin_name='arithmetic.add',
-                remote_computer_exec=[self.comp, '/remote/abs/path'],
+                remote_computer_exec=[self.computer, '/remote/abs/path'],
             )
             code.label = 'code'
             code.description = 'desc'
@@ -175,14 +141,12 @@ def test_relabel_code(self):
         """Test force code relabeling."""
         result = self.cli_runner.invoke(relabel, [str(self.code.pk), 'new_code'])
         self.assertIsNone(result.exception, result.output)
-        from aiida.orm import load_node
-        new_code = load_node(self.code.pk)
+        new_code = orm.load_node(self.code.pk)
         self.assertEqual(new_code.label, 'new_code')

     def test_relabel_code_full(self):
         self.cli_runner.invoke(relabel, [str(self.code.pk), 'new_code@comp'])
-        from aiida.orm import load_node
-        new_code = load_node(self.code.pk)
+        new_code = orm.load_node(self.code.pk)
         self.assertEqual(new_code.label, 'new_code')

     def test_relabel_code_full_bad(self):
@@ -195,24 +159,22 @@ def test_code_delete_one_force(self):
         self.assertIsNone(result.exception, result.output)

         with self.assertRaises(NotExistent):
-            from aiida.orm import Code
-            Code.get_from_string('code')
+            orm.Code.get_from_string('code')

     def test_code_list(self):
         """Test code list command."""
         # set up second code 'code2'
-        from aiida.orm import Code
         try:
-            code = Code.get_from_string('code2')
+            code = orm.Code.get_from_string('code2')
         except NotExistent:
-            code = Code(
+            code = orm.Code(
                 input_plugin_name='templatereplacer',
-                remote_computer_exec=[self.comp, '/remote/abs/path'],
+                remote_computer_exec=[self.computer, '/remote/abs/path'],
             )
             code.label = 'code2'
             code.store()

-        options = ['-A', '-a', '-o', '--input-plugin=arithmetic.add', '--computer={}'.format(self.comp.name)]
+        options = ['-A', '-a', '-o', '--input-plugin=arithmetic.add', '--computer={}'.format(self.computer.name)]
         result = self.cli_runner.invoke(code_list, options)
         self.assertIsNone(result.exception, result.output)
         self.assertTrue(str(self.code.pk) in result.output, 'PK of first code should be included')
@@ -238,29 +200,13 @@ def test_code_show(self):
         self.assertIsNone(result.exception, result.output)
         self.assertTrue(str(self.code.pk) in result.output)

-    def test_code_duplicate_interactive(self):
-        """Test code duplication interactive."""
-        os.environ['VISUAL'] = 'sleep 1; vim -cwq'
-        os.environ['EDITOR'] = 'sleep 1; vim -cwq'
-        label = 'code_duplicate_interactive'
-        user_input = label + '\n\n\n\n\n\n'
-        result = self.cli_runner.invoke(code_duplicate, [str(self.code.pk)], input=user_input, catch_exceptions=False)
-        self.assertIsNone(result.exception, result.output)
-
-        from aiida.orm import Code
-        new_code = Code.get_from_string(label)
-        self.assertEqual(self.code.description, new_code.description)
-        self.assertEqual(self.code.get_prepend_text(), new_code.get_prepend_text())
-        self.assertEqual(self.code.get_append_text(), new_code.get_append_text())
-
     def test_code_duplicate_non_interactive(self):
         """Test code duplication non-interactive."""
         label = 'code_duplicate_noninteractive'
         result = self.cli_runner.invoke(code_duplicate, ['--non-interactive', '--label=' + label, str(self.code.pk)])
         self.assertIsNone(result.exception, result.output)

-        from aiida.orm import Code
-        new_code = Code.get_from_string(label)
+        new_code = orm.Code.get_from_string(label)
         self.assertEqual(self.code.description, new_code.description)
         self.assertEqual(self.code.get_prepend_text(), new_code.get_prepend_text())
         self.assertEqual(self.code.get_append_text(), new_code.get_append_text())
@@ -276,3 +222,68 @@ def setUp(self):

     def test_code_list_no_codes_error_message(self):
         result = self.cli_runner.invoke(code_list)
         self.assertEqual(1, result.output.count('# No codes found matching the specified criteria.'))
+
+
+@pytest.mark.parametrize('non_interactive_editor', ('sleep 1; vim -cwq',), indirect=True)
+def test_interactive_remote(clear_database_before_test, aiida_localhost, non_interactive_editor):
+    """Test interactive remote code setup."""
+    label = 'interactive_remote'
+    user_input = '\n'.join([label, 'description', 'arithmetic.add', 'yes', aiida_localhost.name, '/remote/abs/path'])
+    result = CliRunner().invoke(setup_code, input=user_input)
+    assert result.exception is None
+    assert isinstance(orm.Code.get_from_string('{}@{}'.format(label, aiida_localhost.name)), orm.Code)
+
+
+@pytest.mark.parametrize('non_interactive_editor', ('sleep 1; vim -cwq',), indirect=True)
+def test_interactive_upload(clear_database_before_test, aiida_localhost, non_interactive_editor):
+    """Test interactive code setup."""
+    label = 'interactive_upload'
+    dirname = os.path.dirname(__file__)
+    basename = os.path.basename(__file__)
+    user_input = '\n'.join([label, 'description', 'arithmetic.add', 'no', dirname, basename])
+    result = CliRunner().invoke(setup_code, input=user_input)
+    assert result.exception is None
+    assert isinstance(orm.Code.get_from_string('{}'.format(label)), orm.Code)
+
+
+@pytest.mark.parametrize('non_interactive_editor', ('sleep 1; vim -cwq',), indirect=True)
+def test_mixed(clear_database_before_test, aiida_localhost, non_interactive_editor):
+    """Test mixed (interactive/from config) code setup."""
+    from aiida.orm import Code
+    label = 'mixed_remote'
+    options = ['--description=description', '--on-computer', '--remote-abs-path=/remote/abs/path']
+    user_input = '\n'.join([label, 'arithmetic.add', aiida_localhost.name])
+    result = CliRunner().invoke(setup_code, options, input=user_input)
+    assert result.exception is None
+    assert isinstance(Code.get_from_string('{}@{}'.format(label, aiida_localhost.name)), Code)
+
+
+@pytest.mark.parametrize('non_interactive_editor', ('sleep 1; vim -cwq',), indirect=True)
+def test_code_duplicate_interactive(clear_database_before_test, aiida_local_code_factory, non_interactive_editor):
+    """Test code duplication interactive."""
+    label = 'code_duplicate_interactive'
+    user_input = label + '\n\n\n\n\n\n'
+    code = aiida_local_code_factory('arithmetic.add', '/bin/cat', label='code')
+    result = CliRunner().invoke(code_duplicate, [str(code.pk)], input=user_input)
+    assert result.exception is None, result.exception
+
+    duplicate = orm.Code.get_from_string(label)
+    assert code.description == duplicate.description
+    assert code.get_prepend_text() == duplicate.get_prepend_text()
+    assert code.get_append_text() == duplicate.get_append_text()
+
+
+@pytest.mark.parametrize('non_interactive_editor', ('sleep 1; vim -cwq',), indirect=True)
+def test_code_duplicate_ignore(clear_database_before_test, aiida_local_code_factory, non_interactive_editor):
+    """Providing '!' for the description should lead to an empty description.
+
+    Regression test for: https://github.com/aiidateam/aiida-core/issues/3770
+    """
+    label = 'code_duplicate_interactive'
+    user_input = label + '\n!\n\n\n\n\n'
+    code = aiida_local_code_factory('arithmetic.add', '/bin/cat', label='code')
+    result = CliRunner().invoke(code_duplicate, [str(code.pk)], input=user_input, catch_exceptions=False)
+    assert result.exception is None, result.exception
+
+    duplicate = orm.Code.get_from_string(label)
+    assert duplicate.description == ''
diff --git a/tests/cmdline/commands/test_computer.py b/tests/cmdline/commands/test_computer.py
index 574110121d..668728c21f 100644
--- a/tests/cmdline/commands/test_computer.py
+++ b/tests/cmdline/commands/test_computer.py
@@ -7,13 +7,14 @@
 # For further information on the license, see the LICENSE.txt file       #
 # For further information please visit http://www.aiida.net               #
 ###########################################################################
+# pylint: disable=unused-argument
 """Tests for the 'verdi computer' command."""
-
 from collections import OrderedDict
 import os
 import tempfile

 from click.testing import CliRunner
+import pytest

 from aiida import orm
 from aiida.backends.testbase import AiidaTestCase
@@ -117,36 +118,6 @@ def test_reachable(self):
         output = sp.check_output(['verdi', 'computer', 'setup', '--help'])
         self.assertIn(b'Usage:', output)

-    def test_interactive(self):
-        """Test verdi computer setup in interactive mode."""
-        os.environ['VISUAL'] = 'sleep 1; vim -cwq'
-        os.environ['EDITOR'] = 'sleep 1; vim -cwq'
-        label = 'interactive_computer'
-
-        options_dict = generate_setup_options_dict(replace_args={'label': label}, non_interactive=False)
-        # In any case, these would be managed by the visual editor
-        options_dict.pop('prepend-text')
-        options_dict.pop('append-text')
-        user_input = '\n'.join(generate_setup_options_interactive(options_dict))
-
-        result = self.cli_runner.invoke(computer_setup, input=user_input)
-        self.assertIsNone(result.exception, msg='There was an unexpected exception. Output: {}'.format(result.output))
-
-        new_computer = orm.Computer.objects.get(name=label)
-        self.assertIsInstance(new_computer, orm.Computer)
-
-        self.assertEqual(new_computer.description, options_dict['description'])
-        self.assertEqual(new_computer.hostname, options_dict['hostname'])
-        self.assertEqual(new_computer.get_transport_type(), options_dict['transport'])
-        self.assertEqual(new_computer.get_scheduler_type(), options_dict['scheduler'])
-        self.assertEqual(new_computer.get_mpirun_command(), options_dict['mpirun-command'].split())
-        self.assertEqual(new_computer.get_shebang(), options_dict['shebang'])
-        self.assertEqual(new_computer.get_workdir(), options_dict['work-dir'])
-        self.assertEqual(new_computer.get_default_mpiprocs_per_machine(), int(options_dict['mpiprocs-per-machine']))
-        # For now I'm not writing anything in them
-        self.assertEqual(new_computer.get_prepend_text(), '')
-        self.assertEqual(new_computer.get_append_text(), '')
-
     def test_mixed(self):
         """
         Test verdi computer setup in mixed mode.
@@ -317,7 +288,7 @@ def test_noninteractive_from_config(self):
         with tempfile.NamedTemporaryFile('w') as handle:
             handle.write("""---
 label: {l}
-hostname: {l}
+hostname: myhost
 transport: local
 scheduler: direct
 """.format(l=label))
@@ -749,3 +720,33 @@ def test_computer_duplicate_non_interactive(self):
         self.assertEqual(self.comp.get_default_mpiprocs_per_machine(), new_computer.get_default_mpiprocs_per_machine())
         self.assertEqual(self.comp.get_prepend_text(), new_computer.get_prepend_text())
         self.assertEqual(self.comp.get_append_text(), new_computer.get_append_text())
+
+
+@pytest.mark.parametrize('non_interactive_editor', ('sleep 1; vim -cwq',), indirect=True)
+def test_interactive(clear_database_before_test, aiida_localhost, non_interactive_editor):
+    """Test verdi computer setup in interactive mode."""
+    label = 'interactive_computer'
+
+    options_dict = generate_setup_options_dict(replace_args={'label': label}, non_interactive=False)
+    # In any case, these would be managed by the visual editor
+    options_dict.pop('prepend-text')
+    options_dict.pop('append-text')
+    user_input = '\n'.join(generate_setup_options_interactive(options_dict))
+
+    result = CliRunner().invoke(computer_setup, input=user_input)
+    assert result.exception is None, 'There was an unexpected exception. Output: {}'.format(result.output)
+
+    new_computer = orm.Computer.objects.get(name=label)
+    assert isinstance(new_computer, orm.Computer)
+
+    assert new_computer.description == options_dict['description']
+    assert new_computer.hostname == options_dict['hostname']
+    assert new_computer.get_transport_type() == options_dict['transport']
+    assert new_computer.get_scheduler_type() == options_dict['scheduler']
+    assert new_computer.get_mpirun_command() == options_dict['mpirun-command'].split()
+    assert new_computer.get_shebang() == options_dict['shebang']
+    assert new_computer.get_workdir() == options_dict['work-dir']
+    assert new_computer.get_default_mpiprocs_per_machine() == int(options_dict['mpiprocs-per-machine'])
+    # For now I'm not writing anything in them
+    assert new_computer.get_prepend_text() == ''
+    assert new_computer.get_append_text() == ''
diff --git a/tests/cmdline/commands/test_daemon.py b/tests/cmdline/commands/test_daemon.py
index 034cc7e1b6..3a9cda21a2 100644
--- a/tests/cmdline/commands/test_daemon.py
+++ b/tests/cmdline/commands/test_daemon.py
@@ -10,6 +10,7 @@
 """Tests for `verdi daemon`."""

 from click.testing import CliRunner
+import pytest

 from aiida.backends.testbase import AiidaTestCase
 from aiida.cmdline.commands import cmd_daemon
@@ -42,6 +43,7 @@ def test_daemon_start(self):
         finally:
             self.daemon_client.stop_daemon(wait=True)

+    @pytest.mark.skip(reason='Test fails non-deterministically; see issue #3051.')
     def test_daemon_start_number(self):
         """Test `verdi daemon start` with a specific number of workers."""
diff --git a/tests/cmdline/commands/test_data.py b/tests/cmdline/commands/test_data.py
index ff63bfa927..e4d507235a 100644
--- a/tests/cmdline/commands/test_data.py
+++ b/tests/cmdline/commands/test_data.py
@@ -7,7 +7,7 @@
 # For further information on the license, see the LICENSE.txt file       #
 # For further information please visit http://www.aiida.net               #
 ###########################################################################
-# pylint: disable=no-member
+# pylint: disable=no-member, too-many-lines
 """Test data-related verdi commands."""

 import io
@@ -141,7 +141,6 @@ def data_listing_test(self, datatype, search_string, ids):

         # Check that the past days filter works as expected
         past_days_flags = ['-p', '--past-days']
-        # past_days_flags = ['-p']
         for flag in past_days_flags:
             options = [flag, '1']
             res = self.cli_runner.invoke(listing_cmd, options, catch_exceptions=False)
@@ -158,6 +157,7 @@ def data_listing_test(self, datatype, search_string, ids):
         )

         # Check that the group filter works as expected
+        # if ids is not None:
         group_flags = ['-G', '--groups']
         for flag in group_flags:
             # Non empty group
@@ -289,10 +289,14 @@ def connect_structure_bands(strct):  # pylint: disable=unused-argument

         bands = connect_structure_bands(strct)

+        bands_isolated = BandsData()
+        bands_isolated.store()
+
         # Create 2 groups and add the data to one of them
         g_ne = Group(label='non_empty_group')
         g_ne.store()
         g_ne.add_nodes(bands)
+        g_ne.add_nodes(bands_isolated)

         g_e = Group(label='empty_group')
         g_e.store()
@@ -321,6 +325,13 @@ def test_bandlistshelp(self):

     def test_bandslist(self):
         self.data_listing_test(BandsData, 'FeO', self.ids)
+        self.data_listing_test(BandsData, '<>', self.ids)
+
+    def test_bandslist_with_elements(self):
+        options = ['-e', 'Fe']
+        res = self.cli_runner.invoke(cmd_bands.bands_list, options, catch_exceptions=False)
+        self.assertIn(b'FeO', res.stdout_bytes, 'The string "FeO" was not found in the listing')
+        self.assertNotIn(b'<>', res.stdout_bytes, 'The string "<>" should not be in the listing')

     def test_bandexporthelp(self):
         output = sp.check_output(['verdi', 'data', 'bands', 'export', '--help'])
@@ -332,6 +343,31 @@ def test_bandsexport(self):
         self.assertEqual(res.exit_code, 0, 'The command did not finish correctly')
         self.assertIn(b'[1.0, 3.0]', res.stdout_bytes, 'The string [1.0, 3.0] was not found in the bands' 'export')

+    def test_bandsexport_single_kp(self):
+        """
+        Plot band for single k-point (issue #2462).
+        """
+        kpnts = KpointsData()
+        kpnts.set_kpoints([[0., 0., 0.]])
+
+        bands = BandsData()
+        bands.set_kpointsdata(kpnts)
+        bands.set_bands([[1.0, 2.0]])
+        bands.store()
+
+        # matplotlib
+        options = [str(bands.id), '--format', 'mpl_singlefile']
+        res = self.cli_runner.invoke(cmd_bands.bands_export, options, catch_exceptions=False)
+        self.assertIn(b'p.scatter', res.stdout_bytes, 'The string p.scatter was not found in the bands mpl export')
+
+        # gnuplot
+        with self.cli_runner.isolated_filesystem():
+            options = [str(bands.id), '--format', 'gnuplot', '-o', 'bands.gnu']
+            self.cli_runner.invoke(cmd_bands.bands_export, options, catch_exceptions=False)
+            with open('bands.gnu', 'r') as gnu_file:
+                res = gnu_file.read()
+                self.assertIn('vectors nohead', res, 'The string "vectors nohead" was not found in the gnuplot script')
+

 class TestVerdiDataDict(AiidaTestCase):
     """Testing verdi data dict."""
diff --git a/tests/cmdline/commands/test_database.py b/tests/cmdline/commands/test_database.py
index 131a2b7c6c..4269cf6c7e 100644
--- a/tests/cmdline/commands/test_database.py
+++ b/tests/cmdline/commands/test_database.py
@@ -9,7 +9,6 @@
 ###########################################################################
 # pylint: disable=invalid-name,protected-access
 """Tests for `verdi database`."""
-
 import enum

 from click.testing import CliRunner
@@ -17,7 +16,7 @@
 from aiida.backends.testbase import AiidaTestCase
 from aiida.cmdline.commands import cmd_database
 from aiida.common.links import LinkType
-from aiida.orm import Data, Node, CalculationNode, WorkflowNode
+from aiida.orm import Data, CalculationNode, WorkflowNode


 class TestVerdiDatabasaIntegrity(AiidaTestCase):
@@ -162,11 +161,11 @@ def test_detect_invalid_nodes_unknown_node_type(self):
         self.assertEqual(result.exit_code, 0)
         self.assertClickResultNoException(result)

-        # Create a node with invalid type: a base Node type string is considered invalid
-        # Note that there is guard against storing base Nodes for this reason, which we temporarily disable
-        Node._storable = True
-        Node().store()
-        Node._storable = False
+        # Create a node with invalid type: since there are a lot of validation rules that prevent us from creating an
+        # invalid node type normally, we have to do it manually on the database model instance before storing
+        node = Data()
+        node.backend_entity.dbmodel.node_type = '__main__.SubClass.'
+        node.store()

         result = self.cli_runner.invoke(cmd_database.detect_invalid_nodes, [])
         self.assertNotEqual(result.exit_code, 0)
diff --git a/tests/cmdline/commands/test_export.py b/tests/cmdline/commands/test_export.py
index 4e0d5a3233..441dcfae68 100644
--- a/tests/cmdline/commands/test_export.py
+++ b/tests/cmdline/commands/test_export.py
@@ -19,7 +19,7 @@

 from aiida.backends.testbase import AiidaTestCase
 from aiida.cmdline.commands import cmd_export
-from aiida.tools.importexport import EXPORT_VERSION
+from aiida.tools.importexport import EXPORT_VERSION, Archive

 from tests.utils.archives import get_archive_file

@@ -160,6 +160,26 @@ def test_migrate_versions_old(self):
         finally:
             delete_temporary_file(filename_output)

+    def test_migrate_version_specific(self):
+        """Test the `-v/--version` option to migrate to a specific version instead of the latest."""
+        archive = 'export_v0.1_simple.aiida'
+        target_version = '0.2'
+
+        filename_input = get_archive_file(archive, filepath=self.fixture_archive)
+        filename_output = next(tempfile._get_candidate_names())  # pylint: disable=protected-access
+
+        try:
+            options = [filename_input, filename_output, '--version', target_version]
+            result = self.cli_runner.invoke(cmd_export.migrate, options)
+            self.assertIsNone(result.exception, result.output)
+            self.assertTrue(os.path.isfile(filename_output))
+            self.assertEqual(zipfile.ZipFile(filename_output).testzip(), None)
+
+            with Archive(filename_output) as archive_object:
+                self.assertEqual(archive_object.version_format, target_version)
+        finally:
+            delete_temporary_file(filename_output)
+
     def test_migrate_versions_recent(self):
         """Migrating an archive with the current version should exit with non-zero status."""
         filename_input = get_archive_file(self.newest_archive, filepath=self.fixture_archive)
diff --git a/tests/cmdline/commands/test_group.py b/tests/cmdline/commands/test_group.py
index 79171b3164..ab79f650b1 100644
--- a/tests/cmdline/commands/test_group.py
+++ b/tests/cmdline/commands/test_group.py
@@ -8,101 +8,121 @@
 # For further information please visit http://www.aiida.net               #
 ###########################################################################
 """Tests for the `verdi group` command."""
-
 from aiida import orm
 from aiida.backends.testbase import AiidaTestCase
 from aiida.common import exceptions
-from aiida.cmdline.commands.cmd_group import (
-    group_list, group_create, group_delete, group_relabel, group_description, group_add_nodes, group_remove_nodes,
-    group_show, group_copy
-)
+from aiida.cmdline.commands import cmd_group


 class TestVerdiGroup(AiidaTestCase):
     """Tests for the `verdi group` command."""

-    @classmethod
-    def setUpClass(cls, *args, **kwargs):
-        super().setUpClass(*args, **kwargs)
-        for group in ['dummygroup1', 'dummygroup2', 'dummygroup3', 'dummygroup4']:
-            orm.Group(label=group).store()
-
     def setUp(self):
         """Create runner object to run tests."""
         from click.testing import CliRunner
         self.cli_runner = CliRunner()

+        for group in ['dummygroup1', 'dummygroup2', 'dummygroup3', 'dummygroup4']:
+            orm.Group(label=group).store()
+
+    def tearDown(self):
+        """Delete all created group objects."""
+        for group in orm.Group.objects.all():
+            orm.Group.objects.delete(group.pk)
+
     def test_help(self):
         """Tests help text for all group sub commands."""
         options = ['--help']

         # verdi group list
-        result = self.cli_runner.invoke(group_list, options)
+        result = self.cli_runner.invoke(cmd_group.group_list, options)
         self.assertIsNone(result.exception, result.output)
         self.assertIn('Usage', result.output)

         # verdi group create
-        result = self.cli_runner.invoke(group_create, options)
+        result = self.cli_runner.invoke(cmd_group.group_create, options)
         self.assertIsNone(result.exception, result.output)
         self.assertIn('Usage', result.output)

         # verdi group delete
-        result = self.cli_runner.invoke(group_delete, options)
+        result = self.cli_runner.invoke(cmd_group.group_delete, options)
         self.assertIsNone(result.exception, result.output)
         self.assertIn('Usage', result.output)

         # verdi group relabel
-        result = self.cli_runner.invoke(group_relabel, options)
+        result = self.cli_runner.invoke(cmd_group.group_relabel, options)
         self.assertIsNone(result.exception, result.output)
         self.assertIn('Usage', result.output)

         # verdi group description
-        result = self.cli_runner.invoke(group_description, options)
+        result = self.cli_runner.invoke(cmd_group.group_description, options)
         self.assertIsNone(result.exception, result.output)
         self.assertIn('Usage', result.output)

         # verdi group addnodes
-        result = self.cli_runner.invoke(group_add_nodes, options)
+        result = self.cli_runner.invoke(cmd_group.group_add_nodes, options)
         self.assertIsNone(result.exception, result.output)
         self.assertIn('Usage', result.output)

         # verdi group removenodes
-        result = self.cli_runner.invoke(group_remove_nodes, options)
+        result = self.cli_runner.invoke(cmd_group.group_remove_nodes, options)
         self.assertIsNone(result.exception, result.output)
         self.assertIn('Usage', result.output)

         # verdi group show
-        result = self.cli_runner.invoke(group_show, options)
+        result = self.cli_runner.invoke(cmd_group.group_show, options)
         self.assertIsNone(result.exception, result.output)
         self.assertIn('Usage', result.output)

         # verdi group copy
-        result = self.cli_runner.invoke(group_copy, options)
+        result = self.cli_runner.invoke(cmd_group.group_copy, options)
         self.assertIsNone(result.exception, result.output)
         self.assertIn('Usage', result.output)

     def test_create(self):
         """Test `verdi group create` command."""
-        result = self.cli_runner.invoke(group_create, ['dummygroup5'])
+        result = self.cli_runner.invoke(cmd_group.group_create, ['dummygroup5'])
         self.assertClickResultNoException(result)

         # check if newly added group in present in list
-        result = self.cli_runner.invoke(group_list)
+        result = self.cli_runner.invoke(cmd_group.group_list)
         self.assertClickResultNoException(result)

         self.assertIn('dummygroup5', result.output)

     def test_list(self):
         """Test `verdi group list` command."""
-        result = self.cli_runner.invoke(group_list)
+        result = self.cli_runner.invoke(cmd_group.group_list)
         self.assertClickResultNoException(result)

         for grp in ['dummygroup1', 'dummygroup2']:
             self.assertIn(grp, result.output)

+    def test_list_order(self):
+        """Test `verdi group list` command with ordering options."""
+        orm.Group(label='agroup').store()
+
+        options = []
+        result = self.cli_runner.invoke(cmd_group.group_list, options)
+        self.assertClickResultNoException(result)
+        group_ordering = [l.split()[1] for l in result.output.split('\n')[3:] if l]
+        self.assertEqual(['dummygroup1', 'dummygroup2', 'dummygroup3', 'dummygroup4', 'agroup'], group_ordering)
+
+        options = ['--order-by', 'label']
+        result = self.cli_runner.invoke(cmd_group.group_list, options)
+        self.assertClickResultNoException(result)
+        group_ordering = [l.split()[1] for l in result.output.split('\n')[3:] if l]
+        self.assertEqual(['agroup', 'dummygroup1', 'dummygroup2', 'dummygroup3', 'dummygroup4'], group_ordering)
+
+        options = ['--order-by', 'id', '--order-direction', 'desc']
+        result = self.cli_runner.invoke(cmd_group.group_list, options)
+        self.assertClickResultNoException(result)
+        group_ordering = [l.split()[1] for l in result.output.split('\n')[3:] if l]
+        self.assertEqual(['agroup', 'dummygroup4', 'dummygroup3', 'dummygroup2', 'dummygroup1'], group_ordering)
+
     def test_copy(self):
         """Test `verdi group copy` command."""
-        result = self.cli_runner.invoke(group_copy, ['dummygroup1', 'dummygroup2'])
+        result = self.cli_runner.invoke(cmd_group.group_copy, ['dummygroup1', 'dummygroup2'])
         self.assertClickResultNoException(result)

         self.assertIn('Success', result.output)
@@ -112,11 +132,11 @@ def test_delete(self):
         orm.Group(label='group_test_delete_01').store()
         orm.Group(label='group_test_delete_02').store()

-        result = self.cli_runner.invoke(group_delete, ['--force', 'group_test_delete_01'])
+        result = self.cli_runner.invoke(cmd_group.group_delete, ['--force', 'group_test_delete_01'])
         self.assertClickResultNoException(result)

         # Verify that removed group is not present in list
-        result = self.cli_runner.invoke(group_list)
+        result = self.cli_runner.invoke(cmd_group.group_list)
         self.assertClickResultNoException(result)
         self.assertNotIn('group_test_delete_01', result.output)

@@ -129,11 +149,11 @@ def test_delete(self):
         self.assertEqual(group.count(), 2)

         # Calling delete on a group without the `--clear` option should raise
-        result = self.cli_runner.invoke(group_delete, ['--force', 'group_test_delete_02'])
+        result = self.cli_runner.invoke(cmd_group.group_delete, ['--force', 'group_test_delete_02'])
         self.assertIsNotNone(result.exception, result.output)

         # With `--clear` option should delete group and nodes
-        result = self.cli_runner.invoke(group_delete, ['--force', '--clear', 'group_test_delete_02'])
+        result = self.cli_runner.invoke(cmd_group.group_delete, ['--force', '--clear', 'group_test_delete_02'])
         self.assertClickResultNoException(result)

         with self.assertRaises(exceptions.NotExistent):
@@ -141,14 +161,37 @@ def test_delete(self):

     def test_show(self):
         """Test `verdi group show` command."""
-        result = self.cli_runner.invoke(group_show, ['dummygroup1'])
+        result = self.cli_runner.invoke(cmd_group.group_show, ['dummygroup1'])
         self.assertClickResultNoException(result)

         for grpline in [
-            'Group label', 'dummygroup1', 'Group type_string', 'user', 'Group description', ''
+            'Group label', 'dummygroup1', 'Group type_string', 'core', 'Group description', ''
         ]:
             self.assertIn(grpline, result.output)

+    def test_show_limit(self):
+        """Test `--limit` option of the `verdi group show` command."""
+        label = 'test_group_limit'
+        nodes = [orm.Data().store(), orm.Data().store()]
+        group = orm.Group(label=label).store()
+        group.add_nodes(nodes)
+
+        # Default should include all nodes in the output
+        result = self.cli_runner.invoke(cmd_group.group_show, [label])
+        self.assertClickResultNoException(result)
+
+        for node in nodes:
+            self.assertIn(str(node.pk), result.output)
+
+        # Repeat test with `limit=1`, use also the `--raw` option to only display nodes
+        result = self.cli_runner.invoke(cmd_group.group_show, [label, '--limit', '1', '--raw'])
+        self.assertClickResultNoException(result)
+
+        # The current `verdi group show` does not support ordering so we cannot rely on that for now to test if only
+        # one of the nodes is shown
+        self.assertEqual(len(result.output.strip().split('\n')), 1)
+        self.assertTrue(str(nodes[0].pk) in result.output or str(nodes[1].pk) in result.output)
+
     def test_description(self):
         """Test `verdi group description` command."""
         description = 'It is a new description'
@@ -156,22 +199,22 @@ def test_description(self):
         self.assertNotEqual(group.description, description)

         # Change the description of the group
-        result = self.cli_runner.invoke(group_description, [group.label, description])
+        result = self.cli_runner.invoke(cmd_group.group_description, [group.label, description])
         self.assertClickResultNoException(result)
         self.assertEqual(group.description, description)

         # When no description argument is passed the command should just echo the current description
-        result = self.cli_runner.invoke(group_description, [group.label])
+        result = self.cli_runner.invoke(cmd_group.group_description, [group.label])
         self.assertClickResultNoException(result)
         self.assertIn(description, result.output)

     def test_relabel(self):
         """Test `verdi group relabel` command."""
-        result = self.cli_runner.invoke(group_relabel, ['dummygroup4', 'relabeled_group'])
+        result = self.cli_runner.invoke(cmd_group.group_relabel, ['dummygroup4', 'relabeled_group'])
         self.assertIsNone(result.exception, result.output)

         # check if group list command shows changed group name
-        result = self.cli_runner.invoke(group_list)
+        result = self.cli_runner.invoke(cmd_group.group_list)
         self.assertClickResultNoException(result)
         self.assertNotIn('dummygroup4', result.output)
         self.assertIn('relabeled_group', result.output)
@@ -182,21 +225,21 @@ def test_add_remove_nodes(self):
         node_02 = orm.CalculationNode().store()
         node_03 = orm.CalculationNode().store()

-        result = self.cli_runner.invoke(group_add_nodes, ['--force', '--group=dummygroup1', node_01.uuid])
+        result = self.cli_runner.invoke(cmd_group.group_add_nodes, ['--force', '--group=dummygroup1', node_01.uuid])
         self.assertClickResultNoException(result)

         # Check if node is added in group using group show command
-        result = self.cli_runner.invoke(group_show, ['dummygroup1'])
+        result = self.cli_runner.invoke(cmd_group.group_show, ['dummygroup1'])
         self.assertClickResultNoException(result)
         self.assertIn('CalculationNode', result.output)
         self.assertIn(str(node_01.pk), result.output)

         # Remove same node
-        result = self.cli_runner.invoke(group_remove_nodes, ['--force', '--group=dummygroup1', node_01.uuid])
+        result = self.cli_runner.invoke(cmd_group.group_remove_nodes, ['--force', '--group=dummygroup1', node_01.uuid])
         self.assertIsNone(result.exception, result.output)

         # Check if node is added in group using group show command
-        result = self.cli_runner.invoke(group_show, ['-r', 'dummygroup1'])
+        result = self.cli_runner.invoke(cmd_group.group_show, ['-r', 'dummygroup1'])
         self.assertClickResultNoException(result)
         self.assertNotIn('CalculationNode', result.output)
         self.assertNotIn(str(node_01.pk), result.output)
@@ -206,7 +249,7 @@ def test_add_remove_nodes(self):
         group.add_nodes([node_01, node_02, node_03])
         self.assertEqual(group.count(), 3)

-        result = self.cli_runner.invoke(group_remove_nodes, ['--force', '--clear', '--group=dummygroup1'])
+        result = self.cli_runner.invoke(cmd_group.group_remove_nodes, ['--force', '--clear', '--group=dummygroup1'])
         self.assertClickResultNoException(result)
         self.assertEqual(group.count(), 0)

@@ -224,7 +267,7 @@ def test_copy_existing_group(self):

         # Copy using `verdi group copy` - making sure all is successful
         options = [source_label, dest_label]
-        result = self.cli_runner.invoke(group_copy, options)
+        result = self.cli_runner.invoke(cmd_group.group_copy, options)
         self.assertClickResultNoException(result)
         self.assertIn(
             'Success: Nodes copied from group<{}> to group<{}>'.format(source_label, dest_label), result.output,
@@ -238,7 +281,7 @@ def test_copy_existing_group(self):
         self.assertSetEqual(nodes_source_group, nodes_dest_group)

         # Copy again, making sure an abort error is raised, since no user input can be made and default is abort
-        result = self.cli_runner.invoke(group_copy, options)
+        result = self.cli_runner.invoke(cmd_group.group_copy, options)
         self.assertIsNotNone(result.exception, result.output)
         self.assertIn(
             'Warning: Destination group<{}> already exists and is not empty.'.format(dest_label), result.output,
diff --git a/tests/cmdline/commands/test_group_ls.py b/tests/cmdline/commands/test_group_ls.py
new file mode 100644
index 0000000000..d1982d56b5
--- /dev/null
+++ b/tests/cmdline/commands/test_group_ls.py
@@ -0,0 +1,127 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file       #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+"""Tests for GroupPath command line interface"""
+# pylint: disable=redefined-outer-name,unused-argument
+from textwrap import dedent
+
+from click.testing import CliRunner
+import pytest
+
+from aiida import orm
+from aiida.cmdline.commands.cmd_group import group_path_ls
+
+
+@pytest.fixture
+def setup_groups(clear_database_before_test):
+    """Setup some groups for testing."""
+    for label in ['a', 'a/b', 'a/c/d', 'a/c/e/g', 'a/f']:
+        group, _ = orm.Group.objects.get_or_create(label)
+        group.description = 'A description of {}'.format(label)
+    orm.UpfFamily.objects.get_or_create('a/x')
+    yield
+
+
+def test_with_no_opts(setup_groups):
+    """Test ``verdi group path ls``"""
+
+    cli_runner = CliRunner()
+
+    result = cli_runner.invoke(group_path_ls)
+    assert result.exit_code == 0, result.exception
+    assert result.output == 'a\n'
+
+    result = cli_runner.invoke(group_path_ls, ['a'])
+    assert result.exit_code == 0, result.exception
+    assert result.output == 'a/b\na/c\na/f\n'
+
+    result = cli_runner.invoke(group_path_ls, ['a/c'])
+    assert result.exit_code == 0, result.exception
+    assert result.output == 'a/c/d\na/c/e\n'
+
+
+def test_recursive(setup_groups):
+    """Test ``verdi group path ls --recursive``"""
+
+    cli_runner = CliRunner()
+
+    for tag in ['-R', '--recursive']:
+        result = cli_runner.invoke(group_path_ls, [tag])
+        assert result.exit_code == 0, result.exception
+        assert result.output == 'a\na/b\na/c\na/c/d\na/c/e\na/c/e/g\na/f\n'
+
+        result = cli_runner.invoke(group_path_ls, [tag, 'a/c'])
+        assert result.exit_code == 0, result.exception
+        assert result.output == 'a/c/d\na/c/e\na/c/e/g\n'
+
+
+@pytest.mark.parametrize('tag', ['-l', '--long'])
+def test_long(setup_groups, tag):
+    """Test ``verdi group path ls --long``"""
+
+    cli_runner = CliRunner()
+
+    result = cli_runner.invoke(group_path_ls, [tag])
+    assert result.exit_code == 0, result.exception
+    assert result.output == dedent(
+        """\
+        Path      Sub-Groups
+        ------  ------------
+        a                  4
+        """
+    )
+
+    result = cli_runner.invoke(group_path_ls, [tag, '-d', 'a'])
+    assert result.exit_code == 0, result.exception
+    assert result.output == dedent(
+        """\
+        Path      Sub-Groups  Description
+        ------  ------------  --------------------
+        a/b                0  A description of a/b
+        a/c                2  -
+        a/f                0  A description of a/f
+        """
+    )
+
+    result = cli_runner.invoke(group_path_ls, [tag, '-R'])
+    assert result.exit_code == 0, result.exception
+    assert result.output == dedent(
+        """\
+        Path       Sub-Groups
+        -------  ------------
+        a                   4
+        a/b                 0
+        a/c                 2
+        a/c/d               0
+        a/c/e               1
+        a/c/e/g             0
+        a/f                 0
+        """
+    )
+
+
+@pytest.mark.parametrize('tag', ['--no-virtual'])
+def test_groups_only(setup_groups, tag):
+    """Test ``verdi group path ls --no-virtual``"""
+
+    cli_runner = CliRunner()
+
+    result = cli_runner.invoke(group_path_ls, [tag, '-l', '-R', '--with-description'])
+    assert result.exit_code == 0, result.exception
+    assert result.output == dedent(
+        """\
+        Path       Sub-Groups  Description
+        -------  ------------  ------------------------
+        a                   4  A description of a
+        a/b                 0  A description of a/b
+        a/c/d               0  A description of a/c/d
+        a/c/e/g             0  A description of a/c/e/g
+        a/f                 0  A description of a/f
+        """
+    )
diff --git a/tests/cmdline/commands/test_run.py b/tests/cmdline/commands/test_run.py
index 3cae78fb70..4ed690bb20 100644
--- a/tests/cmdline/commands/test_run.py
+++ b/tests/cmdline/commands/test_run.py
@@ -8,6 +8,10 @@
 # For further information please visit http://www.aiida.net               #
 ###########################################################################
 """Tests for `verdi run`."""
+import tempfile
+import textwrap
+import warnings
+
 from click.testing import CliRunner

 from aiida.backends.testbase import AiidaTestCase
@@ -28,22 +32,22 @@ def test_run_workfunction(self):
         that are defined within the script will fail, as the inspect module will not correctly be able to determine
         the full path of the source file.
         """
-        import tempfile
-        from aiida.orm import load_node
-        from aiida.orm import WorkFunctionNode
+        from aiida.orm import load_node, WorkFunctionNode

-        script_content = """
-#!/usr/bin/env python
-from aiida.engine import workfunction
+        script_content = textwrap.dedent(
+            """\
+            #!/usr/bin/env python
+            from aiida.engine import workfunction

-@workfunction
-def wf():
-    pass
+            @workfunction
+            def wf():
+                pass

-if __name__ == '__main__':
-    result, node = wf.run_get_node()
-    print(node.pk)
-    """
+            if __name__ == '__main__':
+                result, node = wf.run_get_node()
+                print(node.pk)
+            """
+        )

         # If `verdi run` is not setup correctly, the script above when run with `verdi run` will fail, because when
         # the engine will try to create the node for the workfunction and create a copy of its sourcefile, namely the
@@ -64,3 +68,374 @@ def wf():
         self.assertTrue(isinstance(node, WorkFunctionNode))
         self.assertEqual(node.function_name, 'wf')
         self.assertEqual(node.get_function_source_code(), script_content)
+
+
+class TestAutoGroups(AiidaTestCase):
+    """Test the autogroup functionality."""
+
+    def setUp(self):
+        """Setup the CLI runner to run command line commands."""
+        from aiida.orm import autogroup
+
+        super().setUp()
+        self.cli_runner = CliRunner()
+        # I need to disable the global variable of this test environment, because invoke is just calling the function
+        # and therefore inheriting the global variable
+        self._old_autogroup = autogroup.CURRENT_AUTOGROUP
+        autogroup.CURRENT_AUTOGROUP = None
+
+    def tearDown(self):
+        """Restore the original global autogroup variable."""
+        from aiida.orm import autogroup
+
+        super().tearDown()
+        autogroup.CURRENT_AUTOGROUP = self._old_autogroup
+
+    def test_autogroup(self):
+        """Check if the autogroup is properly generated."""
+        from aiida.orm import QueryBuilder, Node, AutoGroup, load_node
+
+        script_content = textwrap.dedent(
+            """\
+            from aiida.orm import Data
+            node = Data().store()
+            print(node.pk)
+            """
+        )
+
+        with tempfile.NamedTemporaryFile(mode='w+') as fhandle:
+            fhandle.write(script_content)
+            fhandle.flush()
+
+            options = ['--auto-group', fhandle.name]
+            result = self.cli_runner.invoke(cmd_run.run, options)
+            self.assertClickResultNoException(result)
+
+            pk = int(result.output)
+            _ = load_node(pk)  # Check if the node can be loaded
+
+            queryb = QueryBuilder().append(Node, filters={'id': pk}, tag='node')
+            queryb.append(AutoGroup, with_node='node', project='*')
+            all_auto_groups = queryb.all()
+            self.assertEqual(
+                len(all_auto_groups), 1, 'There should be only one autogroup associated with the node just created'
+            )
+
+    def test_autogroup_custom_label(self):
+        """Check if the autogroup is properly generated with the label specified."""
+        from aiida.orm import QueryBuilder, Node, AutoGroup, load_node
+
+        script_content = textwrap.dedent(
+            """\
+            from aiida.orm import Data
+            node = Data().store()
+            print(node.pk)
+            """
+        )
+
+        autogroup_label = 'SOME_group_LABEL'
+        with tempfile.NamedTemporaryFile(mode='w+') as fhandle:
+            fhandle.write(script_content)
+            fhandle.flush()
+
+            options = [fhandle.name, '--auto-group', '--auto-group-label-prefix', autogroup_label]
+            result = self.cli_runner.invoke(cmd_run.run, options)
+            self.assertClickResultNoException(result)
+
+            pk = int(result.output)
+            _ = load_node(pk)  # Check if the node can be loaded
+
+            queryb = QueryBuilder().append(Node, filters={'id': pk}, tag='node')
+            queryb.append(AutoGroup, with_node='node', project='*')
+            all_auto_groups = queryb.all()
+            self.assertEqual(
+                len(all_auto_groups), 1, 'There should be only one autogroup associated with the node just created'
+            )
+            self.assertEqual(all_auto_groups[0][0].label, autogroup_label)
+
+    def test_no_autogroup(self):
+        """Check if the autogroup is not generated if ``verdi run`` is asked not to."""
+        from aiida.orm import QueryBuilder, Node, AutoGroup, load_node
+
+        script_content = textwrap.dedent(
+            """\
+            from aiida.orm import Data
+            node = Data().store()
+            print(node.pk)
+            """
+        )
+
+        with tempfile.NamedTemporaryFile(mode='w+') as fhandle:
+            fhandle.write(script_content)
+            fhandle.flush()
+
+            options = [fhandle.name]  # Not storing an autogroup by default
+            result = self.cli_runner.invoke(cmd_run.run, options)
+            self.assertClickResultNoException(result)
+
+            pk = int(result.output)
+            _ = load_node(pk)  # Check if the node can be loaded
+
+            queryb = QueryBuilder().append(Node, filters={'id': pk}, tag='node')
+            queryb.append(AutoGroup, with_node='node', project='*')
+            all_auto_groups = queryb.all()
+            self.assertEqual(len(all_auto_groups), 0, 'There should be no autogroup generated')
+
+    def test_autogroup_filter_class(self):  # pylint: disable=too-many-locals
+        """Check if the autogroup is properly generated but filtered classes are skipped."""
+        from aiida.orm import Code, QueryBuilder, Node, AutoGroup, load_node
+
+        script_content = textwrap.dedent(
+            """\
+            import sys
+            from aiida.orm import Computer, Int, ArrayData, KpointsData, CalculationNode, WorkflowNode
+            from aiida.plugins import CalculationFactory
+            from aiida.engine import run_get_node
+            ArithmeticAdd = CalculationFactory('arithmetic.add')
+
+            computer = Computer(
+                name='localhost-example-{}'.format(sys.argv[1]),
+                hostname='localhost',
+                description='my computer',
+                transport_type='local',
+                scheduler_type='direct',
+                workdir='/tmp'
+            ).store()
+            computer.configure()
+
+            code = Code(
+                input_plugin_name='arithmetic.add',
+                remote_computer_exec=[computer, '/bin/true']).store()
+            inputs = {
+                'x': Int(1),
+                'y': Int(2),
+                'code': code,
+                'metadata': {
+                    'options': {
+                        'resources': {
+                            'num_machines': 1,
+                            'num_mpiprocs_per_machine': 1
+                        }
+                    }
+                }
+            }
+
+            node1 = KpointsData().store()
+            node2 = ArrayData().store()
+            node3 = Int(3).store()
+            node4 = CalculationNode().store()
+            node5 = WorkflowNode().store()
+            _, node6 = run_get_node(ArithmeticAdd, **inputs)
+            print(node1.pk)
+            print(node2.pk)
+            print(node3.pk)
+            print(node4.pk)
+            print(node5.pk)
+            print(node6.pk)
+            """
+        )
+
+        Code()
+        for idx, (
+            flags,
+            kptdata_in_autogroup,
+            arraydata_in_autogroup,
+            int_in_autogroup,
+            calc_in_autogroup,
+            wf_in_autogroup,
+            calcarithmetic_in_autogroup,
+        ) in enumerate([
+            [['--exclude', 'aiida.data:array.kpoints'], False, True, True, True, True, True],
+            # Check if % works anywhere - both 'int' and 'array.kpoints' contain an 'i'
+            [['--exclude', 'aiida.data:%i%'], False, True, False, True, True, True],
+            [['--exclude', 'aiida.data:int'], True, True, False, True, True, True],
+            [['--exclude', 'aiida.data:%'], False, False, False, True, True, True],
+            [['--exclude', 'aiida.data:array', 'aiida.data:array.%'], False, False, True, True, True, True],
+            [['--exclude', 'aiida.data:array', 'aiida.data:array.%', 'aiida.data:int'], False, False, False, True, True,
+             True],
+            [['--exclude', 'aiida.calculations:arithmetic.add'], True, True, True, True, True, False],
+            [
+                ['--include', 'aiida.node:process.calculation'],  # Base type, no specific plugin
+                False,
+                False,
+                False,
+                True,
+                False,
+                False
+            ],
+            [
+                ['--include', 'aiida.node:process.workflow'],  # Base type, no specific plugin
+                False,
+                False,
+                False,
+                False,
+                True,
+                False
+            ],
+            [[], True, True, True, True, True, True],
+        ]):
+            with tempfile.NamedTemporaryFile(mode='w+') as fhandle:
+                fhandle.write(script_content)
+                fhandle.flush()
+
+                options = ['--auto-group'] + flags + ['--', fhandle.name, str(idx)]
+                result = self.cli_runner.invoke(cmd_run.run, options)
+                self.assertClickResultNoException(result)
+
+                pk1_str, pk2_str, pk3_str, pk4_str, pk5_str, pk6_str = result.output.split()
+                pk1 = int(pk1_str)
+                pk2 = int(pk2_str)
+                pk3 = int(pk3_str)
+                pk4 = int(pk4_str)
+                pk5 = int(pk5_str)
+                pk6 = int(pk6_str)
+                _ = load_node(pk1)  # Check if the node can be loaded
+                _ = load_node(pk2)  # Check if the node can be loaded
+                _ = load_node(pk3)  # Check if the node can be loaded
+                _ = load_node(pk4)  # Check if the node can be loaded
+                _ = load_node(pk5)  # Check if the node can be loaded
+                _ = load_node(pk6)  # Check if the node can be loaded
+
+                queryb = QueryBuilder().append(Node, filters={'id': pk1}, tag='node')
+                queryb.append(AutoGroup, with_node='node', project='*')
+                all_auto_groups_kptdata = queryb.all()
+
+                queryb = QueryBuilder().append(Node, filters={'id': pk2}, tag='node')
+                queryb.append(AutoGroup, with_node='node', project='*')
+                all_auto_groups_arraydata = queryb.all()
+
+                queryb = QueryBuilder().append(Node, filters={'id': pk3}, tag='node')
+                queryb.append(AutoGroup, with_node='node', project='*')
+                all_auto_groups_int = queryb.all()
+
+                queryb = QueryBuilder().append(Node, filters={'id': pk4}, tag='node')
+                queryb.append(AutoGroup, with_node='node', project='*')
+                all_auto_groups_calc = queryb.all()
+
+                queryb = QueryBuilder().append(Node, filters={'id': pk5}, tag='node')
+                queryb.append(AutoGroup, with_node='node', project='*')
+                all_auto_groups_wf = queryb.all()
+
+                queryb = QueryBuilder().append(Node, filters={'id': pk6}, tag='node')
+                queryb.append(AutoGroup, with_node='node', project='*')
+                all_auto_groups_calcarithmetic = queryb.all()
+
+                self.assertEqual(
+                    len(all_auto_groups_kptdata), 1 if kptdata_in_autogroup else 0,
+                    'Wrong number of nodes in autogroup associated with the KpointsData node '
+                    "just created with flags '{}'".format(' '.join(flags))
+                )
+                self.assertEqual(
+                    len(all_auto_groups_arraydata), 1 if arraydata_in_autogroup else 0,
+                    'Wrong number of nodes in autogroup associated with the ArrayData node '
+                    "just created with flags '{}'".format(' '.join(flags))
+                )
+                self.assertEqual(
+                    len(all_auto_groups_int), 1 if int_in_autogroup else 0,
+                    'Wrong number of nodes in autogroup associated with the Int node '
+                    "just created with flags '{}'".format(' '.join(flags))
+                )
+                self.assertEqual(
+                    len(all_auto_groups_calc), 1 if calc_in_autogroup else 0,
+                    'Wrong number of nodes in autogroup associated with the CalculationNode '
+                    "just created with flags '{}'".format(' '.join(flags))
+                )
+                self.assertEqual(
+                    len(all_auto_groups_wf), 1 if wf_in_autogroup else 0,
+                    'Wrong number of nodes in autogroup associated with the WorkflowNode '
+                    "just created with flags '{}'".format(' '.join(flags))
+                )
+                self.assertEqual(
+                    len(all_auto_groups_calcarithmetic), 1 if calcarithmetic_in_autogroup else 0,
+                    'Wrong number of nodes in autogroup associated with the ArithmeticAdd CalcJobNode '
+                    "just created with flags '{}'".format(' '.join(flags))
+                )
+
+    def test_autogroup_clashing_label(self):
+        """Check if the autogroup label is properly (re)generated when it clashes with an existing group name."""
+        from aiida.orm import QueryBuilder, Node, AutoGroup, load_node
+
+        script_content = textwrap.dedent(
+            """\
+            from aiida.orm import Data
+            node = Data().store()
+            print(node.pk)
+            """
+        )
+
+        autogroup_label = 'SOME_repeated_group_LABEL'
+        with tempfile.NamedTemporaryFile(mode='w+') as fhandle:
+            fhandle.write(script_content)
+            fhandle.flush()
+
+            # First run
+            options = [fhandle.name, '--auto-group', '--auto-group-label-prefix', autogroup_label]
+            result = self.cli_runner.invoke(cmd_run.run, options)
+            self.assertClickResultNoException(result)
+
+            pk = int(result.output)
+            _ = load_node(pk)  # Check if the node can be loaded
+            queryb = QueryBuilder().append(Node, filters={'id': pk}, tag='node')
+            queryb.append(AutoGroup, with_node='node', project='*')
+            all_auto_groups = queryb.all()
+            self.assertEqual(
+                len(all_auto_groups), 1, 'There should be only one autogroup associated with the node just created'
+            )
+            self.assertEqual(all_auto_groups[0][0].label, autogroup_label)
+
+            # A few more runs with the same label - it should not crash but append something to the group name
+            for _ in range(10):
+                options = [fhandle.name, '--auto-group', '--auto-group-label-prefix', autogroup_label]
+                result = self.cli_runner.invoke(cmd_run.run, options)
+                self.assertClickResultNoException(result)
+
+                pk = int(result.output)
+                _ = load_node(pk)  # Check if the node can be loaded
+                queryb = QueryBuilder().append(Node, filters={'id': pk}, tag='node')
+                queryb.append(AutoGroup, with_node='node', project='*')
+                all_auto_groups = queryb.all()
+                self.assertEqual(
+                    len(all_auto_groups), 1, 'There should be only one autogroup associated with the node just created'
+                )
+                self.assertTrue(all_auto_groups[0][0].label.startswith(autogroup_label))
+
+    def test_legacy_autogroup_name(self):
+        """Check if the autogroup is properly generated when using the legacy --group-name flag."""
+        from aiida.orm import QueryBuilder, Node, AutoGroup, load_node
+
+        script_content = textwrap.dedent(
+            """\
+            from aiida.orm import Data
+            node = Data().store()
+            print(node.pk)
+            """
+        )
+        group_label = 'legacy-group-name'
+
+        with tempfile.NamedTemporaryFile(mode='w+') as fhandle:
+
fhandle.write(script_content)
+            fhandle.flush()
+
+            options = ['--group-name', group_label, fhandle.name]
+            with warnings.catch_warnings(record=True) as warns:  # pylint: disable=no-member
+                result = self.cli_runner.invoke(cmd_run.run, options)
+                self.assertTrue(
+                    any(['use `--auto-group-label-prefix` instead' in str(warn.message) for warn in warns]),
+                    "No warning for '--group-name' was raised"
+                )
+
+            self.assertClickResultNoException(result)
+
+            pk = int(result.output)
+            _ = load_node(pk)  # Check if the node can be loaded
+
+            queryb = QueryBuilder().append(Node, filters={'id': pk}, tag='node')
+            queryb.append(AutoGroup, with_node='node', project='*')
+            all_auto_groups = queryb.all()
+            self.assertEqual(
+                len(all_auto_groups), 1, 'There should be only one autogroup associated with the node just created'
+            )
+            self.assertEqual(
+                all_auto_groups[0][0].label, group_label,
+                'The auto group label is "{}" instead of "{}"'.format(all_auto_groups[0][0].label, group_label)
+            )
diff --git a/tests/cmdline/commands/test_setup.py b/tests/cmdline/commands/test_setup.py
index 7820bc5a12..c7c5fb8d18 100644
--- a/tests/cmdline/commands/test_setup.py
+++ b/tests/cmdline/commands/test_setup.py
@@ -121,7 +121,7 @@ def test_quicksetup_wrong_port(self):
 
     @with_temporary_config_instance
     def test_setup(self):
-        """Test `verdi setup`."""
+        """Test `verdi setup` (non-interactive)."""
         postgres = Postgres(interactive=False, quiet=True, dbinfo=self.pg_test.dsn)
         postgres.determine_setup()
         db_name = 'aiida_test_setup'
diff --git a/tests/cmdline/params/options/test_conditional.py b/tests/cmdline/params/options/test_conditional.py
index aaad971fb4..94c300eff8 100644
--- a/tests/cmdline/params/options/test_conditional.py
+++ b/tests/cmdline/params/options/test_conditional.py
@@ -66,7 +66,7 @@ def test_switch_on(self):
         runner = CliRunner()
         result = runner.invoke(cmd, ['--on'])
         self.assertIsNotNone(result.exception)
-        self.assertIn('Error: Missing option "--opt".', result.output)
+        self.assertTrue('Error: Missing option' in result.output and '--opt' in result.output)
 
     def test_flag_off(self):
         """
@@ -89,7 +89,7 @@ def test_flag_on(self):
         """
         runner = CliRunner()
         result = runner.invoke(cmd, ['--on'])
         self.assertIsNotNone(result.exception)
-        self.assertIn('Error: Missing option "--opt".', result.output)
+        self.assertTrue('Error: Missing option' in result.output and '--opt' in result.output)
 
     def setup_multi_non_eager(self):
         """
@@ -139,11 +139,11 @@ def test_ab(self):
         runner, cmd = self.setup_multi_non_eager()
         result = runner.invoke(cmd, ['--a', '--opt-b=Bla'])
         self.assertIsNotNone(result.exception)
-        self.assertIn('Error: Missing option "--opt-a".', result.output)
+        self.assertTrue('Error: Missing option' in result.output and '--opt-a' in result.output)
 
         result_rev = runner.invoke(cmd, ['--opt-b=Bla', '--a'])
         self.assertIsNotNone(result_rev.exception)
-        self.assertIn('Error: Missing option "--opt-a".', result_rev.output)
+        self.assertTrue('Error: Missing option' in result_rev.output and '--opt-a' in result_rev.output)
 
     def test_ba(self):
         """
@@ -154,11 +154,11 @@ def test_ba(self):
         runner, cmd = self.setup_multi_non_eager()
         result = runner.invoke(cmd, ['--b', '--opt-a=Bla'])
         self.assertIsNotNone(result.exception)
-        self.assertIn('Error: Missing option "--opt-b".', result.output)
+        self.assertTrue('Error: Missing option' in result.output and '--opt-b' in result.output)
 
         result_rev = runner.invoke(cmd, ['--opt-a=Bla', '--b'])
         self.assertIsNotNone(result_rev.exception)
-        self.assertIn('Error: Missing option "--opt-b".', result_rev.output)
+        
self.assertTrue('Error: Missing option' in result_rev.output and '--opt-b' in result_rev.output)
 
     @staticmethod
     def user_callback(_ctx, param, value):
@@ -181,9 +181,8 @@ def setup_flag_cond(**kwargs):
         @click.option('--flag', is_flag=True)
         @click.option('--opt-a', required_fn=lambda c: c.params.get('flag'), cls=ConditionalOption, **kwargs)
         def cmd(flag, opt_a):
-            """ A command with a flag and customizable options that dependon it """
+            """A command with a flag and customizable options that depend on it."""
             # pylint: disable=unused-argument
-            click.echo('{}'.format(opt_a))
 
         return cmd
diff --git a/tests/cmdline/params/options/test_interactive.py b/tests/cmdline/params/options/test_interactive.py
index a72b52b266..04899ab19a 100644
--- a/tests/cmdline/params/options/test_interactive.py
+++ b/tests/cmdline/params/options/test_interactive.py
@@ -223,15 +223,17 @@ def test_default_value_empty_opt(self):
 
     def test_default_value_ignore_character(self):
         """
-        scenario: InteractiveOption with default value, invoke with ignore default character `!`
-        behaviour: return `None` for the value
+        scenario: InteractiveOption with default value, invoke with "ignore default character" `!`
+        behaviour: return empty string '' for the value
+
+        Note: It should *not* return None, since this is indistinguishable from the option not being prompted for.
         """
         cmd = self.simple_command(default='default')
         runner = CliRunner()
 
         # Check the interactive mode, by not specifying the input on the command line and then enter `!` at the prompt
         result = runner.invoke(cmd, [], input='!\n')
-        expected = 'None'
+        expected = ''
 
         self.assertIsNone(result.exception)
         self.assertIn(expected, result.output.split('\n')[3])  # Fourth line should be parsed value printed to stdout
diff --git a/tests/cmdline/params/types/test_code.py b/tests/cmdline/params/types/test_code.py
index f96d9ecf05..a2464f64d7 100644
--- a/tests/cmdline/params/types/test_code.py
+++ b/tests/cmdline/params/types/test_code.py
@@ -7,109 +7,125 @@
 # For further information on the license, see the LICENSE.txt file        #
 # For further information please visit http://www.aiida.net               #
 ###########################################################################
+# pylint: disable=redefined-outer-name,unused-variable,unused-argument
 """Tests for the `CodeParamType`."""
-
 import click
+import pytest
 
-from aiida.backends.testbase import AiidaTestCase
 from aiida.cmdline.params.types import CodeParamType
 from aiida.orm import Code
 from aiida.orm.utils.loaders import OrmEntityLoader
 
 
-class TestCodeParamType(AiidaTestCase):
-    """Tests for the `CodeParamType`."""
-
-    @classmethod
-    def setUpClass(cls, *args, **kwargs):
-        """
-        Create some code to test the CodeParamType parameter type for the command line infrastructure
-        We create an initial code with a random name and then on purpose create two code with a name
-        that matches exactly the ID and UUID, respectively, of the first one. 
This allows us to test - the rules implemented to solve ambiguities that arise when determing the identifier type - """ - super().setUpClass(*args, **kwargs) - - cls.param_base = CodeParamType() - cls.param_entry_point = CodeParamType(entry_point='arithmetic.add') - cls.entity_01 = Code(remote_computer_exec=(cls.computer, '/bin/true')).store() - cls.entity_02 = Code(remote_computer_exec=(cls.computer, '/bin/true'), - input_plugin_name='arithmetic.add').store() - cls.entity_03 = Code(remote_computer_exec=(cls.computer, '/bin/true'), - input_plugin_name='templatereplacer').store() - - cls.entity_01.label = 'computer_01' - cls.entity_02.label = str(cls.entity_01.pk) - cls.entity_03.label = str(cls.entity_01.uuid) - - def test_get_by_id(self): - """ - Verify that using the ID will retrieve the correct entity - """ - identifier = '{}'.format(self.entity_01.pk) - result = self.param_base.convert(identifier, None, None) - self.assertEqual(result.uuid, self.entity_01.uuid) - - def test_get_by_uuid(self): - """ - Verify that using the UUID will retrieve the correct entity - """ - identifier = '{}'.format(self.entity_01.uuid) - result = self.param_base.convert(identifier, None, None) - self.assertEqual(result.uuid, self.entity_01.uuid) - - def test_get_by_label(self): - """ - Verify that using the LABEL will retrieve the correct entity - """ - identifier = '{}'.format(self.entity_01.label) - result = self.param_base.convert(identifier, None, None) - self.assertEqual(result.uuid, self.entity_01.uuid) - - def test_get_by_fullname(self): - """ - Verify that using the LABEL@machinename will retrieve the correct entity - """ - identifier = '{}@{}'.format(self.entity_01.label, self.computer.name) # pylint: disable=no-member - result = self.param_base.convert(identifier, None, None) - self.assertEqual(result.uuid, self.entity_01.uuid) - - def test_ambiguous_label_pk(self): - """ - Situation: LABEL of entity_02 is exactly equal to ID of entity_01 - - Verify that using an ambiguous identifier gives precedence to the ID interpretation - Appending the special ambiguity breaker character will force the identifier to be treated as a LABEL - """ - identifier = '{}'.format(self.entity_02.label) - result = self.param_base.convert(identifier, None, None) - self.assertEqual(result.uuid, self.entity_01.uuid) - - identifier = '{}{}'.format(self.entity_02.label, OrmEntityLoader.label_ambiguity_breaker) - result = self.param_base.convert(identifier, None, None) - self.assertEqual(result.uuid, self.entity_02.uuid) - - def test_ambiguous_label_uuid(self): - """ - Situation: LABEL of entity_03 is exactly equal to UUID of entity_01 - - Verify that using an ambiguous identifier gives precedence to the UUID interpretation - Appending the special ambiguity breaker character will force the identifier to be treated as a LABEL - """ - identifier = '{}'.format(self.entity_03.label) - result = self.param_base.convert(identifier, None, None) - self.assertEqual(result.uuid, self.entity_01.uuid) - - identifier = '{}{}'.format(self.entity_03.label, OrmEntityLoader.label_ambiguity_breaker) - result = self.param_base.convert(identifier, None, None) - self.assertEqual(result.uuid, self.entity_03.uuid) - - def test_entry_point_validation(self): - """Verify that when an `entry_point` is defined in the constructor, it is respected in the validation.""" - identifier = '{}'.format(self.entity_02.pk) - result = self.param_entry_point.convert(identifier, None, None) - self.assertEqual(result.uuid, self.entity_02.uuid) - - with 
self.assertRaises(click.BadParameter):
-            identifier = '{}'.format(self.entity_03.pk)
-            result = self.param_entry_point.convert(identifier, None, None)
+@pytest.fixture
+def parameter_type():
+    """Return an instance of the `CodeParamType`."""
+    return CodeParamType()
+
+
+@pytest.fixture
+def setup_codes(clear_database_before_test, aiida_localhost):
+    """Create some `Code` instances to test the `CodeParamType` parameter type for the command line infrastructure.
+
+    We create an initial code with a random name and then on purpose create two codes with a name that matches exactly
+    the ID and UUID, respectively, of the first one. This allows us to test the rules implemented to solve ambiguities
+    that arise when determining the identifier type.
+    """
+    entity_01 = Code(remote_computer_exec=(aiida_localhost, '/bin/true')).store()
+    entity_02 = Code(remote_computer_exec=(aiida_localhost, '/bin/true'), input_plugin_name='arithmetic.add').store()
+    entity_03 = Code(remote_computer_exec=(aiida_localhost, '/bin/true'), input_plugin_name='templatereplacer').store()
+
+    entity_01.label = 'computer_01'
+    entity_02.label = str(entity_01.pk)
+    entity_03.label = str(entity_01.uuid)
+
+    return entity_01, entity_02, entity_03
+
+
+def test_get_by_id(setup_codes, parameter_type):
+    """Verify that using the ID will retrieve the correct entity."""
+    entity_01, entity_02, entity_03 = setup_codes
+    identifier = '{}'.format(entity_01.pk)
+    result = parameter_type.convert(identifier, None, None)
+    assert result.uuid == entity_01.uuid
+
+
+def test_get_by_uuid(setup_codes, parameter_type):
+    """Verify that using the UUID will retrieve the correct entity."""
+    entity_01, entity_02, entity_03 = setup_codes
+    identifier = '{}'.format(entity_01.uuid)
+    result = parameter_type.convert(identifier, None, None)
+    assert result.uuid == entity_01.uuid
+
+
+def test_get_by_label(setup_codes, parameter_type):
+    """Verify that using the LABEL will retrieve the correct entity."""
+    entity_01, entity_02, entity_03 = setup_codes
+    identifier = '{}'.format(entity_01.label)
+    result = parameter_type.convert(identifier, None, None)
+    assert result.uuid == entity_01.uuid
+
+
+def test_get_by_fullname(setup_codes, parameter_type):
+    """Verify that using the LABEL@machinename will retrieve the correct entity."""
+    entity_01, entity_02, entity_03 = setup_codes
+    identifier = '{}@{}'.format(entity_01.label, entity_01.computer.name)
+    result = parameter_type.convert(identifier, None, None)
+    assert result.uuid == entity_01.uuid
+
+
+def test_ambiguous_label_pk(setup_codes, parameter_type):
+    """Situation: LABEL of entity_02 is exactly equal to ID of entity_01.
+
+    Verify that using an ambiguous identifier gives precedence to the ID interpretation
+    Appending the special ambiguity breaker character will force the identifier to be treated as a LABEL
+    """
+    entity_01, entity_02, entity_03 = setup_codes
+    identifier = '{}'.format(entity_02.label)
+    result = parameter_type.convert(identifier, None, None)
+    assert result.uuid == entity_01.uuid
+
+    identifier = '{}{}'.format(entity_02.label, OrmEntityLoader.label_ambiguity_breaker)
+    result = parameter_type.convert(identifier, None, None)
+    assert result.uuid == entity_02.uuid
+
+
+def test_ambiguous_label_uuid(setup_codes, parameter_type):
+    """Situation: LABEL of entity_03 is exactly equal to UUID of entity_01. 
+ + Verify that using an ambiguous identifier gives precedence to the UUID interpretation + Appending the special ambiguity breaker character will force the identifier to be treated as a LABEL + """ + entity_01, entity_02, entity_03 = setup_codes + identifier = '{}'.format(entity_03.label) + result = parameter_type.convert(identifier, None, None) + assert result.uuid == entity_01.uuid + + identifier = '{}{}'.format(entity_03.label, OrmEntityLoader.label_ambiguity_breaker) + result = parameter_type.convert(identifier, None, None) + assert result.uuid == entity_03.uuid + + +def test_entry_point_validation(setup_codes): + """Verify that when an `entry_point` is defined in the constructor, it is respected in the validation.""" + entity_01, entity_02, entity_03 = setup_codes + parameter_type = CodeParamType(entry_point='arithmetic.add') + identifier = '{}'.format(entity_02.pk) + result = parameter_type.convert(identifier, None, None) + assert result.uuid == entity_02.uuid + + with pytest.raises(click.BadParameter): + identifier = '{}'.format(entity_03.pk) + result = parameter_type.convert(identifier, None, None) + + +def test_complete(setup_codes, parameter_type, aiida_localhost): + """Test the `complete` method that provides auto-complete functionality.""" + entity_01, entity_02, entity_03 = setup_codes + entity_04 = Code(label='xavier', remote_computer_exec=(aiida_localhost, '/bin/true')).store() + + options = [item[0] for item in parameter_type.complete(None, '')] + assert sorted(options) == sorted([entity_01.label, entity_02.label, entity_03.label, entity_04.label]) + + options = [item[0] for item in parameter_type.complete(None, 'xa')] + assert sorted(options) == sorted([entity_04.label]) diff --git a/tests/cmdline/params/types/test_group.py b/tests/cmdline/params/types/test_group.py index d40385682a..722c2d3fe7 100644 --- a/tests/cmdline/params/types/test_group.py +++ b/tests/cmdline/params/types/test_group.py @@ -7,81 +7,139 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### +# pylint: disable=redefined-outer-name,unused-variable,unused-argument """Tests for the `GroupParamType`.""" -from aiida.backends.testbase import AiidaTestCase +import click +import pytest + from aiida.cmdline.params.types import GroupParamType -from aiida.orm import Group +from aiida.orm import Group, AutoGroup, ImportGroup from aiida.orm.utils.loaders import OrmEntityLoader -class TestGroupParamType(AiidaTestCase): - """Tests for the `GroupParamType`.""" - - @classmethod - def setUpClass(cls, *args, **kwargs): - """ - Create some groups to test the GroupParamType parameter type for the command line infrastructure - We create an initial group with a random name and then on purpose create two groups with a name - that matches exactly the ID and UUID, respectively, of the first one. 
This allows us to test
-        the rules implemented to solve ambiguities that arise when determing the identifier type
-        """
-        super().setUpClass(*args, **kwargs)
-
-        cls.param = GroupParamType()
-        cls.entity_01 = Group(label='group_01').store()
-        cls.entity_02 = Group(label=str(cls.entity_01.pk)).store()
-        cls.entity_03 = Group(label=str(cls.entity_01.uuid)).store()
-
-    def test_get_by_id(self):
-        """
-        Verify that using the ID will retrieve the correct entity
-        """
-        identifier = '{}'.format(self.entity_01.pk)
-        result = self.param.convert(identifier, None, None)
-        self.assertEqual(result.uuid, self.entity_01.uuid)
-
-    def test_get_by_uuid(self):
-        """
-        Verify that using the UUID will retrieve the correct entity
-        """
-        identifier = '{}'.format(self.entity_01.uuid)
-        result = self.param.convert(identifier, None, None)
-        self.assertEqual(result.uuid, self.entity_01.uuid)
-
-    def test_get_by_label(self):
-        """
-        Verify that using the LABEL will retrieve the correct entity
-        """
-        identifier = '{}'.format(self.entity_01.label)
-        result = self.param.convert(identifier, None, None)
-        self.assertEqual(result.uuid, self.entity_01.uuid)
-
-    def test_ambiguous_label_pk(self):
-        """
-        Situation: LABEL of entity_02 is exactly equal to ID of entity_01
-
-        Verify that using an ambiguous identifier gives precedence to the ID interpretation
-        Appending the special ambiguity breaker character will force the identifier to be treated as a LABEL
-        """
-        identifier = '{}'.format(self.entity_02.label)
-        result = self.param.convert(identifier, None, None)
-        self.assertEqual(result.uuid, self.entity_01.uuid)
-
-        identifier = '{}{}'.format(self.entity_02.label, OrmEntityLoader.label_ambiguity_breaker)
-        result = self.param.convert(identifier, None, None)
-        self.assertEqual(result.uuid, self.entity_02.uuid)
-
-    def test_ambiguous_label_uuid(self):
-        """
-        Situation: LABEL of entity_03 is exactly equal to UUID of entity_01
-
-        Verify that using an ambiguous identifier gives precedence to the UUID interpretation
-        Appending the special ambiguity breaker character will force the identifier to be treated as a LABEL
-        """
-        identifier = '{}'.format(self.entity_03.label)
-        result = self.param.convert(identifier, None, None)
-        self.assertEqual(result.uuid, self.entity_01.uuid)
-
-        identifier = '{}{}'.format(self.entity_03.label, OrmEntityLoader.label_ambiguity_breaker)
-        result = self.param.convert(identifier, None, None)
-        self.assertEqual(result.uuid, self.entity_03.uuid)
+@pytest.fixture
+def parameter_type():
+    """Return an instance of the `GroupParamType`."""
+    return GroupParamType()
+
+
+@pytest.fixture
+def setup_groups(clear_database_before_test):
+    """Create some groups to test the `GroupParamType` parameter type for the command line infrastructure.
+
+    We create an initial group with a random name and then on purpose create two groups with a name that matches exactly
+    the ID and UUID, respectively, of the first one. This allows us to test the rules implemented to solve ambiguities
+    that arise when determining the identifier type. 
+ """ + entity_01 = Group(label='group_01').store() + entity_02 = AutoGroup(label=str(entity_01.pk)).store() + entity_03 = ImportGroup(label=str(entity_01.uuid)).store() + return entity_01, entity_02, entity_03 + + +def test_get_by_id(setup_groups, parameter_type): + """Verify that using the ID will retrieve the correct entity.""" + entity_01, entity_02, entity_03 = setup_groups + identifier = '{}'.format(entity_01.pk) + result = parameter_type.convert(identifier, None, None) + assert result.uuid == entity_01.uuid + + +def test_get_by_uuid(setup_groups, parameter_type): + """Verify that using the UUID will retrieve the correct entity.""" + entity_01, entity_02, entity_03 = setup_groups + identifier = '{}'.format(entity_01.uuid) + result = parameter_type.convert(identifier, None, None) + assert result.uuid == entity_01.uuid + + +def test_get_by_label(setup_groups, parameter_type): + """Verify that using the LABEL will retrieve the correct entity.""" + entity_01, entity_02, entity_03 = setup_groups + identifier = '{}'.format(entity_01.label) + result = parameter_type.convert(identifier, None, None) + assert result.uuid == entity_01.uuid + + +def test_ambiguous_label_pk(setup_groups, parameter_type): + """Situation: LABEL of entity_02 is exactly equal to ID of entity_01. + + Verify that using an ambiguous identifier gives precedence to the ID interpretation. Appending the special ambiguity + breaker character will force the identifier to be treated as a LABEL. + """ + entity_01, entity_02, entity_03 = setup_groups + identifier = '{}'.format(entity_02.label) + result = parameter_type.convert(identifier, None, None) + assert result.uuid == entity_01.uuid + + identifier = '{}{}'.format(entity_02.label, OrmEntityLoader.label_ambiguity_breaker) + result = parameter_type.convert(identifier, None, None) + assert result.uuid == entity_02.uuid + + +def test_ambiguous_label_uuid(setup_groups, parameter_type): + """Situation: LABEL of entity_03 is exactly equal to UUID of entity_01. + + Verify that using an ambiguous identifier gives precedence to the UUID interpretation. Appending the special + ambiguity breaker character will force the identifier to be treated as a LABEL. + """ + entity_01, entity_02, entity_03 = setup_groups + identifier = '{}'.format(entity_03.label) + result = parameter_type.convert(identifier, None, None) + assert result.uuid == entity_01.uuid + + identifier = '{}{}'.format(entity_03.label, OrmEntityLoader.label_ambiguity_breaker) + result = parameter_type.convert(identifier, None, None) + assert result.uuid == entity_03.uuid + + +def test_create_if_not_exist(setup_groups): + """Test the `create_if_not_exist` constructor argument.""" + label = 'non-existing-label-01' + parameter_type = GroupParamType(create_if_not_exist=True) + result = parameter_type.convert(label, None, None) + assert isinstance(result, Group) + + label = 'non-existing-label-02' + parameter_type = GroupParamType(create_if_not_exist=True, sub_classes=('aiida.groups:core.auto',)) + result = parameter_type.convert(label, None, None) + assert isinstance(result, AutoGroup) + + # Specifying more than one subclass when `create_if_not_exist=True` is not allowed. 
+    with pytest.raises(ValueError):
+        GroupParamType(create_if_not_exist=True, sub_classes=('aiida.groups:core.auto', 'aiida.groups:core.import'))
+
+
+@pytest.mark.parametrize(('sub_classes', 'expected'), (
+    (None, (True, True, True)),
+    (('aiida.groups:core.auto',), (False, True, False)),
+    (('aiida.groups:core.auto', 'aiida.groups:core.import'), (False, True, True)),
+))
+def test_sub_classes(setup_groups, sub_classes, expected):
+    """Test the `sub_classes` constructor argument."""
+    entity_01, entity_02, entity_03 = setup_groups
+    parameter_type = GroupParamType(sub_classes=sub_classes)
+
+    results = []
+
+    for group in [entity_01, entity_02, entity_03]:
+        try:
+            parameter_type.convert(str(group.pk), None, None)
+        except click.BadParameter:
+            results.append(False)
+        else:
+            results.append(True)
+
+    assert tuple(results) == expected
+
+
+def test_complete(setup_groups, parameter_type):
+    """Test the `complete` method that provides auto-complete functionality."""
+    entity_01, entity_02, entity_03 = setup_groups
+    entity_04 = Group(label='xavier').store()
+
+    options = [item[0] for item in parameter_type.complete(None, '')]
+    assert sorted(options) == sorted([entity_01.label, entity_02.label, entity_03.label, entity_04.label])
+
+    options = [item[0] for item in parameter_type.complete(None, 'xa')]
+    assert sorted(options) == sorted([entity_04.label])
diff --git a/tests/cmdline/utils/test_multiline.py b/tests/cmdline/utils/test_multiline.py
index 50fbee9451..8731972f30 100644
--- a/tests/cmdline/utils/test_multiline.py
+++ b/tests/cmdline/utils/test_multiline.py
@@ -7,56 +7,53 @@
 # For further information on the license, see the LICENSE.txt file        #
 # For further information please visit http://www.aiida.net               #
 ###########################################################################
+# pylint: disable=unused-argument
 """Unit tests for editing pre and post bash scripts, comments, etc."""
-import os
-import unittest
-
-from click.testing import CliRunner
+import pytest
 
 from aiida.cmdline.utils.multi_line_input import edit_pre_post, edit_comment
 
+COMMAND = 'sleep 1 ; vim -c "g!/^#=/s/$/Test" -cwq'  # Appends `Test` to every line NOT starting with `#=`
+
+
+@pytest.mark.parametrize('non_interactive_editor', (COMMAND,), indirect=True)
+def test_pre_post(non_interactive_editor):
+    result = edit_pre_post(summary={'Param 1': 'Value 1', 'Param 2': 'Value 1'})
+    assert result[0] == 'Test\nTest\nTest'
+    assert result[1] == 'Test\nTest\nTest'
+
+
+@pytest.mark.parametrize('non_interactive_editor', (COMMAND,), indirect=True)
+def test_edit_pre_post(non_interactive_editor):
+    result = edit_pre_post(pre='OldPre', post='OldPost')
+    assert result[0] == 'Test\nOldPreTest\nTest'
+    assert result[1] == 'Test\nOldPostTest\nTest'
+
+
+@pytest.mark.parametrize('non_interactive_editor', (COMMAND,), indirect=True)
+def test_edit_pre_post_comment(non_interactive_editor):
+    """Test that lines starting with '#=' are ignored, while lines starting with any other character are kept."""
+    result = edit_pre_post(pre='OldPre\n#=Delete me', post='OldPost #=Dont delete me')
+    assert result[0] == 'Test\nOldPreTest\nTest'
+    assert result[1] == 'Test\nOldPost #=Dont delete meTest\nTest'
+
+
+@pytest.mark.parametrize('non_interactive_editor', (COMMAND,), indirect=True)
+def test_edit_pre_bash_comment(non_interactive_editor):
+    """Test that bash comments starting with '#' are NOT deleted."""
+    result = edit_pre_post(pre='OldPre\n# Dont delete me', post='OldPost # Dont delete me')
+    assert result[0] == 'Test\nOldPreTest\n# Dont 
delete meTest\nTest' + assert result[1] == 'Test\nOldPost # Dont delete meTest\nTest' + + +@pytest.mark.parametrize('non_interactive_editor', (COMMAND,), indirect=True) +def test_new_comment(non_interactive_editor): + new_comment = edit_comment() + assert new_comment == 'Test' + -class TestMultilineInput(unittest.TestCase): - """Test functions for editing pre and post bash scripts, comments, etc.""" - - def setUp(self): - ## Sleep 1 is needed because on some filesystems (e.g. some pre 10.13 Mac) the - ## filesystem returns the time with a precision of 1 second, and - ## click uses the timestamp to decide if the file was re-saved or not. - editor_cmd = 'sleep 1 ; vim -c "g!/^#=/s/$/Test" -cwq' # appends Test to - # every line that does NOT start with '#=' characters - os.environ['EDITOR'] = editor_cmd - os.environ['VISUAL'] = editor_cmd - self.runner = CliRunner() - - def test_pre_post(self): - result = edit_pre_post(summary={'Param 1': 'Value 1', 'Param 2': 'Value 1'}) - self.assertEqual(result[0], 'Test\nTest\nTest') - self.assertEqual(result[1], 'Test\nTest\nTest') - - def test_edit_pre_post(self): - result = edit_pre_post(pre='OldPre', post='OldPost') - self.assertEqual(result[0], 'Test\nOldPreTest\nTest') - self.assertEqual(result[1], 'Test\nOldPostTest\nTest') - - def test_edit_pre_post_comment(self): - """Test that lines starting with '#=' are ignored and are not ignored - if they start with any other character""" - result = edit_pre_post(pre='OldPre\n#=Delete me', post='OldPost #=Dont delete me') - self.assertEqual(result[0], 'Test\nOldPreTest\nTest') - self.assertEqual(result[1], 'Test\nOldPost #=Dont delete meTest\nTest') - - def test_edit_pre_bash_comment(self): - """Test that bash comments starting with '#' are NOT deleted""" - result = edit_pre_post(pre='OldPre\n# Dont delete me', post='OldPost # Dont delete me') - self.assertEqual(result[0], 'Test\nOldPreTest\n# Dont delete meTest\nTest') - self.assertEqual(result[1], 'Test\nOldPost # Dont delete meTest\nTest') - - def test_new_comment(self): - new_comment = edit_comment() - self.assertEqual(new_comment, 'Test') - - def test_edit_comment(self): - old_comment = 'OldComment' - new_comment = edit_comment(old_cmt=old_comment) - self.assertEqual(new_comment, old_comment + 'Test') +@pytest.mark.parametrize('non_interactive_editor', (COMMAND,), indirect=True) +def test_edit_comment(non_interactive_editor): + old_comment = 'OldComment' + new_comment = edit_comment(old_cmt=old_comment) + assert new_comment == old_comment + 'Test' diff --git a/aiida/cmdline/params/types/nonemptystring.py b/tests/common/test_escaping.py similarity index 52% rename from aiida/cmdline/params/types/nonemptystring.py rename to tests/common/test_escaping.py index 295bd29bf3..80bdb377c6 100644 --- a/aiida/cmdline/params/types/nonemptystring.py +++ b/tests/common/test_escaping.py @@ -7,25 +7,20 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -""" -Module for the non empty string parameter type -""" +"""Tests for the :mod:`aiida.common.escaping`.""" +from aiida.common.escaping import escape_for_bash -from click.types import StringParamType +def test_escape_for_bash(): + """Tests various inputs for `aiida.common.escaping.escape_for_bash`.""" + tests = ( + [None, ''], + ['string', "'string'"], + ['string with space', "'string with space'"], + ["string with a ' single quote", """'string with a '"'"' single 
quote'"""],
+        [1, "'1'"],
+        [2.0, "'2.0'"],
+    )
 
-class NonEmptyStringParamType(StringParamType):
-    """
-    Parameter that cannot be an an empty string.
-    """
-    name = 'nonemptystring'
-
-    def convert(self, value, param, ctx):
-        newval = super().convert(value, param, ctx)
-        if not newval:  # None or empty string
-            self.fail('Empty string is not valid!')
-
-        return newval
-
-    def __repr__(self):
-        return 'NONEMPTYSTRING'
+    for string_input, string_escaped in tests:
+        assert escape_for_bash(string_input) == string_escaped
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000000..98f36fa465
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+"""Configuration file for pytest tests."""
+import pytest  # pylint: disable=unused-import
+
+pytest_plugins = ['aiida.manage.tests.pytest_fixtures']  # pylint: disable=invalid-name
+
+
+@pytest.fixture()
+def non_interactive_editor(request):
+    """Fixture to patch click's `Editor.edit_file`.
+
+    In `click==7.1` the `Editor.edit_file` command was changed to escape the `os.environ['EDITOR']` command. Our tests
+    are currently abusing this variable to define an automated vim command in order to make an interactive command
+    non-interactive, and escaping it makes bash interpret the command and its arguments as a single command instead.
+    Here we patch the method to remove the escaping of the editor command.
+
+    :param request: the command to set for the editor that is to be called
+    """
+    import os
+    from unittest.mock import patch
+    from click._termui_impl import Editor
+
+    os.environ['EDITOR'] = request.param
+    os.environ['VISUAL'] = request.param
+
+    def edit_file(self, filename):
+        import os
+        import subprocess
+        import click
+
+        editor = self.get_editor()
+        if self.env:
+            environ = os.environ.copy()
+            environ.update(self.env)
+        else:
+            environ = None
+        try:
+            process = subprocess.Popen(
+                '{} {}'.format(editor, filename),  # This is the line that we change removing `shlex_quote`
+                env=environ,
+                shell=True,
+            )
+            exit_code = process.wait()
+            if exit_code != 0:
+                raise click.ClickException('{}: Editing failed!'.format(editor))
+        except OSError as exception:
+            raise click.ClickException('{}: Editing failed: {}'.format(editor, exception))
+
+    with patch.object(Editor, 'edit_file', edit_file):
+        yield
diff --git a/tests/engine/processes/test_exit_code.py b/tests/engine/processes/test_exit_code.py
new file mode 100644
index 0000000000..2371d7fb2b
--- /dev/null
+++ b/tests/engine/processes/test_exit_code.py
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    
#
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+"""Tests for `aiida.engine.processes.exit_code.ExitCode`."""
+import pytest
+
+from aiida.engine import ExitCode
+
+
+def test_exit_code_defaults():
+    """Test that the defaults are properly set."""
+    exit_code = ExitCode()
+    assert exit_code.status == 0
+    assert exit_code.message is None
+    assert exit_code.invalidates_cache is False
+
+
+def test_exit_code_construct():
+    """Test that the constructor allows to override defaults."""
+    status = 418
+    message = 'I am a teapot'
+    invalidates_cache = True
+
+    exit_code = ExitCode(status, message, invalidates_cache)
+    assert exit_code.status == status
+    assert exit_code.message == message
+    assert exit_code.invalidates_cache == invalidates_cache
+
+
+def test_exit_code_equality():
+    """Test that the equality operator works properly."""
+    exit_code_origin = ExitCode(1, 'message', True)
+    exit_code_clone = ExitCode(1, 'message', True)
+    exit_code_different = ExitCode(2, 'message', True)
+
+    assert exit_code_origin == exit_code_clone
+    assert exit_code_clone != exit_code_different
+
+
+def test_exit_code_template_message():
+    """Test that an exit code with a templated message can be called to replace the parameters."""
+    message_template = 'Wrong parameter {parameter}'
+    parameter_name = 'some_parameter'
+
+    exit_code_base = ExitCode(418, message_template)
+    exit_code_called = exit_code_base.format(parameter=parameter_name)
+
+    # Incorrect placeholder
+    with pytest.raises(ValueError):
+        exit_code_base.format(non_existing_parameter=parameter_name)
+
+    # Missing placeholders
+    with pytest.raises(ValueError):
+        exit_code_base.format()
+
+    assert exit_code_base != exit_code_called  # Calling the exit code should return a new instance
+    assert exit_code_called.message == message_template.format(parameter=parameter_name)
+
+
+def test_exit_code_expand_tuple():
+    """Test that an exit code instance can be expanded in its attributes like a tuple."""
+    status = 418
+    message = 'I am a teapot'
+    invalidates_cache = True
+
+    status_exp, message_exp, invalidates_cache_exp = ExitCode(status, message, invalidates_cache)
+
+    assert status == status_exp
+    assert message == message_exp
+    assert invalidates_cache == invalidates_cache_exp
diff --git a/tests/engine/processes/workchains/test_utils.py b/tests/engine/processes/workchains/test_utils.py
index 00f1e127a3..51a787235e 100644
--- a/tests/engine/processes/workchains/test_utils.py
+++ b/tests/engine/processes/workchains/test_utils.py
@@ -53,6 +53,7 @@ def test_priority(self):
         attribute_key = 'handlers_called'
 
         class ArithmeticAddBaseWorkChain(BaseRestartWorkChain):
+            """Implementation of a possible BaseRestartWorkChain for the ``ArithmeticAddCalculation``."""
 
             _process_class = ArithmeticAddCalculation
 
@@ -61,6 +62,7 @@ class ArithmeticAddBaseWorkChain(BaseRestartWorkChain):
             # This can then be checked after invoking `inspect_process` to ensure they were called in the right order
             @process_handler(priority=100)
             def handler_01(self, node):
+                """Example handler returning ExitCode 100."""
                 handlers_called = node.get_attribute(attribute_key, default=[])
                 handlers_called.append('handler_01')
                 node.set_attribute(attribute_key, handlers_called)
@@ -68,6 +70,7 @@ def handler_01(self, node):
 
             @process_handler(priority=300)
             def handler_03(self, node):
+                
"""Example handler returing ExitCode 300.""" handlers_called = node.get_attribute(attribute_key, default=[]) handlers_called.append('handler_03') node.set_attribute(attribute_key, handlers_called) @@ -75,6 +78,7 @@ def handler_03(self, node): @process_handler(priority=200) def handler_02(self, node): + """Example handler returing ExitCode 200.""" handlers_called = node.get_attribute(attribute_key, default=[]) handlers_called.append('handler_02') node.set_attribute(attribute_key, handlers_called) @@ -82,6 +86,7 @@ def handler_02(self, node): @process_handler(priority=400) def handler_04(self, node): + """Example handler returing ExitCode 400.""" handlers_called = node.get_attribute(attribute_key, default=[]) handlers_called.append('handler_04') node.set_attribute(attribute_key, handlers_called) @@ -159,6 +164,7 @@ def test_exit_codes_filter(self): node_skip.set_exit_status(200) # Some other exit status class ArithmeticAddBaseWorkChain(BaseRestartWorkChain): + """Minimal base restart workchain for the ``ArithmeticAddCalculation``.""" _process_class = ArithmeticAddCalculation diff --git a/tests/fixtures/calcjob/arithmetic.add.aiida b/tests/fixtures/calcjob/arithmetic.add.aiida index 4c0de43491..093473ca20 100644 Binary files a/tests/fixtures/calcjob/arithmetic.add.aiida and b/tests/fixtures/calcjob/arithmetic.add.aiida differ diff --git a/tests/fixtures/calcjob/arithmetic.add_old.aiida b/tests/fixtures/calcjob/arithmetic.add_old.aiida index 17357815a0..bbe502e33a 100644 Binary files a/tests/fixtures/calcjob/arithmetic.add_old.aiida and b/tests/fixtures/calcjob/arithmetic.add_old.aiida differ diff --git a/tests/fixtures/export/compare/django.aiida b/tests/fixtures/export/compare/django.aiida index 633c58159e..4b1cc81496 100644 Binary files a/tests/fixtures/export/compare/django.aiida and b/tests/fixtures/export/compare/django.aiida differ diff --git a/tests/fixtures/export/compare/sqlalchemy.aiida b/tests/fixtures/export/compare/sqlalchemy.aiida index bd3ae08f44..19b819a44f 100644 Binary files a/tests/fixtures/export/compare/sqlalchemy.aiida and b/tests/fixtures/export/compare/sqlalchemy.aiida differ diff --git a/tests/fixtures/export/migrate/export_v0.1_simple.aiida b/tests/fixtures/export/migrate/export_v0.1_simple.aiida index 0778e826d4..673c35780b 100644 Binary files a/tests/fixtures/export/migrate/export_v0.1_simple.aiida and b/tests/fixtures/export/migrate/export_v0.1_simple.aiida differ diff --git a/tests/fixtures/export/migrate/export_v0.2_simple.aiida b/tests/fixtures/export/migrate/export_v0.2_simple.aiida index f7ded56120..8d8aa26fa9 100644 Binary files a/tests/fixtures/export/migrate/export_v0.2_simple.aiida and b/tests/fixtures/export/migrate/export_v0.2_simple.aiida differ diff --git a/tests/fixtures/export/migrate/export_v0.3_simple.aiida b/tests/fixtures/export/migrate/export_v0.3_simple.aiida index 5c2cc1f841..b35c582776 100644 Binary files a/tests/fixtures/export/migrate/export_v0.3_simple.aiida and b/tests/fixtures/export/migrate/export_v0.3_simple.aiida differ diff --git a/tests/fixtures/export/migrate/export_v0.4_simple.aiida b/tests/fixtures/export/migrate/export_v0.4_simple.aiida index c38c76a2dc..53ac38a473 100644 Binary files a/tests/fixtures/export/migrate/export_v0.4_simple.aiida and b/tests/fixtures/export/migrate/export_v0.4_simple.aiida differ diff --git a/tests/fixtures/export/migrate/export_v0.5_simple.aiida b/tests/fixtures/export/migrate/export_v0.5_simple.aiida index 541e903fcb..ec56f75d2d 100644 Binary files 
a/tests/fixtures/export/migrate/export_v0.5_simple.aiida and b/tests/fixtures/export/migrate/export_v0.5_simple.aiida differ
diff --git a/tests/fixtures/export/migrate/export_v0.6_simple.aiida b/tests/fixtures/export/migrate/export_v0.6_simple.aiida
index 759b6f059c..71f125c8cf 100644
Binary files a/tests/fixtures/export/migrate/export_v0.6_simple.aiida and b/tests/fixtures/export/migrate/export_v0.6_simple.aiida differ
diff --git a/tests/fixtures/export/migrate/export_v0.7_simple.aiida b/tests/fixtures/export/migrate/export_v0.7_simple.aiida
index 779c58ea3c..7ab067ce46 100644
Binary files a/tests/fixtures/export/migrate/export_v0.7_simple.aiida and b/tests/fixtures/export/migrate/export_v0.7_simple.aiida differ
diff --git a/tests/fixtures/export/migrate/export_v0.8_simple.aiida b/tests/fixtures/export/migrate/export_v0.8_simple.aiida
index 250876da28..02562c14b2 100644
Binary files a/tests/fixtures/export/migrate/export_v0.8_simple.aiida and b/tests/fixtures/export/migrate/export_v0.8_simple.aiida differ
diff --git a/tests/fixtures/export/migrate/export_v0.9_simple.aiida b/tests/fixtures/export/migrate/export_v0.9_simple.aiida
new file mode 100644
index 0000000000..a93c8038f7
Binary files /dev/null and b/tests/fixtures/export/migrate/export_v0.9_simple.aiida differ
diff --git a/tests/fixtures/graphs/graph1.aiida b/tests/fixtures/graphs/graph1.aiida
index 77198fe69c..56164574e3 100644
Binary files a/tests/fixtures/graphs/graph1.aiida and b/tests/fixtures/graphs/graph1.aiida differ
diff --git a/tests/manage/external/test_postgres.py b/tests/manage/external/test_postgres.py
index 266dc92921..5b83374f06 100644
--- a/tests/manage/external/test_postgres.py
+++ b/tests/manage/external/test_postgres.py
@@ -9,16 +9,10 @@
 ###########################################################################
 """Unit tests for postgres database maintenance functionality"""
 from unittest import TestCase
-from unittest.mock import patch
 
 from aiida.manage.external.postgres import Postgres
 
 
-def _try_connect_always_fail(**kwargs):  # pylint: disable=unused-argument
-    """Always return False"""
-    return False
-
-
 class PostgresTest(TestCase):
     """Test the public API provided by the `Postgres` class"""
 
@@ -38,31 +32,18 @@ def _setup_postgres(self):
         return Postgres(interactive=False, quiet=True, dbinfo=self.pg_test.dsn)
 
     def test_determine_setup_fail(self):
+        """Check that setup fails if a bad port is provided.
+
+        Note: In interactive mode, this would prompt for the connection details. 
+ """ postgres = Postgres(interactive=False, quiet=True, dbinfo={'port': '11111'}) self.assertFalse(postgres.is_connected) def test_determine_setup_success(self): + """Check that setup works with default parameters.""" postgres = self._setup_postgres() self.assertTrue(postgres.is_connected) - def test_setup_fail_callback(self): - """Make sure `determine_setup` works despite wrong initial values in case of correct callback""" - - def correct_setup(interactive, dbinfo): # pylint: disable=unused-argument - return self.pg_test.dsn - - postgres = Postgres(interactive=False, quiet=True, dbinfo={'port': '11111'}, determine_setup=False) - postgres.set_setup_fail_callback(correct_setup) - setup_success = postgres.determine_setup() - self.assertTrue(setup_success) - - @patch('aiida.manage.external.pgsu._try_connect_psycopg', new=_try_connect_always_fail) - @patch('aiida.manage.external.pgsu._try_subcmd') - def test_fallback_on_subcmd(self, try_subcmd): - """Ensure that accessing postgres via subcommand is tried if psycopg does not work.""" - self._setup_postgres() - self.assertTrue(try_subcmd.call_count >= 1) - def test_create_drop_db_user(self): """Check creating and dropping a user works""" postgres = self._setup_postgres() diff --git a/tests/orm/data/test_upf.py b/tests/orm/data/test_upf.py index 228f8d9b77..02922bc60f 100644 --- a/tests/orm/data/test_upf.py +++ b/tests/orm/data/test_upf.py @@ -10,7 +10,6 @@ """ This module contains tests for UpfData and UpfData related functions. """ - import errno import tempfile import shutil @@ -95,8 +94,8 @@ def setUp(self): def tearDown(self): """Delete all groups and destroy the temporary directory created.""" - for group in orm.Group.objects.find(filters={'type_string': orm.GroupTypeString.UPFGROUP_TYPE.value}): - orm.Group.objects.delete(group.pk) + for group in orm.UpfFamily.objects.find(): + orm.UpfFamily.objects.delete(group.pk) try: shutil.rmtree(self.temp_dir) @@ -122,32 +121,31 @@ def test_get_upf_family_names(self): """Test the `UpfData.get_upf_family_names` method.""" label = 'family' - family, _ = orm.Group.objects.get_or_create(label=label, type_string=orm.GroupTypeString.UPFGROUP_TYPE.value) + family, _ = orm.UpfFamily.objects.get_or_create(label=label) family.add_nodes([self.pseudo_barium]) family.store() - self.assertEqual({group.label for group in orm.UpfData.get_upf_groups()}, {label}) + self.assertEqual({group.label for group in orm.UpfFamily.objects.all()}, {label}) self.assertEqual(self.pseudo_barium.get_upf_family_names(), [label]) def test_get_upf_groups(self): """Test the `UpfData.get_upf_groups` class method.""" - type_string = orm.GroupTypeString.UPFGROUP_TYPE.value label_01 = 'family_01' label_02 = 'family_02' user = orm.User(email='alternate@localhost').store() - self.assertEqual(orm.UpfData.get_upf_groups(), []) + self.assertEqual(orm.UpfFamily.objects.all(), []) # Create group with default user and add `Ba` pseudo - family_01, _ = orm.Group.objects.get_or_create(label=label_01, type_string=type_string) + family_01, _ = orm.UpfFamily.objects.get_or_create(label=label_01) family_01.add_nodes([self.pseudo_barium]) family_01.store() self.assertEqual({group.label for group in orm.UpfData.get_upf_groups()}, {label_01}) # Create group with different user and add `O` pseudo - family_02, _ = orm.Group.objects.get_or_create(label=label_02, type_string=type_string, user=user) + family_02, _ = orm.UpfFamily.objects.get_or_create(label=label_02, user=user) family_02.add_nodes([self.pseudo_oxygen]) family_02.store() diff --git 
a/tests/orm/test_autogroups.py b/tests/orm/test_autogroups.py
new file mode 100644
index 0000000000..23ca495af9
--- /dev/null
+++ b/tests/orm/test_autogroups.py
@@ -0,0 +1,115 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+"""Tests for the Autogroup functionality."""
+from aiida.backends.testbase import AiidaTestCase
+from aiida.orm import AutoGroup, QueryBuilder
+from aiida.orm.autogroup import Autogroup
+
+
+class TestAutogroup(AiidaTestCase):
+    """Tests the Autogroup logic."""
+
+    def test_get_or_create(self):
+        """Test the ``get_or_create_group`` method of ``Autogroup``."""
+        label_prefix = 'test_prefix_TestAutogroup'
+
+        # Check that there are no groups to begin with
+        queryb = QueryBuilder().append(AutoGroup, filters={'label': label_prefix})
+        assert not list(queryb.all())
+        queryb = QueryBuilder().append(AutoGroup, filters={'label': {'like': r'{}\_%'.format(label_prefix)}})
+        assert not list(queryb.all())
+
+        # First group (no existing one)
+        autogroup = Autogroup()
+        autogroup.set_group_label_prefix(label_prefix)
+        group = autogroup.get_or_create_group()
+        expected_label = label_prefix
+        self.assertEqual(
+            group.label, expected_label,
+            "The auto-group should be labelled '{}', it is instead '{}'".format(expected_label, group.label)
+        )
+
+        # Second group (only the group with no suffix exists)
+        autogroup = Autogroup()
+        autogroup.set_group_label_prefix(label_prefix)
+        group = autogroup.get_or_create_group()
+        expected_label = label_prefix + '_1'
+        self.assertEqual(
+            group.label, expected_label,
+            "The auto-group should be labelled '{}', it is instead '{}'".format(expected_label, group.label)
+        )
+
+        # Third group (groups with no suffix and with suffix '_1' exist)
+        autogroup = Autogroup()
+        autogroup.set_group_label_prefix(label_prefix)
+        group = autogroup.get_or_create_group()
+        expected_label = label_prefix + '_2'
+        self.assertEqual(
+            group.label, expected_label,
+            "The auto-group should be labelled '{}', it is instead '{}'".format(expected_label, group.label)
+        )
+
+        # I create a group with a large integer suffix (9)
+        AutoGroup(label='{}_9'.format(label_prefix)).store()
+        # The next autogroup should become number 10
+        autogroup = Autogroup()
+        autogroup.set_group_label_prefix(label_prefix)
+        group = autogroup.get_or_create_group()
+        expected_label = label_prefix + '_10'
+        self.assertEqual(
+            group.label, expected_label,
+            "The auto-group should be labelled '{}', it is instead '{}'".format(expected_label, group.label)
+        )
+
+        # I create a group with a non-integer suffix (15b); it should be ignored
+        AutoGroup(label='{}_15b'.format(label_prefix)).store()
+        # The next autogroup should become number 11
+        autogroup = Autogroup()
+        autogroup.set_group_label_prefix(label_prefix)
+        group = autogroup.get_or_create_group()
+        expected_label = label_prefix + '_11'
+        self.assertEqual(
+            group.label, expected_label,
+            "The auto-group should be labelled '{}', it is instead '{}'".format(expected_label, group.label)
+        )
+
+    def test_get_or_create_invalid_prefix(self):
+        """Test the ``get_or_create_group`` method of ``Autogroup`` when there is already a group
+        with the same 
prefix, but followed by other non-underscore characters."""
+        label_prefix = 'new_test_prefix_TestAutogroup'
+        # I create a group with the same prefix, but followed by non-underscore
+        # characters. These should be ignored in the logic.
+        AutoGroup(label='{}xx'.format(label_prefix)).store()
+
+        # Check that there are no groups to begin with
+        queryb = QueryBuilder().append(AutoGroup, filters={'label': label_prefix})
+        assert not list(queryb.all())
+        queryb = QueryBuilder().append(AutoGroup, filters={'label': {'like': r'{}\_%'.format(label_prefix)}})
+        assert not list(queryb.all())
+
+        # First group (no existing one)
+        autogroup = Autogroup()
+        autogroup.set_group_label_prefix(label_prefix)
+        group = autogroup.get_or_create_group()
+        expected_label = label_prefix
+        self.assertEqual(
+            group.label, expected_label,
+            "The auto-group should be labelled '{}', it is instead '{}'".format(expected_label, group.label)
+        )
+
+        # Second group (only the group with no suffix exists)
+        autogroup = Autogroup()
+        autogroup.set_group_label_prefix(label_prefix)
+        group = autogroup.get_or_create_group()
+        expected_label = label_prefix + '_1'
+        self.assertEqual(
+            group.label, expected_label,
+            "The auto-group should be labelled '{}', it is instead '{}'".format(expected_label, group.label)
+        )
diff --git a/tests/orm/test_groups.py b/tests/orm/test_groups.py
index 9c842aa2c4..e598983697 100644
--- a/tests/orm/test_groups.py
+++ b/tests/orm/test_groups.py
@@ -8,6 +8,7 @@
 # For further information please visit http://www.aiida.net               #
 ###########################################################################
 """Test for the Group ORM class."""
+import pytest
 
 from aiida import orm
 from aiida.backends.testbase import AiidaTestCase
@@ -273,3 +274,149 @@ def test_group_uuid_hashing_for_querybuidler(self):
         # And that the results are correct
         self.assertEqual(builder.count(), 1)
         self.assertEqual(builder.first()[0], group.id)
+
+
+class TestGroupsSubclasses(AiidaTestCase):
+    """Test rules around creating `Group` subclasses."""
+
+    def setUp(self):
+        """Remove all existing Groups."""
+        for group in orm.Group.objects.all():
+            orm.Group.objects.delete(group.id)
+
+    @staticmethod
+    def test_creation_registered():
+        """Test rules around creating registered `Group` subclasses."""
+        group = orm.AutoGroup('some-label')
+        assert isinstance(group, orm.AutoGroup)
+        assert group.type_string == 'core.auto'
+
+        group, _ = orm.AutoGroup.objects.get_or_create('some-auto-group')
+        assert isinstance(group, orm.AutoGroup)
+        assert group.type_string == 'core.auto'
+
+    @staticmethod
+    def test_loading():
+        """Test that loading instances from the database returns the correct subclass of `Group`."""
+        group = orm.Group('normal-group').store()
+        loaded = orm.load_group(group.pk)
+        assert isinstance(loaded, orm.Group)
+
+        group = orm.AutoGroup('auto-group').store()
+        loaded = orm.load_group(group.pk)
+        assert isinstance(loaded, orm.AutoGroup)
+
+    @staticmethod
+    def test_creation_unregistered():
+        """Test rules around creating `Group` subclasses without a registered entry point."""
+
+        # Defining an unregistered subclass should issue a warning and its type string should be set to `None`
+        with pytest.warns(UserWarning):
+
+            class SubGroup(orm.Group):
+                pass
+
+        assert SubGroup._type_string is None  # pylint: disable=protected-access
+
+        # Creating an instance is allowed
+        instance = SubGroup(label='subgroup')
+        assert instance._type_string is None  # pylint: disable=protected-access
+
+        # Storing the instance, however, is forbidden and should raise
+        
with pytest.raises(exceptions.StoringNotAllowed):
+            instance.store()
+
+    @staticmethod
+    def test_loading_unregistered():
+        """Test rules around loading `Group` subclasses without a registered entry point.
+
+        Storing instances of unregistered subclasses is not allowed so we have to create one sneakily by instantiating
+        a normal group and manipulating the type string directly on the database model.
+        """
+        group = orm.Group(label='group')
+        group.backend_entity.dbmodel.type_string = 'unregistered.subclass'
+        group.store()
+
+        with pytest.warns(UserWarning):
+            loaded = orm.load_group(group.pk)
+
+        assert isinstance(loaded, orm.Group)
+
+    @staticmethod
+    def test_explicit_type_string():
+        """Test that passing explicit `type_string` to `Group` constructor is still possible despite being deprecated.
+
+        Both constructing a group while passing explicit `type_string` as well as loading a group with unregistered
+        type string should emit a warning, but it should be possible.
+        """
+        type_string = 'data.potcar'  # An unregistered custom type string
+
+        with pytest.warns(UserWarning):
+            group = orm.Group(label='group', type_string=type_string)
+
+        group.store()
+        assert group.type_string == type_string
+
+        with pytest.warns(UserWarning):
+            loaded = orm.Group.get(label=group.label, type_string=type_string)
+
+        assert isinstance(loaded, orm.Group)
+        assert loaded.pk == group.pk
+        assert loaded.type_string == group.type_string
+
+        queried = orm.QueryBuilder().append(orm.Group, filters={'id': group.pk, 'type_string': type_string}).one()[0]
+        assert isinstance(queried, orm.Group)
+        assert queried.pk == group.pk
+        assert queried.type_string == group.type_string
+
+    @staticmethod
+    def test_querying():
+        """Test querying for groups with and without subclassing."""
+        orm.Group(label='group').store()
+        orm.AutoGroup(label='auto-group').store()
+
+        # Fake a subclass by manually setting the type string
+        group = orm.Group(label='custom-group')
+        group.backend_entity.dbmodel.type_string = 'custom.group'
+        group.store()
+
+        assert orm.QueryBuilder().append(orm.AutoGroup).count() == 1
+        assert orm.QueryBuilder().append(orm.AutoGroup, subclassing=False).count() == 1
+        assert orm.QueryBuilder().append(orm.Group, subclassing=False).count() == 1
+        assert orm.QueryBuilder().append(orm.Group).count() == 3
+        assert orm.QueryBuilder().append(orm.Group, filters={'type_string': 'custom.group'}).count() == 1
+
+    @staticmethod
+    def test_querying_node_subclasses():
+        """Test querying for groups with multiple types for nodes it contains."""
+        group = orm.Group(label='group').store()
+        data_int = orm.Int().store()
+        data_str = orm.Str().store()
+        data_bool = orm.Bool().store()
+
+        group.add_nodes([data_int, data_str, data_bool])
+
+        builder = orm.QueryBuilder().append(orm.Group, tag='group')
+        builder.append((orm.Int, orm.Str), with_group='group', project='id')
+        results = [entry[0] for entry in builder.iterall()]
+
+        assert len(results) == 2
+        assert data_int.pk in results
+        assert data_str.pk in results
+        assert data_bool.pk not in results
+
+    @staticmethod
+    def test_query_with_group():
+        """Test that a group can be queried starting from a node it contains."""
+        group = orm.Group(label='group').store()
+        data = orm.Data().store()
+
+        group.add_nodes([data])
+
+        builder = orm.QueryBuilder().append(orm.Data, filters={
+            'id': data.pk
+        }, tag='data').append(orm.Group, with_node='data')
+
+        loaded = builder.one()[0]
+
+        assert loaded.pk == group.pk
diff --git a/tests/orm/test_querybuilder.py b/tests/orm/test_querybuilder.py
index 68fad02f9d..ef7625d0ef 100644
--- 
a/tests/orm/test_querybuilder.py +++ b/tests/orm/test_querybuilder.py @@ -60,10 +60,10 @@ def test_ormclass_type_classification(self): for _cls, classifiers in ( qb._get_ormclass(orm.Group, None), - qb._get_ormclass(None, 'group'), - qb._get_ormclass(None, 'Group'), + qb._get_ormclass(None, 'group.core'), + qb._get_ormclass(None, 'Group.core'), ): - self.assertEqual(classifiers['ormclass_type_string'], 'group') + self.assertTrue(classifiers['ormclass_type_string'].startswith('group')) for _cls, classifiers in ( qb._get_ormclass(orm.User, None), diff --git a/tests/orm/utils/test_node.py b/tests/orm/utils/test_node.py index 239a2a5cb8..9cc72edba5 100644 --- a/tests/orm/utils/test_node.py +++ b/tests/orm/utils/test_node.py @@ -8,6 +8,7 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Tests for the `Node` utils.""" +import pytest from aiida.backends.testbase import AiidaTestCase from aiida.orm import Data @@ -21,3 +22,8 @@ def test_load_node_class_fallback(self): """Verify that `load_node_class` will fall back to `Data` class if entry point cannot be loaded.""" loaded_class = load_node_class('data.some.non.existing.plugin.') self.assertEqual(loaded_class, Data) + + # For really unresolvable type strings, we fall back onto the `Data` class + with pytest.warns(UserWarning): + loaded_class = load_node_class('__main__.SubData.') + self.assertEqual(loaded_class, Data) diff --git a/tests/test_dataclasses.py b/tests/test_dataclasses.py index 83e37ce8b3..47f9a7199c 100644 --- a/tests/test_dataclasses.py +++ b/tests/test_dataclasses.py @@ -255,13 +255,13 @@ def test_ase_primitive_and_conventional_cells_ase(self): c = CifData(file=tmpf.name) ase = c.get_structure(converter='ase', primitive_cell=False).get_ase() - self.assertEqual(ase.get_number_of_atoms(), 15) + self.assertEqual(ase.get_global_number_of_atoms(), 15) ase = c.get_structure(converter='ase').get_ase() - self.assertEqual(ase.get_number_of_atoms(), 15) + self.assertEqual(ase.get_global_number_of_atoms(), 15) ase = c.get_structure(converter='ase', primitive_cell=True, subtrans_included=False).get_ase() - self.assertEqual(ase.get_number_of_atoms(), 5) + self.assertEqual(ase.get_global_number_of_atoms(), 5) @unittest.skipIf(not has_ase(), 'Unable to import ase') @unittest.skipIf(not has_pycifrw(), 'Unable to import PyCifRW') @@ -310,13 +310,13 @@ def test_ase_primitive_and_conventional_cells_pymatgen(self): c = CifData(file=tmpf.name) ase = c.get_structure(converter='pymatgen', primitive_cell=False).get_ase() - self.assertEqual(ase.get_number_of_atoms(), 15) + self.assertEqual(ase.get_global_number_of_atoms(), 15) ase = c.get_structure(converter='pymatgen').get_ase() - self.assertEqual(ase.get_number_of_atoms(), 15) + self.assertEqual(ase.get_global_number_of_atoms(), 15) ase = c.get_structure(converter='pymatgen', primitive_cell=True).get_ase() - self.assertEqual(ase.get_number_of_atoms(), 5) + self.assertEqual(ase.get_global_number_of_atoms(), 5) @unittest.skipIf(not has_pycifrw(), 'Unable to import PyCifRW') def test_pycifrw_from_datablocks(self): diff --git a/tests/test_nodes.py b/tests/test_nodes.py index 463baad315..1c1a2d6c5e 100644 --- a/tests/test_nodes.py +++ b/tests/test_nodes.py @@ -24,32 +24,28 @@ class TestNodeIsStorable(AiidaTestCase): - """ - Test if one can store specific Node subclasses, and that Node and - ProcessType are not storable, intead. 
- """ + """Test that checks on storability of certain node sub classes work correctly.""" - def test_storable_unstorable(self): - """ - Test storability of Nodes - """ - node = orm.Node() + def test_base_classes(self): + """Test storability of `Node` base sub classes.""" with self.assertRaises(StoringNotAllowed): - node.store() + orm.Node().store() - process = orm.ProcessNode() with self.assertRaises(StoringNotAllowed): - process.store() + orm.ProcessNode().store() - # These below should be allowed instead - data = orm.Data() - data.store() + # The following base classes are storable + orm.Data().store() + orm.CalculationNode().store() + orm.WorkflowNode().store() - calc = orm.CalculationNode() - calc.store() + def test_unregistered_sub_class(self): + """Sub classes without a registered entry point are not storable.""" + class SubData(orm.Data): + pass - work = orm.WorkflowNode() - work.store() + with self.assertRaises(StoringNotAllowed): + SubData().store() class TestNodeCopyDeepcopy(AiidaTestCase): @@ -1207,35 +1203,6 @@ def test_load_node(self): with self.assertRaises(NotExistent): orm.load_node(spec, sub_classes=(orm.ArrayData,)) - def test_load_unknown_data_type(self): - """ - Test that the loader will choose a common data ancestor for an unknown data type. - For the case where, e.g., the user doesn't have the necessary plugin. - """ - from aiida.plugins import DataFactory - - KpointsData = DataFactory('array.kpoints') - kpoint = KpointsData().store() - - # compare if plugin exist - obj = orm.load_node(uuid=kpoint.uuid) - self.assertEqual(type(kpoint), type(obj)) - - class TestKpointsData(KpointsData): - pass - - # change node type and save in database again - TestKpointsData().store() - - # changed node should return data node as its plugin is not exist - obj = orm.load_node(uuid=kpoint.uuid) - self.assertEqual(type(kpoint), type(obj)) - - # for node - n1 = orm.Data().store() - obj = orm.load_node(n1.uuid) - self.assertEqual(type(n1), type(obj)) - class TestSubNodesAndLinks(AiidaTestCase): diff --git a/tests/tools/graph/test_age.py b/tests/tools/graph/test_age.py index dddf2323c2..538087c7d7 100644 --- a/tests/tools/graph/test_age.py +++ b/tests/tools/graph/test_age.py @@ -494,7 +494,7 @@ def test_groups(self): # Rule that only gets nodes connected by the same group queryb = orm.QueryBuilder() queryb.append(orm.Node, tag='nodes_in_set') - queryb.append(orm.Group, with_node='nodes_in_set', tag='groups_considered', filters={'type_string': 'user'}) + queryb.append(orm.Group, with_node='nodes_in_set', tag='groups_considered') queryb.append(orm.Data, with_group='groups_considered') initial_node = [node2.id] @@ -513,7 +513,7 @@ def test_groups(self): # But two rules chained should get both nodes and groups... 
@@ -513,7 +513,7 @@
         # But two rules chained should get both nodes and groups...
         queryb = orm.QueryBuilder()
         queryb.append(orm.Node, tag='nodes_in_set')
-        queryb.append(orm.Group, with_node='nodes_in_set', filters={'type_string': 'user'})
+        queryb.append(orm.Group, with_node='nodes_in_set')
         rule1 = UpdateRule(queryb)

         queryb = orm.QueryBuilder()
@@ -569,7 +569,7 @@

         qb1 = orm.QueryBuilder()
         qb1.append(orm.Node, tag='nodes_in_set')
-        qb1.append(orm.Group, with_node='nodes_in_set', filters={'type_string': 'user'})
+        qb1.append(orm.Group, with_node='nodes_in_set')
         rule1 = UpdateRule(qb1, track_edges=True)

         qb2 = orm.QueryBuilder()
diff --git a/examples/__init__.py b/tests/tools/groups/__init__.py
similarity index 100%
rename from examples/__init__.py
rename to tests/tools/groups/__init__.py
diff --git a/tests/tools/groups/test_paths.py b/tests/tools/groups/test_paths.py
new file mode 100644
index 0000000000..75e445bb97
--- /dev/null
+++ b/tests/tools/groups/test_paths.py
@@ -0,0 +1,186 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+# pylint: disable=redefined-outer-name,unused-argument
+"""Tests for GroupPath."""
+import pytest
+
+from aiida import orm
+from aiida.tools.groups.paths import GroupAttr, GroupPath, InvalidPath, GroupNotFoundError, NoGroupsInPathError
+
+
+@pytest.fixture
+def setup_groups(clear_database_before_test):
+    """Setup some groups for testing."""
+    for label in ['a', 'a/b', 'a/c/d', 'a/c/e/g', 'a/f']:
+        group, _ = orm.Group.objects.get_or_create(label)
+        group.description = 'A description of {}'.format(label)
+    yield
+
+
+@pytest.mark.parametrize('path', ('/a', 'a/', '/a/', 'a//b'))
+def test_invalid_paths(setup_groups, path):
+    """Invalid paths should raise an ``InvalidPath`` exception."""
+    with pytest.raises(InvalidPath):
+        GroupPath(path=path)
+
+
+def test_root_path(setup_groups):
+    """Test the root path properties."""
+    group_path = GroupPath()
+    assert group_path.path == ''
+    assert group_path.delimiter == '/'
+    assert group_path.parent is None
+    assert group_path.is_virtual
+    assert group_path.get_group() is None
+
+
+def test_path_concatenation(setup_groups):
+    """Test methods to build a new path."""
+    group_path = GroupPath()
+    assert (group_path / 'a').path == 'a'
+    assert (group_path / 'a' / 'b').path == 'a/b'
+    assert (group_path / 'a/b').path == 'a/b'
+    assert group_path['a/b'].path == 'a/b'
+    assert GroupPath('a/b/c') == GroupPath('a/b') / 'c'
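Before the remaining path tests, a short usage sketch of the API under test; this is not part of the patch and assumes a loaded profile:

    from aiida.tools.groups.paths import GroupPath

    root = GroupPath()          # virtual root; uses '/' as the delimiter
    child = root / 'a' / 'c'    # paths compose with '/' or item access, e.g. root['a/c']
    group, created = child.get_or_create_group()  # materializes the group if it is virtual

A path is "virtual" while no stored group carries its exact label, which is what the `is_virtual` checks below assert.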
+
+
+def test_path_existence(setup_groups):
+    """Test existence of child "folders"."""
+    group_path = GroupPath()
+    assert 'a' in group_path
+    assert 'x' not in group_path
+
+
+def test_group_retrieval(setup_groups):
+    """Test retrieval of the actual group from a path.
+
+    The ``get_group`` method will return ``None``
+    if no group is associated with the path.
+    """
+    group_path = GroupPath()
+    assert group_path['x'].is_virtual
+    assert not group_path['a'].is_virtual
+    assert group_path.get_group() is None
+    assert isinstance(group_path['a'].get_group(), orm.Group)
+
+
+def test_group_creation(setup_groups):
+    """Test creation of new groups."""
+    group_path = GroupPath()
+    group, created = group_path['a'].get_or_create_group()
+    assert isinstance(group, orm.Group)
+    assert created is False
+    group, created = group_path['x'].get_or_create_group()
+    assert isinstance(group, orm.Group)
+    assert created is True
+
+
+def test_group_deletion(setup_groups):
+    """Test deletion of existing groups."""
+    group_path = GroupPath()
+    assert not group_path['a'].is_virtual
+    group_path['a'].delete_group()
+    assert group_path['a'].is_virtual
+    with pytest.raises(GroupNotFoundError):
+        group_path['a'].delete_group()
+
+
+def test_path_iteration(setup_groups):
+    """Test iteration of groups."""
+    group_path = GroupPath()
+    assert len(group_path) == 1
+    assert [(c.path, c.is_virtual) for c in group_path.children] == [('a', False)]
+    child = next(group_path.children)
+    assert child.parent == group_path
+    assert len(child) == 3
+    assert [(c.path, c.is_virtual) for c in sorted(child)] == [('a/b', False), ('a/c', True), ('a/f', False)]
+
+
+def test_path_with_no_groups(setup_groups):
+    """Test ``NoGroupsInPathError`` is raised if the path contains no descendant groups."""
+    group_path = GroupPath()
+    with pytest.raises(NoGroupsInPathError):
+        list(group_path['x'])
+
+
+def test_walk(setup_groups):
+    """Test the ``GroupPath.walk()`` function."""
+    group_path = GroupPath()
+    assert [c.path for c in sorted(group_path.walk())] == ['a', 'a/b', 'a/c', 'a/c/d', 'a/c/e', 'a/c/e/g', 'a/f']
+
+
+def test_walk_with_invalid_path(clear_database_before_test):
+    """Test the ``GroupPath.walk`` method with invalid paths."""
+    for label in ['a', 'a/b', 'a/c/d', 'a/c/e/g', 'a/f', 'bad//group', 'bad/other']:
+        orm.Group.objects.get_or_create(label)
+    group_path = GroupPath()
+    expected = ['a', 'a/b', 'a/c', 'a/c/d', 'a/c/e', 'a/c/e/g', 'a/f', 'bad', 'bad/other']
+    assert [c.path for c in sorted(group_path.walk())] == expected
+
+
+def test_walk_nodes(clear_database_before_test):
+    """Test the ``GroupPath.walk_nodes()`` function."""
+    group, _ = orm.Group.objects.get_or_create('a')
+    node = orm.Data()
+    node.set_attribute_many({'i': 1, 'j': 2})
+    node.store()
+    group.add_nodes(node)
+    group_path = GroupPath()
+    assert [(r.group_path.path, r.node.attributes) for r in group_path.walk_nodes()] == [('a', {'i': 1, 'j': 2})]
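The traversal helpers tested above can be summarized in a two-line sketch under the same assumptions (loaded profile, some stored groups):

    from aiida.tools.groups.paths import GroupPath

    for result in GroupPath().walk_nodes():
        print(result.group_path.path, result.node.pk)  # every node in every group below the root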
+
+
+def test_cls(clear_database_before_test):
+    """Test that only instances of `cls` or its subclasses are matched by ``GroupPath``."""
+    for label in ['a', 'a/b', 'a/c/d', 'a/c/e/g']:
+        orm.Group.objects.get_or_create(label)
+    for label in ['a/c/e', 'a/f']:
+        orm.UpfFamily.objects.get_or_create(label)
+    group_path = GroupPath()
+    assert sorted([c.path for c in group_path.walk()]) == ['a', 'a/b', 'a/c', 'a/c/d', 'a/c/e', 'a/c/e/g']
+    group_path = GroupPath(cls=orm.UpfFamily)
+    assert sorted([c.path for c in group_path.walk()]) == ['a', 'a/c', 'a/c/e', 'a/f']
+    assert GroupPath('a/b/c') != GroupPath('a/b/c', cls=orm.UpfFamily)
+
+
+def test_attr(clear_database_before_test):
+    """Test ``GroupAttr``."""
+    for label in ['a', 'a/b', 'a/c/d', 'a/c/e/g', 'a/f', 'bad space', 'bad@char', '_badstart']:
+        orm.Group.objects.get_or_create(label)
+    group_path = GroupPath()
+    assert isinstance(group_path.browse.a.c.d, GroupAttr)
+    assert isinstance(group_path.browse.a.c.d(), GroupPath)
+    assert group_path.browse.a.c.d().path == 'a/c/d'
+    assert not set(group_path.browse.__dir__()).intersection(['bad space', 'bad@char', '_badstart'])
+    with pytest.raises(AttributeError):
+        group_path.browse.a.c.x  # pylint: disable=pointless-statement
+
+
+def test_cls_label_clashes(clear_database_before_test):
+    """Test behaviour when multiple group classes have the same label."""
+    group_01, _ = orm.Group.objects.get_or_create('a')
+    node_01 = orm.Data().store()
+    group_01.add_nodes(node_01)
+
+    group_02, _ = orm.UpfFamily.objects.get_or_create('a')
+    node_02 = orm.Data().store()
+    group_02.add_nodes(node_02)
+
+    # Requests for non-existing groups should return `None`
+    assert GroupPath('b').get_group() is None
+
+    assert GroupPath('a').group_ids == [group_01.pk]
+    assert GroupPath('a').get_group().pk == group_01.pk
+    expected = [('a', node_01.pk)]
+    assert [(r.group_path.path, r.node.pk) for r in GroupPath('a').walk_nodes()] == expected
+
+    assert GroupPath('a', cls=orm.UpfFamily).group_ids == [group_02.pk]
+    assert GroupPath('a', cls=orm.UpfFamily).get_group().pk == group_02.pk
+    expected = [('a', node_02.pk)]
+    assert [(r.group_path.path, r.node.pk) for r in GroupPath('a', cls=orm.UpfFamily).walk_nodes()] == expected
diff --git a/tests/tools/importexport/migration/__init__.py b/tests/tools/importexport/migration/__init__.py
index 2776a55f97..3a12435017 100644
--- a/tests/tools/importexport/migration/__init__.py
+++ b/tests/tools/importexport/migration/__init__.py
@@ -7,3 +7,35 @@
 # For further information on the license, see the LICENSE.txt file       #
 # For further information please visit http://www.aiida.net              #
 ###########################################################################
+"""Module with tests for export archive migrations."""
+from aiida.backends.testbase import AiidaTestCase
+from aiida.tools.importexport.migration.utils import verify_metadata_version
+from tests.utils.archives import get_json_files
+
+
+class ArchiveMigrationTest(AiidaTestCase):
+    """Base class to write specific tests for a particular export archive migration."""
+
+    @classmethod
+    def setUpClass(cls, *args, **kwargs):
+        super().setUpClass(*args, **kwargs)
+        cls.external_archive = {'filepath': 'archives', 'external_module': 'aiida-export-migration-tests'}
+        cls.core_archive = {'filepath': 'export/migrate'}
+        cls.maxDiff = None  # pylint: disable=invalid-name
+
+    def migrate(self, filename_archive, version_old, version_new, migration_method):
+        """Migrate one of the archives from `aiida-export-migration-tests`.
+ + :param filename_archive: the relative file name of the archive + :param version_old: version of the archive + :param version_new: version to migrate to + :param migration_method: the migration method that should convert between version_old and version_new + :return: the migrated metadata and data as a tuple + """ + metadata, data = get_json_files(filename_archive, **self.external_archive) + verify_metadata_version(metadata, version=version_old) + + migration_method(metadata, data) + verify_metadata_version(metadata, version=version_new) + + return metadata, data diff --git a/tests/tools/importexport/migration/test_migration.py b/tests/tools/importexport/migration/test_migration.py index c2c45afbd2..82c08b8e1d 100644 --- a/tests/tools/importexport/migration/test_migration.py +++ b/tests/tools/importexport/migration/test_migration.py @@ -8,14 +8,12 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Test export file migration from old export versions to the newest""" - import os from aiida import orm from aiida.backends.testbase import AiidaTestCase -from aiida.tools.importexport import import_data, EXPORT_VERSION as newest_version +from aiida.tools.importexport import import_data, ArchiveMigrationError, Archive, EXPORT_VERSION as newest_version from aiida.tools.importexport.migration import migrate_recursively, verify_metadata_version -from aiida.common.utils import Capturing from tests.utils.archives import get_archive_file, get_json_files, migrate_archive from tests.utils.configuration import with_temp_dir @@ -102,6 +100,28 @@ def test_migrate_recursively(self): verify_metadata_version(metadata, version=newest_version) self.assertEqual(new_version, newest_version) + def test_migrate_recursively_specific_version(self): + """Test the `version` argument of the `migrate_recursively` function.""" + filepath_archive = get_archive_file('export_v0.3_simple.aiida', **self.core_archive) + + with Archive(filepath_archive) as archive: + + # Incorrect type + with self.assertRaises(TypeError): + migrate_recursively(archive.meta_data, archive.data, None, version=0.2) + + # Backward migrations are not supported + with self.assertRaises(ArchiveMigrationError): + migrate_recursively(archive.meta_data, archive.data, None, version='0.2') + + # Same version will also raise + with self.assertRaises(ArchiveMigrationError): + migrate_recursively(archive.meta_data, archive.data, None, version='0.3') + + migrated_version = '0.5' + version = migrate_recursively(archive.meta_data, archive.data, None, version=migrated_version) + self.assertEqual(version, migrated_version) + @with_temp_dir def test_no_node_export(self, temp_dir): """Test migration of export file that has no Nodes""" @@ -138,7 +158,6 @@ def test_wrong_versions(self): """Test correct errors are raised if export files have wrong version numbers""" from aiida.tools.importexport.migration import MIGRATE_FUNCTIONS - # Initialization wrong_versions = ['0.0', '0.1.0', '0.99'] old_versions = list(MIGRATE_FUNCTIONS.keys()) legal_versions = old_versions + [newest_version] @@ -147,7 +166,6 @@ def test_wrong_versions(self): metadata = {'export_version': version} wrong_version_metadatas.append(metadata) - # Checks # Make sure the "wrong_versions" are wrong for version in wrong_versions: self.assertNotIn( @@ -156,18 +174,11 @@ def test_wrong_versions(self): msg="'{}' was not expected to be a legal version, legal version: {}".format(version, legal_versions) ) - # Make sure 
migrate_recursively throws a critical message and raises SystemExit + # Make sure migrate_recursively throws an ArchiveMigrationError for metadata in wrong_version_metadatas: - with self.assertRaises(SystemExit) as exception: - with Capturing(capture_stderr=True): - new_version = migrate_recursively(metadata, {}, None) - - self.assertIn( - 'Critical: Cannot migrate from version {}'.format(metadata['export_version']), - exception.exception, - msg="Expected a critical statement for the wrong export version '{}', " - 'instead got {}'.format(metadata['export_version'], exception.exception) - ) + with self.assertRaises(ArchiveMigrationError): + new_version = migrate_recursively(metadata, {}, None) + self.assertIsNone( new_version, msg='migrate_recursively should not return anything, ' @@ -175,26 +186,12 @@ def test_wrong_versions(self): ) def test_migrate_newest_version(self): - """ - Test critical message and SystemExit is raised, when an export file with the newest export version is migrated - """ - # Initialization + """Test that an exception is raised when an export file with the newest export version is migrated.""" metadata = {'export_version': newest_version} - # Check - with self.assertRaises(SystemExit) as exception: + with self.assertRaises(ArchiveMigrationError): + new_version = migrate_recursively(metadata, {}, None) - with Capturing(capture_stderr=True): - new_version = migrate_recursively(metadata, {}, None) - - self.assertIn( - 'Critical: Your export file is already at the newest export version {}'.format( - metadata['export_version'] - ), - exception.exception, - msg="Expected a critical statement that the export version '{}' is the newest export version '{}', " - 'instead got {}'.format(metadata['export_version'], newest_version, exception.exception) - ) self.assertIsNone( new_version, msg='migrate_recursively should not return anything, ' diff --git a/tests/tools/importexport/migration/test_migrations.py b/tests/tools/importexport/migration/test_migrations.py new file mode 100644 index 0000000000..fc2546d259 --- /dev/null +++ b/tests/tools/importexport/migration/test_migrations.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=redefined-outer-name +"""Test the export archive migrations on the archives included in `tests/fixtures/export/migrate`.""" +import copy +import pytest + +from aiida import get_version +from aiida.tools.importexport.common import Archive +from aiida.tools.importexport.migration.v01_to_v02 import migrate_v1_to_v2 +from aiida.tools.importexport.migration.v02_to_v03 import migrate_v2_to_v3 +from aiida.tools.importexport.migration.v03_to_v04 import migrate_v3_to_v4 +from aiida.tools.importexport.migration.v04_to_v05 import migrate_v4_to_v5 +from aiida.tools.importexport.migration.v05_to_v06 import migrate_v5_to_v6 +from aiida.tools.importexport.migration.v06_to_v07 import migrate_v6_to_v7 +from aiida.tools.importexport.migration.v07_to_v08 import migrate_v7_to_v8 +from aiida.tools.importexport.migration.utils import verify_metadata_version +from tests.utils.archives import get_json_files, get_archive_file + + +@pytest.fixture +def migration_data(request): + """For a given tuple of two subsequent versions and corresponding migration method, return metadata and data.""" + version_old, version_new, migration_method = request.param + + filepath_archive = 'export_v{}_simple.aiida'.format(version_new) + metadata_new, data_new = get_json_files(filepath_archive, filepath='export/migrate') + verify_metadata_version(metadata_new, version=version_new) + + filepath_archive = get_archive_file('export_v{}_simple.aiida'.format(version_old), filepath='export/migrate') + + with Archive(filepath_archive) as archive: + metadata_old = copy.deepcopy(archive.meta_data) + data_old = copy.deepcopy(archive.data) + + migration_method(metadata_old, data_old, archive.folder) + verify_metadata_version(metadata_old, version=version_new) + + yield version_old, version_new, metadata_old, metadata_new, data_old, data_new + + +@pytest.mark.parametrize( + 'migration_data', + (('0.1', '0.2', migrate_v1_to_v2), ('0.2', '0.3', migrate_v2_to_v3), ('0.3', '0.4', migrate_v3_to_v4), + ('0.4', '0.5', migrate_v4_to_v5), ('0.5', '0.6', migrate_v5_to_v6), ('0.6', '0.7', migrate_v6_to_v7), + ('0.7', '0.8', migrate_v7_to_v8)), + indirect=True +) +def test_migrations(migration_data): + """Test each migration method from the `aiida.tools.importexport.migration` module.""" + version_old, version_new, metadata_old, metadata_new, data_old, data_new = migration_data + + # Remove AiiDA version, since this may change regardless of the migration function + metadata_old.pop('aiida_version') + metadata_new.pop('aiida_version') + + # Assert conversion message in `metadata.json` is correct and then remove it for later assertions + metadata_new.pop('conversion_info') + message = 'Converted from version {} to {} with AiiDA v{}'.format(version_old, version_new, get_version()) + assert metadata_old.pop('conversion_info')[-1] == message, 'Conversion message after migration is wrong' + + assert metadata_old == metadata_new + assert data_old == data_new diff --git a/tests/tools/importexport/migration/test_v01_to_v02.py b/tests/tools/importexport/migration/test_v01_to_v02.py deleted file mode 100644 index 4dcc92e27a..0000000000 --- a/tests/tools/importexport/migration/test_v01_to_v02.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- 
-########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Test export file migration from export version 0.1 to 0.2""" - -from aiida import get_version -from aiida.backends.testbase import AiidaTestCase -from aiida.tools.importexport.migration.utils import verify_metadata_version -from aiida.tools.importexport.migration.v01_to_v02 import migrate_v1_to_v2 - -from tests.utils.archives import get_json_files - - -class TestMigrateV01toV02(AiidaTestCase): - """Test migration of export files from export version 0.1 to 0.2""" - - def test_migrate_v1_to_v2(self): - """Test function migrate_v1_to_v2""" - # Get metadata.json and data.json as dicts from v0.1 file archive - metadata_v1, data_v1 = get_json_files('export_v0.1_simple.aiida', filepath='export/migrate') - verify_metadata_version(metadata_v1, version='0.1') - - # Get metadata.json and data.json as dicts from v0.2 file archive - metadata_v2, data_v2 = get_json_files('export_v0.2_simple.aiida', filepath='export/migrate') - verify_metadata_version(metadata_v2, version='0.2') - - # Migrate to v0.2 - migrate_v1_to_v2(metadata_v1, data_v1) - verify_metadata_version(metadata_v1, version='0.2') - - # Remove AiiDA version, since this may change irregardless of the migration function - metadata_v1.pop('aiida_version') - metadata_v2.pop('aiida_version') - - # Assert conversion message in `metadata.json` is correct and then remove it for later assertions - conversion_message = 'Converted from version 0.1 to 0.2 with AiiDA v{}'.format(get_version()) - self.assertEqual( - metadata_v1.pop('conversion_info')[-1], - conversion_message, - msg='The conversion message after migration is wrong' - ) - metadata_v2.pop('conversion_info') - - # Assert changes were performed correctly - self.maxDiff = None # pylint: disable=invalid-name - self.assertDictEqual( - metadata_v1, - metadata_v2, - msg='After migration, metadata.json should equal intended metadata.json from archives' - ) - self.assertDictEqual( - data_v1, data_v2, msg='After migration, data.json should equal intended data.json from archives' - ) diff --git a/tests/tools/importexport/migration/test_v02_to_v03.py b/tests/tools/importexport/migration/test_v02_to_v03.py index 0d5d00b6c3..8a0a0c0cc1 100644 --- a/tests/tools/importexport/migration/test_v02_to_v03.py +++ b/tests/tools/importexport/migration/test_v02_to_v03.py @@ -7,79 +7,21 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -"""Test export file migration from export version 0.2 to 0.3""" # pylint: disable=too-many-branches - -from aiida.backends.testbase import AiidaTestCase -from aiida.tools.importexport.migration.utils import verify_metadata_version +"""Test export file migration from export version 0.2 to 0.3""" from aiida.tools.importexport.migration.v02_to_v03 import migrate_v2_to_v3 from tests.utils.archives import get_json_files +from . 
import ArchiveMigrationTest -class TestMigrateV02toV03(AiidaTestCase): - """Test migration of export files from export version 0.2 to 0.3""" - - @classmethod - def setUpClass(cls, *args, **kwargs): - super().setUpClass(*args, **kwargs) - - # Utility helpers - cls.external_archive = {'filepath': 'archives', 'external_module': 'aiida-export-migration-tests'} - cls.core_archive = {'filepath': 'export/migrate'} - - def test_migrate_v2_to_v3(self): - """Test function migrate_v2_to_v3""" - from aiida import get_version - - # Get metadata.json and data.json as dicts from v0.2 file archive - metadata_v2, data_v2 = get_json_files('export_v0.2_simple.aiida', **self.core_archive) - verify_metadata_version(metadata_v2, version='0.2') - - # Get metadata.json and data.json as dicts from v0.3 file archive - metadata_v3, data_v3 = get_json_files('export_v0.3_simple.aiida', **self.core_archive) - verify_metadata_version(metadata_v3, version='0.3') - - # Migrate to v0.3 - migrate_v2_to_v3(metadata_v2, data_v2) - verify_metadata_version(metadata_v2, version='0.3') - - # Remove AiiDA version, since this may change irregardless of the migration function - metadata_v2.pop('aiida_version') - metadata_v3.pop('aiida_version') - - # Assert conversion message in `metadata.json` is correct and then remove it for later assertions - conversion_message = 'Converted from version 0.2 to 0.3 with AiiDA v{}'.format(get_version()) - self.assertEqual( - metadata_v2.pop('conversion_info')[-1], - conversion_message, - msg='The conversion message after migration is wrong' - ) - metadata_v3.pop('conversion_info') - - # Assert changes were performed correctly - self.maxDiff = None # pylint: disable=invalid-name - self.assertDictEqual( - metadata_v2, - metadata_v3, - msg='After migration, metadata.json should equal intended metadata.json from archives' - ) - self.assertDictEqual( - data_v2, data_v3, msg='After migration, data.json should equal intended data.json from archives' - ) - - def test_migrate_v2_to_v3_complete(self): - """Test migration for file containing complete v0.2 era possibilities""" - - # Get metadata.json and data.json as dicts from v0.2 file archive - metadata, data = get_json_files('export_v0.2.aiida', **self.external_archive) - verify_metadata_version(metadata, version='0.2') +class TestMigrate(ArchiveMigrationTest): + """Tests specific for this archive migration.""" - # Migrate to v0.3 - migrate_v2_to_v3(metadata, data) - verify_metadata_version(metadata, version='0.3') + def test_migrate_external(self): + """Test the migration on the test archive provided by the external test package.""" + metadata, data = self.migrate('export_v0.2.aiida', '0.2', '0.3', migrate_v2_to_v3) - self.maxDiff = None # pylint: disable=invalid-name # Check link types legal_link_types = {'unspecified', 'createlink', 'returnlink', 'inputlink', 'calllink'} for link in data['links_uuid']: @@ -137,7 +79,6 @@ def test_compare_migration_with_aiida_made(self): metadata_v3.pop('aiida_version') self.assertDictEqual(metadata_v2, metadata_v3) - self.maxDiff = None # Compare 'data.json' self.assertEqual(len(data_v2), len(data_v3)) diff --git a/tests/tools/importexport/migration/test_v03_to_v04.py b/tests/tools/importexport/migration/test_v03_to_v04.py index 8ec51bfcaf..63a7f151b0 100644 --- a/tests/tools/importexport/migration/test_v03_to_v04.py +++ b/tests/tools/importexport/migration/test_v03_to_v04.py @@ -7,13 +7,11 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit 
http://www.aiida.net # ########################################################################### -"""Test export file migration from export version 0.3 to 0.4""" # pylint: disable=too-many-locals,too-many-branches,too-many-statements - +"""Test export file migration from export version 0.3 to 0.4""" import tarfile import zipfile -from aiida.backends.testbase import AiidaTestCase from aiida.common.exceptions import NotExistent from aiida.common.folders import SandboxFolder from aiida.common.json import load as jsonload @@ -22,79 +20,13 @@ from aiida.tools.importexport.migration.v03_to_v04 import migrate_v3_to_v4 from tests.utils.archives import get_archive_file, get_json_files +from . import ArchiveMigrationTest -class TestMigrateV03toV04(AiidaTestCase): - """Test migration of export files from export version 0.3 to 0.4""" - - @classmethod - def setUpClass(cls, *args, **kwargs): - super().setUpClass(*args, **kwargs) - - # Utility helpers - cls.external_archive = {'filepath': 'archives', 'external_module': 'aiida-export-migration-tests'} - cls.core_archive = {'filepath': 'export/migrate'} - - def test_migrate_v3_to_v4(self): - """Test function migrate_v3_to_v4""" - from aiida import get_version - - # Get metadata.json and data.json as dicts from v0.4 file archive - metadata_v4, data_v4 = get_json_files('export_v0.4_simple.aiida', **self.core_archive) - verify_metadata_version(metadata_v4, version='0.4') - - # Get metadata.json and data.json as dicts from v0.3 file archive - # Cannot use 'get_json_files' for 'export_v0.3_simple.aiida', - # because we need to pass the SandboxFolder to 'migrate_v3_to_v4' - dirpath_archive = get_archive_file('export_v0.3_simple.aiida', **self.core_archive) - - with SandboxFolder(sandbox_in_repo=False) as folder: - if zipfile.is_zipfile(dirpath_archive): - extract_zip(dirpath_archive, folder, silent=True) - elif tarfile.is_tarfile(dirpath_archive): - extract_tar(dirpath_archive, folder, silent=True) - else: - raise ValueError('invalid file format, expected either a zip archive or gzipped tarball') - - try: - with open(folder.get_abs_path('data.json'), 'r', encoding='utf8') as fhandle: - data_v3 = jsonload(fhandle) - with open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle: - metadata_v3 = jsonload(fhandle) - except IOError: - raise NotExistent('export archive does not contain the required file {}'.format(fhandle.filename)) - - verify_metadata_version(metadata_v3, version='0.3') - - # Migrate to v0.4 - migrate_v3_to_v4(metadata_v3, data_v3, folder) - verify_metadata_version(metadata_v3, version='0.4') - - # Remove AiiDA version, since this may change irregardless of the migration function - metadata_v3.pop('aiida_version') - metadata_v4.pop('aiida_version') - - # Assert conversion message in `metadata.json` is correct and then remove it for later assertions - self.maxDiff = None # pylint: disable=invalid-name - conversion_message = 'Converted from version 0.3 to 0.4 with AiiDA v{}'.format(get_version()) - self.assertEqual( - metadata_v3.pop('conversion_info')[-1], - conversion_message, - msg='The conversion message after migration is wrong' - ) - metadata_v4.pop('conversion_info') - - # Assert changes were performed correctly - self.assertDictEqual( - metadata_v3, - metadata_v4, - msg='After migration, metadata.json should equal intended metadata.json from archives' - ) - self.assertDictEqual( - data_v3, data_v4, msg='After migration, data.json should equal intended data.json from archives' - ) +class TestMigrate(ArchiveMigrationTest): + 
"""Tests specific for this archive migration.""" - def test_migrate_v3_to_v4_complete(self): + def test_migrate_external(self): """Test migration for file containing complete v0.3 era possibilities""" # Get metadata.json and data.json as dicts from v0.3 file archive @@ -138,7 +70,6 @@ def test_migrate_v3_to_v4_complete(self): ## Following checks are based on the archive-file ## Which means there are more legal entities, they are simply not relevant here. - self.maxDiff = None # pylint: disable=invalid-name # Check schema-changes new_node_attrs = {'node_type', 'process_type'} for change in new_node_attrs: @@ -331,13 +262,11 @@ def test_compare_migration_with_aiida_made(self): metadata_v4, data_v4 = get_json_files('export_v0.4.aiida', **self.external_archive) # Compare 'metadata.json' - self.maxDiff = None metadata_v3.pop('conversion_info') metadata_v3.pop('aiida_version') metadata_v4.pop('aiida_version') self.assertDictEqual(metadata_v3, metadata_v4) - self.maxDiff = None # Compare 'data.json' self.assertEqual(len(data_v3), len(data_v4)) diff --git a/tests/tools/importexport/migration/test_v04_to_v05.py b/tests/tools/importexport/migration/test_v04_to_v05.py index ab1d5f62e9..664d1f495a 100644 --- a/tests/tools/importexport/migration/test_v04_to_v05.py +++ b/tests/tools/importexport/migration/test_v04_to_v05.py @@ -8,104 +8,18 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Test export file migration from export version 0.4 to 0.5""" - -import tarfile -import zipfile - -from aiida.backends.testbase import AiidaTestCase -from aiida.common.exceptions import NotExistent -from aiida.common.folders import SandboxFolder -from aiida.common.json import load as jsonload -from aiida.tools.importexport.common.archive import extract_tar, extract_zip -from aiida.tools.importexport.migration.utils import verify_metadata_version from aiida.tools.importexport.migration.v04_to_v05 import migrate_v4_to_v5 -from tests.utils.archives import get_archive_file, get_json_files - - -class TestMigrateV04toV05(AiidaTestCase): - """Test migration of export files from export version 0.4 to 0.5""" - - @classmethod - def setUpClass(cls, *args, **kwargs): - super().setUpClass(*args, **kwargs) - - # Utility helpers - cls.external_archive = {'filepath': 'archives', 'external_module': 'aiida-export-migration-tests'} - cls.core_archive = {'filepath': 'export/migrate'} - - def test_migrate_v4_to_v5(self): - """Test function migrate_v4_to_v5""" - from aiida import get_version - - # Get metadata.json and data.json as dicts from v0.5 file archive - metadata_v5, data_v5 = get_json_files('export_v0.5_simple.aiida', **self.core_archive) - verify_metadata_version(metadata_v5, version='0.5') - - # Get metadata.json and data.json as dicts from v0.4 file archive - # Cannot use 'get_json_files' for 'export_v0.4_simple.aiida', - # because we need to pass the SandboxFolder to 'migrate_v4_to_v5' - dirpath_archive = get_archive_file('export_v0.4_simple.aiida', **self.core_archive) - - with SandboxFolder(sandbox_in_repo=False) as folder: - if zipfile.is_zipfile(dirpath_archive): - extract_zip(dirpath_archive, folder, silent=True) - elif tarfile.is_tarfile(dirpath_archive): - extract_tar(dirpath_archive, folder, silent=True) - else: - raise ValueError('invalid file format, expected either a zip archive or gzipped tarball') - - try: - with open(folder.get_abs_path('data.json'), 'r', encoding='utf8') as fhandle: - data_v4 = jsonload(fhandle) - with 
open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle: - metadata_v4 = jsonload(fhandle) - except IOError: - raise NotExistent('export archive does not contain the required file {}'.format(fhandle.filename)) - - verify_metadata_version(metadata_v4, version='0.4') - - # Migrate to v0.5 - migrate_v4_to_v5(metadata_v4, data_v4) - verify_metadata_version(metadata_v4, version='0.5') - - # Remove AiiDA version, since this may change irregardless of the migration function - metadata_v4.pop('aiida_version') - metadata_v5.pop('aiida_version') - - # Assert conversion message in `metadata.json` is correct and then remove it for later assertions - # Remove also 'conversion_info' from `metadata.json` of v0.5 file archive - self.maxDiff = None # pylint: disable=invalid-name - conversion_message = 'Converted from version 0.4 to 0.5 with AiiDA v{}'.format(get_version()) - self.assertEqual( - metadata_v4.pop('conversion_info')[-1], - conversion_message, - msg='The conversion message after migration is wrong' - ) - metadata_v5.pop('conversion_info') - - # Assert changes were performed correctly - self.assertDictEqual( - metadata_v4, - metadata_v5, - msg='After migration, metadata.json should equal intended metadata.json from archives' - ) - self.assertDictEqual( - data_v4, data_v5, msg='After migration, data.json should equal intended data.json from archives' - ) +from . import ArchiveMigrationTest - def test_migrate_v4_to_v5_complete(self): - """Test migration for file containing complete v0.4 era possibilities""" - # Get metadata.json and data.json as dicts from v0.4 file archive - metadata, data = get_json_files('export_v0.4.aiida', **self.external_archive) - verify_metadata_version(metadata, version='0.4') +class TestMigrate(ArchiveMigrationTest): + """Tests specific for this archive migration.""" - # Migrate to v0.5 - migrate_v4_to_v5(metadata, data) - verify_metadata_version(metadata, version='0.5') + def test_migrate_external(self): + """Test the migration on the test archive provided by the external test package.""" + metadata, data = self.migrate('export_v0.4.aiida', '0.4', '0.5', migrate_v4_to_v5) - self.maxDiff = None # pylint: disable=invalid-name # Check schema-changes removed_computer_attrs = {'transport_params'} removed_node_attrs = {'nodeversion', 'public'} diff --git a/tests/tools/importexport/migration/test_v05_to_v06.py b/tests/tools/importexport/migration/test_v05_to_v06.py index 23bea83d46..08f1490ded 100644 --- a/tests/tools/importexport/migration/test_v05_to_v06.py +++ b/tests/tools/importexport/migration/test_v05_to_v06.py @@ -8,65 +8,25 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Test export file migration from export version 0.5 to 0.6""" - from aiida.backends.general.migrations.calc_state import STATE_MAPPING -from aiida.backends.testbase import AiidaTestCase from aiida.tools.importexport.migration.utils import verify_metadata_version from aiida.tools.importexport.migration.v05_to_v06 import migrate_v5_to_v6 from tests.utils.archives import get_json_files +from . 
import ArchiveMigrationTest -class TestMigrateV05toV06(AiidaTestCase): - """Test migration of export files from export version 0.5 to 0.6""" - - @classmethod - def setUpClass(cls, *args, **kwargs): - super().setUpClass(*args, **kwargs) - - # Utility helpers - cls.external_archive = {'filepath': 'archives', 'external_module': 'aiida-export-migration-tests'} - cls.core_archive = {'filepath': 'export/migrate'} - - def test_migrate_v5_to_v6(self): - """Test migration for file containing complete v0.5 era possibilities""" - from aiida import get_version - - # Get metadata.json and data.json as dicts from v0.5 file archive - metadata_v5, data_v5 = get_json_files('export_v0.5_simple.aiida', **self.core_archive) - verify_metadata_version(metadata_v5, version='0.5') +class TestMigrate(ArchiveMigrationTest): + """Tests specific for this archive migration.""" - # Get metadata.json and data.json as dicts from v0.6 file archive - metadata_v6, data_v6 = get_json_files('export_v0.6_simple.aiida', **self.core_archive) - verify_metadata_version(metadata_v6, version='0.6') + def test_migrate_external(self): + """Test the migration on the test archive provided by the external test package.""" + _, data = self.migrate('export_v0.5_manual.aiida', '0.5', '0.6', migrate_v5_to_v6) - # Migrate to v0.6 - migrate_v5_to_v6(metadata_v5, data_v5) - verify_metadata_version(metadata_v5, version='0.6') - - # Remove AiiDA version, since this may change irregardless of the migration function - metadata_v5.pop('aiida_version') - metadata_v6.pop('aiida_version') - - # Assert conversion message in `metadata.json` is correct and then remove it for later assertions - self.maxDiff = None # pylint: disable=invalid-name - conversion_message = 'Converted from version 0.5 to 0.6 with AiiDA v{}'.format(get_version()) - self.assertEqual( - metadata_v5.pop('conversion_info')[-1], - conversion_message, - msg='The conversion message after migration is wrong' - ) - metadata_v6.pop('conversion_info') - - # Assert changes were performed correctly - self.assertDictEqual( - metadata_v5, - metadata_v6, - msg='After migration, metadata.json should equal intended metadata.json from archives' - ) - self.assertDictEqual( - data_v5, data_v6, msg='After migration, data.json should equal intended data.json from archives' - ) + # Explicitly check that conversion dictionaries were removed + illegal_data_dicts = {'node_attributes_conversion', 'node_extras_conversion'} + for dict_ in illegal_data_dicts: + self.assertNotIn(dict_, data, msg="dictionary '{}' should have been removed from data.json".format(dict_)) def test_migrate_v5_to_v6_calc_states(self): """Test the data migration of legacy `JobCalcState` attributes. @@ -141,19 +101,3 @@ def test_migrate_v5_to_v6_datetime(self): 'the archive `export_v0.5_simple.aiida` did not contain a node with the attribute ' '`scheduler_lastchecktime` which is required for this test.' 
) - - def test_migrate_v5_to_v6_complete(self): - """Test migration for file containing complete v0.5 era possibilities""" - # Get metadata.json and data.json as dicts from v0.5 file archive - metadata, data = get_json_files('export_v0.5_manual.aiida', **self.external_archive) - verify_metadata_version(metadata, version='0.5') - - # Migrate to v0.6 - migrate_v5_to_v6(metadata, data) - verify_metadata_version(metadata, version='0.6') - - self.maxDiff = None # pylint: disable=invalid-name - # Explicitly check that conversion dictionaries were removed - illegal_data_dicts = {'node_attributes_conversion', 'node_extras_conversion'} - for dict_ in illegal_data_dicts: - self.assertNotIn(dict_, data, msg="dictionary '{}' should have been removed from data.json".format(dict_)) diff --git a/tests/tools/importexport/migration/test_v06_to_v07.py b/tests/tools/importexport/migration/test_v06_to_v07.py index e856b43826..34f2f10d87 100644 --- a/tests/tools/importexport/migration/test_v06_to_v07.py +++ b/tests/tools/importexport/migration/test_v06_to_v07.py @@ -8,78 +8,18 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Test export file migration from export version 0.6 to 0.7""" +from aiida.tools.importexport.migration.v06_to_v07 import migrate_v6_to_v7 + +from . import ArchiveMigrationTest + + +class TestMigrate(ArchiveMigrationTest): + """Tests specific for this archive migration.""" + + def test_migrate_external(self): + """Test the migration on the test archive provided by the external test package.""" + metadata, data = self.migrate('export_v0.6_manual.aiida', '0.6', '0.7', migrate_v6_to_v7) -from aiida.backends.testbase import AiidaTestCase -from aiida.tools.importexport.migration.utils import verify_metadata_version -from aiida.tools.importexport.migration.v06_to_v07 import ( - migrate_v6_to_v7, migration_data_migration_legacy_process_attributes -) - -from tests.utils.archives import get_json_files - - -class TestMigrateV06toV07(AiidaTestCase): - """Test migration of export files from export version 0.6 to 0.7""" - - @classmethod - def setUpClass(cls, *args, **kwargs): - super().setUpClass(*args, **kwargs) - - # Utility helpers - cls.external_archive = {'filepath': 'archives', 'external_module': 'aiida-export-migration-tests'} - cls.core_archive = {'filepath': 'export/migrate'} - - def test_migrate_v6_to_v7(self): - """Test migration for file containing complete v0.6 era possibilities""" - from aiida import get_version - - # Get metadata.json and data.json as dicts from v0.6 file archive - metadata_v6, data_v6 = get_json_files('export_v0.6_simple.aiida', **self.core_archive) - verify_metadata_version(metadata_v6, version='0.6') - - # Get metadata.json and data.json as dicts from v0.7 file archive - metadata_v7, data_v7 = get_json_files('export_v0.7_simple.aiida', **self.core_archive) - verify_metadata_version(metadata_v7, version='0.7') - - # Migrate to v0.7 - migrate_v6_to_v7(metadata_v6, data_v6) - verify_metadata_version(metadata_v6, version='0.7') - - # Remove AiiDA version, since this may change irregardless of the migration function - metadata_v6.pop('aiida_version') - metadata_v7.pop('aiida_version') - - # Assert conversion message in `metadata.json` is correct and then remove it for later assertions - self.maxDiff = None # pylint: disable=invalid-name - conversion_message = 'Converted from version 0.6 to 0.7 with AiiDA v{}'.format(get_version()) - self.assertEqual( - 
metadata_v6.pop('conversion_info')[-1], - conversion_message, - msg='The conversion message after migration is wrong' - ) - metadata_v7.pop('conversion_info') - - # Assert changes were performed correctly - self.assertDictEqual( - metadata_v6, - metadata_v7, - msg='After migration, metadata.json should equal intended metadata.json from archives' - ) - self.assertDictEqual( - data_v6, data_v7, msg='After migration, data.json should equal intended data.json from archives' - ) - - def test_migrate_v6_to_v7_complete(self): - """Test migration for file containing complete v0.6 era possibilities""" - # Get metadata.json and data.json as dicts from v0.6 file archive - metadata, data = get_json_files('export_v0.6_manual.aiida', **self.external_archive) - verify_metadata_version(metadata, version='0.6') - - # Migrate to v0.7 - migrate_v6_to_v7(metadata, data) - verify_metadata_version(metadata, version='0.7') - - self.maxDiff = None # pylint: disable=invalid-name # Check attributes of process.* nodes illegal_attrs = {'_sealed', '_finished', '_failed', '_aborted', '_do_abort'} new_attrs = {'sealed': True} @@ -119,6 +59,7 @@ def test_migrate_v6_to_v7_complete(self): def test_migration_0040_corrupt_archive(self): """Check CorruptArchive is raised for different cases during migration 0040""" from aiida.tools.importexport.common.exceptions import CorruptArchive + from aiida.tools.importexport.migration.v06_to_v07 import migration_data_migration_legacy_process_attributes # data has one "valid" entry, in the form of Node . # At least it has the needed key `node_type`. @@ -180,6 +121,7 @@ def test_migration_0040_corrupt_archive(self): def test_migration_0040_no_process_state(self): """Check old ProcessNodes without a `process_state` can be migrated""" + from aiida.tools.importexport.migration.v06_to_v07 import migration_data_migration_legacy_process_attributes # data has one "new" entry, in the form of Node . # data also has one "old" entry, in form of Node . # It doesn't have a `process_state` attribute (nor a `sealed` or `_sealed`) diff --git a/tests/tools/importexport/migration/test_v07_to_v08.py b/tests/tools/importexport/migration/test_v07_to_v08.py index 2068abb895..65ca6dcbda 100644 --- a/tests/tools/importexport/migration/test_v07_to_v08.py +++ b/tests/tools/importexport/migration/test_v07_to_v08.py @@ -8,76 +8,18 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Test export file migration from export version 0.7 to 0.8""" +from aiida.tools.importexport.migration.v07_to_v08 import migrate_v7_to_v8, migration_default_link_label -from aiida.backends.testbase import AiidaTestCase -from aiida.tools.importexport.migration.utils import verify_metadata_version -from aiida.tools.importexport.migration.v07_to_v08 import (migrate_v7_to_v8, migration_default_link_label) +from . 
import ArchiveMigrationTest -from tests.utils.archives import get_json_files +class TestMigrate(ArchiveMigrationTest): + """Tests specific for this archive migration.""" -class TestMigrateV07toV08(AiidaTestCase): - """Test migration of export files from export version 0.7 to 0.8""" + def test_migrate_external(self): + """Test the migration on the test archive provided by the external test package.""" + _, data = self.migrate('export_v0.7_manual.aiida', '0.7', '0.8', migrate_v7_to_v8) - @classmethod - def setUpClass(cls, *args, **kwargs): - super().setUpClass(*args, **kwargs) - - # Utility helpers - cls.external_archive = {'filepath': 'archives', 'external_module': 'aiida-export-migration-tests'} - cls.core_archive = {'filepath': 'export/migrate'} - - def test_migrate_v7_to_v8(self): - """Test migration for file containing complete v0.7 era possibilities""" - from aiida import get_version - - # Get metadata.json and data.json as dicts from v0.7 file archive - metadata_v7, data_v7 = get_json_files('export_v0.7_simple.aiida', **self.core_archive) - verify_metadata_version(metadata_v7, version='0.7') - - # Get metadata.json and data.json as dicts from v0.8 file archive - metadata_v8, data_v8 = get_json_files('export_v0.8_simple.aiida', **self.core_archive) - verify_metadata_version(metadata_v8, version='0.8') - - # Migrate to v0.8 - migrate_v7_to_v8(metadata_v7, data_v7) - verify_metadata_version(metadata_v7, version='0.8') - - # Remove AiiDA version, since this may change irregardless of the migration function - metadata_v7.pop('aiida_version') - metadata_v8.pop('aiida_version') - - # Assert conversion message in `metadata.json` is correct and then remove it for later assertions - self.maxDiff = None # pylint: disable=invalid-name - conversion_message = 'Converted from version 0.7 to 0.8 with AiiDA v{}'.format(get_version()) - self.assertEqual( - metadata_v7.pop('conversion_info')[-1], - conversion_message, - msg='The conversion message after migration is wrong' - ) - metadata_v8.pop('conversion_info') - - # Assert changes were performed correctly - self.assertDictEqual( - metadata_v7, - metadata_v8, - msg='After migration, metadata.json should equal intended metadata.json from archives' - ) - self.assertDictEqual( - data_v7, data_v8, msg='After migration, data.json should equal intended data.json from archives' - ) - - def test_migrate_v7_to_v8_complete(self): - """Test migration for file containing complete v0.7 era possibilities""" - # Get metadata.json and data.json as dicts from v0.7 file archive - metadata, data = get_json_files('export_v0.7_manual.aiida', **self.external_archive) - verify_metadata_version(metadata, version='0.7') - - # Migrate to v0.8 - migrate_v7_to_v8(metadata, data) - verify_metadata_version(metadata, version='0.8') - - self.maxDiff = None # pylint: disable=invalid-name # Check that no links have the label '_return', since it should now be 'result' illegal_label = '_return' for link in data.get('links_uuid'): diff --git a/tests/tools/importexport/migration/test_v08_to_v09.py b/tests/tools/importexport/migration/test_v08_to_v09.py new file mode 100644 index 0000000000..4cc3e43d36 --- /dev/null +++ b/tests/tools/importexport/migration/test_v08_to_v09.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Test export file migration from export version 0.8 to 0.9""" +from aiida.tools.importexport.migration.v08_to_v09 import migrate_v8_to_v9, migration_dbgroup_type_string + +from . import ArchiveMigrationTest + + +class TestMigrate(ArchiveMigrationTest): + """Tests specific for this archive migration.""" + + def test_migrate_external(self): + """Test the migration on the test archive provided by the external test package.""" + _, data = self.migrate('export_v0.8_manual.aiida', '0.8', '0.9', migrate_v8_to_v9) + + for attributes in data.get('export_data', {}).get('Group', {}).values(): + if attributes['type_string'] not in ['core', 'core.upf', 'core.import', 'core.auto']: + raise AssertionError('encountered illegal type string `{}`'.format(attributes['type_string'])) + + def test_migration_dbgroup_type_string(self): + """Test the `migration_dbgroup_type_string` function directly.""" + + data = { + 'export_data': { + 'Group': { + '50': { + 'type_string': 'user', + }, + '51': { + 'type_string': 'data.upf', + }, + '52': { + 'type_string': 'auto.import', + }, + '53': { + 'type_string': 'auto.run', + } + } + } + } + + migration_dbgroup_type_string(data) + + self.assertEqual( + data, { + 'export_data': { + 'Group': { + '50': { + 'type_string': 'core', + }, + '51': { + 'type_string': 'core.upf', + }, + '52': { + 'type_string': 'core.import', + }, + '53': { + 'type_string': 'core.auto', + } + } + } + } + ) diff --git a/tests/tools/importexport/orm/test_codes.py b/tests/tools/importexport/orm/test_codes.py index 5a11e07b94..d8f173107b 100644 --- a/tests/tools/importexport/orm/test_codes.py +++ b/tests/tools/importexport/orm/test_codes.py @@ -24,9 +24,11 @@ class TestCode(AiidaTestCase): """Test ex-/import cases related to Codes""" def setUp(self): + super().setUp() self.reset_database() def tearDown(self): + super().tearDown() self.reset_database() @with_temp_dir diff --git a/tests/tools/importexport/test_prov_redesign.py b/tests/tools/importexport/test_prov_redesign.py index 37f9a485a0..5ef849c51c 100644 --- a/tests/tools/importexport/test_prov_redesign.py +++ b/tests/tools/importexport/test_prov_redesign.py @@ -229,7 +229,7 @@ def test_group_name_and_type_change(self, temp_dir): groups_type_string = [g.type_string for g in [group_user, group_upf]] # Assert correct type strings exists prior to export - self.assertListEqual(groups_type_string, ['user', 'data.upf']) + self.assertListEqual(groups_type_string, ['core', 'core.upf']) # Export node filename = os.path.join(temp_dir, 'export.tar.gz') @@ -268,4 +268,4 @@ def test_group_name_and_type_change(self, temp_dir): # Check type_string content of "import group" import_group = orm.load_group(imported_groups_uuid[0]) - self.assertEqual(import_group.type_string, 'auto.import') + self.assertEqual(import_group.type_string, 'core.import') diff --git a/tests/tools/visualization/test_graph.py b/tests/tools/visualization/test_graph.py index 9f15cab9ca..d48a3e6800 100644 --- a/tests/tools/visualization/test_graph.py +++ b/tests/tools/visualization/test_graph.py @@ -22,9 +22,11 @@ class TestVisGraph(AiidaTestCase): """Tests for verdi graph""" def setUp(self): + super().setUp() self.reset_database() def tearDown(self): + super().tearDown() self.reset_database() def 
create_provenance(self):
diff --git a/utils/dependency_management.py b/utils/dependency_management.py
new file mode 100755
index 0000000000..af476de3e7
--- /dev/null
+++ b/utils/dependency_management.py
@@ -0,0 +1,397 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+"""Utility CLI to manage dependencies for aiida-core."""
+
+import sys
+import re
+import json
+import subprocess
+from pathlib import Path
+from collections import OrderedDict
+from pkg_resources import Requirement, parse_requirements
+from packaging.utils import canonicalize_name
+
+import click
+import yaml
+import toml
+
+ROOT = Path(__file__).resolve().parent.parent  # repository root
+
+SETUPTOOLS_CONDA_MAPPINGS = {
+    'psycopg2-binary': 'psycopg2',
+    'graphviz': 'python-graphviz',
+}
+
+CONDA_IGNORE = ['pyblake2', r'.*python_version == \"3\.5\"']
+
+
+class DependencySpecificationError(click.ClickException):
+    """Indicates an issue in a dependency specification."""
+
+
+def _load_setup_cfg():
+    """Load the setup configuration from the 'setup.json' file."""
+    try:
+        with open(ROOT / 'setup.json') as setup_json_file:
+            return json.load(setup_json_file)
+    except json.decoder.JSONDecodeError as error:  # pylint: disable=no-member
+        raise DependencySpecificationError("Error while parsing 'setup.json' file: {}".format(error))
+    except FileNotFoundError:
+        raise DependencySpecificationError("The 'setup.json' file is missing!")
+
+
+def _load_environment_yml():
+    """Load the conda environment specification from the 'environment.yml' file."""
+    try:
+        with open(ROOT / 'environment.yml') as file:
+            return yaml.load(file, Loader=yaml.SafeLoader)
+    except yaml.error.YAMLError as error:
+        raise DependencySpecificationError("Error while parsing 'environment.yml':\n{}".format(error))
+    except FileNotFoundError as error:
+        raise DependencySpecificationError(str(error))
+
+
+def _setuptools_to_conda(req):
+    """Map package names from setuptools to conda where necessary.
+
+    This is needed when the same underlying dependency is listed under
+    different names on PyPI and conda-forge.
+    """
+
+    for pattern, replacement in SETUPTOOLS_CONDA_MAPPINGS.items():
+        if re.match(pattern, str(req)):
+            req = Requirement.parse(re.sub(pattern, replacement, str(req)))
+            break
+
+    # markers are not supported by conda
+    req.marker = None
+
+    # We need to parse the modified requirement again to ensure consistency.
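+    # A sketch of the expected behaviour (hypothetical input, assuming the
+    # 'psycopg2-binary' entry in SETUPTOOLS_CONDA_MAPPINGS above):
+    #
+    #   >>> str(_setuptools_to_conda(Requirement.parse('psycopg2-binary~=2.8')))
+    #   'psycopg2~=2.8'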
+    return Requirement.parse(str(req))
+
+
+class _Entry:
+    """Helper class to check whether a given distribution fulfills a requirement."""
+
+    def __init__(self, requirement):
+        self._req = requirement
+
+    def fulfills(self, requirement):
+        """Returns True if this entry fulfills the requirement."""
+
+        return canonicalize_name(self._req.name) == canonicalize_name(requirement.name) \
+            and self._req.specs[0][1] in requirement.specifier
+
+
+def _parse_working_set(entries):
+    for req in parse_requirements(entries):
+        yield _Entry(req)
+
+
+@click.group()
+def cli():
+    """Manage dependencies of the aiida-core package."""
+
+
+@cli.command('generate-environment-yml')
+def generate_environment_yml():
+    """Generate 'environment.yml' file."""
+
+    # needed for ordered dict, see https://stackoverflow.com/a/52621703
+    yaml.add_representer(
+        OrderedDict,
+        lambda self, data: yaml.representer.SafeRepresenter.represent_dict(self, data.items()),
+        Dumper=yaml.SafeDumper
+    )
+
+    # Read the requirements from 'setup.json'
+    setup_cfg = _load_setup_cfg()
+    install_requirements = [Requirement.parse(r) for r in setup_cfg['install_requires']]
+
+    # The python version cannot be overridden from outside environment.yml
+    # (even if it is not specified at all in environment.yml)
+    # https://github.com/conda/conda/issues/9506
+    conda_requires = ['python~=3.7']
+    for req in install_requirements:
+        if req.name == 'python' or any(re.match(ignore, str(req)) for ignore in CONDA_IGNORE):
+            continue
+        conda_requires.append(str(_setuptools_to_conda(req)))
+
+    environment = OrderedDict([
+        ('name', 'aiida'),
+        ('channels', ['conda-forge', 'defaults']),
+        ('dependencies', conda_requires),
+    ])
+
+    with open(ROOT / 'environment.yml', 'w') as env_file:
+        env_file.write('# Usage: conda env create -n myenvname -f environment.yml\n')
+        yaml.safe_dump(
+            environment, env_file, explicit_start=True, default_flow_style=False, encoding='utf-8', allow_unicode=True
+        )
+
+
+@cli.command('generate-rtd-reqs')
+def generate_requirements_for_rtd():
+    """Generate 'docs/requirements_for_rtd.txt' file."""
+
+    # Read the requirements from 'setup.json'
+    setup_cfg = _load_setup_cfg()
+    install_requirements = {Requirement.parse(r) for r in setup_cfg['install_requires']}
+    for key in ('testing', 'docs', 'rest', 'atomic_tools'):
+        install_requirements.update({Requirement.parse(r) for r in setup_cfg['extras_require'][key]})
+
+    # pylint: disable=bad-continuation
+    with open(ROOT / Path('docs', 'requirements_for_rtd.txt'), 'w') as reqs_file:
+        reqs_file.write('\n'.join(sorted(map(str, install_requirements))))
+
+
+@cli.command()
+def generate_pyproject_toml():
+    """Generate 'pyproject.toml' file."""
+
+    # Read the requirements from 'setup.json'
+    setup_cfg = _load_setup_cfg()
+    install_requirements = [Requirement.parse(r) for r in setup_cfg['install_requires']]
+
+    for requirement in install_requirements:
+        if requirement.name == 'reentry':
+            reentry_requirement = requirement
+            break
+    else:
+        raise DependencySpecificationError("Failed to find reentry requirement in 'setup.json'.")
+
+    pyproject = {
+        'build-system': {
+            'requires': ['setuptools>=40.8.0', 'wheel', str(reentry_requirement)],
+            'build-backend': 'setuptools.build_meta:__legacy__',
+        }
+    }
+    with open(ROOT / 'pyproject.toml', 'w') as file:
+        toml.dump(pyproject, file)
+
+
+@cli.command()
+@click.pass_context
+def generate_all(ctx):
+    """Generate all dependent requirement files."""
+    ctx.invoke(generate_environment_yml)
+    ctx.invoke(generate_requirements_for_rtd)
+    ctx.invoke(generate_pyproject_toml)
+
+
+@cli.command('validate-environment-yml', help="Validate 'environment.yml'.")
+def validate_environment_yml():  # pylint: disable=too-many-branches
+    """Validate that 'environment.yml' is consistent with 'setup.json'."""
+
+    # Read the requirements from 'setup.json' and 'environment.yml'.
+    setup_cfg = _load_setup_cfg()
+    install_requirements = [Requirement.parse(r) for r in setup_cfg['install_requires']]
+    python_requires = Requirement.parse('python' + setup_cfg['python_requires'])
+
+    environment_yml = _load_environment_yml()
+    try:
+        assert environment_yml['name'] == 'aiida', "environment name should be 'aiida'."
+        assert environment_yml['channels'] == [
+            'conda-forge', 'defaults'
+        ], "channels should be 'conda-forge', 'defaults'."
+    except AssertionError as error:
+        raise DependencySpecificationError("Error in 'environment.yml': {}".format(error))
+
+    try:
+        conda_dependencies = {Requirement.parse(d) for d in environment_yml['dependencies']}
+    except TypeError as error:
+        raise DependencySpecificationError("Error while parsing requirements from 'environment.yml': {}".format(error))
+
+    # Attempt to find the specification of Python among the 'environment.yml' dependencies.
+    for dependency in conda_dependencies:
+        if dependency.name == 'python':  # Found the Python dependency specification
+            conda_python_dependency = dependency
+            conda_dependencies.remove(dependency)
+            break
+    else:  # Failed to find Python dependency specification
+        raise DependencySpecificationError("Did not find specification of Python version in 'environment.yml'.")
+
+    # Every Python version supported according to 'environment.yml' should be
+    # listed among the trove classifiers in 'setup.json'.
+    for spec in conda_python_dependency.specifier:
+        expected_classifier = 'Programming Language :: Python :: ' + spec.version
+        if expected_classifier not in setup_cfg['classifiers']:
+            raise DependencySpecificationError(
+                "Trove classifier '{}' missing from 'setup.json'.".format(expected_classifier)
+            )
+
+        # The Python version should be specified as supported in 'setup.json'.
+        if not any(spec.version >= other_spec.version for other_spec in python_requires.specifier):
+            raise DependencySpecificationError(
+                "The Python version requirements in 'setup.json' and 'environment.yml' are not consistent."
+            )
+
+        break
+    else:
+        raise DependencySpecificationError("Missing specifier: '{}'.".format(conda_python_dependency))
+
+    # Check that all requirements specified in the setup.json file are found in the
+    # conda environment specification.
+    for req in install_requirements:
+        if any(re.match(ignore, str(req)) for ignore in CONDA_IGNORE):
+            continue  # skip explicitly ignored packages
+
+        try:
+            conda_dependencies.remove(_setuptools_to_conda(req))
+        except KeyError:
+            raise DependencySpecificationError("Requirement '{}' not specified in 'environment.yml'.".format(req))
+
+    # At this point no conda dependency should be left over: the Python dependency
+    # was already removed above, and every other entry should have matched one of
+    # the requirements in 'setup.json'.
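+    # For example, a hypothetical leftover entry such as 'some-conda-only-tool'
+    # in 'environment.yml', with no counterpart in 'setup.json', would be
+    # reported by the error below.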
+    if conda_dependencies:
+        raise DependencySpecificationError(
+            "The 'environment.yml' file contains dependencies that are missing "
+            "in 'setup.json':\n- {}".format('\n- '.join(map(str, conda_dependencies)))
+        )
+
+    click.secho('Conda dependency specification is consistent.', fg='green')
+
+
+@cli.command('validate-rtd-reqs', help="Validate 'docs/requirements_for_rtd.txt'.")
+def validate_requirements_for_rtd():
+    """Validate that 'docs/requirements_for_rtd.txt' is consistent with 'setup.json'."""
+
+    # Read the requirements from 'setup.json'
+    setup_cfg = _load_setup_cfg()
+    install_requirements = {Requirement.parse(r) for r in setup_cfg['install_requires']}
+    for key in ('testing', 'docs', 'rest', 'atomic_tools'):
+        install_requirements.update({Requirement.parse(r) for r in setup_cfg['extras_require'][key]})
+
+    with open(ROOT / Path('docs', 'requirements_for_rtd.txt')) as reqs_file:
+        reqs = {Requirement.parse(r) for r in reqs_file}
+
+    if reqs != install_requirements:
+        raise DependencySpecificationError("The requirements for RTD are inconsistent with 'setup.json'.")
+
+    click.secho('RTD requirements specification is consistent.', fg='green')
+
+
+@cli.command('validate-pyproject-toml', help="Validate 'pyproject.toml'.")
+def validate_pyproject_toml():
+    """Validate that 'pyproject.toml' is consistent with 'setup.json'."""
+
+    # Read the requirements from 'setup.json'
+    setup_cfg = _load_setup_cfg()
+    install_requirements = [Requirement.parse(r) for r in setup_cfg['install_requires']]
+
+    for requirement in install_requirements:
+        if requirement.name == 'reentry':
+            reentry_requirement = requirement
+            break
+    else:
+        raise DependencySpecificationError("Failed to find reentry requirement in 'setup.json'.")
+
+    try:
+        with open(ROOT / 'pyproject.toml') as file:
+            pyproject = toml.load(file)
+            pyproject_requires = [Requirement.parse(r) for r in pyproject['build-system']['requires']]
+
+        if reentry_requirement not in pyproject_requires:
+            raise DependencySpecificationError(
+                "Missing requirement '{}' in 'pyproject.toml'.".format(reentry_requirement)
+            )
+
+    except FileNotFoundError:
+        raise DependencySpecificationError("The 'pyproject.toml' file is missing!")
+
+    click.secho('Pyproject.toml dependency specification is consistent.', fg='green')
+
+
+@cli.command('validate-all', help='Validate consistency of all requirements.')
+@click.pass_context
+def validate_all(ctx):
+    """Validate consistency of all requirement specifications of the package.
+
+    Validates that the specification of requirements/dependencies is consistent across
+    the following files:
+
+    - setup.py
+    - setup.json
+    - environment.yml
+    - pyproject.toml
+    - docs/requirements_for_rtd.txt
+    """
+
+    ctx.invoke(validate_environment_yml)
+    ctx.invoke(validate_requirements_for_rtd)
+    ctx.invoke(validate_pyproject_toml)
+
+
+@cli.command()
+@click.argument('extras', nargs=-1)
+def check_requirements(extras):
+    """Check the 'requirements/*.txt' files.
+
+    Checks that the environments specified in the requirements files
+    match all the dependencies specified in 'setup.json'.
+
+    The arguments allow specifying which 'extra' requirements to expect.
+    Use 'DEFAULT' to select 'atomic_tools', 'docs', 'notebook', 'rest', and 'testing'.
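+
+    For example, to check against only the 'docs' and 'testing' extras
+    (an illustrative invocation from the repository root):
+
+        python ./utils/dependency_management.py check-requirements docs testing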
+
+    """
+
+    if len(extras) == 1 and extras[0] == 'DEFAULT':
+        extras = ['atomic_tools', 'docs', 'notebook', 'rest', 'testing']
+
+    # Read the requirements from 'setup.json'
+    setup_cfg = _load_setup_cfg()
+    install_requires = setup_cfg['install_requires']
+    for extra in extras:
+        install_requires.extend(setup_cfg['extras_require'][extra])
+    install_requires = set(parse_requirements(install_requires))
+
+    for fn_req in (ROOT / 'requirements').iterdir():
+        env = {'python_version': re.match(r'.*-py-(.*)\.txt', str(fn_req)).groups()[0]}
+        required = {r for r in install_requires if r.marker is None or r.marker.evaluate(env)}
+
+        with open(fn_req) as req_file:
+            working_set = list(_parse_working_set(req_file))
+            installed = {req for req in required for entry in working_set if entry.fulfills(req)}
+
+        not_installed = required.difference(installed)
+        if not_installed:  # switch to assignment expression after yapf supports 3.8
+            raise DependencySpecificationError(
+                f"Environment specified in '{fn_req.relative_to(ROOT)}' is missing matches for:\n" +
+                '\n'.join(' - ' + str(f) for f in not_installed)
+            )
+
+    click.secho("Requirements files appear to be in sync with specifications in 'setup.json'.", fg='green')
+
+
+@cli.command()
+@click.argument('extras', nargs=-1)
+def pip_install_extras(extras):
+    """Install extra requirements.
+
+    For example:
+
+        pip-install-extras docs
+
+    This will install *only* the extra requirements for docs, without triggering
+    the installation of the main install requirements of the aiida-core package.
+    """
+    # Read the requirements from 'setup.json'
+    setup_cfg = _load_setup_cfg()
+
+    to_install = set()
+    for key in extras:
+        to_install.update(Requirement.parse(r) for r in setup_cfg['extras_require'][key])
+
+    cmd = [sys.executable, '-m', 'pip', 'install'] + [str(r) for r in to_install]
+    subprocess.run(cmd, check=True)
+
+
+if __name__ == '__main__':
+    cli()  # pylint: disable=no-value-for-parameter
diff --git a/utils/plugin_tpl/calculation.tpl b/utils/plugin_tpl/calculation.tpl
deleted file mode 100644
index 5cab031e0f..0000000000
--- a/utils/plugin_tpl/calculation.tpl
+++ /dev/null
@@ -1,126 +0,0 @@
-#-*- coding: utf8 -*-
-"""
-defines {{classname}}
-"""
-from aiida.orm import JobCalculation
-
-
-class {{classname}}(JobCalculation):
-    """TODO: describe the calculation"""
-
-    def _init_internal_params(self):
-        """Initialize internal parameters"""
-        super()._init_internal_params()
-
-        self._INPUT_FILE_NAME = '{{ifilename}}'
-        self._OUTPUT_FILE_NAME = '{{ofilename}}'
-        self._default_parser = '{{parser}}'
-
-    @classproperty
-    def _use_methods(cls):
-        """
-        input node declaration hook
-        """
-
-        '''
-        Start by getting the _use_methods from super and update the dictionary
-        before returning it.
-
-        Each entry should look like this::
-
-            '': { # the input will be set with calc.use_(Data)
-                'valid_types': ,
-                'additional_parameter': ,
-                # -> use__(Data)
-                'linkname': 
-                # The name attached to the link in the db between the input
-                # and the calculation. Will be used for queries.
- 'docstring': - } - ''' - retdict = super()._use_methods - retdict.update({ - {% for item in inputs %} - '{{item.name}}: { - 'valid_types': {{item.types}}, - 'additional_parameter': {{item.adn_par}}, - 'linkname': '{{item.get("lname", item.name)}}' - 'docstring': '{{item.docstring}}' - }, - {% endfor %} - }) - return retdict - - def _prepare_for_submission(self, tempfolder, inputdict): - """ - Hook for the deamon to create input files and do everything - else necessary before submitting the calculation to the computer. - - :param tempfolder: all input files should be put into this :py:class:`aiida.common.folders.Folder` subclass - :param inputdict: a dictionary containing all the inputs, keys are link names - """ - self.verify_inputs(self, inputdict) - - self._write_inputfiles(self, tempfolder, inputdict) - - calcinfo = CalcInfo() - calcinfo.uuid = self.uuid - '''list of files to copy to the computer''' - calcinfo.local_copy_list = [] # [('', '')] - calcinfo.remote_copy_list = [] # [('', '', '')] - calcinfo.retrieve_list = [self._OUTPUT_FILE_NAME] # add all files to be parsed - - code = inputdict['code'] - codeinfo = CodeInfo() - codeinfo.cmdline_params = [] # example: ['-i {}'.format(self._INPUT_FILE_NAME)] - codeinfo.code_uuid = code.uuid - - calcinfo.codes_info = [codeinfo] - - return calcinfo - - def verify_inputs(self, inputdict): - """ - ensure required input nodes are given, of the right type and nothing else - - raise ValidationError() otherwise - - example required node:: - - try: - param_name = inputdict.pop(self.get_linkname(param_name)) - except KeyError: - raise InputValidationError("Missing: param_name") - - if not isinstance(param_name, param_type(s)): - raise InputValidationError("Wrong type: param_name") - - example no superfluous nodes:: - - # after pop() - ing all expected nodes - if inputdict: - raise ValidationError("Superflous input nodes!") - """ - - '''TODO: implement input checks''' - - def _write_input_files(self, tempfolder, inputdict): - """ - write inputfiles to a temporary folder in preparation to submitting - - example using json input format:: - - # Dict input nodes - input_params = inputdict['param_name'].get_dict() - secondary_params = inputdict['secondary_name'].get_dict() - - input_filename = tempfolder.get_abs_path(self._INPUT_FILE_NAME) - with open(input_filename, 'w') as infile: - json.dump(input_params, infile) - - secondary_input_filename = tempfolder.get_abs_path('secondary.inp') - with open(secondary_input_filename, 'w') as infile: - json.dump(secondary_params, infile) - """ - - '''TODO: implement input file writing diff --git a/utils/update_dependencies.py b/utils/update_dependencies.py deleted file mode 100755 index 2987f33196..0000000000 --- a/utils/update_dependencies.py +++ /dev/null @@ -1,142 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Utility CLI to update dependency version requirements of the `setup.json`.""" - -import copy -import os -import click - -from validate_consistency import get_setup_json, write_setup_json - -FILENAME_SETUP_JSON = 'setup.json' -SCRIPT_PATH = os.path.split(os.path.realpath(__file__))[0] -ROOT_DIR = os.path.join(SCRIPT_PATH, os.pardir) -FILEPATH_SETUP_JSON = os.path.join(ROOT_DIR, FILENAME_SETUP_JSON) -DEFAULT_EXCLUDE_LIST = ['django', 'circus', 'numpy', 'pymatgen', 'ase', 'monty', 'pyyaml'] - - -@click.group() -def cli(): - """Utility to update dependency requirements for `aiida-core`. - - Since `aiida-core` fixes the versions of almost all of its dependencies, once in a while these need to be updated. - This is a manual process, but this CLI attempts to simplify it somewhat. The idea is to remote all explicit version - restrictions from the `setup.json`, except for those packages where it is known that a upper limit is necessary. - This is accomplished by the command: - - python update_dependencies.py unrestrict - - The command will update the `setup.json` to remove all explicit limits, except for those packages specified by the - `--exclude` option. After this step, install `aiida-core` through pip with the `[all]` flag to install all optional - extra requirements as well. Since there are no explicit version requirements anymore, pip should install the latest - available version for each dependency. - - Once all the tests complete successfully, run the following command: - - pip freeze > requirements.txt - - This will now capture the exact versions of the packages installed in the virtual environment. Since the tests run - for this setup, we can now set those versions as the new requirements in the `setup.json`. Note that this is why a - clean virtual environment should be used for this entire procedure. Now execute the command: - - python update_dependencies.py update requirements.txt - - This will now update the `setup.json` to reinstate the exact version requirements for all dependencies. Commit the - changes to `setup.json` and make a pull request. - """ - - -@cli.command('unrestrict') -@click.option('--exclude', multiple=True, help='List of package names to exclude from updating.') -def unrestrict_requirements(exclude): - """Remove all explicit dependency version restrictions from `setup.json`. - - Warning, this currently only works for dependency requirements that use the `==` operator. Statements with different - operators, additional filters after a semicolon, or with extra requirements (using `[]`) are not supported. The - limits for these statements will have to be updated manually. 
- """ - setup = get_setup_json() - clone = copy.deepcopy(setup) - clone['install_requires'] = [] - - if exclude: - exclude = list(exclude).extend(DEFAULT_EXCLUDE_LIST) - else: - exclude = DEFAULT_EXCLUDE_LIST - - for requirement in setup['install_requires']: - if requirement in exclude or ';' in requirement or '==' not in requirement: - clone['install_requires'].append(requirement) - else: - package = requirement.split('==')[0] - clone['install_requires'].append(package) - - for extra, requirements in setup['extras_require'].items(): - clone['extras_require'][extra] = [] - - for requirement in requirements: - if requirement in exclude or ';' in requirement or '==' not in requirement: - clone['extras_require'][extra].append(requirement) - else: - package = requirement.split('==')[0] - clone['extras_require'][extra].append(package) - - write_setup_json(clone) - - -@cli.command('update') -@click.argument('requirements', type=click.File(mode='r')) -def update_requirements(requirements): - """Apply version restrictions from REQUIREMENTS. - - The REQUIREMENTS file should contain the output of `pip freeze`. - """ - setup = get_setup_json() - - package_versions = [] - - for requirement in requirements.readlines(): - try: - package, version = requirement.strip().split('==') - package_versions.append((package, version)) - except ValueError: - continue - - requirements = set() - - for requirement in setup['install_requires']: - for package, version in package_versions: - if requirement.lower() == package.lower(): - requirements.add('{}=={}'.format(package.lower(), version)) - break - else: - requirements.add(requirement) - - setup['install_requires'] = sorted(requirements) - - for extra, extra_requirements in setup['extras_require'].items(): - requirements = set() - - for requirement in extra_requirements: - for package, version in package_versions: - if requirement.lower() == package.lower(): - requirements.add('{}=={}'.format(package.lower(), version)) - break - else: - requirements.add(requirement) - - setup['extras_require'][extra] = sorted(requirements) - - write_setup_json(setup) - - -if __name__ == '__main__': - cli() # pylint: disable=no-value-for-parameter diff --git a/utils/validate_consistency.py b/utils/validate_consistency.py index 6604347ded..a771a75449 100644 --- a/utils/validate_consistency.py +++ b/utils/validate_consistency.py @@ -22,7 +22,6 @@ import sys import json from collections import OrderedDict -import toml import click FILENAME_TOML = 'pyproject.toml' @@ -229,94 +228,5 @@ def validate_version(): sys.exit(1) -@cli.command('toml') -def validate_pyproject(): - """Ensure that the version of reentry in setup.json and pyproject.toml are identical.""" - reentry_requirement = None - for requirement in get_setup_json()['install_requires']: - if 'reentry' in requirement: - reentry_requirement = requirement - break - - if reentry_requirement is None: - click.echo('Could not find the reentry requirement in {}'.format(FILEPATH_SETUP_JSON), err=True) - sys.exit(1) - - try: - with open(FILEPATH_TOML, 'r') as handle: - toml_string = handle.read() - except IOError as exception: - click.echo('Could not read the required file: {}'.format(FILEPATH_TOML), err=True) - sys.exit(1) - - try: - parsed_toml = toml.loads(toml_string) - except Exception as exception: # pylint: disable=broad-except - click.echo('Could not parse {}: {}'.format(FILEPATH_TOML, exception), err=True) - sys.exit(1) - - try: - pyproject_toml_requires = parsed_toml['build-system']['requires'] - except KeyError as exception: - 
click.echo('Could not retrieve the build-system requires list from {}'.format(FILEPATH_TOML), err=True) - sys.exit(1) - - if reentry_requirement not in pyproject_toml_requires: - click.echo( - 'Reentry requirement from {} {} is not mirrored in {}'.format( - FILEPATH_SETUP_JSON, reentry_requirement, FILEPATH_TOML - ), - err=True - ) - sys.exit(1) - - -@cli.command('conda') -def update_environment_yml(): - """Update `environment.yml` file for conda.""" - import yaml - import re - - # needed for ordered dict, see https://stackoverflow.com/a/52621703 - yaml.add_representer( - OrderedDict, - lambda self, data: yaml.representer.SafeRepresenter.represent_dict(self, data.items()), - Dumper=yaml.SafeDumper - ) - - # fix incompatibilities between conda and pypi - replacements = {'psycopg2-binary': 'psycopg2', 'graphviz': 'python-graphviz'} - install_requires = get_setup_json()['install_requires'] - - # python version cannot be overriden from outside environment.yml - # (even if it is not specified at all in environment.yml) - # https://github.com/conda/conda/issues/9506 - conda_requires = ['python~=3.7'] - for req in install_requires: - # skip packages required for specific python versions - # (environment.yml aims at the latest python version) - if req.find('python_version') != -1: - continue - - for (regex, replacement) in iter(replacements.items()): - req = re.sub(regex, replacement, req) - - conda_requires.append(req) - - environment = OrderedDict([ - ('name', 'aiida'), - ('channels', ['defaults', 'conda-forge', 'etetoolkit']), - ('dependencies', conda_requires), - ]) - - environment_filename = 'environment.yml' - file_path = os.path.join(ROOT_DIR, environment_filename) - with open(file_path, 'w') as env_file: - env_file.write('# Usage: conda env create -n myenvname -f environment.yml\n') - yaml.safe_dump( - environment, env_file, explicit_start=True, default_flow_style=False, encoding='utf-8', allow_unicode=True - ) - - if __name__ == '__main__': cli() # pylint: disable=no-value-for-parameter