diff --git a/changes/1513.general.rst b/changes/1513.general.rst
new file mode 100644
index 000000000..91a475dc0
--- /dev/null
+++ b/changes/1513.general.rst
@@ -0,0 +1 @@
+remove ``okify_regtests`` script (move to ``ci_watson``)
diff --git a/pyproject.toml b/pyproject.toml
index 0a64a84bb..337409529 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -85,7 +85,6 @@ webbpsf = "pytest_plugin.webbpsf_plugin"
 
 [project.scripts]
 roman_static_preview = "romancal.scripts.static_preview:command"
-okify_regtests = "romancal.scripts.okify_regtests:main"
 schema_editor = "romancal.scripts.schema_editor:main"
 schemadoc = "romancal.scripts.schemadoc:main"
 verify_install_requires = "romancal.scripts.verify_install_requires:main"
diff --git a/romancal/scripts/okify_regtests.py b/romancal/scripts/okify_regtests.py
deleted file mode 100755
index f0a71fe48..000000000
--- a/romancal/scripts/okify_regtests.py
+++ /dev/null
@@ -1,153 +0,0 @@
-#!/usr/bin/env python
-"""
-Regression test okifying script.
-
-Requires JFrog CLI (https://jfrog.com/getcli/) configured with credentials
-that have write access to the roman-pipeline repository.
-"""
-
-import os
-import shutil
-import subprocess
-import tempfile
-from argparse import ArgumentParser
-from contextlib import contextmanager
-from glob import glob
-
-import asdf
-
-ARTIFACTORY_REPO = "roman-pipeline-results/regression-tests/runs"
-SPECFILE_SUFFIX = "_okify.json"
-RTDATA_SUFFIX = "_rtdata.asdf"
-TERMINAL_WIDTH = shutil.get_terminal_size((80, 20)).columns
-
-
-def parse_args():
-    parser = ArgumentParser(description="Okify regression test results")
-    parser.add_argument(
-        "build_number",
-        help="GitHub Actions build number for Roman builds",
-        metavar="build-number",
-    )
-    parser.add_argument(
-        "--dry-run", action="store_true", help="pass the --dry-run flag to JFrog CLI"
-    )
-
-    return parser.parse_args()
-
-
-def artifactory_copy(specfile, dry_run=False):
-    jfrog_args = []
-
-    if dry_run:
-        jfrog_args.append("--dry-run")
-
-    args = list(["jfrog", "rt", "cp"] + jfrog_args + [f"--spec={specfile}"])
-    subprocess.run(args, check=True)
-
-
-def artifactory_get_breadcrumbs(build_number, suffix):
-    """Download specfiles or other breadcrump from Artifactory associated with
-    a build number and return a list of their locations on the local file system
-
-    An example search would be:
-
-    jfrog rt search roman-pipeline-results/regression-tests/runs/*/*_okify.json --props='build.number=540;build.name=RT :: romancal'
-    """  # noqa: E501
-
-    # Retreive all the okify specfiles for failed tests.
-    args = list(
-        ["jfrog", "rt", "dl"]
-        + [f"{ARTIFACTORY_REPO}/*_GITHUB_CI_*-{build_number}/*{suffix}"]
-    )
-    subprocess.run(args, check=True, capture_output=True)
-
-    return sorted(glob(f"**/**/**/**/*{suffix}"))
-
-
-def artifactory_get_build_artifacts(build_number):
-    specfiles = artifactory_get_breadcrumbs(build_number, SPECFILE_SUFFIX)
-    asdffiles = artifactory_get_breadcrumbs(build_number, RTDATA_SUFFIX)
-
-    if len(specfiles) != len(asdffiles):
-        raise RuntimeError("Different number of _okify.json and _rtdata.asdf files")
-
-    for a, b in zip(specfiles, asdffiles):
-        if a.replace(SPECFILE_SUFFIX, "") != b.replace(RTDATA_SUFFIX, ""):
-            raise RuntimeError("The _okify.json and _rtdata.asdf files are not matched")
-
-    return specfiles, asdffiles
-
-
-@contextmanager
-def pushd(newdir):
-    prevdir = os.getcwd()
-    os.chdir(os.path.expanduser(newdir))
-    try:
-        yield
-    finally:
-        os.chdir(prevdir)
-
-
-def main():
-    args = parse_args()
-
-    build = args.build_number
-
-    # Create and chdir to a temporary directory to store specfiles
-    with tempfile.TemporaryDirectory() as tmpdir:
-        print(f"Downloading test okify artifacts to local directory {tmpdir}")
-        with pushd(tmpdir):
-            # Retreive all the okify specfiles for failed tests.
-            specfiles, asdffiles = artifactory_get_build_artifacts(build)
-
-            number_failed_tests = len(specfiles)
-
-            print(f"{number_failed_tests} failed tests to okify")
-
-            for i, (specfile, asdffile) in enumerate(zip(specfiles, asdffiles)):
-                # Print traceback and OKify info for this test failure
-                with asdf.open(asdffile) as af:
-                    traceback = af.tree["traceback"]
-                    remote_results_path = af.tree["remote_results_path"]
-                    output = af.tree["output"]
-                    truth_remote = af.tree["truth_remote"]
-                    try:
-                        test_name = af.tree["test_name"]
-                    except KeyError:
-                        test_name = "test_name"
-
-                remote_results = os.path.join(
-                    remote_results_path, os.path.basename(output)
-                )
-
-                test_number = i + 1
-
-                print(f" {test_name} ".center(TERMINAL_WIDTH, "—"))
-                print(traceback)
-                print("—" * TERMINAL_WIDTH)
-                print(f"OK: {remote_results}")
-                print(f"--> {truth_remote}")
-                print(
-                    f"[ test {test_number} of {number_failed_tests} ]".center(
-                        TERMINAL_WIDTH, "—"
-                    )
-                )
-
-                # Ask if user wants to okify this test
-                while True:
-                    result = input("Enter 'o' to okify, 's' to skip: ")
-                    if result not in ["o", "s"]:
-                        print(f"Unrecognized command '{result}', try again")
-                    else:
-                        break
-
-                if result == "s":
-                    print("Skipping\n")
-                else:
-                    artifactory_copy(os.path.abspath(specfile), dry_run=args.dry_run)
-                    print("")
-
-
-if __name__ == "__main__":
-    main()
diff --git a/romancal/scripts/tests/test_scripts.py b/romancal/scripts/tests/test_scripts.py
index a09b2ed9e..2bfd109e1 100644
--- a/romancal/scripts/tests/test_scripts.py
+++ b/romancal/scripts/tests/test_scripts.py
@@ -3,7 +3,6 @@
 import pytest
 
 SCRIPTS = [
-    "okify_regtests",
     "verify_install_requires",
 ]
 