From 78974e508af8548a59383810d5f113d1275cf06a Mon Sep 17 00:00:00 2001
From: Alex Goodman
Date: Mon, 2 Dec 2024 12:46:35 -0500
Subject: [PATCH] replace cli bash test harness with python

Signed-off-by: Alex Goodman
---
 manager/src/grype_db_manager/cli/db.py        |  46 +++-
 manager/src/grype_db_manager/cli/listing.py   |   2 +-
 .../grype_db_manager/data/schema-info.json    |   5 +
 manager/src/grype_db_manager/db/listing.py    |   4 +-
 manager/src/grype_db_manager/db/metadata.py   |   2 +-
 manager/src/grype_db_manager/grypedb.py       |  78 ++++++-
 manager/src/grype_db_manager/s3utils.py       |   4 +-
 manager/tests/cli/.grype-db.yaml              |   2 +-
 manager/tests/cli/Makefile                    |  11 +-
 manager/tests/cli/README.md                   |   5 +-
 manager/tests/cli/conftest.py                 | 198 ++++++++++++++++
 manager/tests/cli/run.sh                      |  35 ---
 ...rkflow-3.py => setup-legacy-workflow-3.py} |   0
 ...rkflow-4.py => setup-legacy-workflow-4.py} |   0
 manager/tests/cli/s3-mock/setup-workflow-1.py |  37 +++
 manager/tests/cli/test_legacy_workflows.py    | 220 ++++++++++++++++++
 manager/tests/cli/test_workflows.py           | 126 ++++++++++
 manager/tests/cli/utils.sh                    | 128 ----------
 .../cli/workflow-1-create-and-delete-db.sh    |  33 ---
 manager/tests/cli/workflow-2-validate-db.sh   |  56 -----
 .../tests/cli/workflow-3-update-listing.sh    |  64 -----
 manager/tests/cli/workflow-4-full-publish.sh  |  84 ------
 pkg/process/package.go                        |   4 +-
 poetry.lock                                   |   2 +-
 pyproject.toml                                |   4 +-
 25 files changed, 712 insertions(+), 438 deletions(-)
 create mode 100644 manager/tests/cli/conftest.py
 delete mode 100755 manager/tests/cli/run.sh
 rename manager/tests/cli/s3-mock/{setup-workflow-3.py => setup-legacy-workflow-3.py} (100%)
 rename manager/tests/cli/s3-mock/{setup-workflow-4.py => setup-legacy-workflow-4.py} (100%)
 create mode 100644 manager/tests/cli/s3-mock/setup-workflow-1.py
 create mode 100644 manager/tests/cli/test_legacy_workflows.py
 create mode 100644 manager/tests/cli/test_workflows.py
 delete mode 100755 manager/tests/cli/utils.sh
 delete mode 100755 manager/tests/cli/workflow-1-create-and-delete-db.sh
 delete mode 100755 manager/tests/cli/workflow-2-validate-db.sh
 delete mode 100755 manager/tests/cli/workflow-3-update-listing.sh
 delete mode 100755 manager/tests/cli/workflow-4-full-publish.sh

diff --git a/manager/src/grype_db_manager/cli/db.py b/manager/src/grype_db_manager/cli/db.py
index 55c565b4..e6d12774 100644
--- a/manager/src/grype_db_manager/cli/db.py
+++ b/manager/src/grype_db_manager/cli/db.py
@@ -54,6 +54,12 @@ def clear_dbs(cfg: config.Application) -> None:
         click.echo("no databases to clear")
 
 
+def remove_db(cfg: config.Application, db_uuid: str) -> None:
+    db_manager = DBManager(root_dir=cfg.data.root)
+    if db_manager.remove_db(db_uuid=db_uuid):
+        click.echo(f"database {db_uuid!r} removed")
+        return
+    click.echo(f"no database found with session id {db_uuid}")
+
+
 @group.command(name="build", help="build and validate a grype database")
 @click.option("--schema-version", "-s", required=True, help="the DB schema version to build")
 @click.pass_obj
@@ -119,9 +125,17 @@ def validate_db(
         click.echo(f"no database found with session id {db_uuid}")
         return
 
+    if db_info.schema_version >= 6:
+        # TODO: not implemented yet
+        raise NotImplementedError("validation for schema v6+ is not yet implemented")
+
     if not skip_namespace_check:
-        # ensure the minimum number of namespaces are present
-        db_manager.validate_namespaces(db_uuid=db_uuid)
+        if db_info.schema_version < 6:
+            # ensure the minimum number of namespaces are present
+            db_manager.validate_namespaces(db_uuid=db_uuid)
+        else:
+            # TODO: implement me
+            raise NotImplementedError("namespace validation for schema v6+ is not yet implemented")
 
     # resolve tool versions and install them
     yardstick.store.config.set_values(store_root=cfg.data.yardstick_root)
@@ -208,22 +222,34 @@ def upload_db(cfg: config.Application, db_uuid: str, ttl_seconds: int) -> None:
     db_manager = DBManager(root_dir=cfg.data.root)
     db_info = db_manager.get_db_info(db_uuid=db_uuid)
 
-    key = f"{s3_path}/{os.path.basename(db_info.archive_path)}"
+    if db_info.schema_version >= 6:
+        if not db_info.latest_path or not os.path.exists(db_info.latest_path):
+            raise ValueError(f"latest.json file not found for DB {db_uuid!r}")
+
+        # /databases -> /databases/v6 (dynamic based on the schema version)
+        s3_path = f"{s3_path}/v{db_info.schema_version}"
 
-    # TODO: we have folks that require legacy behavior, where the content type was application/x-tar
-    kwargs = {}
-    if db_info.archive_path.endswith(".tar.gz"):
-        kwargs["ContentType"] = "application/x-tar"
+    db_key = f"{s3_path}/{os.path.basename(db_info.archive_path)}"
+    latest_key = f"{s3_path}/latest.json"
 
     s3utils.upload_file(
         bucket=s3_bucket,
-        key=key,
+        key=db_key,
         path=db_info.archive_path,
         CacheControl=f"public,max-age={ttl_seconds}",
-        **kwargs,
     )
 
-    click.echo(f"DB {db_uuid!r} uploaded to s3://{s3_bucket}/{s3_path}")
+    click.echo(f"DB archive {db_uuid!r} uploaded to s3://{s3_bucket}/{s3_path}")
+
+    if db_info.schema_version >= 6:
+        s3utils.upload_file(
+            bucket=s3_bucket,
+            key=latest_key,
+            path=db_info.latest_path,
+            CacheControl="public,max-age=300",  # 5 minutes
+        )
+
+        click.echo(f"DB latest.json {db_uuid!r} uploaded to s3://{s3_bucket}/{s3_path}")
 
 
 @group.command(name="build-and-upload", help="upload a grype database")
diff --git a/manager/src/grype_db_manager/cli/listing.py b/manager/src/grype_db_manager/cli/listing.py
index 41730310..43f528c5 100644
--- a/manager/src/grype_db_manager/cli/listing.py
+++ b/manager/src/grype_db_manager/cli/listing.py
@@ -9,7 +9,7 @@ from grype_db_manager.db.format import Format
 
 
-@click.group(name="listing", help="manage the grype-db listing file")
+@click.group(name="listing", help="manage the grype-db listing file (only schemas v1-v5)")
 @click.pass_obj
 def group(_: config.Application) -> None:
     pass
diff --git a/manager/src/grype_db_manager/data/schema-info.json b/manager/src/grype_db_manager/data/schema-info.json
index c68f65e1..3038806c 100644
--- a/manager/src/grype_db_manager/data/schema-info.json
+++ b/manager/src/grype_db_manager/data/schema-info.json
@@ -24,6 +24,11 @@
       "schema": "5",
       "grype-version": "main",
       "supported": true
+    },
+    {
+      "schema": "6",
+      "grype-version": "main",
+      "supported": false
     }
   ]
 }
diff --git a/manager/src/grype_db_manager/db/listing.py b/manager/src/grype_db_manager/db/listing.py
index ddd06fef..dbbd49af 100644
--- a/manager/src/grype_db_manager/db/listing.py
+++ b/manager/src/grype_db_manager/db/listing.py
@@ -24,7 +24,7 @@
 
 LISTING_FILENAME = "listing.json"
 
-
+# Entry is a dataclass that represents a single entry from a listing.json for schemas v1-v5.
 @dataclass
 class Entry:
     built: str
@@ -45,7 +45,7 @@ def age_in_days(self, now: datetime.datetime | None = None) -> int:
             now = datetime.datetime.now(tz=datetime.timezone.utc)
         return (now - iso8601.parse_date(self.built)).days
 
-
+# Listing is a dataclass that represents the listing.json for schemas v1-v5.
 @dataclass
 class Listing:
     available: dict[int, list[Entry]]
diff --git a/manager/src/grype_db_manager/db/metadata.py b/manager/src/grype_db_manager/db/metadata.py
index 633b9306..0a35babe 100644
--- a/manager/src/grype_db_manager/db/metadata.py
+++ b/manager/src/grype_db_manager/db/metadata.py
@@ -9,7 +9,7 @@
 
 FILE = "metadata.json"
 
-
+# Metadata is a dataclass that represents the metadata.json for schemas v1-v5.
 @dataclass
 class Metadata:
     built: str
diff --git a/manager/src/grype_db_manager/grypedb.py b/manager/src/grype_db_manager/grypedb.py
index d5aecab2..26f11834 100644
--- a/manager/src/grype_db_manager/grypedb.py
+++ b/manager/src/grype_db_manager/grypedb.py
@@ -249,6 +249,7 @@ class DBInfo:
     db_created: datetime.datetime
     data_created: datetime.datetime
     archive_path: str
+    latest_path: str | None = None
 
 
 class DBInvalidException(Exception):
@@ -289,6 +290,12 @@ def list_namespaces(self, db_uuid: str) -> list[str]:
         # a sqlite3 db
         db_path = os.path.join(build_dir, "vulnerability.db")
 
+        # check if there is a metadata.json file in the build directory
+        metadata_path = os.path.join(build_dir, "metadata.json")
+        if not os.path.exists(metadata_path):
+            msg = f"missing metadata.json for DB {db_uuid!r}"
+            raise DBInvalidException(msg)
+
         # select distinct values in the "namespace" column of the "vulnerability" table
         con = sqlite3.connect(db_path)
         crsr = con.cursor()
@@ -322,14 +329,8 @@ def get_db_info(self, db_uuid: str) -> DBInfo | None:
         with open(timestamp_path) as f:
             db_created_timestamp = datetime.datetime.fromisoformat(f.read())
 
-        # read info from the metadata file in build/metadata.json
-        metadata_path = os.path.join(session_dir, "build", "metadata.json")
-        if not os.path.exists(metadata_path):
-            msg = f"missing metadata.json for DB {db_uuid!r}"
-            raise DBInvalidException(msg)
-
-        with open(metadata_path) as f:
-            metadata = json.load(f)
+        # read info from the metadata file in build/metadata.json (v1 - v5) or build/latest.json (v6+)
+        metadata = db_metadata(build_dir=os.path.join(session_dir, "build"))
 
         stage_dir, _ = self.db_paths(db_uuid=db_uuid)
         db_pattern = os.path.join(
@@ -347,13 +348,18 @@ def get_db_info(self, db_uuid: str) -> DBInfo | None:
 
         abs_archive_path = os.path.abspath(matches[0])
 
+        db_created = db_created_timestamp.strftime("%Y-%m-%dT%H:%M:%SZ")
+        if "db_created" in metadata:
+            db_created = metadata["db_created"]
+
         return DBInfo(
             uuid=db_uuid,
             schema_version=metadata["version"],
-            db_checksum=metadata["checksum"],
-            db_created=db_created_timestamp.strftime("%Y-%m-%dT%H:%M:%SZ"),
-            data_created=metadata["built"],
+            db_checksum=metadata["db_checksum"],
+            db_created=db_created,
+            data_created=metadata["data_created"],
             archive_path=abs_archive_path,
+            latest_path=metadata.get("latest_path", None),
         )
 
     def list_dbs(self) -> list[DBInfo]:
@@ -372,6 +378,54 @@ def list_dbs(self) -> list[DBInfo]:
 
         return sorted(sessions, key=lambda x: x.db_created)
 
+    def remove_db(self, db_uuid: str) -> bool:
+        session_dir = os.path.join(self.db_dir, db_uuid)
+        if os.path.exists(session_dir):
+            shutil.rmtree(session_dir)
+            return True
+        return False
+
+
+def db_metadata(build_dir: str) -> dict:
+    metadata_path = os.path.join(build_dir, "metadata.json")
+
+    if os.path.exists(metadata_path):
+        # supports v1 - v5
+        with open(metadata_path) as f:
+            metadata = json.load(f)
+        return {
+            "version": int(metadata["version"]),
+            "db_checksum": metadata["checksum"],
+            "data_created": metadata["built"],
+        }
+
+    latest_path = os.path.join(build_dir, "latest.json")
+    if os.path.exists(latest_path):
+        # supports v6+
+        with open(latest_path) as f:
+
+            metadata = json.load(f)
+            # example data:
+            # {
+            #     "status": "active",
+            #     "schemaVersion": "6.0.0",
+            #     "built": "2024-11-26T20:24:24Z",
+            #     "path": "vulnerability-db_v6.0.0_2024-11-25T01:31:56Z_1732652663.tar.zst",
+            #     "checksum": "sha256:1a0ec0ba815083d0ef50790c8c94307c822fd7d09632dee9c3edb6bf5a58e6ff"
+            # }
+        return {
+            "version": int(metadata["schemaVersion"].split(".")[0]),
+            "db_checksum": None,  # we don't have this information
+            "db_created": metadata["built"],
+            "data_created": parse_datetime(metadata["path"].split("_")[2]),
+            "latest_path": os.path.abspath(latest_path),
+        }
+
+    msg = f"missing metadata.json and latest.json in build dir {build_dir!r}"
+    raise DBInvalidException(msg)
+
+
+def parse_datetime(s: str) -> datetime.datetime:
+    return datetime.datetime.strptime(s, "%Y-%m-%dT%H:%M:%SZ")
 
 
 class GrypeDB:
     def __init__(self, bin_path: str, config_path: str = ""):
@@ -424,7 +478,7 @@ def build_and_package(self, schema_version: int, provider_root_dir: str, root_di
 
         db_pattern = os.path.join(
             build_dir,
-            f"*_v{schema_version}_*.tar.*",
+            f"*_v{schema_version}[._]*.tar.*",
         )
 
         matches = glob.glob(db_pattern)
diff --git a/manager/src/grype_db_manager/s3utils.py b/manager/src/grype_db_manager/s3utils.py
index 91d5141c..a4b7965c 100644
--- a/manager/src/grype_db_manager/s3utils.py
+++ b/manager/src/grype_db_manager/s3utils.py
@@ -74,13 +74,13 @@ def upload(bucket: str, key: str, contents: str, client_factory: type[ClientFact
 
 
 def upload_file(bucket: str, key: str, path: str, client_factory: type[ClientFactory] = ClientFactory, **kwargs) -> None:
-    logging.debug(f"uploading file={path} to s3 bucket={bucket} key={key}")
-
     if "ContentType" not in kwargs:
         content_type = mime.from_file(path)
         if content_type:
             kwargs["ContentType"] = content_type
 
+    logging.debug(f"uploading file={path} to s3 bucket={bucket} key={key} content-type={kwargs.get('ContentType', '')}")
+
     # boto is a little too verbose... let's tone that down just for a bit
     with LoggingContext(level=logging.WARNING):
         s3 = client_factory.new()
diff --git a/manager/tests/cli/.grype-db.yaml b/manager/tests/cli/.grype-db.yaml
index e7860813..88b97a12 100644
--- a/manager/tests/cli/.grype-db.yaml
+++ b/manager/tests/cli/.grype-db.yaml
@@ -4,7 +4,7 @@ provider:
   root: cli-test-data/vunnel
   configs:
-    # let's use a single provider that we can show in isolation the setup is generally working. We don't
+    # let's use a limited set of providers to show in isolation that the setup is generally working. We don't
     # need all providers / an entire database to test the workflow.
     - name: oracle
       kind: vunnel
diff --git a/manager/tests/cli/Makefile b/manager/tests/cli/Makefile
index 57c53715..82aeca0e 100644
--- a/manager/tests/cli/Makefile
+++ b/manager/tests/cli/Makefile
@@ -6,12 +6,19 @@ CYAN := $(shell tput -T linux setaf 6)
 RESET := $(shell tput -T linux sgr0)
 
 test: virtual-env-check ## Run CLI tests
-	./run.sh
+	pytest . -vv -o log_cli=true
 
-cli-test-data/vunnel/oracle: ## Prepare data for CLI tests
+.PHONY: vunnel-data
+vunnel-data: cli-test-data/vunnel/oracle
+
+cli-test-data/vunnel/oracle: ## Prepare oracle data for CLI tests
 	mkdir -p cli-test-data/vunnel
 	oras pull ghcr.io/anchore/grype-db/data/oracle:latest && go run ../../../cmd/grype-db cache restore --path ./grype-db-cache.tar.gz
 
+.PHONY: install-oracle-labels
+install-oracle-labels:
+	cp -a ../../../data/vulnerability-match-labels/labels/docker.io+oraclelinux* ./cli-test-data/yardstick/labels/
+
 virtual-env-check:
 	@ if [ "${VIRTUAL_ENV}" = "" ]; then \
 		echo "$(ERROR)Not in a virtual environment. Try running with 'poetry run' or enter a 'poetry shell' session.$(RESET)"; \
diff --git a/manager/tests/cli/README.md b/manager/tests/cli/README.md
index a511ec5f..b6f7363b 100644
--- a/manager/tests/cli/README.md
+++ b/manager/tests/cli/README.md
@@ -17,9 +17,8 @@ If you'd like to run a single test:
 
 ```shell
 # from the manager/tests/cli directory
-./run.sh
+pytest . -vv -o log_cli=true -k <test_name>
 
 # e.g.
-# ./run.sh workflow-3-update-listing.sh
-# ./run.sh workflow-*db.sh
+# pytest . -vv -o log_cli=true -k test_workflow_4
 ```
diff --git a/manager/tests/cli/conftest.py b/manager/tests/cli/conftest.py
new file mode 100644
index 00000000..fb2807ec
--- /dev/null
+++ b/manager/tests/cli/conftest.py
@@ -0,0 +1,198 @@
+import os
+import shlex
+import subprocess
+import pytest
+import logging
+from enum import Enum
+from pathlib import Path
+from contextlib import contextmanager
+from tempfile import TemporaryDirectory
+
+class Format(Enum):
+    RESET = "\033[0m"
+    GREEN = "\033[1;32m"
+    RED = "\033[1;31m"
+    GREY = "\033[0;37m"
+    PURPLE = "\033[1;35m"
+    ORANGE_BOLD = "\033[1;33m"
+    ITALIC = "\033[3m"
+    BOLD = "\033[1m"
+
+    def render(self, text: str) -> str:
+        return f"{self.value}{text}{Format.RESET.value}"
+
+class CustomLogger(logging.Logger):
+
+    def __init__(self, name, level=logging.NOTSET):
+        super().__init__(name, level)
+        self.test_function = None  # placeholder for test-specific context
+
+    def step(self, message: str):
+        if self.test_function:
+            message = f"[{self.test_function}] {message}"
+        self.info(Format.GREEN.render(message))
+
+@pytest.fixture(scope="function")
+def logger(request):
+    logging.setLoggerClass(CustomLogger)
+    logger = logging.getLogger(f"test_logger_{id(object())}")
+    logger.setLevel(logging.DEBUG)
+
+    # set the test function name dynamically
+    logger.test_function = request.node.name
+
+    return logger
+
+@pytest.fixture(scope="function", autouse=True)
+def change_to_cli_dir(request):
+    """
+    Automatically change the working directory to the directory containing the test file
+    if it's not already set, and revert back after the test.
+ """ + # the directory of the current test file (which is in manage/tests/cli) + cli_dir = request.fspath.dirname + original_dir = os.getcwd() + + # bail if already in the target directory + if os.path.samefile(original_dir, cli_dir): + yield # run the test + return + + # change to the target directory + if not os.path.isdir(cli_dir): + raise FileNotFoundError(f"Expected directory '{cli_dir}' does not exist.") + + os.chdir(cli_dir) + try: + yield # run the test + finally: + os.chdir(original_dir) # revert to the original directory + + + +@pytest.fixture(scope="session") +def temporary_dir() -> str: + with TemporaryDirectory() as tmp_dir: + yield tmp_dir + + +@pytest.fixture(scope="session") +def cli_env() -> dict[str, str]: + env = os.environ.copy() + env["PATH"] = f"{os.path.abspath('bin')}:{env['PATH']}" # add `bin` to PATH + return env + +class CommandHelper: + + def __init__(self, logger: logging.Logger): + self.logger = logger + + def run(self, command: str, env=None, expect_fail=False, use_shell=True, **kwargs) -> tuple[str, str]: + self.logger.info(Format.ITALIC.render(f"{command}")) + + process = subprocess.run( + command if use_shell else shlex.split(command), + shell=use_shell, # use shell expansion if requested + capture_output=True, + text=True, + env=env, + **kwargs, + ) + + # log stdout and stderr when an error occurs + if process.returncode != 0 and not expect_fail: + self.logger.error(Format.RED.render("└── command failed unexpectedly")) + log_lines(process.stdout, " ", self.logger.error, Format.RED.render) + log_lines(process.stderr, " ", self.logger.error, Format.RED.render) + raise AssertionError("command failed unexpectedly") + elif process.returncode == 0 and expect_fail: + self.logger.error(Format.RED.render("└── expected failure, but command succeeded")) + log_lines(process.stdout, " ", self.logger.error, Format.RED.render) + log_lines(process.stderr, " ", self.logger.error, Format.RED.render) + raise AssertionError("command succeeded but was expected to fail") + + # log success + self.logger.debug(Format.GREY.render("└── command succeeded")) + return process.stdout.strip(), process.stderr.strip() + + @contextmanager + def pushd(self, path, logger): + """Temporarily change directory.""" + prev_dir = os.getcwd() + logger.info(f"pushd {path}") + os.chdir(path) + try: + yield + finally: + logger.info(f"popd # {prev_dir}") + os.chdir(prev_dir) + + +def log_lines(text: str, prefix: str, lgr, renderer=None): + for line in text.splitlines(): + msg = f"{prefix}{line}" + if renderer: + msg = renderer(msg) + lgr(msg) + +@pytest.fixture +def command(logger) -> CommandHelper: + return CommandHelper(logger) + +class GrypeHelper: + def __init__(self, bin_dir: str | Path | None = None): + if bin_dir: + self.bin_dir = Path(bin_dir) + else: + self.bin_dir = None + self.command = CommandHelper(logging.getLogger("grype")) + + def run(self, cmd: str, env: dict[str, str] | None = None, **kwargs) -> tuple[str, str]: + return self.command.run(f"{self.bin_dir}/grype {cmd}", env=env, **kwargs) + + def install(self, branch_or_version: str, bin_dir: str | None = None, env: dict[str, str] | None = None) -> "GrypeHelper": + """ + Install Grype either by building from a feature branch or downloading a prebuilt binary. 
+ """ + if not bin_dir and not self.bin_dir: + raise ValueError("bin_dir is required for Grype installation") + + if bin_dir: + bin_dir = Path(bin_dir) + else: + bin_dir = self.bin_dir + + grype_binary = Path(bin_dir) / "grype" + + if branch_or_version.startswith("v"): + self.command.run( + f"curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b {bin_dir} {branch_or_version}", + use_shell=True, + env=env, + ) + + if not grype_binary.exists(): + raise RuntimeError("Grype binary installation failed via install.sh") + + else: + with TemporaryDirectory() as temp_dir: + self.command.run( + f"git clone --branch {branch_or_version} https://github.com/anchore/grype.git {temp_dir}", + check=True, + env=env, + ) + self.command.run( + f"go build -o {grype_binary} -ldflags '-X github.com/anchore/grype-db/pkg/grypedb.Version={branch_or_version}' ./cmd/grype", + cwd=temp_dir, + check=True, + env=env, + ) + + if not grype_binary.exists(): + raise RuntimeError("Grype binary build failed from feature branch") + + return GrypeHelper(bin_dir) + +@pytest.fixture(scope="session") +def grype(): + return GrypeHelper() diff --git a/manager/tests/cli/run.sh b/manager/tests/cli/run.sh deleted file mode 100755 index 9a615f40..00000000 --- a/manager/tests/cli/run.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env bash - -. utils.sh - -# if no arguments are given then use case-*.sh, otherwise use the files given -if [ $# -eq 0 ]; then - files=$(find . -maxdepth 1 -type f -name "workflow-*.sh" | sort) -else - files=$@ -fi - -if [ -z "$files" ]; then - echo "No test files found" - exit 1 -fi - -title "Test scripts to run:" -for script in $files; do - echo " $script" -done -echo - -# run all scripts in the current directory named workflow-*.sh and exit on first failure -status=0 -for script in $files; do - bash -c "./$script" || { status=1; break; } -done - -if [ $status -eq 0 ]; then - echo -e "${SUCCESS}All tests passed${RESET}" -else - echo -e "${ERROR}Some tests failed${RESET}" -fi - -exit $status \ No newline at end of file diff --git a/manager/tests/cli/s3-mock/setup-workflow-3.py b/manager/tests/cli/s3-mock/setup-legacy-workflow-3.py similarity index 100% rename from manager/tests/cli/s3-mock/setup-workflow-3.py rename to manager/tests/cli/s3-mock/setup-legacy-workflow-3.py diff --git a/manager/tests/cli/s3-mock/setup-workflow-4.py b/manager/tests/cli/s3-mock/setup-legacy-workflow-4.py similarity index 100% rename from manager/tests/cli/s3-mock/setup-workflow-4.py rename to manager/tests/cli/s3-mock/setup-legacy-workflow-4.py diff --git a/manager/tests/cli/s3-mock/setup-workflow-1.py b/manager/tests/cli/s3-mock/setup-workflow-1.py new file mode 100644 index 00000000..dec6fdcc --- /dev/null +++ b/manager/tests/cli/s3-mock/setup-workflow-1.py @@ -0,0 +1,37 @@ +import os +import requests +import shutil + +# the credentials are not required for localstack, but the boto3 client will complain if they are not set +os.environ["AWS_ACCESS_KEY_ID"] = "test" +os.environ["AWS_SECRET_ACCESS_KEY"] = "test" + +from grype_db_manager import s3utils +from grype_db_manager.cli import config + + +def main(): + cfg = config.load() + + s3_bucket = cfg.distribution.s3_bucket + region = cfg.distribution.aws_region + + if not bucket_exists(s3_bucket): + print(f"creating bucket {s3_bucket!r}") + s3 = s3utils.ClientFactory.new() + s3.create_bucket(Bucket=s3_bucket, CreateBucketConfiguration={"LocationConstraint": region}) + + print("done!") + + +def bucket_exists(bucket: str): + try: + 
+        list(s3utils.get_matching_s3_objects(bucket=bucket, prefix=""))
+        return True
+    except Exception:
+        pass
+    return False
+
+
+if __name__ == "__main__":
+    main()
diff --git a/manager/tests/cli/test_legacy_workflows.py b/manager/tests/cli/test_legacy_workflows.py
new file mode 100644
index 00000000..80add0be
--- /dev/null
+++ b/manager/tests/cli/test_legacy_workflows.py
@@ -0,0 +1,220 @@
+import pytest
+
+@pytest.mark.usefixtures("cli_env")
+def test_workflow_1(cli_env, command, logger):
+    """
+    workflow 1: create and delete a DB
+    """
+
+    logger.step("setup: clear previous data")
+    command.run("make clean-manager", env=cli_env)
+    command.run("make vunnel-data", env=cli_env)
+
+    logger.step("case 1: create the DB")
+    stdout, _ = command.run("grype-db-manager -v db build -s 5", env=cli_env)
+    assert stdout.strip(), "Expected non-empty output"
+    db_id = stdout.splitlines()[-1]  # assume DB ID is the last line of output
+
+    stdout, _ = command.run("grype-db-manager db list", env=cli_env)
+    assert db_id in stdout, f"Expected DB ID {db_id} in output"
+
+    logger.step("case 2: delete the DB")
+    command.run("grype-db-manager db clear", env=cli_env)
+    stdout, _ = command.run("grype-db-manager db list", env=cli_env)
+    assert db_id not in stdout, f"Did not expect DB ID {db_id} in output"
+
+
+@pytest.mark.usefixtures("cli_env")
+def test_workflow_2(cli_env, command, logger):
+    """
+    workflow 2: validate DB
+    This test creates a database from raw vunnel data and performs validations under different conditions.
+    """
+
+    logger.step("setup: create the DB")
+    command.run("make clean-manager", env=cli_env)
+    command.run("make vunnel-data", env=cli_env)
+
+    # create the database
+    stdout, _ = command.run("grype-db-manager -v db build -s 5", env=cli_env)
+    assert stdout.strip(), "Expected non-empty output"
+    db_id = stdout.splitlines()[-1]  # Get the last line as the DB ID
+
+    ### case 1: fail DB validation (too many unknowns) ###
+    logger.step("case 1: fail DB validation (too many unknowns)")
+    command.run("make clean-yardstick-labels", env=cli_env)
+
+    # workaround for Go 1.23+ parent directory module lookup
+    cli_env["GOWORK"] = "off"
+
+    stdout, _ = command.run(
+        f"grype-db-manager db validate {db_id} -vvv --skip-namespace-check --recapture",
+        env=cli_env,
+        expect_fail=True,
+    )
+    assert "current indeterminate matches % is greater than 10%" in stdout
+
+    ### case 2: fail DB validation (missing namespaces) ###
+    logger.step("case 2: fail DB validation (missing namespaces)")
+    command.run("make clean-yardstick-labels", env=cli_env)
+
+    logger.info("installing labels")
+    command.run("make install-oracle-labels", env=cli_env)
+
+    _, stderr = command.run(
+        f"grype-db-manager db validate {db_id} -vvv",
+        env=cli_env,
+        expect_fail=True,
+    )
+    assert "missing namespaces in DB" in stderr
+
+    ### case 3: pass DB validation ###
+    logger.step("case 3: pass DB validation")
+    command.run("make clean-yardstick-labels", env=cli_env)
+
+    logger.info("installing labels")
+    command.run("make install-oracle-labels", env=cli_env)
+
+    stdout, _ = command.run(
+        f"grype-db-manager db validate {db_id} -vvv --skip-namespace-check",
+        env=cli_env,
+    )
+    assert "Quality gate passed!" in stdout
+
+
+@pytest.mark.usefixtures("cli_env")
+def test_workflow_3(cli_env, command, logger, tmp_path, grype):
+    """
+    workflow 3: update an existing listing file
+    This test uses a mock S3 setup to upload databases, generate a new listing file, and validate that the updated
+    listing file works with grype for scanning.
+ """ + + logger.step("setup: prepare environment variables and directories") + + # set environment variables for aws and grype + bin_dir = tmp_path / "bin" + bin_dir.mkdir(parents=True, exist_ok=True) + + cli_env.update({ + "AWS_ACCESS_KEY_ID": "test", + "AWS_SECRET_ACCESS_KEY": "test", + "AWS_REGION": "us-west-2", + "PATH": f"{bin_dir}:{cli_env['PATH']}", # ensure `bin` directory is in PATH + }) + + grype = grype.install("v0.65.0", bin_dir) + + logger.step("setup: start mock S3 and upload databases") + with command.pushd("s3-mock", logger): + command.run("docker compose up -d", env=cli_env) + command.run("python setup-legacy-workflow-3.py", env=cli_env) + + ### start of testing ### + logger.step("case 1: update a listing file based on S3 state") + + # generate a new listing file + stdout, _ = command.run("grype-db-manager listing update", env=cli_env) + assert "Validation passed" in stdout + assert "listing.json uploaded to s3://testbucket/grype/databases" in stdout + + # setup grype for DB updates and scans + cli_env.update({ + "GRYPE_DB_UPDATE_URL": "http://localhost:4566/testbucket/grype/databases/listing.json", + "GRYPE_DB_CACHE_DIR": str(bin_dir) + }) + + # validate grype DB listing and scanning + stdout, _ = grype.run(f"db list", env=cli_env) + assert "http://localhost:4566" in stdout + + stdout, _ = grype.run(f"db update", env=cli_env) + + stdout, _ = grype.run(f"--platform linux/amd64 --by-cve alpine:3.2", env=cli_env) + assert "CVE-2016-2148" in stdout + + ### end of testing ### + + logger.step("teardown: stop mock S3 and clean up") + with command.pushd("s3-mock", logger): + command.run("docker compose down -t 1 -v", env=cli_env) + + +@pytest.mark.usefixtures("cli_env") +def test_workflow_4(cli_env, command, logger, tmp_path, grype): + """ + workflow 4: full publish workflow + This test builds and validates a new DB from raw vunnel data, uploads the DB to a mock S3, updates the listing file, + and uses the updated listing file in a grype scan. + """ + + logger.step("setup: prepare environment variables and directories") + + # set environment variables for aws, grype, and schema versions + bin_dir = tmp_path / "bin" + bin_dir.mkdir(parents=True, exist_ok=True) + + cli_env.update({ + "AWS_ACCESS_KEY_ID": "test", + "AWS_SECRET_ACCESS_KEY": "test", + "AWS_REGION": "us-west-2", + "SCHEMA_VERSION": "5", + "GRYPE_DB_MANAGER_VALIDATE_LISTING_OVERRIDE_GRYPE_VERSION": "v0.65.0", + "GRYPE_DB_MANAGER_VALIDATE_LISTING_OVERRIDE_DB_SCHEMA_VERSION": "5", + "PATH": f"{bin_dir}:{cli_env['PATH']}", # ensure `bin` directory is in PATH + }) + + grype = grype.install("v0.65.0", bin_dir) + + logger.step("setup: clean manager and prepare data") + command.run("make clean-manager", env=cli_env) + command.run("make vunnel-data", env=cli_env) + command.run("make install-oracle-labels", env=cli_env) + + logger.step("setup: start mock S3 and upload initial data") + with command.pushd("s3-mock", logger): + command.run("docker compose up -d", env=cli_env) + command.run("python setup-legacy-workflow-4.py", env=cli_env) + + ### start of testing ### + logger.step("case 1: create and publish a DB") + + # build, validate, and upload the database + stdout, _ = command.run( + "grype-db-manager db build-and-upload --schema-version 5 --skip-namespace-check", + env=cli_env, + ) + assert "Quality gate passed!" 
+    assert "' uploaded to s3://testbucket/grype/databases" in stdout
+
+    logger.step("case 2: update the listing file based on the DB uploaded")
+
+    # update the listing file and validate
+    stdout, _ = command.run("grype-db-manager listing update", env=cli_env)
+    assert "Validation passed" in stdout
+    assert "listing.json uploaded to s3://testbucket/grype/databases" in stdout
+
+    # set grype environment variables
+    cli_env.update({
+        "GRYPE_DB_UPDATE_URL": "http://localhost:4566/testbucket/grype/databases/listing.json",
+        "GRYPE_DB_CACHE_DIR": str(bin_dir),
+    })
+
+    # validate grype DB listing and scanning
+    stdout, _ = grype.run("db list", env=cli_env)
+    assert "http://localhost:4566" in stdout
+
+    stdout, _ = grype.run("db update", env=cli_env)
+    assert "Vulnerability database updated" in stdout
+
+    stdout, _ = grype.run(
+        "docker.io/oraclelinux:6@sha256:a06327c0f1d18d753f2a60bb17864c84a850bb6dcbcf5946dd1a8123f6e75495 --by-cve",
+        env=cli_env,
+    )
+    assert "ELSA-2021-9591" in stdout
+
+    ### end of testing ###
+
+    logger.step("teardown: stop mock S3 and clean up")
+    with command.pushd("s3-mock", logger):
+        command.run("docker compose down -t 1 -v", env=cli_env)
diff --git a/manager/tests/cli/test_workflows.py b/manager/tests/cli/test_workflows.py
new file mode 100644
index 00000000..0cc95ea2
--- /dev/null
+++ b/manager/tests/cli/test_workflows.py
@@ -0,0 +1,126 @@
+import pytest
+
+@pytest.mark.usefixtures("cli_env")
+def test_workflow_1(cli_env, command, logger, tmp_path, grype):
+    """
+    workflow 1: create, upload, and delete a DB
+    """
+    logger.step("setup: prepare environment variables and directories")
+
+    # set environment variables for aws and grype
+    bin_dir = tmp_path / "bin"
+    bin_dir.mkdir(parents=True, exist_ok=True)
+    schema_version = "6"
+    cli_env.update({
+        "AWS_ACCESS_KEY_ID": "test",
+        "AWS_SECRET_ACCESS_KEY": "test",
+        "AWS_REGION": "us-west-2",
+        "GRYPE_EXP_DBV6": "true",  # while we are in development, we need to enable the experimental dbv6 feature flag
+        "GOWORK": "off",  # workaround for Go 1.23+ parent directory module lookup
+        "PATH": f"{bin_dir}:{cli_env['PATH']}",  # ensure `bin` directory is in PATH
+        "GOBIN": str(bin_dir),
+        "GRYPE_DB_UPDATE_URL": f"http://localhost:4566/testbucket/grype/databases/v{schema_version}/latest.json",
+        "GRYPE_DB_CACHE_DIR": str(bin_dir)
+    })
+
+    # while we are in development, we need to use a git branch
+    grype = grype.install("add-v6-feature-flag", bin_dir)
+
+    logger.step("setup: clear previous data")
+    command.run("make clean-manager", env=cli_env)
+    command.run("make vunnel-data", env=cli_env)
+
+    logger.step("setup: start mock S3")
+    with command.pushd("s3-mock", logger):
+        command.run("docker compose up -d", env=cli_env)
+        command.run("python setup-workflow-1.py", env=cli_env)
+
+    logger.step("case 1: create the DB")
+    stdout, _ = command.run(f"grype-db-manager -v db build -s {schema_version}", env=cli_env)
+    assert stdout.strip(), "Expected non-empty output"
+    db_id = stdout.splitlines()[-1]  # assume DB ID is the last line of output
+
+    stdout, _ = command.run("grype-db-manager db list", env=cli_env)
+    assert db_id in stdout, f"Expected DB ID {db_id} in output"
+
+    logger.step("case 2: upload the DB")
+    stdout, _ = command.run(f"grype-db-manager db upload {db_id}", env=cli_env)
+    assert f"DB archive '{db_id}' uploaded to s3://testbucket/grype/databases/v{schema_version}" in stdout
+    assert f"latest.json '{db_id}' uploaded to s3://testbucket/grype/databases/v{schema_version}" in stdout
+
+    logger.step("case 3: use the DB with grype")
grype") + stdout, _ = grype.run("db update -v", env=cli_env) + assert "Vulnerability database updated" in stdout + + # TODO: introduce this when there is v6 matching logic implemented + # stdout, _ = grype.run("--platform linux/amd64 --by-cve alpine:3.2", env=cli_env) + # assert "CVE-2016-2148" in stdout + + logger.step("case 4: delete the DB") + command.run("grype-db-manager db clear", env=cli_env) + stdout, _ = command.run("grype-db-manager db list", env=cli_env) + assert db_id not in stdout, f"Did not expect DB ID {db_id} in output" + + ### end of testing ### + + logger.step("teardown: stop mock S3 and clean up") + with command.pushd("s3-mock", logger): + command.run("docker compose down -t 1 -v", env=cli_env) + +# TODO: introduce this when there is v6 matching logic implemented +# @pytest.mark.usefixtures("cli_env") +# def test_workflow_2(cli_env, command, logger): +# """ +# workflow 2: validate DB +# This test creates a database from raw vunnel data and performs validations via the quality gate. +# """ +# +# logger.step("setup: create the DB") +# command.run("make clean-manager", env=cli_env) +# command.run("make vunnel-data", env=cli_env) +# +# # create the database +# stdout, _ = command.run("grype-db-manager -v db build -s 6", env=cli_env) +# assert stdout.strip(), "Expected non-empty output" +# db_id = stdout.splitlines()[-1] # Get the last line as the DB ID +# +# ### case 1: fail DB validation (too many unknowns) ### +# logger.step("case 1: fail DB validation (too many unknowns)") +# command.run("make clean-yardstick-labels", env=cli_env) +# +# # workaround for Go 1.23+ parent directory module lookup +# cli_env["GOWORK"] = "off" +# +# stdout, _ = command.run( +# f"grype-db-manager db validate {db_id} -vvv --skip-namespace-check --recapture", +# env=cli_env, +# expect_fail=True, +# ) +# assert "current indeterminate matches % is greater than 10%" in stdout +# +# ### case 2: fail DB validation (missing namespaces) ### +# logger.step("case 2: fail DB validation (missing namespaces)") +# command.run("make clean-yardstick-labels", env=cli_env) +# +# logger.info("installing labels") +# command.run("make install-oracle-labels", env=cli_env) +# +# _, stderr = command.run( +# f"grype-db-manager db validate {db_id} -vvv", +# env=cli_env, +# expect_fail=True, +# ) +# assert "missing namespaces in DB" in stderr +# +# ### case 3: pass DB validation ### +# logger.step("case 3: pass DB validation") +# command.run("make clean-yardstick-labels", env=cli_env) +# +# logger.info("installing labels") +# command.run("make install-oracle-labels", env=cli_env) +# +# stdout, _ = command.run( +# f"grype-db-manager db validate {db_id} -vvv --skip-namespace-check", +# env=cli_env, +# ) +# assert "Quality gate passed!" 
\ No newline at end of file
diff --git a/manager/tests/cli/utils.sh b/manager/tests/cli/utils.sh
deleted file mode 100755
index 312496a9..00000000
--- a/manager/tests/cli/utils.sh
+++ /dev/null
@@ -1,128 +0,0 @@
-#!/usr/bin/env bash
-set -u
-
-ERROR="\033[1;31m"
-SUCCESS="\033[1;32m"
-STEP="\033[1;33m"
-HEADER="\033[1;34m"
-TITLE="\033[1;35m"
-RESET="\033[0m"
-
-i=0
-
-stdout_files=()
-stderr_files=()
-
-
-
-function _run_and_capture() {
-    stdout_tmp_file=$(mktemp /tmp/grype-db-manager-test-stdout.XXXXXX)
-    stderr_tmp_file=$(mktemp /tmp/grype-db-manager-test-stderr.XXXXXX)
-    stdout_files+=( $stdout_tmp_file )
-    stderr_files+=( $stderr_tmp_file )
-
-    echo -e "${STEP}$i| Running $@${RESET}"
-
-    # we want to capture stdout and stderr to files but also print them to the screen in realtime. Using tee is the
-    # best resource for this, but there is an added challenge of needing the return code of the original command
-    # (which is now in a subshell). The "exit PIPESTATUS[0]" solves this by promoting the first command's return
-    # code as the subshell's return code.
-    ($@ | tee $stdout_tmp_file ; exit ${PIPESTATUS[0]}) 3>&1 1>&2 2>&3 | tee $stderr_tmp_file
-    rc=${PIPESTATUS[0]}
-    return $rc
-}
-
-function run() {
-    _run_and_capture $@
-    rc=$?
-    if [ $rc -eq 0 ]; then
-        echo -e "${SUCCESS}Success${RESET}"
-    else
-        echo -e "${ERROR}Failed: expected zero return code but got $rc${RESET}"
-        exit 1
-    fi
-    ((i++))
-}
-
-function run_expect_fail() {
-    _run_and_capture $@
-    rc=$?
-    if [ $rc -eq 0 ]; then
-        echo -e "${ERROR}Failed: expected non-zero return code but got $rc${RESET}"
-        exit 1
-    else
-        echo -e "${SUCCESS}Success: exited with non-zero return code: $rc${RESET}"
-    fi
-    ((i++))
-}
-
-function last_stdout_file() {
-    echo ${stdout_files[${#stdout_files[@]} - 1]}
-}
-
-function last_stderr_file() {
-    echo ${stderr_files[${#stderr_files[@]} - 1]}
-}
-
-function last_stdout() {
-    cat $(last_stdout_file)
-}
-
-function last_stderr() {
-    cat $(last_stderr_file)
-}
-
-function assert_not_empty() {
-    output_file=$1
-    len=$(cat $output_file | wc -l | tr -d ' ')
-    if [[ "$len" -gt 0 ]]; then
-        return
-    fi
-    echo -e "${ERROR}Unexpected length $len${RESET}"
-    exit 1
-}
-
-function assert_contains() {
-    output_file=$1
-    target=$2
-    is_in_file=$(cat $output_file | grep -c "$target")
-    if [ $is_in_file -eq 0 ]; then
-        echo -e "${ERROR}Target not found in contents '$target'${RESET}"
-        echo -e "${ERROR}...contents:\n$(cat $output_file)${RESET}"
-        exit 1
-    fi
-}
-
-function assert_does_not_contain() {
-    output_file=$1
-    target=$1
-    is_in_file=$(cat $output_file | grep -c "$target")
-    if [ $is_in_file -ne 0 ]; then
-        echo -e "${ERROR}Target found in contents '$target'${RESET}"
-        echo -e "${ERROR}...contents:\n$(cat output_file)${RESET}"
-        exit 1
-    fi
-}
-
-function header() {
-    echo -e "${HEADER}$@${RESET}"
-}
-
-function title() {
-    echo -e "${TITLE}$@${RESET}"
-}
-
-function end_testing() {
-    echo "cleaning up temp files created:"
-    for i in ${!stdout_files[@]}; do
-        echo "   " ${stdout_files[$i]}
-        rm ${stdout_files[$i]}
-    done
-
-    for i in ${!stderr_files[@]}; do
-        echo "   " ${stderr_files[$i]}
-        rm ${stderr_files[$i]}
-    done
-
-    echo -e "\n${SUCCESS}PASS${RESET}"
-}
diff --git a/manager/tests/cli/workflow-1-create-and-delete-db.sh b/manager/tests/cli/workflow-1-create-and-delete-db.sh
deleted file mode 100755
index 83170857..00000000
--- a/manager/tests/cli/workflow-1-create-and-delete-db.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env bash
-
-. utils.sh
-
-title "Starting workflow 1: create and delete DB"
-
-header "Setup: clear previous data"
-
-make clean-manager
-make cli-test-data/vunnel/oracle
-
-
-### Start of testing ########################
-header "Case 1: create the DB"
-
-run grype-db-manager -v db build -s 5
-assert_not_empty $(last_stdout_file)
-DB_ID="$(last_stdout)"
-run grype-db-manager db list
-
-assert_contains "$(last_stdout_file)" $DB_ID
-
-
-#############################################
-header "Case 2: delete the DB"
-
-run grype-db-manager db clear
-run grype-db-manager db list
-assert_does_not_contain "$(last_stdout_file)" $DB_ID
-
-
-### End of testing ########################
-end_testing
diff --git a/manager/tests/cli/workflow-2-validate-db.sh b/manager/tests/cli/workflow-2-validate-db.sh
deleted file mode 100755
index 4fe8cbc0..00000000
--- a/manager/tests/cli/workflow-2-validate-db.sh
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env bash
-
-. utils.sh
-
-title "Starting workflow 2: validate DB"
-# this test uses raw vunnel data to create a DB from scratch and see if it passes validation. There are different sets
-# of labels to trigger a failing validation as well as passing validations under other non-ideal conditions.
-# ultimately it is up to unit tests to fully exercise the validation logic, but this test is a good sanity check
-# that the data needed for validations is wired up correctly.
-
-header "Setup: create the DB"
-make clean-manager
-make cli-test-data/vunnel/oracle
-run grype-db-manager -v db build -s 5
-assert_not_empty $(last_stdout_file)
-DB_ID="$(last_stdout)"
-
-### Start of testing ########################
-header "Case 1: fail DB validation (too many unknowns)"
-
-make clean-yardstick-labels
-
-# workaround for go1.23+ looking into parent dirs when building go modules in subdirs
-export GOWORK=off
-
-run_expect_fail grype-db-manager db validate $DB_ID -vvv --skip-namespace-check --recapture
-assert_contains $(last_stdout_file) "current indeterminate matches % is greater than 10%"
-
-#############################################
-header "Case 2: fail DB validation (missing namespaces)"
-
-make clean-yardstick-labels
-echo "installing labels"
-# use the real labels
-cp -a ../../../data/vulnerability-match-labels/labels/docker.io+oraclelinux* ./cli-test-data/yardstick/labels/
-tree ./cli-test-data/yardstick/labels/
-
-run_expect_fail grype-db-manager db validate $DB_ID -vvv
-assert_contains $(last_stderr_file) "missing namespaces in DB"
-
-
-#############################################
-header "Case 3: pass DB validation"
-
-make clean-yardstick-labels
-echo "installing labels"
-# use the real labels
-cp -a ../../../data/vulnerability-match-labels/labels/docker.io+oraclelinux* ./cli-test-data/yardstick/labels/
-tree ./cli-test-data/yardstick/labels/
-
-run grype-db-manager db validate $DB_ID -vvv --skip-namespace-check
-assert_contains $(last_stdout_file) "Quality gate passed!"
-
-
-### End of testing ########################
-end_testing
diff --git a/manager/tests/cli/workflow-3-update-listing.sh b/manager/tests/cli/workflow-3-update-listing.sh
deleted file mode 100755
index 31c86665..00000000
--- a/manager/tests/cli/workflow-3-update-listing.sh
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env bash
-
-. utils.sh
-
-title "Starting workflow 3: update the listing file"
-# this uses real, already-built DBs (from the production workflow) to exercise the listing file update logic.
-# an S3 mock is used to upload a set of DBs and to generate a new listing file from. The uploaded listing file
-# is then used by grype to download the correct DB and run a scan.
-
-# note: these credentials / configurations must match the ones used in s3-mock/setup.py and .grype-db-manager.yaml
-export AWS_ACCESS_KEY_ID="test"
-export AWS_SECRET_ACCESS_KEY="test"
-export AWS_REGION="us-west-2"
-
-GRYPE_VERSION="v0.65.0"
-
-set -e
-
-BIN_DIR="./bin"
-
-rm -rf $BIN_DIR
-
-curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b $BIN_DIR $GRYPE_VERSION
-
-pushd s3-mock
-docker compose up -d
-python setup-workflow-3.py
-popd
-
-set +e
-
-### Start of testing ########################
-header "Case 1: update a listing file based on S3 state"
-
-# note: this test is exercising the following commands:
-# grype-db-manager listing create
-# grype-db-manager listing validate
-
-run grype-db-manager listing update
-assert_contains $(last_stdout_file) "Validation passed"
-assert_contains $(last_stdout_file) "listing.json uploaded to s3://testbucket/grype/databases"
-
-# check if grype works with this updated listing file
-export GRYPE_DB_UPDATE_URL="http://localhost:4566/testbucket/grype/databases/listing.json"
-export GRYPE_DB_CACHE_DIR=$BIN_DIR
-
-run bin/grype db list
-
-assert_contains $(last_stdout_file) "http://localhost:4566"
-
-run bin/grype db update
-
-run bin/grype alpine:3.2
-
-assert_contains $(last_stdout_file) "CVE-2016-2148"
-
-
-### End of testing ########################
-
-pushd s3-mock
-docker compose down -t 1 -v
-popd
-
-end_testing
diff --git a/manager/tests/cli/workflow-4-full-publish.sh b/manager/tests/cli/workflow-4-full-publish.sh
deleted file mode 100755
index b5fe2a5a..00000000
--- a/manager/tests/cli/workflow-4-full-publish.sh
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env bash
-
-. utils.sh
-
-title "Starting workflow 4: full publish workflow"
-# this test exercises the full publish workflow, by building and validating a new DB from raw vunnel data,
-# uploading the DB to an S3 mock, updating and upload the listing file, and then using the updated listing file
-# in a grype scan.
-
-# note: these credentials / configurations must match the ones used in s3-mock/setup.py and .grype-db-manager.yaml
-export AWS_ACCESS_KEY_ID="test"
-export AWS_SECRET_ACCESS_KEY="test"
-export AWS_REGION="us-west-2"
-
-GRYPE_VERSION="v0.65.0"
-SCHEMA_VERSION="5"
-
-# there are what are used in the staging pipeline for a single DB build
-export GRYPE_DB_MANAGER_VALIDATE_LISTING_OVERRIDE_GRYPE_VERSION=$GRYPE_VERSION
-export GRYPE_DB_MANAGER_VALIDATE_LISTING_OVERRIDE_DB_SCHEMA_VERSION=$SCHEMA_VERSION
-
-set -e
-
-BIN_DIR="./bin"
-
-rm -rf $BIN_DIR
-
-curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b $BIN_DIR $GRYPE_VERSION
-
-make clean-manager
-make cli-test-data/vunnel/oracle
-
-pushd s3-mock
-docker compose up -d
-python setup-workflow-4.py
-popd
-
-set +e
-
-### Start of testing ########################
-header "Case 1: create and publish a DB"
-
-# note: this test is exercising the following commands:
-# grype-db-manager db build
-# grype-db-manager db validate --skip-namespace-check
-# grype-db-manager db upload
-
-run grype-db-manager db build-and-upload --schema-version $SCHEMA_VERSION --skip-namespace-check
-assert_contains $(last_stdout_file) "Quality gate passed!"
-assert_contains $(last_stdout_file) "' uploaded to s3://testbucket/grype/databases"
-
-
-header "Case 2: update the listing file based on the DB uploaded"
-
-# note: this test is exercising the following commands:
-# grype-db-manager listing create
-# grype-db-manager listing validate
-
-run grype-db-manager listing update
-assert_contains $(last_stdout_file) "Validation passed"
-assert_contains $(last_stdout_file) "listing.json uploaded to s3://testbucket/grype/databases"
-
-# check if grype works with this updated listing file
-export GRYPE_DB_UPDATE_URL="http://localhost:4566/testbucket/grype/databases/listing.json"
-export GRYPE_DB_CACHE_DIR="./bin"
-
-run bin/grype db list
-
-assert_contains $(last_stdout_file) "http://localhost:4566"
-
-run bin/grype db update
-
-run bin/grype docker.io/oraclelinux:6@sha256:a06327c0f1d18d753f2a60bb17864c84a850bb6dcbcf5946dd1a8123f6e75495
-
-assert_contains $(last_stdout_file) "ELSA-2021-9591"
-
-
-### End of testing ########################
-
-pushd s3-mock
-docker compose down -t 1 -v
-popd
-
-end_testing
diff --git a/pkg/process/package.go b/pkg/process/package.go
index 2a8d369c..c6416d41 100644
--- a/pkg/process/package.go
+++ b/pkg/process/package.go
@@ -117,7 +117,7 @@ func resolveExtension(overrideArchiveExtension string) (string, error) {
 	return extension, nil
 }
 
-var listingFiles = strset.New("listing.json", "latest.json", "history.json")
+var ignoreFiles = strset.New(grypeDBLegacyDistribution.ListingFileName, v6Distribution.LatestFileName, v6.ChecksumFileName)
 
 func populateTar(tarPath string) error {
 	originalDir, err := os.Getwd()
@@ -146,7 +146,7 @@ func populateTar(tarPath string) error {
 
 	var files []string
 	for _, fi := range fileInfos {
-		if !listingFiles.Has(fi.Name()) && !strings.Contains(fi.Name(), ".tar.") {
+		if !ignoreFiles.Has(fi.Name()) && !strings.Contains(fi.Name(), ".tar.") {
 			files = append(files, fi.Name())
 		}
 	}
diff --git a/poetry.lock b/poetry.lock
index 4a1f45f4..7d7666ca 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1493,4 +1493,4 @@ cffi = ["cffi (>=1.11)"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.11,<=3.13"
-content-hash = "618b58c3adf4fe5edba3e40b5e77ca15a472d53e82f46d059bc6aa4b4c0d06f7"
+content-hash = "2b6b3e2ece1e147dba511638ae72f0bcb4b7c8f7df102d84fd5ba28fb33b55c4"
diff --git a/pyproject.toml b/pyproject.toml
index 1090c98d..f53cba6e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -29,7 +29,7 @@ boto3 = ">=1.28.16, <2"
 click = ">=8.1.6, <9"
 dataclass-wizard = ">=0.22.2, <1"
 iso8601 = ">=2.0.0, <3"
-requests = ">=2.31.0, <3"
+requests = "^2.32.3"
 semver = ">=3.0.1, <4"
 tabulate = ">=0.9.0, <1"
 zstandard = ">=0.21.0, <1"
@@ -67,6 +67,8 @@ testpaths = ["manager/tests"]
 cache_dir = ".cache/pytest"
 pythonpath = ["manager/src"]
 norecursedirs = ["data"]
+log_format = "%(levelname)-6s %(message)s"
+log_cli_level = "INFO"
 
 [tool.black]
 line-length = 130