Skip to content

Commit

Permalink
replace cli bash test harness with python
Browse files Browse the repository at this point in the history
Signed-off-by: Alex Goodman <[email protected]>
  • Loading branch information
wagoodman committed Dec 4, 2024
1 parent 1a79f18 commit 78974e5
Show file tree
Hide file tree
Showing 25 changed files with 712 additions and 438 deletions.
46 changes: 36 additions & 10 deletions manager/src/grype_db_manager/cli/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,12 @@ def clear_dbs(cfg: config.Application) -> None:
click.echo("no databases to clear")


def remove_db(cfg: config.Application, db_uuid: str) -> None:
    """Delete the DB build session identified by db_uuid and report the outcome.

    Echoes a confirmation when the session existed and was removed; otherwise
    echoes a not-found message.
    """
    db_manager = DBManager(root_dir=cfg.data.root)
    if db_manager.remove_db(db_uuid=db_uuid):
        click.echo(f"database {db_uuid!r} removed")
        # return early so the not-found message below is only printed when
        # nothing was actually deleted (matches the pattern used by the other
        # CLI handlers in this module)
        return
    click.echo(f"no database found with session id {db_uuid}")

@group.command(name="build", help="build and validate a grype database")
@click.option("--schema-version", "-s", required=True, help="the DB schema version to build")
@click.pass_obj
Expand Down Expand Up @@ -119,9 +125,17 @@ def validate_db(
click.echo(f"no database found with session id {db_uuid}")
return

if db_info.schema_version >= 6:
# TODO: not implemented yet
raise NotImplementedError("validation for schema v6+ is not yet implemented")

if not skip_namespace_check:
# ensure the minimum number of namespaces are present
db_manager.validate_namespaces(db_uuid=db_uuid)
if db_info.schema_version < 6:
# ensure the minimum number of namespaces are present
db_manager.validate_namespaces(db_uuid=db_uuid)
else:
# TODO: implement me
raise NotImplementedError("namespace validation for schema v6+ is not yet implemented")

# resolve tool versions and install them
yardstick.store.config.set_values(store_root=cfg.data.yardstick_root)
Expand Down Expand Up @@ -208,22 +222,34 @@ def upload_db(cfg: config.Application, db_uuid: str, ttl_seconds: int) -> None:
db_manager = DBManager(root_dir=cfg.data.root)
db_info = db_manager.get_db_info(db_uuid=db_uuid)

key = f"{s3_path}/{os.path.basename(db_info.archive_path)}"
if db_info.schema_version >= 6:
if not os.path.exists(db_info.archive_path):
raise ValueError(f"latest.json file not found for DB {db_uuid!r}")

# /databases -> /databases/v6 , and is dynamic based on the schema version
s3_path = f"{s3_path}/v{db_info.schema_version}"

# TODO: we have folks that require legacy behavior, where the content type was application/x-tar
kwargs = {}
if db_info.archive_path.endswith(".tar.gz"):
kwargs["ContentType"] = "application/x-tar"
db_key = f"{s3_path}/{os.path.basename(db_info.archive_path)}"
latest_key = f"{s3_path}/latest.json"

s3utils.upload_file(
bucket=s3_bucket,
key=key,
key=db_key,
path=db_info.archive_path,
CacheControl=f"public,max-age={ttl_seconds}",
**kwargs,
)

click.echo(f"DB {db_uuid!r} uploaded to s3://{s3_bucket}/{s3_path}")
click.echo(f"DB archive {db_uuid!r} uploaded to s3://{s3_bucket}/{s3_path}")

if db_info.schema_version >= 6:
s3utils.upload_file(
bucket=s3_bucket,
key=latest_key,
path=db_info.latest_path,
CacheControl=f"public,max-age=300", # 5 minutes
)

click.echo(f"DB latest.json {db_uuid!r} uploaded to s3://{s3_bucket}/{s3_path}")


@group.command(name="build-and-upload", help="upload a grype database")
Expand Down
2 changes: 1 addition & 1 deletion manager/src/grype_db_manager/cli/listing.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from grype_db_manager.db.format import Format


@click.group(name="listing", help="manage the grype-db listing file (only schemas v1-v5)")
@click.pass_obj
def group(_: config.Application) -> None:
    # no group-level setup is needed; subcommands receive the application
    # config directly via click's pass_obj mechanism, so the argument is
    # intentionally unused here
    pass
Expand Down
5 changes: 5 additions & 0 deletions manager/src/grype_db_manager/data/schema-info.json
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,11 @@
"schema": "5",
"grype-version": "main",
"supported": true
},
{
"schema": "6",
"grype-version": "main",
"supported": false
}
]
}
4 changes: 2 additions & 2 deletions manager/src/grype_db_manager/db/listing.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@

LISTING_FILENAME = "listing.json"


# Entry is a dataclass that represents a single entry from a listing.json for schemas v1-v5.
@dataclass
class Entry:
built: str
Expand All @@ -45,7 +45,7 @@ def age_in_days(self, now: datetime.datetime | None = None) -> int:
now = datetime.datetime.now(tz=datetime.timezone.utc)
return (now - iso8601.parse_date(self.built)).days


# Listing is a dataclass that represents the listing.json for schemas v1-v5.
@dataclass
class Listing:
available: dict[int, list[Entry]]
Expand Down
2 changes: 1 addition & 1 deletion manager/src/grype_db_manager/db/metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@

FILE = "metadata.json"


# Metadata is a dataclass that represents the metadata.json for schemas v1-v5.
@dataclass
class Metadata:
built: str
Expand Down
78 changes: 66 additions & 12 deletions manager/src/grype_db_manager/grypedb.py
Original file line number Diff line number Diff line change
Expand Up @@ -249,6 +249,7 @@ class DBInfo:
db_created: datetime.datetime
data_created: datetime.datetime
archive_path: str
latest_path: str | None = None


class DBInvalidException(Exception):
Expand Down Expand Up @@ -289,6 +290,12 @@ def list_namespaces(self, db_uuid: str) -> list[str]:
# a sqlite3 db
db_path = os.path.join(build_dir, "vulnerability.db")

# check if there is a metadata.json file in the build directory
metadata_path = os.path.join(build_dir, "metadata.json")
if not os.path.exists(metadata_path):
msg = f"missing metadata.json for DB {db_uuid!r}"
raise DBInvalidException(msg)

# select distinct values in the "namespace" column of the "vulnerability" table
con = sqlite3.connect(db_path)
crsr = con.cursor()
Expand Down Expand Up @@ -322,14 +329,8 @@ def get_db_info(self, db_uuid: str) -> DBInfo | None:
with open(timestamp_path) as f:
db_created_timestamp = datetime.datetime.fromisoformat(f.read())

# read info from the metadata file in build/metadata.json
metadata_path = os.path.join(session_dir, "build", "metadata.json")
if not os.path.exists(metadata_path):
msg = f"missing metadata.json for DB {db_uuid!r}"
raise DBInvalidException(msg)

with open(metadata_path) as f:
metadata = json.load(f)
# read info from the metadata file in build/metadata.json (v1 - v5) or build/latest.json (v6+)
metadata = db_metadata(build_dir=os.path.join(session_dir, "build"))

stage_dir, _ = self.db_paths(db_uuid=db_uuid)
db_pattern = os.path.join(
Expand All @@ -347,13 +348,18 @@ def get_db_info(self, db_uuid: str) -> DBInfo | None:

abs_archive_path = os.path.abspath(matches[0])

db_created = db_created_timestamp.strftime("%Y-%m-%dT%H:%M:%SZ")
if "db_created" in metadata:
db_created = metadata["db_created"]

return DBInfo(
uuid=db_uuid,
schema_version=metadata["version"],
db_checksum=metadata["checksum"],
db_created=db_created_timestamp.strftime("%Y-%m-%dT%H:%M:%SZ"),
data_created=metadata["built"],
db_checksum=metadata["db_checksum"],
db_created=db_created,
data_created=metadata["data_created"],
archive_path=abs_archive_path,
latest_path=metadata.get("latest_path", None),
)

def list_dbs(self) -> list[DBInfo]:
Expand All @@ -372,6 +378,54 @@ def list_dbs(self) -> list[DBInfo]:

return sorted(sessions, key=lambda x: x.db_created)

def remove_db(self, db_uuid: str) -> bool:
    """Delete the on-disk session directory for db_uuid.

    Returns True when a session directory existed and was removed,
    False when there was nothing to delete.
    """
    target = os.path.join(self.db_dir, db_uuid)
    if not os.path.exists(target):
        return False
    shutil.rmtree(target)
    return True

def db_metadata(build_dir: str) -> dict:
    """Read DB metadata from a build directory, normalizing across schema eras.

    Schemas v1-v5 write build/metadata.json; schema v6+ writes
    build/latest.json. The returned dict always contains "version",
    "db_checksum", and "data_created"; the v6+ form additionally carries
    "db_created" and "latest_path".

    Raises:
        DBInvalidException: when neither metadata file exists in build_dir.
    """
    metadata_path = os.path.join(build_dir, "metadata.json")

    if os.path.exists(metadata_path):
        # supports v1 - v5
        with open(metadata_path) as f:
            metadata = json.load(f)
        return {
            "version": int(metadata["version"]),
            "db_checksum": metadata["checksum"],
            "data_created": metadata["built"],
        }

    latest_path = os.path.join(build_dir, "latest.json")
    if os.path.exists(latest_path):
        # supports v6+
        with open(latest_path) as f:
            metadata = json.load(f)
        # example data:
        # {
        #   "status": "active",
        #   "schemaVersion": "6.0.0",
        #   "built": "2024-11-26T20:24:24Z",
        #   "path": "vulnerability-db_v6.0.0_2024-11-25T01:31:56Z_1732652663.tar.zst",
        #   "checksum": "sha256:1a0ec0ba815083d0ef50790c8c94307c822fd7d09632dee9c3edb6bf5a58e6ff"
        # }
        return {
            # "6.0.0" -> 6
            "version": int(metadata["schemaVersion"].split(".")[0]),
            "db_checksum": None,  # latest.json does not carry a DB checksum
            "db_created": metadata["built"],
            # the data timestamp is embedded in the archive filename
            # (second "_"-separated field).
            # NOTE(review): this branch returns a datetime object while the
            # v1-v5 branch returns a string for "data_created" — confirm that
            # downstream consumers (e.g. DBInfo.data_created) handle both.
            "data_created": parse_datetime(metadata["path"].split("_")[2]),
            "latest_path": os.path.abspath(latest_path),
        }

    # include the directory so the failure is actionable (the original
    # f-string had no placeholders)
    msg = f"missing metadata.json and latest.json for DB in {build_dir!r}"
    raise DBInvalidException(msg)


def parse_datetime(s: str) -> datetime.datetime:
    """Parse a UTC timestamp of the form YYYY-MM-DDTHH:MM:SSZ into a naive datetime."""
    fmt = "%Y-%m-%dT%H:%M:%SZ"
    return datetime.datetime.strptime(s, fmt)

class GrypeDB:
def __init__(self, bin_path: str, config_path: str = ""):
Expand Down Expand Up @@ -424,7 +478,7 @@ def build_and_package(self, schema_version: int, provider_root_dir: str, root_di

db_pattern = os.path.join(
build_dir,
f"*_v{schema_version}_*.tar.*",
f"*_v{schema_version}[._]*.tar.*",
)

matches = glob.glob(db_pattern)
Expand Down
4 changes: 2 additions & 2 deletions manager/src/grype_db_manager/s3utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,13 +74,13 @@ def upload(bucket: str, key: str, contents: str, client_factory: type[ClientFact


def upload_file(bucket: str, key: str, path: str, client_factory: type[ClientFactory] = ClientFactory, **kwargs) -> None:
logging.debug(f"uploading file={path} to s3 bucket={bucket} key={key}")

if "ContentType" not in kwargs:
content_type = mime.from_file(path)
if content_type:
kwargs["ContentType"] = content_type

logging.debug(f"uploading file={path} to s3 bucket={bucket} key={key} content-type={kwargs.get('ContentType', '')}")

# boto is a little too verbose... let's tone that down just for a bit
with LoggingContext(level=logging.WARNING):
s3 = client_factory.new()
Expand Down
2 changes: 1 addition & 1 deletion manager/tests/cli/.grype-db.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ provider:
root: cli-test-data/vunnel

configs:
# let's use a single provider that we can show in isolation the setup is generally working. We don't
# let's use a limited set of providers with which we can show, in isolation, that the setup is generally working. We don't
# need all providers / an entire database to test the workflow.
- name: oracle
kind: vunnel
Expand Down
11 changes: 9 additions & 2 deletions manager/tests/cli/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,19 @@ CYAN := $(shell tput -T linux setaf 6)
RESET := $(shell tput -T linux sgr0)

test: virtual-env-check ## Run CLI tests
./run.sh
pytest . -vv -o log_cli=true

cli-test-data/vunnel/oracle: ## Prepare data for CLI tests
.PHONY: vunnel-data
vunnel-data: cli-test-data/vunnel/oracle

cli-test-data/vunnel/oracle: ## Prepare oracle data for CLI tests
mkdir -p cli-test-data/vunnel
oras pull ghcr.io/anchore/grype-db/data/oracle:latest && go run ../../../cmd/grype-db cache restore --path ./grype-db-cache.tar.gz

.PHONY: install-oracle-labels
install-oracle-labels:
cp -a ../../../data/vulnerability-match-labels/labels/docker.io+oraclelinux* ./cli-test-data/yardstick/labels/

virtual-env-check:
@ if [ "${VIRTUAL_ENV}" = "" ]; then \
echo "$(ERROR)Not in a virtual environment. Try running with 'poetry run' or enter a 'poetry shell' session.$(RESET)"; \
Expand Down
5 changes: 2 additions & 3 deletions manager/tests/cli/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,8 @@ If you'd like to run a single test:
```shell
# from the manager/tests/cli directory

./run.sh <test-file-name>
pytest . -vv -o log_cli=true -k <test-function-name>

# e.g.
# ./run.sh workflow-3-update-listing.sh
# ./run.sh workflow-*db.sh
# pytest . -vv -o log_cli=true -k test_workflow_4
```
Loading

0 comments on commit 78974e5

Please sign in to comment.