Skip to content

Commit

Permalink
Improve CI (#123)
Browse files Browse the repository at this point in the history
CI linting seems to have stopped failing for a while — `black .` and `isort .` were reformatting files in CI instead of checking them, so the lint jobs always passed (see the output of https://github.com/eth-cscs/stackinator/actions/runs/5486778221/jobs/9997318839?pr=122, for example). This PR improves how the linting tools are run by switching them to check-only mode.

The whole code base is automatically re-formatted to make `black` and `isort` happy again.

Additionally, the `docs` and `publish` actions are only allowed to run from the main repo.
  • Loading branch information
RMeli authored Aug 16, 2023
1 parent 0d6517c commit 0de7d22
Show file tree
Hide file tree
Showing 10 changed files with 112 additions and 80 deletions.
1 change: 1 addition & 0 deletions .github/workflows/docs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ permissions:
contents: write
jobs:
deploy:
if: github.repository == 'eth-cscs/stackinator'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
Expand Down
6 changes: 3 additions & 3 deletions .github/workflows/lint.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,10 @@ jobs:
python -m pip install black flake8 isort mypy
- name: Black
run: |
black .
black --check --verbose .
- name: isort
run: |
isort .
isort --check --diff .
- name: flake8
run: |
flake8
flake8 --count --show-source --statistics .
1 change: 1 addition & 0 deletions .github/workflows/publish.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ on:

jobs:
publish:
if: github.repository == 'eth-cscs/stackinator'
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v3
Expand Down
23 changes: 14 additions & 9 deletions stackinator/builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,8 @@
import jinja2
import yaml

from . import VERSION, root_logger, cache
from . import VERSION, cache, root_logger


class Builder:
def __init__(self, args):
Expand Down Expand Up @@ -72,7 +73,7 @@ def environment_meta(self):

@environment_meta.setter
def environment_meta(self, recipe):
'''
"""
The output that we want to generate looks like the following,
Which should correspond directly to the environment_view_meta provided
by the recipe.
Expand All @@ -96,7 +97,7 @@ def environment_meta(self, recipe):
}
}
}
'''
"""
conf = recipe.config
meta = {}
meta["name"] = conf["name"]
Expand Down Expand Up @@ -204,9 +205,12 @@ def generate(self, recipe):
# print warning if mirrors.yaml is found
if f_config.name in ["mirrors.yaml"]:
self._logger.error(
"mirrors.yaml have been removed from cluster configurations,"
" use the --cache option on stack-config instead.")
raise RuntimeError("Unsupported mirrors.yaml file in cluster configuration.")
"mirrors.yaml have been removed from cluster configurations,"
" use the --cache option on stack-config instead."
)
raise RuntimeError(
"Unsupported mirrors.yaml file in cluster configuration."
)

# construct full file path
src = system_config_path / f_config.name
Expand All @@ -219,7 +223,7 @@ def generate(self, recipe):
if recipe.mirror:
dst = config_path / "mirrors.yaml"
self._logger.debug(f"generate the build cache mirror: {dst}")
with dst.open('w') as fid:
with dst.open("w") as fid:
fid.write(cache.generate_mirrors_yaml(recipe.mirror))

# append recipe packages to packages.yaml
Expand Down Expand Up @@ -366,8 +370,9 @@ def generate(self, recipe):
with debug_script_path.open("w") as f:
f.write(
debug_script_template.render(
mount_path=recipe.config["store"], build_path=str(self.path), verbose=False
mount_path=recipe.config["store"],
build_path=str(self.path),
verbose=False,
)
)
f.write("\n")

52 changes: 23 additions & 29 deletions stackinator/cache.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
import os
import pathlib

import os
import yaml

from . import schema


def configuration_from_file(file, mount):
with file.open() as fid:
# load the raw yaml input
Expand All @@ -16,13 +17,9 @@ def configuration_from_file(file, mount):
# verify that the root path exists
path = pathlib.Path(os.path.expandvars(raw["root"]))
if not path.is_absolute():
raise FileNotFoundError(
f"The build cache path '{path}' is not absolute"
)
raise FileNotFoundError(f"The build cache path '{path}' is not absolute")
if not path.is_dir():
raise FileNotFoundError(
f"The build cache path '{path}' does not exist"
)
raise FileNotFoundError(f"The build cache path '{path}' does not exist")

raw["root"] = path

Expand All @@ -35,36 +32,33 @@ def configuration_from_file(file, mount):
if key is not None:
key = pathlib.Path(os.path.expandvars(key))
if not key.is_absolute():
raise FileNotFoundError(
f"The build cache key '{key}' is not absolute"
)
raise FileNotFoundError(f"The build cache key '{key}' is not absolute")
if not key.is_file():
raise FileNotFoundError(
f"The build cache key '{key}' does not exist"
)
raise FileNotFoundError(f"The build cache key '{key}' does not exist")
raw["key"] = key

return raw


def generate_mirrors_yaml(config):
path = config['path'].as_posix()
path = config["path"].as_posix()
mirrors = {
'mirrors': {
'alpscache': {
'fetch': {
'url': f"file://{path}",
'access_pair': [None, None],
'access_token': None,
'profile': None,
'endpoint_url': None,
"mirrors": {
"alpscache": {
"fetch": {
"url": f"file://{path}",
"access_pair": [None, None],
"access_token": None,
"profile": None,
"endpoint_url": None,
},
"push": {
"url": f"file://{path}",
"access_pair": [None, None],
"access_token": None,
"profile": None,
"endpoint_url": None,
},
'push': {
'url': f"file://{path}",
'access_pair': [None, None],
'access_token': None,
'profile': None,
'endpoint_url': None,
}
}
}
}
Expand Down
38 changes: 24 additions & 14 deletions stackinator/etc/add-compiler-links.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,39 +4,47 @@

import argparse
import os

import yaml


# parse compilers.yaml file.
# return a list with the compiler descriptions from the yaml file.
def load_compilers_yaml(path):
with open(path, 'r') as file:
with open(path, "r") as file:
data = yaml.safe_load(file)
compilers = [c["compiler"] for c in data["compilers"]]
return compilers


def parse_export(line):
s = line.replace('=', ' ').split()
s = line.replace("=", " ").split()
var = s[1]
paths = None
if len(s)>2:
paths = s[2].rstrip(';').split(':')
if len(s) > 2:
paths = s[2].rstrip(";").split(":")
return {"variable": var, "paths": paths}


def split_line(line):
return line.strip().rstrip(';').replace('=', ' ').split()
return line.strip().rstrip(";").replace("=", " ").split()


def is_export(parts):
return len(parts)>1 and parts[0]=="export"
return len(parts) > 1 and parts[0] == "export"


def is_alias(parts):
return len(parts)>0 and parts[0]=="alias"
return len(parts) > 0 and parts[0] == "alias"


# Returns True if the given path is a descendant of prefix, False otherwise.
def has_prefix(path, prefix):
    """Return True if *path* is *prefix* itself or lies beneath it.

    Both arguments are resolved with os.path.realpath first, so symlinks
    and relative components are normalized before the comparison.
    """
    prefix = os.path.realpath(prefix)
    path = os.path.realpath(path)
    # Use os.path.commonpath, which compares whole path components.
    # The previous os.path.commonprefix works character-wise and would
    # wrongly report e.g. '/usr/local2' as being under '/usr/local'.
    return os.path.commonpath([path, prefix]) == prefix


parser = argparse.ArgumentParser()
parser.add_argument("compiler_path", help="Path to the compilers.yaml file")
parser.add_argument("activate_path", help="Path to the activate script to configure")
Expand All @@ -59,12 +67,12 @@ def has_prefix(path, prefix):

paths = []
for c in compilers:
local_paths = set([os.path.dirname(v) for k,v in c["paths"].items()])
local_paths = set([os.path.dirname(v) for k, v in c["paths"].items()])
paths += local_paths
print(f'adding compiler {c["spec"]} -> {[p for p in local_paths]}')

# find unique paths and concatenate them
pathstring = ':'.join(set(paths))
pathstring = ":".join(set(paths))

# Parse the spack env activation script line by line.
# Remove spack-specific environment variables and references the build path.
Expand All @@ -73,7 +81,7 @@ def has_prefix(path, prefix):
# etc. This may or may not be surprising for users, and we may have to append
# :$PATH, :$CPATH, etc.

lines=[]
lines = []
with open(args.activate_path) as fid:
for line in fid:
parts = split_line(line)
Expand All @@ -86,22 +94,24 @@ def has_prefix(path, prefix):

# parse PATH to remove references to the build directory
if export["variable"] == "PATH":
paths=[p for p in export["paths"] if not has_prefix(p, args.build_path)]
paths = [
p for p in export["paths"] if not has_prefix(p, args.build_path)
]
lines.append(f"export PATH={':'.join(paths)};\n")

# drop the SPACK_ENV variable
elif export["variable"] == "SPACK_ENV":
pass

else:
lines.append(line.strip()+"\n")
lines.append(line.strip() + "\n")
else:
lines.append(line.strip()+"\n")
lines.append(line.strip() + "\n")

# Prepend the compiler paths to PATH
lines.append("# compiler paths added by stackinator\n")
lines.append(f"export PATH={pathstring}:$PATH;\n")

# Write a modified version of the activation script.
with open(args.activate_path, 'w') as fid:
with open(args.activate_path, "w") as fid:
fid.writelines(lines)
Loading

0 comments on commit 0de7d22

Please sign in to comment.