forked from SwissDataScienceCenter/renku-data-services
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Makefile
169 lines (147 loc) · 12.5 KB
/
Makefile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
# Declare all command-style targets as phony so a same-named file on disk can
# never mask them. (`debug` and `amalthea_schema` were declared below but
# missing from this list; `help` has its own .PHONY declaration further down.)
.PHONY: schemas tests test_setup main_tests schemathesis_tests collect_coverage style_checks pre_commit_checks run debug download_avro check_avro avro_models update_avro k3d_cluster install_amaltheas amalthea_schema all
# Amalthea chart versions; ?= lets the caller override from the command line
# or environment (e.g. `make install_amaltheas AMALTHEA_JS_VERSION=0.14.0`).
AMALTHEA_JS_VERSION ?= 0.13.0
AMALTHEA_SESSIONS_VERSION ?= 0.13.0
# Shared flags for every datamodel-codegen invocation that turns an OpenAPI
# spec into pydantic v2 models. (The duplicated --set-default-enum-member
# flag was removed; it appeared twice.)
codegen_params = --input-file-type openapi --output-model-type pydantic_v2.BaseModel --use-double-quotes --target-python-version 3.12 --collapse-root-models --field-constraints --strict-nullable --set-default-enum-member --openapi-scopes schemas paths parameters --use-one-literal-as-default --use-default
# test_apispec_up_to_date(NAME): regenerate component NAME's apispec.py in
# place, diff it against the committed version (ignoring the generated
# "# timestamp:" line), restore the committed file, and exit with the diff
# status so `make` fails when the apispec is stale.
#
# NOTE: the original version set RESULT on its own recipe line; since every
# recipe line runs in a separate shell, the value never reached `exit`, and
# `$?`/`${RESULT}` were expanded by make (not the shell) because they were not
# escaped as `$$`. The diff/capture/restore/exit steps must therefore be one
# shell invocation, chained with `; \`.
define test_apispec_up_to_date
	$(eval $@_NAME=$(1))
	cp "components/renku_data_services/${$@_NAME}/apispec.py" "/tmp/apispec_orig.py"
	poetry run datamodel-codegen --input components/renku_data_services/${$@_NAME}/api.spec.yaml --output components/renku_data_services/${$@_NAME}/apispec.py --base-class renku_data_services.${$@_NAME}.apispec_base.BaseAPISpec $(codegen_params)
	diff -I "^# timestamp\: " "/tmp/apispec_orig.py" "components/renku_data_services/${$@_NAME}/apispec.py"; \
	RESULT=$$?; \
	cp "/tmp/apispec_orig.py" "components/renku_data_services/${$@_NAME}/apispec.py"; \
	exit $$RESULT
endef
# Default goal: running bare `make` just prints the help screen.
all: help
# Every component's apispec.py is generated identically from its
# api.spec.yaml, so one pattern rule replaces the thirteen copy-pasted rules.
# The stem ($*) is the component name (crc, storage, users, project, session,
# namespace, secrets, connected_services, repositories, notebooks, platform,
# message_queue, data_connectors) and selects the matching apispec_base
# module for --base-class.
components/renku_data_services/%/apispec.py: components/renku_data_services/%/api.spec.yaml
	poetry run datamodel-codegen --input $< --output $@ --base-class renku_data_services.$*.apispec_base.BaseAPISpec $(codegen_params)
##@ Apispec
# Components with a generated apispec module. Kept in one variable so the
# `schemas` rule header fits on a single line: previously the `## ...` help
# annotation sat on a backslash-continuation line, so the awk-based `help`
# target never displayed this entry.
APISPEC_COMPONENTS := crc storage users project session namespace secrets connected_services repositories notebooks platform message_queue data_connectors
APISPEC_FILES := $(foreach c,$(APISPEC_COMPONENTS),components/renku_data_services/$(c)/apispec.py)

schemas: $(APISPEC_FILES) ## Generate pydantic classes from apispec yaml files
	@echo "generated classes based on ApiSpec"
##@ Avro schemas
download_avro: ## Download the latest avro schema files
	@echo "Downloading avro schema files"
# --fail makes curl exit non-zero on an HTTP error instead of silently saving
# the error page as schemas.tar.gz (which tar would then choke on).
	curl --fail -L -o schemas.tar.gz https://github.com/SwissDataScienceCenter/renku-schema/tarball/main
	tar xf schemas.tar.gz --directory=components/renku_data_services/message_queue/schemas/ --strip-components=1
	rm schemas.tar.gz
check_avro: download_avro avro_models ## Download avro schemas, generate models and check if the avro schemas are up to date
	@echo "checking if avro schemas are up to date"
# If the working tree is dirty after regeneration, print the diff (for CI
# logs) and fail; --exit-code alone would fail without showing what changed.
	git diff --exit-code || (git diff && exit 1)
avro_models: ## Generate message queue classes and code from the avro schemas
	@echo "generating message queues classes from avro schemas"
	poetry run python components/renku_data_services/message_queue/generate_models.py
update_avro: download_avro avro_models ## Download avro schemas and generate models
##@ Test and linting
# Runs all static checks: poetry metadata, apispec freshness for every
# component, mypy, ruff (format + lint), bandit, and polylith checks.
# NOTE: the `secrets` component was previously missing from the apispec
# freshness checks even though it has a generated apispec — added below.
style_checks: ## Run linting and style checks
	poetry check
	@echo "checking crc apispec is up to date"
	@$(call test_apispec_up_to_date,"crc")
	@echo "checking storage apispec is up to date"
	@$(call test_apispec_up_to_date,"storage")
	@echo "checking users apispec is up to date"
	@$(call test_apispec_up_to_date,"users")
	@echo "checking project apispec is up to date"
	@$(call test_apispec_up_to_date,"project")
	@echo "checking namespace apispec is up to date"
	@$(call test_apispec_up_to_date,"namespace")
	@echo "checking secrets apispec is up to date"
	@$(call test_apispec_up_to_date,"secrets")
	@echo "checking connected_services apispec is up to date"
	@$(call test_apispec_up_to_date,"connected_services")
	@echo "checking repositories apispec is up to date"
	@$(call test_apispec_up_to_date,"repositories")
	@echo "checking notebooks apispec is up to date"
	@$(call test_apispec_up_to_date,"notebooks")
	@echo "checking platform apispec is up to date"
	@$(call test_apispec_up_to_date,"platform")
	@echo "checking message_queue apispec is up to date"
	@$(call test_apispec_up_to_date,"message_queue")
	@echo "checking session apispec is up to date"
	@$(call test_apispec_up_to_date,"session")
	@echo "checking data connectors apispec is up to date"
	@$(call test_apispec_up_to_date,"data_connectors")
	poetry run mypy
	poetry run ruff format --check
	poetry run ruff check .
	poetry run bandit -c pyproject.toml -r .
	poetry poly check
	poetry poly libs
test_setup: ## Prep for the tests - removes old coverage reports if one is present
	@rm -f coverage.lcov .coverage
main_tests: ## Run the main (i.e. non-schemathesis tests)
# Apply all alembic migrations against the dummy stores, verify the models
# match the migration heads, then run everything except schemathesis tests.
	DUMMY_STORES=true poetry run alembic --name common upgrade heads
	poetry run alembic --name common check
	poetry run pytest -m "not schemathesis" -n auto -v
schemathesis_tests: ## Run schemathesis checks
# --cov-append folds this run's coverage into main_tests' data for the
# combined report produced by collect_coverage.
	poetry run pytest -m "schemathesis" --cov-append
collect_coverage: ## Collect test coverage reports
	poetry run coverage report --show-missing
	poetry run coverage lcov -o coverage.lcov
# NOTE(review): this relies on prerequisites running left-to-right
# (setup -> tests -> coverage); under `make -j` that order is not guaranteed.
tests: test_setup main_tests schemathesis_tests collect_coverage ## Run all tests
pre_commit_checks: ## Run pre-commit checks
	poetry run pre-commit run --all-files
##@ General
run: ## Run the sanic server
# DUMMY_STORES=true runs the API against in-memory stores instead of real
# backing services.
	DUMMY_STORES=true poetry run python bases/renku_data_services/data_api/main.py --dev --debug
debug: ## Debug the sanic server
# Starts sanic single-process under debugpy and blocks until a debugger
# attaches on port 5678; -Xfrozen_modules=off keeps breakpoints working in
# frozen stdlib modules.
	DUMMY_STORES=true poetry run python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:5678 --wait-for-client -m sanic renku_data_services.data_api.main:create_app --debug --single-process --port 8000 --host 0.0.0.0
# From the operator sdk Makefile
# The help target prints out all targets with their descriptions organized
# beneath their categories. The categories are represented by '##@' and the
# target descriptions by '##'. The awk command is responsible for reading the
# entire set of makefiles included in this invocation, looking for lines of the
# file as xyz: ## something, and then pretty-format the target and help. Then,
# if there's a line with ##@ something, that gets pretty-printed as a category.
# More info on the usage of ANSI control characters for terminal formatting:
# https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters
# More info on the awk command:
# http://linuxcommand.org/lc3_adv_awk.php
# NOTE: only single-line rule headers are picked up; a '##' comment on a
# backslash-continuation line will not appear in the help output.
.PHONY: help
help: ## Display this help.
	@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-25s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
##@ Helm/k8s
k3d_cluster: ## Creates a k3d cluster for testing
# The leading '-' ignores the delete's exit status so a fresh machine (no
# existing cluster to delete) doesn't abort the recipe.
	-k3d cluster delete
	k3d cluster create --agents 1 --k3s-arg --disable=metrics-server@server:0
install_amaltheas: ## Installs both version of amalthea in the. NOTE: It uses the currently active k8s context.
	helm repo add renku https://swissdatasciencecenter.github.io/helm-charts
	helm repo update
	helm upgrade --install amalthea-js renku/amalthea --version $(AMALTHEA_JS_VERSION)
	helm upgrade --install amalthea-se renku/amalthea-sessions --version $(AMALTHEA_SESSIONS_VERSION)
# TODO: Add the version variables from the top of the file here when the charts are fully published
# TODO: Add the version variables from the top of the file here when the charts are fully published
# Pull the CRD schemas straight from the amalthea repo, extract the OpenAPI
# schema of the first served version with yq, and regenerate the pydantic CRD
# models. --fail makes curl exit non-zero on HTTP errors rather than piping an
# error page into yq (note: the default shell does not use pipefail, so the
# pipeline status still comes from the last command).
amalthea_schema: ## Generates pydantic classes from CRDs
	curl --fail https://raw.githubusercontent.com/SwissDataScienceCenter/amalthea/main/config/crd/bases/amalthea.dev_amaltheasessions.yaml | yq '.spec.versions[0].schema.openAPIV3Schema' | poetry run datamodel-codegen --input-file-type jsonschema --output-model-type pydantic_v2.BaseModel --output components/renku_data_services/notebooks/cr_amalthea_session.py --use-double-quotes --target-python-version 3.12 --collapse-root-models --field-constraints --strict-nullable --base-class renku_data_services.notebooks.cr_base.BaseCRD --allow-extra-fields --use-default-kwarg
	curl --fail https://raw.githubusercontent.com/SwissDataScienceCenter/amalthea/main/controller/crds/jupyter_server.yaml | yq '.spec.versions[0].schema.openAPIV3Schema' | poetry run datamodel-codegen --input-file-type jsonschema --output-model-type pydantic_v2.BaseModel --output components/renku_data_services/notebooks/cr_jupyter_server.py --use-double-quotes --target-python-version 3.12 --collapse-root-models --field-constraints --strict-nullable --base-class renku_data_services.notebooks.cr_base.BaseCRD --allow-extra-fields --use-default-kwarg