Skip to content

Commit

Permalink
Merge pull request #2 from astrolabsoftware/816-deploy-fink-broker-on…
Browse files Browse the repository at this point in the history
…-fink-integration-platform

Implement improvements for fink-int platform
  • Loading branch information
fjammes authored Apr 5, 2024
2 parents fc6b0b0 + 2c26f2b commit c6421ee
Show file tree
Hide file tree
Showing 29 changed files with 427 additions and 193 deletions.
8 changes: 8 additions & 0 deletions .ciux
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
apiVersion: v1alpha1
registry: gitlab-registry.in2p3.fr/astrolabsoftware/fink
dependencies:
- package: github.com/k8s-school/[email protected]
labels:
itest: "optional"
ci: "true"

14 changes: 12 additions & 2 deletions .github/workflows/e2e.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,9 @@ on:
pull_request:
branches:
- main
env:
CIUXCONFIG: /tmp/ciux.sh
CIUX_VERSION: v0.0.2-rc1
jobs:
e2e:
name: e2e tests
Expand All @@ -15,9 +18,16 @@ jobs:
go-version: '^1.21.4'
- name: Checkout code
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Install ciux
run: go install github.com/k8s-school/ciux@"${{ env.CIUX_VERSION }}"
- name: Build finkctl
run: |
go install .
- name: Run raw2science test
- name: Ciux project ignition
run: |
ciux ignite --selector ci $PWD
- name: Run e2e test
run: |
./_e2e/raw2science.sh
./_e2e/e2e.sh
18 changes: 16 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,18 @@

# finkctl
CLI for fink on k8s
CLI tool for managing fink on Kubernetes.

To access documentation, run `finkctl -h`.

## Installation

Go 1.21+ is required.

`go install github.com/astrolabsoftware/finkctl/v3@<release_tag>`

## Configuration
To use finkctl, you need a configuration file. Set the FINKCONFIG environment variable to the directory containing `finkctl.yaml` and `finkctl.secret.yaml`. By default, it uses `$HOME/.fink`.

Run `finkctl -h` in order to access inline documentation.
Example configuration files:
- [finkctl.yaml](_e2e/finkctl.yaml)
- [finkctl.secret.yaml](_e2e/finkctl.secret.yaml)
4 changes: 3 additions & 1 deletion TODO.org
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
* TODO add unit test for "night" param, on topic name
* TODO add support for cpu/memory/instances config for each task, and not only at the run level
* TODO implement wait for spark task (distribute, raw2science, stream2raw)
* TODO implement finkctl run/delete for spark task
* DONE implement finkctl run/delete for spark task
* TODO Add option finkctl s3 makebucket --port-forward
30 changes: 30 additions & 0 deletions _e2e/distribution.out.expected
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
spark-submit --master "k8s://https://127.0.0.1:34729" \
--deploy-mode cluster \
--conf spark.kubernetes.namespace=default \
--conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
--conf spark.kubernetes.container.image="param_image" \
--conf spark.driver.extraJavaOptions="-Divy.cache.dir=/tmp -Divy.home=/tmp" \
--conf spark.hadoop.fs.s3a.endpoint=http://minio.minio:9000 \
--conf spark.hadoop.fs.s3a.access.key="minioadmin" \
--conf spark.hadoop.fs.s3a.secret.key="minioadmin" \
--conf spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2 \
--conf spark.hadoop.fs.s3a.connection.ssl.enabled=false \
--conf spark.hadoop.fs.s3a.fast.upload=true \
--conf spark.hadoop.fs.s3a.path.style.access=true \
--conf spark.hadoop.fs.s3a.aws.credentials.provider=org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider \
--conf spark.hadoop.fs.s3a.impl="org.apache.hadoop.fs.s3a.S3AFileSystem" \
--conf spark.kubernetes.executor.podTemplateFile=/tmp/fink-broker-3955890421/executor-pod-template.yaml \
--conf "spark.executor.extraJavaOptions=-Djava.security.auth.login.config=/etc/fink-broker/kafka-jaas.conf" \
--conf spark.driver.memory=2Gi \
--conf spark.executor.memory=2Gi \
--conf spark.kubernetes.driver.request.cores=1 \
--conf spark.kubernetes.executor.request.cores=1 \
local:///home/fink/fink-broker/bin/distribute.py \
-log_level "INFO" \
-online_data_prefix "s3a://fink-broker-online-20000101" \
-producer "sims" \
-tinterval "2" \
-distribution_servers "kafka-cluster-kafka-external-bootstrap.kafka:9094" \
-distribution_schema "/home/fink/fink-alert-schemas/ztf/distribution_schema_0p2.avsc" \
-substream_prefix "fink_" \
-night "20000101"
46 changes: 46 additions & 0 deletions _e2e/e2e.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
#!/bin/bash

# End-to-end test for finkctl: run each fink task (stream2raw, raw2science,
# distribution) in --dry-run mode and compare the generated spark-submit
# command against a checked-in expected-output file.

# Fail fast: exit on error or unset variable, trace commands, propagate
# pipeline failures.
set -euxo pipefail

# Absolute path of the directory containing this script.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

# finkctl reads finkctl.yaml and finkctl.secret.yaml from $FINKCONFIG.
export FINKCONFIG="$DIR"

# Append a minimal kubeconfig entry so finkctl can resolve the k8s API
# endpoint; --dry-run does not contact a live cluster.
mkdir -p $HOME/.kube
cat >> $HOME/.kube/config << EOF
apiVersion: v1
clusters:
- cluster:
server: https://127.0.0.1:34729
name: kind-kind
contexts:
- context:
cluster: kind-kind
user: kind-kind
name: kind-kind
current-context: kind-kind
kind: Config
preferences: {}
users:
- name: kind-kind
user:
EOF

# NOTE: 'ink' is a message-printing helper (k8s-school/ink dependency,
# declared in .ciux — confirm it is on PATH in CI).
ink "Check -N parameter parsing"
# An invalid night value must make finkctl exit non-zero; success here is a
# test failure.
if finkctl run raw2science --image=param_image -N 2020111101011 --dry-run
then
ink -r "Expected to fail with -N parameter"
exit 1
fi

ink "Check stream2raw dry-run"
finkctl run stream2raw --image=param_image -N 20000101 --dry-run > /tmp/stream2raw.out
diff /tmp/stream2raw.out $DIR/stream2raw.out.expected

ink "Check raw2science dry-run"
finkctl run raw2science --image=param_image -N 20000101 --dry-run > /tmp/raw2science.out
diff /tmp/raw2science.out $DIR/raw2science.out.expected

ink "Check distribution dry-run"
finkctl run distribution --image=param_image -N 20000101 --dry-run > /tmp/distribution.out
# The pod-template path embeds a random temp directory, so -I ignores that
# line when diffing against the expected output.
diff -I '^ --conf spark.kubernetes.executor.podTemplateFile=/tmp/fink-broker-[0-9]\+/executor-pod-template.yaml \\$' /tmp/distribution.out $DIR/distribution.out.expected
3 changes: 2 additions & 1 deletion _e2e/finkctl.secret.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
apiVersion: v1alpha2
s3:
id: "minioadmin"
secret: "minioadmin"
Expand All @@ -6,5 +7,5 @@ distribution:
username: "fink-producer"
# If empty, password is set to "kubectl get -n kafka secrets/fink-producer --template={{.data.password}} | base64 --decode"
# this is used for integration tests and CI which use a local kafka cluster
password: ""
password: "changeme"

48 changes: 37 additions & 11 deletions _e2e/finkctl.yaml
Original file line number Diff line number Diff line change
@@ -1,34 +1,60 @@
apiVersion: v1alpha1
s3:
endpoint: http://minio.minio:9000
use_ssl: "false"
bucket: fink-broker-online
apiVersion: v1alpha2
#
# General parameter used to run fink tasks
#
run:
cpu: 1
# Default to spark-submit.sh default values
# Can be overriden in stream2raw, raw2science and distribution sections
cpus: 1
memory: 1Gi
# instances: 1

fink_trigger_update: "2"

# Can be overriden using --image option
image: gitlab-registry.in2p3.fr/astrolabsoftware/fink/fink-broker:2.7.1-33-ge27a2aa-dirty

# Default to s3a://<s3.bucket>
# online_data_prefix: s3a://fink-broker-online
producer: sims

log_level: INFO

# Can be overriden using --night or --tonight options
night: 20240101
#
# Parameters used to run the stream2raw task
#
stream2raw:
fink_alert_schema: /home/fink/fink-alert-schemas/ztf/ztf_public_20190903.schema.avro
kafka_socket: kafka-cluster-kafka-bootstrap.kafka:9092
kafka_starting_offset: earliest
kafka_topic: ztf-stream-sim

# {{.Night}} is optional and will be replaced by the value of run.night
kafka_topic: ztf-stream-{{.Night}}
#
# Parameters used to run the raw2science task
#
raw2science:
night: "20200101"
# Override the default value of run.instances
instances: 4
memory: 3Gi
#
# Parameters used to run the distribution task
#
distribution:
cpu: 2
memory: 2Gi

# Comma-separated list of kafka servers, default to stream2raw.kafka_socket
distribution_servers: "kafka-cluster-kafka-external-bootstrap.kafka:9094"
distribution_schema: "/home/fink/fink-alert-schemas/ztf/distribution_schema_0p2.avsc"
substream_prefix: "fink_"
# Default to <stream2raw.night>
# night: "20200101"
#
# Parameters used to access the S3 bucket
#
s3:
endpoint: http://minio.minio:9000
use_ssl: "false"
# {{.Night}} is optional and will be replaced by the value of run.night
bucket: fink-broker-online-{{.Night}}

6 changes: 3 additions & 3 deletions _e2e/raw2science.out.expected
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
spark-submit --master "k8s://https://127.0.0.1:34729" \
--deploy-mode cluster \
--conf spark.executor.instances=1 \
--conf spark.kubernetes.namespace=default \
--conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
--conf spark.kubernetes.container.image="param_image" \
Expand All @@ -14,13 +13,14 @@ spark-submit --master "k8s://https://127.0.0.1:34729" \
--conf spark.hadoop.fs.s3a.path.style.access=true \
--conf spark.hadoop.fs.s3a.aws.credentials.provider=org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider \
--conf spark.hadoop.fs.s3a.impl="org.apache.hadoop.fs.s3a.S3AFileSystem" \
--conf spark.executor.instances=4 \
--conf spark.driver.memory=3Gi \
--conf spark.executor.memory=3Gi \
--conf spark.kubernetes.driver.request.cores=1 \
--conf spark.kubernetes.executor.request.cores=1 \
local:///home/fink/fink-broker/bin/raw2science.py \
-log_level "INFO" \
-online_data_prefix "s3a://fink-broker-online" \
-online_data_prefix "s3a://fink-broker-online-20000101" \
-producer "sims" \
-tinterval "2" \
-night "20200101"
-night "20000101"
31 changes: 0 additions & 31 deletions _e2e/raw2science.sh

This file was deleted.

28 changes: 28 additions & 0 deletions _e2e/stream2raw.out.expected
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
spark-submit --master "k8s://https://127.0.0.1:34729" \
--deploy-mode cluster \
--conf spark.kubernetes.namespace=default \
--conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
--conf spark.kubernetes.container.image="param_image" \
--conf spark.driver.extraJavaOptions="-Divy.cache.dir=/tmp -Divy.home=/tmp" \
--conf spark.hadoop.fs.s3a.endpoint=http://minio.minio:9000 \
--conf spark.hadoop.fs.s3a.access.key="minioadmin" \
--conf spark.hadoop.fs.s3a.secret.key="minioadmin" \
--conf spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2 \
--conf spark.hadoop.fs.s3a.connection.ssl.enabled=false \
--conf spark.hadoop.fs.s3a.fast.upload=true \
--conf spark.hadoop.fs.s3a.path.style.access=true \
--conf spark.hadoop.fs.s3a.aws.credentials.provider=org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider \
--conf spark.hadoop.fs.s3a.impl="org.apache.hadoop.fs.s3a.S3AFileSystem" \
--conf spark.driver.memory=1Gi \
--conf spark.executor.memory=1Gi \
--conf spark.kubernetes.driver.request.cores=1 \
--conf spark.kubernetes.executor.request.cores=1 \
local:///home/fink/fink-broker/bin/stream2raw.py \
-log_level "INFO" \
-online_data_prefix "s3a://fink-broker-online-20000101" \
-producer "sims" \
-tinterval "2" \
-servers "kafka-cluster-kafka-bootstrap.kafka:9092" \
-schema "/home/fink/fink-alert-schemas/ztf/ztf_public_20190903.schema.avro" \
-startingoffsets_stream "earliest" \
-topic "ztf-stream-20000101"
3 changes: 2 additions & 1 deletion cmd/delete.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ package cmd
import (
"context"
"fmt"
"log/slog"
"os"

"github.com/spf13/cobra"
Expand Down Expand Up @@ -36,7 +37,7 @@ var deleteCmd = &cobra.Command{
fmt.Fprintf(os.Stderr, "error: unable to delete spark pod %s, reason: %s\n", p.Name, err)
os.Exit(1)
}
logger.Infof("Delete pod %s", p.Name)
slog.Info("Delete pod", "podName", p.Name)
}
},
}
Expand Down
3 changes: 2 additions & 1 deletion cmd/get_topic.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ package cmd

import (
"fmt"
"log/slog"
"strings"

"github.com/spf13/cobra"
Expand All @@ -16,7 +17,7 @@ var getTopicCmd = &cobra.Command{
Aliases: []string{"to", "topics"},
Short: "List kafka topics produced by the fink-broker",
Run: func(cmd *cobra.Command, args []string) {
logger.Info("List kafka topics produced by the fink-broker")
slog.Info("List kafka topics produced by the fink-broker")
topics, err := getFinkTopics()
cobra.CheckErr(err)
if len(topics) == 0 {
Expand Down
Loading

0 comments on commit c6421ee

Please sign in to comment.