Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

CI: fix save benchmark #4812

Merged
merged 9 commits into from
May 7, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 7 additions & 4 deletions .github/actions/sysbench/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,12 @@ inputs:
description: 'file test mode'
required: true
default: 'seqrd'
mysql_password:
description: 'mysql password'
aws_access_key_id:
description: 'aws access key id'
required: true
default: ''
aws_access_key_secret:
description: 'aws access key secret'
required: true
default: ''
meta:
Expand Down Expand Up @@ -52,6 +56,5 @@ runs:
runtime=$((end-start))
echo "cost $runtime seconds"
cd -
export MYSQL_PASSWORD=${{inputs.mysql_password}}
version=$(./juicefs -V|cut -b 17- | sed 's/:/-/g')
.github/scripts/save_benchmark.sh --name ${{inputs.name}} --result $runtime --meta ${{inputs.meta}} --storage ${{inputs.storage}} --extra ${{inputs.compress}}
AWS_ACCESS_KEY_ID=${{inputs.aws_access_key_id}} AWS_SECRET_ACCESS_KEY=${{inputs.aws_access_key_secret}} .github/scripts/save_benchmark.sh --name ${{inputs.name}} --result $runtime --meta ${{inputs.meta}} --storage ${{inputs.storage}} --extra ${{inputs.compress}}
10 changes: 5 additions & 5 deletions .github/scripts/command/load_dump_bench.sh
Original file line number Diff line number Diff line change
Expand Up @@ -22,14 +22,14 @@ test_load_dump_with_small_dir(){
end=`date +%s`
runtime=$((end-start))
version=$(./juicefs -V|cut -b 17- | sed 's/:/-/g')
.github/scripts/save_benchmark.sh --name load_small_dir --result $runtime --meta $META --storage file
.github/scripts/save_benchmark.sh --category load_dump --name load_small_dir --result $runtime --meta $META --storage file
echo "load cost $runtime seconds"
start=`date +%s`
./juicefs dump $META_URL dump.json --fast
end=`date +%s`
runtime=$((end-start))
echo "dump cost $runtime seconds"
.github/scripts/save_benchmark.sh --name dump_small_dir --result $runtime --meta $META --storage file
.github/scripts/save_benchmark.sh --category load_dump --name dump_small_dir --result $runtime --meta $META --storage file
./juicefs mount $META_URL /jfs -d --no-usage-report
inode=$(df -i /jfs | grep JuiceFS |awk -F" " '{print $3}')
if [ "$inode" -ne "2233313" ]; then
Expand Down Expand Up @@ -60,7 +60,7 @@ do_load_dump_with_big_dir(){
runtime=$((end-start))
echo "load cost $runtime seconds"
version=$(./juicefs -V|cut -b 17- | sed 's/:/-/g')
.github/scripts/save_benchmark.sh --name load_big_dir --result $runtime --meta $META --storage file
.github/scripts/save_benchmark.sh --category load_dump --name load_big_dir --result $runtime --meta $META --storage file
start=`date +%s`
if [ "$with_subdir" = true ] ; then
./juicefs dump $META_URL dump.json --subdir test --fast
Expand All @@ -70,7 +70,7 @@ do_load_dump_with_big_dir(){
end=`date +%s`
runtime=$((end-start))
echo "dump cost $runtime seconds"
.github/scripts/save_benchmark.sh --name dump_big_dir --result $runtime --meta $META --storage file
.github/scripts/save_benchmark.sh --category load_dump --name dump_big_dir --result $runtime --meta $META --storage file
./juicefs mount $META_URL /jfs -d --no-usage-report
df -i /jfs
inode=$(df -i /jfs | grep JuiceFS |awk -F" " '{print $3}')
Expand All @@ -89,7 +89,7 @@ test_list_with_big_dir(){
runtime=$((end-start))
echo "list cost $runtime seconds"
version=$(./juicefs -V|cut -b 17- | sed 's/:/-/g')
.github/scripts/save_benchmark.sh --name list_big_dir --result $runtime --meta $META --storage file
.github/scripts/save_benchmark.sh --category list --name list_big_dir --result $runtime --meta $META --storage file
if [ "$file_count" -ne "1000001" ]; then
echo "<FATAL>: file_count error: $file_count"
exit 1
Expand Down
2 changes: 1 addition & 1 deletion .github/scripts/fio.sh
Original file line number Diff line number Diff line change
Expand Up @@ -101,4 +101,4 @@ echo bandwidth is $bandwidth
meta=$(echo $meta_url | awk -F: '{print $1}')
echo meta is $meta
[[ -z "$meta" ]] && echo "meta is empty" && exit 1
.github/scripts/save_benchmark.sh --name $name --result $bandwidth --meta $meta --storage $storage minio
.github/scripts/save_benchmark.sh --category fio --name $name --result $bandwidth --meta $meta --storage minio
7 changes: 6 additions & 1 deletion .github/scripts/save_benchmark.sh
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,10 @@ save_benchmark(){
while [[ $# -gt 0 ]]; do
key="$1"
case $key in
--category)
category="$2"
shift
;;
--name)
name="$2"
shift
Expand Down Expand Up @@ -39,6 +43,7 @@ save_benchmark(){
created_date=$(date +"%Y-%m-%d")
cat <<EOF > result.json
{
"category": "$category",
"name": "$name",
"result": "$result",
"meta": "$meta",
Expand All @@ -54,7 +59,7 @@ save_benchmark(){
}
EOF
cat result.json
# AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID AWS_ACCESS_KEY_SECRET=$AWS_ACCESS_KEY_SECRET ./juicefs sync --force-update result.json s3://juicefs-ci-aws.s3.us-east-1.amazonaws.com/ci-report/fio-test/$created_date/$name
AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY ./juicefs sync --force-update result.json s3://juicefs-ci-aws.s3.us-east-1.amazonaws.com/ci-report/$category/$name/$created_date/$meta-$storage.json
}

save_benchmark $@
10 changes: 4 additions & 6 deletions .github/workflows/fio_benchmark.yml
Original file line number Diff line number Diff line change
Expand Up @@ -68,16 +68,14 @@ jobs:
source .github/scripts/start_meta_engine.sh
start_meta_engine ${{matrix.meta}} minio

# - name: Test upload
# run: |
# echo abc > test.json
# AWS_ACCESS_KEY_ID=${{secrets.CI_AWS_ACCESS_KEY_ID}} AWS_ACCESS_KEY_SECRET=${{secrets.CI_AWS_ACCESS_KEY_SECRET}} ./juicefs sync --force-update test.json s3://juicefs-ci-aws.s3.us-east-1.amazonaws.com/ci-report/fio-test/test.json

- name: Fio Benchmark
env:
AWS_ACCESS_KEY_ID: ${{secrets.CI_AWS_ACCESS_KEY_ID}}
AWS_SECRET_ACCESS_KEY: ${{secrets.CI_AWS_ACCESS_KEY_SECRET}}
run: |
source .github/scripts/start_meta_engine.sh
meta_url=$(get_meta_url ${{matrix.meta}})
AWS_ACCESS_KEY_ID=${{secrets.CI_AWS_ACCESS_KEY_ID}} AWS_ACCESS_KEY_SECRET=${{secrets.CI_AWS_ACCESS_KEY_SECRET}} .github/scripts/fio.sh $meta_url ${{matrix.fio_job}}
.github/scripts/fio.sh $meta_url ${{matrix.fio_job}}

- name: log
if: always()
Expand Down
30 changes: 25 additions & 5 deletions .github/workflows/load.yml
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ jobs:
if [ "${{github.event_name}}" == "schedule" ] || [ "${{github.event_name}}" == "workflow_dispatch" ]; then
echo 'meta_matrix=["sqlite3", "redis", "mysql", "tikv", "tidb", "postgres", "mariadb", "fdb"]' >> $GITHUB_OUTPUT
else
echo 'meta_matrix=["redis", "mysql", "tikv"]' >> $GITHUB_OUTPUT
echo 'meta_matrix=["redis", "mysql"]' >> $GITHUB_OUTPUT
# echo 'meta_matrix=["redis"]' >> $GITHUB_OUTPUT
fi
outputs:
Expand Down Expand Up @@ -87,24 +87,44 @@ jobs:

- name: Load and dump with small directory
timeout-minutes: 30
env:
AWS_ACCESS_KEY_ID: ${{secrets.CI_AWS_ACCESS_KEY_ID}}
AWS_SECRET_ACCESS_KEY: ${{secrets.CI_AWS_ACCESS_KEY_SECRET}}
META: ${{matrix.meta}}
START_META: true
run: |
sudo MYSQL_PASSWORD=${{secrets.MYSQL_PASSWORD_FOR_JUICEDATA}} META=${{matrix.meta}} START_META=true .github/scripts/command/load_dump_bench.sh test_load_dump_with_small_dir
sudo -E .github/scripts/command/load_dump_bench.sh test_load_dump_with_small_dir

- name: Load and dump with big directory
timeout-minutes: 30
env:
AWS_ACCESS_KEY_ID: ${{secrets.CI_AWS_ACCESS_KEY_ID}}
AWS_SECRET_ACCESS_KEY: ${{secrets.CI_AWS_ACCESS_KEY_SECRET}}
META: ${{matrix.meta}}
START_META: false
run: |
sudo MYSQL_PASSWORD=${{secrets.MYSQL_PASSWORD_FOR_JUICEDATA}} META=${{matrix.meta}} START_META=false .github/scripts/command/load_dump_bench.sh test_load_dump_with_big_dir
sudo -E .github/scripts/command/load_dump_bench.sh test_load_dump_with_big_dir

- name: Load and dump subdir with big directory
if: false
timeout-minutes: 30
env:
AWS_ACCESS_KEY_ID: ${{secrets.CI_AWS_ACCESS_KEY_ID}}
AWS_SECRET_ACCESS_KEY: ${{secrets.CI_AWS_ACCESS_KEY_SECRET}}
META: ${{matrix.meta}}
START_META: false
run: |
sudo MYSQL_PASSWORD=${{secrets.MYSQL_PASSWORD_FOR_JUICEDATA}} META=${{matrix.meta}} START_META=false .github/scripts/command/load_dump_bench.sh test_load_dump_with_big_dir_subdir
sudo -E .github/scripts/command/load_dump_bench.sh test_load_dump_with_big_dir_subdir

- name: List big directory
timeout-minutes: 30
env:
AWS_ACCESS_KEY_ID: ${{secrets.CI_AWS_ACCESS_KEY_ID}}
AWS_SECRET_ACCESS_KEY: ${{secrets.CI_AWS_ACCESS_KEY_SECRET}}
META: ${{matrix.meta}}
START_META: false
run: |
sudo MYSQL_PASSWORD=${{secrets.MYSQL_PASSWORD_FOR_JUICEDATA}} META=${{matrix.meta}} START_META=false .github/scripts/command/load_dump_bench.sh test_list_with_big_dir
sudo -E .github/scripts/command/load_dump_bench.sh test_list_with_big_dir

- name: log
if: always()
Expand Down
32 changes: 26 additions & 6 deletions .github/workflows/sysbench.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,20 @@
name: "sysbench"

on:
push:
branches:
- 'main'
- 'release-**'
paths:
- '**/sysbench.yml'
- '**/sysbench/action.yml'
pull_request:
branches:
- 'main'
- 'release-**'
paths:
- '**/sysbench.yml'
- '**/sysbench/action.yml'
schedule:
- cron: '30 20 * * *'
workflow_dispatch:
Expand Down Expand Up @@ -86,7 +100,8 @@ jobs:
- name: seq read 1M
uses: ./.github/actions/sysbench
with:
mysql_password: ${{secrets.MYSQL_PASSWORD_FOR_JUICEDATA}}
aws_access_key_id: ${{secrets.CI_AWS_ACCESS_KEY_ID}}
aws_access_key_secret: ${{secrets.CI_AWS_ACCESS_KEY_SECRET}}
file_num: 1000
file_total_size: '1G'
file_test_mode: 'seqrd'
Expand All @@ -98,7 +113,8 @@ jobs:
- name: seq write 1M
uses: ./.github/actions/sysbench
with:
mysql_password: ${{secrets.MYSQL_PASSWORD_FOR_JUICEDATA}}
aws_access_key_id: ${{secrets.CI_AWS_ACCESS_KEY_ID}}
aws_access_key_secret: ${{secrets.CI_AWS_ACCESS_KEY_SECRET}}
file_num: 1000
file_total_size: '1G'
file_test_mode: 'seqwr'
Expand All @@ -110,7 +126,8 @@ jobs:
- name: seq read 100K
uses: ./.github/actions/sysbench
with:
mysql_password: ${{secrets.MYSQL_PASSWORD_FOR_JUICEDATA}}
aws_access_key_id: ${{secrets.CI_AWS_ACCESS_KEY_ID}}
aws_access_key_secret: ${{secrets.CI_AWS_ACCESS_KEY_SECRET}}
file_num: 10000
file_total_size: '1G'
file_test_mode: 'seqrd'
Expand All @@ -122,7 +139,8 @@ jobs:
- name: seq write 100K
uses: ./.github/actions/sysbench
with:
mysql_password: ${{secrets.MYSQL_PASSWORD_FOR_JUICEDATA}}
aws_access_key_id: ${{secrets.CI_AWS_ACCESS_KEY_ID}}
aws_access_key_secret: ${{secrets.CI_AWS_ACCESS_KEY_SECRET}}
file_num: 10000
file_total_size: '1G'
file_test_mode: 'seqwr'
Expand All @@ -134,7 +152,8 @@ jobs:
- name: random readwrite 1M
uses: ./.github/actions/sysbench
with:
mysql_password: ${{secrets.MYSQL_PASSWORD_FOR_JUICEDATA}}
aws_access_key_id: ${{secrets.CI_AWS_ACCESS_KEY_ID}}
aws_access_key_secret: ${{secrets.CI_AWS_ACCESS_KEY_SECRET}}
file_num: 1000
file_total_size: '1G'
file_test_mode: 'rndrw'
Expand All @@ -146,7 +165,8 @@ jobs:
- name: random readwrite 100K
uses: ./.github/actions/sysbench
with:
mysql_password: ${{secrets.MYSQL_PASSWORD_FOR_JUICEDATA}}
aws_access_key_id: ${{secrets.CI_AWS_ACCESS_KEY_ID}}
aws_access_key_secret: ${{secrets.CI_AWS_ACCESS_KEY_SECRET}}
file_num: 10000
file_total_size: '1G'
file_test_mode: 'rndrw'
Expand Down
Loading