From 7416cdba01e96b65e20ab6b1647a137c54e59c9a Mon Sep 17 00:00:00 2001
From: Pablo Olivares
Date: Tue, 28 May 2024 21:03:56 +0200
Subject: [PATCH] Scripts to run on the GPU cluster (advances #26)

---
 Makefile                       |  2 +-
 scripts/create_configs.sh      | 40 ++++++++++++++++++++++++++++++++++
 scripts/find_learning_rates.sh | 23 +++++++++++++++++++
 3 files changed, 64 insertions(+), 1 deletion(-)
 create mode 100644 scripts/create_configs.sh
 create mode 100644 scripts/find_learning_rates.sh

diff --git a/Makefile b/Makefile
index 23b2b79..cca5d9f 100644
--- a/Makefile
+++ b/Makefile
@@ -20,7 +20,7 @@ ifndef CONDA
 endif
 	@echo "Creating the Conda environment if it doesn't exist..."
 	@conda env list | grep -q '^$(ENV_NAME) ' || \
-	conda create --yes --name $(ENV_NAME) python=$(PYTHON_VERSION)
+	conda create --yes --prefix $(ENV_NAME) python=$(PYTHON_VERSION)
 
 # Install packages from an environment file or manually specified
 .PHONY: install
diff --git a/scripts/create_configs.sh b/scripts/create_configs.sh
new file mode 100644
index 0000000..9997f8a
--- /dev/null
+++ b/scripts/create_configs.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+
+# Define the ranges for each parameter
+learning_rates=(0.0001 0.001 0.01)
+batch_sizes=(16 32 64)
+num_epochs=(2 5 10)
+optimizers=(Adam RMSProp SGD)
+loss_functions=(CrossEntropyLoss MeanSquaredError)
+
+# Define the config file and output directory
+config_file="config.yaml"
+output_dir="experiments"
+
+# Create the output directory if it doesn't exist
+mkdir -p "$output_dir"
+
+# Loop through each combination of parameters
+for lr in "${learning_rates[@]}"; do
+  for bs in "${batch_sizes[@]}"; do
+    for ne in "${num_epochs[@]}"; do
+      for opt in "${optimizers[@]}"; do
+        for lf in "${loss_functions[@]}"; do
+          # Create a new config file with the modified parameters
+          new_config_file="$output_dir/config_lr_${lr}_bs_${bs}_ne_${ne}_opt_${opt}_lf_${lf}.yaml"
+          cp "$config_file" "$new_config_file"
+
+          # Modify the parameters in the new config file
+          sed -i "s/learning_rate:.*/learning_rate: $lr/" "$new_config_file"
+          sed -i "s/batch_size:.*/batch_size: $bs/" "$new_config_file"
+          sed -i "s/num_epochs:.*/num_epochs: $ne/" "$new_config_file"
+          sed -i "s/optimizer:.*/optimizer:\n  type: \"$opt\"/" "$new_config_file"
+          sed -i "s/loss_function:.*/loss_function:\n  type: \"$lf\"/" "$new_config_file"
+
+          # Run the training process with the new config file
+          python train.py --config "$new_config_file"
+        done
+      done
+    done
+  done
+done
\ No newline at end of file
diff --git a/scripts/find_learning_rates.sh b/scripts/find_learning_rates.sh
new file mode 100644
index 0000000..26137bb
--- /dev/null
+++ b/scripts/find_learning_rates.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+#SBATCH --job-name LR_EfficientNetB0  # Job name
+
+#SBATCH --partition dios              # Queue to submit to
+
+#SBATCH --gres=gpu:1                  # Number of GPUs to use
+
+
+
+export PATH="/opt/anaconda/anaconda3/bin:$PATH"
+
+export PATH="/opt/anaconda/bin:$PATH"
+
+eval "$(conda shell.bash hook)"
+
+conda activate /mnt/homeGPU/polivares/tda-nn-separability
+
+export TFHUB_CACHE_DIR=.
+
+python find_learning_rates.py
+
+mail -s "Process finished" pablolivares@correo.ugr.es <<< "The process has finished"
\ No newline at end of file
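
A note on scripts/create_configs.sh: the sed substitutions assume each key sits on its own line in config.yaml, the \n in the optimizer/loss_function replacements is honored by GNU sed but not by BSD sed, and those two substitutions insert a fresh nested "type:" line without removing any nested value the template may already contain. If that becomes a problem, a minimal alternative sketch is to emit each experiment config from a here-document instead of patching a template; this assumes train.py only needs the five fields varied here (the real config.yaml schema is not part of this patch), so the keys below are illustrative:

#!/bin/bash
# Sketch only: write each experiment config from scratch instead of sed-editing
# a copied template. Adjust the keys to match the actual config.yaml schema.
output_dir="experiments"
mkdir -p "$output_dir"

write_config() {
  # Arguments: learning_rate batch_size num_epochs optimizer loss_function
  local lr="$1" bs="$2" ne="$3" opt="$4" lf="$5"
  local out="$output_dir/config_lr_${lr}_bs_${bs}_ne_${ne}_opt_${opt}_lf_${lf}.yaml"
  cat > "$out" <<EOF
learning_rate: ${lr}
batch_size: ${bs}
num_epochs: ${ne}
optimizer:
  type: "${opt}"
loss_function:
  type: "${lf}"
EOF
  echo "$out"
}

# Used inside the same nested loops as in create_configs.sh:
#   cfg=$(write_config "$lr" "$bs" "$ne" "$opt" "$lf")
#   python train.py --config "$cfg"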
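
For reference, scripts/find_learning_rates.sh is a Slurm batch script: the #SBATCH headers name the job, select the dios partition and request one GPU, so it is meant to be submitted with sbatch rather than executed directly. A usage sketch, assuming it is launched from the repository checkout on the cluster (partition and paths come from the script itself):

# Submit the job to the partition requested in the #SBATCH headers
sbatch scripts/find_learning_rates.sh

# Check the queue and, once the job finishes, read the default output file
squeue -u "$USER"
cat slurm-<jobid>.out    # <jobid> is the job id printed by sbatch on submission

The grid script, by contrast, runs its trainings sequentially in the calling shell, so on the cluster it would either need a similar #SBATCH header block or be launched from an interactive GPU session.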