diff --git a/script/code-snippet.ipynb b/script/code-snippet.ipynb
new file mode 100644
index 00000000..9c19f022
--- /dev/null
+++ b/script/code-snippet.ipynb
@@ -0,0 +1,137 @@
+{
+  "nbformat": 4,
+  "nbformat_minor": 0,
+  "metadata": {
+    "colab": {
+      "provenance": [],
+      "authorship_tag": "ABX9TyOy7gfeCIYYqRIQ8Qy/JUcp",
+      "include_colab_link": true
+    },
+    "kernelspec": {
+      "name": "python3",
+      "display_name": "Python 3"
+    },
+    "language_info": {
+      "name": "python"
+    }
+  },
+  "cells": [
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "view-in-github",
+        "colab_type": "text"
+      },
+      "source": [
+        "\"Open"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": null,
+      "metadata": {
+        "cellView": "form",
+        "id": "aUiK2tqPBmsp"
+      },
+      "outputs": [],
+      "source": [
+        "#@title Auto-move file\n",
+        "#@markdown This code automatically moves the `last.ckpt` file, the `last-state` folder, the `train_data`\n",
+        "#@markdown directory, and the `meta_lat.json` file from the output directory to the cloned model\n",
+        "#@markdown and datasets repositories. Before running this cell, clone the model and datasets\n",
+        "#@markdown repositories from Hugging Face. The code checks whether each file or folder exists in\n",
+        "#@markdown the source and destination directories, and prints a message if a destination already\n",
+        "#@markdown exists or a source is missing. It uses the os and shutil libraries to check for the\n",
+        "#@markdown existence of files and folders and to move them.\n",
+        "import os\n",
+        "import shutil\n",
+        "\n",
+        "# The path of the output directory\n",
+        "output_dir = '/content/kohya-trainer/fine_tuned/' #@param {'type':'string'}\n",
+        "\n",
+        "# The name of the model\n",
+        "model_name = 'momoko30k' #@param {'type':'string'}\n",
+        "\n",
+        "# The path of the cloned model repository\n",
+        "cloned_model_repo = '/content/momoko' #@param {'type':'string'}\n",
+        "\n",
+        "# The name of the save state\n",
+        "save_state_name = 'momoko30k-state' #@param {'type':'string'}\n",
+        "\n",
+        "# The path of the cloned datasets repository\n",
+        "cloned_datasets_repo = '/content/momoko-tag' #@param {'type':'string'}\n",
+        "\n",
+        "# The path of the meta_lat.json file\n",
+        "meta_lat_json_dir = \"/content/kohya-trainer/meta_lat.json\" #@param {'type':'string'}\n",
+        "\n",
+        "# The path of the train data directory\n",
+        "train_data_dir = \"/content/kohya-trainer/train_data\" #@param {'type':'string'}\n",
+        "\n",
+        "# Toggle for running the move; it is not defined elsewhere in this single-cell notebook, so it is assumed to be set here\n",
+        "opt_out = True #@param {'type':'boolean'}\n",
+        "\n",
+        "if opt_out:\n",
+        "    # Move the final checkpoint into the cloned model repository\n",
+        "    src_file = f'{output_dir}/last.ckpt'\n",
+        "    dst_file = f'{cloned_model_repo}/{model_name}.ckpt'\n",
+        "    if os.path.exists(src_file):\n",
+        "        if not os.path.exists(dst_file):\n",
+        "            shutil.move(src_file, dst_file)\n",
+        "            print(f'Moved {src_file} to {dst_file}\\n', flush=True)\n",
+        "        else:\n",
+        "            print(f'{dst_file} already exists\\n', flush=True)\n",
+        "    else:\n",
+        "        print(f'{src_file} does not exist\\n', flush=True)\n",
+        "\n",
+        "    # Move the last-state folder into the cloned datasets repository\n",
+        "    src_folder = f'{output_dir}/last-state'\n",
+        "    dst_folder = f'{cloned_datasets_repo}/{save_state_name}'\n",
+        "    if os.path.exists(src_folder):\n",
+        "        if not os.path.exists(dst_folder):\n",
+        "            shutil.move(src_folder, dst_folder)\n",
+        "            print(f'Moved {src_folder} to {dst_folder}\\n', flush=True)\n",
+        "        else:\n",
+        "            print(f'{dst_folder} already exists\\n', flush=True)\n",
+        "    else:\n",
+        "        print(f'{src_folder} does not exist\\n', flush=True)\n",
+        "\n",
+        "    # Move the train data directory into the cloned datasets repository\n",
+        "    dst_train_data_dir = f'{cloned_datasets_repo}/train_data'\n",
+        "    if not os.path.exists(dst_train_data_dir):\n",
+        "        if os.path.exists(train_data_dir):\n",
+        "            shutil.move(train_data_dir, dst_train_data_dir)\n",
+        "            print(f'Moved {train_data_dir} to {dst_train_data_dir}\\n', flush=True)\n",
+        "        else:\n",
+        "            print(f'{train_data_dir} does not exist\\n', flush=True)\n",
+        "    else:\n",
+        "        print(f'{dst_train_data_dir} already exists\\n', flush=True)\n",
+        "\n",
+        "    # Move the meta_lat.json file into the cloned datasets repository\n",
+        "    dst_meta_lat_json_dir = f'{cloned_datasets_repo}/meta_lat.json'\n",
+        "    if not os.path.exists(dst_meta_lat_json_dir):\n",
+        "        if os.path.exists(meta_lat_json_dir):\n",
+        "            shutil.move(meta_lat_json_dir, dst_meta_lat_json_dir)\n",
+        "            print(f'Moved {meta_lat_json_dir} to {dst_meta_lat_json_dir}\\n', flush=True)\n",
+        "        else:\n",
+        "            print(f'{meta_lat_json_dir} does not exist\\n', flush=True)\n",
+        "    else:\n",
+        "        print(f'{dst_meta_lat_json_dir} already exists\\n', flush=True)\n",
+        "\n",
+        "    # Delete everything else in the cloned datasets repository\n",
+        "    for filename in os.listdir(cloned_datasets_repo):\n",
+        "        # Keep the save state folder, the train_data folder, and the meta_lat.json file\n",
+        "        if filename != save_state_name and filename != os.path.basename(dst_train_data_dir) and filename != os.path.basename(dst_meta_lat_json_dir):\n",
+        "            file_path = os.path.join(cloned_datasets_repo, filename)\n",
+        "\n",
+        "            # Remove folders recursively and files directly\n",
+        "            if os.path.isdir(file_path):\n",
+        "                shutil.rmtree(file_path)\n",
+        "                print(f'Deleted folder: {filename}')\n",
+        "            else:\n",
+        "                os.remove(file_path)\n",
+        "                print(f'Deleted file: {filename}')\n",
+        "\n"
+      ]
+    }
+  ]
+}
\ No newline at end of file