diff --git a/tools/max_projections_stack_and_upload_omero/.shed.yml b/tools/max_projections_stack_and_upload_omero/.shed.yml
new file mode 100644
index 0000000..26aad63
--- /dev/null
+++ b/tools/max_projections_stack_and_upload_omero/.shed.yml
@@ -0,0 +1,7 @@
+categories:
+ - Imaging
+description: Combine images from max projections to stack and upload to the omero server
+name: max_projections_stack_and_upload_omero
+owner: lldelisle
+long_description: Uses a Groovy script to combine the images and a Bash script to upload them as a dataset to the OMERO server
+remote_repository_url: https://github.com/lldelisle/tools-lldelisle/tree/master/tools/max_projections_stack_and_upload_omero
diff --git a/tools/max_projections_stack_and_upload_omero/max_projections_stack_and_upload_omero.xml b/tools/max_projections_stack_and_upload_omero/max_projections_stack_and_upload_omero.xml
new file mode 100644
index 0000000..ec02398
--- /dev/null
+++ b/tools/max_projections_stack_and_upload_omero/max_projections_stack_and_upload_omero.xml
@@ -0,0 +1,132 @@
+
+ And upload to omero
+
+ 20241212
+ 0
+
+
+ Fiji
+ omero-py
+ gawk
+
+ Preferences -> Manage Information" 1>&2 &&
+ exit 1 &&
+ #end if
+ #end if
+
+ ## Prefix to directories provided by users
+ #set $prefix = "/data/mount_s3/image_storage/"
+ ## Prepare output directory
+ mkdir output &&
+ ## Use ln -s for input
+ #if not os.path.isdir($prefix + "/" + str($base_dir)):
+ echo "Base dir $base_dir does not exists" &&
+ exit 1 &&
+ #end if
+ ln -s '${prefix}/${base_dir}/' input &&
+ ## Run the groovy
+ ImageJ --ij2 --headless --console --run '$__tool_directory__/'stack_max_projs.groovy
+ 'base_dir="input/",output_dir="output/",suffix_white="$suffix_white",suffix_fluo="$suffix_fluo",pattern_green="$pattern_green",pattern_red="$pattern_red"' > output.log
+ ## Upload to omero
+ #if str($upload_omero.upload_omero_select) == "yes":
+ && bash '$__tool_directory__/'upload_omero.sh '$upload_omero.omero_host' '$credentials' '$upload_omero.cond_create.to_create' '$upload_omero.cond_create.project_name_or_id' '$upload_omero.cond_create.dataset_name_or_id' >> output.log
+ #end if
+ ]]>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ^[a-zA-Z0-9._-]*$
+ '..' not in value
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ upload_omero['get_stacks_in_galaxy']
+
+
+
+
\ No newline at end of file
diff --git a/tools/max_projections_stack_and_upload_omero/stack_max_projs.groovy b/tools/max_projections_stack_and_upload_omero/stack_max_projs.groovy
new file mode 100644
index 0000000..7ea0395
--- /dev/null
+++ b/tools/max_projections_stack_and_upload_omero/stack_max_projs.groovy
@@ -0,0 +1,194 @@
+/**
+ *
+ * The purpose of this script is to combine a series of time-lapse images into
+ * one file per image with possibly multiple channels and multiple time points
+ *
+ * To make the script run
+ * 1. Create a parent folder (base_dir) and a output folder (output_dir)
+ * 2. The structure of the base_dir must be: one directory per final image and per channel. All the directories should be: `unique_identifier` `suffix specific to channel`.
+ * 3. The image names will be sorted before being merged.
+ * 4. The images must be regular tif.
+ *
+ * The expected outputs are:
+ * 1. In the output_dir one tiff per `unique_identifier` (potentially multi-T and potentially multi-C)
+ */
+
+#@ File(style="directory", label="Directory with one directory per final image and per channel") base_dir
+#@ File(style="directory", label="Output directory (must exist)") output_dir
+#@ String(label="Suffix for white channel directory", value="_BF_max", help="Leave empty if you are not interested") suffix_white
+#@ String(label="Suffix for fluo channel(s) directory", value="_Fluo_max", help="Leave empty if you are not interested") suffix_fluo
+#@ String(label="Pattern for green channel images", value="_H2B-GFP", help="Leave empty if you are not interested") pattern_green
+#@ String(label="Pattern for red channel images", value="_RFP670", help="Leave empty if you are not interested") pattern_red
+
+
+/**
+ * *****************************************************************************************************************
+ * ********************************************* Final Variables **************************************************
+ * ********************************************* DO NOT MODIFY ****************************************************
+ * ****************************************************************************************************************
+ */
+
+// Version number = date of last modif
+VERSION = "20241212"
+
+/**
+ * *****************************************************************************************************************
+ * **************************************** Beginning of the script ***********************************************
+ * ****************************************************************************************************************
+ */
+
+try {
+
+ println "Beginning of the script"
+
+ IJ.run("Close All", "")
+
+ // Find all directories
+ File[] dir_list = base_dir.listFiles()
+
+ // The images are stored in a TreeMap where
+ // keys are unique_identifier
+ // values are a TreeMap that we call channelMap where:
+ // keys are colors (Green, Grays, Red)
+ // values are an ImagePlus (T-stack)
+ Map> samplesMap = new TreeMap<>()
+ List dir_suffix_list = [suffix_white, suffix_fluo]
+ List dir_channels_list = ["Grays", "Fluo"]
+
+ List fluo_pattern_list = [pattern_green, pattern_red]
+ List fluo_channels_list = ["Green", "Red"]
+
+ // Loop over directories: classify each one by its channel suffix and load
+ // its tif images into samplesMap as a T-stack keyed by channel color.
+ for (File current_directory : dir_list) {
+ // Ignore if it is not a directory
+ if (! current_directory.isDirectory()) {
+ continue
+ }
+ String current_directory_name = current_directory.getName()
+ // Check if it matches one of the suffix
+ String final_color = ""
+ // And find the unique identifier:
+ String unique_identifier = ""
+ for(int i = 0; i < dir_suffix_list.size(); i++){
+ if (dir_suffix_list[i] != "" && current_directory_name.endsWith(dir_suffix_list[i])) {
+ final_color = dir_channels_list[i]
+ // NOTE(review): String.replace removes EVERY occurrence of the suffix,
+ // not only the trailing one - confirm identifiers never contain it.
+ unique_identifier = current_directory_name.replace(dir_suffix_list[i], "")
+ // NOTE(review): 'continue' is a no-op here (end of loop body); 'break'
+ // was probably intended. Only matters if a name matches both suffixes,
+ // in which case the LAST matching suffix wins.
+ continue
+ }
+ }
+ // Directory matched no configured suffix: report and skip it.
+ if (final_color == "") {
+ println current_directory_name + " do not match any suffix."
+ continue
+ }
+ if (! samplesMap.containsKey(unique_identifier) ) {
+ // Initiate the Map (TreeMap keeps channels in sorted order)
+ samplesMap.put(unique_identifier, new TreeMap<>())
+ }
+ // Generate the ImagePlus
+ if (final_color == "Fluo") {
+ // Fluo directories hold several channels mixed together; split them by
+ // filename pattern (one FolderOpener filter per configured pattern).
+ for(int i = 0; i < fluo_pattern_list.size(); i++){
+ // Use pattern for each color
+ if (fluo_pattern_list[i] != "") {
+ println "Processing " + unique_identifier + " " + fluo_pattern_list[i]
+ samplesMap.get(unique_identifier).put(
+ fluo_channels_list[i],
+ FolderOpener.open(
+ current_directory.getAbsolutePath(),
+ " filter=" + fluo_pattern_list[i]
+ )
+ )
+ // Only display when a GUI is available (headless Galaxy runs skip this)
+ if (!GraphicsEnvironment.isHeadless()){
+ samplesMap.get(unique_identifier).get(
+ fluo_channels_list[i]).show()
+ }
+ }
+ }
+ } else {
+ // It is easy as all images are used
+ println "Processing " + unique_identifier + " Greys"
+ samplesMap.get(unique_identifier).put(final_color, FolderOpener.open(current_directory.getAbsolutePath()))
+ if (!GraphicsEnvironment.isHeadless()){
+ samplesMap.get(unique_identifier).get(
+ final_color).show()
+ }
+ }
+ }
+
+ // Explore the HashMap and save to tiff:
+ // for each sample, concatenate its per-channel stacks, reorder into a
+ // multi-channel/multi-T hyperstack, apply LUTs and write a single tif.
+ for(String unique_identifier : samplesMap.keySet()){
+ // get the channel map
+ Map channelsMap = samplesMap.get(unique_identifier)
+ ArrayList channels = []
+ ArrayList current_images = []
+
+ // Flatten the map into parallel lists (TreeMap iteration order is sorted,
+ // so channel order is deterministic).
+ for(String channel : channelsMap.keySet()){
+ channels.add(channel)
+ current_images.add(channelsMap.get(channel))
+ }
+ // Get number of time:
+ // NOTE(review): assumes every channel stack has the same slice count as
+ // the first one - TODO confirm for unbalanced acquisitions.
+ int nT = current_images[0].nSlices
+
+ // Merge all
+ ImagePlus merged_imps = Concatenator.run(current_images as ImagePlus[])
+ // Re-order to make a multi-channel, time-lapse image
+ ImagePlus final_imp
+ if (channels.size() == 1 && nT == 1) {
+ // Single channel, single time point: nothing to reorder.
+ final_imp = merged_imps
+ } else {
+ final_imp = HyperStackConverter.toHyperStack(merged_imps, channels.size() , 1, nT, "xytcz", "Color")
+ }
+ // set LUTs
+ // NOTE(review): presumably IJ.run(imp, "<Grays|Green|Red>", "") applies the
+ // ImageJ LUT command of that name to the active channel - verify in Fiji.
+ (0..channels.size()-1).each{
+ final_imp.setC(it + 1)
+ IJ.run(final_imp, channels[it], "")
+ final_imp.resetDisplayRange()
+ }
+ // Save to tiff
+ final_imp.setTitle(unique_identifier)
+
+ // Only display when a GUI is available
+ if (!GraphicsEnvironment.isHeadless()){
+ final_imp.show()
+ }
+
+ // Output file name is "<unique_identifier>_merge.tif" inside output_dir
+ def fs = new FileSaver(final_imp)
+ File output_path = new File (output_dir ,final_imp.getTitle()+"_merge.tif" )
+ fs.saveAsTiff(output_path.toString() )
+
+ }
+ println "End of the script"
+
+} catch (Throwable e) {
+ // Report the failure so it appears in the captured tool log.
+ println("Something went wrong: " + e)
+ e.printStackTrace()
+ if (GraphicsEnvironment.isHeadless()){
+ // Force to give exit signal of error.
+ // This must run BEFORE rethrowing: in the previous version the exit
+ // followed 'throw e' and was therefore unreachable, so headless
+ // (Galaxy) runs could finish with a success exit code despite failing.
+ System.exit(1)
+ }
+ // Re-throw so interactive (GUI) runs still surface the error to the user.
+ throw e
+}
+
+return
+
+/**
+ * ****************************************************************************************************************
+ * ******************************************* End of the script **************************************************
+ *
+ * ****************************************************************************************************************
+ *
+ * *********************************** Helpers and processing methods *********************************************
+ * ***************************************************************************************************************
+ */
+
+import ij.IJ
+import ij.ImagePlus
+import ij.io.FileSaver
+import ij.io.Opener
+import ij.plugin.Concatenator
+import ij.plugin.FolderOpener
+import ij.plugin.HyperStackConverter
+import ij.process.LUT
+
+import java.awt.GraphicsEnvironment
+import java.io.File
diff --git a/tools/max_projections_stack_and_upload_omero/upload_omero.sh b/tools/max_projections_stack_and_upload_omero/upload_omero.sh
new file mode 100644
index 0000000..9d50cae
--- /dev/null
+++ b/tools/max_projections_stack_and_upload_omero/upload_omero.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+omero_server="$1"
+omero_user="$(cat $2 | awk 'NR==2{print $0}')"
+omero_password="$(cat $2 | awk 'NR==3{print $0}')"
+to_create=$3
+project_name_or_id=$4
+dataset_name_or_id=$5
+
+if [ "$to_create" = "both" ]; then
+ # Create a project:
+ project_name_or_id=$(${omero_path} obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new Project name="${project_name_or_id}" | awk -F ":" 'END{print $NF}')
+ echo "Just created the new project ${project_name_or_id}"
+fi
+if [ "$to_create" = "both" ] || [ "$to_create" = "dataset" ]; then
+ dataset_name_or_id=$(${omero_path} obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new Dataset name="${dataset_name_or_id}" | awk -F ":" 'END{print $NF}')
+ echo "Just created the new dataset ${dataset_name_or_id}"
+ ${omero_path} obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new ProjectDatasetLink parent=Project:${project_name_or_id} child=Dataset:${dataset_name_or_id}
+fi
+echo "Start upload"
+omero import -s ${omero_server} -u ${omero_user} -w ${omero_password} --depth 1 -T Dataset:id:"${dataset_name_or_id}" output 2>&1
+echo "Upload finished"