From 39c8ae77d15d77ddcd8a0bb70f46d1be1531e9cc Mon Sep 17 00:00:00 2001 From: Lucille Delisle Date: Fri, 13 Dec 2024 08:53:34 +0100 Subject: [PATCH] fix max_proj tool --- .../CHANGELOG.md | 11 +++ ...max_projections_stack_and_upload_omero.xml | 3 +- .../stack_max_projs.groovy | 68 +++++++++++++------ .../upload_omero.sh | 6 +- 4 files changed, 63 insertions(+), 25 deletions(-) create mode 100644 tools/max_projections_stack_and_upload_omero/CHANGELOG.md diff --git a/tools/max_projections_stack_and_upload_omero/CHANGELOG.md b/tools/max_projections_stack_and_upload_omero/CHANGELOG.md new file mode 100644 index 0000000..963b8ca --- /dev/null +++ b/tools/max_projections_stack_and_upload_omero/CHANGELOG.md @@ -0,0 +1,11 @@ +# CHANGELOG + +## 20241213 + +- Remove unused input plateName in wrapper. +- Do not break if there is an issue with the nb of slices in one image but try to get the Greys +- Fix the omero path to create projects/datasets + +## 20241212 + +First release \ No newline at end of file diff --git a/tools/max_projections_stack_and_upload_omero/max_projections_stack_and_upload_omero.xml b/tools/max_projections_stack_and_upload_omero/max_projections_stack_and_upload_omero.xml index ec02398..0037b8f 100644 --- a/tools/max_projections_stack_and_upload_omero/max_projections_stack_and_upload_omero.xml +++ b/tools/max_projections_stack_and_upload_omero/max_projections_stack_and_upload_omero.xml @@ -1,7 +1,7 @@ And upload to omero - 20241212 + 20241213 0 @@ -64,7 +64,6 @@ $password ^[a-zA-Z0-9._-]*$ '..' 
not in value - diff --git a/tools/max_projections_stack_and_upload_omero/stack_max_projs.groovy b/tools/max_projections_stack_and_upload_omero/stack_max_projs.groovy index 7ea0395..d1a026f 100644 --- a/tools/max_projections_stack_and_upload_omero/stack_max_projs.groovy +++ b/tools/max_projections_stack_and_upload_omero/stack_max_projs.groovy @@ -29,7 +29,7 @@ */ // Version number = date of last modif -VERSION = "20241212" +VERSION = "20241213" /** * ***************************************************************************************************************** @@ -97,9 +97,9 @@ try { " filter=" + fluo_pattern_list[i] ) ) + // println samplesMap.get(unique_identifier).get(fluo_channels_list[i]).getDimensions() if (!GraphicsEnvironment.isHeadless()){ - samplesMap.get(unique_identifier).get( - fluo_channels_list[i]).show() + samplesMap.get(unique_identifier).get(fluo_channels_list[i]).show() } } } @@ -107,9 +107,9 @@ try { // It is easy as all images are used println "Processing " + unique_identifier + " Greys" samplesMap.get(unique_identifier).put(final_color, FolderOpener.open(current_directory.getAbsolutePath())) + // println samplesMap.get(unique_identifier).get(final_color).getDimensions() if (!GraphicsEnvironment.isHeadless()){ - samplesMap.get(unique_identifier).get( - final_color).show() + samplesMap.get(unique_identifier).get(final_color).show() } } } @@ -120,27 +120,55 @@ try { Map channelsMap = samplesMap.get(unique_identifier) ArrayList channels = [] ArrayList current_images = [] + int ref_nT = 0 + boolean all_compatibles = true for(String channel : channelsMap.keySet()){ channels.add(channel) current_images.add(channelsMap.get(channel)) + if (ref_nT == 0) { + ref_nT = channelsMap.get(channel).nSlices + } else { + if (ref_nT != channelsMap.get(channel).nSlices) { + all_compatibles = false + } + } } - // Get number of time: - int nT = current_images[0].nSlices - - // Merge all - ImagePlus merged_imps = Concatenator.run(current_images as ImagePlus[]) - // 
Re-order to make a multi-channel, time-lapse image - ImagePlus final_imp - if (channels.size() == 1 && nT == 1) { - final_imp = merged_imps + + if (all_compatibles) { + // Merge all + ImagePlus merged_imps = Concatenator.run(current_images as ImagePlus[]) + // Re-order to make a multi-channel, time-lapse image + ImagePlus final_imp + if (channels.size() == 1 && ref_nT == 1) { + final_imp = merged_imps + } else { + try { + final_imp = HyperStackConverter.toHyperStack(merged_imps, channels.size() , 1, ref_nT, "xytcz", "Color") + // set LUTs + (0..channels.size()-1).each{ + final_imp.setC(it + 1) + IJ.run(final_imp, channels[it], "") + final_imp.resetDisplayRange() + } + } catch(Exception e) { + println "Could not create the hyperstack for " + unique_identifier + ": " + e + continue + } + } } else { - final_imp = HyperStackConverter.toHyperStack(merged_imps, channels.size() , 1, nT, "xytcz", "Color") - } - // set LUTs - (0..channels.size()-1).each{ - final_imp.setC(it + 1) - IJ.run(final_imp, channels[it], "") + println "Not all channels have the same number of slices:" + (0..channels.size()-1).each{ + println "Channel " + channels[it] + " has " + current_images[it].getDimensions() + " whCZT." 
+ } + if (channelsMap.containsKey("Greys")) { + println "Will keep only Greys channel" + final_imp = channelsMap.get("Greys") + } else { + println "Will keep only " + channels[0] + " channel" + final_imp = current_images[0] + IJ.run(final_imp, channels[0], "") + } final_imp.resetDisplayRange() } // Save to tiff diff --git a/tools/max_projections_stack_and_upload_omero/upload_omero.sh b/tools/max_projections_stack_and_upload_omero/upload_omero.sh index 9d50cae..f83593c 100644 --- a/tools/max_projections_stack_and_upload_omero/upload_omero.sh +++ b/tools/max_projections_stack_and_upload_omero/upload_omero.sh @@ -8,13 +8,13 @@ dataset_name_or_id=$5 if [ "$to_create" = "both" ]; then # Create a project: - project_name_or_id=$(${omero_path} obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new Project name="${project_name_or_id}" | awk -F ":" 'END{print $NF}') + project_name_or_id=$(omero obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new Project name="${project_name_or_id}" | awk -F ":" 'END{print $NF}') echo "Just created the new project ${project_name_or_id}" fi if [ "$to_create" = "both" ] || [ "$to_create" = "dataset" ]; then - dataset_name_or_id=$(${omero_path} obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new Dataset name="${dataset_name_or_id}" | awk -F ":" 'END{print $NF}') + dataset_name_or_id=$(omero obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new Dataset name="${dataset_name_or_id}" | awk -F ":" 'END{print $NF}') echo "Just created the new dataset ${dataset_name_or_id}" - ${omero_path} obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new ProjectDatasetLink parent=Project:${project_name_or_id} child=Dataset:${dataset_name_or_id} + omero obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new ProjectDatasetLink parent=Project:${project_name_or_id} child=Dataset:${dataset_name_or_id} fi echo "Start upload" omero import -s ${omero_server} -u ${omero_user} -w ${omero_password} --depth 1 
-T Dataset:id:"${dataset_name_or_id}" output 2>&1