Skip to content

Commit

Permalink
fix max_proj tool
Browse files Browse the repository at this point in the history
  • Loading branch information
lldelisle committed Dec 13, 2024
1 parent 69a86e0 commit 39c8ae7
Show file tree
Hide file tree
Showing 4 changed files with 63 additions and 25 deletions.
11 changes: 11 additions & 0 deletions tools/max_projections_stack_and_upload_omero/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# CHANGELOG

## 20241213

- Remove unused input plateName in wrapper.
- Do not abort when the number of slices is inconsistent between channels of an image; instead keep only the Greys channel (or the first channel if Greys is absent).
- Fix the command used to create projects/datasets on OMERO (invoke `omero` directly instead of the unset `${omero_path}` variable).

## 20241212

First release
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
<tool id="max_projections_stack_and_upload_omero" name="Stack MaxProj" version="@TOOL_VERSION@+galaxy@VERSION_SUFFIX@" profile="20.01" license="BSD-3">
<description>And upload to omero</description>
<macros>
<token name="@TOOL_VERSION@">20241212</token>
<token name="@TOOL_VERSION@">20241213</token>
<token name="@VERSION_SUFFIX@">0</token>
</macros>
<requirements>
Expand Down Expand Up @@ -64,7 +64,6 @@ $password
<validator type="regex" message="Enter a valid host location, for example, your.omero.server">^[a-zA-Z0-9._-]*$</validator>
<validator type="expression" message="No two dots (..) allowed">'..' not in value</validator>
</param>
<param name="plateName" type="text" value="Experiment:0" label="Name of the plate (on omero)" />
<conditional name="cond_create">
<param name="to_create" type="select" label="Create the project/dataset on OMERO or use existing one?">
<option value="both">Create a new Project and a new Dataset</option>
Expand Down
68 changes: 48 additions & 20 deletions tools/max_projections_stack_and_upload_omero/stack_max_projs.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
*/

// Version number = date of last modif
VERSION = "20241212"
VERSION = "20241213"

/**
* *****************************************************************************************************************
Expand Down Expand Up @@ -97,19 +97,19 @@ try {
" filter=" + fluo_pattern_list[i]
)
)
// println samplesMap.get(unique_identifier).get(fluo_channels_list[i]).getDimensions()
if (!GraphicsEnvironment.isHeadless()){
samplesMap.get(unique_identifier).get(
fluo_channels_list[i]).show()
samplesMap.get(unique_identifier).get(fluo_channels_list[i]).show()
}
}
}
} else {
// It is easy as all images are used
println "Processing " + unique_identifier + " Greys"
samplesMap.get(unique_identifier).put(final_color, FolderOpener.open(current_directory.getAbsolutePath()))
// println samplesMap.get(unique_identifier).get(final_color).getDimensions()
if (!GraphicsEnvironment.isHeadless()){
samplesMap.get(unique_identifier).get(
final_color).show()
samplesMap.get(unique_identifier).get(final_color).show()
}
}
}
Expand All @@ -120,27 +120,55 @@ try {
Map<String, ImagePlus> channelsMap = samplesMap.get(unique_identifier)
ArrayList<String> channels = []
ArrayList<ImagePlus> current_images = []
int ref_nT = 0
boolean all_compatibles = true

for(String channel : channelsMap.keySet()){
channels.add(channel)
current_images.add(channelsMap.get(channel))
if (ref_nT == 0) {
ref_nT = channelsMap.get(channel).nSlices
} else {
if (ref_nT != channelsMap.get(channel).nSlices) {
all_compatibles = false
}
}
}
// Get number of time:
int nT = current_images[0].nSlices

// Merge all
ImagePlus merged_imps = Concatenator.run(current_images as ImagePlus[])
// Re-order to make a multi-channel, time-lapse image
ImagePlus final_imp
if (channels.size() == 1 && nT == 1) {
final_imp = merged_imps

if (all_compatibles) {
// Merge all
ImagePlus merged_imps = Concatenator.run(current_images as ImagePlus[])
// Re-order to make a multi-channel, time-lapse image
ImagePlus final_imp
if (channels.size() == 1 && nT == 1) {
final_imp = merged_imps
} else {
try {
final_imp = HyperStackConverter.toHyperStack(merged_imps, channels.size() , 1, ref_nT, "xytcz", "Color")
// set LUTs
(0..channels.size()-1).each{
final_imp.setC(it + 1)
IJ.run(final_imp, channels[it], "")
final_imp.resetDisplayRange()
}
} catch(Exception e) {
println "Could not create the hyperstack for " + unique_identifier + ": " + e
continue
}
}
} else {
final_imp = HyperStackConverter.toHyperStack(merged_imps, channels.size() , 1, nT, "xytcz", "Color")
}
// set LUTs
(0..channels.size()-1).each{
final_imp.setC(it + 1)
IJ.run(final_imp, channels[it], "")
println "Not all channels have the same number of slices:"
(0..channels.size()-1).each{
println "Channel " + channels[it] + " has " + current_images[it].getDimensions() + " whCZT."
}
if (channelsMap.containsKey("Greys")) {
println "Will keep only Greys channel"
final_imp = channelsMap.get("Greys")
} else {
println "Will keep only " + channels[0] + " channel"
final_imp = current_images[0]
IJ.run(final_imp, channels[0], "")
}
final_imp.resetDisplayRange()
}
// Save to tiff
Expand Down
6 changes: 3 additions & 3 deletions tools/max_projections_stack_and_upload_omero/upload_omero.sh
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,13 @@ dataset_name_or_id=$5

if [ "$to_create" = "both" ]; then
# Create a project:
project_name_or_id=$(${omero_path} obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new Project name="${project_name_or_id}" | awk -F ":" 'END{print $NF}')
project_name_or_id=$(omero obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new Project name="${project_name_or_id}" | awk -F ":" 'END{print $NF}')
echo "Just created the new project ${project_name_or_id}"
fi
if [ "$to_create" = "both" ] || [ "$to_create" = "dataset" ]; then
dataset_name_or_id=$(${omero_path} obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new Dataset name="${dataset_name_or_id}" | awk -F ":" 'END{print $NF}')
dataset_name_or_id=$(omero obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new Dataset name="${dataset_name_or_id}" | awk -F ":" 'END{print $NF}')
echo "Just created the new dataset ${dataset_name_or_id}"
${omero_path} obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new ProjectDatasetLink parent=Project:${project_name_or_id} child=Dataset:${dataset_name_or_id}
omero obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new ProjectDatasetLink parent=Project:${project_name_or_id} child=Dataset:${dataset_name_or_id}
fi
echo "Start upload"
omero import -s ${omero_server} -u ${omero_user} -w ${omero_password} --depth 1 -T Dataset:id:"${dataset_name_or_id}" output 2>&1
Expand Down

0 comments on commit 39c8ae7

Please sign in to comment.