add a new tool to get max proj on omero
Showing 4 changed files with 354 additions and 0 deletions.
7 changes: 7 additions & 0 deletions
tools/max_projections_stack_and_upload_omero/.shed.yml
categories:
- Imaging
description: Combine images from max projections into stacks and upload them to the OMERO server
name: max_projections_stack_and_upload_omero
owner: lldelisle
long_description: Uses a Groovy script to combine images and a bash script to upload them as a dataset to the OMERO server
remote_repository_url: https://github.com/lldelisle/tools-lldelisle/tree/master/tools/max_projections_stack_and_upload_omero
132 changes: 132 additions & 0 deletions
tools/max_projections_stack_and_upload_omero/max_projections_stack_and_upload_omero.xml
<tool id="max_projections_stack_and_upload_omero" name="Stack MaxProj" version="@TOOL_VERSION@+galaxy@VERSION_SUFFIX@" profile="20.01" license="BSD-3">
    <description>And upload to omero</description>
    <macros>
        <token name="@TOOL_VERSION@">20241212</token>
        <token name="@VERSION_SUFFIX@">0</token>
    </macros>
    <requirements>
        <requirement type="package" version="20240614">Fiji</requirement>
        <requirement type="package" version="5.19.4">omero-py</requirement>
        <requirement type="package" version="5.3.1">gawk</requirement>
    </requirements>
    <command detect_errors="exit_code"><![CDATA[
        #import os
        ## the user wants to use a non-public OMERO instance
        ## check if credentials are set in the user preferences; if not, warn the user and exit
        #set $username = $__user__.extra_preferences.get('omero_account|username', "")
        #set $password = $__user__.extra_preferences.get('omero_account|password', "")
        #if str($upload_omero.upload_omero_select) == "yes":
            #if ($username == "" or $password == ""):
                echo "OMERO connection credentials are empty. Set your credentials via: User -> Preferences -> Manage Information" 1>&2 &&
                exit 1 &&
            #end if
        #end if
        ## Prefix to directories provided by users
        #set $prefix = "/data/mount_s3/image_storage/"
        ## Prepare output directory
        mkdir output &&
        ## Use ln -s for input
        #if not os.path.isdir($prefix + "/" + str($base_dir)):
            echo "Base dir $base_dir does not exist" &&
            exit 1 &&
        #end if
        ln -s '${prefix}/${base_dir}/' input &&
        ## Run the groovy script
        ImageJ --ij2 --headless --console --run '$__tool_directory__/'stack_max_projs.groovy
        'base_dir="input/",output_dir="output/",suffix_white="$suffix_white",suffix_fluo="$suffix_fluo",pattern_green="$pattern_green",pattern_red="$pattern_red"' > output.log
        ## Upload to omero
        #if str($upload_omero.upload_omero_select) == "yes":
            && bash '$__tool_directory__/'upload_omero.sh '$upload_omero.omero_host' '$credentials' '$upload_omero.cond_create.to_create' '$upload_omero.cond_create.project_name_or_id' '$upload_omero.cond_create.dataset_name_or_id' >> output.log
        #end if
    ]]></command>
    <configfiles>
        <!-- The rendered credentials file starts with a blank line; upload_omero.sh reads the username from line 2 and the password from line 3 -->
        <configfile name="credentials"><![CDATA[
#set $username = $__user__.extra_preferences.get('omero_account|username', "")
#set $password = $__user__.extra_preferences.get('omero_account|password', "")
$username
$password
]]></configfile>
    </configfiles>
    <inputs>
        <param name="base_dir" type="text" value="" label="Directory on s3 with all directories" help="For example Pierre/ViventisLightSheet/20220617_104242_MixCell_50pc_20pc_72hstart/"/>
        <param name="suffix_white" type="text" value="_BF_max" label="Suffix for white channel directory" help="Leave empty if you are not interested"/>
        <param name="suffix_fluo" type="text" value="_Fluo_max" label="Suffix for fluo channel(s) directory" help="Leave empty if you are not interested"/>
        <param name="pattern_green" type="text" value="_H2B-GFP" label="Pattern for green channel images" help="Leave empty if you are not interested"/>
        <param name="pattern_red" type="text" value="_RFP670" label="Pattern for red channel images" help="Leave empty if you are not interested"/>
        <conditional name="upload_omero">
            <param name="upload_omero_select" type="select" label="Upload your images to OMERO?">
                <option value="yes">Yes</option>
                <option value="no">No</option>
            </param>
            <when value="yes">
                <param name="omero_host" type="text" label="OMERO host URL">
                    <validator type="regex" message="Enter a valid host location, for example, your.omero.server">^[a-zA-Z0-9._-]*$</validator>
                    <validator type="expression" message="No two dots (..) allowed">'..' not in value</validator>
                </param>
                <param name="plateName" type="text" value="Experiment:0" label="Name of the plate (on OMERO)" />
                <conditional name="cond_create">
                    <param name="to_create" type="select" label="Create the project/dataset on OMERO or use an existing one?">
                        <option value="both">Create a new Project and a new Dataset</option>
                        <option value="dataset">Use an existing Project and create a new Dataset</option>
                        <option value="none">Use an existing Dataset</option>
                    </param>
                    <when value="both">
                        <param name="project_name_or_id" type="text" value="MyNewProject" label="Name of the project (on OMERO)" />
                        <param name="dataset_name_or_id" type="text" value="MyNewDataset" label="Name of the dataset (on OMERO)" />
                    </when>
                    <when value="dataset">
                        <param name="project_name_or_id" type="integer" min="1" value="12" label="ID of the project (on OMERO)" />
                        <param name="dataset_name_or_id" type="text" value="MyNewDataset" label="Name of the dataset (on OMERO)" />
                    </when>
                    <when value="none">
                        <param name="project_name_or_id" type="integer" min="1" value="12" label="ID of the project (on OMERO)" />
                        <param name="dataset_name_or_id" type="integer" min="1" value="56" label="ID of the dataset (on OMERO)" />
                    </when>
                </conditional>
                <param name="get_stacks_in_galaxy" type="boolean" checked="false" label="Get stacks in Galaxy" />
            </when>
            <when value="no">
                <param name="get_stacks_in_galaxy" type="hidden" value="true"/>
            </when>
        </conditional>
    </inputs>
    <outputs>
        <data name="logfile" format="txt" from_work_dir="output.log" label="${tool.name} on ${on_string}: logfile" />
        <collection name="list_output" type="list" label="${tool.name} on ${on_string}: Stacks">
            <discover_datasets pattern="(?P<designation>.+)\.tif" directory="output/" ext="tiff" />
            <filter>upload_omero['get_stacks_in_galaxy']</filter>
        </collection>
    </outputs>
    <help><![CDATA[
.. class:: infomark

**What it does**

This tool takes the max-projection images coming from the Viventis Lightsheet (stored on the s3 server), makes one "T-stack" per well, and uploads it to OMERO.
The wrapper uses ImageJ to combine the images (with a Groovy script) and omero-py to import them.

Usage
.....

**Input**

- One path to the directory with the data (see the example layout below)
- suffix_white, which allows finding the directories with the 'Grays' channel and extracting the `unique identifier`
- suffix_fluo, which allows finding the directories with the fluo channels (Red and Green) and extracting the `unique identifier` so they can be merged with the 'Grays'
- pattern_green, which filters for green images in the fluo directory
- pattern_red, which filters for red images in the fluo directory
- URL of the OMERO server
- Name/ID of the Project in OMERO
- Name/ID of the Dataset in OMERO
- Whether the Dataset/Project should be created

**Output**

- A log file with info
- Optionally, all images as T-stacks
- On OMERO, your images with all channels and all time points combined
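
For example, with the default suffixes and patterns, the base directory is expected to look roughly like this (the well names are hypothetical)::

    base_dir/
        Well1_BF_max/     one tif per time point (Grays channel)
        Well1_Fluo_max/   tifs whose names contain _H2B-GFP (Green) and/or _RFP670 (Red)
        Well2_BF_max/
        Well2_Fluo_max/

Each `unique identifier` (here Well1 and Well2) then gives one multi-channel, multi-T stack, saved as Well1_merge.tif and Well2_merge.tif.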
    ]]></help>
</tool>
194 changes: 194 additions & 0 deletions
tools/max_projections_stack_and_upload_omero/stack_max_projs.groovy
/**
 *
 * The purpose of this script is to combine a series of time-lapse images into
 * one file per image, with possibly multiple channels and multiple time points.
 *
 * To make the script run:
 * 1. Create a parent folder (base_dir) and an output folder (output_dir).
 * 2. The structure of base_dir must be: one directory per final image and per channel. All the directories should be named: `unique_identifier` `suffix specific to channel`.
 * 3. The image names will be sorted before being merged.
 * 4. The images must be regular tif.
 *
 * The expected outputs are:
 * 1. In output_dir, one tiff per `unique_identifier` (potentially multi-T and potentially multi-C).
 */
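
// A minimal sketch of a headless invocation (the paths are illustrative), matching the command line built by the Galaxy wrapper:
//   ImageJ --ij2 --headless --console --run stack_max_projs.groovy \
//     'base_dir="input/",output_dir="output/",suffix_white="_BF_max",suffix_fluo="_Fluo_max",pattern_green="_H2B-GFP",pattern_red="_RFP670"' > output.log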
#@ File(style="directory", label="Directory with one directory per final image and per channel") base_dir
#@ File(style="directory", label="Output directory (must exist)") output_dir
#@ String(label="Suffix for white channel directory", value="_BF_max", help="Leave empty if you are not interested") suffix_white
#@ String(label="Suffix for fluo channel(s) directory", value="_Fluo_max", help="Leave empty if you are not interested") suffix_fluo
#@ String(label="Pattern for green channel images", value="_H2B-GFP", help="Leave empty if you are not interested") pattern_green
#@ String(label="Pattern for red channel images", value="_RFP670", help="Leave empty if you are not interested") pattern_red

/**
 * *****************************************************************************************************************
 * ********************************************* Final Variables **************************************************
 * ********************************************* DO NOT MODIFY ****************************************************
 * *****************************************************************************************************************
 */

// Version number = date of last modification
VERSION = "20241212"

/**
 * *****************************************************************************************************************
 * **************************************** Beginning of the script ***********************************************
 * *****************************************************************************************************************
 */

try {

    println "Beginning of the script"

    IJ.run("Close All", "")

    // Find all directories
    File[] dir_list = base_dir.listFiles()

    // The images are stored in a TreeMap where
    // keys are unique_identifier
    // values are a TreeMap that we call channelMap where:
    //     keys are colors (Green, Grays, Red)
    //     values are an ImagePlus (T-stack)
    Map<String, Map<String, ImagePlus>> samplesMap = new TreeMap<>()
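    // For example (hypothetical identifiers), after the loop below samplesMap could hold:
    //     "Well1" -> { "Grays": <T-stack>, "Green": <T-stack>, "Red": <T-stack> }
    //     "Well2" -> { "Grays": <T-stack> }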
    List<String> dir_suffix_list = [suffix_white, suffix_fluo]
    List<String> dir_channels_list = ["Grays", "Fluo"]

    List<String> fluo_pattern_list = [pattern_green, pattern_red]
    List<String> fluo_channels_list = ["Green", "Red"]

    // Loop over directories:
    for (File current_directory : dir_list) {
        // Ignore if it is not a directory
        if (! current_directory.isDirectory()) {
            continue
        }
        String current_directory_name = current_directory.getName()
        // Check if it matches one of the suffixes
        String final_color = ""
        // And find the unique identifier:
        String unique_identifier = ""
        for (int i = 0; i < dir_suffix_list.size(); i++) {
            if (dir_suffix_list[i] != "" && current_directory_name.endsWith(dir_suffix_list[i])) {
                final_color = dir_channels_list[i]
                unique_identifier = current_directory_name.replace(dir_suffix_list[i], "")
                continue
            }
        }
        if (final_color == "") {
            println current_directory_name + " does not match any suffix."
            continue
        }
        if (! samplesMap.containsKey(unique_identifier)) {
            // Initiate the Map
            samplesMap.put(unique_identifier, new TreeMap<>())
        }
        // Generate the ImagePlus
        if (final_color == "Fluo") {
            for (int i = 0; i < fluo_pattern_list.size(); i++) {
                // Use the pattern for each color
                if (fluo_pattern_list[i] != "") {
                    println "Processing " + unique_identifier + " " + fluo_pattern_list[i]
                    samplesMap.get(unique_identifier).put(
                        fluo_channels_list[i],
                        FolderOpener.open(
                            current_directory.getAbsolutePath(),
                            " filter=" + fluo_pattern_list[i]
                        )
                    )
                    if (!GraphicsEnvironment.isHeadless()) {
                        samplesMap.get(unique_identifier).get(
                            fluo_channels_list[i]).show()
                    }
                }
            }
        } else {
            // It is easy as all images are used
            println "Processing " + unique_identifier + " Grays"
            samplesMap.get(unique_identifier).put(final_color, FolderOpener.open(current_directory.getAbsolutePath()))
            if (!GraphicsEnvironment.isHeadless()) {
                samplesMap.get(unique_identifier).get(
                    final_color).show()
            }
        }
    }

    // Explore the map and save to tiff
    for (String unique_identifier : samplesMap.keySet()) {
        // Get the channel map
        Map<String, ImagePlus> channelsMap = samplesMap.get(unique_identifier)
        ArrayList<String> channels = []
        ArrayList<ImagePlus> current_images = []

        for (String channel : channelsMap.keySet()) {
            channels.add(channel)
            current_images.add(channelsMap.get(channel))
        }
        // Get the number of time points:
        int nT = current_images[0].nSlices

        // Merge all
        ImagePlus merged_imps = Concatenator.run(current_images as ImagePlus[])
        // Re-order to make a multi-channel, time-lapse image
        ImagePlus final_imp
        if (channels.size() == 1 && nT == 1) {
            final_imp = merged_imps
        } else {
            final_imp = HyperStackConverter.toHyperStack(merged_imps, channels.size(), 1, nT, "xytcz", "Color")
        }
        // Set LUTs
        (0..channels.size()-1).each {
            final_imp.setC(it + 1)
            IJ.run(final_imp, channels[it], "")
            final_imp.resetDisplayRange()
        }
        // Save to tiff
        final_imp.setTitle(unique_identifier)

        if (!GraphicsEnvironment.isHeadless()) {
            final_imp.show()
        }

        def fs = new FileSaver(final_imp)
        File output_path = new File(output_dir, final_imp.getTitle() + "_merge.tif")
        fs.saveAsTiff(output_path.toString())

    }
    println "End of the script"

} catch (Throwable e) {
    println("Something went wrong: " + e)
    e.printStackTrace()

    if (GraphicsEnvironment.isHeadless()) {
        // Force an error exit code in headless mode
        System.exit(1)
    }
    throw e

}

return

/**
 * *****************************************************************************************************************
 * ******************************************* End of the script **************************************************
 *
 * *****************************************************************************************************************
 *
 * *********************************** Helpers and processing methods *********************************************
 * *****************************************************************************************************************
 */

import ij.IJ
import ij.ImagePlus
import ij.io.FileSaver
import ij.io.Opener
import ij.plugin.Concatenator
import ij.plugin.FolderOpener
import ij.plugin.HyperStackConverter
import ij.process.LUT

import java.awt.GraphicsEnvironment
import java.io.File
21 changes: 21 additions & 0 deletions
tools/max_projections_stack_and_upload_omero/upload_omero.sh
#!/bin/bash
omero_server="$1"
# The credentials file written by the Galaxy tool starts with a blank line;
# the username is on line 2 and the password on line 3:
omero_user="$(awk 'NR==2{print $0}' "$2")"
omero_password="$(awk 'NR==3{print $0}' "$2")"
to_create=$3
project_name_or_id=$4
dataset_name_or_id=$5

if [ "$to_create" = "both" ]; then
    # Create a project and keep its numeric ID:
    project_name_or_id=$(omero obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new Project name="${project_name_or_id}" | awk -F ":" 'END{print $NF}')
    echo "Just created the new project ${project_name_or_id}"
fi
if [ "$to_create" = "both" ] || [ "$to_create" = "dataset" ]; then
    # Create a dataset, keep its numeric ID and link it to the project:
    dataset_name_or_id=$(omero obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new Dataset name="${dataset_name_or_id}" | awk -F ":" 'END{print $NF}')
    echo "Just created the new dataset ${dataset_name_or_id}"
    omero obj -s ${omero_server} -u ${omero_user} -w ${omero_password} new ProjectDatasetLink parent=Project:${project_name_or_id} child=Dataset:${dataset_name_or_id}
fi
echo "Start upload"
omero import -s ${omero_server} -u ${omero_user} -w ${omero_password} --depth 1 -T Dataset:id:"${dataset_name_or_id}" output 2>&1
echo "Upload finished"