Merge pull request #14 from ivadomed/jv/compute_csa_T2w
OHBM abstract submitted and scripts cleaned-up --> merging.
valosekj authored Jan 20, 2023
2 parents 672919d + a0ea789 commit 8e1b2c3
Showing 6 changed files with 1,144 additions and 4 deletions.
112 changes: 112 additions & 0 deletions preprocess/copy_files_to_derivatives.py
@@ -0,0 +1,112 @@
#!/usr/bin/env python
#
# Copy manually corrected files (segmentations, vertebral labeling, etc.) from the source preprocessed dataset to the
# git-annex BIDS dataset's derivatives folder
#
# Authors: Jan Valosek
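#
# Example usage (illustrative, with hypothetical paths, not part of the original header):
#   python copy_files_to_derivatives.py -path-in <PATH_TO_PREPROCESSED_DATASET>/derivatives -path-out <PATH_TO_BIDS_DATASET>/derivatives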

import argparse
import glob
import os
import shutil
import utils
import re


def get_parser():
    """
    Parser function
    """

    parser = argparse.ArgumentParser(
        description='Copy manually corrected files (segmentations, vertebral labeling, etc.) from the source '
                    'preprocessed dataset to the git-annex BIDS derivatives folder',
        formatter_class=utils.SmartFormatter,
        prog=os.path.splitext(os.path.basename(__file__))[0]
    )
    parser.add_argument(
        '-path-in',
        metavar="<folder>",
        required=True,
        type=str,
        help='Path to the folder with manually corrected files (usually derivatives). The script assumes that the '
             'labels folder is located in the provided folder.'
    )
    parser.add_argument(
        '-path-out',
        metavar="<folder>",
        required=True,
        type=str,
        help='Path to the BIDS dataset where manually corrected files will be copied. Include also the derivatives '
             'folder in the path. Files will be copied to the derivatives/labels folder.'
    )

    return parser


# TODO - merge this function with the function in utils.py
def fetch_subject_and_session(filename_path):
    """
    Get subject ID, session ID, filename and contrast from the input BIDS-compatible filename or file path.
    The function works on an absolute file path as well as on a filename alone.
    :param filename_path: input nifti filename (e.g., sub-001_ses-01_T1w.nii.gz) or file path
    (e.g., /home/user/MRI/bids/derivatives/labels/sub-001/ses-01/anat/sub-001_ses-01_T1w.nii.gz)
    :return: subjectID: subject ID (e.g., sub-001)
    :return: sessionID: session ID (e.g., ses-01)
    :return: filename: nii filename (e.g., sub-001_ses-01_T1w.nii.gz)
    :return: contrast: contrast folder (dwi or anat)
    """

    _, filename = os.path.split(filename_path)              # Get just the filename (i.e., remove the path)
    # REGEX explanation
    # (.*?) - match any characters, as few times as possible (non-greedy)
    # [_/]  - stop at the first underscore or slash after the subject ID
    subject = re.search('sub-(.*?)[_/]', filename_path)
    subjectID = subject.group(0)[:-1] if subject else ""    # [:-1] removes the trailing underscore or slash
    session = re.findall(r'ses-..', filename_path)
    sessionID = session[0] if session else ""               # Empty string if there is no session
    contrast = 'dwi' if 'dwi' in filename_path else 'anat'  # Contrast folder (dwi or anat)

    return subjectID, sessionID, filename, contrast
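# Illustrative example (not part of the original script):
#   fetch_subject_and_session('sub-001_ses-01_T2w.nii.gz') -> ('sub-001', 'ses-01', 'sub-001_ses-01_T2w.nii.gz', 'anat')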


def main():

    # Parse the command line arguments
    parser = get_parser()
    args = parser.parse_args()

    # Check if path_in exists
    if os.path.isdir(args.path_in):
        path_in = os.path.join(os.path.abspath(args.path_in), 'labels')
    else:
        raise NotADirectoryError(f'{args.path_in} does not exist.')

    # Check if path_out exists
    if os.path.isdir(args.path_out):
        path_out = os.path.join(os.path.abspath(args.path_out), 'labels')
    else:
        raise NotADirectoryError(f'{args.path_out} does not exist.')

    # Loop across files in input dataset
    for path_file_in in sorted(glob.glob(path_in + '/**/*.nii.gz', recursive=True)):
        sub, ses, filename, contrast = fetch_subject_and_session(path_file_in)
        # Construct path for the output file
        path_file_out = os.path.join(path_out, sub, ses, contrast, filename)
        # Check if subject's folder exists in the output dataset, if not, create it
        path_subject_folder_out = os.path.join(path_out, sub, ses, contrast)
        if not os.path.isdir(path_subject_folder_out):
            os.makedirs(path_subject_folder_out)
            print(f'Creating directory: {path_subject_folder_out}')
        # Copy nii and json files to the output dataset
        # TODO - consider rsync instead of shutil.copy
        shutil.copy(path_file_in, path_file_out)
        print(f'Copying: {path_file_in} to {path_file_out}')
        path_file_json_in = path_file_in.replace('nii.gz', 'json')
        path_file_json_out = path_file_out.replace('nii.gz', 'json')
        shutil.copy(path_file_json_in, path_file_json_out)
        print(f'Copying: {path_file_json_in} to {path_file_json_out}')


if __name__ == '__main__':
    main()
11 changes: 7 additions & 4 deletions preprocess/sct-preprocess_data.sh
@@ -203,12 +203,15 @@ if [[ -f ${file_t2w}.nii.gz ]];then
mv ${file_t2w}_raw_RPI_r.nii.gz ${file_t2w}.nii.gz

# Spinal cord segmentation
# Note: For T2w images, we use sct_deepseg_sc with 2d kernel. Generally, it works better than sct_propseg and sct_deepseg_sc with 3d kernel.
segment_if_does_not_exist ${file_t2w} 't2' 'deepseg'

# Vertebral labeling
label_if_does_not_exist ${file_t2w} ${file_t2w}_seg

# Compute average cord CSA between C2 and C3
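# Explanatory note (added comment, not from the original script): -vert 2:3 restricts the measurement to the C2-C3
# vertebral levels defined in the labeled segmentation passed via -vertfile; -append 1 appends one row per subject
# to the shared csa-SC_T2w.csv when the script is run across subjects with sct_run_batch.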
sct_process_segmentation -i ${file_t2w}_seg.nii.gz -vert 2:3 -vertfile ${file_t2w}_seg_labeled.nii.gz -o ${PATH_RESULTS}/csa-SC_T2w.csv -append 1
exit

# MS lesions segmentation
# TODO - explore why sct_deepseg_lesion produces different results with manually provided centerline
7 changes: 7 additions & 0 deletions scripts-t2w_csa/README.md
@@ -0,0 +1,7 @@
# T2w CSA

This folder contains bash scripts used to compute the cross-sectional area (CSA) at the C2-C3 vertebral levels from
T2w images for the CanProCo and spine-generic datasets. Both scripts can be run using the SCT wrapper `sct_run_batch`.

The folder also contains a Python script used for figure creation (rainplot, scatter plot with linear fit) and
statistics computation (ANOVA, Kruskal-Wallis H-test, partial correlation).
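
As an illustration (the script name, dataset path and number of jobs below are placeholders, not taken from this
repository), a batch run might look like:

```bash
sct_run_batch -script scripts-t2w_csa/<script_name>.sh -path-data <PATH_TO_BIDS_DATASET> -path-output <PATH_TO_OUTPUT_FOLDER> -jobs 8
```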
