Skip to content

Commit

Permalink
debugged and tested Image Smoothing and Resampling
Browse files Browse the repository at this point in the history
  • Loading branch information
RLeenings committed Jun 5, 2019
1 parent ce251ce commit a43655b
Show file tree
Hide file tree
Showing 3 changed files with 74 additions and 6 deletions.
11 changes: 10 additions & 1 deletion photonai/base/PhotonBatchElement.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from .PhotonBase import PipelineElement
from ..photonlogger import Logger
import numpy as np


Expand Down Expand Up @@ -30,8 +31,13 @@ def batch_call(self, delegate, X, y=None, call_with_y=True, **kwargs):
else:
nr = len(X)
dim = 1

batch_idx = 0
for start, stop in PhotonBatchElement.chunker(nr, self.batch_size):

batch_idx += 1
Logger().debug(self.name + " is processing batch nr " + str(batch_idx))

# split data in batches
if dim > 1:
X_batched = X[start:stop, :]
Expand Down Expand Up @@ -76,7 +82,10 @@ def batch_call(self, delegate, X, y=None, call_with_y=True, **kwargs):
def stack_results(new_a, existing_a):
if existing_a is not None:
if isinstance(new_a, list) or (isinstance(new_a, np.ndarray) and len(new_a.shape) < 2):
existing_a = np.hstack((existing_a, new_a))
if isinstance(existing_a, list):
existing_a = existing_a + new_a
else:
existing_a = np.hstack((existing_a, new_a))
else:
existing_a = np.vstack((existing_a, new_a))
else:
Expand Down
2 changes: 1 addition & 1 deletion photonai/examples/Brain_Age_Master.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@

my_pipe.fit(X, y)

# clear_cache lives on the wrapped base element, not on the
# PhotonBatchElement wrapper itself.
batched_transformer.base_element.clear_cache()



Expand Down
67 changes: 63 additions & 4 deletions photonai/test/NeuroTest.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
import unittest, os, inspect
from ..neuro.BrainAtlas import AtlasLibrary, BrainAtlas
from ..base.PhotonBatchElement import PhotonBatchElement
from ..neuro.ImageBasics import ResampleImages, SmoothImages
from nilearn import image
from nibabel.nifti1 import Nifti1Image
import numpy as np


Expand All @@ -9,6 +12,7 @@ class NeuroTest(unittest.TestCase):
def setUp(self):
    """Prepare shared fixtures: test-data folder, atlas name and nii file list."""
    this_dir = os.path.dirname(os.path.abspath(__file__))
    self.test_folder = os.path.join(this_dir, 'test_data/')
    self.atlas_name = "AAL"
    self.X = AtlasLibrary().get_nii_files_from_folder(self.test_folder, extension=".nii")

def tearDown(self):
    """No per-test cleanup required."""
Expand All @@ -22,7 +26,7 @@ def test_brain_atlas_load(self):
self.assertTrue(np.array_equal(man_map, brain_atlas.map))

def test_brain_atlas(self):
    """Extract ROI signals with BrainAtlas in 'vec' and 'mean' mode.

    ROI ids for reference (AAL): 4101 Hippocampus_L, 4102 Hippocampus_R,
    4201 Amygdala_L, 4202 Amygdala_R.
    """
    brain_atlas = BrainAtlas("AAL", "vec",
                             rois=["Hippocampus_R", "Hippocampus_L", "Amygdala_L", "Amygdala_R"])
    new_data = brain_atlas.transform(self.X)

    # Bug fix: the original `self.assertTrue(len(self.X), len(brain_atlas.rois))`
    # always passed, because assertTrue's second argument is only the failure
    # message. Assert a real invariant instead: one extracted sample per
    # input image.
    self.assertEqual(len(new_data), len(self.X))

    # 'mean' mode over all ROIs should run without error on the same data.
    brain_atlas_mean = BrainAtlas("AAL", "mean", rois='all')
    brain_atlas_mean.transform(self.X)

def test_resampling_and_smoothing(self):
    """Compare PHOTON smoothing/resampling output against a nilearn reference.

    Each transform class is exercised in four configurations: single core,
    multi core (with an on-disk cache folder), and both again wrapped in a
    PhotonBatchElement with batch_size=5. The output images must match the
    ones produced by nilearn's image.smooth_img / image.resample_img.
    """

    testsuite = ["Testing Method on Single Core",
                 "Testing Method on Multi Core",
                 "Testing Method on Single Core Batched",
                 "Testing Method on Multi Core Batched"]

    def create_instances_and_transform(neuro_class, neuro_class_str, param_dict, transformed_X):
        # Build the four configurations described in `testsuite` (same order).
        instance_list = []

        instance_list.append(neuro_class(nr_of_processes=1, **param_dict))
        instance_list.append(neuro_class(nr_of_processes=3,
                                         cache_folder=os.path.join(self.test_folder, "cache_folder"), **param_dict))
        instance_list.append(PhotonBatchElement(neuro_class_str, batch_size=5, nr_of_processes=1, **param_dict))
        instance_list.append(PhotonBatchElement(neuro_class_str, batch_size=5, nr_of_processes=3,
                                                cache_folder=os.path.join(self.test_folder, "cache_folder"),
                                                **param_dict))

        for test, obj in enumerate(instance_list):
            print(testsuite[test])

            # transform data; PhotonBatchElement.transform returns (X, y, kwargs)
            # and holds the real transformer in base_element.
            if isinstance(obj, PhotonBatchElement):
                new_X, _, _ = obj.transform(self.X)
                obj.base_element.clear_cache()
            else:
                new_X = obj.transform(self.X)
                obj.clear_cache()

            # compare output to nilearn version, element-wise per input image
            for index, nilearn_nifti in enumerate(transformed_X):
                photon_nifti = new_X[index]
                if isinstance(photon_nifti, Nifti1Image):
                    self.assertTrue(np.array_equal(photon_nifti.dataobj, nilearn_nifti.dataobj))
                else:
                    # transformer may return raw arrays instead of Nifti images
                    self.assertTrue(np.array_equal(np.asarray(photon_nifti), nilearn_nifti.dataobj))

        print("finished testing object: all images are fine.")

    print("Testing Nifti Smoothing.")
    smoothing_param_dict = {'fwhm': [3, 3, 3]}
    nilearn_smoothed_X = []
    for element in self.X:
        nilearn_smoothed_X.append(image.smooth_img(element, **smoothing_param_dict))
    create_instances_and_transform(SmoothImages, 'SmoothImages', smoothing_param_dict, nilearn_smoothed_X)

    print("Testing Nifti Resampling.")
    target_affine = np.diag([5, 5, 5])
    resample_param_dict = {'target_affine': target_affine, 'interpolation': 'nearest'}
    nilearn_resampled_X = []
    for element in self.X:
        nilearn_resampled_X.append(image.resample_img(element, **resample_param_dict))
    # NOTE(review): the PHOTON instances receive voxel_size [5, 5, 5] while the
    # nilearn reference uses target_affine = diag([5, 5, 5]) — presumably
    # equivalent inside ResampleImages; verify against its implementation.
    create_instances_and_transform(ResampleImages, 'ResampleImages', {'voxel_size': [5, 5, 5]}, nilearn_resampled_X)





Expand Down

0 comments on commit a43655b

Please sign in to comment.