
Commit

Merge pull request #2334 from kif/2314_medfilt_ng_python
Python implementation of medfilt_ng
kif authored Nov 19, 2024
2 parents 34d3213 + 8ca53f7 commit e810bd9
Showing 4 changed files with 225 additions and 2 deletions.
118 changes: 117 additions & 1 deletion src/pyFAI/engines/CSR_engine.py
@@ -26,9 +26,10 @@
__contact__ = "[email protected]"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__date__ = "06/09/2024"
__date__ = "14/11/2024"
__status__ = "development"

from collections.abc import Iterable
import logging
import warnings
logger = logging.getLogger(__name__)
@@ -46,6 +47,7 @@
from ..utils import calc_checksum
from ..containers import Integrate1dtpl, Integrate2dtpl, ErrorModel

# structured dtype used by medfilt: sort key ('any' = apparent intensity), signal, variance, normalization
mf_dtype = numpy.dtype([('any', 'f4'), ('sig', 'f4'), ('var', 'f4'), ('norm', 'f4')])

class CSRIntegrator(object):

@@ -389,6 +391,120 @@ def sigma_clip(self, data, dark=None, dummy=None, delta_dummy=None,
# Here we return the standard deviation and not the standard error of the mean !
return Integrate1dtpl(self.bin_centers, avg, std, sum_sig, sum_var, sum_nrm, cnt, std, sem, sum_nrm2)

def medfilt(self, data, dark=None, dummy=None, delta_dummy=None,
variance=None, dark_variance=None,
flat=None, solidangle=None, polarization=None, absorption=None,
safe=True, error_model=None,
normalization_factor=1.0, quantile=0.5
):
"""
Perform a median-filter/quantile mean in azimuthal space.
The error is propagated according to:
.. math::
    signal = (raw - dark)
    variance = variance + dark_variance
    normalization = normalization_factor * (flat * solidangle * polarization * absorption)
    count = number of pixels contributing
Averaging is performed using the CSR representation of the look-up table on all
arrays after sorting pixels by apparent intensity and keeping only those that fall
within the requested quantile window (weighted by the normalization).
:param dark: array of same shape as data for pre-processing
:param dummy: value for invalid data
:param delta_dummy: precision for dummy assessment
:param variance: array of same shape as data for pre-processing
:param dark_variance: array of same shape as data for pre-processing
:param flat: array of same shape as data for pre-processing
:param solidangle: array of same shape as data for pre-processing
:param polarization: array of same shape as data for pre-processing
:param absorption: array of same shape as data for pre-processing
:param safe: Unused in this implementation
:param error_model: Enum or str, "azimuthal" or "poisson"
:param normalization_factor: divide raw signal by this value
:param quantile: quantile (percentile/100) used for the cut;
can be a 2-tuple to specify a range to average over.
By default, takes the median.
:return: namedtuple with "position intensity error signal variance normalization count"
"""
if isinstance(quantile, Iterable):
q_start = min(quantile)
q_stop = max(quantile)
else:
q_stop = q_start = quantile

indptr = self._csr.indptr
indices = self._csr.indices
csr_data = self._csr.data
csr_data2 = self._csr2.data

error_model = ErrorModel.parse(error_model)

prep = preproc(data,
dark=dark,
flat=flat,
solidangle=solidangle,
polarization=polarization,
absorption=absorption,
mask=None,
dummy=dummy,
delta_dummy=delta_dummy,
normalization_factor=normalization_factor,
empty=self.empty,
split_result=4,
variance=variance,
dark_variance=dark_variance,
dtype=numpy.float32,
error_model=error_model,
out=self.preprocessed)

prep_flat = prep.reshape((-1, 4))
pixels = prep_flat[indices]

work0 = numpy.zeros((indices.size, 4), dtype=numpy.float32)
work0[:, 0] = pixels[:, 0] / pixels[:, 2]  # apparent intensity (signal/norm), used as sort key
work0[:, 1] = pixels[:, 0] * csr_data      # weighted signal
work0[:, 2] = pixels[:, 1] * csr_data2     # weighted variance
work0[:, 3] = pixels[:, 2] * csr_data      # weighted normalization
work1 = work0.view(mf_dtype).ravel()       # structured view ('any', 'sig', 'var', 'norm')

size = indptr.size-1
signal = numpy.zeros(size, dtype=numpy.float64)
norm = numpy.zeros(size, dtype=numpy.float64)
norm2 = numpy.zeros(size, dtype=numpy.float64)
variance = numpy.zeros(size, dtype=numpy.float64)
cnt = numpy.zeros(size, dtype=numpy.int32)
for i, start in enumerate(indptr[:-1]):
stop = indptr[i + 1]
tmp = numpy.sort(work1[start:stop])  # sort the pixels of the bin by apparent intensity
upper = numpy.cumsum(tmp["norm"])    # cumulative normalization (weight)
last = upper[-1]
lower = numpy.concatenate(([0], upper[:-1]))
# keep pixels whose weight interval overlaps [q_start, q_stop] of the total weight
mask = numpy.logical_and(upper >= q_start * last, lower <= q_stop * last)
tmp = tmp[mask]
cnt[i] = tmp.size
signal[i] = tmp["sig"].sum(dtype=numpy.float64)
variance[i] = tmp["var"].sum(dtype=numpy.float64)
norm[i] = tmp["norm"].sum(dtype=numpy.float64)
norm2[i] = (tmp["norm"] ** 2).sum(dtype=numpy.float64)

with warnings.catch_warnings():
warnings.simplefilter("ignore")
avg = signal / norm
std = numpy.sqrt(variance / norm2)
sem = numpy.sqrt(variance) / norm
# mask out remaining NaNs
msk = norm <= 0
avg[msk] = self.empty
std[msk] = self.empty
sem[msk] = self.empty

return Integrate1dtpl(self.bin_centers, avg, sem, signal, variance, norm, cnt, std, sem, norm2)

@property
def check_mask(self):
return self.mask_checksum is not None
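For reviewers who want to see the selection rule above in isolation, here is a minimal standalone sketch (not part of the patch) that applies the same weighted-quantile window and averaging to a single synthetic bin. The helper name medfilt_bin and the toy values are made up for illustration; the field names mirror the mf_dtype introduced in this file.

import numpy

# Structured dtype mirroring mf_dtype above: sort key, signal, variance, normalization
mf_dtype = numpy.dtype([('any', 'f4'), ('sig', 'f4'), ('var', 'f4'), ('norm', 'f4')])

def medfilt_bin(pixels, q_start=0.5, q_stop=0.5):
    """Weighted-quantile average of one azimuthal bin (toy illustration)."""
    tmp = numpy.sort(pixels, order='any')            # sort by apparent intensity
    upper = numpy.cumsum(tmp['norm'])                # cumulative weight
    last = upper[-1]
    lower = numpy.concatenate(([0.0], upper[:-1]))
    # keep pixels whose weight interval overlaps [q_start, q_stop] of the total weight
    keep = tmp[(upper >= q_start * last) & (lower <= q_stop * last)]
    signal = keep['sig'].sum(dtype=numpy.float64)
    variance = keep['var'].sum(dtype=numpy.float64)
    norm = keep['norm'].sum(dtype=numpy.float64)
    norm2 = (keep['norm'] ** 2).sum(dtype=numpy.float64)
    avg = signal / norm                              # weighted mean of the window
    std = numpy.sqrt(variance / norm2)               # standard deviation
    sem = numpy.sqrt(variance) / norm                # standard error of the mean
    return avg, std, sem

# Toy bin of 5 pixels with one outlier; quantile 0.5 acts as a weighted median
pix = numpy.zeros(5, dtype=mf_dtype)
pix['sig'] = [1.0, 1.1, 0.9, 1.05, 50.0]
pix['norm'] = 1.0
pix['var'] = pix['sig']                              # Poisson-like variance
pix['any'] = pix['sig'] / pix['norm']
print(medfilt_bin(pix))                              # the outlier falls outside the median window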
1 change: 1 addition & 0 deletions src/pyFAI/test/meson.build
@@ -56,6 +56,7 @@ py.install_sources(
'test_uncertainties.py',
'test_watershed.py',
'test_worker.py',
'test_medfilt_engine.py',
'utilstest.py'],
pure: false, # Will be installed next to binaries
subdir: 'pyFAI/test' # Folder relative to site-packages to install to
4 changes: 3 additions & 1 deletion src/pyFAI/test/test_all.py
@@ -32,7 +32,7 @@
__contact__ = "[email protected]"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__date__ = "30/10/2024"
__date__ = "14/11/2024"

import sys
import unittest
@@ -96,6 +96,7 @@
from . import test_uncertainties
from . import test_ring_extraction
from . import test_fiber_integrator
from . import test_medfilt_engine

logger = logging.getLogger(__name__)

@@ -158,6 +159,7 @@ def suite():
testsuite.addTest(test_uncertainties.suite())
testsuite.addTest(test_ring_extraction.suite())
testsuite.addTest(test_fiber_integrator.suite())
testsuite.addTest(test_medfilt_engine.suite())
return testsuite


104 changes: 104 additions & 0 deletions src/pyFAI/test/test_medfilt_engine.py
@@ -0,0 +1,104 @@
#!/usr/bin/env python
# coding: utf-8
#
# Project: Azimuthal integration
# https://github.com/silx-kit/pyFAI
#
# Copyright (C) 2015-2018 European Synchrotron Radiation Facility, Grenoble, France
#
# Principal author: Jérôme Kieffer ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

"""Test suites for median filtering engines"""

__author__ = "Jérôme Kieffer"
__contact__ = "[email protected]"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__date__ = "14/11/2024"

import unittest
import numpy
import logging
logger = logging.getLogger(__name__)
from .utilstest import UtilsTest
import fabio
from .. import load


class TestMedfilt(unittest.TestCase):
"""Test Azimuthal median filtering results
"""

@classmethod
def setUpClass(cls) -> None:
super(TestMedfilt, cls).setUpClass()
cls.method = ["full", "csr", "python"]
cls.img = fabio.open(UtilsTest.getimage("mock.tif")).data
cls.ai = load({ "dist": 0.1,
"poni1":0.03,
"poni2":0.03,
"detector": "Detector",
"detector_config": {"pixel1": 1e-4,
"pixel2": 1e-4,
"max_shape": [500, 600],
"orientation": 3}})
cls.npt = 100

@classmethod
def tearDownClass(cls) -> None:
super(TestMedfilt, cls).tearDownClass()
cls.method = cls.img = cls.ai = cls.npt = None


def test_python(self):
print(self.ai)
method = tuple(self.method)
ref = self.ai.integrate1d(self.img, self.npt, unit="2th_rad", method=method, error_model="poisson")
print(ref.method)
engine = self.ai.engines[ref.method].engine
obt = engine.medfilt(self.img,
solidangle=self.ai.solidAngleArray(),
quantile=(0, 1),  # take everything: this way it behaves like a plain weighted mean
error_model="poisson")

self.assertTrue(numpy.allclose(ref.radial, obt.position), "radial matches")
self.assertTrue(numpy.allclose(ref.sum_signal, obt.signal), "signal matches")
self.assertTrue(numpy.allclose(ref.sum_variance, obt.variance), "variance matches")
self.assertTrue(numpy.allclose(ref.sum_normalization, obt.normalization), "normalization matches")
self.assertTrue(numpy.allclose(ref.sum_normalization2, obt.norm_sq), "norm_sq matches")

self.assertTrue(numpy.allclose(ref.intensity, obt.intensity), "intensity matches")
self.assertTrue(numpy.allclose(ref.sigma, obt.sigma), "sigma matches")
self.assertTrue(numpy.allclose(ref.std, obt.std), "std matches")
self.assertTrue(numpy.allclose(ref.sem, obt.sem), "sem matches")



def suite():
loader = unittest.defaultTestLoader.loadTestsFromTestCase
testsuite = unittest.TestSuite()
testsuite.addTest(loader(TestMedfilt))
return testsuite


if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite())
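
For reviewers wanting to try the new method outside the test harness, below is a hedged usage sketch mirroring the test above. The geometry dict and the random image are placeholders, and the CSR engine is reached the same way the test does: a regular CSR integration is run first so that the engine gets instantiated and cached.

import numpy
import pyFAI

# Placeholder image and geometry mirroring the test fixture above
img = numpy.random.poisson(100, size=(500, 600)).astype(numpy.float32)
ai = pyFAI.load({"dist": 0.1, "poni1": 0.03, "poni2": 0.03,
                 "detector": "Detector",
                 "detector_config": {"pixel1": 1e-4, "pixel2": 1e-4,
                                     "max_shape": [500, 600], "orientation": 3}})

# Run a regular CSR integration first so the engine is instantiated and cached
ref = ai.integrate1d(img, 100, unit="2th_rad",
                     method=("full", "csr", "python"), error_model="poisson")
engine = ai.engines[ref.method].engine

# Median filter (quantile=0.5); a 2-tuple such as (0.4, 0.6) averages over a window
res = engine.medfilt(img, solidangle=ai.solidAngleArray(),
                     quantile=0.5, error_model="poisson")
print(res.position[:5], res.intensity[:5])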
