Fix polarisation flagging for uncalibrated data (#55)
* Fixes #54 (polarisation flagging), fixes #51 (baseline statistics printing) and fixes #49 (harden the scan and field selection switches)
* Slightly less aggressive strategy to handle the uncalibrated data case
* Add the capacity to flag total power for residuals
* Add 4k UHF mask
bennahugo authored Jul 22, 2019
1 parent 9656338 commit ae1e117
Showing 21 changed files with 362 additions and 276 deletions.
4 changes: 4 additions & 0 deletions HISTORY.rst
@@ -5,6 +5,10 @@ History
0.1.3 (YYYY-MM-DD)
------------------

* Fix polarisation flagging for uncalibrated data (:pr:`55`)
* Add ability to flag on total power (:pr:`55`)
* Baseline statistics (:pr:`55`)
* 4K UHF mask (:pr:`55`)
* Add Pull Request Template (:pr:`53`)


30 changes: 16 additions & 14 deletions docs/conf.py
@@ -13,6 +13,7 @@
# All configuration values have a default; values that are commented out
# serve to show the default.

import sphinx_rtd_theme
import sys
import os
import shlex
@@ -32,6 +33,7 @@
except ImportError:
from mock import Mock as MagicMock


class Mock(MagicMock):
@classmethod
def __getattr__(cls, name):
@@ -40,11 +42,11 @@ def __getattr__(cls, name):
obj.__doc__ = "doc"
return obj


MOCK_MODULES = ['numpy', 'numba', 'dask']

sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)

import sphinx_rtd_theme

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -232,25 +234,25 @@ def __getattr__(cls, name):
# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',

# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',

# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',

# Latex figure (float) alignment
#'figure_align': 'htbp',
# Latex figure (float) alignment
# 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'cookiecutter-pypackage.tex', 'cookiecutter-pypackage Documentation',
'Audrey Roy Greenfeld', 'manual'),
(master_doc, 'cookiecutter-pypackage.tex', 'cookiecutter-pypackage Documentation',
'Audrey Roy Greenfeld', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
@@ -293,9 +295,9 @@ def __getattr__(cls, name):
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'cookiecutter-pypackage', 'cookiecutter-pypackage Documentation',
author, 'cookiecutter-pypackage', 'One line description of project.',
'Miscellaneous'),
(master_doc, 'cookiecutter-pypackage', 'cookiecutter-pypackage Documentation',
author, 'cookiecutter-pypackage', 'One line description of project.',
'Miscellaneous'),
]

extlinks = {
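
The docs/conf.py hunks above are mostly PEP 8 clean-up, but they also move the sphinx_rtd_theme import to the top of the file while the heavy runtime dependencies stay mocked out so the documentation can build without them. A minimal sketch of that mocking pattern, assuming the usual unittest.mock import in the context elided above:

import sys

try:
    from unittest.mock import MagicMock
except ImportError:  # Python 2 fallback, as in the original conf.py
    from mock import Mock as MagicMock


class Mock(MagicMock):
    # Any attribute lookup on a mocked module returns another mock with a
    # docstring, which is enough for autodoc to import the package.
    @classmethod
    def __getattr__(cls, name):
        obj = MagicMock()
        obj.__doc__ = "doc"
        return obj


MOCK_MODULES = ['numpy', 'numba', 'dask']
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
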
2 changes: 1 addition & 1 deletion setup.py
@@ -52,6 +52,6 @@
name='tricolour',
packages=find_packages(),
url='https://github.com/ska-sa/tricolour',
version='0.1.0',
version="0.1.2",
zip_safe=False,
)
85 changes: 7 additions & 78 deletions tests/custom.yaml
@@ -2,9 +2,9 @@
# List of strategies to apply in order
strategies:
# only enable me if you really want to start from scratch
#-
# name: reset_flags:
# task: unflag
# -
# name: reset_flags:
# task: unflag
-
name: nan_dropouts_flag
task: flag_nans_zeros
@@ -18,7 +18,7 @@ strategies:
name: background_flags
task: sum_threshold
kwargs:
outlier_nsigma: 5
outlier_nsigma: 10
windows_time: [1, 2, 4, 8]
windows_freq: [1, 2, 4, 8]
background_reject: 2.0
@@ -39,8 +39,8 @@ strategies:
kwargs:
major_cycles: 7
or_original_from_cycle: 1
taylor_degrees: 10
sigma: 7.0
taylor_degrees: 20
sigma: 15.0
# flags are discarded at this point since we or from cycle 1
# reflag nans and zeros
-
@@ -51,75 +51,4 @@ strategies:
task: apply_static_mask
kwargs:
accumulation_mode: "or"
uvrange: "0~100"
-
name: final_st_very_broad
task: sum_threshold
kwargs:
outlier_nsigma: 10
windows_time: [1, 2, 4, 8]
windows_freq: [32, 48, 64, 128]
background_reject: 2.0
background_iterations: 5
spike_width_time: 6.5
spike_width_freq: 64.0
time_extend: 3
freq_extend: 3
freq_chunks: 10
average_freq: 1
flag_all_time_frac: 0.6
flag_all_freq_frac: 0.8
rho: 1.3
num_major_iterations: 2
-
name: final_st_broad
task: sum_threshold
kwargs:
outlier_nsigma: 5
windows_time: [1, 2, 4, 8]
windows_freq: [1, 2, 4, 8]
background_reject: 2.0
background_iterations: 5
spike_width_time: 6.5
spike_width_freq: 10.0
time_extend: 3
freq_extend: 3
freq_chunks: 10
average_freq: 1
flag_all_time_frac: 0.6
flag_all_freq_frac: 0.8
rho: 1.3
num_major_iterations: 2
-
name: final_st_narrow
task: sum_threshold
kwargs:
outlier_nsigma: 8
windows_time: [1, 2, 4, 8]
windows_freq: [1, 2, 4, 8]
background_reject: 2.0
background_iterations: 5
spike_width_time: 2
spike_width_freq: 10.0
time_extend: 3
freq_extend: 3
freq_chunks: 10
average_freq: 1
flag_all_time_frac: 0.6
flag_all_freq_frac: 0.8
rho: 1.3
num_major_iterations: 2
-
name: residual_flag_final
task: uvcontsub_flagger
kwargs:
major_cycles: 7
or_original_from_cycle: 0
taylor_degrees: 15
sigma: 7.0
-
name: flag_autos
task: flag_autos
-
name: combine_with_input_flags
task: combine_with_input_flags
uvrange: "0~550"
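
The tests/custom.yaml changes above raise the background sum_threshold and uvcontsub thresholds, widen the static-mask uvrange, and drop the final flagging passes from the test strategy. A small sketch of how such a strategy file can be inspected (PyYAML and the file path are assumptions for illustration):

import yaml  # PyYAML, assumed installed for this sketch

with open("tests/custom.yaml") as f:
    config = yaml.safe_load(f)

# Each strategy names a task (sum_threshold, uvcontsub_flagger,
# apply_static_mask, ...) plus optional keyword arguments.
for strategy in config["strategies"]:
    print(strategy["name"], strategy["task"], strategy.get("kwargs", {}))
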
10 changes: 5 additions & 5 deletions tests/test_acceptance.py
@@ -80,7 +80,7 @@ def flagged_ms(request, tmp_path_factory):
test_directory = os.path.dirname(__file__)

args = ['tricolour',
'-fs', 'polarisation',
'-fs', 'total_power',
'-c', os.path.join(test_directory, 'custom.yaml'),
ms_filename]

@@ -208,13 +208,13 @@ def test_flag_count(flagged_ms, tol):
flag_sel = flag[fid == fnames.index("3C286")]
count_flagged_3c286 = np.nansum(flag_sel, axis=(0, 1, 2))
flagged_ratio = count_flagged_3c286 / flag_sel.size
print("Percent flagged for 3C286: %.3f%%" % (100.*flagged_ratio))
print("Percent flagged for 3C286: %.3f%%" % (100. * flagged_ratio))
assert flagged_ratio < tol

flag_sel = flag[fid == fnames.index("PKS1934-63")]
count_flagged_1934 = np.nansum(flag_sel, axis=(0, 1, 2))
flagged_ratio = count_flagged_1934 / flag_sel.size
print("Percent flagged for PKS1934-63: %.3f%%" % (100.*flagged_ratio))
print("Percent flagged for PKS1934-63: %.3f%%" % (100. * flagged_ratio))
assert flagged_ratio < tol


@@ -232,12 +232,12 @@ def test_bandwidth_flagged(flagged_ms, tol):
count_flagged_3c286 = np.nansum(data_sel, axis=0) > 0
flagged_ratio = count_flagged_3c286.sum() / data_sel.shape[1]
print("Percent bandwidth flagged for 3C286: %.3f%%"
% (100.*flagged_ratio))
% (100. * flagged_ratio))
assert flagged_ratio < tol

data_sel = data[fid == fnames.index("PKS1934-63"), :, 0]
count_flagged_1934 = np.nansum(data_sel, axis=0) > 0
flagged_ratio = count_flagged_1934.sum() / data_sel.shape[1]
print("Percent bandwidth flagged for PKS1934-63: %.3f%%"
% (100.*flagged_ratio))
% (100. * flagged_ratio))
assert flagged_ratio < tol
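
The acceptance test now drives the new total-power flagging path through the -fs switch instead of the polarisation strategy. A hedged sketch of the equivalent standalone invocation (the Measurement Set and config paths are placeholders, not part of the repository):

import subprocess

# Mirrors the arguments built in tests/test_acceptance.py.
args = ["tricolour",
        "-fs", "total_power",
        "-c", "tests/custom.yaml",
        "my_data.ms"]
subprocess.run(args, check=True)
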
5 changes: 4 additions & 1 deletion tests/test_flagging.py
@@ -157,6 +157,7 @@ def test_time_median():
class TestMedianAbs(object):
"""Tests for :func:`katsdpsigproc.rfi.flagging._median_abs` and
:func:`katsdpsigproc.rfi.flagging._median_abs_axis0`."""

def setup(self):
self.data = np.array([[-2.0, -6.0, 4.5], [1.5, 3.3, 0.5]], np.float32)
self.flags = np.array([[0, 0, 0], [0, 1, 0]], np.uint8)
@@ -185,6 +186,7 @@ class TestLinearlyInterpolateNans(object):
"""
Tests for :func:`katsdpsigproc.rfi.flagging._linearly_interpolate_nans`.
"""

def setup(self):
self.y = np.array([np.nan, np.nan, 4.0, np.nan, np.nan,
10.0, np.nan, -2.0, np.nan, np.nan])
@@ -523,7 +525,8 @@ def _make_background(self, shape, rs):

def _make_data(self, flagger, rs, shape=(1, 234, 345)):
background = self._make_background(shape, rs).astype(np.float32)
data = background + (rs.standard_normal(shape)*0.1).astype(np.float32)
data = background + (rs.standard_normal(shape)
* 0.1).astype(np.float32)
rfi = np.zeros(shape, np.float32)
# Some completely bad channels and bad times
rfi[:, 12, :] = 1
34 changes: 17 additions & 17 deletions tests/test_flagging_additional.py
@@ -17,20 +17,20 @@
def wsrt_ants():
""" Westerbork antenna positions """
return np.array([
[3828763.10544699, 442449.10566454, 5064923.00777],
[3828746.54957258, 442592.13950824, 5064923.00792],
[3828729.99081359, 442735.17696417, 5064923.00829],
[3828713.43109885, 442878.2118934, 5064923.00436],
[3828696.86994428, 443021.24917264, 5064923.00397],
[3828680.31391933, 443164.28596862, 5064923.00035],
[3828663.75159173, 443307.32138056, 5064923.00204],
[3828647.19342757, 443450.35604638, 5064923.0023],
[3828630.63486201, 443593.39226634, 5064922.99755],
[3828614.07606798, 443736.42941621, 5064923.],
[3828609.94224429, 443772.19450029, 5064922.99868],
[3828601.66208572, 443843.71178407, 5064922.99963],
[3828460.92418735, 445059.52053929, 5064922.99071],
[3828452.64716351, 445131.03744105, 5064922.98793]],
[3828763.10544699, 442449.10566454, 5064923.00777],
[3828746.54957258, 442592.13950824, 5064923.00792],
[3828729.99081359, 442735.17696417, 5064923.00829],
[3828713.43109885, 442878.2118934, 5064923.00436],
[3828696.86994428, 443021.24917264, 5064923.00397],
[3828680.31391933, 443164.28596862, 5064923.00035],
[3828663.75159173, 443307.32138056, 5064923.00204],
[3828647.19342757, 443450.35604638, 5064923.0023],
[3828630.63486201, 443593.39226634, 5064922.99755],
[3828614.07606798, 443736.42941621, 5064923.],
[3828609.94224429, 443772.19450029, 5064922.99868],
[3828601.66208572, 443843.71178407, 5064922.99963],
[3828460.92418735, 445059.52053929, 5064922.99071],
[3828452.64716351, 445131.03744105, 5064922.98793]],
dtype=np.float64)


@@ -61,10 +61,10 @@ def test_flag_nans_and_zeros(unique_baselines):
shape = (unique_baselines.shape[0], ncorr, ntime, nchan)

zero_flags = np.zeros(shape, dtype=np.uint8)
vis = np.random.random(shape) + 1j*np.random.random(shape)
vis = np.random.random(shape) + 1j * np.random.random(shape)

vis[4, 2, 4, 5] = 0
vis[0, 1, 2, 7] = np.nan + np.nan*1j
vis[0, 1, 2, 7] = np.nan + np.nan * 1j

out_flags = flag_nans_and_zeros(vis, zero_flags)

@@ -110,7 +110,7 @@ def test_apply_static_mask(wsrt_ants, unique_baselines,
ncorr = 4

first_freq = .856e9
last_freq = 2*.856e9
last_freq = 2 * .856e9

chan_freqs = np.linspace(first_freq, last_freq, nchan, dtype=np.float64)
chan_widths = np.zeros_like(chan_freqs)
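
For reference, test_flag_nans_and_zeros above checks that visibilities which are exactly zero or NaN end up flagged. A plain NumPy sketch of that behaviour (not tricolour's implementation) looks like this:

import numpy as np


def flag_nans_and_zeros_sketch(vis, flags):
    # OR the input flags with flags raised for exact zeros and NaNs.
    bad = (vis == 0) | np.isnan(vis)
    return (flags | bad).astype(np.uint8)


shape = (6, 4, 10, 16)  # (baseline, corr, time, chan), as in the test
vis = np.random.random(shape) + 1j * np.random.random(shape)
vis[4, 2, 4, 5] = 0
vis[0, 1, 2, 7] = np.nan + np.nan * 1j
out = flag_nans_and_zeros_sketch(vis, np.zeros(shape, np.uint8))
assert out[4, 2, 4, 5] == 1 and out[0, 1, 2, 7] == 1
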
2 changes: 1 addition & 1 deletion tests/test_packing.py
@@ -48,7 +48,7 @@ def test_vis_and_flag_packing(tmpdir, backend):
nrow = time.size

vis = (rs.standard_normal((nrow, nchan, ncorr)) +
rs.standard_normal((nrow, nchan, ncorr))*1j)
rs.standard_normal((nrow, nchan, ncorr)) * 1j)

flag = rs.randint(0, 2, (nrow, nchan, ncorr))

16 changes: 8 additions & 8 deletions tests/test_stokes.py
@@ -21,7 +21,7 @@
def test_unpolarised_intensity(stokes):
# Set up our stokes parameters in an interesting order
stokes = list(map(STOKES_TYPES.__getitem__, stokes))
vis = np.asarray([[[1+1j, 2+2j, 3+3j, 4+4j]]], np.complex128)
vis = np.asarray([[[1 + 1j, 2 + 2j, 3 + 3j, 4 + 4j]]], np.complex128)

stokes_map = stokes_corr_map(stokes)

@@ -30,16 +30,16 @@ def test_unpolarised_intensity(stokes):
unpol = 0

for c1, c2, a, s1, s2 in stokes_unpol:
v = a*(s1*vis[0, 0, c1] + s2*vis[0, 0, c2])
unpol += v.real # imaginary contains only noise
v = a * (s1 * vis[0, 0, c1] + s2 * vis[0, 0, c2])
unpol += np.abs(v)

# Polarised stokes mappings
stokes_pol = tuple(v for k, v in stokes_map.items() if k != 'I')
pol = 0

for c1, c2, a, s1, s2 in stokes_pol:
v = a*(s1*vis[0, 0, c1] + s2*vis[0, 0, c2])
pol += v.real**2 # imaginary contains only noise
v = a * (s1 * vis[0, 0, c1] + s2 * vis[0, 0, c2])
pol += np.abs(v)**2

upi = unpol - np.sqrt(pol)
val = unpolarised_intensity(vis, stokes_unpol, stokes_pol)
@@ -54,7 +54,7 @@ def test_unpolarised_intensity(stokes):
def test_polarised_intensity(stokes):
# Set up our stokes parameters in an interesting order
stokes = list(map(STOKES_TYPES.__getitem__, stokes))
vis = np.asarray([[[1+1j, 2+2j, 3+3j, 4+4j]]], np.complex128)
vis = np.asarray([[[1 + 1j, 2 + 2j, 3 + 3j, 4 + 4j]]], np.complex128)

stokes_map = stokes_corr_map(stokes)

@@ -63,8 +63,8 @@ def test_polarised_intensity(stokes):
pol = 0

for c1, c2, a, s1, s2 in stokes_pol:
v = a*(s1*vis[0, 0, c1] + s2*vis[0, 0, c2])
pol += v.real**2 # imaginary contains only noise
v = a * (s1 * vis[0, 0, c1] + s2 * vis[0, 0, c2])
pol += np.abs(v)**2 # imaginary contains only noise

pi = np.sqrt(pol)
val = polarised_intensity(vis, stokes_pol)
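
The test_stokes.py changes carry the core of the uncalibrated-data fix: unpolarised and polarised intensity are now accumulated from visibility magnitudes rather than real parts, because the imaginary part of uncalibrated data is not just noise. A rough sketch of the quantity being checked, assuming a simple XX, XY, YX, YY correlation ordering (this mirrors the test logic, not tricolour's internals):

import numpy as np

vis = np.asarray([[[1 + 1j, 2 + 2j, 3 + 3j, 4 + 4j]]], np.complex128)
xx, xy, yx, yy = (vis[0, 0, i] for i in range(4))

stokes_i = np.abs(0.5 * (xx + yy))  # total power
stokes_q = np.abs(0.5 * (xx - yy))  # polarised components, by magnitude
stokes_u = np.abs(0.5 * (xy + yx))
stokes_v = np.abs(0.5 * (xy - yx))

# Unpolarised intensity: total power minus the polarised intensity.
upi = stokes_i - np.sqrt(stokes_q ** 2 + stokes_u ** 2 + stokes_v ** 2)
print(upi)
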
2 changes: 1 addition & 1 deletion tests/test_window_statistics.py
@@ -33,7 +33,7 @@ def unique_baselines(antenna_names):

@pytest.fixture
def channels():
return np.linspace(.856e9, 2*.856e9, nchan)
return np.linspace(.856e9, 2 * .856e9, nchan)


@pytest.fixture