From 089a9b43399349a157000a8d60f553458b85e7a5 Mon Sep 17 00:00:00 2001
From: Andrew Pontzen
Date: Sun, 7 Apr 2024 14:48:19 +0100
Subject: [PATCH 01/13] Updates for latest pynbody beta

---
 tangos/parallel_tasks/pynbody_server/__init__.py     | 11 ++++++-----
 .../parallel_tasks/pynbody_server/transfer_array.py  |  7 +++++++
 2 files changed, 13 insertions(+), 5 deletions(-)

diff --git a/tangos/parallel_tasks/pynbody_server/__init__.py b/tangos/parallel_tasks/pynbody_server/__init__.py
index 6c6cbf08..53b9e0e5 100644
--- a/tangos/parallel_tasks/pynbody_server/__init__.py
+++ b/tangos/parallel_tasks/pynbody_server/__init__.py
@@ -67,22 +67,23 @@ def process_async(self):
         log.logger.debug("Tree built after %.2fs", time.time()-start)
 
 class ReturnSharedTree(Message):
-    def __init__(self, leafsize, boxsize, kdnodes, offsets):
+    def __init__(self, leafsize, boxsize, kdnodes, offsets, kernel_id):
         super().__init__()
         self.leafsize = leafsize
         self.boxsize = boxsize
         self.kdnodes = kdnodes
         self.offsets = offsets
+        self.kernel_id = kernel_id
 
     def serialize(self):
-        return self.leafsize, self.boxsize
+        return self.leafsize, self.boxsize, self.kernel_id
 
     @classmethod
     def deserialize(cls, source, message):
-        leafsize, boxsize = message
+        leafsize, boxsize, kernel_id = message
         kdnodes = transfer_array.receive_array(source, use_shared_memory=True)
         offsets = transfer_array.receive_array(source, use_shared_memory=True)
-        obj = cls(leafsize, boxsize, kdnodes, offsets)
+        obj = cls(leafsize, boxsize, kdnodes, offsets, kernel_id)
         obj.source = source
         return obj
 
@@ -92,7 +93,7 @@ def send(self, destination):
         transfer_array.send_array(self.offsets, destination, use_shared_memory=True)
 
     def import_tree_into_local_view(self, sim):
-        sim.import_tree((self.leafsize, self.boxsize, self.kdnodes, self.offsets))
+        sim.import_tree((self.leafsize, self.boxsize, self.kdnodes, self.offsets, self.kernel_id))
 
 
 class GetSharedTree(AsyncProcessedMessage):
diff --git a/tangos/parallel_tasks/pynbody_server/transfer_array.py b/tangos/parallel_tasks/pynbody_server/transfer_array.py
index e4e74b07..1bf7d0a4 100644
--- a/tangos/parallel_tasks/pynbody_server/transfer_array.py
+++ b/tangos/parallel_tasks/pynbody_server/transfer_array.py
@@ -11,7 +11,14 @@ def send_array(array: pynbody.array.SimArray, destination: int, use_shared_memor
         if not hasattr(array, "_shared_fname"):
             if isinstance(array, np.ndarray) and hasattr(array, "base") and hasattr(array.base, "_shared_fname"):
                 array._shared_fname = array.base._shared_fname # the strides/offset will point into the same memory
             else:
                 raise ValueError("Array %r has no shared memory information" % array)
+            clearup_array_name = True
+        else:
+            clearup_array_name = False
         _send_array_shared_memory(array, destination)
+        if clearup_array_name:
+            # We can't leave _shared_fname lying around, as then the shared memory will get cleaned up when
+            # this array (which seems to be a subview) is garbage collected
+            del array._shared_fname
     else:
         _send_array_copy(array, destination)

From 70431a7bfc98e12e2d80b259348d5fc24fcb87ca Mon Sep 17 00:00:00 2001
From: Andrew Pontzen
Date: Sat, 4 May 2024 11:04:25 +0100
Subject: [PATCH 02/13] Fix an issue where an input handler requesting a specific halo class would fail

Add regression test
---
 tangos/input_handlers/pynbody.py |  9 ++++----
 tangos/testing/__init__.py       | 19 +++++++++++++++++
 tests/test_simulation_outputs.py | 36 ++++++++++++++++++++++++++++++--
 3 files changed, 57 insertions(+), 7 deletions(-)

diff --git a/tangos/input_handlers/pynbody.py b/tangos/input_handlers/pynbody.py
index 59fd9be2..ec3204f3 100644
--- a/tangos/input_handlers/pynbody.py
+++ b/tangos/input_handlers/pynbody.py
@@ -46,11 +46,10 @@ def __new__(cls, *args, **kwargs):
 
     @classmethod
     def _construct_pynbody_halos(cls, sim, *args, **kwargs):
-        if cls.pynbody_halo_class_name is None:
-            return sim.halos(*args, **kwargs)
-        else:
-            halo_class = getattr(pynbody.halo, cls.pynbody_halo_class_name)
-            return halo_class(sim, *args, **kwargs)
+        if cls.pynbody_halo_class_name is not None:
+            kwargs['priority'] = [cls.pynbody_halo_class_name]
+
+        return sim.halos(*args, **kwargs)
 
     def _is_able_to_load(self, ts_extension):
         filepath = self._extension_to_filename(ts_extension)
diff --git a/tangos/testing/__init__.py b/tangos/testing/__init__.py
index 7eaf6f03..957e8fd5 100644
--- a/tangos/testing/__init__.py
+++ b/tangos/testing/__init__.py
@@ -214,6 +214,25 @@ def init_blank_db_for_testing(**init_kwargs):
 
     return db_is_blank
 
+@contextlib.contextmanager
+def blank_db_for_testing(**kwargs):
+    """Context manager to create a blank database, then on exit restores the previous database.
+
+    For arguments, see init_blank_db_for_testing.
+    """
+
+    old_engine = core.get_default_engine()
+    old_session = core.get_default_session()
+    old_session_class = core.Session
+    core._internal_session = None
+    core._engine = None
+    init_blank_db_for_testing(**kwargs)
+    yield
+    core.close_db()
+    core._engine = old_engine
+    core.Session = old_session_class
+    core.set_default_session(old_session)
+
 
 def using_parallel_tasks(fn_or_num_processes, num_processes = 2):
     """Decorator for tests, using parallel_tasks multiprocessing backend to launch
diff --git a/tests/test_simulation_outputs.py b/tests/test_simulation_outputs.py
index 6dde83f1..d4a3354f 100644
--- a/tests/test_simulation_outputs.py
+++ b/tests/test_simulation_outputs.py
@@ -1,6 +1,7 @@
 import gc
 import os
 
+import numpy as np
 import numpy.testing as npt
 import pynbody
 
@@ -41,14 +42,14 @@ def test_handler_properties_quicker_flag():
     output_manager.quicker = True
     prop = output_manager.get_properties()
     npt.assert_allclose(prop['approx_resolution_kpc'], 33.590757, rtol=1e-5)
-    npt.assert_allclose(prop['approx_resolution_Msol'], 2.412033e+10, rtol=1e-5)
+    npt.assert_allclose(prop['approx_resolution_Msol'], 2.412033e+10, rtol=1e-4)
 
 def test_enumerate():
     assert set(output_manager.enumerate_timestep_extensions())=={"tiny.000640","tiny.000832"}
 
 def test_timestep_properties():
     props = output_manager.get_timestep_properties("tiny.000640")
-    npt.assert_allclose(props['time_gyr'],2.17328504831)
+    npt.assert_allclose(props['time_gyr'],2.173236752357068)
     npt.assert_allclose(props['redshift'], 2.96382819878)
 
 def test_enumerate_objects():
@@ -181,3 +182,34 @@ def test_load_region_uses_cache():
 
     assert id(region1a) == id(region1b)
     assert id(region1a) != id(region2)
+
+
+class DummyHaloClass(pynbody.halo.number_array.HaloNumberCatalogue):
+    def __init__(self, sim):
+        sim['grp'] = np.empty(len(sim), dtype=int)
+        sim['grp'].fill(-1)
+        sim['grp'][:1000] = 0
+        sim['grp'][1000:2000] = 1
+        super().__init__(sim, 'grp', ignore=-1)
+
+    @classmethod
+    def _can_load(cls, sim, arr_name='grp'):
+        return True
+
+
+class DummyPynbodyHandler(pynbody_outputs.ChangaInputHandler):
+    pynbody_halo_class_name = "DummyHaloClass"
+
+    def _can_enumerate_objects_from_statfile(self, ts_extension, object_typetag):
+        return False  # test requires enumerating halos via pynbody, to verify right halo class is used
+
+def test_halo_class_priority():
+    with testing.blank_db_for_testing(testing_db_name="test_halo_class_priority", erase_if_exists=True):
+        handler = DummyPynbodyHandler("test_tipsy")
+
+        with log.LogCapturer():
+            add.SimulationAdderUpdater(handler).scan_simulation_and_add_all_descendants()
+        h = db.get_halo("test_tipsy/tiny.000640/1").load()
+        assert (h.get_index_list(h.ancestor) == np.arange(1000)).all()
+        h = db.get_halo("test_tipsy/tiny.000640/2").load()
+        assert (h.get_index_list(h.ancestor) == np.arange(1000, 2000)).all()
\ No newline at end of file

From 96e6458f9a634fe79b05a9a2763ee26188b42b75 Mon Sep 17 00:00:00 2001
From: Andrew Pontzen
Date: Sat, 4 May 2024 11:05:15 +0100
Subject: [PATCH 03/13] Update for new pynbody shared array protection

Add better output for where multiprocessing processes crash
---
 tangos/parallel_tasks/backends/multiprocessing.py      | 9 +++++++--
 tangos/parallel_tasks/pynbody_server/transfer_array.py | 9 ++-------
 tests/test_pynbody_server.py                           | 4 ++--
 3 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/tangos/parallel_tasks/backends/multiprocessing.py b/tangos/parallel_tasks/backends/multiprocessing.py
index 952f1b62..e6b7be93 100644
--- a/tangos/parallel_tasks/backends/multiprocessing.py
+++ b/tangos/parallel_tasks/backends/multiprocessing.py
@@ -8,6 +8,8 @@ import time
 from typing import Optional
 
+from ...log import logger
+
 import tblib.pickling_support
 
 _slave = False
 _rank = None
 _size = None
@@ -188,9 +190,12 @@ def launch_functions(functions, args, capture_log=False):
 
     for proc_i in processes:
         if error:
-            #print "multiprocessing backend: send signal to",proc_i.pid
             os.kill(proc_i.pid, signal.SIGTERM)
-        proc_i.join()
+        proc_i.join(timeout=1.0)
+        if proc_i.is_alive():
+            logger.warn("Process %d did not terminate in a timely way; sending SIGKILL", proc_i.pid)
+            os.kill(proc_i.pid, signal.SIGKILL)
+            proc_i.join()
 
     if error is not None:
         raise error.with_traceback(traceback)
diff --git a/tangos/parallel_tasks/pynbody_server/transfer_array.py b/tangos/parallel_tasks/pynbody_server/transfer_array.py
index 1bf7d0a4..8e440aaf 100644
--- a/tangos/parallel_tasks/pynbody_server/transfer_array.py
+++ b/tangos/parallel_tasks/pynbody_server/transfer_array.py
@@ -9,16 +9,11 @@ def send_array(array: pynbody.array.SimArray, destination: int, use_shared_memor
         if not hasattr(array, "_shared_fname"):
             if isinstance(array, np.ndarray) and hasattr(array, "base") and hasattr(array.base, "_shared_fname"):
                 array._shared_fname = array.base._shared_fname # the strides/offset will point into the same memory
+                array._shared_owner = False # otherwise the memory will be deleted
             else:
                 raise ValueError("Array %r has no shared memory information" % array)
-            clearup_array_name = True
-        else:
-            clearup_array_name = False
         _send_array_shared_memory(array, destination)
-        if clearup_array_name:
-            # We can't leave _shared_fname lying around, as then the shared memory will get cleaned up when
-            # this array (which seems to be a subview) is garbage collected
-            del array._shared_fname
+
     else:
         _send_array_copy(array, destination)
diff --git a/tests/test_pynbody_server.py b/tests/test_pynbody_server.py
index 7cb050c1..e991eb7b 100644
--- a/tests/test_pynbody_server.py
+++ b/tests/test_pynbody_server.py
@@ -53,7 +53,7 @@ def test_get_array():
 @using_parallel_tasks(3)
 def test_get_shared_array():
     if pt.backend.rank()==1:
-        shared_array = pynbody.array._array_factory((10,), int, True, True)
+        shared_array = pynbody.array.array_factory((10,), int, True, True)
         shared_array[:] = np.arange(0,10)
         pt.pynbody_server.transfer_array.send_array(shared_array, 2, True)
         assert shared_array[2]==2
@@ -74,7 +74,7 @@ def test_get_shared_array():
 def test_get_shared_array_slice():
     """Like test_get_shared_array, but with a slice"""
     if pt.backend.rank()==1:
-        shared_array = pynbody.array._array_factory((10,), int, True, True)
+        shared_array = pynbody.array.array_factory((10,), int, True, True)
         shared_array[:] = np.arange(0,10)
         pt.pynbody_server.transfer_array.send_array(shared_array[1:7:2], 2, True)
         assert shared_array[3] == 3

From 2efeea6ed0f6920b400d7b2b4420df40e141aeb5 Mon Sep 17 00:00:00 2001
From: Andrew Pontzen
Date: Sat, 4 May 2024 11:31:49 +0100
Subject: [PATCH 04/13] Update pynbody minimum version

---
 setup.py                         | 2 +-
 tangos/input_handlers/pynbody.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/setup.py b/setup.py
index e1b3ee46..3061417b 100644
--- a/setup.py
+++ b/setup.py
@@ -30,7 +30,7 @@
     'pytest >= 5.0.0',
     'webtest >= 2.0',
     'pyquery >= 1.3.0',
-    'pynbody >= 2.0.0-beta.5',
+    'pynbody >= 2.0.0-beta.8',
     'yt>=3.4.0',
     'PyMySQL>=1.0.2',
 ]
diff --git a/tangos/input_handlers/pynbody.py b/tangos/input_handlers/pynbody.py
index ec3204f3..9ea815af 100644
--- a/tangos/input_handlers/pynbody.py
+++ b/tangos/input_handlers/pynbody.py
@@ -34,7 +34,7 @@ class PynbodyInputHandler(finding.PatternBasedFileDiscovery, HandlerBase):
     def __new__(cls, *args, **kwargs):
         import pynbody as pynbody_local
 
-        min_version = "2.0.0-beta.5"
+        min_version = "2.0.0-beta.8"
 
         if pynbody_local.__version__ < min_version:
             raise ImportError(f"Using tangos with pynbody requires pynbody {min_version} or later")

From 05e3a6c1a5d3aebb1603dc9951de508b811e60e5 Mon Sep 17 00:00:00 2001
From: Andrew Pontzen
Date: Sat, 4 May 2024 11:32:10 +0100
Subject: [PATCH 05/13] Fix formatting

---
 tangos/parallel_tasks/backends/multiprocessing.py | 4 ++--
 tests/test_simulation_outputs.py                  | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/tangos/parallel_tasks/backends/multiprocessing.py b/tangos/parallel_tasks/backends/multiprocessing.py
index e6b7be93..1499556c 100644
--- a/tangos/parallel_tasks/backends/multiprocessing.py
+++ b/tangos/parallel_tasks/backends/multiprocessing.py
@@ -8,10 +8,10 @@ import time
 from typing import Optional
 
-from ...log import logger
-
 import tblib.pickling_support
 
+from ...log import logger
+
 _slave = False
 _rank = None
 _size = None
diff --git a/tests/test_simulation_outputs.py b/tests/test_simulation_outputs.py
index d4a3354f..abb2b206 100644
--- a/tests/test_simulation_outputs.py
+++ b/tests/test_simulation_outputs.py
@@ -212,4 +212,4 @@ def test_halo_class_priority():
     h = db.get_halo("test_tipsy/tiny.000640/1").load()
     assert (h.get_index_list(h.ancestor) == np.arange(1000)).all()
     h = db.get_halo("test_tipsy/tiny.000640/2").load()
-    assert (h.get_index_list(h.ancestor) == np.arange(1000, 2000)).all()
\ No newline at end of file
+    assert (h.get_index_list(h.ancestor) == np.arange(1000, 2000)).all()

From f5b4e874e1f6733e69ce41f2ba61af760ff20e96 Mon Sep 17 00:00:00 2001
From: Andrew Pontzen
Date: Sat, 4 May 2024 13:40:33 +0100
Subject: [PATCH 06/13] Fix centering for pynbody v2 interface

---
 tangos/properties/pynbody/centring.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tangos/properties/pynbody/centring.py b/tangos/properties/pynbody/centring.py
index 81237339..806fc379 100644
--- a/tangos/properties/pynbody/centring.py
+++ b/tangos/properties/pynbody/centring.py
@@ -27,7 +27,8 @@ def _get_centre_and_max_radius(self, particle_data):
         # ensure the box is wrapped correctly by centring on one of the particles:
         temporary_centre = np.array(particle_data['pos'][0])
         with _recenter(particle_data, temporary_centre):
-            center = pynbody.analysis.halo.shrink_sphere_center(particle_data, shrink_factor=0.8, velocity=False)
+            center = pynbody.analysis.halo.shrink_sphere_center(particle_data, shrink_factor=0.8,
+                                                                particles_for_velocity=0) # i.e., don't calc velocity
 
         # mark_timer can be used to track timing of parts of the calculation. The results of these timings
         # appears in the tangos_writer logs:

From 7b78a1c8adc559b95caf3d20f6383892e99be734 Mon Sep 17 00:00:00 2001
From: Andrew Pontzen
Date: Sat, 4 May 2024 13:40:49 +0100
Subject: [PATCH 07/13] Update test_stat_files to be robust against test ordering

---
 tests/test_stat_files.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/tests/test_stat_files.py b/tests/test_stat_files.py
index 36013b7f..486db788 100644
--- a/tests/test_stat_files.py
+++ b/tests/test_stat_files.py
@@ -41,6 +41,10 @@ def setup_module():
 
     session.commit()
 
+    adder = add_simulation.SimulationAdderUpdater(sim.get_output_handler())
+    adder.min_halo_particles = 300000
+    adder.add_objects_to_timestep(ts1)
+
     parallel_tasks.use('null')
 
 def teardown_module():
@@ -80,10 +84,7 @@ def test_mpi_ahf_values():
 
 
 def test_insert_halos():
-    #stat.HaloStatFile(ts1.filename).add_halos(min_NDM=200000)
-    adder = add_simulation.SimulationAdderUpdater(sim.get_output_handler())
-    adder.min_halo_particles = 300000
-    adder.add_objects_to_timestep(ts1)
+    # insert has already happened in the setup; check that it worked
     assert ts1.halos.count()==3
     assert ts1.halos[0].NDM==4348608
     assert ts1.halos[1].NDM==402567

From e1caeed454946666142dff35a8ee25b5a801fb6d Mon Sep 17 00:00:00 2001
From: Andrew Pontzen
Date: Sat, 4 May 2024 13:53:27 +0100
Subject: [PATCH 08/13] More fiddling to make tests order-independent

---
 tests/test_stat_files.py | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/tests/test_stat_files.py b/tests/test_stat_files.py
index 486db788..31f5b864 100644
--- a/tests/test_stat_files.py
+++ b/tests/test_stat_files.py
@@ -84,7 +84,15 @@ def test_mpi_ahf_values():
 
 
 def test_insert_halos():
-    # insert has already happened in the setup; check that it worked
+
+    # insert has already happened in the setup
+    for h in ts1.halos:
+        db.get_default_session().delete(h) # remove previous objects so that we can add them afresh
+
+    adder = add_simulation.SimulationAdderUpdater(sim.get_output_handler())
+    adder.min_halo_particles = 300000
+    adder.add_objects_to_timestep(ts1)
+
     assert ts1.halos.count()==3
     assert ts1.halos[0].NDM==4348608
     assert ts1.halos[1].NDM==402567

From 223f3734b3c0cd16e48c01318aa191b67e94e5d9 Mon Sep 17 00:00:00 2001
From: Andrew Pontzen
Date: Sat, 4 May 2024 22:47:54 +0100
Subject: [PATCH 09/13] Fix calls to image-related functions for pynbody v2

---
 tangos/properties/pynbody/images.py | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/tangos/properties/pynbody/images.py b/tangos/properties/pynbody/images.py
index 4783cbd2..bf9997f7 100644
--- a/tangos/properties/pynbody/images.py
+++ b/tangos/properties/pynbody/images.py
@@ -23,8 +23,7 @@ def calculate(self, particle_data, properties):
         import pynbody.analysis.angmom as angmom
         size = self.plot_extent()
         g, s = self._render_gas(particle_data, size), self._render_stars(particle_data, size)
-        with angmom.sideon(particle_data, return_transform=True,
-                           cen_size=self.get_simulation_property("approx_resolution_kpc", 0.1)*10.):
+        with angmom.sideon(particle_data):
             g_side, s_side = self._render_gas(particle_data, size), self._render_stars(particle_data, size)
             with particle_data.rotate_x(90):
                 g_face, s_face = self._render_gas(particle_data, size), self._render_stars(particle_data, size)
@@ -33,8 +32,7 @@ def calculate(self, particle_data, properties):
 
     def _render_projected(self, f, size):
         import pynbody.plot
-        im = pynbody.plot.sph.image(f[pynbody.filt.BandPass(
-            'z', -size / 2, size / 2)], 'rho', size, units="Msol kpc^-2", noplot=True)
+        im = pynbody.plot.sph.image(f, 'rho', size, units="Msol kpc^-2", noplot=True, restrict_depth=True)
         return im
 
     def _render_gas(self, f, size):
@@ -47,6 +45,6 @@ def _render_stars(self, f, size):
         import pynbody.plot
         if len(f.st)>0:
             return pynbody.plot.stars.render(f.st[pynbody.filt.HighPass('tform',0) & pynbody.filt.BandPass('z', -size / 2, size / 2)],
-                                             width=size, plot=False, ret_im=True, mag_range=(16,22))
+                                             width=size, noplot=True, return_image=True, mag_range=(16,22))
         else:
             return None

From bb81101b37e15d892403e240493e8cc847395692 Mon Sep 17 00:00:00 2001
From: Andrew Pontzen
Date: Mon, 6 May 2024 11:44:23 +0100
Subject: [PATCH 10/13] Fix calls to bridge for pynbody v2

---
 setup.py                         |  1 +
 tangos/input_handlers/pynbody.py | 27 ++++++++++++++++-----------
 tangos/tools/crosslink.py        |  2 +-
 3 files changed, 18 insertions(+), 12 deletions(-)

diff --git a/setup.py b/setup.py
index 3061417b..9120cf91 100644
--- a/setup.py
+++ b/setup.py
@@ -24,6 +24,7 @@
     'matplotlib >= 3.0.0', # for web interface
     'tqdm >= 4.59.0',
     'tblib >= 3.0.0',
+    'packaging >= 22.0'
 ]
 
 tests_require = [
diff --git a/tangos/input_handlers/pynbody.py b/tangos/input_handlers/pynbody.py
index 9ea815af..d9bc3683 100644
--- a/tangos/input_handlers/pynbody.py
+++ b/tangos/input_handlers/pynbody.py
@@ -6,6 +6,7 @@
 from collections import defaultdict
 
 import numpy as np
+from packaging.version import Version
 
 from ..util import proxy_object
 
@@ -36,7 +37,7 @@ def __new__(cls, *args, **kwargs):
 
         min_version = "2.0.0-beta.8"
 
-        if pynbody_local.__version__ < min_version:
+        if Version(pynbody_local.__version__) < Version(min_version):
             raise ImportError(f"Using tangos with pynbody requires pynbody {min_version} or later")
 
         global pynbody
@@ -246,16 +247,22 @@ def match_objects(self, ts1, ts2, halo_min, halo_max,
         if halo_max is None:
             halo_max = max(len(h2), len(h1))
 
-        return self.create_bridge(f1, f2).fuzzy_match_catalog(
-            halo_min,
-            halo_max,
-            threshold=threshold,
-            only_family=only_family,
-            groups_1=h1,
-            groups_2=h2,
+        matches = self.create_bridge(f1, f2).fuzzy_match_halos(
+            h1, h2, threshold=threshold, use_family=only_family,
             **fuzzy_match_kwa,
         )
 
+        del_keys = []
+        for k in matches:
+            if k < halo_min or k > halo_max:
+                del_keys.append(k)
+
+        for k in del_keys:
+            del matches[k]
+
+        return matches
+
+
     @classmethod
     def create_bridge(cls, f1, f2):
         return f1.bridge(f2)
@@ -492,7 +499,6 @@ def create_bridge(self, f1, f2):
 
     def match_objects(self, ts1, ts2, halo_min, halo_max, dm_only=True, threshold=0.005,
                       object_typetag="halo", output_handler_for_ts2=None):
-        import pynbody
         if not dm_only:
             logger.warning(
                 "`match_objects` was called with dm_only=%s, but %s only supports DM-only"
                 ...
@@ -508,8 +514,7 @@
         return super().match_objects(
             ts1,
             ts2,
             halo_min,
             halo_max,
             dm_only=dm_only,
             threshold=threshold,
             object_typetag=object_typetag,
-            output_handler_for_ts2=output_handler_for_ts2,
-            fuzzy_match_kwa={"use_family": pynbody.family.dm}
+            output_handler_for_ts2=output_handler_for_ts2
         )
diff --git a/tangos/tools/crosslink.py b/tangos/tools/crosslink.py
index 6fc105b1..5f4d5d87 100644
--- a/tangos/tools/crosslink.py
+++ b/tangos/tools/crosslink.py
@@ -87,7 +87,7 @@ def need_crosslink_ts(self, ts1, ts2, object_typecode=0):
     def create_db_objects_from_catalog(self, cat, finder_id_to_halos_1, finder_id_to_halos_2, same_d_id):
         items = []
         missing_db_object = 0
-        for i, possibilities in enumerate(cat):
+        for i, possibilities in cat.items():
             h1 = finder_id_to_halos_1.get(i, None)
             for cat_i, weight in possibilities:
                 h2 = finder_id_to_halos_2.get(cat_i, None)

From ebff15097adb17695257a97f5b63528ad7a97b9c Mon Sep 17 00:00:00 2001
From: Andrew Pontzen
Date: Mon, 6 May 2024 11:50:30 +0100
Subject: [PATCH 11/13] Fix testing of match_objects

---
 tangos/input_handlers/output_testing.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/tangos/input_handlers/output_testing.py b/tangos/input_handlers/output_testing.py
index 527d2544..cb40c170 100644
--- a/tangos/input_handlers/output_testing.py
+++ b/tangos/input_handlers/output_testing.py
@@ -56,12 +56,10 @@ def match_objects(self, ts1, ts2, halo_min, halo_max, dm_only=False, threshold=0
         if halo_max is None:
             halo_max = f1.max_halos
         halo_max = min((halo_max,f1.max_halos,f2.max_halos))
-        return_matches = [tuple()]
+        return_matches = {}
         for i in range(1,halo_max+1):
             if i>=halo_min:
-                return_matches.append(((i, 1.0),(i+1,0.05),))
-            else:
-                return_matches.append(tuple())
+                return_matches[i] = (((i, 1.0),(i+1,0.05),))
         return return_matches
 
     def load_timestep_without_caching(self, ts_extension, mode=None):

From 7e798b6b98a81d11201cd5747b2ae4b0daa25bfb Mon Sep 17 00:00:00 2001
From: Andrew Pontzen
Date: Mon, 6 May 2024 12:39:37 +0100
Subject: [PATCH 12/13] Fix resolution of images to compensate for changed pynbody default

---
 tangos/properties/pynbody/images.py | 6 ++++--
 tangos/scripts/manager.py           | 1 +
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/tangos/properties/pynbody/images.py b/tangos/properties/pynbody/images.py
index bf9997f7..30f5c2cc 100644
--- a/tangos/properties/pynbody/images.py
+++ b/tangos/properties/pynbody/images.py
@@ -32,7 +32,8 @@ def calculate(self, particle_data, properties):
 
     def _render_projected(self, f, size):
         import pynbody.plot
-        im = pynbody.plot.sph.image(f, 'rho', size, units="Msol kpc^-2", noplot=True, restrict_depth=True)
+        im = pynbody.plot.sph.image(f, 'rho', size, units="Msol kpc^-2", noplot=True, restrict_depth=True,
+                                    resolution=500)
         return im
 
     def _render_gas(self, f, size):
@@ -45,6 +46,7 @@ def _render_stars(self, f, size):
         import pynbody.plot
         if len(f.st)>0:
             return pynbody.plot.stars.render(f.st[pynbody.filt.HighPass('tform',0) & pynbody.filt.BandPass('z', -size / 2, size / 2)],
-                                             width=size, noplot=True, return_image=True, mag_range=(16,22))
+                                             width=size, noplot=True, return_image=True, mag_range=(16,22),
+                                             resolution=500)
         else:
             return None
diff --git a/tangos/scripts/manager.py b/tangos/scripts/manager.py
index 2109d275..93aabade 100755
--- a/tangos/scripts/manager.py
+++ b/tangos/scripts/manager.py
@@ -313,6 +313,7 @@ def diff(options):
     differ = db_diff.TangosDbDiff(options.uri1, options.uri2, ignore_keys=options.ignore_value_of)
     if options.property_tolerance is not None:
         for k, rtol, atol in options.property_tolerance:
+            if k == '.': k = None
             differ.set_tolerance(k, float(rtol), float(atol))
 
     if options.simulation:

From 39385b1ca71038827bfa82d955f2ce52c606d34e Mon Sep 17 00:00:00 2001
From: Andrew Pontzen
Date: Mon, 6 May 2024 16:37:17 +0100
Subject: [PATCH 13/13] Update integration test to point at new reference db and use newer action versions

---
 .github/workflows/integration-test.yaml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/integration-test.yaml b/.github/workflows/integration-test.yaml
index f1776db8..e4d76086 100644
--- a/.github/workflows/integration-test.yaml
+++ b/.github/workflows/integration-test.yaml
@@ -23,7 +23,7 @@ jobs:
       CXX: g++-10
     steps:
       - name: Install Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
 
@@ -33,7 +33,7 @@ jobs:
           sudo apt-get update -qq
           sudo apt install gcc-10 g++-10
 
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
 
       - name: Update python pip/setuptools/wheel
         run: |
@@ -54,7 +54,7 @@ jobs:
         working-directory: test_tutorial_build
         run: export INTEGRATION_TESTING=1; bash build.sh
 
-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v4
         with:
           name: Tangos database
           path: test_tutorial_build/data.db
 
@@ -62,7 +62,7 @@ jobs:
       - name: Verify database
         working-directory: test_tutorial_build
         run: |
-          wget https://zenodo.org/record/10825178/files/reference_database.db?download=1 -O reference_database.db -nv
+          wget https://zenodo.org/record/11122073/files/reference_database.db?download=1 -O reference_database.db -nv
           tangos diff data.db reference_database.db --property-tolerance dm_density_profile 1e-2 0 --property-tolerance gas_map 1e-2 0 --property-tolerance gas_map_sideon 1e-2 0 --property-tolerance gas_map_faceon 1e-2 0
           # --property-tolerance dm_density_profile here is because if a single particle crosses between bins
           # (which seems to happen due to differing library versions), the profile can change by this much