From acb7baf6bd9ff2acb84192b8afe2f45658ed87dc Mon Sep 17 00:00:00 2001 From: Trevor James Smith <10819524+Zeitsperre@users.noreply.github.com> Date: Thu, 7 Nov 2024 16:15:06 -0500 Subject: [PATCH] update tests to use new functions --- src/xsdba/testing/registry.txt | 5 ++ tests/__init__.py | 3 -- tests/conftest.py | 1 - tests/test_adjustment.py | 28 +++++----- tests/test_base.py | 4 +- tests/test_loess.py | 6 +-- tests/test_measures.py | 94 +++++++++++++++++++++++++--------- tests/test_nbutils.py | 7 ++- tests/test_processing.py | 7 +-- tests/test_properties.py | 86 ++++++++++++++++--------------- tests/test_units.py | 3 -- 11 files changed, 149 insertions(+), 95 deletions(-) delete mode 100644 tests/__init__.py diff --git a/src/xsdba/testing/registry.txt b/src/xsdba/testing/registry.txt index f1737e5..1ab1e4d 100644 --- a/src/xsdba/testing/registry.txt +++ b/src/xsdba/testing/registry.txt @@ -1,3 +1,8 @@ +cmip3/tas.sresb1.giss_model_e_r.run1.atm.da.nc sha256:e709552beeeccafcfe280759edf5477ae5241c698409ca051b0899c16e92c95e +ERA5/daily_surface_cancities_1990-1993.nc sha256:049d54ace3d229a96cc621189daa3e1a393959ab8d988221cfc7b2acd7ab94b2 +NRCANdaily/nrcan_canada_daily_pr_1990.nc sha256:144479ec7a976cfecb6a10762d128a771356093d72caf5f075508ee86d25a1b0 +NRCANdaily/nrcan_canada_daily_tasmax_1990.nc sha256:84880205b798740e37a102c7f40e595d7a4fde6e35fb737a1ef68b8dad447526 +NRCANdaily/nrcan_canada_daily_tasmin_1990.nc sha256:13d61fc54cdcb4c1617ec777ccbf59575d8fdc24754f914042301bc1b024d7f7 sdba/CanESM2_1950-2100.nc sha256:b41fe603676e70d16c747ec207eb75ec86a39b665de401dcb23b5969ab3e1b32 sdba/adjusted_external.nc sha256:ff325c88eca96844bc85863744e4e08bcdf3d257388255636427ad5e11960d2e sdba/ahccd_1950-2013.nc sha256:7e9a1f61c1d04ca257b09857a82715f1fa3f0550d77f97b7306d4eaaf0c70239 diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index 792f1a2..0000000 --- a/tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -"""Unit test package for xsdba.""" - -from __future__ import annotations diff --git a/tests/conftest.py b/tests/conftest.py index 01849b9..175408c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,4 @@ # noqa: D104 -# XC: Many things deactivated, not sure what will be necessary from __future__ import annotations from pathlib import Path diff --git a/tests/test_adjustment.py b/tests/test_adjustment.py index 44f306e..3d2cefb 100644 --- a/tests/test_adjustment.py +++ b/tests/test_adjustment.py @@ -632,15 +632,15 @@ def test_mon_u( @pytest.mark.parametrize("use_dask", [True, False]) @pytest.mark.filterwarnings("ignore::RuntimeWarning") - def test_add_dims(self, use_dask, open_dataset): + def test_add_dims(self, use_dask, gosset): with set_options(sdba_encode_cf=use_dask): if use_dask: chunks = {"location": -1} else: chunks = None - dsim = open_dataset( - "sdba/CanESM2_1950-2100.nc", + dsim = xr.open_dataset( + gosset.fetch("sdba/CanESM2_1950-2100.nc"), chunks=chunks, drop_variables=["lat", "lon"], ).tasmax @@ -648,8 +648,8 @@ def test_add_dims(self, use_dask, open_dataset): sim = dsim.sel(time=slice("2041", "2070")) ref = ( - open_dataset( - "sdba/ahccd_1950-2013.nc", + xr.open_dataset( + gosset.fetch("sdba/ahccd_1950-2013.nc"), chunks=chunks, drop_variables=["lat", "lon"], ) @@ -678,7 +678,7 @@ class TestMBCn: @pytest.mark.parametrize("use_dask", [True, False]) @pytest.mark.parametrize("group, window", [["time", 1], ["time.dayofyear", 31]]) @pytest.mark.parametrize("period_dim", [None, "period"]) - def test_simple(self, open_dataset, use_dask, group, window, 
period_dim): + def test_simple(self, use_dask, group, window, period_dim, gosset): group, window, period_dim, use_dask = "time", 1, None, False with set_options(sdba_encode_cf=use_dask): if use_dask: @@ -686,8 +686,8 @@ def test_simple(self, open_dataset, use_dask, group, window, period_dim): else: chunks = None ref, dsim = ( - open_dataset( - f"sdba/{file}", + xr.open_dataset( + gosset.fetch(f"sdba/{file}"), chunks=chunks, drop_variables=["lat", "lon"], ) @@ -767,8 +767,10 @@ def _group_assert(ds, dim): @pytest.mark.parametrize("use_dask", [True, False]) @pytest.mark.parametrize("pcorient", ["full", "simple"]) - def test_real_data(self, open_dataset, use_dask, pcorient): - atmosds = open_dataset("ERA5/daily_surface_cancities_1990-1993.nc") + def test_real_data(self, use_dask, pcorient, gosset): + atmosds = xr.open_dataset( + gosset.fetch("ERA5/daily_surface_cancities_1990-1993.nc") + ) ds0 = xr.Dataset( {"tasmax": atmosds.tasmax, "tasmin": atmosds.tasmin, "tas": atmosds.tas} @@ -862,9 +864,9 @@ def gen_testdata(c, s): ).sum() @pytest.mark.slow - def test_real_data(self, open_dataset): - dsim = open_dataset("sdba/CanESM2_1950-2100.nc") # .chunk() - dref = open_dataset("sdba/ahccd_1950-2013.nc") # .chunk() + def test_real_data(self, gosset): + dsim = xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) # .chunk() + dref = xr.open_dataset(gosset.fetch("sdba/ahccd_1950-2013.nc")) # .chunk() ref = dref.sel(time=slice("1950", "2009")).pr hist = dsim.sel(time=slice("1950", "2009")).pr # TODO: Do we want to include standard conversions in xsdba tests? diff --git a/tests/test_base.py b/tests/test_base.py index d8b10ed..68735a6 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -237,8 +237,8 @@ def func(ds, *, group, lon=None): func(xr.Dataset(dict(da0=da0)), group="time") @pytest.mark.parametrize("use_dask", [True, False]) - def test_dataarray_cfencode(self, use_dask): - ds = open_dataset("sdba/CanESM2_1950-2100.nc") + def test_dataarray_cfencode(self, use_dask, gosset): + ds = xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) if use_dask: ds = ds.chunk() diff --git a/tests/test_loess.py b/tests/test_loess.py index aa58831..ff7aa7a 100644 --- a/tests/test_loess.py +++ b/tests/test_loess.py @@ -40,9 +40,9 @@ def test_loess_nb(d, f, w, n, dx, exp): @pytest.mark.slow @pytest.mark.parametrize("use_dask", [True, False]) -def test_loess_smoothing(use_dask, open_dataset): - tas = open_dataset( - "cmip3/tas.sresb1.giss_model_e_r.run1.atm.da.nc", +def test_loess_smoothing(use_dask, gosset): + tas = xr.open_dataset( + gosset.fetch("cmip3/tas.sresb1.giss_model_e_r.run1.atm.da.nc"), chunks={"lat": 1} if use_dask else None, ).tas.isel(lon=0, time=slice(0, 740)) tas = tas.where(tas.time.dt.dayofyear != 360) # Put NaNs diff --git a/tests/test_measures.py b/tests/test_measures.py index 3d2483a..a42f0e8 100644 --- a/tests/test_measures.py +++ b/tests/test_measures.py @@ -7,16 +7,32 @@ from xsdba import measures -def test_bias(open_dataset): - sim = open_dataset("sdba/CanESM2_1950-2100.nc").sel(time="1950-01-01").tasmax - ref = open_dataset("sdba/nrcan_1950-2013.nc").sel(time="1950-01-01").tasmax +def test_bias(gosset): + sim = ( + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) + .sel(time="1950-01-01") + .tasmax + ) + ref = ( + xr.open_dataset(gosset.fetch("sdba/nrcan_1950-2013.nc")) + .sel(time="1950-01-01") + .tasmax + ) test = measures.bias(sim, ref).values np.testing.assert_array_almost_equal(test, [[6.430237, 39.088974, 5.2402344]]) -def test_relative_bias(open_dataset): - 
sim = open_dataset("sdba/CanESM2_1950-2100.nc").sel(time="1950-01-01").tasmax - ref = open_dataset("sdba/nrcan_1950-2013.nc").sel(time="1950-01-01").tasmax +def test_relative_bias(gosset): + sim = ( + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) + .sel(time="1950-01-01") + .tasmax + ) + ref = ( + xr.open_dataset(gosset.fetch("sdba/nrcan_1950-2013.nc")) + .sel(time="1950-01-01") + .tasmax + ) test = measures.relative_bias(sim, ref).values np.testing.assert_array_almost_equal(test, [[0.02366494, 0.16392256, 0.01920133]]) @@ -32,36 +48,62 @@ def test_circular_bias(): np.testing.assert_array_almost_equal(test, [1, 1, 66, -1, -1, -66]) -def test_ratio(open_dataset): - sim = open_dataset("sdba/CanESM2_1950-2100.nc").sel(time="1950-01-01").tasmax - ref = open_dataset("sdba/nrcan_1950-2013.nc").sel(time="1950-01-01").tasmax +def test_ratio(gosset): + sim = ( + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) + .sel(time="1950-01-01") + .tasmax + ) + ref = ( + xr.open_dataset(gosset.fetch("sdba/nrcan_1950-2013.nc")) + .sel(time="1950-01-01") + .tasmax + ) test = measures.ratio(sim, ref).values np.testing.assert_array_almost_equal(test, [[1.023665, 1.1639225, 1.0192013]]) -def test_rmse(open_dataset): +def test_rmse(gosset): sim = ( - open_dataset("sdba/CanESM2_1950-2100.nc").sel(time=slice("1950", "1953")).tasmax + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) + .sel(time=slice("1950", "1953")) + .tasmax + ) + ref = ( + xr.open_dataset(gosset.fetch("sdba/nrcan_1950-2013.nc")) + .sel(time=slice("1950", "1953")) + .tasmax ) - ref = open_dataset("sdba/nrcan_1950-2013.nc").sel(time=slice("1950", "1953")).tasmax test = measures.rmse(sim, ref).values np.testing.assert_array_almost_equal(test, [5.4499755, 18.124086, 12.387193], 4) -def test_mae(open_dataset): +def test_mae(gosset): sim = ( - open_dataset("sdba/CanESM2_1950-2100.nc").sel(time=slice("1950", "1953")).tasmax + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) + .sel(time=slice("1950", "1953")) + .tasmax + ) + ref = ( + xr.open_dataset(gosset.fetch("sdba/nrcan_1950-2013.nc")) + .sel(time=slice("1950", "1953")) + .tasmax ) - ref = open_dataset("sdba/nrcan_1950-2013.nc").sel(time=slice("1950", "1953")).tasmax test = measures.mae(sim, ref).values np.testing.assert_array_almost_equal(test, [4.159672, 14.2148, 9.768536], 4) -def test_annual_cycle_correlation(open_dataset): +def test_annual_cycle_correlation(gosset): sim = ( - open_dataset("sdba/CanESM2_1950-2100.nc").sel(time=slice("1950", "1953")).tasmax + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) + .sel(time=slice("1950", "1953")) + .tasmax + ) + ref = ( + xr.open_dataset(gosset.fetch("sdba/nrcan_1950-2013.nc")) + .sel(time=slice("1950", "1953")) + .tasmax ) - ref = open_dataset("sdba/nrcan_1950-2013.nc").sel(time=slice("1950", "1953")).tasmax test = ( measures.annual_cycle_correlation(sim, ref, window=31) .sel(location="Vancouver") @@ -71,22 +113,26 @@ def test_annual_cycle_correlation(open_dataset): @pytest.mark.slow -def test_scorr(open_dataset): - ref = open_dataset("NRCANdaily/nrcan_canada_daily_tasmin_1990.nc").tasmin - sim = open_dataset("NRCANdaily/nrcan_canada_daily_tasmax_1990.nc").tasmax +def test_scorr(gosset): + ref = xr.open_dataset( + gosset.fetch("NRCANdaily/nrcan_canada_daily_tasmin_1990.nc") + ).tasmin + sim = xr.open_dataset( + gosset.fetch("NRCANdaily/nrcan_canada_daily_tasmax_1990.nc") + ).tasmax scorr = measures.scorr(sim.isel(lon=slice(0, 50)), ref.isel(lon=slice(0, 50))) np.testing.assert_allclose(scorr, 
[97374.2146243])
-def test_taylordiagram(open_dataset):
+def test_taylordiagram(gosset):
     sim = (
-        open_dataset("sdba/CanESM2_1950-2100.nc")
+        xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc"))
         .sel(time=slice("1950", "1953"), location="Amos")
         .tasmax
     )
     ref = (
-        open_dataset("sdba/nrcan_1950-2013.nc")
+        xr.open_dataset(gosset.fetch("sdba/nrcan_1950-2013.nc"))
         .sel(time=slice("1950", "1953"), location="Amos")
         .tasmax
     )
diff --git a/tests/test_nbutils.py b/tests/test_nbutils.py
index 16a2dd4..b7909c0 100644
--- a/tests/test_nbutils.py
+++ b/tests/test_nbutils.py
@@ -10,8 +10,11 @@ class TestQuantiles:
     @pytest.mark.parametrize("uses_dask", [True, False])
     def test_quantile(self, uses_dask, gosset):
-        file = gosset.fetch("sdba/CanESM2_1950-2100.nc")
-        da = (xr.open_dataset(file).sel(time=slice("1950", "1955")).pr).load()
+        da = (
+            xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc"))
+            .sel(time=slice("1950", "1955"))
+            .pr
+        ).load()
         if uses_dask:
             da = da.chunk({"location": 1})
         else:
diff --git a/tests/test_processing.py b/tests/test_processing.py
index 865d139..318ca26 100644
--- a/tests/test_processing.py
+++ b/tests/test_processing.py
@@ -279,13 +279,14 @@ def test_normalize(timeseries, random):
     np.testing.assert_allclose(xp, xp2)
-def test_stack_variables(open_dataset):
-    ds1 = open_dataset("sdba/CanESM2_1950-2100.nc")
-    ds2 = open_dataset("sdba/ahccd_1950-2013.nc")
+def test_stack_variables(gosset):
+    ds1 = xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc"))
+    ds2 = xr.open_dataset(gosset.fetch("sdba/ahccd_1950-2013.nc"))
     da1 = stack_variables(ds1)
     da2 = stack_variables(ds2)
+    # FIXME: These assert a specific variable order; use a membership test instead
     assert list(da1.multivar.values) == ["pr", "tasmax"]
     assert da1.multivar.attrs["_standard_name"] == [
         "precipitation_flux",
diff --git a/tests/test_properties.py b/tests/test_properties.py
index 1f3bacf..ab077ab 100644
--- a/tests/test_properties.py
+++ b/tests/test_properties.py
@@ -11,9 +11,9 @@ class TestProperties:
-    def test_mean(self, open_dataset):
+    def test_mean(self, gosset):
         sim = (
-            open_dataset("sdba/CanESM2_1950-2100.nc")
+            xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc"))
             .sel(time=slice("1950", "1980"), location="Vancouver")
             .pr
         ).load()
@@ -29,9 +29,9 @@ def test_mean(self, open_dataset):
         assert out_season.long_name.startswith("Mean")
-    def test_var(self, open_dataset):
+    def test_var(self, gosset):
         sim = (
-            open_dataset("sdba/CanESM2_1950-2100.nc")
+            xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc"))
             .sel(time=slice("1950", "1980"), location="Vancouver")
             .pr
         ).load()
@@ -47,9 +47,9 @@ def test_var(self, open_dataset):
         assert out_season.long_name.startswith("Variance")
         assert out_season.units == "kg2 m-4 s-2"
-    def test_std(self, open_dataset):
+    def test_std(self, gosset):
         sim = (
-            open_dataset("sdba/CanESM2_1950-2100.nc")
+            xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc"))
             .sel(time=slice("1950", "1980"), location="Vancouver")
             .pr
         ).load()
@@ -65,9 +65,9 @@ def test_std(self, open_dataset):
         assert out_season.long_name.startswith("Standard deviation")
         assert out_season.units == "kg m-2 s-1"
-    def test_skewness(self, open_dataset):
+    def test_skewness(self, gosset):
         sim = (
-            open_dataset("sdba/CanESM2_1950-2100.nc")
+            xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc"))
             .sel(time=slice("1950", "1980"), location="Vancouver")
             .pr
         ).load()
@@ -88,9 +88,9 @@ def test_skewness(self, open_dataset):
         assert out_season.long_name.startswith("Skewness")
         assert out_season.units == ""
-    
def test_quantile(self, open_dataset): + def test_quantile(self, gosset): sim = ( - open_dataset("sdba/CanESM2_1950-2100.nc") + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) .sel(time=slice("1950", "1980"), location="Vancouver") .pr ).load() @@ -110,9 +110,9 @@ def test_quantile(self, open_dataset): ) assert out_season.long_name.startswith("Quantile 0.2") - def test_spell_length_distribution(self, open_dataset): + def test_spell_length_distribution(self, gosset): ds = ( - open_dataset("sdba/CanESM2_1950-2100.nc") + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) .sel(time=slice("1950", "1952"), location="Vancouver") .load() ) @@ -155,12 +155,12 @@ def test_spell_length_distribution(self, open_dataset): == "Average of spell length distribution when the variable is >= the quantile 0.9 for 1 consecutive day(s)." ) - def test_spell_length_distribution_mixed_stat(self, open_dataset): + def test_spell_length_distribution_mixed_stat(self, gosset): time = pd.date_range("2000-01-01", periods=2 * 365, freq="D") tas = xr.DataArray( np.array([0] * 365 + [40] * 365), - dims=("time"), + dims="time", coords={"time": time}, attrs={"units": "degC"}, ) @@ -185,10 +185,10 @@ def test_spell_length_distribution_mixed_stat(self, open_dataset): ], ) def test_bivariate_spell_length_distribution( - self, open_dataset, window, expected_amount, expected_quantile + self, window, expected_amount, expected_quantile, gosset ): ds = ( - open_dataset("sdba/CanESM2_1950-2100.nc").sel( + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")).sel( time=slice("1950", "1952"), location="Vancouver" ) ).load() @@ -240,9 +240,9 @@ def test_bivariate_spell_length_distribution( [outd[k] for k in ["mean", "max", "min"]], expected_quantile ) - def test_acf(self, open_dataset): + def test_acf(self, gosset): sim = ( - open_dataset("sdba/CanESM2_1950-2100.nc") + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) .sel(time=slice("1950", "1952"), location="Vancouver") .pr ).load() @@ -257,9 +257,9 @@ def test_acf(self, open_dataset): assert out.long_name.startswith("Lag-1 autocorrelation") assert out.units == "" - def test_annual_cycle(self, open_dataset): + def test_annual_cycle(self, gosset): simt = ( - open_dataset("sdba/CanESM2_1950-2100.nc") + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) .sel(time=slice("1950", "1952"), location="Vancouver") .tasmax ).load() @@ -292,9 +292,9 @@ def test_annual_cycle(self, open_dataset): assert relamp.units == "%" assert phase.units == "" - def test_annual_range(self, open_dataset): + def test_annual_range(self, gosset): simt = ( - open_dataset("sdba/CanESM2_1950-2100.nc") + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) .sel(time=slice("1950", "1952"), location="Vancouver") .tasmax ).load() @@ -336,15 +336,15 @@ def test_annual_range(self, open_dataset): assert relamp.units == "%" assert phase.units == "" - def test_corr_btw_var(self, open_dataset): + def test_corr_btw_var(self, gosset): simt = ( - open_dataset("sdba/CanESM2_1950-2100.nc") + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) .sel(time=slice("1950", "1952"), location="Vancouver") .tasmax ).load() sim = ( - open_dataset("sdba/CanESM2_1950-2100.nc") + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) .sel(time=slice("1950", "1952"), location="Vancouver") .pr ).load() @@ -381,9 +381,9 @@ def test_corr_btw_var(self, open_dataset): ): properties.corr_btw_var(sim, simt, group="time", corr_type="pear") - def test_relative_frequency(self, open_dataset): + def 
test_relative_frequency(self, gosset): sim = ( - open_dataset("sdba/CanESM2_1950-2100.nc") + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) .sel(time=slice("1950", "1952"), location="Vancouver") .pr ).load() @@ -402,9 +402,9 @@ def test_relative_frequency(self, open_dataset): assert test.long_name == "Relative frequency of values >= 2.8925e-04 kg/m^2/s." assert test.units == "" - def test_transition(self, open_dataset): + def test_transition(self, gosset): sim = ( - open_dataset("sdba/CanESM2_1950-2100.nc") + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) .sel(time=slice("1950", "1952"), location="Vancouver") .pr ).load() @@ -420,9 +420,9 @@ def test_transition(self, open_dataset): ) assert test.units == "" - def test_trend(self, open_dataset): + def test_trend(self, gosset): simt = ( - open_dataset("sdba/CanESM2_1950-2100.nc") + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) .sel(time=slice("1950", "1952"), location="Vancouver") .tasmax ).load() @@ -490,9 +490,9 @@ def test_trend(self, open_dataset): assert slope.long_name.startswith("Slope of the interannual linear trend") assert slope.units == "K/year" - def test_return_value(self, open_dataset): + def test_return_value(self, gosset): simt = ( - open_dataset("sdba/CanESM2_1950-2100.nc") + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) .sel(time=slice("1950", "2010"), location="Vancouver") .tasmax ).load() @@ -511,11 +511,11 @@ def test_return_value(self, open_dataset): assert out_y.long_name.startswith("20-year maximal return level") @pytest.mark.slow - def test_spatial_correlogram(self, open_dataset): + def test_spatial_correlogram(self, gosset): # This also tests sdba.utils._pairwise_spearman and sdba.nbutils._pairwise_haversine_and_bins # Test 1, does it work with 1D data? sim = ( - open_dataset("sdba/CanESM2_1950-2100.nc") + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) .sel(time=slice("1981", "2010")) .tasmax ).load() @@ -524,7 +524,9 @@ def test_spatial_correlogram(self, open_dataset): np.testing.assert_allclose(out, [-1, np.nan, 0], atol=1e-6) # Test 2, not very exhaustive, this is more of a detect-if-we-break-it test. - sim = open_dataset("NRCANdaily/nrcan_canada_daily_tasmax_1990.nc").tasmax + sim = xr.open_dataset( + gosset.fetch("NRCANdaily/nrcan_canada_daily_tasmax_1990.nc") + ).tasmax out = properties.spatial_correlogram( sim.isel(lon=slice(0, 50)), dims=["lon", "lat"], bins=20 ) @@ -539,9 +541,11 @@ def test_spatial_correlogram(self, open_dataset): ) @pytest.mark.slow - def test_decorrelation_length(self, open_dataset): + def test_decorrelation_length(self, gosset): sim = ( - open_dataset("NRCANdaily/nrcan_canada_daily_tasmax_1990.nc") + xr.open_dataset( + gosset.fetch("NRCANdaily/nrcan_canada_daily_tasmax_1990.nc") + ) .tasmax.isel(lon=slice(0, 5), lat=slice(0, 1)) .load() ) @@ -555,15 +559,15 @@ def test_decorrelation_length(self, open_dataset): ) # ADAPT? 
The plan was not to allow mm/d -> kg m-2 s-1 in xsdba - def test_get_measure(self, open_dataset): + def test_get_measure(self, gosset): sim = ( - open_dataset("sdba/CanESM2_1950-2100.nc") + xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc")) .sel(time=slice("1981", "2010"), location="Vancouver") .pr ).load() ref = ( - open_dataset("sdba/ahccd_1950-2013.nc") + xr.open_dataset(gosset.fetch("sdba/ahccd_1950-2013.nc")) .sel(time=slice("1981", "2010"), location="Vancouver") .pr ).load() diff --git a/tests/test_units.py b/tests/test_units.py index fa63bae..a475dfe 100644 --- a/tests/test_units.py +++ b/tests/test_units.py @@ -1,10 +1,7 @@ from __future__ import annotations -import numpy as np import pytest import xarray as xr -from cf_xarray import __version__ as __cfxr_version__ -from packaging.version import Version from xsdba.units import harmonize_units, str2pint, units, units2pint
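
Note (appended for context, not part of the applied diff): the tests above rely on a session-scoped `gosset` fixture whose `fetch()` method turns an entry of `src/xsdba/testing/registry.txt` into a local, checksum-verified file path. The registry lines added above ("<path> sha256:<digest>") use the format read by pooch, so a fixture along the following lines would be compatible; the fixture body, base URL, and registry lookup path are illustrative assumptions, not the actual xsdba implementation.

    # Hypothetical sketch only -- names, URL, and paths below are assumptions.
    from __future__ import annotations

    import pooch
    import pytest


    @pytest.fixture(scope="session")
    def gosset(tmp_path_factory):
        """Data fetcher: .fetch(name) downloads, verifies, and caches a test file."""
        cache = tmp_path_factory.mktemp("xsdba-testdata")
        fetcher = pooch.create(
            path=cache,
            base_url="https://example.org/xsdba-testdata/",  # assumed mirror URL
            registry=None,
        )
        # Each registry line reads "<relative/path> sha256:<digest>", as in the hunk above.
        fetcher.load_registry("src/xsdba/testing/registry.txt")
        return fetcher

With such a fixture, `xr.open_dataset(gosset.fetch("sdba/CanESM2_1950-2100.nc"))` -- the pattern used throughout this patch -- opens the cached local copy of the remote test file.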