diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 353a0014c..b24ab8ec3 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -19,7 +19,7 @@ on: - submitted env: - XCLIM_TESTDATA_BRANCH: v2024.8.23 + XCLIM_TESTDATA_BRANCH: v2025.1.8 concurrency: # For a given workflow, if we push to the same branch, cancel all previous builds on that branch except on main. diff --git a/CHANGELOG.rst b/CHANGELOG.rst index ae619b740..a978d99b6 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,11 +4,21 @@ Changelog v0.55.0 (unreleased) -------------------- -Contributors to this version: Juliette Lavoie (:user:`juliettelavoie`) +Contributors to this version: Juliette Lavoie (:user:`juliettelavoie`), Trevor James Smith (:user:`Zeitsperre`). + +New indicators +^^^^^^^^^^^^^^ +* Added ``xclim.indices.holiday_snow_days`` to compute the number of days with snow on the ground during holidays ("Christmas Days"). (:issue:`2029`, :pull:`2030`). +* Added ``xclim.indices.holiday_snow_and_snowfall_days`` to compute the number of days with snow on the ground and measurable snowfall during holidays ("Perfect Christmas Days"). (:issue:`2029`, :pull:`2030`). New features and enhancements ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ * New function ``ensemble.partition.general_partition`` (:pull:`2035`) +* Added a new ``xclim.indices.generic.bivariate_count_occurrences`` function to count instances where operations are performed and validated for two variables. (:pull:`2030`). + +Internal changes +^^^^^^^^^^^^^^^^ +* `sphinx-codeautolink` and `pygments` have been temporarily pinned due to breaking API changes. (:pull:`2030`). v0.54.0 (2024-12-16) -------------------- diff --git a/environment.yml b/environment.yml index f00d3c72e..a0be2c2da 100644 --- a/environment.yml +++ b/environment.yml @@ -29,14 +29,14 @@ dependencies: - lmoments3 >=1.0.7 # Required for some Jupyter notebooks - pot >=0.9.4 # Testing and development dependencies - - black ==24.10.0 - - blackdoc ==0.3.9 + - black =24.10.0 + - blackdoc =0.3.9 - bump-my-version >=0.28.1 - cairosvg >=2.6.0 - - codespell ==2.3.0 + - codespell =2.3.0 - coverage >=7.5.0 - coveralls >=4.0.1 # Note: coveralls is not yet compatible with Python 3.13 - - deptry ==0.20.0 + - deptry =0.21.2 - distributed >=2.0 - flake8 >=7.1.1 - flake8-rst-docstrings >=0.3.0 @@ -45,10 +45,10 @@ dependencies: - h5netcdf >=1.3.0 - ipykernel - ipython >=8.5.0 - - isort ==5.13.2 + - isort =5.13.2 - matplotlib >=3.6.0 - mypy >=1.10.0 - - nbconvert <7.14 # Pinned due to directive errors in sphinx.
See: https://github.com/jupyter/nbconvert/issues/2092 + - nbconvert >=7.16.4 - nbqa >=1.8.2 - nbsphinx >=0.9.5 - nbval >=0.11.0 @@ -60,6 +60,7 @@ dependencies: - pooch >=1.8.0 - pre-commit >=3.7 - pybtex >=0.24.0 + - pygments <2.19 # FIXME: temporary fix for sphinx-codeautolink - pylint >=3.3.1 - pytest >=8.0.0 - pytest-cov >=5.0.0 @@ -69,7 +70,7 @@ dependencies: - sphinx >=7.0.0 - sphinx-autobuild >=2024.4.16 - sphinx-autodoc-typehints - - sphinx-codeautolink + - sphinx-codeautolink >=0.15.2,!=0.16.0 # FIXME: temporary fix for sphinx-codeautolink - sphinx-copybutton - sphinx-mdinclude - sphinxcontrib-bibtex @@ -77,7 +78,7 @@ dependencies: - tokenize-rt >=5.2.0 - tox >=4.21.2 - tox-gh >=1.4.4 - - vulture ==2.13 + - vulture =2.14 - xdoctest >=1.1.5 - yamllint >=1.35.1 - pip >=24.2.0 diff --git a/pyproject.toml b/pyproject.toml index 22b43f1c7..ace7a76c8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,7 +72,7 @@ dev = [ "ipython >=8.5.0", "isort ==5.13.2", "mypy >=1.10.0", - "nbconvert <7.14", # Pinned due to directive errors in sphinx. See: https://github.com/jupyter/nbconvert/issues/2092 + "nbconvert >=7.16.4", "nbqa >=1.8.2", "nbval >=0.11.0", "numpydoc >=1.8.0", @@ -104,10 +104,11 @@ docs = [ "nc-time-axis >=1.4.1", "pooch >=1.8.0", "pybtex >=0.24.0", + "pygments <2.19", # FIXME: temporary fix for sphinx-codeautolink "sphinx >=7.0.0", "sphinx-autobuild >=2024.4.16", "sphinx-autodoc-typehints", - "sphinx-codeautolink", + "sphinx-codeautolink >=0.15.2,!=0.16.0", # FIXME: temporary fix for sphinx-codeautolink "sphinx-copybutton", "sphinx-mdinclude", "sphinxcontrib-bibtex", diff --git a/src/xclim/data/fr.json b/src/xclim/data/fr.json index d42c6b437..548eb3cfb 100644 --- a/src/xclim/data/fr.json +++ b/src/xclim/data/fr.json @@ -1033,6 +1033,18 @@ "title": "Jours avec neige", "abstract": "Nombre de jours où la neige est entre une borne inférieure et supérieure." }, + "HOLIDAY_SNOW_DAYS": { + "long_name": "Nombre de jours de neige durant les jours de Noël", + "description": "Nombre de jours de neige durant les jours de Noël.", + "title": "Jours de neige durant les jours de Noël", + "abstract": "Nombre de jours de neige durant les jours de Noël." + }, + "HOLIDAY_SNOW_AND_SNOWFALL_DAYS": { + "long_name": "Nombre de jours de neige et de jours de chute de neige durant les jours de Noël", + "description": "Nombre de jours de neige et de jours de chute de neige durant les jours de Noël.", + "title": "Jours de neige et de chute de neige durant les jours de Noël", + "abstract": "Nombre de jours de neige et de jours de chute de neige durant les jours de Noël." 
+ }, "SND_SEASON_LENGTH": { "long_name": "Durée de couvert de neige", "description": "La saison débute lorsque l'épaisseur de neige est au-dessus de {thresh} durant {window} jours et se termine lorsqu'elle redescend sous {thresh} durant {window} jours.", diff --git a/src/xclim/indicators/land/_snow.py b/src/xclim/indicators/land/_snow.py index ece6aca16..f4dd714ae 100644 --- a/src/xclim/indicators/land/_snow.py +++ b/src/xclim/indicators/land/_snow.py @@ -8,6 +8,8 @@ __all__ = [ "blowing_snow", + "holiday_snow_and_snowfall_days", + "holiday_snow_days", "snd_days_above", "snd_max_doy", "snd_season_end", @@ -254,3 +256,26 @@ class SnowWithIndexing(ResamplingIndicatorWithIndexing): abstract="Number of days when the snow amount is greater than or equal to a given threshold.", compute=xci.snw_days_above, ) + +holiday_snow_days = Snow( + title="Christmas snow days", + identifier="holiday_snow_days", + units="days", + long_name="Number of holiday days with snow", + description="The total number of days where snow on the ground was greater than or equal to {snd_thresh} " + "occurring on {date_start} and ending on {date_end}.", + abstract="The total number of days where there is a significant amount of snow on the ground on December 25th.", + compute=xci.holiday_snow_days, +) + +holiday_snow_and_snowfall_days = Snow( + title="Perfect Christmas snow days", + identifier="holiday_snow_and_snowfall_days", + units="days", + long_name="Number of holiday days with snow and snowfall", + description="The total number of days where snow on the ground was greater than or equal to {snd_thresh} " + "and snowfall was greater than or equal to {prsn_thresh} occurring on {date_start} and ending on {date_end}.", + abstract="The total number of days where there is a significant amount of snow on the ground " + "and a measurable snowfall occurring on December 25th.", + compute=xci.holiday_snow_and_snowfall_days, +) diff --git a/src/xclim/indices/_threshold.py b/src/xclim/indices/_threshold.py index 9f11be29c..a3580b212 100644 --- a/src/xclim/indices/_threshold.py +++ b/src/xclim/indices/_threshold.py @@ -9,7 +9,7 @@ import xarray from xclim.core import DayOfYearStr, Quantified -from xclim.core.calendar import doy_from_string, get_calendar +from xclim.core.calendar import doy_from_string, get_calendar, select_time from xclim.core.missing import at_least_n_valid from xclim.core.units import ( convert_units_to, @@ -22,7 +22,9 @@ ) from xclim.indices import run_length as rl from xclim.indices.generic import ( + bivariate_count_occurrences, compare, + count_occurrences, cumulative_difference, domain_count, first_day_threshold_reached, @@ -67,6 +69,8 @@ "growing_season_start", "heat_wave_index", "heating_degree_days", + "holiday_snow_and_snowfall_days", + "holiday_snow_days", "hot_spell_frequency", "hot_spell_max_length", "hot_spell_max_magnitude", @@ -376,7 +380,8 @@ def snd_season_end( window : int Minimum number of days with snow depth below threshold. freq : str - Resampling frequency. The default value is chosen for the northern hemisphere. + Resampling frequency. Default: "YS-JUL". + The default value is chosen for the northern hemisphere. Returns ------- @@ -2766,7 +2771,7 @@ def maximum_consecutive_frost_days( Let :math:`\mathbf{t}=t_0, t_1, \ldots, t_n` be a minimum daily temperature series and :math:`thresh` the threshold below which a day is considered a frost day. 
Let :math:`\mathbf{s}` be the sorted vector of indices :math:`i` where :math:`[t_i < thresh] \neq [t_{i+1} < thresh]`, that is, the days where the temperature crosses the threshold. - Then the maximum number of consecutive frost days is given by + Then the maximum number of consecutive frost days is given by: .. math:: @@ -2821,7 +2826,7 @@ def maximum_consecutive_dry_days( Let :math:`\mathbf{p}=p_0, p_1, \ldots, p_n` be a daily precipitation series and :math:`thresh` the threshold under which a day is considered dry. Then let :math:`\mathbf{s}` be the sorted vector of indices :math:`i` where :math:`[p_i < thresh] \neq [p_{i+1} < thresh]`, that is, the days where the precipitation crosses the threshold. - Then the maximum number of consecutive dry days is given by + Then the maximum number of consecutive dry days is given by: .. math:: @@ -3162,7 +3167,7 @@ def degree_days_exceedance_date( ----- Let :math:`TG_{ij}` be the daily mean temperature at day :math:`i` of period :math:`j`, :math:`T` is the reference threshold and :math:`ST` is the sum threshold. Then, starting - at day :math:i_0:, the degree days exceedance date is the first day :math:`k` such that + at day :math:`i_0`, the degree days exceedance date is the first day :math:`k` such that: .. math:: @@ -3454,7 +3459,7 @@ def wet_spell_frequency( Resampling frequency. resample_before_rl : bool Determines if the resampling should take place before or after the run length encoding (or a similar algorithm) is applied to runs. - op : {"sum","min", "max", "mean"} + op : {"sum", "min", "max", "mean"} Operation to perform on the window. Default is "sum", which checks that the sum of accumulated precipitation over the whole window is more than the threshold. @@ -3633,3 +3638,143 @@ def wet_spell_max_length( resample_before_rl=resample_before_rl, **indexer, ) + + +@declare_units( + snd="[length]", + snd_thresh="[length]", +) +def holiday_snow_days( + snd: xarray.DataArray, + snd_thresh: Quantified = "20 mm", + op: str = ">=", + date_start: str = "12-25", + date_end: str | None = None, + freq: str = "YS", +) -> xarray.DataArray: # numpydoc ignore=SS05 + """ + Christmas Days. + + Whether there is a significant amount of snow on the ground on December 25th (or a given date range). + + Parameters + ---------- + snd : xarray.DataArray + Surface snow depth. + snd_thresh : Quantified + Threshold snow amount. Default: 20 mm. + op : {">", "gt", ">=", "ge"} + Comparison operation. Default: ">=". + date_start : str + Beginning of analysis period. Default: "12-25" (December 25th). + date_end : str, optional + End of analysis period. If not provided, `date_start` is used. + Default: None. + freq : str + Resampling frequency. Default: "YS". + The default value is chosen for the northern hemisphere. + + Returns + ------- + xarray.DataArray, [int] + The total number of days with snow on the ground during the holiday.
+ + References + ---------- + https://www.canada.ca/en/environment-climate-change/services/weather-general-tools-resources/historical-christmas-snowfall-data.html + """ + snd_constrained = select_time( + snd, + date_bounds=(date_start, date_start if date_end is None else date_end), + ) + + xmas_days = count_occurrences( + snd_constrained, snd_thresh, freq, op, constrain=[">=", ">"] + ) + + xmas_days = to_agg_units(xmas_days, snd, "count") + return xmas_days + + +@declare_units( + snd="[length]", + prsn="[precipitation]", + snd_thresh="[length]", + prsn_thresh="[length]", +) +def holiday_snow_and_snowfall_days( + snd: xarray.DataArray, + prsn: xarray.DataArray, + snd_thresh: Quantified = "20 mm", + prsn_thresh: Quantified = "1 mm", + snd_op: str = ">=", + prsn_op: str = ">=", + date_start: str = "12-25", + date_end: str | None = None, + freq: str = "YS-JUL", +) -> xarray.DataArray: + r""" + Perfect Christmas Days. + + Whether there is a significant amount of snow on the ground and measurable snowfall occurring on December 25th. + + Parameters + ---------- + snd : xarray.DataArray + Surface snow depth. + prsn : xarray.DataArray + Snowfall flux. + snd_thresh : Quantified + Threshold snow amount. Default: 20 mm. + prsn_thresh : Quantified + Threshold daily snowfall liquid-water equivalent thickness. Default: 1 mm. + snd_op : {">", "gt", ">=", "ge"} + Comparison operation for snow depth. Default: ">=". + prsn_op : {">", "gt", ">=", "ge"} + Comparison operation for snowfall flux. Default: ">=". + date_start : str + Beginning of analysis period. Default: "12-25" (December 25th). + date_end : str, optional + End of analysis period. If not provided, `date_start` is used. + Default: None. + freq : str + Resampling frequency. Default: "YS-JUL". + The default value is chosen for the northern hemisphere. + + Returns + ------- + xarray.DataArray, [int] + The total number of days with snow and snowfall during the holiday. + + References + ---------- + https://www.canada.ca/en/environment-climate-change/services/weather-general-tools-resources/historical-christmas-snowfall-data.html + """ + snd_constrained = select_time( + snd, + date_bounds=(date_start, date_start if date_end is None else date_end), + ) + + prsn_mm = rate2amount( + convert_units_to(prsn, "mm day-1", context="hydro"), out_units="mm" + ) + prsn_mm_constrained = select_time( + prsn_mm, + date_bounds=(date_start, date_start if date_end is None else date_end), + ) + + perfect_xmas_days = bivariate_count_occurrences( + data_var1=snd_constrained, + data_var2=prsn_mm_constrained, + threshold_var1=snd_thresh, + threshold_var2=prsn_thresh, + op_var1=snd_op, + op_var2=prsn_op, + freq=freq, + var_reducer="all", + constrain_var1=[">=", ">"], + constrain_var2=[">=", ">"], + ) + + perfect_xmas_days = to_agg_units(perfect_xmas_days, snd, "count") + return perfect_xmas_days diff --git a/src/xclim/indices/generic.py b/src/xclim/indices/generic.py index e9dd96920..f8ead5977 100644 --- a/src/xclim/indices/generic.py +++ b/src/xclim/indices/generic.py @@ -34,6 +34,7 @@ __all__ = [ "aggregate_between_dates", "binary_ops", + "bivariate_count_occurrences", "bivariate_spell_length_statistics", "compare", "count_level_crossings", @@ -903,9 +904,9 @@ def count_occurrences( """ Calculate the number of times some condition is met.
- First, the threshold is transformed to the same standard_name and units as the input data: + First, the threshold is transformed to the same standard_name and units as the input data; Then the thresholding is performed as condition(data, threshold), - i.e. if condition is `<`, then this counts the number of times `data < threshold`: + i.e. if condition is `<`, then this counts the number of times `data < threshold`; Finally, count the number of occurrences when condition is met. Parameters @@ -934,6 +935,79 @@ return to_agg_units(out, data, "count", dim="time") +@declare_relative_units(threshold_var1="<data_var1>", threshold_var2="<data_var2>") +def bivariate_count_occurrences( + *, + data_var1: xr.DataArray, + data_var2: xr.DataArray, + threshold_var1: Quantified, + threshold_var2: Quantified, + freq: str, + op_var1: str, + op_var2: str, + var_reducer: str, + constrain_var1: Sequence[str] | None = None, + constrain_var2: Sequence[str] | None = None, +) -> xr.DataArray: + """ + Calculate the number of times some conditions are met for two variables. + + First, the thresholds are transformed to the same standard_name and units as their corresponding input data; + Then the thresholding is performed as condition(data, threshold) for each variable, + i.e. if condition is `<`, then this counts the number of times `data < threshold`; + Then the conditions are combined according to `var_reducer`; + Finally, the number of occurrences where conditions are met for "all" or "any" events are counted. + + Parameters + ---------- + data_var1 : xr.DataArray + An array. + data_var2 : xr.DataArray + An array. + threshold_var1 : Quantified + Threshold for data variable 1. + threshold_var2 : Quantified + Threshold for data variable 2. + freq : str + Resampling frequency defining the periods as defined in :ref:`timeseries.resampling`. + op_var1 : {">", "gt", "<", "lt", ">=", "ge", "<=", "le", "==", "eq", "!=", "ne"} + Logical operator for data variable 1. e.g. arr > thresh. + op_var2 : {">", "gt", "<", "lt", ">=", "ge", "<=", "le", "==", "eq", "!=", "ne"} + Logical operator for data variable 2. e.g. arr > thresh. + var_reducer : {"all", "any"} + The condition must either be fulfilled on *all* or *any* variables for the period to be considered an occurrence. + constrain_var1 : sequence of str, optional + Optionally allowed comparison operators for variable 1. + constrain_var2 : sequence of str, optional + Optionally allowed comparison operators for variable 2. + + Returns + ------- + xr.DataArray + The DataArray of counted occurrences. + + Notes + ----- + Sampling and variable units are derived from `data_var1`.
+ """ + threshold_var1 = convert_units_to(threshold_var1, data_var1) + threshold_var2 = convert_units_to(threshold_var2, data_var2) + + cond_var1 = compare(data_var1, op_var1, threshold_var1, constrain_var1) + cond_var2 = compare(data_var2, op_var2, threshold_var2, constrain_var2) + + if var_reducer == "all": + cond = cond_var1 & cond_var2 + elif var_reducer == "any": + cond = cond_var1 | cond_var2 + else: + raise ValueError(f"Unsupported value for var_reducer: {var_reducer}") + + out = cond.resample(time=freq).sum() + + return to_agg_units(out, data_var1, "count", dim="time") + + def diurnal_temperature_range( low_data: xr.DataArray, high_data: xr.DataArray, reducer: str, freq: str ) -> xr.DataArray: diff --git a/src/xclim/testing/registry.txt b/src/xclim/testing/registry.txt index ec0fbbfd4..5a219a55d 100644 --- a/src/xclim/testing/registry.txt +++ b/src/xclim/testing/registry.txt @@ -1,6 +1,28 @@ +CRCM5/tasmax_bby_198406_se.nc sha256:9a80cc19ed212428ef90ce0cc40790fbf0d1fc301df0abdf578da45843dae93d CanESM2_365day/pr_day_CanESM2_rcp85_r1i1p1_na10kgrid_qm-moving-50bins-detrend_2095.nc sha256:16dafec260dd74bf38f87482baa34cc35a1689facfb5557ebfc7d2c928618fc7 CanESM2_365day/tasmax_day_CanESM2_rcp85_r1i1p1_na10kgrid_qm-moving-50bins-detrend_2095.nc sha256:0c57c56e38a9e5b0623180c3def9406e9ddabbe7b1c01b282f1a34c4a61ea357 CanESM2_365day/tasmin_day_CanESM2_rcp85_r1i1p1_na10kgrid_qm-moving-50bins-detrend_2095.nc sha256:5d43ec47759bf9d118942277fe8d7c632765c3a0ba02dc828b0610e1f2030a63 +ERA5/daily_surface_cancities_1990-1993.nc sha256:049d54ace3d229a96cc621189daa3e1a393959ab8d988221cfc7b2acd7ab94b2 +EnsembleReduce/TestEnsReduceCriteria.nc sha256:ae7a70b9d5c54ab072f1cfbfab91d430a41c5067db3c1968af57ea2122cfe8e7 +EnsembleStats/BCCAQv2+ANUSPLIN300_ACCESS1-0_historical+rcp45_r1i1p1_1950-2100_tg_mean_YS.nc sha256:ca0cc893cf91db7c6dfe3df10d605684eabbea55b7e26077c10142d302e55aed +EnsembleStats/BCCAQv2+ANUSPLIN300_BNU-ESM_historical+rcp45_r1i1p1_1950-2100_tg_mean_YS.nc sha256:c796276f563849c31bf388a3beb4a440eeb72062a84b4cf9760c854d1e990ca4 +EnsembleStats/BCCAQv2+ANUSPLIN300_CCSM4_historical+rcp45_r1i1p1_1950-2100_tg_mean_YS.nc sha256:9cfa9bc4e81e936eb680a55db428ccd9f0a6d366d4ae2c4a9064bfa5d71e5ca7 +EnsembleStats/BCCAQv2+ANUSPLIN300_CCSM4_historical+rcp45_r2i1p1_1950-2100_tg_mean_YS.nc sha256:ca36aafb3c63ddb6bfc8537abb854b71f719505c1145d5c81c3315eb1a13647c +EnsembleStats/BCCAQv2+ANUSPLIN300_CNRM-CM5_historical+rcp45_r1i1p1_1970-2050_tg_mean_YS.nc sha256:623eab96d75d8cc8abd59dfba1c14cfb06fd7c0fe9ce86788d3c8b0891684df2 +FWI/GFWED_sample_2017.nc sha256:cf3bde795825663894fa7619a028d5a14fee307c623968235f25393f7afe159e +FWI/cffdrs_test_fwi.nc sha256:147be24e080aa67f17261f61f05a5dfb381a66a23785a327e47e2303667ca3ab +FWI/cffdrs_test_wDC.nc sha256:ebadcad1dd6a1a1e93c29a1143d7caefd46593ea2fbeb721015245981cce90c3 +HadGEM2-CC_360day/pr_day_HadGEM2-CC_rcp85_r1i1p1_na10kgrid_qm-moving-50bins-detrend_2095.nc sha256:c45ff4c17ba9fd92392bb08a7705789071a0bec40bde48f5a838ff12413cc33b +HadGEM2-CC_360day/tasmax_day_HadGEM2-CC_rcp85_r1i1p1_na10kgrid_qm-moving-50bins-detrend_2095.nc sha256:aa3eb54ea69bb00330de1037a48ac13dbc5b72f346c801d97731dec8260f400c +HadGEM2-CC_360day/tasmin_day_HadGEM2-CC_rcp85_r1i1p1_na10kgrid_qm-moving-50bins-detrend_2095.nc sha256:5c8fa666603fd68f614d95ac8c5a0dbdfb9f8e2e86666a270516a38526c1aa20 +NRCANdaily/nrcan_canada_daily_pr_1990.nc sha256:144479ec7a976cfecb6a10762d128a771356093d72caf5f075508ee86d25a1b0 +NRCANdaily/nrcan_canada_daily_tasmax_1990.nc 
sha256:84880205b798740e37a102c7f40e595d7a4fde6e35fb737a1ef68b8dad447526 +NRCANdaily/nrcan_canada_daily_tasmin_1990.nc sha256:13d61fc54cdcb4c1617ec777ccbf59575d8fdc24754f914042301bc1b024d7f7 +Raven/q_sim.nc sha256:f7a0ae73c498235e1c3e7338a184c5ca3729941b81521e606aa60b2c639f6e71 +SpatialAnalogs/CanESM2_ScenGen_Chibougamau_2041-2070.nc sha256:b6cfc4a963d68b6da8978acd26ffb506f33c9c264d8057badd90bf47cd9f3f3d +SpatialAnalogs/NRCAN_SECan_1981-2010.nc sha256:bde680ddad84106caad3a2e83a70ecdd8138578a70e875d77c2ec6d3ff868fee +SpatialAnalogs/dissimilarity.nc sha256:200ab9b7d43d41e6db917c54d35b43e3c5853e0df701e44efd5b813e47590110 +SpatialAnalogs/indicators.nc sha256:3bcbb0e4540d4badc085ac42b9d04a353e815fb55c62271eb73275b889c80a15 cmip3/tas.sresb1.giss_model_e_r.run1.atm.da.nc sha256:e709552beeeccafcfe280759edf5477ae5241c698409ca051b0899c16e92c95e cmip5/tas_Amon_CanESM2_rcp85_r1i1p1_200701-200712.nc sha256:7471770e4e654997225ab158f2b24aa0510b6f06006fb757b9ea7c0d4a47e1f2 cmip5/tas_Amon_HadGEM2-ES_rcp85_r1i1p1_200512-203011.nc sha256:3cb54d67bf89cdf542a7b93205785da3800f9a77eaa8436f4ee74af13b248b95 @@ -17,25 +39,9 @@ cmip5/tas_Amon_HadGEM2-ES_rcp85_r1i1p1_224912-227411.nc sha256:abbe16349870c5013 cmip5/tas_Amon_HadGEM2-ES_rcp85_r1i1p1_227412-229911.nc sha256:ecf52dc8ac13e04d0b643fc53cc5b367b32e68a311e6718686eaa87088788f98 cmip5/tas_Amon_HadGEM2-ES_rcp85_r1i1p1_229912-229912.nc sha256:3fa657483072d8a04363b8718bc9c4e63e6354617a4ab3d627b25222a4cd094c cmip6/o3_Amon_GFDL-ESM4_historical_r1i1p1f1_gr1_185001-194912.nc sha256:cfff189d4986289efb2b88f418cd6d65b26b59355b67b73ca26ac8fa12a9f83f +cmip6/prsn_day_CanESM5_historical_r1i1p1f1_gn_19910101-20101231.nc sha256:b272ae29fd668cd8a63ed2dc7949a1fd380ec67da98561a4beb34da371439815 cmip6/sic_SImon_CCCma-CanESM5_ssp245_r13i1p2f1_2020.nc sha256:58a03aa401f80751ad60c8950f14bcf717aeb6ef289169cb5ae3081bb4689825 -CRCM5/tasmax_bby_198406_se.nc sha256:9a80cc19ed212428ef90ce0cc40790fbf0d1fc301df0abdf578da45843dae93d -EnsembleReduce/TestEnsReduceCriteria.nc sha256:ae7a70b9d5c54ab072f1cfbfab91d430a41c5067db3c1968af57ea2122cfe8e7 -EnsembleStats/BCCAQv2+ANUSPLIN300_ACCESS1-0_historical+rcp45_r1i1p1_1950-2100_tg_mean_YS.nc sha256:ca0cc893cf91db7c6dfe3df10d605684eabbea55b7e26077c10142d302e55aed -EnsembleStats/BCCAQv2+ANUSPLIN300_BNU-ESM_historical+rcp45_r1i1p1_1950-2100_tg_mean_YS.nc sha256:c796276f563849c31bf388a3beb4a440eeb72062a84b4cf9760c854d1e990ca4 -EnsembleStats/BCCAQv2+ANUSPLIN300_CCSM4_historical+rcp45_r1i1p1_1950-2100_tg_mean_YS.nc sha256:9cfa9bc4e81e936eb680a55db428ccd9f0a6d366d4ae2c4a9064bfa5d71e5ca7 -EnsembleStats/BCCAQv2+ANUSPLIN300_CCSM4_historical+rcp45_r2i1p1_1950-2100_tg_mean_YS.nc sha256:ca36aafb3c63ddb6bfc8537abb854b71f719505c1145d5c81c3315eb1a13647c -EnsembleStats/BCCAQv2+ANUSPLIN300_CNRM-CM5_historical+rcp45_r1i1p1_1970-2050_tg_mean_YS.nc sha256:623eab96d75d8cc8abd59dfba1c14cfb06fd7c0fe9ce86788d3c8b0891684df2 -ERA5/daily_surface_cancities_1990-1993.nc sha256:049d54ace3d229a96cc621189daa3e1a393959ab8d988221cfc7b2acd7ab94b2 -FWI/GFWED_sample_2017.nc sha256:cf3bde795825663894fa7619a028d5a14fee307c623968235f25393f7afe159e -FWI/cffdrs_test_fwi.nc sha256:147be24e080aa67f17261f61f05a5dfb381a66a23785a327e47e2303667ca3ab -FWI/cffdrs_test_wDC.nc sha256:ebadcad1dd6a1a1e93c29a1143d7caefd46593ea2fbeb721015245981cce90c3 -HadGEM2-CC_360day/pr_day_HadGEM2-CC_rcp85_r1i1p1_na10kgrid_qm-moving-50bins-detrend_2095.nc sha256:c45ff4c17ba9fd92392bb08a7705789071a0bec40bde48f5a838ff12413cc33b 
-HadGEM2-CC_360day/tasmax_day_HadGEM2-CC_rcp85_r1i1p1_na10kgrid_qm-moving-50bins-detrend_2095.nc sha256:aa3eb54ea69bb00330de1037a48ac13dbc5b72f346c801d97731dec8260f400c -HadGEM2-CC_360day/tasmin_day_HadGEM2-CC_rcp85_r1i1p1_na10kgrid_qm-moving-50bins-detrend_2095.nc sha256:5c8fa666603fd68f614d95ac8c5a0dbdfb9f8e2e86666a270516a38526c1aa20 -NRCANdaily/nrcan_canada_daily_pr_1990.nc sha256:144479ec7a976cfecb6a10762d128a771356093d72caf5f075508ee86d25a1b0 -NRCANdaily/nrcan_canada_daily_tasmax_1990.nc sha256:84880205b798740e37a102c7f40e595d7a4fde6e35fb737a1ef68b8dad447526 -NRCANdaily/nrcan_canada_daily_tasmin_1990.nc sha256:13d61fc54cdcb4c1617ec777ccbf59575d8fdc24754f914042301bc1b024d7f7 -Raven/q_sim.nc sha256:f7a0ae73c498235e1c3e7338a184c5ca3729941b81521e606aa60b2c639f6e71 +cmip6/snw_day_CanESM5_historical_r1i1p1f1_gn_19910101-20101231.nc sha256:05263d68f5c7325439a170990731fcb90d1103a6c5e4f0c0fd1d3a44b92e88e0 sdba/CanESM2_1950-2100.nc sha256:b41fe603676e70d16c747ec207eb75ec86a39b665de401dcb23b5969ab3e1b32 sdba/adjusted_external.nc sha256:ff325c88eca96844bc85863744e4e08bcdf3d257388255636427ad5e11960d2e sdba/ahccd_1950-2013.nc sha256:7e9a1f61c1d04ca257b09857a82715f1fa3f0550d77f97b7306d4eaaf0c70239 @@ -44,8 +50,4 @@ uncertainty_partitioning/cmip5_pr_global_mon.nc sha256:7e585c995e95861979fd23dd9 uncertainty_partitioning/cmip5_pr_pnw_mon.nc sha256:1cdfe74f5bd5cf71cd0737c190277821ea90e4e79de5b37367bf2b82c35a66c9 uncertainty_partitioning/cmip5_tas_global_mon.nc sha256:41ba79a43bab169a0487e3f3f66a68a699bef9355a13e26a87fdb65744555cb5 uncertainty_partitioning/cmip5_tas_pnw_mon.nc sha256:eeb48765fd430186f3634e7f779b4be45ab3df73e806a4cbb743fefb13279398 -SpatialAnalogs/CanESM2_ScenGen_Chibougamau_2041-2070.nc sha256:b6cfc4a963d68b6da8978acd26ffb506f33c9c264d8057badd90bf47cd9f3f3d -SpatialAnalogs/NRCAN_SECan_1981-2010.nc sha256:bde680ddad84106caad3a2e83a70ecdd8138578a70e875d77c2ec6d3ff868fee -SpatialAnalogs/dissimilarity.nc sha256:200ab9b7d43d41e6db917c54d35b43e3c5853e0df701e44efd5b813e47590110 -SpatialAnalogs/indicators.nc sha256:3bcbb0e4540d4badc085ac42b9d04a353e815fb55c62271eb73275b889c80a15 uncertainty_partitioning/seattle_avg_tas.csv sha256:157d6721f9925eec8268848e34548df2b1da50935f247a9b136d251ef53898d7 diff --git a/src/xclim/testing/utils.py b/src/xclim/testing/utils.py index 467fc6493..a06add28d 100644 --- a/src/xclim/testing/utils.py +++ b/src/xclim/testing/utils.py @@ -72,7 +72,7 @@ "testing_setup_warnings", ] -default_testdata_version = "v2024.8.23" +default_testdata_version = "v2025.1.8" """Default version of the testing data to use when fetching datasets.""" default_testdata_repo_url = ( diff --git a/tests/test_indices.py b/tests/test_indices.py index 22978b49d..f766b7d7f 100644 --- a/tests/test_indices.py +++ b/tests/test_indices.py @@ -28,6 +28,7 @@ from xclim.core.calendar import percentile_doy from xclim.core.options import set_options from xclim.core.units import convert_units_to, units +from xclim.indices import prsnd_to_prsn K2C = 273.15 @@ -1468,6 +1469,82 @@ def test_1d( np.testing.assert_allclose(hwml.values, expected) +class TestHolidayIndices: + + def test_xmas_days_simple(self, snd_series): + # 5ish years of data, starting from 2000-07-01 + snd = snd_series(np.zeros(365 * 5), units="cm") + + # add snow on ground on December 25 for first 3 years + snd.loc["2000-12-25"] = 2 + snd.loc["2001-12-25"] = 1.5 # not enough + snd.loc["2002-12-25"] = 2 + snd.loc["2003-12-25"] = 0 # no snow + snd.loc["2004-12-25"] = 6 + + out = xci.holiday_snow_days(snd) + np.testing.assert_array_equal(out, [1, 
0, 1, 0, 1, 0]) + + def test_xmas_days_range(self, snd_series): + # 5ish years of data, starting from 2000-07-01 + snd = snd_series(np.zeros(365 * 5), units="cm") + + # add snow on ground on December 25 for first 3 years + snd.loc["2000-12-25"] = 2 + snd.loc["2001-12-25"] = 1.5 # not enough + snd.loc["2002-12-24"] = 10 # a réveillon miracle + snd.loc["2002-12-25"] = 2 + snd.loc["2003-12-25"] = 0 # no snow + snd.loc["2004-12-25"] = 6 + + out = xci.holiday_snow_days(snd, date_start="12-24", date_end="12-25") + np.testing.assert_array_equal(out, [1, 0, 2, 0, 1, 0]) + + def test_perfect_xmas_days(self, snd_series, prsn_series): + # 5ish years of data, starting from 2000-07-01 + a = np.zeros(365 * 5) + snd = snd_series(a, units="mm") + # prsnd is snowfall using snow density of 100 kg/m3 + prsnd = prsn_series(a.copy(), units="cm day-1") + + # add snow on ground on December 25 + snd.loc["2000-12-25"] = 20 + snd.loc["2001-12-25"] = 15 # not enough + snd.loc["2001-12-26"] = 30 # too bad it's Boxing Day + snd.loc["2002-12-25"] = 20 + snd.loc["2003-12-25"] = 0 # no snow + snd.loc["2004-12-25"] = 60 + + # add snowfall on December 25 + prsnd.loc["2000-12-25"] = 5 + prsnd.loc["2001-12-25"] = 2 + prsnd.loc["2001-12-26"] = 30 # too bad it's Boxing Day + prsnd.loc["2002-12-25"] = 1 # not quite enough + prsnd.loc["2003-12-25"] = 0 # no snow + prsnd.loc["2004-12-25"] = 10 + + prsn = prsnd_to_prsn(prsnd) + prsn = convert_units_to(prsn, "kg m-2 s-1", context="hydro") + + out1 = xci.holiday_snow_and_snowfall_days(snd, prsn) + np.testing.assert_array_equal(out1, [1, 0, 0, 0, 1]) + + out2 = xci.holiday_snow_and_snowfall_days( + snd, prsn, snd_thresh="15 mm", prsn_thresh="0.5 mm" + ) + np.testing.assert_array_equal(out2, [1, 1, 1, 0, 1]) + + out3 = xci.holiday_snow_and_snowfall_days( + snd, + prsn, + snd_thresh="10 mm", + prsn_thresh="0.5 mm", + date_start="12-25", + date_end="12-26", + ) + np.testing.assert_array_equal(out3, [1, 2, 1, 0, 1]) + + class TestHotSpellFrequency: @pytest.mark.parametrize( "thresh,window,op,expected", diff --git a/tests/test_snow.py b/tests/test_snow.py index 2144f757f..395faadbc 100644 --- a/tests/test_snow.py +++ b/tests/test_snow.py @@ -2,6 +2,7 @@ import numpy as np import pytest +import xarray as xr from xclim import land from xclim.core import ValidationError @@ -120,3 +121,61 @@ def test_simple(self, snw_series): snw = snw_series(a, start="2001-01-01") out = land.snw_max_doy(snw, freq="YS") np.testing.assert_array_equal(out, [21, np.nan]) + + +class TestHolidaySnowIndicators: + + def test_xmas_days_simple(self, nimbus): + ds = xr.open_dataset( + nimbus.fetch( + "cmip6/snw_day_CanESM5_historical_r1i1p1f1_gn_19910101-20101231.nc" + ) + ) + snd = land.snw_to_snd(ds.snw) + + out = land.holiday_snow_days(snd) + + assert out.units == "days" + assert out.long_name == "Number of holiday days with snow" + np.testing.assert_array_equal( + out.sum(dim="time"), + [ + [7.0, 5.0, 2.0, 0.0, 0.0], + [14.0, 13.0, 9.0, 6.0, 2.0], + [18.0, 19.0, 19.0, 18.0, 13.0], + [20.0, 20.0, 20.0, 20.0, 20.0], + [20.0, 20.0, 20.0, 20.0, 20.0], + [20.0, 20.0, 20.0, 20.0, 20.0], + ], + ) + + def test_perfect_xmas_days_simple(self, nimbus): + ds_snw = xr.open_dataset( + nimbus.fetch( + "cmip6/snw_day_CanESM5_historical_r1i1p1f1_gn_19910101-20101231.nc" + ) + ) + ds_prsn = xr.open_dataset( + nimbus.fetch( + "cmip6/prsn_day_CanESM5_historical_r1i1p1f1_gn_19910101-20101231.nc" + ) + ) + + snd = land.snw_to_snd(ds_snw.snw) + prsn = ds_prsn.prsn + + out = land.holiday_snow_and_snowfall_days(snd, prsn) + + assert 
out.units == "days" + assert out.long_name == "Number of holiday days with snow and snowfall" + np.testing.assert_array_equal( + out.sum(dim="time"), + [ + [3.0, 0.0, 0.0, 0.0, 0.0], + [5.0, 2.0, 1.0, 1.0, 1.0], + [6.0, 5.0, 4.0, 4.0, 5.0], + [7.0, 11.0, 12.0, 9.0, 6.0], + [10.0, 8.0, 12.0, 10.0, 8.0], + [9.0, 11.0, 10.0, 7.0, 9.0], + ], + )
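
Reviewer note: a minimal usage sketch of the new ``holiday_snow_days`` index follows, for trying the change without the nimbus test data. It assumes this branch of xclim is installed; the ``time``/``snd`` names, the three-year toy series, and the expected values in the comments are illustrative only and are not part of the changeset.

import numpy as np
import pandas as pd
import xarray as xr

import xclim.indices as xci

# Three years of daily snow depth, in centimetres, starting mid-year.
time = pd.date_range("2000-07-01", periods=365 * 3, freq="D")
snd = xr.DataArray(
    np.zeros(time.size),
    dims="time",
    coords={"time": time},
    attrs={"units": "cm"},
)
snd.loc["2000-12-25"] = 2.0  # 20 mm on the ground, meets the default "20 mm" threshold
snd.loc["2001-12-25"] = 1.5  # 15 mm, below the default threshold

# One count per calendar year (freq="YS"): a snowy December 25th only in 2000.
xmas = xci.holiday_snow_days(snd)
print(xmas.values)  # -> [1, 0, 0, 0]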
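
The new generic helper in src/xclim/indices/generic.py can also be exercised directly. This sketch continues the toy ``time``/``snd`` objects from the example above (same assumptions), adding an illustrative snowfall series already expressed as a daily liquid-water-equivalent amount in millimetres.

from xclim.indices.generic import bivariate_count_occurrences

# Daily snowfall as a liquid-water-equivalent amount; units set explicitly to "mm".
prsn_mm = xr.zeros_like(snd)
prsn_mm.attrs["units"] = "mm"
prsn_mm.loc["2000-12-25"] = 5.0

# Count days per year where both conditions hold ("all"): snd >= 20 mm and prsn_mm >= 1 mm.
both = bivariate_count_occurrences(
    data_var1=snd,
    data_var2=prsn_mm,
    threshold_var1="20 mm",
    threshold_var2="1 mm",
    freq="YS",
    op_var1=">=",
    op_var2=">=",
    var_reducer="all",
)
print(both.values)  # -> [1, 0, 0, 0]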