Skip to content

Commit

Permalink
Add glathida to prepro as well
Browse files Browse the repository at this point in the history
  • Loading branch information
fmaussion committed Nov 21, 2023
1 parent ee71325 commit 7fe1f45
Show file tree
Hide file tree
Showing 4 changed files with 107 additions and 8 deletions.
25 changes: 20 additions & 5 deletions oggm/cli/prepro_levels.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ def run_prepro_levels(rgi_version=None, rgi_reg=None, border=None,
select_source_from_dir=None, keep_dem_folders=False,
add_consensus_thickness=False, add_itslive_velocity=False,
add_millan_thickness=False, add_millan_velocity=False,
add_hugonnet_dhdt=False,
add_hugonnet_dhdt=False, add_glathida=False,
start_level=None, start_base_url=None, max_level=5,
logging_level='WORKFLOW',
dynamic_spinup=False, err_dmdtda_scaling_factor=0.2,
Expand Down Expand Up @@ -157,6 +157,9 @@ def run_prepro_levels(rgi_version=None, rgi_reg=None, border=None,
add_hugonnet_dhdt : bool
adds (reprojects) the hugonnet dhdt maps to the glacier
directories. With elev_bands=True, the data will also be binned.
add_glathida : bool
adds (reprojects) the glathida thickness data to the glacier
directories. Data points are stored as csv files.
start_level : int
the pre-processed level to start from (default is to start from
scratch). If set, you'll need to indicate start_base_url as well.
Expand Down Expand Up @@ -473,6 +476,9 @@ def _time_log():
from oggm.shop.hugonnet_maps import hugonnet_to_gdir
workflow.execute_entity_task(hugonnet_to_gdir, gdirs)
bin_variables.append('hugonnet_dhdt')
if add_glathida:
from oggm.shop.glathida import glathida_to_gdir
workflow.execute_entity_task(glathida_to_gdir, gdirs)

if bin_variables and gdirs_band:
workflow.execute_entity_task(tasks.elevation_band_flowline,
Expand Down Expand Up @@ -528,14 +534,18 @@ def _time_log():
from oggm.shop.millan22 import compile_millan_statistics
opath = os.path.join(sum_dir, 'millan_statistics_{}.csv'.format(rgi_reg))
compile_millan_statistics(gdirs, path=opath)
if add_hugonnet_dhdt:
from oggm.shop.hugonnet_maps import compile_hugonnet_statistics
opath = os.path.join(sum_dir, 'hugonnet_statistics_{}.csv'.format(rgi_reg))
compile_hugonnet_statistics(gdirs, path=opath)
if add_consensus_thickness:
from oggm.shop.bedtopo import compile_consensus_statistics
opath = os.path.join(sum_dir, 'consensus_statistics_{}.csv'.format(rgi_reg))
compile_consensus_statistics(gdirs, path=opath)
if add_hugonnet_dhdt:
from oggm.shop.hugonnet_maps import compile_hugonnet_statistics
opath = os.path.join(sum_dir, 'hugonnet_statistics_{}.csv'.format(rgi_reg))
compile_hugonnet_statistics(gdirs, path=opath)
if add_glathida:
from oggm.shop.glathida import compile_glathida_statistics
opath = os.path.join(sum_dir, 'glathida_statistics_{}.csv'.format(rgi_reg))
compile_glathida_statistics(gdirs, path=opath)

# And for level 2: shapes
if len(gdirs_cent) > 0:
Expand Down Expand Up @@ -868,6 +878,10 @@ def parse_args(args):
'maps to the glacier directories. '
'With --elev-bands, the data will also be '
'binned.')
parser.add_argument('--add-glathida', nargs='?', const=True, default=False,
help='adds (reprojects) the glathida point thickness '
'observations to the glacier directories. '
'The data points are stored as csv.')
parser.add_argument('--demo', nargs='?', const=True, default=False,
help='if you want to run the prepro for the '
'list of demo glaciers.')
Expand Down Expand Up @@ -955,6 +969,7 @@ def parse_args(args):
add_itslive_velocity=args.add_itslive_velocity,
add_millan_velocity=args.add_millan_velocity,
add_hugonnet_dhdt=args.add_hugonnet_dhdt,
add_glathida=args.add_glathida,
dynamic_spinup=dynamic_spinup,
err_dmdtda_scaling_factor=args.err_dmdtda_scaling_factor,
dynamic_spinup_start_year=args.dynamic_spinup_start_year,
Expand Down
79 changes: 77 additions & 2 deletions oggm/shop/glathida.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
import logging
import os

# External libs
import pandas as pd
import numpy as np

# Optional libs
try:
Expand All @@ -10,7 +12,7 @@
pass

# Locals
from oggm import utils
from oggm import utils, cfg

# Module logger
log = logging.getLogger(__name__)
Expand Down Expand Up @@ -70,5 +72,78 @@ def glathida_to_gdir(gdir):
# We trick by creating an index of similar i's and j's
df['ij_grid'] = ['{:04d}_{:04d}'.format(i, j) for i, j in zip(ii, jj)]

df.to_csv(gdir.get_filepath('glathida_data'))
df.to_csv(gdir.get_filepath('glathida_data'), index=None)
return df


@utils.entity_task(log)
def glathida_statistics(gdir):
    """Gather statistics about the GlaThiDa data interpolated to this glacier.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process

    Returns
    -------
    d : dict
        point counts, thickness statistics and survey dates for this
        glacier. Default (zero / NaN / None) values are returned when the
        glathida data file is missing or does not contain usable points.
    """

    d = dict()

    # Easy stats - this should always be possible
    d['rgi_id'] = gdir.rgi_id
    d['rgi_region'] = gdir.rgi_region
    d['rgi_subregion'] = gdir.rgi_subregion
    d['rgi_area_km2'] = gdir.rgi_area_km2
    d['n_points'] = 0
    d['n_valid_thick_points'] = 0
    d['n_valid_elev_points'] = 0
    d['n_valid_gridded_points'] = 0
    # np.nan, not np.NaN: the NaN alias was removed in NumPy 2.0
    d['avg_thick'] = np.nan
    d['max_thick'] = np.nan
    d['date_mode'] = None
    d['date_min'] = None
    d['date_max'] = None

    try:
        dfo = pd.read_csv(gdir.get_filepath('glathida_data'))
        d['n_points'] = len(dfo)
        # Only strictly positive thicknesses count as valid measurements
        df = dfo.loc[dfo.thickness > 0]
        # Points sharing a grid cell carry the same 'ij_grid' key (set by
        # glathida_to_gdir) - average them to get one value per cell
        dfg = df.groupby(by='ij_grid')['thickness'].mean()
        d['n_valid_thick_points'] = len(df)
        d['n_valid_elev_points'] = (dfo.elevation > 0).sum()
        d['n_valid_gridded_points'] = len(dfg)
        d['avg_thick'] = df.thickness.mean()
        d['max_thick'] = df.thickness.max()
        # IndexError: .mode() of an empty Series is empty, so .iloc[0]
        # would raise when no valid thickness points remain
        d['date_mode'] = df.date.mode().iloc[0]
        d['date_min'] = df.date.min()
        d['date_max'] = df.date.max()
    except (FileNotFoundError, AttributeError, KeyError, IndexError):
        # Best effort: keep the default values when the data is
        # missing or incomplete
        pass

    return d


@utils.global_task(log)
def compile_glathida_statistics(gdirs, filesuffix='', path=True):
    """Compile the GlaThiDa statistics of several glaciers into one table.

    Useful for result diagnostics and other analyses.

    Parameters
    ----------
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        the glacier directories to process
    filesuffix : str
        add suffix to output file
    path : str, bool
        Set to "True" in order to store the info in the working directory
        Set to a path to store the file to your chosen location

    Returns
    -------
    out : :py:class:`pandas.DataFrame`
        one row of statistics per glacier, indexed by ``rgi_id``
    """
    from oggm.workflow import execute_entity_task

    # One dict of stats per glacier, gathered in parallel
    rows = execute_entity_task(glathida_statistics, gdirs)
    out = pd.DataFrame(rows).set_index('rgi_id')

    if path is True:
        # Default location: the working directory
        fname = 'glathida_statistics' + filesuffix + '.csv'
        out.to_csv(os.path.join(cfg.PATHS['working_dir'], fname))
    elif path:
        # Caller-provided target path
        out.to_csv(path)

    return out
5 changes: 4 additions & 1 deletion oggm/tests/funcs.py
Original file line number Diff line number Diff line change
Expand Up @@ -354,7 +354,10 @@ def get_test_dir():
# If new ident, remove all other dirs so spare space
for d in os.listdir(cfg.PATHS['test_dir']):
if d and d != s:
shutil.rmtree(os.path.join(cfg.PATHS['test_dir'], d))
try:
shutil.rmtree(os.path.join(cfg.PATHS['test_dir'], d))
except NotADirectoryError:
pass

return _TEST_DIR

Expand Down
6 changes: 6 additions & 0 deletions oggm/tests/test_shop.py
Original file line number Diff line number Diff line change
Expand Up @@ -754,3 +754,9 @@ def test_to_glacier(self, class_case_dir, monkeypatch):

dsf = df[['ij_grid', 'thickness']].groupby('ij_grid').mean()
assert 1600 < len(dsf) < 1700

sdf = glathida.compile_glathida_statistics([gdir]).iloc[0]

assert sdf['n_valid_gridded_points'] < sdf['n_valid_thick_points']
assert sdf['date_mode'] < sdf['date_max']
assert sdf['avg_thick'] < sdf['max_thick']

0 comments on commit 7fe1f45

Please sign in to comment.