More options for prepro directories

fmaussion committed Nov 17, 2023
1 parent d37540c commit e0eccd6
Showing 6 changed files with 51 additions and 41 deletions.
46 changes: 12 additions & 34 deletions oggm/cfg.py
@@ -295,7 +295,7 @@ def _log_param_change(self, key, value):
BASENAMES['elevation_band_flowline'] = ('elevation_band_flowline.csv', _doc)


def set_logging_config(logging_level='INFO', future=False):
def set_logging_config(logging_level='INFO'):
"""Set the global logger parameters.
Logging levels:
@@ -324,8 +324,6 @@ def set_logging_config(logging_level='INFO', future=False):
the logging level. See description above for a list of options. Setting
to `None` is equivalent to `'CRITICAL'`, i.e. no log output will be
generated.
future : bool
use the new behavior of logging='WORKFLOW'.
"""

# Add a custom level - just for us
@@ -358,31 +356,12 @@ def workflow(self, message, *args, **kws):

logging_level = logging_level.upper()

# Deprecation warning
if logging_level == 'WORKFLOW' and not future:

msg = ('In future versions of OGGM, the logging config WORKFLOW '
'will no longer print ERROR or WARNING messages, but only high '
'level information (i.e. hiding potential errors in your code '
'but also avoiding cluttered log files for runs with '
'many expected errors, e.g. global runs). If you want to obtain '
'a similar logger behavior as before, set '
"`logging_level='WARNING'`, which will print high level info "
"as well as errors and warnings during the run. If you "
"want to use the new behavior and suppress this warning, "
"set `logging_level='WORKFLOW'` and `future=True`.")
warnings.warn(msg, category=FutureWarning)

# Set old behavior
logging_level = 'WARNING'

logging.basicConfig(format='%(asctime)s: %(name)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=getattr(logging, logging_level))


def initialize_minimal(file=None, logging_level='INFO', params=None,
future=False):
def initialize_minimal(file=None, logging_level='INFO', params=None):
"""Same as initialise() but without requiring any download of data.
This is useful for "flowline only" OGGM applications
@@ -395,14 +374,12 @@
set a logging level. See :func:`set_logging_config` for options.
params : dict
overrides for specific parameters from the config file
future : bool
use the new behavior of logging='WORKFLOW'.
"""
global IS_INITIALIZED
global PARAMS
global PATHS

set_logging_config(logging_level=logging_level, future=future)
set_logging_config(logging_level=logging_level)

is_default = False
if file is None:
@@ -542,6 +519,10 @@ def initialize_minimal(file=None, logging_level='INFO', params=None,
PARAMS[k] = [str(vk) for vk in cp.as_list(k)]
k = 'store_fl_diagnostic_variables'
PARAMS[k] = [str(vk) for vk in cp.as_list(k)]
k = 'by_bin_dx'
PARAMS[k] = [float(vk) for vk in cp.as_list(k)]
k = 'by_bin_bins'
PARAMS[k] = [float(vk) for vk in cp.as_list(k)]

# Flowline model
k = 'glacier_length_method'
@@ -566,12 +547,12 @@

# Delete non-floats
ltr = ['working_dir', 'dem_file', 'climate_file', 'use_tar_shapefiles',
'grid_dx_method', 'compress_climate_netcdf',
'grid_dx_method', 'compress_climate_netcdf', 'by_bin_dx',
'mp_processes', 'use_multiprocessing', 'clip_dem_to_zero',
'topo_interp', 'use_compression', 'bed_shape', 'continue_on_error',
'use_multiple_flowlines', 'border', 'use_temp_bias_from_file',
'mpi_recv_buf_size', 'map_proj', 'evolution_model',
'hydro_month_sh', 'hydro_month_nh',
'hydro_month_sh', 'hydro_month_nh', 'by_bin_bins',
'use_intersects', 'filter_min_slope', 'clip_tidewater_border',
'auto_skip_task', 'ref_mb_valid_window',
'rgi_version', 'dl_verify', 'use_mp_spawn', 'calving_use_limiter',
@@ -597,7 +578,7 @@ def initialize_minimal(file=None, logging_level='INFO', params=None,
IS_INITIALIZED = True


def initialize(file=None, logging_level='INFO', params=None, future=False):
def initialize(file=None, logging_level='INFO', params=None):
"""Read the configuration file containing the run's parameters.
This should be the first call, before using any of the other OGGM modules
@@ -611,20 +592,17 @@ def initialize(file=None, logging_level='INFO', params=None, future=False):
set a logging level. See :func:`set_logging_config` for options.
params : dict
overrides for specific parameters from the config file
future : bool
use the new behavior of logging='WORKFLOW'.
"""
global PARAMS
global DATA

initialize_minimal(file=file, logging_level=logging_level, params=params,
future=future)
initialize_minimal(file=file, logging_level=logging_level, params=params)

# Do not spam
PARAMS.do_log = False

# Make sure we have a proper cache dir
from oggm.utils import download_oggm_files, get_demo_file
from oggm.utils import download_oggm_files
download_oggm_files()

# Read in the demo glaciers
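Note on usage after this change (a minimal sketch, not part of the diff above): the `future` keyword has been dropped from set_logging_config, initialize_minimal and initialize, so callers simply pass the logging level they want, and 'WORKFLOW' directly enables the quieter behaviour that the old FutureWarning announced.

from oggm import cfg

# The `future=True` switch no longer exists; 'WORKFLOW' now prints only
# high-level workflow messages (no ERROR/WARNING output). Illustrative
# call, assuming the standard OGGM entry point.
cfg.initialize(logging_level='WORKFLOW')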
2 changes: 1 addition & 1 deletion oggm/cli/benchmark.py
@@ -70,7 +70,7 @@ def run_benchmark(rgi_version=None, rgi_reg=None, border=None,
override_params['working_dir'] = working_dir

# Initialize OGGM and set up the run parameters
cfg.initialize(logging_level=logging_level, params=override_params, future=True)
cfg.initialize(logging_level=logging_level, params=override_params)

# Use multiprocessing?
cfg.PARAMS['use_multiprocessing'] = True
18 changes: 12 additions & 6 deletions oggm/cli/prepro_levels.py
@@ -102,7 +102,8 @@ def run_prepro_levels(rgi_version=None, rgi_reg=None, border=None,
output_folder : str
path to the output folder (where to put the preprocessed tar files)
dem_source : str
which DEM source to use: default, SOURCE_NAME or ALL
which DEM source to use: default, SOURCE_NAME, STANDARD or ALL
"standard" is COPDEM + NASADEM
working_dir : str
path to the OGGM working directory
params_file : str
@@ -238,8 +239,7 @@ def _time_log():

# Initialize OGGM and set up the run parameters
cfg.initialize(file=params_file, params=override_params,
logging_level=logging_level,
future=True)
logging_level=logging_level)

# Prepare the download of climate file to be shared across processes
# TODO
@@ -348,10 +348,16 @@ def _time_log():
cfg.PATHS['dem_file'] = test_topofile

# Which DEM source?
if dem_source.upper() == 'ALL':
if dem_source.upper() in ['ALL', 'STANDARD']:
# This is the complex one, just do the job and leave
log.workflow('Running prepro on ALL sources')
for i, s in enumerate(utils.DEM_SOURCES):

if dem_source.upper() == 'ALL':
sources = utils.DEM_SOURCES
if dem_source.upper() == 'STANDARD':
sources = ['COPDEM30', 'COPDEM90', 'NASADEM']

log.workflow('Running prepro on several sources')
for i, s in enumerate(sources):
rs = i == 0
log.workflow('Running prepro on sources: {}'.format(s))
gdirs = workflow.init_glacier_directories(rgidf, reset=rs,
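For illustration, a hedged usage sketch of the new option (the argument values below are placeholders; only dem_source='STANDARD' is the point of this change, and only parameters visible in the signature above are used):

from oggm.cli.prepro_levels import run_prepro_levels

# 'STANDARD' loops over COPDEM30, COPDEM90 and NASADEM only, whereas
# 'ALL' still loops over every entry in utils.DEM_SOURCES.
run_prepro_levels(rgi_version='62', rgi_reg='11', border=80,
                  output_folder='./prepro_out', working_dir='./wdir',
                  dem_source='STANDARD')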
7 changes: 7 additions & 0 deletions oggm/core/gis.py
@@ -261,6 +261,13 @@ def glacier_grid_params(gdir):
dx = np.rint(cfg.PARAMS['d1'] * np.sqrt(area) + cfg.PARAMS['d2'])
elif dxmethod == 'fixed':
dx = np.rint(cfg.PARAMS['fixed_dx'])
elif dxmethod == 'by_bin':
bins = cfg.PARAMS['by_bin_bins']
bin_dx = cfg.PARAMS['by_bin_dx']
for i, (b1, b2) in enumerate(zip(bins[:-1], bins[1:])):
if b1 < area <= b2:
dx = np.rint(bin_dx[i])
break
else:
raise InvalidParamsError('grid_dx_method not supported: {}'
.format(dxmethod))
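Standalone sketch of the new 'by_bin' selection (a self-contained rewrite of the logic above, using the default values added to params.cfg below; it does not call into OGGM):

import numpy as np

def dx_by_bin(area_km2,
              bins=(0, 8, 80, 300, 1e12),
              bin_dx=(25, 50, 100, 200)):
    """Pick the map resolution (m) for a glacier of a given area (km2)."""
    for i, (b1, b2) in enumerate(zip(bins[:-1], bins[1:])):
        if b1 < area_km2 <= b2:
            return float(np.rint(bin_dx[i]))
    raise ValueError('area outside of the configured bins')

print(dx_by_bin(5))    # 25.0  -> small glacier, fine grid
print(dx_by_bin(50))   # 50.0
print(dx_by_bin(500))  # 200.0 -> very large glacier, coarse grid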
7 changes: 7 additions & 0 deletions oggm/params.cfg
@@ -72,6 +72,7 @@ map_proj = 'tmerc'

# Decision on grid spatial resolution for each glacier
# 'fixed': dx (meters) = fixed_dx
# 'by_bin': dx (meters) = by_bin_dx[i], with i the glacier's area bin from by_bin_bins
# 'linear': dx (meters) = d1 * AREA (km) + d2 ; clipped to dmax (e.g.: 5, 10, 200)
# 'square': dx (meters) = d1 * sqrt(AREA) (km) + d2 ; clipped to dmax (e.g.: 20, 10, 200)

@@ -84,6 +85,12 @@ dmax = 200.
# Ignored if grid_dx_method != 'fixed'
fixed_dx = 50.

# Ignored if grid_dx_method != 'by_bin'
# by_bin_bins should be of len(by_bin_dx)+1 and given in km2.
# The first and last bins should be chosen so that all glaciers are included
by_bin_dx = 25, 50, 100, 200
by_bin_bins = 0, 8, 80, 300, 1e12

# Which algorithm to use for interpolating the topography to the local grid
# 'bilinear' or 'cubic'
topo_interp = cubic
12 changes: 12 additions & 0 deletions oggm/tests/test_prepro.py
@@ -201,6 +201,18 @@ def test_dx_methods(self):
mygrid = salem.Grid.from_json(gdir.get_filepath('glacier_grid'))
np.testing.assert_allclose(np.abs(mygrid.dx), 50.)

# Test binned method
cfg.PARAMS['grid_dx_method'] = 'by_bin'
gis.define_glacier_region(gdir)
mygrid = salem.Grid.from_json(gdir.get_filepath('glacier_grid'))
np.testing.assert_allclose(np.abs(mygrid.dx), 50.)

cfg.PARAMS['grid_dx_method'] = 'by_bin'
cfg.PARAMS['by_bin_dx'] = [25, 75, 100, 200]
gis.define_glacier_region(gdir)
mygrid = salem.Grid.from_json(gdir.get_filepath('glacier_grid'))
np.testing.assert_allclose(np.abs(mygrid.dx), 75.)

# Test linear method
cfg.PARAMS['grid_dx_method'] = 'linear'
cfg.PARAMS['d1'] = 5.