import logging
import os

import numpy as np
import pandas as pd
import xarray as xr

from oggm import cfg
from oggm import tasks, utils, workflow, graphics
from oggm.core import massbalance

# Module logger
log = logging.getLogger(__name__)


def _six_frames(gdir):
    """Fallback: empty output dictionaries if the task below fails."""
    d1 = dict()  # Area
    d2 = dict()  # Average mass balance
    d3 = dict()  # Temperature
    d4 = dict()  # Temperature for melt
    d5 = dict()  # Precipitation
    d6 = dict()  # Solid precipitation
    out = (d1, d2, d3, d4, d5, d6)
    # Easy stats - this should always be possible
    for d in out:
        d["rgi_id"] = gdir.rgi_id
    return out


@utils.entity_task(log, fallback=_six_frames)
def binned_average_mb_statistics(gdir):
    """Binned mass balance and climate statistics (2000-2019 averages).

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    """
    out = _six_frames(gdir)

    try:
        with xr.open_dataset(gdir.get_filepath("gridded_data")) as ds:
            dem = ds["topo"].data
            valid_mask = ds["glacier_mask"].data
    except Exception:
        # No usable gridded data: return the ids-only fallback
        return out

    # Elevation bins of fixed size over the glacierized DEM cells
    bsize = 10.0
    dem_on_ice = dem[valid_mask == 1]
    bins = np.arange(
        utils.nicenumber(dem_on_ice.min(), bsize, lower=True),
        utils.nicenumber(dem_on_ice.max(), bsize) + 0.01,
        bsize,
    )
    topo_digi = np.digitize(dem_on_ice, bins) - 1

    # Glacier area per elevation bin (in m2)
    midbins = []
    for b, bs in enumerate((bins[1:] + bins[:-1]) / 2):
        on_bin = topo_digi == b
        out[0]["{:04d}".format(np.round(bs).astype(int))] = (
            np.sum(on_bin) * gdir.grid.dx**2
        )
        midbins.append(np.round(bs).astype(int))

    try:
        # Average annual mass balance and climate over 2000-2019
        mb_model = massbalance.MonthlyTIModel(gdir)
        heights = np.array(midbins)
        years = np.arange(2000, 2020, dtype=int)
        mb, t, tm, pcp, pcps = 0, 0, 0, 0, 0
        for y in years:
            _mb, _t, _tm, _pcp, _pcps = mb_model.get_annual_mb(
                heights, year=y, add_climate=True
            )
            mb += _mb * cfg.SEC_IN_YEAR * mb_model.rho
            t += _t
            tm += _tm
            pcp += _pcp
            pcps += _pcps
        mb /= len(years)
        t /= len(years)
        tm /= len(years)
        pcp /= len(years)
        pcps /= len(years)
        for i, b in enumerate(heights):
            out[1]["{:04d}".format(int(b))] = mb[i]
            out[2]["{:04d}".format(int(b))] = t[i]
            out[3]["{:04d}".format(int(b))] = tm[i]
            out[4]["{:04d}".format(int(b))] = pcp[i]
            out[5]["{:04d}".format(int(b))] = pcps[i]
    except Exception:
        # Mass balance model could not be built or run: keep the areas only
        pass

    return out


@utils.global_task(log)
def compile_average_mb_statistics(gdirs, filesuffix="", dir_path=None):
    """Gather binned mass balance and climate statistics for several glaciers.

    Parameters
    ----------
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        the glacier directories to process
    filesuffix : str
        add a suffix to the output files
    dir_path : str
        Folder where to write the csv files.
        Defaults to cfg.PATHS["working_dir"].
    """
    from oggm.workflow import execute_entity_task

    out_df = execute_entity_task(binned_average_mb_statistics, gdirs)

    # One DataFrame per variable, indexed by RGI id
    area = pd.DataFrame([d[0] for d in out_df]).set_index("rgi_id")
    mb = pd.DataFrame([d[1] for d in out_df]).set_index("rgi_id")
    t = pd.DataFrame([d[2] for d in out_df]).set_index("rgi_id")
    tm = pd.DataFrame([d[3] for d in out_df]).set_index("rgi_id")
    pcp = pd.DataFrame([d[4] for d in out_df]).set_index("rgi_id")
    pcps = pd.DataFrame([d[5] for d in out_df]).set_index("rgi_id")

    # Sort the elevation-bin columns in ascending order
    area = area[sorted(area.columns)]
    mb = mb[sorted(mb.columns)]
    t = t[sorted(t.columns)]
    tm = tm[sorted(tm.columns)]
    pcp = pcp[sorted(pcp.columns)]
    pcps = pcps[sorted(pcps.columns)]

    if dir_path is None:
        dir_path = cfg.PATHS["working_dir"]

    out_file = os.path.join(dir_path, f"binned_area{filesuffix}.csv")
    area.to_csv(out_file)
    out_file = os.path.join(dir_path, f"binned_mb{filesuffix}.csv")
    mb.to_csv(out_file)
    out_file = os.path.join(dir_path, f"binned_temp{filesuffix}.csv")
    t.to_csv(out_file)
    out_file = os.path.join(dir_path, f"binned_temp_for_melt{filesuffix}.csv")
    tm.to_csv(out_file)
    out_file = os.path.join(dir_path, f"binned_prcp{filesuffix}.csv")
    pcp.to_csv(out_file)
    out_file = os.path.join(dir_path, f"binned_solid_prcp{filesuffix}.csv")
    pcps.to_csv(out_file)

    return area, mb, t, tm, pcp, pcps


def _one_frame(gdir):
    """Fallback: a minimal output dictionary if the task below fails."""
    d1 = dict()
    # Easy stats - this should always be possible
    d1["rgi_id"] = gdir.rgi_id
    return d1


@utils.entity_task(log, fallback=_one_frame)
def binned_mb_ts(gdir, outdir=None):
    """Monthly time series of binned mass balance.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    outdir : str, optional
        folder where to write one netCDF file per glacier
    """
    with xr.open_dataset(gdir.get_filepath("gridded_data")) as ds:
        dem = ds["topo"].data
        valid_mask = ds["glacier_mask"].data

    # Elevation bins of fixed size over the glacierized DEM cells
    bsize = 10.0
    dem_on_ice = dem[valid_mask == 1]
    bins = np.arange(
        utils.nicenumber(dem_on_ice.min(), bsize, lower=True),
        utils.nicenumber(dem_on_ice.max(), bsize) + 0.01,
        bsize,
    )
    topo_digi = np.digitize(dem_on_ice, bins) - 1

    # Bin mid-point elevations and glacier area per bin (in m2)
    midbins = []
    binarea = []
    for b, bs in enumerate((bins[1:] + bins[:-1]) / 2):
        on_bin = topo_digi == b
        binarea.append(np.sum(on_bin) * gdir.grid.dx**2)
        midbins.append(np.round(bs).astype(int))

    mb_model = massbalance.MonthlyTIModel(gdir)
    heights = np.array(midbins)

    # Monthly mass balance at the bin mid-points over the selected period
    odf = []
    ts = utils.monthly_timeseries(1960, 2020)[:-1]
    for t in ts:
        mb = mb_model.get_monthly_mb(heights, year=t)
        mb *= cfg.SEC_IN_MONTH * mb_model.rho
        y, m = utils.floatyear_to_date(t)
        this = {'time': f'{y:04d}-{m:02d}-01'}
        for i, b in enumerate(heights):
            this["{:04d}".format(int(b))] = mb[i]
        odf.append(this)

    odf = pd.DataFrame(odf).set_index('time')
    odf.index = pd.DatetimeIndex(odf.index)

    # Pack everything into an xarray Dataset with an elevation coordinate
    ds = xr.DataArray(odf).rename({'dim_1': 'elevation'})
    ds['elevation'] = midbins
    out = ds.to_dataset(name='massbalance')
    out['massbalance'].attrs = {'name': 'Monthly mass balance',
                                'unit': 'kg m-2 month-1'}
    out['bin_area'] = (('elevation',), binarea)
    out['bin_area'].attrs = {'name': 'Glacier bin area', 'unit': 'm2'}

    if outdir is not None:
        out.to_netcdf(os.path.join(outdir, f'{gdir.rgi_id}.nc'))
    return out
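

# Minimal usage sketch (illustrative only, not part of the module API).
# It shows how the tasks above could be chained in a standard OGGM workflow.
# The RGI id, pre-processing level, border and directory name below are
# placeholder assumptions, not values required by this module.
if __name__ == '__main__':
    cfg.initialize()
    cfg.PATHS['working_dir'] = utils.gettempdir('binned_mb_stats')

    # Any list of pre-processed glacier directories would do here
    gdirs = workflow.init_glacier_directories(
        ['RGI60-11.00897'], from_prepro_level=3, prepro_border=80)

    # Per-glacier monthly time series, one netCDF file per glacier
    workflow.execute_entity_task(binned_mb_ts, gdirs,
                                 outdir=cfg.PATHS['working_dir'])

    # Cross-glacier compilation of the binned averages to csv files
    compile_average_mb_statistics(gdirs, filesuffix='_example')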