import logging

# External libs
import xarray as xr

# Optional libs
try:
    import salem
except ImportError:
    pass

# Locals
from oggm import utils
from oggm import entity_task

# Module logger
log = logging.getLogger(__name__)

# Base URL of the (pre-unstacked) LMR 2025 monthly climate data
LMR_SERVER = 'https://cluster.klima.uni-bremen.de/~fmaussion/runs/lmr_2025/data/unstacked/'


@entity_task(log, writes=['climate_historical'])
def process_mira_data(gdir, output_filesuffix=None, nat_forcing=False):
    """Processes and writes the baseline climate data for this glacier.

    Extracts the nearest timeseries and writes everything to a NetCDF file.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    output_filesuffix : str
        this add a suffix to the output file (useful to avoid overwriting
        previous experiments)
    nat_forcing : bool
        if True, use the 'tp_nat' temperature file instead of the default
        'tp' one (presumably a natural-forcing-only run - the precipitation
        and elevation files are the same in both cases)
    """
    # Glacier centroid coordinates, used for nearest-neighbor selection below
    lon = gdir.cenlon
    lat = gdir.cenlat

    # Files (downloaded to the local cache on first access)
    if nat_forcing:
        f_temp = utils.file_downloader(LMR_SERVER + 'tp_nat_monthly.nc')
    else:
        f_temp = utils.file_downloader(LMR_SERVER + 'tp_monthly.nc')
    f_pcp = utils.file_downloader(LMR_SERVER + 'p_monthly.nc')
    f_h = utils.file_downloader(LMR_SERVER + 'elev.nc')

    # Temperature: also defines the reference period (y0-y1), the output
    # time axis and the reference grid-point coordinates
    with xr.open_dataset(f_temp, use_cftime=True) as ds:
        yrs = ds['time.year'].data
        y0 = yrs[0]
        y1 = yrs[-1]
        ds = ds.sel(lon=lon, lat=lat, method='nearest')
        # Kelvin to degrees Celsius
        temp = ds['tas'].data - 273.15
        time = ds.time.data
        ref_lon = float(ds['lon'])
        # Normalize longitude from [0, 360) to [-180, 180]
        ref_lon = ref_lon - 360 if ref_lon > 180 else ref_lon
        ref_lat = float(ds['lat'])

    # Precipitation: restricted to the same period as the temperature file
    with xr.open_dataset(f_pcp, use_cftime=True) as ds:
        ds = ds.sel(time=slice(f'{y0:04d}-01-01', f'{y1:04d}-12-01'))
        ds = ds.sel(lon=lon, lat=lat, method='nearest')
        # 'kg m-2 s-1' to 'kg m-2 month-1'
        prcp = ds['pr'].data * ds['time.daysinmonth'] * 24 * 60 * 60

    # Reference elevation of the selected grid point
    with xr.open_dataset(f_h) as ds:
        ds = ds.sel(lon=lon, lat=lat, method='nearest')
        hgt = ds['elevation'].data

    # OK, ready to write
    gdir.write_monthly_climate_file(time, prcp, temp, hgt,
                                    ref_lon, ref_lat,
                                    filesuffix=output_filesuffix,
                                    source='lmr_mira')