"""Run OGGM projections for one RGI region, forced by the LMR v2.1 ensemble.

For each of the 20 LMR ensemble members this script:
  1. bias-corrects the LMR climate onto each glacier (``process_lmr_data``),
  2. runs the glacier model from year 1000 with that climate,
  3. runs a follow-up historical run initialised from the projection state,
  4. aggregates both runs into per-region NetCDF files in ``OGGM_OUTDIR``.

Required environment variables:
  OGGM_WORKDIR     -- OGGM working directory (scratch space)
  OGGM_OUTDIR      -- directory for the compiled NetCDF output
  OGGM_GLACIER_JOB -- RGI region number to process (integer)
"""

import os
import logging

# Locals
import geopandas as gpd
import pandas as pd

import oggm.cfg as cfg
from oggm import utils, workflow, tasks
from oggm.shop.gcm_climate import process_lmr_data

# Initialize OGGM and set up the default run parameters
cfg.initialize(logging_level='ERROR')
# cfg.initialize(logging_level='DEBUG')

rgi_version = '62'
cfg.PARAMS['border'] = 160

# Local working directory (where OGGM will write its output)
WORKING_DIR = os.environ.get('OGGM_WORKDIR', '')
if not WORKING_DIR:
    raise RuntimeError('Need a working dir')
utils.mkdir(WORKING_DIR)
cfg.PATHS['working_dir'] = WORKING_DIR

OUTPUT_DIR = os.environ.get('OGGM_OUTDIR', '')
if not OUTPUT_DIR:
    raise RuntimeError('Need an output dir')
utils.mkdir(OUTPUT_DIR)

# Keep going on per-glacier failures; store the diagnostics we need downstream
cfg.PARAMS['continue_on_error'] = True
cfg.PARAMS['store_diagnostic_variables'] = ['volume', 'volume_bsl',
                                           'area', 'length']
cfg.PARAMS['store_model_geometry'] = True

# Init multiprocessing pool
workflow.init_mp_pool(True)

# Explicit guard instead of letting int('') raise an opaque ValueError,
# consistent with the WORKING_DIR / OUTPUT_DIR checks above
rgi_reg_env = os.environ.get('OGGM_GLACIER_JOB', '')
if not rgi_reg_env:
    raise RuntimeError('Need an RGI region (OGGM_GLACIER_JOB)')
rgi_reg = int(rgi_reg_env)

# Module logger
log = logging.getLogger(__name__)
log.workflow('Starting run for RGI reg {}'.format(rgi_reg))

# RGI glaciers (use the single rgi_version constant, not a second literal)
rgi_ids = gpd.read_file(utils.get_rgi_region_file(rgi_reg,
                                                  version=rgi_version))
# rgi_ids = rgi_ids.iloc[:32]

# Sort for more efficient mp
# rgi_ids = rgi_ids.sort_values(by='Area', ascending=False)

# Go - get the pre-processed glacier directories
base_url = ('https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/'
            'L3-L5_files/2023.3/elev_bands/W5E5_spinup/')
gdirs = workflow.init_glacier_directories(rgi_ids, from_prepro_level=5,
                                          prepro_base_url=base_url,
                                          prepro_rgi_version=rgi_version)

# NOTE(review): loaded but not used below -- verify whether downstream
# code (or a part of the file not shown here) needs it before removing
gcms = pd.read_csv('/home/www/oggm/cmip6/all_gcm_list.csv', index_col=0)

for ensm in range(0, 20):
    rid = f'lmr_v21_ens{ensm}'

    log.workflow(f'Bias correcting {rid}')
    workflow.execute_entity_task(process_lmr_data, gdirs,
                                 ensemble_member=ensm,
                                 filesuffix='_' + rid)

    log.workflow(f'Running {rid}')
    # Projection run from year 1000 driven by the bias-corrected LMR climate
    workflow.execute_entity_task(tasks.run_from_climate_data, gdirs,
                                 # use gcm_data, not climate_historical
                                 climate_filename='gcm_data',
                                 climate_input_filesuffix='_' + rid,
                                 # recognize the run for later
                                 output_filesuffix=rid,
                                 return_value=False,
                                 ys=1000)
    # Historical run initialised from the final state of the run above
    workflow.execute_entity_task(tasks.run_from_climate_data, gdirs,
                                 # recognize the run for later
                                 output_filesuffix=rid + '_hist',
                                 return_value=False,
                                 init_model_filesuffix=rid)

    log.workflow(f'Aggregating {rid}')
    out_nc_path = os.path.join(OUTPUT_DIR, rid + f'_reg{rgi_reg}.nc')
    utils.compile_run_output(gdirs, input_filesuffix=rid, path=out_nc_path)
    out_nc_path = os.path.join(OUTPUT_DIR,
                               rid + '_hist' + f'_reg{rgi_reg}.nc')
    utils.compile_run_output(gdirs, input_filesuffix=rid + '_hist',
                             path=out_nc_path)