from oggm import cfg
from oggm import tasks, utils, workflow, graphics
from oggm.core import massbalance

import numpy as np
import pandas as pd
import geopandas as gpd
import xarray as xr
import matplotlib.pyplot as plt

import os
import logging
import shutil

# Local working directory (where OGGM will write its output)
WORKING_DIR = os.environ.get('OGGM_WORKDIR', '')
if not WORKING_DIR:
    raise RuntimeError('Need a working dir')
WORKING_DIR = utils.mkdir(WORKING_DIR)

OUTPUT_DIR = os.environ.get('OGGM_OUTDIR', '')
if not OUTPUT_DIR:
    raise RuntimeError('Need an output dir')
OUTPUT_DIR = utils.mkdir(OUTPUT_DIR)

rgi_reg = os.environ.get('OGGM_RGI_REG', '')
if rgi_reg not in ['{:02d}'.format(r) for r in range(1, 20)]:
    raise RuntimeError('Need an RGI Region')

OUTPUT_DIR = os.path.join(OUTPUT_DIR, f'RGI{rgi_reg}')
utils.mkdir(OUTPUT_DIR)

# Initialize OGGM and set up the default run parameters
cfg.initialize(logging_level='INFO')
cfg.PARAMS['use_multiprocessing'] = True
cfg.PARAMS['continue_on_error'] = True

# Local working directory (where OGGM will write its output)
cfg.PATHS['working_dir'] = utils.mkdir(WORKING_DIR)

# Module logger
log = logging.getLogger(__name__)
log.workflow('Starting run for RGI reg {}'.format(rgi_reg))
utils.show_versions(logger=log)

# RGI glaciers
rgi_ids = gpd.read_file(utils.get_rgi_region_file(rgi_reg, version='62'))
# rgi_ids = rgi_ids.sample(64, random_state=1)
# rgi_ids = ['RGI60-11.03466']  # This is Talefre

# For Greenland we omit connectivity level 2
if rgi_reg == '05':
    rgi_ids = rgi_ids.loc[rgi_ids['Connect'] != 2]

# This is the URL with minimal data
base_url = 'https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/L3-L5_files/2023.3/elev_bands/W5E5/'

# This gets the data for these glaciers
gdirs = workflow.init_glacier_directories(rgi_ids, prepro_base_url=base_url,
                                          from_prepro_level=3, prepro_border=80)

# Run the snowslide tasks
from snowslide import oggm_snowslide_compat

workflow.execute_entity_task(oggm_snowslide_compat.snowslide_to_gdir, gdirs)

oggm_snowslide_compat.compile_snowslide_statistics(gdirs, dir_path=OUTPUT_DIR)
oggm_snowslide_compat.compile_binned_statistics(gdirs, dir_path=OUTPUT_DIR)

# Timing diagnostics
from oggm import global_tasks
global_tasks.compile_task_time(gdirs,
                               path=os.path.join(OUTPUT_DIR, 'snowslide_time_statistics.csv'),
                               task_names=['snowslide_to_gdir',
                                           'snowslide_statistics',
                                           'snowslide_binned_statistics'])

# Then tar the gdirs and bundle
# workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=True)
# utils.base_dir_to_tar(delete=True)

# Copy the outcome in a new directory: scratch folder, new machine, etc.
# shutil.copytree(os.path.join(WORKING_DIR, 'per_glacier'),
#                 os.path.join(OUTPUT_DIR, 'gdirs'))
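
# Usage sketch (not part of the original script, filename is hypothetical):
# the three environment variables below are the ones read at the top of this
# file; the region code must be zero-padded to two digits ('01'-'19').
#
#   OGGM_WORKDIR=/path/to/workdir \
#   OGGM_OUTDIR=/path/to/output \
#   OGGM_RGI_REG=11 \
#   python run_snowslide_rgi_region.py
#
# With these settings the compiled timing table is written to
# <OGGM_OUTDIR>/RGI11/snowslide_time_statistics.csv, in the same directory as
# the outputs of compile_snowslide_statistics and compile_binned_statistics.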