{ "cells": [ { "cell_type": "markdown", "id": "93c12cdd", "metadata": {}, "source": [ "# Example workflow to compare different climates for one glacier" ] }, { "cell_type": "code", "execution_count": 1, "id": "0b5a59c1", "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "2023-04-08 16:52:52: oggm.cfg: Reading default parameters from the OGGM `params.cfg` configuration file.\n", "2023-04-08 16:52:52: oggm.cfg: Multiprocessing switched OFF according to the parameter file.\n", "2023-04-08 16:52:52: oggm.cfg: Multiprocessing: using all available processors (N=32)\n", "2023-04-08 16:52:52: oggm.cfg: PARAMS['hydro_month_nh'] changed from `10` to `1`.\n", "2023-04-08 16:52:52: oggm.cfg: PARAMS['hydro_month_sh'] changed from `4` to `1`.\n" ] } ], "source": [ "import numpy as np\n", "import pandas as pd\n", "import xarray as xr\n", "import matplotlib.pyplot as plt\n", "import matplotlib\n", "\n", "import oggm\n", "from oggm import cfg, utils, workflow, tasks\n", "from oggm.core import massbalance, flowline, climate\n", "\n", "# just for the graphics\n", "SMALL_SIZE = 22\n", "MEDIUM_SIZE = 24\n", "BIGGER_SIZE = 26\n", "\n", "plt.rc('font', size=SMALL_SIZE) # controls default text sizes\n", "plt.rc('axes', titlesize=SMALL_SIZE) # fontsize of the axes title\n", "plt.rc('axes', labelsize=MEDIUM_SIZE) # fontsize of the x and y labels\n", "plt.rc('xtick', labelsize=SMALL_SIZE) # fontsize of the tick labels\n", "plt.rc('ytick', labelsize=SMALL_SIZE) # fontsize of the tick labels\n", "plt.rc('legend', fontsize=SMALL_SIZE) # legend fontsize\n", "plt.rc('figure', titlesize=BIGGER_SIZE) # fontsize of the figure title\n", "\n", "cfg.initialize()\n", "\n", "\n", "working_dir = utils.gettempdir(dirname='clim', reset=False)\n", "\n", "cfg.PATHS['working_dir'] = working_dir\n", "# use elevation band flowlines\n", "base_url = ('https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.4/'\n", " 'L1-L2_files/elev_bands')\n", "\n", "# let's look at calendar 
years!!!\n", "cfg.PARAMS['hydro_month_nh'] = 1\n", "cfg.PARAMS['hydro_month_sh'] = 1\n" ] }, { "cell_type": "code", "execution_count": 2, "id": "1de81978", "metadata": {}, "outputs": [], "source": [ "#df = ['RGI60-11.01450'] \n", "# Let's take Artesonraju\n", "df = ['RGI60-16.02444']" ] }, { "cell_type": "code", "execution_count": 5, "id": "239ddc41", "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "2023-04-08 16:53:37: oggm.workflow: init_glacier_directories from prepro level 2 on 1 glaciers.\n", "2023-04-08 16:53:37: oggm.workflow: Execute entity tasks [gdir_from_prepro] on 1 glaciers\n" ] } ], "source": [ "# preprocessing \n", "gdirs = workflow.init_glacier_directories(df, from_prepro_level=2,\n", " prepro_border=10,\n", " prepro_base_url=base_url,\n", " prepro_rgi_version='62')\n", "#workflow.execute_entity_task(tasks.compute_downstream_line, gdirs)\n", "#workflow.execute_entity_task(tasks.compute_downstream_bedshape, gdirs)" ] }, { "cell_type": "code", "execution_count": 9, "id": "38ad49a6-258b-447b-9053-2bdd01a48739", "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "2023-04-08 16:55:02: oggm.workflow: Execute entity tasks [process_cru_data] on 1 glaciers\n", "2023-04-08 16:55:02: oggm.shop.cru: (RGI60-16.02444) process_cru_data\n", "2023-04-08 16:55:03: oggm.cfg: PARAMS['baseline_climate'] changed from `CRU` to `ERA5`.\n", "2023-04-08 16:55:03: oggm.workflow: Execute entity tasks [process_ecmwf_data] on 1 glaciers\n", "2023-04-08 16:55:03: oggm.shop.ecmwf: (RGI60-16.02444) process_ecmwf_data_ERA5\n" ] } ], "source": [ "# process cru and ERA5 data and get the climate data from the nearest gridpoint\n", "from oggm.shop.cru import process_cru_data\n", "cfg.PARAMS['baseline_climate'] = 'CRU'\n", "workflow.execute_entity_task(process_cru_data, gdirs)\n", "ds_cru = xr.open_dataset(gdirs[0].get_filepath('climate_historical'))\n", "from oggm.shop.ecmwf import process_ecmwf_data\n", 
"cfg.PARAMS['baseline_climate'] = 'ERA5'\n", "# I gave that one a filesuffix to not overwrite the CRU dataset (they are now both in the gdirs folder !!!)\n", "workflow.execute_entity_task(process_ecmwf_data, gdirs, dataset='ERA5', output_filesuffix = '_ERA5')\n", "ds_era5 = xr.open_dataset(gdirs[0].get_filepath('climate_historical', filesuffix='_ERA5'))" ] }, { "cell_type": "markdown", "id": "69887b20", "metadata": {}, "source": [ "`ref_hgt` is the mean altitude of the closest gridpoint from the applied climate dataset. The climate files in the gdir directory hence only represent the climate at the nearest gridpoint (not already downscaled to the glacier).\n", "\n", "Let's check it out for CRU and ERA5" ] }, { "cell_type": "code", "execution_count": null, "id": "a4f43474-1f0b-4143-b5dd-449cee02eff2", "metadata": {}, "outputs": [], "source": [ "gdirs[0]" ] }, { "cell_type": "code", "execution_count": 10, "id": "b0462d9d", "metadata": {}, "outputs": [ { "data": { "text/plain": [ "3805.6808868216976" ] }, "execution_count": 10, "metadata": {}, "output_type": "execute_result" } ], "source": [ "ds_era5.ref_hgt" ] }, { "cell_type": "code", "execution_count": 11, "id": "b2a372db", "metadata": {}, "outputs": [ { "data": { "text/plain": [ "4553.0" ] }, "execution_count": 11, "metadata": {}, "output_type": "execute_result" } ], "source": [ "ds_cru.ref_hgt" ] }, { "cell_type": "markdown", "id": "d8bdad99", "metadata": {}, "source": [ "Ok, we extracted the climate from the nearest gridpoint but they are on different altitudes. 
If we want to compare them, we need to downscale them to the same altitude.\n", "\n", "Here we just use the AWS station height of Artesonraju, but it could also be the height gridpoints of the glacier:\n", "-> if you just want to compare the two datasets without having additional AWS station data, you can just use the \n", " highest gridpoint of the glacier for testing:\n", " \n", " `h= gdirs[0].get_inversion_flowline_hw()[0][0]`" ] }, { "cell_type": "code", "execution_count": 13, "id": "9194e01a", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "-0.0065\n" ] } ], "source": [ "# default temp. lapse rate: \n", "print(cfg.PARAMS['temp_default_gradient']) # K per metre\n", "\n", "# station height\n", "h = 4797\n", "# just use the highest gridpoint of the glacier for testing\n", "#h = gdirs[0].get_inversion_flowline_hw()[0][0]\n", "#print(h)" ] }, { "cell_type": "code", "execution_count": 19, "id": "f5d3435b-a8d3-4182-a355-a8460e2b2123", "metadata": {}, "outputs": [ { "data": { "text/html": [ "
<xarray.Dataset>\n",
"Dimensions: (time: 1428)\n",
"Coordinates:\n",
" * time (time) datetime64[ns] 1901-01-01 1901-02-01 ... 2019-12-01\n",
"Data variables:\n",
" prcp (time) float32 156.8 174.8 160.6 95.86 ... 39.48 93.93 100.2 97.48\n",
" temp (time) float32 5.126 4.764 5.062 4.933 ... 5.478 5.521 5.999 5.976\n",
"Attributes:\n",
" ref_hgt: 4553.0\n",
" ref_pix_lon: -77.58333333333333\n",
" ref_pix_lat: -8.916666666666657\n",
" ref_pix_dis: 7078.7544712035115\n",
" climate_source: CRU TS4.04\n",
" yr_0: 1901\n",
" yr_1: 2019\n",
" author: OGGM\n",
" author_info: Open Global Glacier Model<xarray.Dataset>\n",
"Dimensions: (month: 12)\n",
"Coordinates:\n",
" * month (month) int64 1 2 3 4 5 6 7 8 9 10 11 12\n",
"Data variables:\n",
" prcp (month) float32 130.0 239.9 152.4 97.79 ... 34.26 81.84 101.0 93.73\n",
" temp (month) float32 5.578 5.141 5.494 5.251 ... 5.229 5.493 5.648 5.599<xarray.Dataset>\n",
"Dimensions: (time: 120)\n",
"Coordinates:\n",
" lon float32 -77.75\n",
" lat float32 -8.75\n",
" * time (time) datetime64[ns] 2011-01-16 2011-02-15 ... 2020-12-16\n",
"Data variables:\n",
" tmp (time) float32 8.7 8.8 8.1 9.2 9.9 9.9 ... 9.6 9.5 9.2 10.1 9.3 9.1\n",
" stn (time) float64 8.0 8.0 8.0 8.0 8.0 8.0 ... 2.0 2.0 2.0 2.0 2.0 2.0\n",
"Attributes:\n",
" Conventions: CF-1.4\n",
" title: CRU TS4.05 Mean Temperature\n",
" institution: Data held at British Atmospheric Data Centre, RAL, UK.\n",
" source: Run ID = 2103051243. Data generated from:tmp.2103041709.dtb\n",
" history: Fri 5 Mar 13:25:53 GMT 2021 : User harry : Program makegri...\n",
" references: Information on the data is available at http://badc.nerc.ac...\n",
" comment: Access to these data is available to any registered CEDA user.\n",
" contact: support@ceda.ac.ukarray(-77.75, dtype=float32)
array(-8.75, dtype=float32)
array(['2011-01-16T00:00:00.000000000', '2011-02-15T00:00:00.000000000',\n",
" '2011-03-16T00:00:00.000000000', '2011-04-16T00:00:00.000000000',\n",
" '2011-05-16T00:00:00.000000000', '2011-06-16T00:00:00.000000000',\n",
" '2011-07-16T00:00:00.000000000', '2011-08-16T00:00:00.000000000',\n",
" '2011-09-16T00:00:00.000000000', '2011-10-16T00:00:00.000000000',\n",
" '2011-11-16T00:00:00.000000000', '2011-12-16T00:00:00.000000000',\n",
" '2012-01-16T00:00:00.000000000', '2012-02-15T00:00:00.000000000',\n",
" '2012-03-16T00:00:00.000000000', '2012-04-16T00:00:00.000000000',\n",
" '2012-05-16T00:00:00.000000000', '2012-06-16T00:00:00.000000000',\n",
" '2012-07-16T00:00:00.000000000', '2012-08-16T00:00:00.000000000',\n",
" '2012-09-16T00:00:00.000000000', '2012-10-16T00:00:00.000000000',\n",
" '2012-11-16T00:00:00.000000000', '2012-12-16T00:00:00.000000000',\n",
" '2013-01-16T00:00:00.000000000', '2013-02-15T00:00:00.000000000',\n",
" '2013-03-16T00:00:00.000000000', '2013-04-16T00:00:00.000000000',\n",
" '2013-05-16T00:00:00.000000000', '2013-06-16T00:00:00.000000000',\n",
" '2013-07-16T00:00:00.000000000', '2013-08-16T00:00:00.000000000',\n",
" '2013-09-16T00:00:00.000000000', '2013-10-16T00:00:00.000000000',\n",
" '2013-11-16T00:00:00.000000000', '2013-12-16T00:00:00.000000000',\n",
" '2014-01-16T00:00:00.000000000', '2014-02-15T00:00:00.000000000',\n",
" '2014-03-16T00:00:00.000000000', '2014-04-16T00:00:00.000000000',\n",
" '2014-05-16T00:00:00.000000000', '2014-06-16T00:00:00.000000000',\n",
" '2014-07-16T00:00:00.000000000', '2014-08-16T00:00:00.000000000',\n",
" '2014-09-16T00:00:00.000000000', '2014-10-16T00:00:00.000000000',\n",
" '2014-11-16T00:00:00.000000000', '2014-12-16T00:00:00.000000000',\n",
" '2015-01-16T00:00:00.000000000', '2015-02-15T00:00:00.000000000',\n",
" '2015-03-16T00:00:00.000000000', '2015-04-16T00:00:00.000000000',\n",
" '2015-05-16T00:00:00.000000000', '2015-06-16T00:00:00.000000000',\n",
" '2015-07-16T00:00:00.000000000', '2015-08-16T00:00:00.000000000',\n",
" '2015-09-16T00:00:00.000000000', '2015-10-16T00:00:00.000000000',\n",
" '2015-11-16T00:00:00.000000000', '2015-12-16T00:00:00.000000000',\n",
" '2016-01-16T00:00:00.000000000', '2016-02-15T00:00:00.000000000',\n",
" '2016-03-16T00:00:00.000000000', '2016-04-16T00:00:00.000000000',\n",
" '2016-05-16T00:00:00.000000000', '2016-06-16T00:00:00.000000000',\n",
" '2016-07-16T00:00:00.000000000', '2016-08-16T00:00:00.000000000',\n",
" '2016-09-16T00:00:00.000000000', '2016-10-16T00:00:00.000000000',\n",
" '2016-11-16T00:00:00.000000000', '2016-12-16T00:00:00.000000000',\n",
" '2017-01-16T00:00:00.000000000', '2017-02-15T00:00:00.000000000',\n",
" '2017-03-16T00:00:00.000000000', '2017-04-16T00:00:00.000000000',\n",
" '2017-05-16T00:00:00.000000000', '2017-06-16T00:00:00.000000000',\n",
" '2017-07-16T00:00:00.000000000', '2017-08-16T00:00:00.000000000',\n",
" '2017-09-16T00:00:00.000000000', '2017-10-16T00:00:00.000000000',\n",
" '2017-11-16T00:00:00.000000000', '2017-12-16T00:00:00.000000000',\n",
" '2018-01-16T00:00:00.000000000', '2018-02-15T00:00:00.000000000',\n",
" '2018-03-16T00:00:00.000000000', '2018-04-16T00:00:00.000000000',\n",
" '2018-05-16T00:00:00.000000000', '2018-06-16T00:00:00.000000000',\n",
" '2018-07-16T00:00:00.000000000', '2018-08-16T00:00:00.000000000',\n",
" '2018-09-16T00:00:00.000000000', '2018-10-16T00:00:00.000000000',\n",
" '2018-11-16T00:00:00.000000000', '2018-12-16T00:00:00.000000000',\n",
" '2019-01-16T00:00:00.000000000', '2019-02-15T00:00:00.000000000',\n",
" '2019-03-16T00:00:00.000000000', '2019-04-16T00:00:00.000000000',\n",
" '2019-05-16T00:00:00.000000000', '2019-06-16T00:00:00.000000000',\n",
" '2019-07-16T00:00:00.000000000', '2019-08-16T00:00:00.000000000',\n",
" '2019-09-16T00:00:00.000000000', '2019-10-16T00:00:00.000000000',\n",
" '2019-11-16T00:00:00.000000000', '2019-12-16T00:00:00.000000000',\n",
" '2020-01-16T00:00:00.000000000', '2020-02-15T00:00:00.000000000',\n",
" '2020-03-16T00:00:00.000000000', '2020-04-16T00:00:00.000000000',\n",
" '2020-05-16T00:00:00.000000000', '2020-06-16T00:00:00.000000000',\n",
" '2020-07-16T00:00:00.000000000', '2020-08-16T00:00:00.000000000',\n",
" '2020-09-16T00:00:00.000000000', '2020-10-16T00:00:00.000000000',\n",
" '2020-11-16T00:00:00.000000000', '2020-12-16T00:00:00.000000000'],\n",
" dtype='datetime64[ns]')array([ 8.7 , 8.8 , 8.1 , 9.2 , 9.900001, 9.900001,\n",
" 9.3 , 9.3 , 8.6 , 9.3 , 9.3 , 8.7 ,\n",
" 8.900001, 8.3 , 9. , 9.7 , 10.3 , 10.2 ,\n",
" 10. , 9.8 , 9.5 , 10. , 9.7 , 9.1 ,\n",
" 9.2 , 9.1 , 8.7 , 8. , 9.1 , 8.1 ,\n",
" 8. , 8.6 , 8.900001, 9.400001, 8.900001, 9.400001,\n",
" 9.6 , 8.7 , 8.7 , 8.5 , 10.7 , 10.5 ,\n",
" 9.1 , 9.3 , 9.2 , 9.900001, 9.5 , 9. ,\n",
" 8.900001, 9.3 , 9.3 , 9.3 , 11. , 10.7 ,\n",
" 10.1 , 10.2 , 10.8 , 11.2 , 10.5 , 10.2 ,\n",
" 10.6 , 10.2 , 9.8 , 9.6 , 10.1 , 9.2 ,\n",
" 9.7 , 10.3 , 9.900001, 10.7 , 10. , 9.6 ,\n",
" 10.7 , 10.2 , 11. , 10.3 , 10.6 , 9.6 ,\n",
" 9. , 9.3 , 9. , 10.3 , 9.6 , 9.1 ,\n",
" 9. , 8.900001, 8.900001, 9.1 , 9.900001, 8.6 ,\n",
" 8.8 , 9.1 , 9.3 , 9.6 , 9.3 , 9.2 ,\n",
" 9.1 , 9.3 , 9. , 9.1 , 10. , 9. ,\n",
" 9.2 , 9.7 , 9.400001, 10. , 9.900001, 9.7 ,\n",
" 9.6 , 9.6 , 9. , 9. , 9.8 , 9.3 ,\n",
" 9.6 , 9.5 , 9.2 , 10.1 , 9.3 , 9.1 ],\n",
" dtype=float32)array([8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 4., 8., 8., 8., 8.,\n",
" 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8.,\n",
" 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8., 8.,\n",
" 8., 8., 8., 8., 8., 8., 8., 8., 6., 8., 8., 8., 8., 4., 3., 2., 8., 3.,\n",
" 2., 5., 3., 3., 4., 4., 3., 2., 1., 3., 3., 2., 2., 3., 2., 2., 2., 1.,\n",
" 1., 3., 2., 0., 1., 1., 2., 4., 2., 2., 3., 8., 2., 2., 2., 2., 2., 2.,\n",
" 2., 3., 1., 1., 2., 2., 2., 2., 2., 2., 2., 2.])