Global warming is expected to lead to a large increase in atmospheric water vapor content and to changes in the hydrological cycle, which include an intensification of precipitation extremes. The intensity of precipitation extremes is widely held to increase proportionately to the increase in atmospheric water vapor content.
O'Gorman & Schneider (2009), using CMIP3 models, and O'Gorman (2015), using CMIP5 models, showed that precipitation extremes scale with temperature in GCMs.
Westra et al. (2013a), among others, have confirmed from long‐term records that the median intensity of observed annual maximum daily rainfall is increasing at a rate of 5.9% to 7.7% per °C of globally averaged near‐surface atmospheric temperature, but with significant variation by latitude.
import intake
import xarray as xr
from matplotlib import pyplot as plt
import numpy as np
import xgcm
%matplotlib inline
plt.rcParams['figure.figsize'] = 12, 6
from dask.distributed import Client, progress
from dask_kubernetes import KubeCluster
cluster = KubeCluster(n_workers=20)
client = Client(cluster)
cluster
cat_url = 'https://raw.githubusercontent.com/pangeo-data/pangeo-datastore/master/intake-catalogs/master.yaml'
master_cat = intake.Catalog(cat_url)
list(master_cat)
A3hr_cat = master_cat.cmip6.A3hr.get()
list(A3hr_cat)
#model = 'GISS_E2_1_G.historical.r1i1p1f1'
model = 'IPSL_CM6A_LR.historical.r1i1p1f1'
ds_pr = A3hr_cat[f'{model}.pr'].to_dask()
ds_pr
ds_tas = A3hr_cat[f'{model}.tas'].to_dask()
ds_tas
These two datasets are offset by 1.5 hours, which makes it impossible to compute joint statistics directly. We can use xgcm to fix this: below we interpolate the tas values to the pr times.
ds_pr = ds_pr.rename({'time': 'time_center'}).reset_coords(drop=True)
ds_tas = ds_tas.rename({'time': 'time_right'}).reset_coords(drop=True)
ds = xr.merge([ds_pr, ds_tas])
grid = xgcm.Grid(ds, periodic=False,
coords={'T': {'center': 'time_center', 'right': 'time_right'}})
tas_interp = grid.interp(ds.tas, 'T', boundary='extend').chunk({'time_center': 900})
ds = ds.drop('time_right')
ds['tas'] = tas_interp
ds = ds.rename({'time_center': 'time'})
ds
fig, axs = plt.subplots(ncols=2, figsize=(12, 4))
ds.tas[0].plot(ax=axs[0])
ds.pr[0].plot(ax=axs[1]);
t_bins = np.arange(270,315,2)
pr_bins = np.logspace(-7, -2, 200)
t_center = 0.5*(t_bins[:-1] + t_bins[1:])
pr_center = 0.5*(pr_bins[:-1] + pr_bins[1:])
def wrapped_hist2d(a, b):
h2d, _, _ = np.histogram2d(a.ravel(), b.ravel(),
bins=[pr_bins, t_bins])
return h2d[None, :, :]
#region = dict(lat=slice(-20, 20)) # tropics
region = dict(lat=slice(30, 50), lon=slice(230, 300)) # USA
ds_reg = ds.sel(**region)
import dask.array as dsa
h2d_full = dsa.map_blocks(wrapped_hist2d, ds_reg.pr.data, ds_reg.tas.data,
dtype='i8',
chunks=(1, len(pr_center), len(t_center)))
h2d_full
h2d = xr.DataArray(h2d_full.sum(axis=0).compute(),
dims=['pr_bin', 'tas_bin'],
coords={'pr_bin': pr_center,
'tas_bin': t_center})
h2d.coords['pr_bin'].attrs.update(ds.pr.attrs)
h2d.coords['tas_bin'].attrs.update(ds.tas.attrs)
from matplotlib.colors import LogNorm
h2d.where(h2d>0).plot(yscale='log', norm=LogNorm())
plt.title('Joint Probability Distribution');
h2d_normed = h2d / h2d.sum(dim='pr_bin')
h2d_cum = h2d_normed.cumsum(dim='pr_bin')
fig, ax = plt.subplots(figsize=(12, 7))
h2d_cum.plot.contourf(yscale='log', levels=np.arange(0,1,0.1), cmap='Blues_r')
cs = h2d_cum.plot.contour(yscale='log', levels=[0.7, 0.8, 0.9, 0.99, 0.999], colors='k')
ax.clabel(cs)
for scale in np.arange(-16, -10, 0.3):
pr_cc = np.exp(0.068*t_bins)*10**scale
plt.plot(t_bins, pr_cc, color='m', linestyle='--', linewidth=1)
ax.set_xlim(285, 310)
ax.set_ylim(pr_bins[0], pr_bins[-1]);