Timestamps in Field construction

from parcels import Field
from glob import glob
import numpy as np
Some NetCDF files, such as those from the World Ocean Atlas, have time calendars that can't be parsed by xarray. These result in a ValueError: unable to decode time units, for example when the calendar counts in 'months since' a particular date.
In these cases, a workaround in Parcels is to use the timestamps
argument in Field
(or FieldSet
) creation. Here, we show how this works for example temperature data from the World Ocean Atlas in the Pacific Ocean
The following cell raises an error, since the calendar of the World Ocean Atlas data is in "months since 1955-01-01 00:00:00"
# Attempt to load the WOA temperature field directly. This raises a RuntimeError
# (see traceback below) because the files' 'months since 1955-01-01 00:00:00'
# time units cannot be decoded by xarray's default CF calendar handling.
tempfield = Field.from_netcdf(glob('WOA_data/woa18_decav_*_04.nc'), 't_an',
{'lon': 'lon', 'lat': 'lat', 'time': 'time'})
WARNING: File WOA_data/woa18_decav_t10_04.nc could not be decoded properly by xarray (version 0.20.1). It will be opened with no decoding. Filling values might be wrongly parsed.
--------------------------------------------------------------------------- KeyError Traceback (most recent call last) File /opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/coding/times.py:240, in decode_cf_datetime(num_dates, units, calendar, use_cftime) 239 try: --> 240 dates = _decode_datetime_with_pandas(flat_num_dates, units, calendar) 241 except (KeyError, OutOfBoundsDatetime, OverflowError): File /opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/coding/times.py:186, in _decode_datetime_with_pandas(flat_num_dates, units, calendar) 185 delta, ref_date = _unpack_netcdf_time_units(units) --> 186 delta = _netcdf_to_numpy_timeunit(delta) 187 try: File /opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/coding/times.py:83, in _netcdf_to_numpy_timeunit(units) 82 units = f"{units}s" ---> 83 return { 84 "nanoseconds": "ns", 85 "microseconds": "us", 86 "milliseconds": "ms", 87 "seconds": "s", 88 "minutes": "m", 89 "hours": "h", 90 "days": "D", 91 }[units] KeyError: 'months' During handling of the above exception, another exception occurred: ValueError Traceback (most recent call last) File /opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/coding/times.py:153, in _decode_cf_datetime_dtype(data, units, calendar, use_cftime) 152 try: --> 153 result = decode_cf_datetime(example_value, units, calendar, use_cftime) 154 except Exception: File /opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/coding/times.py:242, in decode_cf_datetime(num_dates, units, calendar, use_cftime) 241 except (KeyError, OutOfBoundsDatetime, OverflowError): --> 242 dates = _decode_datetime_with_cftime( 243 flat_num_dates.astype(float), units, calendar 244 ) 246 if ( 247 dates[np.nanargmin(num_dates)].year < 1678 248 or dates[np.nanargmax(num_dates)].year >= 2262 249 ): File /opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/coding/times.py:174, in _decode_datetime_with_cftime(num_dates, units, calendar) 
172 raise ModuleNotFoundError("No module named 'cftime'") 173 return np.asarray( --> 174 cftime.num2date(num_dates, units, calendar, only_use_cftime_datetimes=True) 175 ) File src/cftime/_cftime.pyx:498, in cftime._cftime.num2date() File src/cftime/_cftime.pyx:99, in cftime._cftime._dateparse() ValueError: 'months since' units only allowed for '360_day' calendar During handling of the above exception, another exception occurred: ValueError Traceback (most recent call last) File ~/Codes/parcels/parcels/tools/converters.py:233, in convert_xarray_time_units(ds, time) 232 try: --> 233 da2 = xr.decode_cf(da2) 234 except ValueError: File /opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/conventions.py:646, in decode_cf(obj, concat_characters, mask_and_scale, decode_times, decode_coords, drop_variables, use_cftime, decode_timedelta) 644 raise TypeError("can only decode Dataset or DataStore objects") --> 646 vars, attrs, coord_names = decode_cf_variables( 647 vars, 648 attrs, 649 concat_characters, 650 mask_and_scale, 651 decode_times, 652 decode_coords, 653 drop_variables=drop_variables, 654 use_cftime=use_cftime, 655 decode_timedelta=decode_timedelta, 656 ) 657 ds = Dataset(vars, attrs=attrs) File /opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/conventions.py:512, in decode_cf_variables(variables, attributes, concat_characters, mask_and_scale, decode_times, decode_coords, drop_variables, use_cftime, decode_timedelta) 506 stack_char_dim = ( 507 concat_characters 508 and v.dtype == "S1" 509 and v.ndim > 0 510 and stackable(v.dims[-1]) 511 ) --> 512 new_vars[k] = decode_cf_variable( 513 k, 514 v, 515 concat_characters=concat_characters, 516 mask_and_scale=mask_and_scale, 517 decode_times=decode_times, 518 stack_char_dim=stack_char_dim, 519 use_cftime=use_cftime, 520 decode_timedelta=decode_timedelta, 521 ) 522 if decode_coords in [True, "coordinates", "all"]: File 
/opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/conventions.py:360, in decode_cf_variable(name, var, concat_characters, mask_and_scale, decode_times, decode_endianness, stack_char_dim, use_cftime, decode_timedelta) 359 if decode_times: --> 360 var = times.CFDatetimeCoder(use_cftime=use_cftime).decode(var, name=name) 362 dimensions, data, attributes, encoding = variables.unpack_for_decoding(var) File /opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/coding/times.py:526, in CFDatetimeCoder.decode(self, variable, name) 525 calendar = pop_to(attrs, encoding, "calendar") --> 526 dtype = _decode_cf_datetime_dtype(data, units, calendar, self.use_cftime) 527 transform = partial( 528 decode_cf_datetime, 529 units=units, 530 calendar=calendar, 531 use_cftime=self.use_cftime, 532 ) File /opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/coding/times.py:163, in _decode_cf_datetime_dtype(data, units, calendar, use_cftime) 158 msg = ( 159 f"unable to decode time units {units!r} with {calendar_msg!r}. Try " 160 "opening your dataset with decode_times=False or installing cftime " 161 "if it is not installed." 162 ) --> 163 raise ValueError(msg) 164 else: ValueError: unable to decode time units 'months since 1955-01-01 00:00:00' with 'the default calendar'. Try opening your dataset with decode_times=False or installing cftime if it is not installed. 
During handling of the above exception, another exception occurred: RuntimeError Traceback (most recent call last) Input In [2], in <cell line: 1>() ----> 1 tempfield = Field.from_netcdf(glob('WOA_data/woa18_decav_*_04.nc'), 't_an', 2 {'lon': 'lon', 'lat': 'lat', 'time': 'time'}) File ~/Codes/parcels/parcels/field.py:390, in Field.from_netcdf(cls, filenames, variable, dimensions, indices, grid, mesh, timestamps, allow_time_extrapolation, time_periodic, deferred_load, **kwargs) 385 raise RuntimeError('Multiple files given but no time dimension specified') 387 if grid is None: 388 # Concatenate time variable to determine overall dimension 389 # across multiple files --> 390 time, time_origin, timeslices, dataFiles = cls.collect_timeslices(timestamps, data_filenames, 391 _grid_fb_class, dimensions, 392 indices, netcdf_engine, netcdf_decodewarning) 393 grid = Grid.create_grid(lon, lat, depth, time, time_origin=time_origin, mesh=mesh) 394 grid.timeslices = timeslices File ~/Codes/parcels/parcels/field.py:248, in Field.collect_timeslices(timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine, netcdf_decodewarning) 245 for fname in data_filenames: 246 with _grid_fb_class(fname, dimensions, indices, netcdf_engine=netcdf_engine, 247 netcdf_decodewarning=netcdf_decodewarning) as filebuffer: --> 248 ftime = filebuffer.time 249 timeslices.append(ftime) 250 dataFiles.append([fname] * len(ftime)) File ~/Codes/parcels/parcels/fieldfilebuffer.py:171, in NetcdfFileBuffer.time(self) 169 @property 170 def time(self): --> 171 return self.time_access() File ~/Codes/parcels/parcels/fieldfilebuffer.py:181, in NetcdfFileBuffer.time_access(self) 178 return np.array([None]) 180 time_da = self.dataset[self.dimensions['time']] --> 181 convert_xarray_time_units(time_da, self.dimensions['time']) 182 time = np.array([time_da[self.dimensions['time']].data]) if len(time_da.shape) == 0 else np.array(time_da[self.dimensions['time']]) 183 if isinstance(time[0], 
datetime.datetime): File ~/Codes/parcels/parcels/tools/converters.py:235, in convert_xarray_time_units(ds, time) 233 da2 = xr.decode_cf(da2) 234 except ValueError: --> 235 raise RuntimeError('Xarray could not convert the calendar. If you''re using from_netcdf, ' 236 'try using the timestamps keyword in the construction of your Field. ' 237 'See also the tutorial at https://nbviewer.jupyter.org/github/OceanParcels/' 238 'parcels/blob/master/parcels/examples/tutorial_timestamps.ipynb') 239 ds[time] = da2[time] RuntimeError: Xarray could not convert the calendar. If youre using from_netcdf, try using the timestamps keyword in the construction of your Field. See also the tutorial at https://nbviewer.jupyter.org/github/OceanParcels/parcels/blob/master/parcels/examples/tutorial_timestamps.ipynb
However, we can create our own numpy array of timestamps associated with each of the 12 snapshots in the NetCDF file:
timestamps = np.expand_dims(np.array([np.datetime64('2001-%.2d-15' %m) for m in range(1,13)]), axis=1)
And then we can add the timestamps
as an extra argument:
# Load the same data, now supplying the timestamps explicitly so Parcels does
# not need xarray to decode the 'months since' calendar.
# netcdf_decodewarning=False silences the expected decoding warning.
tempfield = Field.from_netcdf(glob('WOA_data/woa18_decav_*_04.nc'), 't_an',
{'lon': 'lon', 'lat': 'lat', 'time': 'time'},
netcdf_decodewarning=False,
timestamps=timestamps)
Note, by the way, that adding the time_periodic=True
argument to Field.from_netcdf()
will also mean that the climatology can be cycled for multiple years.
Furthermore, note that we used netcdf_decodewarning=False
in the Field.from_netcdf()
call above. This is to silence an expected warning, raised because the time dimension in the WOA data
files can't be decoded by xarray
.