Exploring use of Python to formulate NERACOOS ERDDAP data requests and process the responses.
%matplotlib inline
import urllib
import urllib.parse

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
# Use ERDDAP's built-in relative time functionality, e.g.:
#start='now-7days'
#stop='now'
# or specify a specific period (ISO 8601, UTC):
start = '2016-09-06T00:00:00Z'
stop = '2016-11-07T00:00:00Z'

# Read instrument data (E01_sbe16).
# NOTE(review): the raw URL produced "HTTP Error 400: Bad Request" (see the
# traceback captured below).  ERDDAP requires special characters in the query
# (here the '>' and '<' of the time constraints) to be percent-encoded, so
# encode the URL while keeping its structural characters (: / ? & = ,) intact.
import urllib.parse
url = ('https://www.neracoos.org/erddap/tabledap/E01_sbe16_trans_all.csv?'
       'station,time,depth,longitude,latitude,attenuation,sigma_t,temperature,salinity'
       '&time>={}&time<={}'.format(start, stop))
url = urllib.parse.quote(url, safe=':/?&=,')
print(url)
# skiprows=[1]: the second row of an ERDDAP CSV response holds the units, not data.
df_sb = pd.read_csv(url, index_col='time', parse_dates=True, skiprows=[1])
http://www.neracoos.org/erddap/tabledap/E01_sbe16_trans_all.csv?station,time,depth,longitude,latitude,attenuation,sigma_t,temperature,salinity&time>=2016-09-06T00:00:00Z&time<=2016-11-07T00:00:00Z
--------------------------------------------------------------------------- HTTPError Traceback (most recent call last) <ipython-input-3-2aa6ae972294> in <module>() 3 4 print(url) ----> 5 df_sb = pd.read_csv(url,index_col='time',parse_dates=True,skiprows=[1]) # skip the units row ~\miniconda3\envs\IOOS\lib\site-packages\pandas\io\parsers.py in parser_f(filepath_or_buffer, sep, delimiter, header, names, index_col, usecols, squeeze, prefix, mangle_dupe_cols, dtype, engine, converters, true_values, false_values, skipinitialspace, skiprows, nrows, na_values, keep_default_na, na_filter, verbose, skip_blank_lines, parse_dates, infer_datetime_format, keep_date_col, date_parser, dayfirst, iterator, chunksize, compression, thousands, decimal, lineterminator, quotechar, quoting, escapechar, comment, encoding, dialect, tupleize_cols, error_bad_lines, warn_bad_lines, skipfooter, skip_footer, doublequote, delim_whitespace, as_recarray, compact_ints, use_unsigned, low_memory, buffer_lines, memory_map, float_precision) 707 skip_blank_lines=skip_blank_lines) 708 --> 709 return _read(filepath_or_buffer, kwds) 710 711 parser_f.__name__ = name ~\miniconda3\envs\IOOS\lib\site-packages\pandas\io\parsers.py in _read(filepath_or_buffer, kwds) 431 compression = _infer_compression(filepath_or_buffer, compression) 432 filepath_or_buffer, _, compression = get_filepath_or_buffer( --> 433 filepath_or_buffer, encoding, compression) 434 kwds['compression'] = compression 435 ~\miniconda3\envs\IOOS\lib\site-packages\pandas\io\common.py in get_filepath_or_buffer(filepath_or_buffer, encoding, compression) 188 189 if _is_url(filepath_or_buffer): --> 190 req = _urlopen(filepath_or_buffer) 191 content_encoding = req.headers.get('Content-Encoding', None) 192 if content_encoding == 'gzip': ~\miniconda3\envs\IOOS\lib\urllib\request.py in urlopen(url, data, timeout, cafile, capath, cadefault, context) 221 else: 222 opener = _opener --> 223 return opener.open(url, data, timeout) 224 225 def 
install_opener(opener): ~\miniconda3\envs\IOOS\lib\urllib\request.py in open(self, fullurl, data, timeout) 530 for processor in self.process_response.get(protocol, []): 531 meth = getattr(processor, meth_name) --> 532 response = meth(req, response) 533 534 return response ~\miniconda3\envs\IOOS\lib\urllib\request.py in http_response(self, request, response) 640 if not (200 <= code < 300): 641 response = self.parent.error( --> 642 'http', request, response, code, msg, hdrs) 643 644 return response ~\miniconda3\envs\IOOS\lib\urllib\request.py in error(self, proto, *args) 568 if http_err: 569 args = (dict, 'default', 'http_error_default') + orig_args --> 570 return self._call_chain(*args) 571 572 # XXX probably also want an abstract factory that knows when it makes ~\miniconda3\envs\IOOS\lib\urllib\request.py in _call_chain(self, chain, kind, meth_name, *args) 502 for handler in handlers: 503 func = getattr(handler, meth_name) --> 504 result = func(*args) 505 if result is not None: 506 return result ~\miniconda3\envs\IOOS\lib\urllib\request.py in http_error_default(self, req, fp, code, msg, hdrs) 648 class HTTPDefaultErrorHandler(BaseHandler): 649 def http_error_default(self, req, fp, code, msg, hdrs): --> 650 raise HTTPError(req.full_url, code, msg, hdrs, fp) 651 652 class HTTPRedirectHandler(BaseHandler): HTTPError: HTTP Error 400: Bad Request
# Notebook inspection cells: echo the request URL and its percent-encoded form,
# probing whether the HTTP 400 error above is caused by unencoded characters.
# (These bare expressions only display output in a notebook; they are no-ops
# when run as a script.)
url
urllib.parse.quote(url)
# Read met data (E01_met).
# Percent-encode the '>'/'<' of the time constraints — ERDDAP rejects raw
# special characters with HTTP 400 (as the sbe16 request above demonstrates) —
# while keeping the URL-structure characters (: / ? & = ,) as-is.
import urllib.parse
url = ('https://www.neracoos.org/erddap/tabledap/E01_met_all.csv?'
       'station,time,air_temperature,barometric_pressure,wind_gust,wind_speed,'
       'wind_direction,visibility'
       '&time>={}&time<={}'.format(start, stop))
url = urllib.parse.quote(url, safe=':/?&=,')
print(url)
# skiprows=[1]: the second row of an ERDDAP CSV response holds the units, not data.
df_met = pd.read_csv(url, index_col='time', parse_dates=True, skiprows=[1])
# Read wave data (E01_accelerometer).
# Use https and percent-encode the time-constraint operators, matching the
# other requests in this notebook (ERDDAP returns HTTP 400 for raw '>'/'<').
import urllib.parse
url = ('https://www.neracoos.org/erddap/tabledap/E01_accelerometer_all.csv?'
       'station,time,mooring_site_desc,significant_wave_height,dominant_wave_period'
       '&time>={}&time<={}'.format(start, stop))
url = urllib.parse.quote(url, safe=':/?&=,')
print(url)
# Load the CSV data directly into pandas; skiprows=[1] drops the units row.
df_wave = pd.read_csv(url, index_col='time', parse_dates=True, skiprows=[1])
# List the last ten instrument records as a quick sanity check on the download.
df_sb.tail(10)
# Time series of density anomaly (sigma_t), titled with the station name from
# the first record, with beam attenuation overlaid on a secondary y-axis.
df_sb['sigma_t'].plot(figsize=(12,4),title=df_sb['station'][0]);plt.legend(loc=2)
df_sb['attenuation'].plot(figsize=(12,4),secondary_y=True,legend=True);
# Wind speed (left axis) vs significant wave height (right axis) on one time
# axis; x_compat=True uses plain matplotlib date handling so the two frames'
# different time indexes share the same x-axis.
p1=df_met['wind_speed'].plot(figsize=(12,4),x_compat=True);plt.legend(loc=2)
p2=df_wave['significant_wave_height'].plot(secondary_y=True,legend=True,x_compat=True);
# All wave variables together.
df_wave.plot(figsize=(12,4));
# Scatter of sigma_t against attenuation to look for a relationship between
# density and optical attenuation.
plt.scatter(df_sb['sigma_t'],df_sb['attenuation'])
plt.grid()
def tsplot(sobs,tobs):
smin=sobs.min()
smax=sobs.max()
tmin=tobs.min()
tmax=tobs.max()
s_inc=(smax-smin)/8.
t_inc=(tmax-tmin)/8.
t = np.arange(tmin,tmax+t_inc,t_inc)
s = np.arange(smin,smax+s_inc,s_inc)
S, T = np.meshgrid(s, t)
st = sw.dens0(S, T) - 1000
st_inc=(st.max()-st.min())/8.
levels = np.arange(st.min(),st.max()+st_inc,st_inc)
from matplotlib import rcParams
from matplotlib.ticker import MultipleLocator
rcParams['xtick.direction'] = 'out'
rcParams['ytick.direction'] = 'out'
fig, ax = plt.subplots(figsize=(6, 4))
# ax.xaxis.set_minor_locator(MultipleLocator(0.1))
# ax.yaxis.set_minor_locator(MultipleLocator(1))
ax.set_ylabel(u"Temperature \u00b0C")
ax.set_xlabel(r"Salinity [g kg$^{-1}$]")
ax.axis([smin,smax,tmin,tmax])
cs = ax.contour(s, t, st, colors='black', levels=levels)
ax.clabel(cs, fontsize=9, inline=1, fmt='%3.2f')
#sg = ax.contour(s, t, sigma_theta, linestyle='--', colors='grey', levels=[0, line])
#ax.clabel(sg, fontsize=9, inline=1, fmt='%2.1f')
ax.plot(sobs,tobs,'o')
tsplot(df_sb['salinity'],df_sb['temperature'])