import warnings
# Suppressing warnings for a "pretty output."
# NOTE(review): this silences *all* warnings process-wide; acceptable for a
# demo notebook, but a scoped warnings.catch_warnings() would be safer in a
# library context.
warnings.simplefilter('ignore')
%%writefile config.yaml
date:
start: 2018-2-28 00:00:00
stop: 2018-3-6 00:00:00
run_name: 'latest'
region:
bbox: [-71.4, 41.4, -69.5, 43.7]
crs: 'urn:ogc:def:crs:OGC:1.3:CRS84'
sos_name: 'water_surface_height_above_reference_datum'
cf_names:
- sea_surface_height
- sea_surface_elevation
- sea_surface_height_above_geoid
- sea_surface_height_above_sea_level
- water_surface_height_above_reference_datum
- sea_surface_height_above_reference_ellipsoid
units: 'm'
catalogs:
- https://data.ioos.us/csw
Overwriting config.yaml
import os
import shutil
from datetime import datetime

from ioos_tools.ioos import parse_config

config = parse_config('config.yaml')

# Downloaded data is staged in a scratch directory named after the run;
# remove any leftovers from a previous run before recreating it.
save_dir = os.path.abspath(config['run_name'])
if os.path.exists(save_dir):
    shutil.rmtree(save_dir)
os.makedirs(save_dir)


def fmt(text):
    """Center *text* inside a 64-character banner padded with asterisks."""
    return '{:*^64}'.format(text)


print(fmt('Saving data inside directory {}'.format(save_dir)))
print(fmt(' Run information '))
print('Run date: {:%Y-%m-%d %H:%M:%S}'.format(datetime.utcnow()))
print('Start: {:%Y-%m-%d %H:%M:%S}'.format(config['date']['start']))
print('Stop: {:%Y-%m-%d %H:%M:%S}'.format(config['date']['stop']))
print('Bounding box: {0:3.2f}, {1:3.2f},'
      '{2:3.2f}, {3:3.2f}'.format(*config['region']['bbox']))
Saving data inside directory c:\users\rsignell\documents\github\testing\latest *********************** Run information ************************ Run date: 2018-03-06 14:05:25 Start: 2018-02-28 00:00:00 Stop: 2018-03-06 00:00:00 Bounding box: -71.40, 41.40,-69.50, 43.70
def make_filter(config):
    """Build the CSW filter list used to query the catalogs.

    Combines the configured bounding box, the date range, an OR over all
    CF standard names, and a NOT filter that excludes GRIB-2 records.
    Returns a single-element list holding the combined AND filter.
    """
    from owslib import fes
    from ioos_tools.ioos import fes_date_filter

    like_kw = dict(wildCard='*', escapeChar='\\',
                   singleChar='?', propertyname='apiso:Subject')

    # Any of the configured CF names may match the record subject.
    any_cf_name = fes.Or([
        fes.PropertyIsLike(literal=('*%s*' % name), **like_kw)
        for name in config['cf_names']
    ])

    # Drop records advertised as GRIB-2.
    no_grib2 = fes.Not([fes.PropertyIsLike(literal='GRIB-2', **like_kw)])

    begin, end = fes_date_filter(config['date']['start'],
                                 config['date']['stop'])
    in_bbox = fes.BBox(config['region']['bbox'],
                       crs=config['region']['crs'])

    return [fes.And([in_bbox, begin, end, any_cf_name, no_grib2])]
filter_list = make_filter(config)

from ioos_tools.ioos import service_urls, get_csw_records
from owslib.csw import CatalogueServiceWeb

# Query every configured CSW catalog endpoint and harvest the OPeNDAP URLs
# from the records matching `filter_list`.
dap_urls = []
print(fmt(' Catalog information '))
for endpoint in config['catalogs']:
    print('URL: {}'.format(endpoint))
    try:
        csw = CatalogueServiceWeb(endpoint, timeout=120)
    except Exception as e:
        # Endpoint unreachable: report and continue with the next catalog.
        print('{}'.format(e))
        continue
    csw = get_csw_records(csw, filter_list, esn='full')
    # Two identifiers are checked because catalogs advertise OPeNDAP
    # endpoints under either scheme.
    OPeNDAP = service_urls(csw.records, identifier='OPeNDAP:OPeNDAP')
    odp = service_urls(csw.records, identifier='urn:x-esri:specification:ServiceType:odp:url')
    dap = OPeNDAP + odp
    dap_urls.extend(dap)
    print('Number of datasets available: {}'.format(len(csw.records.keys())))
    for rec, item in csw.records.items():
        print('{}'.format(item.title))
    if dap:
        print(fmt(' DAP '))
        for url in dap:
            print('{}.html'.format(url))
    print('\n')

# Get only unique endpoints.
dap_urls = list(set(dap_urls))
********************* Catalog information ********************** URL: https://data.ioos.us/csw Number of datasets available: 10 COAWST Modeling System: USEast: ROMS-WRF-SWAN coupled model (aka CNAPS) Coupled Northwest Atlantic Prediction System (CNAPS) HYbrid Coordinate Ocean Model (HYCOM): Global NECOFS (FVCOM) - Hampton - Latest Forecast NECOFS (FVCOM) - Scituate - Latest Forecast NECOFS GOM3 Wave - Northeast US - Latest Forecast NECOFS Massachusetts (FVCOM) - Boston - Latest Forecast NECOFS Massachusetts (FVCOM) - Massachusetts Coastal - Latest Forecast ROMS ESPRESSO Real-Time Operational IS4DVAR Forecast System Version 2 (NEW) 2013-present FMRC Averages ROMS ESPRESSO Real-Time Operational IS4DVAR Forecast System Version 2 (NEW) 2013-present FMRC History ***************************** DAP ****************************** http://oos.soest.hawaii.edu/thredds/dodsC/pacioos/hycom/global.html http://tds.marine.rutgers.edu/thredds/dodsC/roms/espresso/2013_da/avg/ESPRESSO_Real-Time_v2_Averages_Best.html http://tds.marine.rutgers.edu/thredds/dodsC/roms/espresso/2013_da/his/ESPRESSO_Real-Time_v2_History_Best.html http://thredds.secoora.org/thredds/dodsC/SECOORA_NCSU_CNAPS.nc.html http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_BOSTON_FORECAST.nc.html http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_HAMPTON_FORECAST.nc.html http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_MASSBAY_FORECAST.nc.html http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_SCITUATE_FORECAST.nc.html http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_WAVE_FORECAST.nc.html
from timeout_decorator import TimeoutError
from ioos_tools.ioos import is_station

# Drop endpoints that serve station (point) data; keep only model grids.
non_stations = []
for url in dap_urls:
    try:
        station = is_station(url)
    except (IOError, OSError, RuntimeError, TimeoutError) as e:
        print('Could not access URL {}.html\n{!r}'.format(url, e))
        continue
    if not station:
        non_stations.append(url)

dap_urls = non_stations

print(fmt(' Filtered DAP '))
for url in dap_urls:
    print('{}.html'.format(url))
************************* Filtered DAP ************************* http://tds.marine.rutgers.edu/thredds/dodsC/roms/espresso/2013_da/avg/ESPRESSO_Real-Time_v2_Averages_Best.html http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_WAVE_FORECAST.nc.html http://thredds.secoora.org/thredds/dodsC/SECOORA_NCSU_CNAPS.nc.html http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_HAMPTON_FORECAST.nc.html http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_MASSBAY_FORECAST.nc.html http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_SCITUATE_FORECAST.nc.html http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_BOSTON_FORECAST.nc.html http://oos.soest.hawaii.edu/thredds/dodsC/pacioos/hycom/global.html http://tds.marine.rutgers.edu/thredds/dodsC/roms/espresso/2013_da/his/ESPRESSO_Real-Time_v2_History_Best.html
from pyoos.collectors.coops.coops_sos import CoopsSos

# Configure the NOAA CO-OPS SOS collector for the run's region, time
# window, and observed variable.
collector_coops = CoopsSos()
collector_coops.set_bbox(config['region']['bbox'])
collector_coops.end_time = config['date']['stop']
collector_coops.start_time = config['date']['start']
collector_coops.variables = [config['sos_name']]

# Report how many offerings the server advertises for this setup.
ofrs = collector_coops.server.offerings
title = collector_coops.server.identification.title
print(fmt(' Collector offerings '))
print('{}: {} offerings'.format(title, len(ofrs)))
********************* Collector offerings ********************** NOAA.NOS.CO-OPS SOS: 1187 offerings
import pandas as pd

from ioos_tools.ioos import collector2table

data = collector2table(
    collector=collector_coops,
    config=config,
    col='water_surface_height_above_reference_datum (m)'
)

# Summarize each observed series' metadata in one table keyed by station.
# Field order matches the original column order of the summary frame.
meta_fields = ('station_name', 'station_code', 'sensor',
               'lon', 'lat', 'depth')
df = {
    field: [series._metadata.get(field) for series in data]
    for field in meta_fields
}

pd.DataFrame(df).set_index('station_code')
depth | lat | lon | sensor | station_name | |
---|---|---|---|---|---|
station_code | |||||
8418150 | None | 43.6561 | -70.2461 | urn:ioos:sensor:NOAA.NOS.CO-OPS:8418150:B1 | Portland, ME |
8419317 | None | 43.3200 | -70.5633 | urn:ioos:sensor:NOAA.NOS.CO-OPS:8419317:B1 | Wells, ME |
8423898 | None | 43.0714 | -70.7106 | urn:ioos:sensor:NOAA.NOS.CO-OPS:8423898:A1 | Fort Point, NH |
8443970 | None | 42.3539 | -71.0503 | urn:ioos:sensor:NOAA.NOS.CO-OPS:8443970:Y1 | Boston, MA |
8447386 | None | 41.7043 | -71.1641 | urn:ioos:sensor:NOAA.NOS.CO-OPS:8447386:B1 | Fall River, MA |
8447435 | None | 41.6885 | -69.9510 | urn:ioos:sensor:NOAA.NOS.CO-OPS:8447435:A1 | Chatham, Lydia Cove, MA |
8447930 | None | 41.5236 | -70.6711 | urn:ioos:sensor:NOAA.NOS.CO-OPS:8447930:B1 | Woods Hole, MA |
8452660 | None | 41.5044 | -71.3261 | urn:ioos:sensor:NOAA.NOS.CO-OPS:8452660:B1 | Newport, RI |
8452944 | None | 41.7170 | -71.3430 | urn:ioos:sensor:NOAA.NOS.CO-OPS:8452944:Y1 | Conimicut Light, RI |
# Hourly index spanning the configured date range (made timezone-naive).
index = pd.date_range(
    start=config['date']['start'].replace(tzinfo=None),
    end=config['date']['stop'].replace(tzinfo=None),
    freq='1H'
)

# Snap every observed series onto the common hourly index, carrying the
# metadata over explicitly because `reindex` returns a fresh object.
observations = []
for series in data:
    resampled = series.reindex(index=index, limit=1, method='nearest')
    resampled._metadata = series._metadata
    observations.append(resampled)
import iris

from ioos_tools.tardis import series2cube

# Global attributes stamped onto every observation cube.
attr = {
    'featureType': 'timeSeries',
    'Conventions': 'CF-1.6',
    'standard_name_vocabulary': 'CF-1.6',
    'cdm_data_type': 'Station',
    'comment': 'Data from http://opendap.co-ops.nos.noaa.gov',
}

# Convert every observed series to an iris cube and persist them together.
cubes = iris.cube.CubeList(
    [series2cube(obs, attr=attr) for obs in observations]
)

outfile = os.path.join(save_dir, 'OBS_DATA.nc')
iris.save(cubes, outfile)
from iris.exceptions import (CoordinateNotFoundError, ConstraintMismatchError,
                             MergeError)
from ioos_tools.ioos import get_model_name
from ioos_tools.tardis import quick_load_cubes, proc_cube, is_model, get_surface

# Load each DAP endpoint, keep only model datasets, and subset them to the
# configured bbox/time window/units.  Failures are reported and skipped so
# one bad endpoint cannot abort the whole harvest.
print(fmt(' Models '))
cubes = dict()
for k, url in enumerate(dap_urls):
    print('\n[Reading url {}/{}]: {}'.format(k+1, len(dap_urls), url))
    try:
        cube = quick_load_cubes(url, config['cf_names'],
                                callback=None, strict=True)
        if is_model(cube):
            cube = proc_cube(cube,
                             bbox=config['region']['bbox'],
                             time=(config['date']['start'],
                                   config['date']['stop']),
                             units=config['units'])
        else:
            # Station/observation endpoints are handled separately above.
            print('[Not model data]: {}'.format(url))
            continue
        mod_name = get_model_name(url)
        cubes.update({mod_name: cube})
    except (RuntimeError, ValueError,
            ConstraintMismatchError, CoordinateNotFoundError,
            IndexError) as e:
        print('Cannot get cube for: {}\n{}'.format(url, e))
**************************** Models **************************** [Reading url 1/9]: http://tds.marine.rutgers.edu/thredds/dodsC/roms/espresso/2013_da/avg/ESPRESSO_Real-Time_v2_Averages_Best [Reading url 2/9]: http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_WAVE_FORECAST.nc [Reading url 3/9]: http://thredds.secoora.org/thredds/dodsC/SECOORA_NCSU_CNAPS.nc [Reading url 4/9]: http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_HAMPTON_FORECAST.nc [Reading url 5/9]: http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_MASSBAY_FORECAST.nc [Reading url 6/9]: http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_SCITUATE_FORECAST.nc [Reading url 7/9]: http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_BOSTON_FORECAST.nc [Reading url 8/9]: http://oos.soest.hawaii.edu/thredds/dodsC/pacioos/hycom/global [Reading url 9/9]: http://tds.marine.rutgers.edu/thredds/dodsC/roms/espresso/2013_da/his/ESPRESSO_Real-Time_v2_History_Best
import iris
from iris.pandas import as_series
from ioos_tools.tardis import (make_tree, get_nearest_water,
                               add_station, ensure_timeseries, remove_ssh)

# For every model cube, find the nearest wet point to each observed station
# and save the extracted time series to <save_dir>/<model>.nc.
for mod_name, cube in cubes.items():
    fname = '{}.nc'.format(mod_name)
    fname = os.path.join(save_dir, fname)
    print(fmt(' Downloading to file {} '.format(fname)))
    try:
        tree, lon, lat = make_tree(cube)
    except CoordinateNotFoundError as e:
        print('Cannot make KDTree for: {}'.format(mod_name))
        continue
    # Get model series at observed locations.
    raw_series = dict()
    for obs in observations:
        obs = obs._metadata
        station = obs['station_code']
        try:
            # k nearest neighbors, capped search radius (degrees) and a
            # minimum variance to reject flat/land-masked cells.
            kw = dict(k=10, max_dist=0.08, min_var=0.01)
            args = cube, tree, obs['lon'], obs['lat']
            try:
                series, dist, idx = get_nearest_water(*args, **kw)
            except RuntimeError as e:
                print('Cannot download {!r}.\n{}'.format(cube, e))
                series = None
        except ValueError as e:
            # No usable data near this station for this model.
            status = 'No Data'
            print('[{}] {}'.format(status, obs['station_name']))
            continue
        # NOTE(review): `not series` relies on a None/empty result being
        # falsy; confirm get_nearest_water never returns a truthy-empty cube.
        if not series:
            status = 'Land '
        else:
            raw_series.update({station: series})
            series = as_series(series)
            status = 'Water '
        print('[{}] {}'.format(status, obs['station_name']))
    if raw_series: # Save cube.
        # NOTE(review): `cube` is rebound here, shadowing the model cube —
        # intentional since the model cube is no longer needed in this pass.
        for station, cube in raw_series.items():
            cube = add_station(cube, station)
            cube = remove_ssh(cube)
        try:
            cube = iris.cube.CubeList(raw_series.values()).merge_cube()
        except MergeError as e:
            print(e)
        ensure_timeseries(cube)
        try:
            iris.save(cube, fname)
        except AttributeError:
            # FIXME: we should patch the bad attribute instead of removing everything.
            cube.attributes = {}
            iris.save(cube, fname)
        del cube
    print('Finished processing [{}]'.format(mod_name))
Downloading to file c:\users\rsignell\documents\github\testing\latest\roms_2013_da_avg-ESPRESSO_Real-Time_v2_Averages_Best.nc [No Data] Portland, ME [No Data] Wells, ME [No Data] Fort Point, NH [Land ] Boston, MA [Land ] Fall River, MA [Water ] Chatham, Lydia Cove, MA [Water ] Woods Hole, MA [Water ] Newport, RI [Land ] Conimicut Light, RI Finished processing [roms_2013_da_avg-ESPRESSO_Real-Time_v2_Averages_Best] Downloading to file c:\users\rsignell\documents\github\testing\latest\FVCOM_Forecasts-NECOFS_WAVE_FORECAST.nc [Land ] Portland, ME [Land ] Wells, ME [Land ] Fort Point, NH [Land ] Boston, MA [Land ] Fall River, MA [Land ] Chatham, Lydia Cove, MA [Land ] Woods Hole, MA [Land ] Newport, RI [Land ] Conimicut Light, RI Finished processing [FVCOM_Forecasts-NECOFS_WAVE_FORECAST] Downloading to file c:\users\rsignell\documents\github\testing\latest\SECOORA_NCSU_CNAPS.nc [Land ] Portland, ME [Water ] Wells, ME [Water ] Fort Point, NH [Land ] Boston, MA [Land ] Fall River, MA [Land ] Chatham, Lydia Cove, MA [Water ] Woods Hole, MA [Water ] Newport, RI [Land ] Conimicut Light, RI Finished processing [SECOORA_NCSU_CNAPS] Downloading to file c:\users\rsignell\documents\github\testing\latest\Forecasts-NECOFS_FVCOM_OCEAN_HAMPTON_FORECAST.nc [No Data] Portland, ME [No Data] Wells, ME [No Data] Fort Point, NH [No Data] Boston, MA [No Data] Fall River, MA [No Data] Chatham, Lydia Cove, MA [No Data] Woods Hole, MA [No Data] Newport, RI [No Data] Conimicut Light, RI Finished processing [Forecasts-NECOFS_FVCOM_OCEAN_HAMPTON_FORECAST] Downloading to file c:\users\rsignell\documents\github\testing\latest\Forecasts-NECOFS_FVCOM_OCEAN_MASSBAY_FORECAST.nc [No Data] Portland, ME [Water ] Wells, ME [Water ] Fort Point, NH [Water ] Boston, MA [No Data] Fall River, MA [Water ] Chatham, Lydia Cove, MA [Water ] Woods Hole, MA [No Data] Newport, RI [No Data] Conimicut Light, RI Finished processing [Forecasts-NECOFS_FVCOM_OCEAN_MASSBAY_FORECAST] Downloading to file 
c:\users\rsignell\documents\github\testing\latest\Forecasts-NECOFS_FVCOM_OCEAN_SCITUATE_FORECAST.nc [No Data] Portland, ME [No Data] Wells, ME [No Data] Fort Point, NH [No Data] Boston, MA [No Data] Fall River, MA [No Data] Chatham, Lydia Cove, MA [No Data] Woods Hole, MA [No Data] Newport, RI [No Data] Conimicut Light, RI Finished processing [Forecasts-NECOFS_FVCOM_OCEAN_SCITUATE_FORECAST] Downloading to file c:\users\rsignell\documents\github\testing\latest\Forecasts-NECOFS_FVCOM_OCEAN_BOSTON_FORECAST.nc [No Data] Portland, ME [No Data] Wells, ME [No Data] Fort Point, NH [Water ] Boston, MA [No Data] Fall River, MA [No Data] Chatham, Lydia Cove, MA [No Data] Woods Hole, MA [No Data] Newport, RI [No Data] Conimicut Light, RI Finished processing [Forecasts-NECOFS_FVCOM_OCEAN_BOSTON_FORECAST] Downloading to file c:\users\rsignell\documents\github\testing\latest\pacioos_hycom-global.nc [Land ] Portland, ME [Water ] Wells, ME [Water ] Fort Point, NH [Land ] Boston, MA [Land ] Fall River, MA [Water ] Chatham, Lydia Cove, MA [Land ] Woods Hole, MA [Land ] Newport, RI [Land ] Conimicut Light, RI Finished processing [pacioos_hycom-global] Downloading to file c:\users\rsignell\documents\github\testing\latest\roms_2013_da-ESPRESSO_Real-Time_v2_History_Best.nc [No Data] Portland, ME [No Data] Wells, ME [No Data] Fort Point, NH [Land ] Boston, MA [Land ] Fall River, MA [Water ] Chatham, Lydia Cove, MA [Water ] Woods Hole, MA [Water ] Newport, RI [Land ] Conimicut Light, RI Finished processing [roms_2013_da-ESPRESSO_Real-Time_v2_History_Best]
from ioos_tools.ioos import stations_keys


def rename_cols(df, config):
    """Replace station-code column labels with human-readable station names."""
    code_to_name = stations_keys(config, key='station_name')
    return df.rename(columns=code_to_name)
from ioos_tools.ioos import load_ncs
from ioos_tools.skill_score import mean_bias, apply_skill

# Mean-bias skill: compare raw model series against the observations.
# NOTE(review): the `mean_bias` function imported here is shadowed later
# when a DataFrame of the same name is created.
dfs = load_ncs(config)
df = apply_skill(dfs, mean_bias, remove_mean=False, filter_tides=False)
skill_score = dict(mean_bias=df.to_dict())
# Filter out stations with no valid comparison.
df.dropna(how='all', axis=1, inplace=True)
df = df.applymap('{:.2f}'.format).replace('nan', '--')

from ioos_tools.skill_score import rmse

# RMSE skill: mean-removed comparison on a fresh copy of the data.
dfs = load_ncs(config)
df = apply_skill(dfs, rmse, remove_mean=True, filter_tides=False)
skill_score['rmse'] = df.to_dict()
# Filter out stations with no valid comparison.
df.dropna(how='all', axis=1, inplace=True)
df = df.applymap('{:.2f}'.format).replace('nan', '--')
import pandas as pd

# Stringify the station keys so they survive DataFrame construction.
skill_score = {
    metric: {str(station): value for station, value in table.items()}
    for metric, table in skill_score.items()
}

mean_bias = pd.DataFrame.from_dict(skill_score['mean_bias'])
mean_bias = mean_bias.applymap('{:.2f}'.format).replace('nan', '--')

skill_score = pd.DataFrame.from_dict(skill_score['rmse'])
skill_score = skill_score.applymap('{:.2f}'.format).replace('nan', '--')
import folium

from ioos_tools.ioos import get_coordinates


def make_map(bbox, **kw):
    """Create a folium map centered on *bbox*.

    Keyword options: ``line`` draws the bbox outline (default True),
    ``layers`` is accepted for API compatibility (default True, unused
    here), and ``zoom_start`` sets the initial zoom (default 5).
    """
    draw_outline = kw.pop('line', True)
    kw.pop('layers', True)  # accepted but not used by this builder
    zoom = kw.pop('zoom_start', 5)

    center_lon = (bbox[0] + bbox[2]) / 2
    center_lat = (bbox[1] + bbox[3]) / 2
    m = folium.Map(width='100%', height='100%',
                   location=[center_lat, center_lon], zoom_start=zoom)

    if draw_outline:
        folium.PolyLine(get_coordinates(bbox),
                        color='#FF0000',
                        weight=2,
                        opacity=0.9,
                        latlon=True).add_to(m)
    return m
bbox = config['region']['bbox']

m = make_map(
    bbox,
    zoom_start=6,
    line=True,
    layers=True
)

# Mapping of station code -> station name for popup labels.
all_obs = stations_keys(config)

from glob import glob
from operator import itemgetter

import iris
from folium.plugins import MarkerCluster

# NOTE(review): FUTURE.netcdf_promote was removed in newer iris releases;
# confirm the pinned iris version still accepts it.
iris.FUTURE.netcdf_promote = True

# Collect (model, lon, lat, station) tuples from every saved model file.
big_list = []
for fname in glob(os.path.join(save_dir, '*.nc')):
    if 'OBS_DATA' in fname:
        # Observations are not model extractions; skip them here.
        continue
    cube = iris.load_cube(fname)
    # Model name is taken from the trailing part of the file name.
    model = os.path.split(fname)[1].split('-')[-1].split('.')[0]
    lons = cube.coord(axis='X').points
    lats = cube.coord(axis='Y').points
    stations = cube.coord('station_code').points
    models = [model]*lons.size
    lista = zip(models, lons.tolist(), lats.tolist(), stations.tolist())
    big_list.extend(lista)

# Group the tuples by station and add one clustered marker per model hit.
big_list.sort(key=itemgetter(3))
df = pd.DataFrame(big_list, columns=['name', 'lon', 'lat', 'station'])
df.set_index('station', drop=True, inplace=True)
groups = df.groupby(df.index)

locations, popups = [], []
for station, info in groups:
    sta_name = all_obs[station]
    for lat, lon, name in zip(info.lat, info.lon, info.name):
        locations.append([lat, lon])
        popups.append('[{}]: {}'.format(name, sta_name))

MarkerCluster(locations=locations, popups=popups, name='Cluster').add_to(m);
# Friendly display names keyed by the raw dataset identifiers found in the
# saved files; anything missing falls back to its raw name in the plots.
titles = {
    'coawst_4_use_best': 'COAWST_4',
    'global': 'HYCOM',
    'NECOFS_GOM3_FORECAST': 'NECOFS_GOM3',
    'NECOFS_FVCOM_OCEAN_MASSBAY_FORECAST': 'NECOFS_MassBay',
    'OBS_DATA': 'Observations'
}

from bokeh.resources import CDN
from bokeh.plotting import figure
from bokeh.embed import file_html
from bokeh.models import HoverTool
from itertools import cycle
from bokeh.palettes import Category20
from folium import IFrame

# Plot defaults.
colors = Category20[20]
# Shared color cycle: successive model lines (across all plots) get
# successive palette colors.
colorcycler = cycle(colors)
tools = 'pan,box_zoom,reset'
width, height = 750, 250
def make_plot(df, station):
    """Build a bokeh time-series figure for one station.

    Each column of *df* becomes a line: observations are drawn in crimson,
    model series in the next palette color, each with a hover tool showing
    the precomputed bias and skill scores.  Relies on the module-level
    ``mean_bias``/``skill_score`` DataFrames, ``titles``, ``colorcycler``,
    and the plot-default globals.
    """
    p = figure(
        toolbar_location='above',
        x_axis_type='datetime',
        width=width,
        height=height,
        tools=tools,
        title=str(station)
    )
    for column, series in df.iteritems():
        # Drop missing values so bokeh does not draw gaps as segments.
        series.dropna(inplace=True)
        if not series.empty:
            if 'OBS_DATA' not in column:
                # Model series: look up its scores and take the next color.
                bias = mean_bias[str(station)][column]
                skill = skill_score[str(station)][column]
                line_color = next(colorcycler)
                kw = dict(alpha=0.65, line_color=line_color)
            else:
                # Observations have no self-comparison scores.
                skill = bias = 'NA'
                kw = dict(alpha=1, color='crimson')
            line = p.line(
                x=series.index,
                y=series.values,
                legend='{}'.format(titles.get(column, column)),
                line_width=5,
                line_cap='round',
                line_join='round',
                **kw
            )
            # One hover tool per line so tooltips show that line's scores.
            p.add_tools(HoverTool(tooltips=[('Name', '{}'.format(titles.get(column, column))),
                                            ('Bias', bias),
                                            ('Skill', skill)],
                                  renderers=[line]))
    return p
def make_marker(p, station):
    """Wrap the bokeh figure *p* in a folium marker at *station*'s location."""
    lon = stations_keys(config, key='lon')[station]
    lat = stations_keys(config, key='lat')[station]

    # Render the figure to standalone HTML and embed it in the popup iframe.
    html = file_html(p, CDN, station)
    iframe = IFrame(html, width=width+40, height=height+80)

    return folium.Marker(
        location=[lat, lon],
        popup=folium.Popup(iframe, max_width=2650),
        icon=folium.Icon(color='green', icon='stats'),
    )
dfs = load_ncs(config)

# Attach one plot-popup marker per station to the map.
# NOTE(review): `dfs.iloc[0:1]` iterates only the first slice's labels —
# presumably a deliberate demo limit to a single station; confirm intent.
for station in dfs.iloc[0:1]:
    sta_name = all_obs[station]
    df = dfs[station]
    if df.empty:
        continue
    p = make_plot(df, station)
    marker = make_marker(p, station)
    marker.add_to(m)

folium.LayerControl().add_to(m);
def html_to_srcdoc(html):
    """Escape double quotes so *html* can be embedded in a srcdoc="..." attribute.

    Inside an HTML attribute delimited by double quotes, any literal `"`
    terminates the attribute, so it must become the `&quot;` entity.
    """
    return html.replace('"', '&quot;')


def embed_map(m):
    """Render the folium map *m* inline by embedding its HTML in an iframe.

    The map is saved to ``index.html`` and the file's markup is inlined
    through the iframe's ``srcdoc`` attribute, keeping the output
    self-contained.  Returns an IPython ``HTML`` display object.
    """
    from IPython.display import HTML

    m.save('index.html')
    with open('index.html') as f:
        html = f.read()

    iframe = '<iframe srcdoc="{srcdoc}" style="width: 100%; height: 750px; border: none"></iframe>'
    # BUG FIX: the original `html.replace('"', '"')` was a no-op (a garbled
    # '&quot;' entity) — unescaped quotes in the map HTML would terminate the
    # srcdoc attribute and break the embedded iframe.
    srcdoc = html_to_srcdoc(html)
    return HTML(iframe.format(srcdoc=srcdoc))
# Display the final interactive comparison map inline.
embed_map(m)