#!/usr/bin/env python
# coding: utf-8

# # Sentinel-2 Reservoir and Lake Surface Area Monitoring

# This notebook shows how to use the [Sentinel-2 Reservoir and Lake Surface Area Monitoring](https://up42.com/marketplace/blocks/processing/terracover-realsat) processing block to map changes in the surface extent of water bodies. In this example, we will visualize the impact of recent droughts on some of the reservoirs in Taiwan.

# The notebook is divided into the following sections:
# 1. Define the AOI and time range.
# 2. Validate the AOI and time range to ensure that Sentinel-2 imagery is available.
# 3. Run the workflow to generate water extent maps for pre-defined water bodies within the AOI.
# 4. Download raw multi-spectral imagery for visual comparison.
# 5. Visualize the surface water extent maps.
#
# Please note that this example assumes that your project settings allow:
# - jobs to be executed in parallel
# - at least 15 images to be returned as output
# - a maximum AOI size greater than 15 square kilometers
#
# If the above settings are not feasible for your project, please adjust the AOI size and time range accordingly.
#
# **Please note:** this workflow takes approximately 1 hour to run.

# In[2]:


# load packages
import os
from functools import partial

import geopandas as gpd
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import pyproj
import rasterio
import rasterio.mask
from rasterio import features
from rasterio.plot import reshape_as_raster, show
from shapely.geometry import LineString, MultiPolygon, Point, Polygon, box
from shapely.geometry import shape as shapely_shp
from shapely.ops import cascaded_union, transform

from datetime import date
from dateutil.relativedelta import relativedelta

import folium
import up42
import geopandas
import time
import math
import glob
from matplotlib.colors import LinearSegmentedColormap

# ignore division-by-zero and invalid-value warnings in numpy
np.seterr(divide='ignore', invalid='ignore')


# In[3]:


# provide your project id and project api key in a config file
up42.authenticate(cfg_file="config.json")
project = up42.initialize_project()
project.update_project_settings(max_concurrent_jobs=20, number_of_images=40)


# ## 1. Define AOI and time range.

# In[4]:


# set the AOI (as a GeoJSON-format dictionary). To create an AOI as GeoJSON, you can use http://geojson.io
aoi = {
    "type": "FeatureCollection",
    "features": [
        {
            "type": "Feature",
            "properties": {},
            "geometry": {
                "type": "Polygon",
                "coordinates": [
                    [
                        [121.051051, 24.758307],
                        [121.027337, 24.747545],
                        [121.036327, 24.735015],
                        [121.032854, 24.722302],
                        [121.043744, 24.711475],
                        [121.055938, 24.715882],
                        [121.078329, 24.730913],
                        [121.051051, 24.758307]
                    ]
                ]
            }
        }
    ]
}
aoi = aoi['features'][0]['geometry']
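# Optional sanity check (a small added sketch, not part of the original workflow):
# estimate the AOI area in square kilometers to confirm it stays within the project's
# maximum AOI size, which this example assumes to be at least 15 square kilometers.
# pyproj's Geod.geometry_area_perimeter returns the geodesic area of a shapely
# geometry in square meters (signed by ring orientation, hence the abs()).
geod = pyproj.Geod(ellps="WGS84")
aoi_area_m2, _ = geod.geometry_area_perimeter(shapely_shp(aoi))
print('AOI area (km^2): ' + str(round(abs(aoi_area_m2) / 1e6, 2)))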
# Set the start and end dates. This allows comparison across non-consecutive time periods.
# In this example, we compare water extents from 7 months ago with extents from 2 months ago
# (recent dates are required because Sobloo keeps a rolling archive).
time_interval_1_end = date.today() + relativedelta(months=-7)
time_interval_1_start = date.today() + relativedelta(months=-7, days=-14)
time_interval_2_end = date.today() + relativedelta(months=-2)
time_interval_2_start = date.today() + relativedelta(months=-2, days=-14)

time_interval_1 = (time_interval_1_start, time_interval_1_end)
time_interval_2 = (time_interval_2_start, time_interval_2_end)

limit = 15  # set the limit on the number of images per job
num_parallel_jobs = 15  # set the limit on the number of parallel jobs

# set up working directories
dry_run_dir = './realsat_demo_dryrun/'
map_run_dir = './realsat_demo_maps/'
img_run_dir = './realsat_demo_imgs/'


# ## 2. Validate the AOI and time range.

# In[ ]:


# run the data block first in DRY_RUN mode to confirm that the query returns a non-zero number of Sentinel-2 images

# initialize the workflow
workflow = project.create_workflow(name="realsat_demo", use_existing=True)

# declare the workflow tasks
input_tasks = ['sobloo-s2-l1c-fullscene']

# update the workflow object with our desired data block as the input task
workflow.add_workflow_tasks(input_tasks=input_tasks)

input_parameters = workflow.construct_parameters_parallel(
    geometries=[aoi],
    geometry_operation='intersects',
    interval_dates=[time_interval_1, time_interval_2],
    limit_per_job=limit)

# run the jobs in dry-run mode
job = workflow.test_jobs_parallel(input_parameters_list=input_parameters,
                                  max_concurrent_jobs=num_parallel_jobs)

# download the results upon job completion
results = job.download_results(dry_run_dir)

# load the metadata to print the number of available images within the AOI and time range
metadata = geopandas.read_file(dry_run_dir + 'data.json')
num_images = metadata.shape[0]
print('Number of images available: ' + str(num_images))


# ## 3. Run the workflow to generate water extent maps.
# - The processing block will analyze pre-defined water bodies within the AOI.

# In[ ]:


# run the workflow in live mode with the processing block

# initialize the workflow
workflow = project.create_workflow(name="realsat_demo")

# add the processing block
input_tasks = ['sobloo-s2-l1c-fullscene', 'terracover-realsat']

# update the workflow object with the data and processing blocks as input tasks
workflow.add_workflow_tasks(input_tasks=input_tasks)

# extract scene ids to create job parameters for parallel execution
scene_ids_list = []
for i in range(metadata.shape[0]):
    cur_name = metadata['identification'].values[i]['externalId']
    scene_ids_list.append(cur_name)

# divide the scenes into batches to submit multiple jobs for faster execution
batch_size = math.floor(num_images / num_parallel_jobs)
num_remaining = num_images % num_parallel_jobs
i = 0
total = 0
input_parameters_list = []
while (i
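# A hedged alternative sketch for the batching step above (an assumption, not the
# original implementation): numpy.array_split divides scene_ids_list into
# num_parallel_jobs batches whose sizes differ by at most one, which avoids
# tracking batch_size and num_remaining by hand.
scene_id_batches = [batch.tolist() for batch in np.array_split(scene_ids_list, num_parallel_jobs)]
print('Batch sizes: ' + str([len(batch) for batch in scene_id_batches]))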