In [ ]:
import numpy as np

Sentinel_1_GRD

Define Area Extents


In [ ]:
from datetime import datetime
# Two predefined study areas so the analysis can be swapped between sites
# by pointing `extent` at either dict below.
vietnam_extent = dict(
    lat  = (10.9348, 11.0190),
    lon  = (107.8164, 107.9168),
    time = (datetime(2015,1,1), datetime(2017,1,1)),  # query window for the load
)

# No time bound here: the full archive for this product will be loaded.
menindee_extent = dict(
    lat = ( -32.420,-32.272),
    lon = (142.2348, 142.407),
)

# Active area of interest; switch to `vietnam_extent` to analyze the other site.
extent = menindee_extent

A quick visualization of the area before it is loaded through the datacube


In [ ]:
from utils.data_cube_utilities.dc_display_map import display_map

# Render an interactive map of the selected bounding box as a sanity check
# before committing to a (potentially slow) datacube load.
display_map(latitude  = extent['lat'],
            longitude = extent['lon'])

Display Available Products. Pick a product to load.

The following code creates a datacube object, lists the available products, and filters for products whose name contains "s1".


In [ ]:
import datacube
dc = datacube.Datacube()

# List all indexed products and keep only Sentinel-1 ones (names containing "s1").
products = dc.list_products()
products[products['name'].str.contains("s1")]

Define a product and platform


In [ ]:
# Attach the datacube product name and satellite platform to each area dict;
# these keys are consumed later by dc.load(**extent).
menindee_extent["product"]  = "s1_gamma0_menindee_lakes"
menindee_extent["platform"] = "SENTINEL_1"

vietnam_extent["product"]   = "s1_gamma0_vietnam"
vietnam_extent["platform"]  = "SENTINEL_1"

View basic metadata about this product


In [ ]:
from utils.data_cube_utilities.data_access_api import DataAccessApi
dca = DataAccessApi()

# Summarize what is available for this product (extents, time range, etc.)
# before loading any pixels.
dca.get_query_metadata(platform = extent["platform"],
                       product  = extent["product"])

In [ ]:
# Look up this product's row in the product listing.
# Call list_products() once and reuse the result — the original queried the
# datacube index twice on a single line.
all_products = dc.list_products()
product_details = all_products[all_products.name == extent["product"]]
product_details

Load Data


In [ ]:
# Load the full time series for the active extent. `extent` supplies the
# lat/lon (and optional time) bounds plus product/platform, so it can be
# splatted straight into dc.load(). Reuses the `dc` handle created earlier;
# re-importing datacube and re-instantiating Datacube here was redundant.
dataset = dc.load(**extent)

In [ ]:
# Inspect the loaded xarray.Dataset (dimensions, coordinates, data variables).
dataset

View an acquisition in dataset


In [ ]:
# NOTE(review): time index 1 is the *second* acquisition even though the
# variable is named "first_slice" — confirm whether index 0 was skipped
# intentionally (e.g. a bad first scene).
first_slice = dataset.isel(time = 1) # isel selects by index, rather than value.

In [ ]:
# Preview the selected time slice.
first_slice

Helper function for plotting


In [ ]:
def figure_ratio(ds, fixed_width = 20):
    """Return a (width, height) figsize that preserves the dataset's pixel aspect.

    Width is pinned to `fixed_width`; height is scaled by the ratio of
    latitude pixels to longitude pixels so the plot is not distorted.
    """
    height = fixed_width * (len(ds.latitude) / len(ds.longitude))
    return (fixed_width, height)

plot vh

A few outliers might distort the output. The following code will plot vh bands on a logarithmic scale


In [ ]:
%matplotlib inline
# Log scale compresses bright outliers so the backscatter structure is visible.
np.log10(first_slice.vh).plot(cmap = "Greys", figsize = figure_ratio(first_slice))

plot vv

A few outliers might distort the output. The following code will plot the vv band on a logarithmic scale


In [ ]:
# Same log-scaled visualization for the VV polarization band.
np.log10(first_slice.vv).plot(cmap = "Greys",figsize = figure_ratio(first_slice))

plot vv/vh

Define ratio function

This is a very simple function. It's defined in this notebook just to show how arithmetic operations scale over xarray datasets and how to add/augment a dataset with synthetic variables

$$ dataset_{a\_per\_b} = \frac{dataset_a}{dataset_b} $$



In [ ]:
import xarray as xr 
def augment_dataset_with_ratio(ds: xr.Dataset, band_name_1: str, band_name_2 :str) -> xr.Dataset:
    """Return `ds` with an added `<band1>_per_<band2>` variable.

    The new variable holds the element-wise ratio ds[band1] / ds[band2];
    the original dataset variables are preserved via merge.
    """
    ratio_name = f"{band_name_1}_per_{band_name_2}"
    # Convert the DataArray ratio into a named single-variable Dataset so it
    # can be merged back into the source dataset.
    ratio = (ds[band_name_1] / ds[band_name_2]).to_dataset(name=ratio_name)
    return ds.merge(ratio)



Apply function and display new xarray.Dataset


In [ ]:
# Attach both ratio bands to the slice: vv/vh and its reciprocal vh/vv.
first_slice = augment_dataset_with_ratio(first_slice, "vv", "vh")
first_slice = augment_dataset_with_ratio(first_slice, "vh", "vv")

print(first_slice)

In [ ]:
# Log-scale the vv/vh ratio band to tame outliers before plotting.
np.log10(first_slice.vv_per_vh).plot(cmap = "Greys", figsize = figure_ratio(first_slice))

plot false color

The function below defines a basic normalization and plotting function for xarrays. I would not recommend re-using this unless you understand your data in great enough detail to determine that this suits your needs. This is a first pass at including a plotting utility in our S1 analysis workflow. The canonical approach or desired results would be something along the lines of following established processing methods used in something like ESA's Sentinel-1 Toolbox or from the Sentinel Calibration Guide.


In [ ]:
import numpy as np
import xarray as xr

def build_rgb_from_ds(_dataset: xr.Dataset,
                      r:str = None,
                      g:str = None,
                      b:str = None, 
                      logarithmic_scale_enabled = False):
    """Stack three dataset variables into an (H, W, 3) RGB array for plt.imshow.

    Parameters
    ----------
    _dataset : xr.Dataset
        Source of the three bands.
    r, g, b : str
        Variable names mapped to the red, green, and blue channels.
    logarithmic_scale_enabled : bool
        If True, log10-scale each band before normalizing.

    Returns
    -------
    np.ndarray
        uint8 array of shape (lat, lon, 3); each variable min-max scaled to 0-255.
    """
    norm = _dataset.copy()

    if logarithmic_scale_enabled:
        norm[r] = np.log10(norm[r]) 
        norm[g] = np.log10(norm[g]) 
        norm[b] = np.log10(norm[b]) 

    # BUG FIX: the original normalized `_dataset` here, silently discarding the
    # log scaling computed above. Normalize `norm` instead, and shift by the
    # per-variable minimum so log-scaled (negative) values map into 0..255.
    # (Dataset arithmetic and min/max reduce per variable.)
    norm = (255 * (norm - norm.min()) / (norm.max() - norm.min())).astype(np.uint8)

    # uint8, not uint16: matplotlib's imshow expects integer RGB images as uint8.
    _r = norm[r].values
    _g = norm[g].values
    _b = norm[b].values
    _rgb = np.dstack([_r,_g,_b])
    return _rgb

In [ ]:
# False-color composite of the single slice:
# red = vv, green = vh, blue = vv/vh ratio.
rgb = build_rgb_from_ds(first_slice,
                        r = "vv", 
                        g = "vh",
                        b = "vv_per_vh")



In [ ]:
import matplotlib.pyplot as plt
# Display the composite; figure_ratio keeps the geographic aspect ratio.
plt.figure( figsize = figure_ratio(first_slice))
plt.imshow( rgb )


Build a median value composite(mosaic) for your time series

The following section gets messy. We build a mosaic piece by piece, rather than all at once. This employs a chunking process whereby small extents are queried and processed.


In [ ]:
from utils.data_cube_utilities.dc_chunker import create_geographic_chunks, combine_geographic_chunks

# Split the area of interest into ~0.05-degree sub-extents so each chunk can
# be loaded and mosaicked independently (keeps peak memory usage low).
geographic_chunks = create_geographic_chunks(latitude=extent["lat"], longitude=extent["lon"], geographic_chunk_size=.05)



In [ ]:
# Show the lat/lon bounds of every chunk that will be processed.
for chunk in geographic_chunks:
    print(chunk)



In [ ]:
from utils.data_cube_utilities.dc_mosaic import create_median_mosaic
from utils.data_cube_utilities.dc_sar_utils import dn_to_db 
import warnings
# NOTE(review): this silences *all* warnings for the rest of the kernel
# session — consider scoping it with warnings.catch_warnings() instead.
warnings.filterwarnings("ignore")

import numpy as np  

measurements = ['vv', 'vh']
product_chunks = []

# Load one geographic chunk at a time, median-composite it over time, and
# collect the per-chunk mosaics for stitching below.
# This part is unpythonic but explicit
for index, chunk in enumerate(geographic_chunks):
    data = dca.get_dataset_by_extent(extent["product"], 
                                    longitude=chunk['longitude'], latitude=chunk['latitude'], 
                                    measurements=measurements)
    # Skip chunks that returned no data (an empty result has no 'vv' variable).
    if 'vv' in data:
        # All-True clean_mask: no cloud/quality masking is applied to SAR here.
        product_chunks.append(create_median_mosaic(data, clean_mask=np.full((data.vv.shape), True), dtype="float32", no_data=0))
        

# Stitch the per-chunk mosaics back into one continuous scene.
final_mosaic = combine_geographic_chunks(product_chunks)

In [ ]:
# Inspect the stitched median mosaic.
final_mosaic

In [ ]:
# Attach both ratio bands (vv/vh and vh/vv) to the mosaic.
final_mosaic = augment_dataset_with_ratio(final_mosaic, "vv", "vh")
final_mosaic = augment_dataset_with_ratio(final_mosaic, "vh", "vv")

In [ ]:
# Confirm the ratio variables were added.
final_mosaic

Plot Median Composite


In [ ]:
# False-color composite of the median mosaic.
# NOTE(review): the blue channel here is "vh_per_vv", whereas the earlier
# single-slice composite used "vv_per_vh" — confirm which ratio is intended.
rgb = build_rgb_from_ds(final_mosaic,
                        r = "vv", 
                        g = "vh",
                        b = "vh_per_vv")

In [ ]:
import matplotlib.pyplot as plt
# Display the mosaic composite at the correct geographic aspect ratio.
plt.figure( figsize = figure_ratio(final_mosaic))
plt.imshow( rgb )

Pixel Drill Analysis


In [ ]:
# Add the VV/VH ratio band to the entire time series (not just one slice),
# so the pixel-drill plots below can use it.
dataset = augment_dataset_with_ratio(dataset, "vv", "vh")

Choose a pixel

Choose a pixel from the bounding box below (a click will reveal lat and lon coordinates)


In [ ]:
# Re-display the map so a pixel can be picked (a click reveals lat/lon).
display_map(latitude  = extent['lat'], longitude = extent['lon'])

In [ ]:
# Lat and Lon coordinates extracted from the map above.
# BUG FIX: the Menindee coordinates were assigned and then immediately
# overwritten by the Vietnam ones, even though `extent = menindee_extent`
# is the active area — the nearest-neighbor selection below would have
# snapped to an edge pixel. Keep the Menindee point active and leave the
# Vietnam point as a commented alternative.
pixel_lat = -32.3626  # Menindee
pixel_lon = 142.2764  # Menindee

# pixel_lat = 11.1306   # Vietnam
# pixel_lon = 107.6052  # Vietnam

Select a pixel from our xarray.Dataset using nearest neighbor.


In [ ]:
# Extract the time series at the chosen coordinates; 'nearest' snaps to the
# closest grid cell, so the point need not fall exactly on a pixel center.
pixel = dataset.sel(latitude  = pixel_lat,
                    longitude = pixel_lon,
                    method = 'nearest') # nearest neighbor selection



In [ ]:
# Inspect the single-pixel time series.
pixel


Distributions( pixel histogram per band)

VH


In [ ]:
# Distribution of VH backscatter at the chosen pixel across all acquisitions.
pixel.vh.plot.hist()

VV


In [ ]:
# Distribution of VV backscatter at the chosen pixel across all acquisitions.
pixel.vv.plot.hist()

Plot Pixel Bands

VH


In [ ]:
# Time series scatter: X-axis acquisition time, Y-axis VH backscatter.
plt.figure(figsize = (20,5)) 
plt.scatter(pixel.time.values, pixel.vh.values)

Log10(VH)


In [ ]:
# Time series scatter: X-axis acquisition time, Y-axis log10(VH).
plt.figure(figsize = (20,5)) 
plt.scatter(pixel.time.values,  np.log10(pixel.vh.values))

VV


In [ ]:
# Time series scatter: X-axis acquisition time, Y-axis VV backscatter.
plt.figure(figsize = (20,5))
plt.scatter(pixel.time.values,  pixel.vv.values)

Log10(VV)


In [ ]:
# Time series scatter: X-axis acquisition time, Y-axis log10(VV).
plt.figure(figsize = (20,5))
plt.scatter(pixel.time.values,  np.log10(pixel.vv.values))

VV/VH


In [ ]:
# Time series scatter: X-axis acquisition time, Y-axis VV/VH ratio
# (the original comment incorrectly said "values of vv").
plt.figure(figsize = (20,5))
plt.scatter(pixel.time.values, pixel.vv_per_vh.values)

Log10(VV) / Log10(VH)


In [ ]:
# Time series scatter: X-axis acquisition time, Y-axis log10(VV)/log10(VH)
# (the original comment incorrectly said "values of vv").
plt.figure(figsize = (20,5))
plt.scatter(pixel.time.values,
            (np.log10(pixel.vv) / np.log10(pixel.vh)).values
           )




Box and Whisker Plot for the Loaded Dataset


In [ ]:
def ds_to_timeseries(ds, band, log = False):
    """Split one band into per-acquisition 1-D arrays of finite values.

    Returns a list with one entry per time step, each a flattened array
    with NaN/inf removed — suitable as input for plt.boxplot. Pass
    log=True to log10-scale the data first.
    """
    values = ds[band]
    if log == True:
        values = np.log10(values)
    series = []
    for step_index in range(len(values.time)):
        step = values.isel(time = step_index).values
        # Boolean indexing drops non-finite entries and flattens in one pass.
        series.append(step[np.isfinite(step)].flatten())
    return series

VV


In [ ]:
# Box-and-whisker of VV per acquisition; the "" argument suppresses outlier fliers.
plt.figure(figsize = (20,5))

plt.boxplot(ds_to_timeseries(dataset, "vv"), 0, "");

VH


In [ ]:
# Box-and-whisker of VH per acquisition; fliers suppressed.
plt.figure(figsize = (20,5))
plt.boxplot(ds_to_timeseries(dataset, "vh"), 0, "");

VV/VH


In [ ]:
# Box-and-whisker of the VV/VH ratio per acquisition; fliers suppressed.
plt.figure(figsize = (20,5))
plt.boxplot(ds_to_timeseries(dataset, "vv_per_vh"), 0, "");

Apply threshold to data


In [ ]:
import matplotlib
def plot_threshold(ds, band_name, bottom = None , top = None, log_scaled = False, cmap_name = 'Greys'):
    """Plot one band with out-of-range values highlighted.

    Values above `top` render red and values below `bottom` render blue.
    Thresholds are given in the band's original (linear) units; when
    `log_scaled` is True, both the data and the thresholds are log10
    transformed before plotting.
    """
    import copy

    _range = "Full {} range: {}-{}".format(band_name, ds[band_name].min().values, ds[band_name].max().values)

    selection = ds[band_name]

    # BUG FIX: get_cmap() returns the globally registered colormap instance;
    # calling set_over/set_under on it directly would mutate that colormap for
    # every later plot in the session. Work on a copy instead. The duplicate
    # inner figure_ratio definition was also removed — the module-level
    # figure_ratio defined earlier in the notebook is used.
    my_cmap = copy.copy(matplotlib.cm.get_cmap(cmap_name))
    my_cmap.set_over('r')
    my_cmap.set_under('b')

    plt.figure(figsize = figure_ratio(ds))

    if log_scaled:
        selection = np.log10(selection)
        if bottom is not None:
            bottom = np.log10(bottom)
        if top is not None:
            top = np.log10(top)

    selection.plot(cmap = my_cmap, vmax = top, vmin = bottom)
    plt.figtext(0.7, 0, _range, horizontalalignment = "center")

VV(no threshold)


In [ ]:
# VV, log-scaled, no thresholds (nothing rendered red/blue).
plot_threshold(first_slice, "vv", log_scaled = True)

VV(threshold)


In [ ]:
# VV with thresholds: values above `top` plot red, below `bottom` plot blue.
# Thresholds are given in linear units; they are log10-scaled internally.
plot_threshold(first_slice, "vv",
               bottom = 0.005, 
               top = .1, 
               log_scaled = True)

VH( no threshold )


In [ ]:
# VH, log-scaled, no thresholds.
plot_threshold(first_slice, "vh", log_scaled = True)

VH(threshold)


In [ ]:
# VH with thresholds (linear units, log10-scaled internally).
plot_threshold(first_slice,
               "vh",
               log_scaled = True,
               bottom = 0.001125,
               top = 0.01125
               )

In [ ]: