diff --git a/README.md b/README.md
index 154ac367..ccf851ff 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,7 @@ CoastSat is an open-source software toolkit written in Python that enables users
Latest updates
:arrow_forward: *(2024/10/02)*
-CoastSat v3.0: integration with [FES2022 global tide model](https://www.aviso.altimetry.fr/en/data/products/auxiliary-products/global-tide-fes/release-fes22.html) to perform tidal correction within CoastSat.
+CoastSat v3.0: integration with [FES2022 global tide model](https://www.aviso.altimetry.fr/en/data/products/auxiliary-products/global-tide-fes/release-fes22.html) to perform **tidal correction** and **beach slope estimation** within CoastSat.
:arrow_forward: *(2024/08/29)*
CoastSat v2.7: reverse compatibility for file downloads (pre v2.6) and removed Collection 1 (deprecated, throws an error)
@@ -67,6 +67,7 @@ The toolbox has the following functionalities:
3. intersection of the 2D shorelines with user-defined shore-normal transects.
4. tidal correction using tide/water levels and an estimate of the beach slope.
5. post-processing of the shoreline time-series, despiking and seasonal averaging.
+6. beach slope estimation using satellite-derived shorelines and predicted tides.
### Table of Contents
diff --git a/example.py b/example.py
index 59f4183b..42e1c923 100644
--- a/example.py
+++ b/example.py
@@ -21,7 +21,7 @@
from datetime import datetime, timedelta
import pytz
from pyproj import CRS
-from coastsat import SDS_download, SDS_preprocess, SDS_shoreline, SDS_tools, SDS_transects
+from coastsat import SDS_download, SDS_preprocess, SDS_shoreline, SDS_tools, SDS_transects, SDS_slope
# region of interest (longitude, latitude in WGS84)
polygon = [[[151.301454, -33.700754],
@@ -246,7 +246,7 @@
df.to_csv(fn, sep=',')
print('Time-series of the shoreline change along the transects saved as:\n%s'%fn)
-#%% 4. Tidal correction
+#%% 5. Tidal correction
# For this example, we can use the FES2022 global tide model to predict tides at our beach for all the image times.
# To setup FES2022, follow the instructions at https://github.com/kvos/CoastSat/blob/master/doc/FES2022_setup
@@ -259,8 +259,6 @@
handlers = pyfes.load_config(config)
ocean_tide = handlers['tide']
load_tide = handlers['radial']
-# load coastsat module to estimate slopes
-from coastsat import SDS_slopes
# get polygon centroid
centroid = np.mean(polygon[0], axis=0)
@@ -270,10 +268,10 @@
date_range = [pytz.utc.localize(datetime(2024,1,1)),
pytz.utc.localize(datetime(2025,1,1))]
timestep = 900 # seconds
-dates_ts, tides_ts = SDS_slopes.compute_tide(centroid, date_range, timestep, ocean_tide, load_tide)
+dates_ts, tides_ts = SDS_slope.compute_tide(centroid, date_range, timestep, ocean_tide, load_tide)
# get tide levels corresponding to the time of image acquisition
dates_sat = output['dates']
-tides_sat = SDS_slopes.compute_tide_dates(centroid, output['dates'], ocean_tide, load_tide)
+tides_sat = SDS_slope.compute_tide_dates(centroid, output['dates'], ocean_tide, load_tide)
# If you have measure tide levels you can also use those instead.
# When using your own file make sure that the dates are in UTC time, as the CoastSat shorelines are also in UTC
@@ -330,8 +328,9 @@
ax.text(0.5,0.95, key, bbox=dict(boxstyle="square", ec='k',fc='w'), ha='center',
va='top', transform=ax.transAxes, fontsize=14)
ax.legend()
+fig.savefig(os.path.join(filepath,'%s_timeseries_corrected.jpg'%sitename),dpi=200)
-#%% 5. Time-series post-processing
+#%% 6. Time-series post-processing
# load mapped shorelines from 1984 (mapped with the previous code)
filename_output = os.path.join(os.getcwd(),'examples','NARRA_output.pkl')
@@ -356,14 +355,14 @@
va='center', ha='right', bbox=dict(boxstyle="square", ec='k',fc='w'))
# load long time-series (1984-2021)
-filepath = os.path.join(os.getcwd(),'examples','NARRA_time_series_tidally_corrected.csv')
-df = pd.read_csv(filepath, parse_dates=['dates'])
+filepath_ts = os.path.join(os.getcwd(),'examples','NARRA_time_series_tidally_corrected.csv')
+df = pd.read_csv(filepath_ts, parse_dates=['dates'])
dates = [_.to_pydatetime() for _ in df['dates']]
cross_distance = dict([])
for key in transects.keys():
cross_distance[key] = np.array(df[key])
-#%% 5.1 Remove outliers
+#%% 6.1 Remove outliers
# plot Otsu thresholds for the mapped shorelines
fig,ax = plt.subplots(1,1,figsize=[12,5],tight_layout=True)
@@ -374,7 +373,7 @@
ax.set(title='Otsu thresholds on MNDWI for the %d shorelines mapped'%len(output['shorelines']),
ylim=[-0.6,0.2],ylabel='otsu threshold')
ax.legend(loc='upper left')
-fig.savefig(os.path.join(inputs['filepath'], inputs['sitename'], 'otsu_threhsolds.jpg'))
+fig.savefig(os.path.join(inputs['filepath'], inputs['sitename'], '%s_otsu_thresholds.jpg'%sitename), dpi=200)
# remove outliers in the time-series (despiking)
settings_outliers = {'otsu_threshold': [-.5,0], # min and max intensity threshold use for contouring the shoreline
@@ -383,8 +382,11 @@
}
cross_distance = SDS_transects.reject_outliers(cross_distance,output,settings_outliers)
-#%% 5.2 Seasonal averaging
+#%% 6.2 Seasonal averaging
+fp_seasonal = os.path.join(filepath,'jpg_files','seasonal_timeseries')
+if not os.path.exists(fp_seasonal): os.makedirs(fp_seasonal)
+print('Outputs will be saved in %s'%fp_seasonal)
# compute seasonal averages along each transect
season_colors = {'DJF':'C3', 'MAM':'C1', 'JJA':'C2', 'SON':'C0'}
for key in cross_distance.keys():
@@ -408,9 +410,13 @@
ax.plot(dict_seas[seas]['dates'], dict_seas[seas]['chainages'],
'o', mec='k', color=season_colors[seas], label=seas,ms=5)
ax.legend(loc='lower left',ncol=6,markerscale=1.5,frameon=True,edgecolor='k',columnspacing=1)
+    fig.savefig(os.path.join(fp_seasonal,'%s_%s_timeseries_seasonal.jpg'%(sitename,key)), dpi=200)
+
+#%% 6.3 Monthly averaging
-#%% 5.3 Monthly averaging
-
+fp_monthly = os.path.join(filepath,'jpg_files','monthly_timeseries')
+if not os.path.exists(fp_monthly): os.makedirs(fp_monthly)
+print('Outputs will be saved in %s'%fp_monthly)
# compute monthly averages along each transect
month_colors = plt.get_cmap('tab20')
for key in cross_distance.keys():
@@ -434,13 +440,126 @@
ax.plot(dict_month[month]['dates'], dict_month[month]['chainages'],
'o', mec='k', color=month_colors(k), label=month,ms=5)
ax.legend(loc='lower left',ncol=7,markerscale=1.5,frameon=True,edgecolor='k',columnspacing=1)
+    fig.savefig(os.path.join(fp_monthly,'%s_%s_timeseries_monthly.jpg'%(sitename,key)), dpi=200)
+
+#%% 7. Beach slope estimation
+
+# This section uses the same long-term time-series of shoreline change to demonstrate how to estimate the beach-face slope.
+# For a more detailed tutorial visit https://github.com/kvos/CoastSat.slope
+
+# create folder to save outputs from slope estimation
+fp_slopes = os.path.join(filepath,'slope_estimation')
+if not os.path.exists(fp_slopes):
+ os.makedirs(fp_slopes)
+print('Outputs will be saved in %s'%fp_slopes)
+
+# load mapped shorelines from 1984 using Landsat 5, 7 and 8
+filename_output = os.path.join(os.getcwd(),'examples','NARRA_output.pkl')
+with open(filename_output, 'rb') as f:
+ output = pickle.load(f)
+# remove duplicates (images taken on the same date by the same satellite)
+output = SDS_tools.remove_duplicates(output)
+# remove inaccurate georeferencing (set threshold to 10 m)
+output = SDS_tools.remove_inaccurate_georef(output, 10)
+# compute intersections
+settings_transects = { # parameters for computing intersections
+ 'along_dist': 25, # along-shore distance to use for computing the intersection
+ 'min_points': 3, # minimum number of shoreline points to calculate an intersection
+ 'max_std': 15, # max std for points around transect
+ 'max_range': 30, # max range for points around transect
+ 'min_chainage': -100, # largest negative value along transect (landwards of transect origin)
+ 'multiple_inter': 'auto', # mode for removing outliers ('auto', 'nan', 'max')
+ 'auto_prc': 0.1, # percentage of the time that multiple intersects are present to use the max
+ }
+cross_distance = SDS_transects.compute_intersection_QC(output, transects, settings_transects)
+# remove outliers in the time-series (coastal despiking)
+settings_outliers = {'max_cross_change': 40, # maximum cross-shore change observable between consecutive timesteps
+ 'otsu_threshold': [-.5,0], # min and max intensity threshold use for contouring the shoreline
+ 'plot_fig': False, # whether to plot the intermediate steps
+ }
+cross_distance = SDS_transects.reject_outliers(cross_distance,output,settings_outliers)
+
+# plot time-series
+SDS_slope.plot_cross_distance(output['dates'],cross_distance)
+
+# slope estimation settings
+days_in_year = 365.2425
+seconds_in_day = 24*3600
+settings_slope = {'slope_min': 0.035, # minimum slope to trial
+ 'slope_max': 0.2, # maximum slope to trial
+ 'delta_slope': 0.005, # slope increment
+ 'n0': 50, # parameter for Nyquist criterium in Lomb-Scargle transforms
+ 'freq_cutoff': 1./(seconds_in_day*30), # 1 month frequency
+ 'delta_f': 100*1e-10, # deltaf for identifying peak tidal frequency band
+ 'prc_conf': 0.05, # percentage above minimum to define confidence bands in energy curve
+ 'plot_fig': True, # whether to plot the intermediary products during analysis
+ }
+# range of slopes to test for
+beach_slopes = SDS_slope.range_slopes(settings_slope['slope_min'], settings_slope['slope_max'], settings_slope['delta_slope'])
+
+# range of dates over which to perform the analysis (2 Landsat satellites)
+settings_slope['date_range'] = [1999,2022]
+# re-write in datetime objects (same as shoreline in UTC)
+settings_slope['date_range'] = [pytz.utc.localize(datetime(settings_slope['date_range'][0],5,1)),
+ pytz.utc.localize(datetime(settings_slope['date_range'][1],1,1))]
+# clip the time-series between 1999 and 2022 as we need at least 2 Landsat satellites
+idx_dates = [np.logical_and(_>settings_slope['date_range'][0],_