Making W&C using D50

pfeiffea committed Sep 12, 2024
1 parent 69c8a08 commit 33819bd
Showing 1 changed file with 225 additions and 31 deletions.
@@ -20,7 +20,7 @@
from landlab.data_record.data_record import DataRecord
from landlab.grid.network import NetworkModelGrid

-_SUPPORTED_TRANSPORT_METHODS = frozenset(("WilcockCrowe",))
+_SUPPORTED_TRANSPORT_METHODS = frozenset(("WilcockCrowe", "WilcockCroweD50"))
_SUPPORTED_ACTIVE_LAYER_METHODS = frozenset(
("WongParker", "GrainSizeDependent", "Constant10cm")
)
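
The new "WilcockCroweD50" option is requested at instantiation. A minimal usage sketch, assuming grid, parcels, and flow_director have already been built as in the component's standard examples; the argument values shown are hypothetical:

nst = NetworkSedimentTransporter(
    grid,
    parcels,
    flow_director,
    bed_porosity=0.3,
    g=9.81,
    fluid_density=1000.0,
    transport_method="WilcockCroweD50",  # new option added by this commit
    active_layer_method="WongParker",
)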
@@ -366,6 +366,9 @@ def __init__(
if self._transport_method == "WilcockCrowe":
self._update_transport_time = self._calc_transport_wilcock_crowe

if self._transport_method == "WilcockCroweD50":
self._update_transport_time = self._calc_transport_wilcock_crowe_d50

if active_layer_method in _SUPPORTED_ACTIVE_LAYER_METHODS:
self._active_layer_method = active_layer_method
else:
@@ -405,6 +408,11 @@ def time(self):
def d_mean_active(self):
"""Mean parcel grain size of active parcels aggregated at link."""
return self._d_mean_active

@property
def d50_active(self):
"""Median parcel grain size of active parcels aggregated at link."""
return self._d50_active

@property
def rhos_mean_active(self):
@@ -462,35 +470,32 @@ def _calculate_mean_D_and_rho(self):
# self._d_mean_active = self._grid.zeros(at="link")
# self._rhos_mean_active = self._grid.zeros(at="link")

# 8/19 XXX these aren't filtered for only active parcels
self._rhos_mean_active = aggregate_items_as_mean(
self._parcels.dataset["element_id"].values[:, -1].astype(int),
self._parcels.dataset["density"].values.reshape(-1),
weights=self._parcels.dataset["volume"].values[:, -1],
size=self._grid.number_of_links,
)

self._d_mean_active = aggregate_items_as_mean(
self._parcels.dataset["element_id"].values[:, -1].astype(int),
self._parcels.dataset["D"].values.reshape(-1),
weights=self._parcels.dataset["volume"].values[:, -1],
size=self._grid.number_of_links,
)

# aggregate_items_as_mean(
# self._rhos_mean_active,
# self.grid.number_of_links,
# self._parcels.dataset.element_id.values[:, -1].astype(int),
# len(self._parcels.dataset.element_id.values[:, -1]),
# self._parcels.dataset.density.values.reshape(-1),
# self._parcels.dataset.volume.values[:, -1],
# )
# aggregate_items_as_mean(
# self._d_mean_active,
# self.grid.number_of_links,
# self._parcels.dataset.element_id.values[:, -1].astype(int),
# len(self._parcels.dataset.element_id.values[:, -1]),
# self._parcels.dataset.D.values.reshape(-1),
# self._parcels.dataset.volume.values[:, -1],
# )
# new code to calc D50 instead of d_mean (8/19/24)
self._d50_active = np.full(self.grid.number_of_links, np.nan)
for link in range(self.grid.number_of_links):
    mask_here = self._parcels.dataset.element_id.values[:, -1] == link
    mask_active = self._parcels.dataset.active_layer[:, -1] == _ACTIVE

    parcel_vol = self._parcels.dataset.volume.values[mask_here & mask_active, -1]
    parcel_D = self._parcels.dataset.D.values[mask_here & mask_active, -1]

    self._d50_active[link] = calculate_x_percentile_grain_size(parcel_D, parcel_vol, 50)
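
A toy illustration of the masking above, with hypothetical arrays standing in for the DataRecord columns:

import numpy as np

element_id = np.array([0, 0, 1, 1])      # link occupied by each parcel
active_layer = np.array([1, 0, 1, 1])    # 1 = parcel is in the active layer
volume = np.array([2.0, 5.0, 1.0, 1.0])  # m^3
D = np.array([0.02, 0.10, 0.04, 0.06])   # m

link = 1
mask = (element_id == link) & (active_layer == 1)
print(D[mask], volume[mask])  # only active parcels on this link feed the D50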


def _partition_active_and_storage_layers(self, **kwds):
"""For each parcel in the network, determines whether it is in the
@@ -528,37 +533,32 @@ def _partition_active_and_storage_layers(self, **kwds):
)

# calculate taustar
# taustar = tau / (
# (self._rhos_mean_active - self._fluid_density)
# * self._g
# * self._d_mean_active
# )
taustar = np.zeros_like(tau)
np.divide(
tau,
(self._rhos_mean_active - self._fluid_density)
* self._g
-    * self._d_mean_active,
+    * self._d50_active,
where=self._rhos_mean_active > self._fluid_density,
out=taustar,
)
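# Note: with `where=` and `out=`, np.divide leaves entries untouched where
# the condition is False, so links with no active parcels (NaN mean density)
# keep taustar = 0 rather than producing NaN or a divide-by-zero.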

# calculate active layer thickness
self._active_layer_thickness = (
0.515
-    * self._d_mean_active
+    * self._d50_active
* (3.09 * (taustar - 0.0549).clip(0.0, None) ** 0.56)
) # in units of m

elif self._active_layer_method == "GrainSizeDependent":
# Set all active layers to a multiple of the link median grain size
self._active_layer_thickness = (
-    self._d_mean_active * self._active_layer_d_multiplier
+    self._d50_active * self._active_layer_d_multiplier
)

elif self._active_layer_method == "Constant10cm":
# Set all active layers to 10 cm thickness.
-    self._active_layer_thickness = 0.1 * np.ones_like(self._d_mean_active)
+    self._active_layer_thickness = 0.1 * np.ones_like(self._d50_active)

# If links have no parcels, we still need to assign them an active layer
# thickness..
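
A numeric sketch of the Wong and Parker scaling above, using hypothetical link values (one link carrying active sediment, one empty):

import numpy as np

fluid_density, g = 1000.0, 9.81
tau = np.array([60.0, 0.0])        # bed shear stress per link, Pa
rhos = np.array([2650.0, np.nan])  # mean active-parcel density, kg/m^3
d50 = np.array([0.05, np.nan])     # median active grain size, m

taustar = np.zeros_like(tau)
np.divide(tau, (rhos - fluid_density) * g * d50,
          where=rhos > fluid_density, out=taustar)

thickness = 0.515 * d50 * (3.09 * (taustar - 0.0549).clip(0.0, None) ** 0.56)
print(taustar, thickness)  # ~[0.074, 0.0] and ~[0.0087 m, nan]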
@@ -825,6 +825,142 @@ def _calc_transport_wilcock_crowe(self):
self._grid.at_link["sediment__active__volume"] = self._vol_act
self._grid.at_link["sediment__active__sand_fraction"] = frac_sand

def _calc_transport_wilcock_crowe_d50(self):
"""Method to determine the transport time for each parcel in the active
layer using a sediment transport equation.
Note: could have options here (e.g. Wilcock and Crowe, FLVB, MPM, etc)
"""
# Initialize _pvelocity, the virtual velocity of each parcel
# (link length / link travel time)
self._pvelocity = np.zeros(self._num_parcels)

# parcel attribute arrays from DataRecord

Darray = self._parcels.dataset.D[:, self._time_idx].values
Activearray = self._parcels.dataset.active_layer[:, self._time_idx].values
Rhoarray = self._parcels.dataset.density.values
Volarray = self._parcels.dataset.volume[:, self._time_idx].values
Linkarray = self._parcels.dataset.element_id[
:, self._time_idx
].values # link that the parcel is currently in

R = (Rhoarray - self._fluid_density) / self._fluid_density

# parcel attribute arrays to populate below
frac_sand_array = np.zeros(self._num_parcels)
vol_act_array = np.zeros(self._num_parcels)
Sarray = np.zeros(self._num_parcels)
Harray = np.zeros(self._num_parcels)
Larray = np.zeros(self._num_parcels)

D50_activearray = np.full(self._num_parcels, np.nan)
active_layer_thickness_array = np.full(self._num_parcels, np.nan)

# find active sand
# since find active already sets all prior timesteps to False, we
# can use D for all timesteps here.
findactivesand = (
self._parcels.dataset.D < _SAND_SIZE
) * self._active_parcel_records

sand_parcel_volumes = self._parcels.dataset.volume.values[:, -1].copy()
sand_parcel_volumes[~findactivesand[:, -1].astype(bool)] = 0.0

vol_act_sand = aggregate_items_as_sum(
self._parcels.dataset["element_id"].values[:, -1].astype(int),
sand_parcel_volumes,
size=self._grid.number_of_links,
)

frac_sand = np.zeros_like(self._vol_act)
frac_sand[self._vol_act != 0.0] = (
vol_act_sand[self._vol_act != 0.0] / self._vol_act[self._vol_act != 0.0]
)
frac_sand[np.isnan(frac_sand)] = 0.0
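# frac_sand is the per-link sand fraction of the active layer; it enters the
# Wilcock-Crowe reference shear stress below.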

# Calc attributes for each link, map to parcel arrays
for i in range(self._grid.number_of_links):
active_here = np.nonzero(
np.logical_and(Linkarray == i, Activearray == _ACTIVE)
)[0]
d_act_i = Darray[active_here]
vol_act_i = Volarray[active_here]
rhos_act_i = Rhoarray[active_here]
vol_act_tot_i = np.sum(vol_act_i)

self._d50_active[i] = calculate_x_percentile_grain_size(d_act_i, vol_act_i, 50)

if vol_act_tot_i > 0:
self._rhos_mean_active[i] = np.sum(rhos_act_i * vol_act_i) / (
vol_act_tot_i
)
else:
self._rhos_mean_active[i] = np.nan

D50_activearray[Linkarray == i] = self._d50_active[i]
frac_sand_array[Linkarray == i] = frac_sand[i]
vol_act_array[Linkarray == i] = self._vol_act[i]
Sarray[Linkarray == i] = self._grid.at_link["channel_slope"][i]
Harray[Linkarray == i] = self._grid.at_link["flow_depth"][i]
Larray[Linkarray == i] = self._grid.at_link["reach_length"][i]
active_layer_thickness_array[Linkarray == i] = self._active_layer_thickness[
i
]

# Wilcock and Crowe calculate transport for all parcels (active and inactive)
taursg = _calculate_reference_shear_stress(
self._fluid_density, R, self._g, D50_activearray, frac_sand_array
)

frac_parcel = np.nan * np.zeros_like(Volarray)
frac_parcel[vol_act_array != 0.0] = (
Volarray[vol_act_array != 0.0] / vol_act_array[vol_act_array != 0.0]
)

b = 0.67 / (1.0 + np.exp(1.5 - Darray / D50_activearray))

tau = self._fluid_density * self._g * Harray * Sarray
tau = np.atleast_1d(tau)

taur = taursg * (Darray / D50_activearray) ** b
tautaur = tau / taur
self._tautaur = tautaur.copy()  # used below for transport-dependent abrasion
tautaur_cplx = tautaur.astype(np.complex128)
# ^ workaround needed because NumPy fails on non-integer powers of negative numbers

W = 0.002 * np.power(tautaur_cplx.real, 7.5)
W[tautaur >= 1.35] = 14 * np.power(
(1 - (0.894 / np.sqrt(tautaur_cplx.real[tautaur >= 1.35]))), 4.5
)
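# W is the dimensionless transport rate W* of Wilcock and Crowe (2003),
# evaluated on the low-stress (power-law) and high-stress branches.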

active_parcel_idx = Activearray == _ACTIVE

# compute parcel virtual velocity, m/s
self._pvelocity[active_parcel_idx] = (
W.real[active_parcel_idx]
* (tau[active_parcel_idx] ** (3.0 / 2.0))
* frac_parcel[active_parcel_idx]
/ (self._fluid_density ** (3.0 / 2.0))
/ self._g
/ R[active_parcel_idx]
/ active_layer_thickness_array[active_parcel_idx]
)

self._pvelocity[np.isnan(self._pvelocity)] = 0.0

if np.max(self._pvelocity) > 1:
warnings.warn(
"NetworkSedimentTransporter: Maximum parcel virtual velocity exceeds 1 m/s",
stacklevel=2,
)

# Assign those things to the grid -- might be useful for plotting
self._grid.at_link["sediment_total_volume"] = self._vol_tot
self._grid.at_link["sediment__active__volume"] = self._vol_act
self._grid.at_link["sediment__active__sand_fraction"] = frac_sand


def _move_parcel_downstream(self, dt):
"""Method to update parcel location for each parcel in the active layer."""

@@ -950,7 +1086,7 @@ def _move_parcel_downstream(self, dt):
else:
abrasion_now = self._parcels.dataset.abrasion_rate.copy()

-                            # reduce D and volume due to abrasion
+            # reduce D and volume due to abrasion
vol = _calculate_parcel_volume_post_abrasion(
self._parcels.dataset.volume[active_parcel_ids, self._time_idx],
distance_to_travel_this_timestep[active_parcel_ids],
@@ -1127,7 +1263,7 @@ def _calculate_alluvium_depth(


def _calculate_reference_shear_stress(
-    fluid_density, R, g, mean_active_grain_size, frac_sand
+    fluid_density, R, g, median_active_grain_size, frac_sand
):
"""Calculate reference Shields stress (taursg) using the sand content of
the bed surface, as per Wilcock and Crowe (2003).
@@ -1140,8 +1276,8 @@ def _calculate_reference_shear_stress(
Submerged specific gravity of the sediment, (rho_s - rho_f) / rho_f.
g: float
Gravitational acceleration.
-    mean_active_grain_size: float
-        Mean grain size of the 'active' sediment parcels.
+    median_active_grain_size: float
+        Median grain size of the 'active' sediment parcels.
frac_sand: float
Fraction of the bed surface grain size composed of sand sized parcels.
@@ -1162,7 +1298,7 @@
fluid_density
* R
* g
-        * mean_active_grain_size
+        * median_active_grain_size
* (0.021 + 0.015 * np.exp(-20.0 * frac_sand))
)
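
A quick check of the sand-content dependence in the expression above (hypothetical values, with R = 1.65 and a 0.05 m median grain size):

import numpy as np

for frac_sand in (0.0, 0.1, 0.3):
    taursg = 1000.0 * 1.65 * 9.81 * 0.05 * (0.021 + 0.015 * np.exp(-20.0 * frac_sand))
    print(frac_sand, round(taursg, 2))
# prints 29.14, 18.64, 17.03: more surface sand lowers the reference stress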

Expand Down Expand Up @@ -1315,3 +1451,61 @@ def _calculate_parcel_grain_diameter_post_abrasion(
) ** (1.0 / 3.0)

return abraded_grain_diameter

def calculate_x_percentile_grain_size(D_array, vol_array, percentile):
    """Calculate the percentile grain size (e.g., D50) given an array of
    parcel grain diameters and associated parcel volumes.

    Returns NaN if the array is empty.

    Parameters
    ----------
    D_array : array
        Grain diameters.
    vol_array : array
        Parcel volumes associated with D_array.
    percentile : float
        Grain size distribution percentile of interest. 50 returns the
        median grain diameter, D50.

    Examples
    --------
    >>> import numpy as np
    >>> from numpy.testing import assert_almost_equal

    If all grains are the same size, D50 should be that size.

    >>> D_array = np.array([2, 2, 2])
    >>> vol_array = np.array([4, 1, 1])
    >>> calculate_x_percentile_grain_size(D_array, vol_array, 50)
    2.0

    Large parcels of small diameter with one outlier large grain; D16 should
    be small.

    >>> D_array = np.array([0.5, 0.5, 1, 1, 100])
    >>> vol_array = np.array([4, 4, 0.1, 0.1, 0.1])
    >>> calculate_x_percentile_grain_size(D_array, vol_array, 16)
    0.5
    """
    if D_array.size > 1:  # two or more parcels: interpolate
        idx_Dsort = np.argsort(D_array)
        D_sorted = D_array[idx_Dsort]
        vol_sorted_by_D = vol_array[idx_Dsort]
        cum_vol_sorted_by_D = np.cumsum(vol_sorted_by_D)
        vol_percentile = cum_vol_sorted_by_D / np.max(cum_vol_sorted_by_D)

        closest_two = np.argsort(np.abs(vol_percentile - percentile / 100))[:2]
        two_closest_D = D_sorted[closest_two]
        two_closest_percentile = vol_percentile[closest_two]

        D_x = (
            (two_closest_D[1] - two_closest_D[0])
            * (
                (percentile / 100 - two_closest_percentile[0])
                / (two_closest_percentile[1] - two_closest_percentile[0])
            )
            + two_closest_D[0]
        )  # Bunte and Abt (2001) Eqn 2.15

    elif D_array.size == 1:  # a single parcel: the percentile is its diameter
        D_x = D_array[0]

    else:  # no parcels
        D_x = np.nan

    return D_x
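
Usage sketch for the interpolation branch (values chosen so the answer is exact):

>>> D_array = np.array([1.0, 2.0, 4.0])
>>> vol_array = np.array([1.0, 1.0, 2.0])  # cumulative fractions 0.25, 0.5, 1.0
>>> calculate_x_percentile_grain_size(D_array, vol_array, 75)
3.0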
