Commit

Merge pull request #19 from nasa-nccs-hpda/1.1.0

1.1.0

jordancaraballo authored Dec 19, 2023
2 parents 5e1cb57 + a749a39 commit 1cd1d39
Showing 7 changed files with 240 additions and 8 deletions.
@@ -0,0 +1,10 @@
data,label,ntiles
/explore/nobackup/projects/ilab/projects/srlite/input/Laselva/GE01_20100310_M1BS_1050410004191E00-toa.tif,/explore/nobackup/projects/ilab/projects/CloudMask/products/srlite/v1.2/Laselva/GE01_20100310_M1BS_1050410004191E00-toa.cloudmask.v1.2.tif,1000
/explore/nobackup/projects/ilab/projects/srlite/input/Laselva/GE01_20150621_M1BS_10504100130C2700-toa.tif,/explore/nobackup/projects/ilab/projects/CloudMask/products/srlite/v1.2/Laselva/GE01_20150621_M1BS_10504100130C2700-toa.cloudmask.v1.2.tif,1000
/explore/nobackup/projects/ilab/projects/srlite/input/Laselva/GE01_20160531_M1BS_10500100049D6600-toa.tif,/explore/nobackup/projects/ilab/projects/CloudMask/products/srlite/v1.2/Laselva/GE01_20160531_M1BS_10500100049D6600-toa.cloudmask.v1.2.tif,1000
/explore/nobackup/projects/ilab/projects/srlite/input/Laselva/GE01_20160531_M1BS_10500100049D6800-toa.tif,/explore/nobackup/projects/ilab/projects/CloudMask/products/srlite/v1.2/Laselva/GE01_20160531_M1BS_10500100049D6800-toa.cloudmask.v1.2.tif,1000
/explore/nobackup/projects/ilab/projects/srlite/input/Laselva/GE01_20160608_M1BS_1050010004BA6D00-toa.tif,/explore/nobackup/projects/ilab/projects/CloudMask/products/srlite/v1.2/Laselva/GE01_20160608_M1BS_1050010004BA6D00-toa.cloudmask.v1.2.tif,1000
/explore/nobackup/projects/ilab/projects/srlite/input/Laselva/GE01_20210214_M1BS_10500100227B0500-toa.tif,/explore/nobackup/projects/ilab/projects/CloudMask/products/srlite/v1.2/Laselva/GE01_20210214_M1BS_10500100227B0500-toa.cloudmask.v1.2.tif,1000
/explore/nobackup/projects/ilab/projects/srlite/input/Laselva/QB02_20020606_M1BS_1010010000918F00-toa.tif,/explore/nobackup/projects/ilab/projects/CloudMask/products/srlite/v1.2/Laselva/QB02_20020606_M1BS_1010010000918F00-toa.cloudmask.v1.2.tif,1000
/explore/nobackup/projects/ilab/projects/srlite/input/Laselva/QB02_20070419_M1BS_1010010005952200-toa.tif,/explore/nobackup/projects/ilab/projects/CloudMask/products/srlite/v1.2/Laselva/QB02_20070419_M1BS_1010010005952200-toa.cloudmask.v1.2.tif,1000
/explore/nobackup/projects/ilab/projects/srlite/input/Laselva/QB02_20080408_M1BS_1010010007E4C300-toa.tif,/explore/nobackup/projects/ilab/projects/CloudMask/products/srlite/v1.2/Laselva/QB02_20080408_M1BS_1010010007E4C300-toa.cloudmask.v1.2.tif,1000
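The new CSV above drives preprocessing: each row pairs a top-of-atmosphere (TOA) raster with its cloud-mask label raster and the number of tiles to sample from that pair. As a minimal sketch of how such a file could be consumed (the filename and pandas usage are illustrative assumptions, not the pipeline's actual reader):

import pandas as pd

# Hypothetical reader for the data CSV above; the filename is a placeholder.
df = pd.read_csv("cloudmask_laselva.csv")  # columns: data, label, ntiles
for _, row in df.iterrows():
    # row["data"]   -> TOA raster path
    # row["label"]  -> cloud-mask raster path
    # row["ntiles"] -> number of training tiles to extract from this pair
    print(row["data"], row["label"], row["ntiles"])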
@@ -0,0 +1,91 @@
# --------------------------------------------------------------------------
# Configuration parameters for preprocessing and training on a
# very-high-resolution dataset using deep learning techniques.
# --------------------------------------------------------------------------

experiment_name: cloud-global-standardization
experiment_type: cloud

data_dir: '/lscratch/jacaraba/vhr-cloudmask/cloud.disturbance-global-standardization'
model_dir: /explore/nobackup/projects/ilab/projects/CloudMask/development/disturbance
seed: 42
gpu_devices: 0,1,2,3
mixed_precision: False
xla: False

input_bands:
- CoastalBlue
- Blue
- Green
- Yellow
- Red
- RedEdge
- NIR1
- NIR2

output_bands:
- Blue
- Green
- Red
- NIR1


substract_labels: False
normalize: 1.0
rescale: None

#modify_labels:
# - "x == 0": 8
# - "x == 1": 9
# - "x == 4": 7
# - "x == 3": 0
# - "x == 2": 0
# - "x == 8": 1
# - "x == 9": 2
# - "x == 7": 3

expand_dims: True
tile_size: 256
include_classes: False
augment: True

# standardization functions: local, global, mixed
#metadata_regex: '/explore/nobackup/projects/3sl/development/cnn_landcover/normalization/*.csv'
standardization: 'global'
batch_size: 32
n_classes: 1
test_size: 0.30
learning_rate: 0.0001
max_epochs: 6000
patience: 10

model: "tfc.unet.unet_batchnorm(nclass=1, input_size=(256, 256, 4),maps=[64, 128, 256, 512, 1024])"
loss: 'tf.keras.losses.BinaryCrossentropy()'
optimizer: tf.keras.optimizers.Adam

metrics:
- 'tf.keras.metrics.BinaryAccuracy()'
- 'tf.keras.metrics.Recall()'
- 'tf.keras.metrics.Precision()'
- 'sm.metrics.iou_score'

callbacks:
- "tf.keras.callbacks.ModelCheckpoint(save_best_only=True, mode='min', monitor='val_loss', filepath='${model_dir}/${experiment_name}{epoch:02d}-{val_loss:.2f}.hdf5')"
- "tf.keras.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=4)"
- "tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=10, restore_best_weights=False)"
- "tf.keras.callbacks.TerminateOnNaN()"

# window_size: 8192
window_size: 10000
pred_batch_size: 32
inference_overlap: 0.50
inference_treshold: 0.50

# Prediction location
inference_regex_list:
- '/explore/nobackup/people/ameddens/1_SmallSat/2_Neigh_API/cloud_mask_img/WV02_20150626_M1BS_1030010044251900-toa.tif'
- '/explore/nobackup/people/ameddens/1_SmallSat/2_Neigh_API/cloud_mask_img/QB02_20020603_M1BS_10100100008D5B00-toa.tif'
- '/explore/nobackup/people/ameddens/1_SmallSat/2_Neigh_API/cloud_mask_img/*.tif'
inference_save_dir: '/explore/nobackup/projects/ilab/projects/CloudMask/development/disturbance/predictions'
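This config sets standardization: 'global'. As a rough sketch of what global standardization means here (per-band statistics computed once over the whole training set, rather than per tile as in 'local' mode) — an illustrative assumption, not the tensorflow-caney implementation:

import numpy as np

def global_standardize(tile: np.ndarray,
                       means: np.ndarray,
                       stds: np.ndarray) -> np.ndarray:
    """Standardize a (H, W, C) tile with dataset-wide band statistics.

    means/stds are (C,) vectors computed once over the entire training
    set, so every tile is scaled identically ("global"), in contrast to
    "local" standardization, which uses each tile's own statistics.
    """
    return (tile - means) / (stds + 1e-8)  # epsilon guards flat bands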
@@ -0,0 +1,118 @@
experiment_name: cloudmask-alaska
experiment_type: cloudmask

data_dir: /lscratch/jacaraba/srlite/clouds-binary-alaska
model_dir: /explore/nobackup/projects/ilab/projects/CloudMask/development/alaska-norm

seed: 42
gpu_devices: 0,1,2,3
mixed_precision: True
xla: False

#input_bands:
# - CoastalBlue
# - Blue
# - Green
# - Yellow
# - Red
# - RedEdge
# - NIR1
# - NIR2

input_bands:
- CoastalBlue
- Blue
- Green
- Yellow
- Red
- RedEdge
- NIR1
- NIR2

#input_bands:
# - Blue
# - Green
# - Red
# - NIR1
# - HOM1
# - HOM2

output_bands:
- Blue
- Green
- Red
- NIR1

substract_labels: False
normalize: 10000.0
#standardization: 'local'

expand_dims: True
tile_size: 256
include_classes: True
augment: True

# standardization functions: local, global, mixed
#standardization: 'local'
batch_size: 32
n_classes: 1
test_size: 0.20
learning_rate: 0.0001
max_epochs: 6000
patience: 10


model: "tfc.unet.unet_batchnorm(nclass=1, input_size=(256, 256, 4),maps=[64, 128, 256, 512, 1024])"
#model: "sm.Unet('resnet34', input_shape=(128, 128, 4), encoder_weights=None, classes=1, activation='sigmoid')"

loss: 'tf.keras.losses.BinaryCrossentropy()'
#loss: sm.losses.CategoricalFocalLoss
#loss: sm.losses.categorical_focal_dice_loss
#loss: sm.losses.categorical_focal_jaccard_loss
#loss: 'sm.losses.BinaryCELoss()'
#loss: 'sm.losses.DiceLoss(smooth=1e-08)'

optimizer: tf.keras.optimizers.Adam

metrics:
- 'tf.keras.metrics.BinaryAccuracy(threshold=0.5)'
- 'tf.keras.metrics.Recall()'
- 'tf.keras.metrics.Precision()'
- 'sm.metrics.iou_score'

callbacks:
- "tf.keras.callbacks.ModelCheckpoint(save_best_only=True, mode='min', monitor='val_loss', filepath='${model_dir}/${experiment_name}{epoch:02d}-{val_loss:.2f}.hdf5')"
- "tf.keras.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=4)"
- "tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=10, restore_best_weights=False)"
- "tf.keras.callbacks.TerminateOnNaN()"

# window_size: 8192
window_size: 5000
pred_batch_size: 128
inference_overlap: 0.50
inference_treshold: 0.50

# Prediction location
#model_filename: /adapt/nobackup/projects/ilab/projects/CloudMask/development/alaska/cloudmask-alaska36-0.02.hdf5
#model_filename: /adapt/nobackup/projects/ilab/projects/CloudMask/development/alaska-std/cloudmask-alaska89-0.01.hdf5
model_filename: /explore/nobackup/projects/ilab/projects/CloudMask/development/alaska-norm/cloudmask-alaska25-0.02.hdf5
#model_filename: '/explore/nobackup/projects/ilab/projects/CloudMask/development/alaska-std/cloudmask-alaska89-0.01.hdf5'
#inference_regex: '/adapt/nobackup/people/mwooten3/Senegal_LCLUC/testForMark/5-toas/WV02_20101020_M1BS_1030010007BBFA00-toa.tif'
#inference_regex: '/adapt/nobackup/projects/ilab/projects/srlite/input/TOA_v4/ahri_plus_p1_UTM3N/5-toas/*.tif'
#inference_save_dir: /adapt/nobackup/projects/ilab/projects/Vietnam/Jordan/VIETNAM_PRIORITY/clouds-binary-tensorflow-2022-05-02-senegal


#inference_regex: '/adapt/nobackup/projects/ilab/projects/srlite/input/TOA_v2/Siberia/5-toas/*.tif'
#inference_regex: '/adapt/nobackup/projects/ilab/projects/srlite/input/TOA_v2/Siberia/5-toas/WV02_20130218_M1BS_103001001F5AEF00-toa.tif'
#inference_save_dir: /adapt/nobackup/projects/ilab/projects/Vietnam/Jordan/VIETNAM_PRIORITY/clouds-binary-tensorflow-2022-05-02-siberia
#inference_regex: '/adapt/nobackup/projects/ilab/projects/srlite/input/YukonDelta/*-toa.tif'
#inference_regex: '/adapt/nobackup/projects/ilab/projects/srlite/input/Alaska/*M1BS*0-toa.tif'
#nference_regex: '/explore/nobackup/projects/ilab/data/srlite/toa/Alaska_ahri_20221004/initial_output/split_1/5-toas/*-toa.tif'
#inference_regex: '/explore/nobackup/projects/ilab/data/srlite/toa/Alaska_ahri_20221004/initial_output/split_1/5-toas/WV03_20200730_M1BS_104001005CC6ED00-toa.tif'
#inference_regex: '/explore/nobackup/projects/ilab/projects/srlite/input/Siberia/WV02_20130216_M1BS_1030010020848600-toa.tif'
#inference_regex: '/explore/nobackup/projects/ilab/projects/Vietnam/Sarah/data/Keelin00_20120130_data.tif'
#inference_regex: '/explore/nobackup/people/ameddens/1_SmallSat/2_Neigh_API/cloud_mask_img/WV02_20150626_M1BS_1030010044251900-toa.tif'
inference_regex: '/explore/nobackup/people/ameddens/1_SmallSat/2_Neigh_API/cloud_mask_img/*.tif'
inference_save_dir: '/explore/nobackup/projects/ilab/projects/CloudMask/products/disturbance_request'
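The inference settings above (window_size, pred_batch_size, inference_overlap, inference_treshold) describe sliding-window prediction over scenes too large to feed to the network at once. Below is a heavily simplified sketch of that idea, assuming a fully convolutional Keras-style model; the real pipeline batches pred_batch_size windows at a time and blends overlapping predictions, which this sketch omits (overlapping windows simply overwrite earlier output):

import numpy as np

def sliding_window_predict(image, model, window=5000,
                           overlap=0.50, threshold=0.50):
    """Illustrative sketch: tile a (H, W, C) scene, predict each
    window, and binarize probabilities with the inference threshold."""
    stride = max(1, int(window * (1 - overlap)))
    mask = np.zeros(image.shape[:2], dtype=np.uint8)
    for y in range(0, image.shape[0], stride):
        for x in range(0, image.shape[1], stride):
            patch = image[y:y + window, x:x + window]
            ph, pw = patch.shape[:2]
            # pad edge windows so the model always sees a full window
            patch = np.pad(patch,
                           ((0, window - ph), (0, window - pw), (0, 0)))
            prob = model.predict(patch[np.newaxis, ...])[0, ..., 0]
            mask[y:y + ph, x:x + pw] = \
                (prob[:ph, :pw] > threshold).astype(np.uint8)
    return mask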
2 changes: 1 addition & 1 deletion tools/fix_labels.py
@@ -28,7 +28,7 @@ def arr_to_tif(raster_f, segments, out_tif='s.tif', ndval=-10001):
nodatavals = src.read_masks(1).astype('int16')

# load numpy array if file is given
- if type(segments) == 'str':
+ if isinstance(segments, str):
segments = np.load(segments)
segments = segments.astype('int16')
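This one-line fix matters: type(segments) == 'str' compares a type object against the string literal 'str', which is never true, so a path passed in as a string was never loaded from disk. isinstance(segments, str) performs the intended check (the same change is applied to tools/validation.py below). A quick demonstration:

segments = "labels.npy"
type(segments) == 'str'      # False: a type object never equals the string 'str'
type(segments) == str        # True, but ignores str subclasses
isinstance(segments, str)    # True: the idiomatic type check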

2 changes: 1 addition & 1 deletion tools/validation.py
@@ -38,7 +38,7 @@ def arr_to_tif(raster_f, segments, out_tif='s.tif', ndval=-10001):
nodatavals = src.read_masks(1).astype('int16')

# load numpy array if file is given
- if type(segments) == 'str':
+ if isinstance(segments, str):
segments = np.load(segments)
segments = segments.astype('int16')

Empty file removed vhr_cloudmask/model/.gitkeep
25 changes: 19 additions & 6 deletions vhr_cloudmask/view/cloudmask_cnn_pipeline_cli.py
@@ -3,7 +3,7 @@
import logging
import argparse
from tensorflow_caney.model.pipelines.cnn_segmentation import \
- CNNSegmentation as CloudmaskPipeline
+ CNNSegmentation as CloudMaskPipeline


# -----------------------------------------------------------------------------
@@ -20,19 +20,18 @@ def main():
parser.add_argument('-c',
'--config-file',
type=str,
- required=True,
+ required=False,
dest='config_file',
help='Path to the configuration file')

parser.add_argument('-d',
'--data-csv',
type=str,
- required=True,
+ required=False,
dest='data_csv',
help='Path to the data configuration file')

- parser.add_argument(
- '-s',
+ parser.add_argument('-s',
'--step',
type=str,
nargs='*',
@@ -42,13 +41,27 @@
default=['preprocess', 'train', 'predict'],
choices=['preprocess', 'train', 'predict'])

+ parser.add_argument('-m',
+ '--model-filename',
+ type=str,
+ required=False,
+ dest='model_filename',
+ help='Path to model file')
+
+ parser.add_argument('-o',
+ '--output-dir',
+ type=str,
+ required=False,
+ dest='output_dir',
+ help='Path to output directory')

args = parser.parse_args()

# Setup timer to monitor script execution time
timer = time.time()

# Initialize pipeline object
- pipeline = CloudmaskPipeline(args.config_file, args.data_csv)
+ pipeline = CloudMaskPipeline(args.config_file, args.data_csv)

# Cloudmask pipeline steps
if "preprocess" in args.pipeline_step:

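With the new optional flags, the CLI can run the full pipeline from a config, or skip straight to prediction with an explicit model file. Hypothetical invocations based on the arguments defined above (all paths are placeholders):

# Full pipeline from a config and data CSV:
python vhr_cloudmask/view/cloudmask_cnn_pipeline_cli.py \
    -c configs/cloudmask_alaska.yaml \
    -d configs/cloudmask_laselva.csv \
    -s preprocess train predict

# Prediction only, pointing at a trained model and an output directory:
python vhr_cloudmask/view/cloudmask_cnn_pipeline_cli.py \
    -c configs/cloudmask_alaska.yaml \
    -m cloudmask-alaska25-0.02.hdf5 \
    -o predictions/ \
    -s predict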