From 1a655eecef4c518a21ce91ef33eaefba3d2ef51c Mon Sep 17 00:00:00 2001
From: wang chenyu <494450105@qq.com>
Date: Sun, 6 Jan 2019 13:38:55 +0800
Subject: [PATCH] Do not clip gradient if gradient is `None`

---
 seq2seq/models/seq2seq_model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/seq2seq/models/seq2seq_model.py b/seq2seq/models/seq2seq_model.py
index 423ffb75..2825c24c 100644
--- a/seq2seq/models/seq2seq_model.py
+++ b/seq2seq/models/seq2seq_model.py
@@ -74,7 +74,7 @@ def _clip_gradients(self, grads_and_vars):
     clipped_gradients = []
     variables = []
     for gradient, variable in grads_and_vars:
-      if "embedding" in variable.name:
+      if "embedding" in variable.name and gradient != None:
         tmp = tf.clip_by_norm(
             gradient.values, self.params["optimizer.clip_embed_gradients"])
         gradient = tf.IndexedSlices(tmp, gradient.indices, gradient.dense_shape)
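
For context, below is a minimal sketch of what the patched `_clip_gradients` method looks like after this change, assuming the surrounding structure of the seq2seq model class; everything outside the changed `if` line is reconstructed for illustration and is not part of the diff, and `is not None` is used as the idiomatic spelling of the committed `gradient != None` comparison.

import tensorflow as tf

class Seq2SeqModelSketch(object):
  """Illustrative stand-in for the real model class (assumed name)."""

  def __init__(self, params):
    # e.g. params = {"optimizer.clip_embed_gradients": 0.1}
    self.params = params

  def _clip_gradients(self, grads_and_vars):
    """Clips embedding gradients by norm, skipping variables whose
    gradient is None (e.g. variables unused in the current graph)."""
    clipped_gradients = []
    variables = []
    for gradient, variable in grads_and_vars:
      # The patch adds the None guard so that gradient.values is never
      # accessed on a missing gradient.
      if "embedding" in variable.name and gradient is not None:
        tmp = tf.clip_by_norm(
            gradient.values, self.params["optimizer.clip_embed_gradients"])
        gradient = tf.IndexedSlices(tmp, gradient.indices, gradient.dense_shape)
      clipped_gradients.append(gradient)
      variables.append(variable)
    return list(zip(clipped_gradients, variables))

Without the guard, an optimizer's compute_gradients can return (None, variable) pairs for embedding variables that receive no gradient, and the old code would then fail on gradient.values; the one-line check simply passes such pairs through unclipped.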