From 686604440a75f4219cfbc2c971d110ee38c4b656 Mon Sep 17 00:00:00 2001
From: Lucas Steinmann
Date: Wed, 30 Sep 2020 14:03:53 +0200
Subject: [PATCH] Use correct decoder layer num.

I guess this is a typo or copy-paste error. Though I don't have much
knowledge about this model, it doesn't make sense to define
``num_dec_layers`` and never use it.
---
 transformer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/transformer.py b/transformer.py
index bf915eb..3202c92 100644
--- a/transformer.py
+++ b/transformer.py
@@ -430,7 +430,7 @@ def decoder_layer(self, target, enc_out, input_mask, target_mask, scope):
     def decoder(self, target, enc_out, input_mask, target_mask, scope='decoder'):
         out = target
         with tf.variable_scope(scope):
-            for i in range(self.num_enc_layers):
+            for i in range(self.num_dec_layers):
                 out = self.decoder_layer(out, enc_out, input_mask, target_mask, f'dec_{i}')
         return out
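
For context, a minimal sketch of how the wrong loop bound shows up when the two depths differ. This is not the project's code: the `TinyTransformer` class and its stub `decoder_layer` are hypothetical stand-ins that only count how many layers get applied; only the attribute names `num_enc_layers` and `num_dec_layers` are taken from `transformer.py`.

```python
# Hypothetical, minimal reproduction of the loop-count bug (no TensorFlow needed).
# The stub decoder_layer just records which layer ran.

class TinyTransformer:
    def __init__(self, num_enc_layers, num_dec_layers):
        self.num_enc_layers = num_enc_layers
        self.num_dec_layers = num_dec_layers

    def decoder_layer(self, out, scope):
        # Stand-in for the real decoder block; appends the scope name it ran under.
        return out + [scope]

    def decoder_buggy(self, target):
        out = list(target)
        for i in range(self.num_enc_layers):   # wrong constant: encoder depth
            out = self.decoder_layer(out, f'dec_{i}')
        return out

    def decoder_fixed(self, target):
        out = list(target)
        for i in range(self.num_dec_layers):   # correct constant: decoder depth
            out = self.decoder_layer(out, f'dec_{i}')
        return out


if __name__ == '__main__':
    model = TinyTransformer(num_enc_layers=6, num_dec_layers=2)
    print(len(model.decoder_buggy([])))  # 6 decoder layers applied, not the configured 2
    print(len(model.decoder_fixed([])))  # 2 decoder layers applied, as configured
```

With equal encoder and decoder depths (the common default) the bug is invisible, which is presumably why it went unnoticed; it only changes behaviour once the two settings diverge.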