
fixing the dropout rate code (#72)

addresses issue #53
Author: Itai Caspi (committed via GitHub)
Date: 2018-11-08 16:53:47 +02:00
Parent: 389c65cbbe
Commit: 3a0a1159e9
11 changed files with 33 additions and 33 deletions
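
Why the change: batchnorm_activation_dropout() took both a boolean "dropout" flag and a "dropout_rate", so every caller had to keep the two in sync (the class at the bottom of this diff computed dropout=self.dropout_rate > 0 by hand). The fix removes the redundant flag and gates the dropout op on the rate itself. Below is a condensed sketch of the fixed helper, assuming the TF 1.x API (tf.layers) that coach used at the time; the batchnorm and activation branches are elided since this commit does not touch them:

import tensorflow as tf  # TF 1.x API (tf.layers)

def batchnorm_activation_dropout(input_layer, batchnorm, activation_function,
                                 dropout_rate, is_training, name):
    # Condensed sketch of the fixed helper, not the full repo code.
    layers = [input_layer]

    # batchnorm / activation branches elided; see the diff below for context.

    # dropout: gated on the rate itself instead of a separate boolean flag
    if dropout_rate > 0:
        layers.append(
            tf.layers.dropout(layers[-1], dropout_rate,
                              name="{}_dropout".format(name),
                              training=is_training)
        )

    return layers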

rl_coach/architectures/tensorflow_components/layers.py

@@ -8,7 +8,7 @@ from rl_coach.architectures import layers
 from rl_coach.architectures.tensorflow_components import utils
 
 
-def batchnorm_activation_dropout(input_layer, batchnorm, activation_function, dropout, dropout_rate, is_training, name):
+def batchnorm_activation_dropout(input_layer, batchnorm, activation_function, dropout_rate, is_training, name):
     layers = [input_layer]
 
     # batchnorm
@@ -26,7 +26,7 @@ def batchnorm_activation_dropout(input_layer, batchnorm, activation_function, dr
         )
 
     # dropout
-    if dropout:
+    if dropout_rate > 0:
         layers.append(
             tf.layers.dropout(layers[-1], dropout_rate, name="{}_dropout".format(name), training=is_training)
         )
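
For context, tf.layers.dropout already acts as an identity op at inference time (training=False), so the only graph-construction decision is whether a nonzero rate was requested at all, which is exactly what the new dropout_rate > 0 guard checks. A standalone sketch of that behaviour:

import tensorflow as tf  # TF 1.x API

x = tf.ones([1, 4])
is_training = tf.placeholder_with_default(False, shape=())
y = tf.layers.dropout(x, rate=0.5, training=is_training)

with tf.Session() as sess:
    # inference (default): dropout passes the input through, prints all ones
    print(sess.run(y))
    # training: roughly half the units are zeroed, the rest scaled by 1/(1-rate)
    print(sess.run(y, {is_training: True}))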
@@ -100,7 +100,7 @@ class BatchnormActivationDropout(layers.BatchnormActivationDropout):
         """
         return batchnorm_activation_dropout(input_layer, batchnorm=self.batchnorm,
                                             activation_function=self.activation_function,
-                                            dropout=self.dropout_rate > 0, dropout_rate=self.dropout_rate,
+                                            dropout_rate=self.dropout_rate,
                                             is_training=is_training, name=name)
 
     @staticmethod
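
With the flag gone, the descriptor class above forwards only dropout_rate, and whether a dropout op is emitted is decided inside the helper. A hypothetical usage sketch, assuming the post-fix constructor accepts the batchnorm, activation_function, and dropout_rate attributes shown in this diff (the scheme below is illustrative, not from the repo):

from rl_coach.architectures.layers import Dense, BatchnormActivationDropout

# Hypothetical embedder scheme illustrating the fixed behaviour:
scheme = [
    Dense(256),
    BatchnormActivationDropout(batchnorm=True,
                               activation_function='relu',
                               dropout_rate=0.3),   # emits a dropout op
    Dense(256),
    BatchnormActivationDropout(dropout_rate=0),     # no dropout op emitted
]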