Mirror of https://github.com/gryf/coach.git
batchnorm fixes + disabling batchnorm in DDPG (#353)
Co-authored-by: James Casbon <casbon+gh@gmail.com>
@@ -26,6 +26,9 @@ from rl_coach.architectures.tensorflow_components import utils
 def batchnorm_activation_dropout(input_layer, batchnorm, activation_function, dropout_rate, is_training, name):
     layers = [input_layer]
 
+    # Rationale: passing a bool here will mean that batchnorm and or activation will never activate
+    assert not isinstance(is_training, bool)
+
     # batchnorm
     if batchnorm:
         layers.append(
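The hunk is truncated at `layers.append(`. For context, below is a minimal sketch of how the full helper plausibly reads after this commit, assuming the TF1-style `tf.layers` API that rl_coach of this vintage targets; the layer names ("{}_batchnorm", "{}_activation", "{}_dropout") and exact call arguments are illustrative assumptions, not taken from the diff.

import tensorflow as tf

def batchnorm_activation_dropout(input_layer, batchnorm, activation_function,
                                 dropout_rate, is_training, name):
    layers = [input_layer]

    # Rationale: passing a bool here will mean that batchnorm and or activation
    # will never activate -- is_training must be a tensor so the same graph can
    # switch between training and inference behaviour at run time.
    assert not isinstance(is_training, bool)

    # batchnorm (assumed naming; the `training` flag is what the bool check
    # above protects against being permanently baked in)
    if batchnorm:
        layers.append(tf.layers.batch_normalization(
            layers[-1], name="{}_batchnorm".format(name), training=is_training))

    # activation
    if activation_function:
        layers.append(activation_function(
            layers[-1], name="{}_activation".format(name)))

    # dropout, also gated on the is_training tensor
    if dropout_rate > 0:
        layers.append(tf.layers.dropout(
            layers[-1], dropout_rate, name="{}_dropout".format(name),
            training=is_training))

    # return only the newly created layers, not the input
    del layers[0]
    return layers

In this reading, the new assert enforces that callers pass `is_training` as a feedable tensor, e.g. `tf.placeholder_with_default(False, (), name="is_training")`, so batchnorm and dropout can toggle between training and inference mode at run time; a plain Python bool would freeze one mode into the graph at construction time.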