
batchnorm fixes + disabling batchnorm in DDPG (#353)

Co-authored-by: James Casbon <casbon+gh@gmail.com>
Author: Gal Leibovich
Date: 2019-06-23 11:28:22 +03:00
Committed by: GitHub
Parent: 7b5d6a3f03
Commit: d6795bd524
22 changed files with 105 additions and 50 deletions


@@ -26,6 +26,9 @@ from rl_coach.architectures.tensorflow_components import utils
 def batchnorm_activation_dropout(input_layer, batchnorm, activation_function, dropout_rate, is_training, name):
     layers = [input_layer]
 
+    # Rationale: passing a bool here means that batchnorm and/or activation will never activate
+    assert not isinstance(is_training, bool)
+
     # batchnorm
     if batchnorm:
         layers.append(
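
For context, here is a minimal sketch of why the added assert matters. It is not coach's actual implementation: the function name batchnorm_activation_dropout_sketch, the placeholder is_training_ph, and the layer name 'fc0' are illustrative, and the TF 1.x tf.layers API is assumed from the codebase's era. With a Python bool, tf.layers.batch_normalization and tf.layers.dropout bake a single mode into the graph at construction time; with a boolean tensor, the same graph can switch between training and evaluation behaviour at session run time.

import tensorflow as tf

def batchnorm_activation_dropout_sketch(input_layer, batchnorm, activation_function,
                                        dropout_rate, is_training, name):
    # A Python bool would freeze training/eval behaviour at graph-construction
    # time; only a tensor lets batchnorm and dropout be toggled per session run.
    assert not isinstance(is_training, bool)
    layers = [input_layer]
    if batchnorm:
        layers.append(tf.layers.batch_normalization(
            layers[-1], training=is_training, name='{}_batchnorm'.format(name)))
    if activation_function:
        layers.append(activation_function(layers[-1], name='{}_activation'.format(name)))
    if dropout_rate > 0:
        layers.append(tf.layers.dropout(
            layers[-1], rate=dropout_rate, training=is_training, name='{}_dropout'.format(name)))
    return layers

# is_training is a scalar boolean tensor fed at run time, not a Python bool:
x = tf.placeholder(tf.float32, [None, 32])
is_training_ph = tf.placeholder_with_default(False, shape=(), name='is_training')
out = batchnorm_activation_dropout_sketch(x, batchnorm=True,
                                          activation_function=tf.nn.relu,
                                          dropout_rate=0.1,
                                          is_training=is_training_ph,
                                          name='fc0')[-1]

Feeding is_training_ph=True during training steps and leaving it at its False default for evaluation keeps one graph serving both modes, which is exactly what the assert guards against losing.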