Mirror of https://github.com/gryf/coach.git

fix for intel optimized tensorflow on distributed runs + adding coach_env to .gitignore

author galleibo-intel
date 2017-11-06 19:41:32 +02:00
parent b40259c61a
commit f47b8092af
3 changed files with 4 additions and 4 deletions

.gitignore

@@ -14,3 +14,4 @@ roboschool
 *.doc
 *.orig
 docs/site
+coach_env


@@ -343,8 +343,8 @@ class Agent(object):
         :param to_type: can be 'channels_first' or 'channels_last'
         :return: a new observation with the requested axes order
         """
-        if from_type == to_type:
-            return
+        if from_type == to_type or len(observation.shape) == 1:
+            return observation
         assert 2 <= len(observation.shape) <= 3, 'num axes of an observation must be 2 for a vector or 3 for an image'
         assert type(observation) == np.ndarray, 'observation must be a numpy array'
         if len(observation.shape) == 3:
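
For context, the method being patched reorders an observation's axes between 'channels_first' and 'channels_last'. The change lets 1-D (vector) observations pass through untouched instead of hitting the image-only assertions, and it returns the observation itself rather than None when no reordering is needed. The following is a minimal sketch of that logic, assuming NumPy observations; the standalone function name and the transpose details are illustrative rather than the exact method of the Agent class:

import numpy as np

def switch_observation_axes(observation, from_type, to_type):
    # Illustrative stand-in for the patched Agent method.
    # The fixed guard: matching layouts and 1-D (vector) observations
    # are returned unchanged instead of returning None or asserting.
    if from_type == to_type or len(observation.shape) == 1:
        return observation
    assert 2 <= len(observation.shape) <= 3, 'num axes of an observation must be 2 for a vector or 3 for an image'
    assert type(observation) == np.ndarray, 'observation must be a numpy array'
    if len(observation.shape) == 3:
        if from_type == 'channels_first' and to_type == 'channels_last':
            return np.transpose(observation, (1, 2, 0))  # C,H,W -> H,W,C
        else:
            return np.transpose(observation, (2, 0, 1))  # H,W,C -> C,H,W
    return observation

# A flat vector observation now passes straight through:
vec = np.zeros(8)
assert switch_observation_axes(vec, 'channels_first', 'channels_last') is vec

# A channels-first image gets its channel axis moved to the end:
img = np.zeros((3, 84, 84))
assert switch_observation_axes(img, 'channels_first', 'channels_last').shape == (84, 84, 3)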


@@ -37,7 +37,6 @@ time_started = datetime.datetime.now()
cur_time = time_started.time()
cur_date = time_started.date()
def get_experiment_path(general_experiments_path):
    if not os.path.exists(general_experiments_path):
        os.makedirs(general_experiments_path)
@@ -265,7 +264,7 @@ if __name__ == "__main__":
    # Multi-threaded runs
    else:
        assert args.framework.lower() == 'tensorflow', "Distributed training works only with TensorFlow"
        os.environ["OMP_NUM_THREADS"]="1"
        # set parameter server and workers addresses
        ps_hosts = "localhost:{}".format(get_open_port())
        worker_hosts = ",".join(["localhost:{}".format(get_open_port()) for i in range(run_dict['num_threads'] + 1)])
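
The distributed branch above pins OpenMP to a single thread per process before the TensorFlow cluster is built; with Intel-optimized (MKL) TensorFlow, each spawned worker would otherwise create its own full OpenMP thread pool and oversubscribe the CPU. A rough sketch of that setup under TF 1.x follows; get_open_port and num_workers are stand-ins for the script's own helper and run_dict['num_threads'] + 1:

import os
import socket

# Pin OpenMP before TensorFlow starts: Intel-optimized (MKL) builds would
# otherwise spin up a full OpenMP thread pool in every worker process.
os.environ["OMP_NUM_THREADS"] = "1"

import tensorflow as tf  # TF 1.x-style API assumed


def get_open_port():
    # Stand-in for the script's own helper: ask the OS for a free local port.
    s = socket.socket()
    s.bind(("", 0))
    port = s.getsockname()[1]
    s.close()
    return port


num_workers = 4  # stands in for run_dict['num_threads'] + 1

# One parameter server plus num_workers workers, all on localhost.
ps_hosts = "localhost:{}".format(get_open_port())
worker_hosts = ",".join("localhost:{}".format(get_open_port()) for _ in range(num_workers))

cluster = tf.train.ClusterSpec({"ps": ps_hosts.split(","),
                                "worker": worker_hosts.split(",")})

# Each process then starts a server for its own role, e.g. worker 0:
# server = tf.train.Server(cluster, job_name="worker", task_index=0)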