mirror of https://github.com/gryf/coach.git synced 2026-02-14 04:45:50 +01:00

tests: new checkpoint mxnet test + fix utils (#273)

* tests: new mxnet test + fix utils

new test added:
- test_restore_checkpoint[tensorflow, mxnet]

fix failing tests in CI
improve utils

* tests: fix comments for mxnet checkpoint test and utils
anabwan
2019-04-07 07:36:44 +03:00
committed by GitHub
parent e1e335a4ef
commit 881f78f45a
3 changed files with 42 additions and 14 deletions
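
For readers unfamiliar with the bracketed test ids in the commit message: names like test_restore_checkpoint[tensorflow] and test_restore_checkpoint[mxnet] are produced by pytest parametrization. A minimal, hypothetical sketch of how such ids arise (not the repository's actual test module; the body is a placeholder):

import pytest

# Hypothetical stand-in for the per-framework checkpoint test; the real test
# would train briefly, save a checkpoint, and restore it with each backend.
@pytest.mark.parametrize("framework", ["tensorflow", "mxnet"])
def test_restore_checkpoint(framework):
    assert framework in ("tensorflow", "mxnet")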


@@ -395,9 +395,9 @@ def validate_arg_result(flag, p_valid_params, clres=None, process=None,
             results.append(last_step[-1])
             time.sleep(1)
-        assert results[-1] >= Def.Consts.num_hs, \
-            Def.Consts.ASSERT_MSG.format("bigger than " + Def.Consts.num_hs,
-                                         results[-1])
+        assert int(results[-1]) >= Def.Consts.num_hs, \
+            Def.Consts.ASSERT_MSG.format("bigger than " +
+                                         str(Def.Consts.num_hs), results[-1])
     elif flag[0] == "-f" or flag[0] == "--framework":
         """
@@ -445,7 +445,8 @@ def validate_arg_result(flag, p_valid_params, clres=None, process=None,
         """
         lst_csv = []
         # wait until files created
-        csv_path = get_csv_path(clres=clres, extra_tries=10)
+        csv_path = get_csv_path(clres=clres, extra_tries=20,
+                                num_expected_files=int(flag[1]))

         assert len(csv_path) > 0, \
             Def.Consts.ASSERT_MSG.format("paths are not found", csv_path)
@@ -491,8 +492,8 @@ def validate_arg_result(flag, p_valid_params, clres=None, process=None,
         # wait until files created
         csv_path = get_csv_path(clres=clres, extra_tries=20)
-        expected_files = int(flag[1])
-        assert len(csv_path) >= expected_files, \
-            Def.Consts.ASSERT_MSG.format(str(expected_files),
+        num_expected_files = int(flag[1])
+        assert len(csv_path) >= num_expected_files, \
+            Def.Consts.ASSERT_MSG.format(str(num_expected_files),
                                          str(len(csv_path)))


@@ -52,7 +52,7 @@ def print_progress(averaged_rewards, last_num_episodes, start_time, time_limit,
 def read_csv_paths(test_path, filename_pattern, read_csv_tries=120,
-                   extra_tries=0):
+                   extra_tries=0, num_expected_files=None):
     """
     Return file path once it found
     :param test_path: test folder path
@@ -60,6 +60,7 @@ def read_csv_paths(test_path, filename_pattern, read_csv_tries=120,
     :param read_csv_tries: number of iterations until file found
     :param extra_tries: add number of extra tries to check after getting all
                         the paths.
+    :param num_expected_files: find all expected file in experiment folder.
     :return: |string| return csv file path
     """
     csv_paths = []
@@ -68,6 +69,10 @@ def read_csv_paths(test_path, filename_pattern, read_csv_tries=120,
         csv_paths = glob.glob(path.join(test_path, '*', filename_pattern))
         if tries_counter > read_csv_tries:
             break
+
+        if num_expected_files and num_expected_files == len(csv_paths):
+            break
+
         time.sleep(1)
         tries_counter += 1
@@ -131,17 +136,19 @@ def find_string_in_logs(log_path, str, timeout=Def.TimeOuts.wait_for_files,
 def get_csv_path(clres, tries_for_csv=Def.TimeOuts.wait_for_csv,
-                 extra_tries=0):
+                 extra_tries=0, num_expected_files=None):
     """
     Get the csv path with the results - reading csv paths will take some time
     :param clres: object of files that test is creating
     :param tries_for_csv: timeout of tires until getting all csv files
     :param extra_tries: add number of extra tries to check after getting all
                         the paths.
+    :param num_expected_files: find all expected file in experiment folder.
     :return: |list| csv path
     """
     return read_csv_paths(test_path=clres.exp_path,
                           filename_pattern=clres.fn_pattern,
                           read_csv_tries=tries_for_csv,
-                          extra_tries=extra_tries)
+                          extra_tries=extra_tries,
+                          num_expected_files=num_expected_files)
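
Taken together, the changes in this second file let callers keep polling until a specific number of result files is present, instead of stopping at the first match. A self-contained sketch of that pattern, simplified from the functions above (the helper name, default pattern, and folder layout are assumptions, not the repository's API):

import glob
import time
from os import path


def wait_for_csv_files(test_path, filename_pattern="*.csv", max_tries=120,
                       num_expected_files=None):
    """Poll test_path/*/ for CSV files, stopping early once the expected
    number of files is present, or after the first match if no count given."""
    csv_paths = []
    for _ in range(max_tries):
        csv_paths = glob.glob(path.join(test_path, '*', filename_pattern))
        if num_expected_files and len(csv_paths) == num_expected_files:
            break                      # every expected file has appeared
        if csv_paths and not num_expected_files:
            break                      # previous behaviour: first match wins
        time.sleep(1)
    return csv_paths


# Hypothetical usage, mirroring the call-site change in the first file:
# paths = wait_for_csv_files("/tmp/exp_dir", num_expected_files=4)
# assert len(paths) >= 4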