1
0
mirror of https://github.com/gryf/coach.git synced 2026-02-21 17:25:53 +01:00

Make distributed coach work end-to-end.

- With data store, memory backend and orchestrator interfaces.
This commit is contained in:
Balaji Subramaniam
2018-10-04 12:28:21 -07:00
committed by zach dwiel
parent 9f92064e67
commit 844a5af831
8 changed files with 300 additions and 169 deletions

View File

@@ -0,0 +1,15 @@
#
# Copyright (c) 2017 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

View File

@@ -1,5 +1,6 @@
from rl_coach.data_stores.nfs_data_store import NFSDataStore, NFSDataStoreParameters
from rl_coach.data_stores.s3_data_store import S3DataStore, S3DataStoreParameters
from rl_coach.data_stores.data_store import DataStoreParameters
def get_data_store(params):
@@ -10,3 +11,14 @@ def get_data_store(params):
data_store = S3DataStore(params)
return data_store
def construct_data_store_params(json: dict):
    """Build a concrete data-store parameter object from a config dict.

    Expects ``json`` to carry at least ``store_type``, ``orchestrator_type``
    and ``orchestrator_params``; for the ``'s3'`` store type it additionally
    reads ``end_point``, ``bucket_name`` and ``checkpoint_dir``.

    Returns an ``NFSDataStoreParameters`` or ``S3DataStoreParameters``
    instance, or ``None`` when ``store_type`` matches neither backend.

    NOTE(review): the parameter name ``json`` shadows the stdlib module of
    the same name inside this function — kept for backward compatibility
    with keyword callers.
    """
    # Base parameters are always constructed, regardless of store type.
    base_params = DataStoreParameters(json['store_type'],
                                      json['orchestrator_type'],
                                      json['orchestrator_params'])

    store_type = json['store_type']
    if store_type == 'nfs':
        return NFSDataStoreParameters(base_params)
    if store_type == 's3':
        return S3DataStoreParameters(ds_params=base_params,
                                     end_point=json['end_point'],
                                     bucket_name=json['bucket_name'],
                                     checkpoint_dir=json['checkpoint_dir'])
    # Unknown store type: mirror the original behavior of returning None.
    return None

View File

@@ -46,6 +46,7 @@ class S3DataStore(DataStore):
def save_to_store(self):
try:
print("saving to s3")
for root, dirs, files in os.walk(self.params.checkpoint_dir):
for filename in files:
abs_name = os.path.abspath(os.path.join(root, filename))
@@ -56,6 +57,7 @@ class S3DataStore(DataStore):
def load_from_store(self):
try:
print("loading from s3")
objects = self.mc.list_objects_v2(self.params.bucket_name, recursive=True)
for obj in objects:
filename = os.path.abspath(os.path.join(self.params.checkpoint_dir, obj.object_name))