Mirror of https://github.com/gryf/coach.git, synced 2025-12-18 11:40:18 +01:00
Make distributed coach work end-to-end.

- With data store, memory backend and orchestrator interfaces.

committed by zach dwiel
parent 9f92064e67
commit 844a5af831
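For orientation, here is an illustrative sketch of the three roles the commit message names. The class names and method signatures below are assumptions for exposition (only save_to_store appears in the diff itself), not the interfaces this commit actually defines.

from abc import ABC, abstractmethod

class MemoryBackend(ABC):
    """Hypothetical surface: transports experiences from rollout workers
    to the trainer (e.g. a Redis-backed queue)."""
    @abstractmethod
    def store(self, transition): ...
    @abstractmethod
    def fetch(self): ...

class DataStore(ABC):
    """Hypothetical surface: exchanges checkpoints between the trainer and
    rollout workers (e.g. S3 or a shared volume). save_to_store() is the
    one method the diff below actually calls."""
    @abstractmethod
    def save_to_store(self): ...
    @abstractmethod
    def load_from_store(self): ...

class Orchestrator(ABC):
    """Hypothetical surface: launches the trainer and rollout workers."""
    @abstractmethod
    def deploy_trainer(self): ...
    @abstractmethod
    def deploy_workers(self, num_workers): ...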
@@ -4,15 +4,19 @@ import argparse
import time
import json

from threading import Thread

from rl_coach.base_parameters import TaskParameters
from rl_coach.coach import expand_preset
from rl_coach import core_types
from rl_coach.utils import short_dynamic_import
from rl_coach.memories.backend.memory_impl import construct_memory_params
from rl_coach.data_stores.data_store_impl import get_data_store, construct_data_store_params


# Q: specify alternative distributed memory, or should this go in the preset?
# A: the preset must define the distributed memory to be used; we aren't going
#    to take a non-distributed preset and automatically distribute it.


def data_store_ckpt_save(data_store):
    while True:
        data_store.save_to_store()
        time.sleep(10)


def training_worker(graph_manager, checkpoint_dir):
    """
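For context between the two hunks, here is a minimal runnable sketch of how the checkpoint-sync loop above could be started in the background, mirroring the Thread call that is still commented out in the second hunk below. The FakeDataStore stand-in is an assumption; only the save_to_store() call comes from the diff.

import time
from threading import Thread

def data_store_ckpt_save(data_store):
    # Same loop as in the hunk above: push checkpoints to the store
    # every 10 seconds.
    while True:
        data_store.save_to_store()
        time.sleep(10)

class FakeDataStore:
    """Stand-in for whatever get_data_store() returns; assumes only the
    save_to_store() method that the loop exercises."""
    def save_to_store(self):
        print("checkpoint synced to store")

# daemon=True so the endless sync loop exits with the main process
# instead of keeping it alive.
Thread(target=data_store_ckpt_save, args=[FakeDataStore()], daemon=True).start()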
@@ -58,16 +62,26 @@ def main():
    parser.add_argument('--memory_backend_params',
                        help="(string) JSON string of the memory backend params",
                        type=str)
    parser.add_argument('--data_store_params',
                        help="(string) JSON string of the data store params",
                        type=str)
    args = parser.parse_args()

    graph_manager = short_dynamic_import(expand_preset(args.preset), ignore_module_case=True)

    if args.memory_backend_params:
        args.memory_backend_params = json.loads(args.memory_backend_params)
        if 'run_type' not in args.memory_backend_params:
            args.memory_backend_params['run_type'] = 'trainer'
        graph_manager.agent_params.memory.register_var('memory_backend_params', construct_memory_params(args.memory_backend_params))

    if args.data_store_params:
        data_store_params = construct_data_store_params(json.loads(args.data_store_params))
        data_store_params.checkpoint_dir = args.checkpoint_dir
        graph_manager.data_store_params = data_store_params
        # data_store = get_data_store(data_store_params)
        # thread = Thread(target=data_store_ckpt_save, args=[data_store])
        # thread.start()

    training_worker(
        graph_manager=graph_manager,
        checkpoint_dir=args.checkpoint_dir,
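To illustrate how the two new flags are meant to be consumed, here is a hedged invocation sketch. The script path, the preset name, and every key inside the JSON payloads other than run_type (which main() fills in when absent, per the hunk above) are assumptions for exposition, not values this commit prescribes.

import json
import subprocess

# Hypothetical payloads; the diff only shows that both flags carry JSON
# and that 'run_type' defaults to 'trainer' when missing.
memory_backend_params = json.dumps({'run_type': 'trainer'})
data_store_params = json.dumps({'store_type': 's3'})

subprocess.run([
    'python', 'rl_coach/training_worker.py',   # assumed script location
    '--preset', 'CartPole_DQN',                # assumed preset name
    '--checkpoint_dir', '/checkpoint',         # read via args.checkpoint_dir
    '--memory_backend_params', memory_backend_params,
    '--data_store_params', data_store_params,
], check=True)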