Commit 7efe4939 authored by Yuxin Wu

fix missing chief setting in Distributed Trainer

parent 10cc1962
@@ -63,6 +63,9 @@ class DistributedTrainerReplicated(Trainer):
         if self.job_name == 'worker':
             # ps doesn't build any graph
             self._builder = DistributedReplicatedBuilder(config.tower, server)
+            self.is_chief = self._builder.is_chief
+        else:
+            self.is_chief = False
         logger.info("Distributed training on cluster:\n" + str(server.server_def.cluster))
         self._input_source = config.data
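For context, here is a minimal sketch (not tensorpack's actual implementation) of how the chief flag is conventionally derived in a between-graph replicated TensorFlow setup: the worker with task index 0 acts as chief, and parameter-server ("ps") processes never do, which is what the added else-branch ensures. The function name and arguments below are illustrative assumptions, not part of the commit.

# Illustrative sketch only -- not tensorpack's DistributedReplicatedBuilder code.
# In a typical between-graph replicated setup, the "chief" is the worker with
# task index 0; "ps" processes only serve variables and are never chief.
def compute_is_chief(job_name, task_index):
    """Return True iff this process should act as the chief worker."""
    return job_name == 'worker' and task_index == 0


if __name__ == '__main__':
    # A ps process is never chief (the case this commit fixes),
    # and only worker 0 is.
    assert compute_is_chief('ps', 0) is False
    assert compute_is_chief('worker', 0) is True
    assert compute_is_chief('worker', 1) is False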