Skip to content

Commit

Permalink
Prevent SingleMachine::Run from deadlocking if it times out during the initialization.
Browse files Browse the repository at this point in the history
Change: 150887058
  • Loading branch information
benoitsteiner authored and tensorflower-gardener committed Mar 22, 2017
1 parent da52fa9 commit 53bf266
Showing 1 changed file with 7 additions and 6 deletions.
13 changes: 7 additions & 6 deletions tensorflow/core/grappler/clusters/single_machine.cc
Original file line number Diff line number Diff line change
Expand Up @@ -90,8 +90,6 @@ Status SingleMachine::Run(const GraphDef& graph_def,
const std::vector<std::pair<string, Tensor>>& feed,
const std::vector<string>& fetch,
RunMetadata* metadata) {
// Interface idea: What about having Initialize(item, graph_def), which
// initializes the graph, and then Run(feed, fetch, metadata).
{
mutex_lock l(this->last_graph_mu_);
if (last_graph_ != &graph_def) {
Expand All @@ -118,18 +116,23 @@ Status SingleMachine::Run(const GraphDef& graph_def,
coordinator_->RegisterRunner(std::move(queue_runner)));
TF_RETURN_IF_ERROR(coordinator_->GetStatus());
}
last_graph_ = &graph_def;

// Warmup TensorFlow if needed
for (int i = 0;
i < options_.config.graph_options().build_cost_model_after(); ++i) {
TF_RETURN_IF_ERROR(RunWithTimeout(feed, fetch, nullptr));
}

last_graph_ = &graph_def;
}
}

TF_RETURN_IF_ERROR(RunWithTimeout(feed, fetch, metadata));
return coordinator_->ExportCostGraph(metadata->mutable_cost_graph());
if (metadata) {
return coordinator_->ExportCostGraph(metadata->mutable_cost_graph());
} else {
return Status::OK();
}
}

Status SingleMachine::RunWithTimeout(
Expand All @@ -149,8 +152,6 @@ Status SingleMachine::RunWithTimeout(
},
timeout_s_ * 1000, thread_pool_.get());
if (!executed_in_time) {
mutex_lock l(last_graph_mu_);
last_graph_ = nullptr;
return errors::DeadlineExceeded("Failed to run the graph after ",
timeout_s_, " seconds, aborting");
} else if (run_metadata && status->ok()) {
Expand Down

0 comments on commit 53bf266

Please sign in to comment.