Skip to content

Commit 200da8a

Browse files
committed
Refactor try!() into question mark operator
1 parent 81456d3 commit 200da8a

File tree

8 files changed

+83
-83
lines changed

examples/addition.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ use tensorflow::Tensor;
1717

1818
fn main() {
1919
// Putting the main code in another function serves two purposes:
20-
// 1. We can use the try! macro.
20+
// 1. We can use the `?` operator.
2121
// 2. We can call exit safely, which does not run any destructors.
2222
exit(match run() {
2323
Ok(_) => 0,
@@ -47,7 +47,7 @@ fn run() -> Result<(), Box<Error>> {
4747
// Load the computation graph defined by regression.py.
4848
let mut graph = Graph::new();
4949
let mut proto = Vec::new();
50-
try!(try!(File::open(filename)).read_to_end(&mut proto));
50+
File::open(filename)?.read_to_end(&mut proto)?;
5151
graph.import_graph_def(&proto, &ImportGraphDefOptions::new())?;
5252
let mut session = Session::new(&SessionOptions::new(), &graph)?;
5353

@@ -56,7 +56,7 @@ fn run() -> Result<(), Box<Error>> {
5656
step.add_input(&graph.operation_by_name_required("x")?, 0, &x);
5757
step.add_input(&graph.operation_by_name_required("y")?, 0, &y);
5858
let z = step.request_output(&graph.operation_by_name_required("z")?, 0);
59-
try!(session.run(&mut step));
59+
session.run(&mut step)?;
6060

6161
// Check our results.
6262
let z_res: i32 = step.take_output(z)?[0];

examples/expressions.rs

+8-8
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ use tensorflow::Tensor;
1515

1616
fn main() {
1717
// Putting the main code in another function serves two purposes:
18-
// 1. We can use the try! macro.
18+
// 1. We can use the `?` operator.
1919
// 2. We can call exit safely, which does not run any destructors.
2020
exit(match run() {
2121
Ok(_) => 0,
@@ -53,7 +53,7 @@ impl Checker {
5353
if self.success {
5454
Ok(())
5555
} else {
56-
Err(Box::new(try!(Status::new_set(Code::Internal, "At least one check failed"))))
56+
Err(Box::new(Status::new_set(Code::Internal, "At least one check failed")?))
5757
}
5858
}
5959
}
@@ -64,19 +64,19 @@ fn run() -> Result<(), Box<Error>> {
6464
let y_node = {
6565
let mut compiler = Compiler::new(&mut g);
6666
let x_expr = <Placeholder<f32>>::new_expr(&vec![2], "x");
67-
try!(compiler.compile(x_expr * 2.0f32 + 1.0f32))
67+
compiler.compile(x_expr * 2.0f32 + 1.0f32)?
6868
};
69-
let x_node = try!(g.operation_by_name_required("x"));
69+
let x_node = g.operation_by_name_required("x")?;
7070
// This is another valid way to get x_node and y_node:
7171
// let (x_node, y_node) = {
7272
// let mut compiler = Compiler::new(&mut g);
7373
// let x_expr = <Placeholder<f32>>::new_expr(&vec![2], "x");
74-
// let x_node = try!(compiler.compile(x_expr.clone()));
75-
// let y_node = try!(compiler.compile(x_expr * 2.0f32 + 1.0f32));
74+
// let x_node = compiler.compile(x_expr.clone())?;
75+
// let y_node = compiler.compile(x_expr * 2.0f32 + 1.0f32)?;
7676
// (x_node, y_node)
7777
// };
7878
let options = SessionOptions::new();
79-
let mut session = try!(Session::new(&options, &g));
79+
let mut session = Session::new(&options, &g)?;
8080

8181
// Evaluate the graph.
8282
let mut x = <Tensor<f32>>::new(&[2]);
@@ -88,7 +88,7 @@ fn run() -> Result<(), Box<Error>> {
8888
session.run(&mut step).unwrap();
8989

9090
// Check our results.
91-
let output_tensor = try!(step.take_output::<f32>(output_token));
91+
let output_tensor = step.take_output::<f32>(output_token)?;
9292
let mut checker = Checker::new(1e-3);
9393
checker.check("output_tensor[0]", 5.0, output_tensor[0]);
9494
checker.check("output_tensor[1]", 7.0, output_tensor[1]);

examples/regression.rs

+5-5
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ use tensorflow::Tensor;
1919

2020
fn main() {
2121
// Putting the main code in another function serves two purposes:
22-
// 1. We can use the try! macro.
22+
// 1. We can use the `?` operator.
2323
// 2. We can call exit safely, which does not run any destructors.
2424
exit(match run() {
2525
Ok(_) => 0,
@@ -56,7 +56,7 @@ fn run() -> Result<(), Box<Error>> {
5656
// Load the computation graph defined by regression.py.
5757
let mut graph = Graph::new();
5858
let mut proto = Vec::new();
59-
try!(try!(File::open(filename)).read_to_end(&mut proto));
59+
File::open(filename)?.read_to_end(&mut proto)?;
6060
graph.import_graph_def(&proto, &ImportGraphDefOptions::new())?;
6161
let mut session = Session::new(&SessionOptions::new(), &graph)?;
6262
let op_x = graph.operation_by_name_required("x")?;
@@ -71,22 +71,22 @@ fn run() -> Result<(), Box<Error>> {
7171
init_step.add_input(&op_x, 0, &x);
7272
init_step.add_input(&op_y, 0, &y);
7373
init_step.add_target(&op_init);
74-
try!(session.run(&mut init_step));
74+
session.run(&mut init_step)?;
7575

7676
// Train the model.
7777
let mut train_step = StepWithGraph::new();
7878
train_step.add_input(&op_x, 0, &x);
7979
train_step.add_input(&op_y, 0, &y);
8080
train_step.add_target(&op_train);
8181
for _ in 0..steps {
82-
try!(session.run(&mut train_step));
82+
session.run(&mut train_step)?;
8383
}
8484

8585
// Grab the data out of the session.
8686
let mut output_step = StepWithGraph::new();
8787
let w_ix = output_step.request_output(&op_w, 0);
8888
let b_ix = output_step.request_output(&op_b, 0);
89-
try!(session.run(&mut output_step));
89+
session.run(&mut output_step)?;
9090

9191
// Check our results.
9292
let w_hat: f32 = output_step.take_output(w_ix)?[0];

examples/regression_savedmodel.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ use tensorflow::Tensor;
1616

1717
fn main() {
1818
// Putting the main code in another function serves two purposes:
19-
// 1. We can use the try! macro.
19+
// 1. We can use the `?` operator.
2020
// 2. We can call exit safely, which does not run any destructors.
2121
exit(match run() {
2222
Ok(_) => 0,
@@ -68,14 +68,14 @@ fn run() -> Result<(), Box<Error>> {
6868
train_step.add_input(&op_y, 0, &y);
6969
train_step.add_target(&op_train);
7070
for _ in 0..steps {
71-
try!(session.run(&mut train_step));
71+
session.run(&mut train_step)?;
7272
}
7373

7474
// Grab the data out of the session.
7575
let mut output_step = StepWithGraph::new();
7676
let w_ix = output_step.request_output(&op_w, 0);
7777
let b_ix = output_step.request_output(&op_b, 0);
78-
try!(session.run(&mut output_step));
78+
session.run(&mut output_step)?;
7979

8080
// Check our results.
8181
let w_hat: f32 = output_step.take_output(w_ix)?[0];

src/expr.rs

+25-25
Original file line numberDiff line numberDiff line change
@@ -116,11 +116,11 @@ impl<T: TensorType> ExprImpl<T> for T {
116116
_children: &[Operation],
117117
id_gen: &mut FnMut() -> String)
118118
-> Result<Operation, Status> {
119-
let mut nd = try!(graph.new_operation("Const", &id_gen()));
120-
try!(nd.set_attr_type("dtype", DataType::Float));
119+
let mut nd = graph.new_operation("Const", &id_gen())?;
120+
nd.set_attr_type("dtype", DataType::Float)?;
121121
let mut value = Tensor::new(&[1]);
122122
value[0] = *self;
123-
try!(nd.set_attr_tensor("value", value));
123+
nd.set_attr_tensor("value", value)?;
124124
nd.finish()
125125
}
126126

@@ -170,11 +170,11 @@ macro_rules! impl_bin_op {
170170
impl<T: TensorType> Display for $name<T> {
171171
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
172172
if self.left.expr.op_level() < OpLevel::$op_level {
173-
try!(write!(f, "({})", self.left));
173+
write!(f, "({})", self.left)?;
174174
} else {
175-
try!(write!(f, "{}", self.left));
175+
write!(f, "{}", self.left)?;
176176
}
177-
try!(write!(f, concat!(" ", $op, " ")));
177+
write!(f, concat!(" ", $op, " "))?;
178178
let paren = if $assoc {
179179
self.right.expr.op_level() < OpLevel::$op_level
180180
} else {
@@ -199,7 +199,7 @@ macro_rules! impl_bin_op {
199199

200200
fn create_operation(&self, graph: &mut Graph, children: &[Operation],
201201
id_gen: &mut FnMut() -> String) -> Result<Operation, Status> {
202-
let mut nd = try!(graph.new_operation($tf_op, &id_gen()));
202+
let mut nd = graph.new_operation($tf_op, &id_gen())?;
203203
nd.add_input(Output {operation: children[0].clone(), index: 0});
204204
nd.add_input(Output {operation: children[1].clone(), index: 0});
205205
nd.finish()
@@ -213,37 +213,37 @@ macro_rules! impl_bin_op {
213213
impl_bin_op!(
214214
Add, add, "+", Add, true, "Add", "Expression resulting from adding two subexpressions.",
215215
fn derivative_by_variable(&self, var: &str) -> Result<Expr<T>, Status> {
216-
Ok(try!(self.left.derivative_by_variable(var)) + try!(self.right.derivative_by_variable(var)))
216+
Ok(self.left.derivative_by_variable(var)? + self.right.derivative_by_variable(var)?)
217217
}
218218
);
219219
impl_bin_op!(
220220
Sub, sub, "-", Add, false, "Sub", "Expression resulting from subtracting two subexpressions.",
221221
fn derivative_by_variable(&self, var: &str) -> Result<Expr<T>, Status> {
222-
Ok(try!(self.left.derivative_by_variable(var)) - try!(self.right.derivative_by_variable(var)))
222+
Ok(self.left.derivative_by_variable(var)? - self.right.derivative_by_variable(var)?)
223223
}
224224
);
225225
impl_bin_op!(
226226
Mul, mul, "*", Mul, true, "Mul", "Expression resulting from multiplying two subexpressions.",
227227
fn derivative_by_variable(&self, var: &str) -> Result<Expr<T>, Status> {
228-
Ok(try!(self.left.derivative_by_variable(var)) * self.right.clone()
229-
+ self.left.clone() * try!(self.right.derivative_by_variable(var)))
228+
Ok(self.left.derivative_by_variable(var)? * self.right.clone()
229+
+ self.left.clone() * self.right.derivative_by_variable(var)?)
230230
}
231231
);
232232
impl_bin_op!(
233233
Div, div, "/", Mul, false, "Div", "Expression resulting from dividing two subexpressions.",
234234
fn derivative_by_variable(&self, var: &str) -> Result<Expr<T>, Status> {
235-
let num = try!(self.left.derivative_by_variable(var)) * self.right.clone()
236-
- self.left.clone() * try!(self.right.derivative_by_variable(var));
235+
let num = self.left.derivative_by_variable(var)? * self.right.clone()
236+
- self.left.clone() * self.right.derivative_by_variable(var)?;
237237
let denom = self.right.clone() * self.right.clone();
238238
Ok(num / denom)
239239
}
240240
);
241241
impl_bin_op!(
242242
Rem, rem, "%", Mul, false, "Mod", "Expression resulting from taking a modulus.",
243243
fn derivative_by_variable(&self, var: &str) -> Result<Expr<T>, Status> {
244-
Ok(try!(self.left.derivative_by_variable(var))
244+
Ok(self.left.derivative_by_variable(var)?
245245
- TruncateDiv::new_expr(self.left.clone(), self.right.clone())
246-
* try!(self.right.derivative_by_variable(var)))
246+
* self.right.derivative_by_variable(var)?)
247247
}
248248
);
249249

@@ -290,7 +290,7 @@ impl<T: TensorType> ExprImpl<T> for TruncateDiv<T> {
290290
children: &[Operation],
291291
id_gen: &mut FnMut() -> String)
292292
-> Result<Operation, Status> {
293-
let mut nd = try!(graph.new_operation("TruncateDiv", &id_gen()));
293+
let mut nd = graph.new_operation("TruncateDiv", &id_gen())?;
294294
nd.add_input(Output {
295295
operation: children[0].clone(),
296296
index: 0,
@@ -307,8 +307,8 @@ impl<T: TensorType> ExprImpl<T> for TruncateDiv<T> {
307307
// TruncateDiv(x, y) = (x - Mod(x, y)) / y
308308
// d/dt TruncateDiv(x, y) = (y * d/dt (x - Mod(x, y)) - (x - Mod(x, y)) dy/dt) / (y * y)
309309
let diff = self.left.clone() - self.left.clone() % self.right.clone();
310-
let term1 = self.right.clone() * try!(diff.derivative_by_variable(var));
311-
let term2 = diff * try!(self.right.derivative_by_variable(var));
310+
let term1 = self.right.clone() * diff.derivative_by_variable(var)?;
311+
let term2 = diff * self.right.derivative_by_variable(var)?;
312312
Ok((term1 - term2) / (self.right.clone() * self.right.clone()))
313313
}
314314
}
@@ -331,7 +331,7 @@ impl<T: TensorType> ops::Neg for Expr<T> {
331331

332332
impl<T: TensorType> Display for Neg<T> {
333333
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
334-
try!(write!(f, "-"));
334+
write!(f, "-")?;
335335
if self.expr.expr.op_level() <= OpLevel::Unary {
336336
write!(f, "({})", self.expr)
337337
} else {
@@ -354,7 +354,7 @@ impl<T: TensorType> ExprImpl<T> for Neg<T> {
354354
children: &[Operation],
355355
id_gen: &mut FnMut() -> String)
356356
-> Result<Operation, Status> {
357-
let mut nd = try!(graph.new_operation("Neg", &id_gen()));
357+
let mut nd = graph.new_operation("Neg", &id_gen())?;
358358
nd.add_input(Output {
359359
operation: children[0].clone(),
360360
index: 0,
@@ -363,7 +363,7 @@ impl<T: TensorType> ExprImpl<T> for Neg<T> {
363363
}
364364

365365
fn derivative_by_variable(&self, var: &str) -> Result<Expr<T>, Status> {
366-
Ok(-try!(self.expr.derivative_by_variable(var)))
366+
Ok(-self.expr.derivative_by_variable(var)?)
367367
}
368368
}
369369

@@ -412,7 +412,7 @@ impl<T: TensorType> ExprImpl<T> for Variable<T> {
412412
_children: &[Operation],
413413
_id_gen: &mut FnMut() -> String)
414414
-> Result<Operation, Status> {
415-
let mut nd = try!(graph.new_operation("Variable", &self.name));
415+
let mut nd = graph.new_operation("Variable", &self.name)?;
416416
nd.set_attr_type("dtype", DataType::Float).unwrap();
417417
nd.set_attr_shape("shape", &Shape(Some(vec![]))).unwrap();
418418
nd.finish()
@@ -472,7 +472,7 @@ impl<T: TensorType> ExprImpl<T> for Placeholder<T> {
472472
_children: &[Operation],
473473
_id_gen: &mut FnMut() -> String)
474474
-> Result<Operation, Status> {
475-
let mut nd = try!(graph.new_operation("Placeholder", &self.name));
475+
let mut nd = graph.new_operation("Placeholder", &self.name)?;
476476
nd.set_attr_type("dtype", DataType::Float).unwrap();
477477
nd.set_attr_shape("shape", &Shape(Some(vec![]))).unwrap();
478478
nd.finish()
@@ -526,7 +526,7 @@ impl<T: TensorType> ExprImpl<T> for Assign<T> {
526526
children: &[Operation],
527527
id_gen: &mut FnMut() -> String)
528528
-> Result<Operation, Status> {
529-
let mut nd = try!(graph.new_operation("Assign", &id_gen()));
529+
let mut nd = graph.new_operation("Assign", &id_gen())?;
530530
nd.add_input(Output {
531531
operation: children[0].clone(),
532532
index: 0,
@@ -649,7 +649,7 @@ impl<'l> Compiler<'l> {
649649
let value = self.operations.get(&key).map(|v| v.clone());
650650
child_operations.push(match value {
651651
Some(v) => v,
652-
None => try!(self.compile_any(child)),
652+
None => self.compile_any(child)?,
653653
});
654654
}
655655
let mut next_id = self.next_id;

0 commit comments

Comments (0)