[AutoDiff] Update method-style differential operators. (tensorflow#395)
Use top-level differential operators instead of method-style ones. Method-style
differential operators were removed a full release cycle ago.
dan-zheng authored Feb 28, 2020
1 parent 1db911d commit 787904e
Showing 2 changed files with 6 additions and 6 deletions.
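
The replacement applied in every hunk below is mechanical: the differentiable value moves from receiver position into an `at:` argument of a top-level operator. A minimal sketch of the old and new spellings, assuming a Swift for TensorFlow toolchain that provides the top-level `gradient(at:)`, `valueWithGradient(at:)`, and `valueWithPullback(at:)` operators (values and closures here are illustrative, not taken from the notebooks):

    import TensorFlow

    let x: Float = 30

    // Removed method-style spelling:
    //     let dydx = x.gradient { x in sin(x) }
    // Top-level spelling used throughout the updated notebooks:
    let dydx = gradient(at: x) { x in sin(x) }                   // cos(30)

    // Value and gradient in one pass.
    let (value, grad) = valueWithGradient(at: x) { x in sin(x) } // (sin(30), cos(30))

    // Value and pullback, for running backpropagation manually.
    let (output, backprop) = valueWithPullback(at: x) { x -> Float in
        let c = cos(x)
        return c * c                                             // f(x) = cos(x)^2
    }
    let dOutput = backprop(1)                                    // -2 * cos(30) * sin(30)

    print(dydx, value, grad, output, dOutput)
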
8 changes: 4 additions & 4 deletions docs/site/tutorials/custom_differentiation.ipynb
@@ -204,7 +204,7 @@
"outputs": [],
"source": [
"var x: Float = 30\n",
"x.gradient { x -> Float in\n",
"gradient(at: x) { x -> Float in\n",
" // Print the partial derivative with respect to the result of `sin(x)`.\n",
" let a = sin(x).withDerivative { print(\"∂+/∂sin = \\($0)\") } \n",
" // Force the partial derivative with respect to `x` to be `0.5`.\n",
@@ -270,7 +270,7 @@
"let y: Tensor<Float> = [0, 1, 1, 0]\n",
"\n",
"for _ in 0..<10 {\n",
" let 𝛁model = classifier.gradient { classifier -> Tensor<Float> in\n",
" let 𝛁model = gradient(at: classifier) { classifier -> Tensor<Float> in\n",
" let ŷ = classifier(x).withDerivative { print(\"∂L/∂ŷ =\", $0) }\n",
" let loss = (ŷ - y).squared().mean()\n",
" print(\"Loss: \\(loss)\")\n",
@@ -378,7 +378,7 @@
"}\n",
"\n",
"// Differentiate `f(x) = (cos(x))^2`.\n",
"let (output, backprop) = input.valueWithPullback { input -> Float in\n",
"let (output, backprop) = valueWithPullback(at: input) { input -> Float in\n",
" return square(cos(input))\n",
"}\n",
"print(\"Running backpropagation...\")\n",
@@ -598,7 +598,7 @@
" print(\"Starting training step \\(i)\")\n",
" print(\" Running original computation...\")\n",
" let (logits, backprop) = model.appliedForBackpropagation(to: x)\n",
" let (loss, dL_dŷ) = logits.valueWithGradient { logits in\n",
" let (loss, dL_dŷ) = valueWithGradient(at: logits) { logits in\n",
" softmaxCrossEntropy(logits: logits, labels: y)\n",
" }\n",
" print(\" Loss: \\(loss)\")\n",
4 changes: 2 additions & 2 deletions docs/site/tutorials/model_training_walkthrough.ipynb
@@ -750,7 +750,7 @@
},
"outputs": [],
"source": [
"let (loss, grads) = model.valueWithGradient { model -> Tensor<Float> in\n",
"let (loss, grads) = valueWithGradient(at: model) { model -> Tensor<Float> in\n",
" let logits = model(firstTrainFeatures)\n",
" return softmaxCrossEntropy(logits: logits, labels: firstTrainLabels)\n",
"}\n",
@@ -871,7 +871,7 @@
" var epochAccuracy: Float = 0\n",
" var batchCount: Int = 0\n",
" for batch in trainDataset {\n",
" let (loss, grad) = model.valueWithGradient { (model: IrisModel) -> Tensor<Float> in\n",
" let (loss, grad) = valueWithGradient(at: model) { (model: IrisModel) -> Tensor<Float> in\n",
" let logits = model(batch.features)\n",
" return softmaxCrossEntropy(logits: logits, labels: batch.labels)\n",
" }\n",
