Commit

Change all TO DO to TODO
KajvanRijn committed Nov 22, 2024
1 parent 6d425c0 commit 6072ad4
Showing 5 changed files with 13 additions and 13 deletions.
2 changes: 1 addition & 1 deletion Notebooks/Chap02/2_1_Supervised_Learning.ipynb
@@ -174,7 +174,7 @@
{
"cell_type": "code",
"source": [
"# TO DO -- Change the parameters manually to fit the model\n",
"# TODO -- Change the parameters manually to fit the model\n",
"# First fix phi1 and try changing phi0 until you can't make the loss go down any more\n",
"# Then fix phi0 and try changing phi1 until you can't make the loss go down any more\n",
"# Repeat this process until you find a set of parameters that fit the model as in figure 2.2d\n",
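
This exercise asks you to fit the chapter's 1D linear model by tweaking one parameter at a time against a least-squares loss. A minimal sketch of that loss computation, assuming the model y = phi0 + phi1 * x and using hypothetical x and y arrays rather than the notebook's own data:

import numpy as np

# Hypothetical data points; the notebook supplies its own x and y arrays.
x = np.array([0.1, 0.4, 0.7, 1.0])
y = np.array([0.3, 0.5, 0.8, 1.1])

def least_squares_loss(phi0, phi1):
    # Line model followed by the sum of squared residuals
    y_hat = phi0 + phi1 * x
    return np.sum((y - y_hat) ** 2)

# Hold phi1 fixed and nudge phi0 until the loss stops falling, then swap roles.
print(least_squares_loss(phi0=0.2, phi1=0.9))
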
6 changes: 3 additions & 3 deletions Notebooks/Chap06/6_5_Adam.ipynb
@@ -185,11 +185,11 @@
" for c_step in range(n_steps):\n",
" # Measure the gradient as in equation 6.13 (first line)\n",
" m = get_loss_gradient(grad_path[0,c_step], grad_path[1,c_step]);\n",
" # TO DO -- compute the squared gradient as in equation 6.13 (second line)\n",
" # TODO -- compute the squared gradient as in equation 6.13 (second line)\n",
" # Replace this line:\n",
" v = np.ones_like(grad_path[:,0])\n",
"\n",
" # TO DO -- apply the update rule (equation 6.14)\n",
" # TODO -- apply the update rule (equation 6.14)\n",
" # Replace this line:\n",
" grad_path[:,c_step+1] = grad_path[:,c_step]\n",
"\n",
@@ -254,7 +254,7 @@
" v_tilde = v\n",
"\n",
"\n",
" # TO DO -- apply the update rule (equation 6.17)\n",
" # TODO -- apply the update rule (equation 6.17)\n",
" # Replace this line:\n",
" grad_path[:,c_step+1] = grad_path[:,c_step]\n",
"\n",
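
The TODOs in this notebook ask for the squared gradient (equation 6.13, second line), the normalized update (equation 6.14), and the update with bias-corrected moments (equation 6.17). A self-contained sketch of those updates on a toy quadratic loss; the hyperparameter names alpha, beta, gamma, and epsilon are assumptions, not the notebook's variables:

import numpy as np

def loss_gradient(phi):
    # Gradient of the toy loss L = phi_0^2 + phi_1^2
    return 2.0 * phi

phi = np.array([2.0, 1.0])
m = np.zeros_like(phi)
v = np.zeros_like(phi)
alpha, beta, gamma, epsilon = 0.1, 0.9, 0.99, 1e-8

for t in range(200):
    grad = loss_gradient(phi)
    m = beta * m + (1 - beta) * grad            # momentum on the gradient
    v = gamma * v + (1 - gamma) * grad * grad   # squared gradient statistic (cf. eq 6.13, second line)
    m_tilde = m / (1 - beta ** (t + 1))         # bias correction
    v_tilde = v / (1 - gamma ** (t + 1))
    phi = phi - alpha * m_tilde / (np.sqrt(v_tilde) + epsilon)   # normalized step (cf. eq 6.17)

print(phi)  # converges towards [0, 0]

With beta = gamma = 0, m reduces to the raw gradient and v to its square, and the same normalized step corresponds to equation 6.14.
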
2 changes: 1 addition & 1 deletion Notebooks/Chap07/7_2_Backpropagation.ipynb
@@ -148,7 +148,7 @@
" all_h[layer+1] = all_f[layer]\n",
"\n",
" # Compute the output from the last hidden layer\n",
" # TO DO -- Replace the line below\n",
" # TODO -- Replace the line below\n",
" all_f[K] = np.zeros_like(all_biases[-1])\n",
"\n",
" # Retrieve the output\n",
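
The TODO here asks for the pre-activation of the output layer, computed from the activations of the last hidden layer. A minimal forward-pass sketch in the spirit of the notebook's all_weights / all_biases / all_h / all_f lists; the ReLU choice and the layer indexing are assumptions, not necessarily the notebook's exact setup:

import numpy as np

def forward_pass(x, all_weights, all_biases):
    K = len(all_weights) - 1          # number of hidden layers
    all_f = [None] * (K + 1)          # pre-activations at each layer
    all_h = [None] * (K + 1)          # layer inputs; all_h[0] is the network input
    all_h[0] = x
    for layer in range(K):
        all_f[layer] = all_biases[layer] + np.matmul(all_weights[layer], all_h[layer])
        all_h[layer + 1] = np.maximum(all_f[layer], 0.0)   # ReLU activation
    # Output from the last hidden layer -- the kind of line the TODO asks you to write
    all_f[K] = all_biases[K] + np.matmul(all_weights[K], all_h[K])
    return all_f[K], all_f, all_h

# Example: a 2 -> 3 -> 1 network with random parameters
rng = np.random.default_rng(0)
all_weights = [rng.standard_normal((3, 2)), rng.standard_normal((1, 3))]
all_biases = [rng.standard_normal((3, 1)), rng.standard_normal((1, 1))]
y, _, _ = forward_pass(rng.standard_normal((2, 1)), all_weights, all_biases)
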
12 changes: 6 additions & 6 deletions Notebooks/Chap10/10_4_Downsampling_and_Upsampling.ipynb
@@ -73,7 +73,7 @@
"source": [
"def subsample(x_in):\n",
" x_out = np.zeros(( int(np.ceil(x_in.shape[0]/2)), int(np.ceil(x_in.shape[1]/2)) ))\n",
" # TO DO -- write the subsampling routine\n",
" # TODO -- write the subsampling routine\n",
" # Replace this line\n",
" x_out = x_out\n",
"\n",
@@ -159,7 +159,7 @@
"# Now let's try max-pooling\n",
"def maxpool(x_in):\n",
" x_out = np.zeros(( int(np.floor(x_in.shape[0]/2)), int(np.floor(x_in.shape[1]/2)) ))\n",
" # TO DO -- write the maxpool routine\n",
" # TODO -- write the maxpool routine\n",
" # Replace this line\n",
" x_out = x_out\n",
"\n",
@@ -230,7 +230,7 @@
"# Finally, let's try mean pooling\n",
"def meanpool(x_in):\n",
" x_out = np.zeros(( int(np.floor(x_in.shape[0]/2)), int(np.floor(x_in.shape[1]/2)) ))\n",
" # TO DO -- write the meanpool routine\n",
" # TODO -- write the meanpool routine\n",
" # Replace this line\n",
" x_out = x_out\n",
"\n",
@@ -316,7 +316,7 @@
"# Let's first use the duplication method\n",
"def duplicate(x_in):\n",
" x_out = np.zeros(( x_in.shape[0]*2, x_in.shape[1]*2 ))\n",
" # TO DO -- write the duplication routine\n",
" # TODO -- write the duplication routine\n",
" # Replace this line\n",
" x_out = x_out\n",
"\n",
@@ -388,7 +388,7 @@
"# The input x_high_res is the original high res image, from which you can deduce the position of the maximum index\n",
"def max_unpool(x_in, x_high_res):\n",
" x_out = np.zeros(( x_in.shape[0]*2, x_in.shape[1]*2 ))\n",
" # TO DO -- write the subsampling routine\n",
" # TODO -- write the subsampling routine\n",
" # Replace this line\n",
" x_out = x_out\n",
"\n",
@@ -460,7 +460,7 @@
" x_out = np.zeros(( x_in.shape[0]*2, x_in.shape[1]*2 ))\n",
" x_in_pad = np.zeros((x_in.shape[0]+1, x_in.shape[1]+1))\n",
" x_in_pad[0:x_in.shape[0],0:x_in.shape[1]] = x_in\n",
" # TO DO -- write the duplication routine\n",
" # TODO -- write the duplication routine\n",
" # Replace this line\n",
" x_out = x_out\n",
"\n",
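
The TODOs in this notebook all operate on 2x2 neighbourhoods. A compact sketch of plausible completions for the first four routines (subsample, maxpool, meanpool, duplicate); the exact indexing conventions the notebook expects are assumptions, and the max_unpool and padded upsampling routines depend on details not visible in this diff, so they are left out:

import numpy as np

def subsample(x_in):
    # Keep every second row and column (the top-left element of each 2x2 block)
    return x_in[0::2, 0::2]

def _blocks(x_in):
    # View the array as non-overlapping 2x2 blocks (any odd trailing row/column dropped)
    h, w = (x_in.shape[0] // 2) * 2, (x_in.shape[1] // 2) * 2
    return x_in[:h, :w].reshape(h // 2, 2, w // 2, 2)

def maxpool(x_in):
    return _blocks(x_in).max(axis=(1, 3))    # maximum over each 2x2 block

def meanpool(x_in):
    return _blocks(x_in).mean(axis=(1, 3))   # average over each 2x2 block

def duplicate(x_in):
    # Repeat every value twice along both axes
    return np.repeat(np.repeat(x_in, 2, axis=0), 2, axis=1)

x = np.arange(16.0).reshape(4, 4)
print(subsample(x), maxpool(x), meanpool(x), duplicate(x).shape, sep="\n\n")
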
4 changes: 2 additions & 2 deletions Notebooks/Chap21/21_1_Bias_Mitigation.ipynb
@@ -328,7 +328,7 @@
},
"outputs": [],
"source": [
"# TO DO -- try to change the two thresholds so the overall probability of getting the loan is 0.6 for each group\n",
"# TODO -- try to change the two thresholds so the overall probability of getting the loan is 0.6 for each group\n",
"# Change the values in these lines\n",
"tau0 = 0.3\n",
"tau1 = -0.1\n",
@@ -393,7 +393,7 @@
},
"outputs": [],
"source": [
"# TO DO -- try to change the two thresholds so the true positive are 0.8 for each group\n",
"# TODO --try to change the two thresholds so the true positive are 0.8 for each group\n",
"# Change the values in these lines so that both points on the curves have a height of 0.8\n",
"tau0 = -0.1\n",
"tau1 = -0.7\n",
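
Both TODOs in this notebook ask you to move the per-group thresholds tau0 and tau1 until a target statistic matches across groups. A sketch of how such a check might look, using hypothetical score and label arrays in place of the notebook's per-group model outputs:

import numpy as np

rng = np.random.default_rng(1)
# Hypothetical classifier scores and ground-truth labels for two groups
scores_0, scores_1 = rng.normal(0.0, 1.0, 1000), rng.normal(0.5, 1.0, 1000)
labels_0, labels_1 = rng.integers(0, 2, 1000), rng.integers(0, 2, 1000)

def approval_rate(scores, tau):
    # Overall probability of getting the loan at threshold tau
    return np.mean(scores > tau)

def true_positive_rate(scores, labels, tau):
    # Fraction of genuinely creditworthy applicants (label 1) approved at threshold tau
    return np.mean(scores[labels == 1] > tau)

tau0, tau1 = 0.3, -0.1
print(approval_rate(scores_0, tau0), approval_rate(scores_1, tau1))
print(true_positive_rate(scores_0, labels_0, tau0), true_positive_rate(scores_1, labels_1, tau1))
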
