removed answers :(
BarclayII committed Feb 7, 2018
1 parent 5f50145 commit 00b81a9
Showing 1 changed file with 6 additions and 10 deletions.
16 changes: 6 additions & 10 deletions notebook/labs/03 - SVM vs LogReg.ipynb
@@ -7,6 +7,8 @@
"# CSCI-UA 0473 - Introduction to Machine Learning\n",
"## Wednesday, February 07, 2018\n",
"\n",
"**Please download\n",
"\n",
"This lab will discuss the difference between SVM and Logistic Regression, and go through the implementation of both using naive `numpy` (not really, because we are using `autograd` to skip manual computation of the gradients).\n",
"\n",
"We will also go through the usage of `autograd` and `scipy.optimize.minimize()` in greater details, and take a look at numerical stability issues.\n",
@@ -324,20 +326,17 @@
" # (1)\n",
" # Get the bias term from the last element of @params, and get the weight vector from the rest.\n",
" # YOUR CODE HERE\n",
" w, b = params[:-1], params[-1]\n",
" \n",
" # (2)\n",
" # Compute the score. Do not forget the bias term!\n",
" # Your score should be a 1D array with num_samples elements for a 2D @x, or a scalar for a 1D @x.\n",
" # YOUR CODE HERE\n",
" s = x @ w + b\n",
" \n",
" # (3) If @pre is True, return the score itself. Otherwise, return the sign of the score.\n",
" # YOUR CODE HERE\n",
" return s if pre else numpy.sign(s)\n",
" \n",
" # (4) Delete the \"pass\" statement if you have completed everything above.\n",
" #pass\n",
" pass\n",
"\n",
"# Empirical cost function.\n",
"# y: 1D array or a scalar containing the ground truth label(s). Each element is either +1 or -1.\n",
@@ -348,27 +347,24 @@
" # (1)\n",
" # Call svm() to get the pre-sign score.\n",
" # YOUR CODE HERE\n",
" s = svm(x, params, True)\n",
" \n",
" # (2)\n",
" # Compute hinge loss, which is the \"max\" term within the summation, for each example.\n",
" # YOUR CODE HERE\n",
" l = numpy.maximum(0, 1 - y * s)\n",
" \n",
" # (3)\n",
" # If avg is False, return the per-example hinge loss. Otherwise, return the empirical cost function.\n",
" # Recall that the cost function is the sum of an average and a norm.\n",
" # YOUR CODE HERE\n",
" return l if not avg else numpy.mean(l) + lambda_ * numpy.sum(params ** 2)\n",
" \n",
" # (4) Delete the \"pass\" statement if you have completed everything above.\n",
" #pass\n",
" pass\n",
"\n",
"def svm_cost(w, x, y, lambda_=1):\n",
" return svm_dist(y, x, w, avg=True, lambda_=lambda_)\n",
"\n",
"# Compute the gradient.\n",
"svm_rule = grad(svm_cost, 0) # YOUR CODE HERE: replace None with the correct function call."
"svm_rule = None # YOUR CODE HERE: replace None with the correct function call."
]
},
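
For reference, the answer lines deleted above reassemble into the following runnable cell. This is a sketch: the `svm` and `svm_dist` signatures are inferred from the calls visible in the diff, not shown in it.

```python
import autograd.numpy as numpy
from autograd import grad

def svm(x, params, pre=False):
    # (1) The bias term is the last element of @params; the rest is the weight vector.
    w, b = params[:-1], params[-1]
    # (2) Pre-sign score: one value per sample (or a scalar for a 1D @x).
    s = x @ w + b
    # (3) Return the raw score when @pre is True, otherwise its sign.
    return s if pre else numpy.sign(s)

def svm_dist(y, x, params, avg=False, lambda_=1):
    # (1) Pre-sign score.
    s = svm(x, params, True)
    # (2) Per-example hinge loss: max(0, 1 - y * score).
    l = numpy.maximum(0, 1 - y * s)
    # (3) Empirical cost: mean hinge loss plus the squared-norm penalty.
    return l if not avg else numpy.mean(l) + lambda_ * numpy.sum(params ** 2)

def svm_cost(w, x, y, lambda_=1):
    return svm_dist(y, x, w, avg=True, lambda_=lambda_)

# Gradient of the cost with respect to argument 0 (the parameters).
svm_rule = grad(svm_cost, 0)
```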
{
@@ -479,7 +475,7 @@
"params = 1e-4 * numpy.random.randn(n_dim+1); params[-1] = 0.\n",
"\n",
"# (1) Call scipy's minimize method on SVM.\n",
"svm_model = scipy.optimize.minimize(svm_cost, params, (x_train, y_train_svm), jac=svm_rule) # YOUR CODE HERE: replace None with the function call.\n",
"svm_model = None # YOUR CODE HERE: replace None with the function call.\n",
"\n",
"params_svm = svm_model.x\n",
"\n",
