From 00b81a9a67f17ae4cb4ce34e7ddd34757784e2cb Mon Sep 17 00:00:00 2001
From: Gan Quan
Date: Wed, 7 Feb 2018 09:25:27 -0500
Subject: [PATCH] removed answers :(

---
 notebook/labs/03 - SVM vs LogReg.ipynb | 16 ++++++----------
 1 file changed, 6 insertions(+), 10 deletions(-)

diff --git a/notebook/labs/03 - SVM vs LogReg.ipynb b/notebook/labs/03 - SVM vs LogReg.ipynb
index be0d439..1d77745 100644
--- a/notebook/labs/03 - SVM vs LogReg.ipynb
+++ b/notebook/labs/03 - SVM vs LogReg.ipynb
@@ -7,6 +7,8 @@
     "# CSCI-UA 0473 - Introduction to Machine Learning\n",
     "## Wednesday, February 07, 2018\n",
     "\n",
+    "**Please download\n",
+    "\n",
     "This lab will discuss the difference between SVM and Logistic Regression, and go through the implementation of both using naive `numpy` (not really, because we are using `autograd` to skip manual computation of the gradients).\n",
     "\n",
     "We will also go through the usage of `autograd` and `scipy.optimize.minimize()` in greater details, and take a look at numerical stability issues.\n",
@@ -324,20 +326,17 @@
     "    # (1)\n",
     "    # Get the bias term from the last element of @params, and get the weight vector from the rest.\n",
     "    # YOUR CODE HERE\n",
-    "    w, b = params[:-1], params[-1]\n",
     "    \n",
     "    # (2)\n",
     "    # Compute the score. Do not forget the bias term!\n",
     "    # Your score should be a 1D array with num_samples elements for a 2D @x, or a scalar for a 1D @x.\n",
     "    # YOUR CODE HERE\n",
-    "    s = x @ w + b\n",
     "    \n",
     "    # (3) If @pre is True, return the score itself. Otherwise, return the sign of the score.\n",
     "    # YOUR CODE HERE\n",
-    "    return s if pre else numpy.sign(s)\n",
     "    \n",
     "    # (4) Delete the \"pass\" statement if you have completed everything above.\n",
-    "    #pass\n",
+    "    pass\n",
     "\n",
     "# Empirical cost function.\n",
     "# y: 1D array or a scalar containing the ground truth label(s). Each element is either +1 or -1.\n",
@@ -348,27 +347,24 @@
     "    # (1)\n",
     "    # Call svm() to get the pre-sign score.\n",
     "    # YOUR CODE HERE\n",
-    "    s = svm(x, params, True)\n",
     "    \n",
     "    # (2)\n",
     "    # Compute hinge loss, which is the \"max\" term within the summation, for each example.\n",
     "    # YOUR CODE HERE\n",
-    "    l = numpy.maximum(0, 1 - y * s)\n",
     "    \n",
     "    # (3)\n",
     "    # If avg is False, return the per-example hinge loss. Otherwise, return the empirical cost function.\n",
     "    # Recall that the cost function is the sum of an average and a norm.\n",
     "    # YOUR CODE HERE\n",
-    "    return l if not avg else numpy.mean(l) + lambda_ * numpy.sum(params ** 2)\n",
     "    \n",
     "    # (4) Delete the \"pass\" statement if you have completed everything above.\n",
-    "    #pass\n",
+    "    pass\n",
     "\n",
     "def svm_cost(w, x, y, lambda_=1):\n",
     "    return svm_dist(y, x, w, avg=True, lambda_=lambda_)\n",
     "\n",
     "# Compute the gradient.\n",
-    "svm_rule = grad(svm_cost, 0) # YOUR CODE HERE: replace None with the correct function call."
+    "svm_rule = None # YOUR CODE HERE: replace None with the correct function call."
    ]
   },
   {
@@ -479,7 +475,7 @@
     "params = 1e-4 * numpy.random.randn(n_dim+1); params[-1] = 0.\n",
     "\n",
     "# (1) Call scipy's minimize method on SVM.\n",
-    "svm_model = scipy.optimize.minimize(svm_cost, params, (x_train, y_train_svm), jac=svm_rule) # YOUR CODE HERE: replace None with the function call.\n",
+    "svm_model = None # YOUR CODE HERE: replace None with the function call.\n",
     "\n",
     "params_svm = svm_model.x\n",
     "\n",
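
Editor's note: for reference, the answer lines this patch removes assemble into the runnable sketch below. The function bodies, `grad(svm_cost, 0)`, and the `scipy.optimize.minimize(...)` call are copied verbatim from the deleted lines; the imports and the small synthetic `x_demo`/`y_demo` dataset (a stand-in for the notebook's `x_train`/`y_train_svm`) are assumptions added here so the snippet runs on its own.

```python
# Assembled from the lines removed by this patch, assuming the notebook's setup:
# autograd's numpy wrapper imported as `numpy` so grad() can differentiate
# through the array operations below.
import autograd.numpy as numpy
from autograd import grad
import scipy.optimize

# Linear SVM score / prediction. @params holds the weights with the bias appended.
def svm(x, params, pre=False):
    # (1) Bias is the last element of @params; the rest is the weight vector.
    w, b = params[:-1], params[-1]
    # (2) Score: one value per sample for a 2D @x, a scalar for a 1D @x.
    s = x @ w + b
    # (3) Pre-sign score if requested, otherwise the predicted label in {-1, +1}.
    return s if pre else numpy.sign(s)

# Empirical cost: mean hinge loss plus an L2 penalty on the parameters.
def svm_dist(y, x, params, avg=False, lambda_=1):
    # (1) Pre-sign scores.
    s = svm(x, params, True)
    # (2) Per-example hinge loss: max(0, 1 - y * s).
    l = numpy.maximum(0, 1 - y * s)
    # (3) Per-example losses, or the average loss plus the norm term.
    return l if not avg else numpy.mean(l) + lambda_ * numpy.sum(params ** 2)

def svm_cost(w, x, y, lambda_=1):
    return svm_dist(y, x, w, avg=True, lambda_=lambda_)

# Gradient of the cost with respect to the parameters (first argument).
svm_rule = grad(svm_cost, 0)

# Demo fit on hypothetical synthetic data (the notebook uses x_train / y_train_svm).
n_dim = 2
x_demo = numpy.random.randn(100, n_dim)
y_demo = numpy.where(x_demo[:, 0] - x_demo[:, 1] > 0, 1.0, -1.0)  # labels in {-1, +1}

params = 1e-4 * numpy.random.randn(n_dim + 1); params[-1] = 0.
svm_model = scipy.optimize.minimize(svm_cost, params, (x_demo, y_demo), jac=svm_rule)
params_svm = svm_model.x
print(svm_model.success, svm_cost(params_svm, x_demo, y_demo))
```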