
Commit

Make cosmetic adjustments, Fix random.py header, use message with assert_warns
hamsal committed Aug 8, 2014
1 parent 7cfea35 commit 2a3f504
Showing 5 changed files with 32 additions and 20 deletions.
sklearn/dummy.py (4 changes: 2 additions & 2 deletions)
@@ -1,6 +1,6 @@
 # Author: Mathieu Blondel <[email protected]>
 #         Arnaud Joly <[email protected]>
-#         Maheshakya Wijewardena<[email protected]>
+#         Maheshakya Wijewardena <[email protected]>
 # License: BSD 3 clause
 from __future__ import division

@@ -100,7 +100,7 @@ def fit(self, X, y, sample_weight=None):

         if self.strategy == "uniform" and sp.issparse(y):
             y = y.toarray()
-            warnings.warn('A Local copy of the target data has been converted '
+            warnings.warn('A local copy of the target data has been converted '
                           'to a numpy array. Predicting on sparse target data '
                           'with the uniform strategy would not save memory '
                           'and would be slower.',
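
Note (not part of the commit): a minimal sketch of the code path that emits this warning, assuming only what the diff and the test below show, namely that DummyClassifier accepts a sparse multilabel target:

import numpy as np
import scipy.sparse as sp
from sklearn.dummy import DummyClassifier

X = [[0]] * 4                      # features are ignored by the dummy strategies
y = sp.csc_matrix(np.array([[0, 1],
                            [1, 0],
                            [1, 1],
                            [0, 0]]))
clf = DummyClassifier(strategy="uniform", random_state=0)
clf.fit(X, y)                      # emits the UserWarning above and densifies y
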
sklearn/tests/test_dummy.py (6 changes: 4 additions & 2 deletions)
@@ -10,7 +10,7 @@
 from sklearn.utils.testing import assert_almost_equal
 from sklearn.utils.testing import assert_raises
 from sklearn.utils.testing import assert_true
-from sklearn.utils.testing import assert_warns
+from sklearn.utils.testing import assert_warns_message
 
 from sklearn.dummy import DummyClassifier, DummyRegressor

@@ -430,7 +430,9 @@ def test_uniform_strategy_sparse_target_warning():
                                 [1, 1]]))
 
     clf = DummyClassifier(strategy="uniform", random_state=0)
-    assert_warns(UserWarning, clf.fit, X, y)
+    assert_warns_message(UserWarning,
+                         "the uniform strategy would not save memory",
+                         clf.fit, X, y)
 
     X = [[0]] * 500
     y_pred = clf.predict(X)
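
Switching from assert_warns to assert_warns_message tightens the test: both check the warning class, but assert_warns_message also checks that the emitted message contains the given substring. A minimal sketch of the pattern, assuming only the signature visible in the call above:

import warnings

from sklearn.utils.testing import assert_warns_message


def noisy_fit():
    # stand-in for clf.fit: emits the warning the test expects
    warnings.warn("Predicting on sparse target data with the uniform strategy "
                  "would not save memory and would be slower.", UserWarning)


# passes only if a UserWarning is raised and its message contains the substring
assert_warns_message(UserWarning, "the uniform strategy would not save memory",
                     noisy_fit)
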
sklearn/utils/random.py (7 changes: 5 additions & 2 deletions)
@@ -1,5 +1,6 @@
-# This file contains a backport of np.random.choice from numpy 1.7
-# The function can be removed when we bump the requirements to >=1.7
+# Author: Hamzeh Alsalhi <[email protected]>
+#
+# License: BSD 3 clause
 from __future__ import division
 import numpy as np
 import scipy.sparse as sp
@@ -13,6 +14,8 @@
 __all__ = ['sample_without_replacement', 'choice']
 
 
+# This is a backport of np.random.choice from numpy 1.7
+# The function can be removed when we bump the requirements to >=1.7
 def choice(a, size=None, replace=True, p=None, random_state=None):
     """
     choice(a, size=None, replace=True, p=None)
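
The relocated comment documents that choice is a backport of np.random.choice from numpy 1.7. A hedged usage sketch, relying only on the signature shown in the hunk above and on the assumption that it mirrors the numpy behaviour:

from sklearn.utils.random import choice

# draw 3 distinct values from range(5) with non-uniform probabilities,
# reproducibly via random_state
sample = choice(5, size=3, replace=False,
                p=[0.1, 0.1, 0.2, 0.3, 0.3], random_state=0)
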
sklearn/utils/tests/test_multiclass.py (33 changes: 20 additions & 13 deletions)
@@ -347,12 +347,15 @@ def test_class_distribution():
                   [1, 3, 0, 1]])
 
     classes, n_classes, class_prior = class_distribution(y)
-    classes_expected = [np.array([1, 2, 4]), np.array([0, 2, 3]),
-                        np.array([0]), np.array([1])]
+    classes_expected = [[1, 2, 4],
+                        [0, 2, 3],
+                        [0],
+                        [1]]
     n_classes_expected = [3, 3, 1, 1]
-    class_prior_expected = [np.array([3/6, 2/6, 1/6]),
-                            np.array([1/3, 1/3, 1/3]),
-                            np.array([1.0]), np.array([1.0])]
+    class_prior_expected = [[3/6, 2/6, 1/6],
+                            [1/3, 1/3, 1/3],
+                            [1.0],
+                            [1.0]]
 
     for k in range(y.shape[1]):
         assert_array_almost_equal(classes[k], classes_expected[k])
@@ -369,12 +372,15 @@ def test_class_distribution_sparse():
                                 [1, 3, 0, 1]]))
 
     classes, n_classes, class_prior = class_distribution(y)
-    classes_expected = [np.array([1, 2, 4]), np.array([0, 2, 3]),
-                        np.array([0]), np.array([1])]
+    classes_expected = [[1, 2, 4],
+                        [0, 2, 3],
+                        [0],
+                        [1]]
     n_classes_expected = [3, 3, 1, 1]
-    class_prior_expected = [np.array([3/6, 2/6, 1/6]),
-                            np.array([1/3, 1/3, 1/3]),
-                            np.array([1.0]), np.array([1.0])]
+    class_prior_expected = [[3/6, 2/6, 1/6],
+                            [1/3, 1/3, 1/3],
+                            [1.0],
+                            [1.0]]
 
     for k in range(y.shape[1]):
         assert_array_almost_equal(classes[k], classes_expected[k])
@@ -385,9 +391,10 @@ def test_class_distribution_sparse():
     (classes,
      n_classes,
      class_prior) = class_distribution(y, [1.0, 2.0, 1.0, 2.0, 1.0, 2.0])
-    class_prior_expected = [np.array([4/9, 3/9, 2/9]),
-                            np.array([2/9, 4/9, 3/9]),
-                            np.array([1.0]), np.array([1.0])]
+    class_prior_expected = [[4/9, 3/9, 2/9],
+                            [2/9, 4/9, 3/9],
+                            [1.0],
+                            [1.0]]
 
     for k in range(y.shape[1]):
         assert_array_almost_equal(classes[k], classes_expected[k])
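
For context on the test: class_distribution takes an (n_samples, n_outputs) target and returns per-output class information, which is why the expected values above are lists with one entry per column of y. A small sketch of the three return values, assuming the helper lives in sklearn.utils.multiclass as the test module name suggests:

import numpy as np
from sklearn.utils.multiclass import class_distribution

y = np.array([[1, 0, 0, 1],
              [2, 2, 0, 1],
              [1, 3, 0, 1]])
classes, n_classes, class_prior = class_distribution(y)
# classes[k]     -> sorted unique labels of output k (here classes[0] is [1, 2])
# n_classes[k]   -> number of distinct labels in output k
# class_prior[k] -> empirical frequency of each label in classes[k]; an optional
#                   sample_weight sequence (second argument) reweights it
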
sklearn/utils/tests/test_random.py (2 changes: 1 addition & 1 deletion)
@@ -118,7 +118,7 @@ def test_random_choice_csc(n_samples=10000, random_state=24):
         assert_array_almost_equal(class_probabilites[k], p, decimal=1)
 
     # Implicit class probabilities
-    classes = [[0, 1], [1, 2]]  # array like support
+    classes = [[0, 1], [1, 2]]  # test for array-like support
     class_probabilites = [np.array([0.5, 0.5]), np.array([0, 1/2, 1/2])]
 
     got = random_choice_csc(n_samples=n_samples,
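
The comment fix clarifies that this block exercises plain-list (array-like) classes. "Implicit class probabilities" here means no probability vector is supplied for the outputs, and the expected arrays above treat each listed class as equiprobable. A numpy-only sketch of the distribution the assertion then expects for the second output:

import numpy as np

rng = np.random.RandomState(24)
draws = rng.choice([1, 2], size=10000)          # uniform over the listed classes
freq = np.bincount(draws, minlength=3) / 10000.0
# freq is approximately [0, 0.5, 0.5], matching the expected
# class_probabilites[1] == np.array([0, 1/2, 1/2]) above
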
