BUG: Increment stacklevel for warnings to account for NEP-18 overrides (numpy#13589)

* Increment stacklevel for warnings to account for NEP-18 overrides

For NumPy functions that make use of `__array_function__`, the appropriate
stack level for warnings should generally be increased by 1 to account for
the override function defined in numpy.core.overrides (a short sketch follows
the changed-file summary below).

Fixes numpy GH-13329

* Update numpy/lib/type_check.py

Co-Authored-By: Sebastian Berg <[email protected]>
shoyer and seberg authored May 20, 2019
1 parent bdd75df commit f9c1502
Showing 8 changed files with 44 additions and 37 deletions.
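To illustrate the frame counting described in the commit message (a minimal sketch with made-up names, not NumPy's actual overrides machinery): the wrapper that `numpy.core.overrides` generates sits between the user's call and the decorated implementation, so a warning issued inside the implementation needs `stacklevel=3` rather than `stacklevel=2` to be attributed to the user's line.

```python
# Minimal sketch; `fake_dispatch` and `deprecated_sum` are hypothetical names
# used only to show how one extra wrapper frame shifts the stacklevel.
import functools
import warnings


def fake_dispatch(implementation):
    """Stand-in for the wrapper generated by numpy.core.overrides."""
    @functools.wraps(implementation)
    def public_api(*args, **kwargs):
        # In real NumPy, __array_function__ dispatch would happen here.
        return implementation(*args, **kwargs)
    return public_api


@fake_dispatch
def deprecated_sum(values):
    # Frame 1: this function, frame 2: public_api, frame 3: the caller,
    # so stacklevel=3 makes the warning point at the user's call site.
    warnings.warn("deprecated_sum is deprecated", DeprecationWarning,
                  stacklevel=3)
    return sum(values)


warnings.simplefilter("always")
deprecated_sum([1, 2, 3])  # reported location: this line, not public_api
```

With `stacklevel=2` the warning would instead be attributed to the wrapper frame, which is the kind of misattribution this commit fixes for the real NumPy wrappers.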
numpy/core/fromnumeric.py (4 changes: 2 additions & 2 deletions)
@@ -2145,7 +2145,7 @@ def sum(a, axis=None, dtype=None, out=None, keepdims=np._NoValue,
warnings.warn(
"Calling np.sum(generator) is deprecated, and in the future will give a different result. "
"Use np.sum(np.fromiter(generator)) or the python sum builtin instead.",
- DeprecationWarning, stacklevel=2)
+ DeprecationWarning, stacklevel=3)

res = _sum_(a)
if out is not None:
@@ -3569,5 +3569,5 @@ def rank(a):
warnings.warn(
"`rank` is deprecated; use the `ndim` attribute or function instead. "
"To find the rank of a matrix see `numpy.linalg.matrix_rank`.",
- VisibleDeprecationWarning, stacklevel=2)
+ VisibleDeprecationWarning, stacklevel=3)
return ndim(a)
numpy/lib/function_base.py (24 changes: 12 additions & 12 deletions)
@@ -682,7 +682,7 @@ def select(condlist, choicelist, default=0):
# 2014-02-24, 1.9
warnings.warn("select with an empty condition list is not possible"
"and will be deprecated",
- DeprecationWarning, stacklevel=2)
+ DeprecationWarning, stacklevel=3)
return np.asarray(default)[()]

choicelist = [np.asarray(choice) for choice in choicelist]
@@ -717,7 +717,7 @@ def select(condlist, choicelist, default=0):
msg = "select condlists containing integer ndarrays is deprecated " \
"and will be removed in the future. Use `.astype(bool)` to " \
"convert to bools."
- warnings.warn(msg, DeprecationWarning, stacklevel=2)
+ warnings.warn(msg, DeprecationWarning, stacklevel=3)

if choicelist[0].ndim == 0:
# This may be common, so avoid the call.
@@ -2443,7 +2443,7 @@ def cov(m, y=None, rowvar=True, bias=False, ddof=None, fweights=None,

if fact <= 0:
warnings.warn("Degrees of freedom <= 0 for slice",
- RuntimeWarning, stacklevel=2)
+ RuntimeWarning, stacklevel=3)
fact = 0.0

X -= avg[:, None]
@@ -2522,7 +2522,7 @@ def corrcoef(x, y=None, rowvar=True, bias=np._NoValue, ddof=np._NoValue):
if bias is not np._NoValue or ddof is not np._NoValue:
# 2015-03-15, 1.10
warnings.warn('bias and ddof have no effect and are deprecated',
- DeprecationWarning, stacklevel=2)
+ DeprecationWarning, stacklevel=3)
c = cov(x, y, rowvar)
try:
d = diag(c)
@@ -4304,7 +4304,7 @@ def delete(arr, obj, axis=None):
# 2013-09-24, 1.9
warnings.warn(
"in the future the special handling of scalars will be removed "
- "from delete and raise an error", DeprecationWarning, stacklevel=2)
+ "from delete and raise an error", DeprecationWarning, stacklevel=3)
if wrap:
return wrap(arr)
else:
@@ -4373,7 +4373,7 @@ def delete(arr, obj, axis=None):
if obj.dtype == bool:
warnings.warn("in the future insert will treat boolean arrays and "
"array-likes as boolean index instead of casting it "
- "to integer", FutureWarning, stacklevel=2)
+ "to integer", FutureWarning, stacklevel=3)
obj = obj.astype(intp)
if isinstance(_obj, (int, long, integer)):
# optimization for a single value
@@ -4401,7 +4401,7 @@ def delete(arr, obj, axis=None):
# 2013-09-24, 1.9
warnings.warn(
"using a non-integer array as obj in delete will result in an "
- "error in the future", DeprecationWarning, stacklevel=2)
+ "error in the future", DeprecationWarning, stacklevel=3)
obj = obj.astype(intp)
keep = ones(N, dtype=bool)

@@ -4412,13 +4412,13 @@ def delete(arr, obj, axis=None):
warnings.warn(
"in the future out of bounds indices will raise an error "
"instead of being ignored by `numpy.delete`.",
- DeprecationWarning, stacklevel=2)
+ DeprecationWarning, stacklevel=3)
obj = obj[inside_bounds]
positive_indices = obj >= 0
if not positive_indices.all():
warnings.warn(
"in the future negative indices will not be ignored by "
- "`numpy.delete`.", FutureWarning, stacklevel=2)
+ "`numpy.delete`.", FutureWarning, stacklevel=3)
obj = obj[positive_indices]

keep[obj, ] = False
@@ -4543,7 +4543,7 @@ def insert(arr, obj, values, axis=None):
# 2013-09-24, 1.9
warnings.warn(
"in the future the special handling of scalars will be removed "
- "from insert and raise an error", DeprecationWarning, stacklevel=2)
+ "from insert and raise an error", DeprecationWarning, stacklevel=3)
arr = arr.copy(order=arrorder)
arr[...] = values
if wrap:
@@ -4567,7 +4567,7 @@ def insert(arr, obj, values, axis=None):
warnings.warn(
"in the future insert will treat boolean arrays and "
"array-likes as a boolean index instead of casting it to "
- "integer", FutureWarning, stacklevel=2)
+ "integer", FutureWarning, stacklevel=3)
indices = indices.astype(intp)
# Code after warning period:
#if obj.ndim != 1:
@@ -4617,7 +4617,7 @@ def insert(arr, obj, values, axis=None):
# 2013-09-24, 1.9
warnings.warn(
"using a non-integer array as obj in insert will result in an "
- "error in the future", DeprecationWarning, stacklevel=2)
+ "error in the future", DeprecationWarning, stacklevel=3)
indices = indices.astype(intp)

indices[indices < 0] += N
numpy/lib/histograms.py (11 changes: 6 additions & 5 deletions)
@@ -148,7 +148,8 @@ def jhat(nbins):
nbins_upper_bound = max(100, int(np.sqrt(n)))
nbins = min(_range(1, nbins_upper_bound + 1), key=jhat)
if nbins == nbins_upper_bound:
warnings.warn("The number of bins estimated may be suboptimal.", RuntimeWarning, stacklevel=2)
warnings.warn("The number of bins estimated may be suboptimal.",
RuntimeWarning, stacklevel=3)
return ptp_x / nbins


@@ -279,7 +280,7 @@ def _ravel_and_check_weights(a, weights):
if a.dtype == np.bool_:
warnings.warn("Converting input from {} to {} for compatibility."
.format(a.dtype, np.uint8),
- RuntimeWarning, stacklevel=2)
+ RuntimeWarning, stacklevel=3)
a = a.astype(np.uint8)

if weights is not None:
@@ -888,7 +889,7 @@ def histogram(a, bins=10, range=None, normed=None, weights=None,
warnings.warn(
"The normed argument is ignored when density is provided. "
"In future passing both will result in an error.",
- DeprecationWarning, stacklevel=2)
+ DeprecationWarning, stacklevel=3)
normed = None

if density:
@@ -904,7 +905,7 @@ def histogram(a, bins=10, range=None, normed=None, weights=None,
"density=True will produce the same result anyway. "
"The argument will be removed in a future version of "
"numpy.",
- np.VisibleDeprecationWarning, stacklevel=2)
+ np.VisibleDeprecationWarning, stacklevel=3)

# this normalization is incorrect, but
db = np.array(np.diff(bin_edges), float)
@@ -915,7 +916,7 @@ def histogram(a, bins=10, range=None, normed=None, weights=None,
warnings.warn(
"Passing normed=False is deprecated, and has no effect. "
"Consider passing the density argument instead.",
- DeprecationWarning, stacklevel=2)
+ DeprecationWarning, stacklevel=3)
return n, bin_edges


numpy/lib/nanfunctions.py (25 changes: 16 additions & 9 deletions)
@@ -165,7 +165,8 @@ def _remove_nan_1d(arr1d, overwrite_input=False):
c = np.isnan(arr1d)
s = np.nonzero(c)[0]
if s.size == arr1d.size:
- warnings.warn("All-NaN slice encountered", RuntimeWarning, stacklevel=4)
+ warnings.warn("All-NaN slice encountered", RuntimeWarning,
+               stacklevel=5)
return arr1d[:0], True
elif s.size == 0:
return arr1d, overwrite_input
@@ -318,7 +319,8 @@ def nanmin(a, axis=None, out=None, keepdims=np._NoValue):
# which do not implement isnan (gh-9009), or fmin correctly (gh-8975)
res = np.fmin.reduce(a, axis=axis, out=out, **kwargs)
if np.isnan(res).any():
- warnings.warn("All-NaN slice encountered", RuntimeWarning, stacklevel=2)
+ warnings.warn("All-NaN slice encountered", RuntimeWarning,
+               stacklevel=3)
else:
# Slow, but safe for subclasses of ndarray
a, mask = _replace_nan(a, +np.inf)
@@ -330,7 +332,8 @@ def nanmin(a, axis=None, out=None, keepdims=np._NoValue):
mask = np.all(mask, axis=axis, **kwargs)
if np.any(mask):
res = _copyto(res, np.nan, mask)
- warnings.warn("All-NaN axis encountered", RuntimeWarning, stacklevel=2)
+ warnings.warn("All-NaN axis encountered", RuntimeWarning,
+               stacklevel=3)
return res


@@ -431,7 +434,8 @@ def nanmax(a, axis=None, out=None, keepdims=np._NoValue):
# which do not implement isnan (gh-9009), or fmax correctly (gh-8975)
res = np.fmax.reduce(a, axis=axis, out=out, **kwargs)
if np.isnan(res).any():
- warnings.warn("All-NaN slice encountered", RuntimeWarning, stacklevel=2)
+ warnings.warn("All-NaN slice encountered", RuntimeWarning,
+               stacklevel=3)
else:
# Slow, but safe for subclasses of ndarray
a, mask = _replace_nan(a, -np.inf)
@@ -443,7 +447,8 @@ def nanmax(a, axis=None, out=None, keepdims=np._NoValue):
mask = np.all(mask, axis=axis, **kwargs)
if np.any(mask):
res = _copyto(res, np.nan, mask)
- warnings.warn("All-NaN axis encountered", RuntimeWarning, stacklevel=2)
+ warnings.warn("All-NaN axis encountered", RuntimeWarning,
+               stacklevel=3)
return res


@@ -947,7 +952,7 @@ def nanmean(a, axis=None, dtype=None, out=None, keepdims=np._NoValue):

isbad = (cnt == 0)
if isbad.any():
- warnings.warn("Mean of empty slice", RuntimeWarning, stacklevel=2)
+ warnings.warn("Mean of empty slice", RuntimeWarning, stacklevel=3)
# NaN is the only possible bad value, so no further
# action is needed to handle bad results.
return avg
@@ -959,7 +964,7 @@ def _nanmedian1d(arr1d, overwrite_input=False):
See nanmedian for parameter usage
"""
arr1d, overwrite_input = _remove_nan_1d(arr1d,
- overwrite_input=overwrite_input)
+ overwrite_input=overwrite_input)
if arr1d.size == 0:
return np.nan

Expand Down Expand Up @@ -1002,7 +1007,8 @@ def _nanmedian_small(a, axis=None, out=None, overwrite_input=False):
a = np.ma.masked_array(a, np.isnan(a))
m = np.ma.median(a, axis=axis, overwrite_input=overwrite_input)
for i in range(np.count_nonzero(m.mask.ravel())):
- warnings.warn("All-NaN slice encountered", RuntimeWarning, stacklevel=3)
+ warnings.warn("All-NaN slice encountered", RuntimeWarning,
+               stacklevel=4)
if out is not None:
out[...] = m.filled(np.nan)
return out
@@ -1547,7 +1553,8 @@ def nanvar(a, axis=None, dtype=None, out=None, ddof=0, keepdims=np._NoValue):

isbad = (dof <= 0)
if np.any(isbad):
- warnings.warn("Degrees of freedom <= 0 for slice.", RuntimeWarning, stacklevel=2)
+ warnings.warn("Degrees of freedom <= 0 for slice.", RuntimeWarning,
+               stacklevel=3)
# NaN, inf, or negative numbers are all possible bad
# values, so explicitly replace them with NaN.
var = _copyto(var, np.nan, isbad)
numpy/lib/polynomial.py (2 changes: 1 addition & 1 deletion)
@@ -634,7 +634,7 @@ def polyfit(x, y, deg, rcond=None, full=False, w=None, cov=False):
# warn on rank reduction, which indicates an ill conditioned matrix
if rank != order and not full:
msg = "Polyfit may be poorly conditioned"
- warnings.warn(msg, RankWarning, stacklevel=3)
+ warnings.warn(msg, RankWarning, stacklevel=4)

if full:
return c, resids, rank, s, rcond
numpy/lib/shape_base.py (2 changes: 1 addition & 1 deletion)
@@ -578,7 +578,7 @@ def expand_dims(a, axis):
# 2017-05-17, 1.13.0
warnings.warn("Both axis > a.ndim and axis < -a.ndim - 1 are "
"deprecated and will raise an AxisError in the future.",
- DeprecationWarning, stacklevel=2)
+ DeprecationWarning, stacklevel=3)
# When the deprecation period expires, delete this if block,
if axis < 0:
axis = axis + a.ndim + 1
numpy/lib/type_check.py (7 changes: 3 additions & 4 deletions)
@@ -541,6 +541,9 @@ def real_if_close(a, tol=100):


def _asscalar_dispatcher(a):
+ # 2018-10-10, 1.16
+ warnings.warn('np.asscalar(a) is deprecated since NumPy v1.16, use '
+               'a.item() instead', DeprecationWarning, stacklevel=3)
return (a,)


@@ -569,10 +572,6 @@ def asscalar(a):
>>> np.asscalar(np.array([24]))
24
"""

- # 2018-10-10, 1.16
- warnings.warn('np.asscalar(a) is deprecated since NumPy v1.16, use '
-               'a.item() instead', DeprecationWarning, stacklevel=1)
return a.item()

#-----------------------------------------------------------------------------
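The `asscalar` change just above follows a slightly different pattern: the warning moves out of the implementation and into its dispatcher, which the generated NEP-18 wrapper calls before dispatching. Counting frames from the dispatcher, the wrapper is one level up and the user's call two levels up, so `stacklevel=3` again lands on user code. A rough sketch with hypothetical names (not NumPy's actual `array_function_dispatch`):

```python
import functools
import warnings

import numpy as np


def fake_array_function_dispatch(dispatcher):
    """Hypothetical stand-in for a dispatcher-based override decorator."""
    def decorator(implementation):
        @functools.wraps(implementation)
        def public_api(*args, **kwargs):
            dispatcher(*args, **kwargs)   # runs before any dispatch logic
            return implementation(*args, **kwargs)
        return public_api
    return decorator


def _toy_asscalar_dispatcher(a):
    # Frame 1: this dispatcher, frame 2: public_api, frame 3: the user's call.
    warnings.warn("toy_asscalar(a) is deprecated, use a.item() instead",
                  DeprecationWarning, stacklevel=3)
    return (a,)


@fake_array_function_dispatch(_toy_asscalar_dispatcher)
def toy_asscalar(a):
    return a.item()


warnings.simplefilter("always")
toy_asscalar(np.array(3.0))  # the deprecation warning is attributed to this line
```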
numpy/linalg/linalg.py (6 changes: 3 additions & 3 deletions)
@@ -890,12 +890,12 @@ def qr(a, mode='reduced'):
msg = "".join((
"The 'full' option is deprecated in favor of 'reduced'.\n",
"For backward compatibility let mode default."))
- warnings.warn(msg, DeprecationWarning, stacklevel=2)
+ warnings.warn(msg, DeprecationWarning, stacklevel=3)
mode = 'reduced'
elif mode in ('e', 'economic'):
# 2013-04-01, 1.8
msg = "The 'economic' option is deprecated."
- warnings.warn(msg, DeprecationWarning, stacklevel=2)
+ warnings.warn(msg, DeprecationWarning, stacklevel=3)
mode = 'economic'
else:
raise ValueError("Unrecognized mode '%s'" % mode)
@@ -2245,7 +2245,7 @@ def lstsq(a, b, rcond="warn"):
"To use the future default and silence this warning "
"we advise to pass `rcond=None`, to keep using the old, "
"explicitly pass `rcond=-1`.",
- FutureWarning, stacklevel=2)
+ FutureWarning, stacklevel=3)
rcond = -1
if rcond is None:
rcond = finfo(t).eps * max(n, m)
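A rough end-to-end check of the new stack levels (a sketch that assumes a NumPy build from around this release, where `np.linalg.lstsq` still emits the `rcond` FutureWarning shown in the diff above): capture the warning and confirm it is attributed to the calling script rather than to NumPy's override wrapper.

```python
# Verification sketch; assumes a NumPy version that still warns about the
# deprecated default rcond in np.linalg.lstsq.
import warnings

import numpy as np

a = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])
b = np.array([1.0, 2.0, 3.0])

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    np.linalg.lstsq(a, b)  # rcond left at its deprecated default

future_warnings = [w for w in caught if issubclass(w.category, FutureWarning)]
assert future_warnings, "expected the rcond FutureWarning"
# With the increased stacklevel the warning points at this file and line,
# not at NumPy's internal __array_function__ wrapper.
print(future_warnings[0].filename, future_warnings[0].lineno)
```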
