# ignored_warnings.py
from typing import Iterator, List, Tuple, Type
import warnings
from contextlib import contextmanager
from sklearn.exceptions import ConvergenceWarning
# Warnings that regressor pipelines are known to raise benignly during model
# search. Each entry is (warning category, regex matched against the message),
# in the form `warnings.filterwarnings` expects.
regressor_warnings = [
    (
        UserWarning,
        (  # From QuantileTransformer
            r"n_quantiles \(\d+\) is greater than the total number of samples \(\d+\)\."
            r" n_quantiles is set to n_samples\."
        ),
    ),
    (
        ConvergenceWarning,
        (  # From GaussianProcesses
            r"The optimal value found for dimension \d+ of parameter \w+ is close"
            r" to the specified (upper|lower) bound .*(Increasing|Decreasing) the bound"
            # Final dot escaped for consistency with the other patterns here.
            r" and calling fit again may find a better value\."
        ),
    ),
    (
        UserWarning,
        (r"n_components is too large: it will be set to \d+"),  # From FastICA
    ),
    (
        ConvergenceWarning,
        (  # From SGD
            r"Maximum number of iteration reached before convergence\."
            r" Consider increasing max_iter to improve the fit\."
        ),
    ),
    (
        ConvergenceWarning,
        (  # From MLP
            r"Stochastic Optimizer: Maximum iterations \(\d+\) reached and the"
            r" optimization hasn't converged yet\."
        ),
    ),
]
# Warnings that classifier pipelines are known to raise benignly during model
# search. Each entry is (warning category, regex matched against the message),
# in the form `warnings.filterwarnings` expects.
classifier_warnings = [
    (
        UserWarning,
        (  # From QuantileTransformer
            r"n_quantiles \(\d+\) is greater than the total number of samples \(\d+\)\."
            r" n_quantiles is set to n_samples\."
        ),
    ),
    (
        UserWarning,
        (r"n_components is too large: it will be set to \d+"),  # From FastICA
    ),
    (
        ConvergenceWarning,
        (  # From Liblinear
            r"Liblinear failed to converge, increase the number of iterations\."
        ),
    ),
    (
        ConvergenceWarning,
        (  # From SGD
            # BUGFIX: the second fragment previously started with "Consider",
            # producing the pattern "convergence\.Consider ..." which never
            # matches the real message "... convergence. Consider increasing
            # max_iter ...". The leading space restores the match (and matches
            # the regressor_warnings version of this pattern).
            r"Maximum number of iteration reached before convergence\."
            r" Consider increasing max_iter to improve the fit\."
        ),
    ),
    (
        ConvergenceWarning,
        (  # From MLP
            r"Stochastic Optimizer: Maximum iterations \(\d+\) reached and the"
            r" optimization hasn't converged yet\."
        ),
    ),
    (
        ConvergenceWarning,
        (  # From FastICA
            r"FastICA did not converge\."
            r" Consider increasing tolerance or the maximum number of iterations\."
        ),
    ),
    (
        UserWarning,
        (r"Variables are collinear"),  # From LDA (Linear Discriminant Analysis)
    ),
    (
        UserWarning,
        (
            r"Clustering metrics expects discrete values but received continuous values"
            r" for label, and multiclass values for target"
        ),
    ),
]
# Warnings that feature-preprocessing steps are known to raise benignly.
# Each entry is (warning category, regex matched against the message).
feature_preprocessing_warnings = [
    (
        ConvergenceWarning,
        (  # From liblinear
            # Dot escaped for consistency with the identical Liblinear
            # pattern in classifier_warnings.
            r"Liblinear failed to converge, increase the number of iterations\."
        ),
    )
]
# Every (category, message-regex) pair this module knows how to silence,
# across all pipeline kinds.
ignored_warnings = [
    *regressor_warnings,
    *classifier_warnings,
    *feature_preprocessing_warnings,
]
@contextmanager
def ignore_warnings(
    to_ignore: List[Tuple[Type[Warning], str]] = ignored_warnings
) -> Iterator[None]:
    """A context manager that suppresses a known set of warnings.

    Examples
    --------
    >>> with ignore_warnings(classifier_warnings):
    ...     classifier.fit(X, y)

    Parameters
    ----------
    to_ignore : List[Tuple[Type[Warning], str]] = ignored_warnings
        Pairs of (warning category, message regex) to suppress; defaults to
        every warning registered in this module.
    """
    # catch_warnings snapshots the global warning-filter state and restores
    # it on exit, so the "ignore" entries added below are scoped to this
    # context only.
    with warnings.catch_warnings():
        for category, message in to_ignore:
            warnings.filterwarnings("ignore", category=category, message=message)
        yield