# Owner(s): ["module: functorch"]
import torch
from functorch.compile import minifier
from torch._functorch.compile_utils import get_placeholders, get_outputs
from functorch import make_fx
from torch.testing._internal.common_utils import TestCase, run_tests
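
# NOTE: minifier(fn, inps, module_fails) repeatedly shrinks the FX graph and its
# inputs for as long as the module_fails callback keeps returning True, and
# returns the smallest (graph, inputs) pair it found. This summary is inferred
# from how the tests below drive the API; see functorch.compile.minifier for
# the authoritative behavior.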


class TestMinifier(TestCase):
    def test_has_mul_minifier(self):
        def failing_f(x, y):
            y = y / 3
            x = x + 3
            x = x * y
            return x + y

        inps = [torch.randn(3), torch.randn(3)]
        failing_f = make_fx(failing_f)(*inps)

        def has_mul(fx_g, inps):
            return torch.ops.aten.mul.Tensor in (i.target for i in fx_g.graph.nodes)

        min_f, inps = minifier(failing_f, inps, has_mul)
        # Minimal repro keeps both inputs: two placeholders, one mul, one output.
        self.assertEqual(len(min_f.graph.nodes), 4)
        self.assertEqual(len(inps), 2)

    def test_has_add_mul(self):
        def failing_f(x):
            x = x * 3
            x = x + 5
            x = x.cos()
            zero = x - x
            result = zero / zero
            result = result + 3
            return (result * 2,)

        inps = [torch.randn(3)]
        failing_f = make_fx(failing_f)(*inps)

        def has_nans(fx_g, inps):
            # Basically, make sure none of the inputs are NaN, then check
            # whether the graph itself produces a NaN.
            for i in inps:
                if torch.isnan(i).any():
                    return False
            return torch.isnan(fx_g(*inps)[0]).any()

        min_f, inps = minifier(failing_f, inps, has_nans)
        # Expect a single-op failing graph with one input:
        # placeholder, one NaN-producing op, output.
        self.assertEqual(len(min_f.graph.nodes), 3)
        self.assertEqual(len(inps), 1)

    def test_input_returned(self):
        def f(a, b, c):
            a = a.sin()
            c = c.cos()
            d = a * c
            return (a, b, c, d)

        inps = [torch.randn(3) for _ in range(3)]

        def inputs_returned(fx_g, inps):
            inps = set(get_placeholders(fx_g.graph))
            outs = set(get_outputs(fx_g.graph))
            return len(inps & outs) > 0

        failing_f = make_fx(f)(*inps)
        min_f, inps = minifier(failing_f, inps, inputs_returned)
        # Smallest graph that returns one of its inputs: placeholder + output.
        self.assertEqual(len(min_f.graph.nodes), 2)
        self.assertEqual(len(inps), 1)

    def test_tup_use(self):
        def f(a, b):
            tup = torch.std_mean(a)
            return (tup[0] + b * tup[1],)

        inps = [torch.randn(3), torch.randn(3)]

        def has_add(fx_g, inps):
            return torch.ops.aten.add.Tensor in (i.target for i in fx_g.graph.nodes)

        failing_f = make_fx(f)(*inps)
        min_f, inps = minifier(failing_f, inps, has_add)
        # Minimal repro keeps both inputs: two placeholders, one add, one output.
        self.assertEqual(len(min_f.graph.nodes), 4)
        self.assertEqual(len(inps), 2)

    def test_module(self):
        class MockModule(torch.nn.Module):
            def __init__(self):
                super().__init__()
                self.relu = torch.nn.ReLU()

            def forward(self, x):
                y = self.relu(x)
                zero = y - y
                result = zero / zero
                result = result + 3
                return result

        mod = MockModule()
        failing_f = torch.fx.symbolic_trace(mod)
        inps = [torch.randn(3)]

        def pass_checker(fx_g, inps):
            # Basically, make sure none of the inputs are NaN, then check
            # whether the traced module itself produces a NaN.
            for i in inps:
                if torch.isnan(i).any():
                    return False
            return torch.isnan(fx_g(*inps)[0]).any()

        min_f, inps = minifier(failing_f, inps, pass_checker)
        # Same shape as test_has_add_mul: one NaN-producing op and one input remain.
        assert len(min_f.graph.nodes) == 3
        assert len(inps) == 1


if __name__ == "__main__":
    run_tests()