Revert D17112656: [pytorch][PR] fix bug in assertNotEqual for int tensors

Test Plan: revert-hammer

Differential Revision: D17112656

Original commit changeset: 43e0e7da6d58

fbshipit-source-id: 0a0f7b8b125f24a45023ddb46fe144f21499b723
jerryzh168 authored and facebook-github-bot committed Aug 29, 2019
1 parent 8cdad0a commit e231bd1
Showing 2 changed files with 1 addition and 8 deletions.
4 changes: 1 addition & 3 deletions test/common_utils.py
@@ -731,9 +731,7 @@ def assertNotEqual(self, x, y, prec=None, message=''):
                 if diff.is_signed():
                     diff = diff.abs()
                 diff[nan_mask] = 0
-                # Use `item()` to work around:
-                # https://github.com/pytorch/pytorch/issues/22301
-                max_err = diff.max().item()
+                max_err = diff.max()
                 self.assertGreaterEqual(max_err, prec, message)
         elif type(x) == str and type(y) == str:
             super(TestCase, self).assertNotEqual(x, y)
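For context, the reverted change touched how assertNotEqual reduces the element-wise difference to a scalar. The following is a minimal sketch, not code from this commit: it only illustrates that torch.Tensor.max() returns a 0-dim tensor while .item() converts it to a plain Python number, which is what the removed workaround (referencing pytorch/pytorch#22301) did before passing the value to assertGreaterEqual. The tensor values here are illustrative only.

import torch

ones = torch.ones(10, dtype=torch.int)
diff = (ones - ones).abs()           # all-zero int tensor

max_err_tensor = diff.max()          # 0-dim tensor, e.g. tensor(0, dtype=torch.int32)
max_err_scalar = diff.max().item()   # plain Python int: 0

print(type(max_err_tensor))          # <class 'torch.Tensor'>
print(type(max_err_scalar))          # <class 'int'>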
5 changes: 0 additions & 5 deletions test/test_torch.py
@@ -4919,11 +4919,6 @@ def test_random(self):
         self.assertEqual(t.min(), 0)
         self.assertEqual(t.max(), ub - 1)
 
-    def test_not_equal(self):
-        ones = torch.ones(10, dtype=torch.int)
-        self.assertRaisesRegex(AssertionError, "0 not greater than or equal to",
-                               lambda: self.assertNotEqual(ones, ones))
-
     @staticmethod
     def _test_random_neg_values(self, use_cuda=False):
         signed_types = ['torch.DoubleTensor', 'torch.FloatTensor', 'torch.LongTensor',
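The removed test exercised exactly that path: with two identical int tensors the reduced difference is 0, so assertGreaterEqual(0, prec, ...) raises AssertionError with a "0 not greater than or equal to" message. Below is a minimal, self-contained sketch of that mechanism under stated assumptions: it uses plain unittest.TestCase and a hand-picked precision of 1e-5, and does not reproduce the PyTorch assertNotEqual override from test/common_utils.py.

import unittest
import torch

class NotEqualSketch(unittest.TestCase):
    def test_equal_int_tensors_fail_the_check(self):
        ones = torch.ones(10, dtype=torch.int)
        diff = (ones - ones).abs()
        max_err = diff.max().item()   # 0, via the .item() workaround being reverted
        # assertGreaterEqual(0, 1e-5) fails, matching the regex the removed test used.
        with self.assertRaisesRegex(AssertionError, "0 not greater than or equal to"):
            self.assertGreaterEqual(max_err, 1e-5, "tensors are equal")

if __name__ == "__main__":
    unittest.main()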
