[pt2] remove meta skips for aminmax, decomp exists (pytorch#106670)
Pull Request resolved: pytorch#106670
Approved by: https://github.com/ezyang
nkaretnikov authored and pytorchmergebot committed Aug 7, 2023
1 parent 26e9804 commit 05e1a50
Showing 1 changed file with 0 additions and 2 deletions.
test/test_meta.py (2 changes: 0 additions & 2 deletions)
@@ -677,7 +677,6 @@ def run_meta_crossref(
     torch.svd : {c128, c64},
     torch.take_along_dim : {bf16, i8, i64, u8, c128, b8, f64, i16, i32, f32, f16, c64},
     torch.vstack : {bf16, i8, c32, i64, u8, c128, b8, f64, i16, i32, f32, f16, c64},
-    torch.aminmax : {i8, i64, u8, f64, b8, f32, i32, i16},
     torch.diff : {b8},
     torch.equal : {bf16, i8, c32, i64, u8, c128, b8, f64, i16, i32, f32, f16, c64},
     torch.nanmean : {bf16, f64, f32, f16, c32, c64, c128},
@@ -828,7 +827,6 @@ def __torch_function__(self, func, types, args=(), kwargs=None):
 meta_dispatch_skips = {
     aten.index.Tensor: {i64, bf16, f16, u8, b8, f32, i8, f64, i16, i32, c32, c64, c128}, # at::nonzero doesn't have a Meta function
     aten._to_copy.default: {i64, bf16, f16, u8, b8, f32, i8, f64, i16, i32, c32, c64, c128},
-    aten.aminmax.default: {i64, u8, b8, f32, i8, f64, i16, i32},
     aten.empty.memory_format: {b8, bf16, c128, c64, c32, f16, f32, f64, i16, i32, i64, i8, u8},
     aten.addbmm_.default: {bf16, c128, c64, f32, f64, i16, i32, i64, i8, u8},
 }
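
The snippet below is a minimal sketch, not part of the commit, of what removing these skips implies: with a decomposition registered for aten.aminmax (as the commit message states), torch.aminmax on a meta tensor is expected to run and return meta outputs with the correct reduced shapes, rather than needing to be skipped.

    import torch

    # Illustrative only (assumes the aminmax decomposition is registered):
    # aminmax on a meta tensor should succeed and yield meta outputs of the
    # expected shapes; no real data is computed on the meta device.
    x = torch.empty(4, 5, device="meta")
    mn, mx = torch.aminmax(x, dim=1)
    assert mn.device.type == "meta" and mn.shape == (4,)
    assert mx.device.type == "meta" and mx.shape == (4,)

Passing on the meta device is roughly what test_meta.py verifies once an op is no longer listed in these skip dictionaries.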