[Relay][MXNet] Support broadcast_like (apache#6561)
Trevor Morris authored Oct 11, 2020
1 parent d3ef137 commit 74b6922
Showing 2 changed files with 29 additions and 0 deletions.
11 changes: 11 additions & 0 deletions python/tvm/relay/frontend/mxnet.py
@@ -2251,6 +2251,16 @@ def _mx_broadcast_to(inputs, attrs):
return _op.broadcast_to(data, tgt_shape)


def _mx_broadcast_like(inputs, attrs):
assert len(inputs) == 2
for axes in ["lhs_axes", "rhs_axes"]:
if axes in attrs.attrs:
raise tvm.error.OpAttributeUnImplemented(
'Attribute "{}" is not supported for operator broadcast_like.'.format(axes)
)
return _op.broadcast_to_like(*inputs)


def _mx_logical_not(inputs, input_types):
data = inputs[0]
dtype = _infer_type(data).checked_type.dtype
@@ -2410,6 +2420,7 @@ def _mx_npi_where_rscalar(inputs, attrs):
"broadcast_logical_and": _mx_broadcast_logical(_op.logical_and),
"broadcast_logical_xor": _mx_broadcast_logical(_op.logical_xor),
"broadcast_to": _mx_broadcast_to,
"broadcast_like": _mx_broadcast_like,
"logical_not": _mx_logical_not,
"_equal": _mx_compare(_op.equal, _rename),
"_not_equal": _mx_compare(_op.not_equal, _rename),
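
For context, and not part of the commit: the new converter forwards both inputs straight to Relay's broadcast_to_like, so an MXNet broadcast_like(x, y) lowers to roughly the Relay expression sketched below (variable names and shapes here are illustrative).

from tvm import relay

# Sketch only, not part of the diff; assumes broadcast_to_like is exposed
# under tvm.relay, as it is in the TVM tree this commit targets.
x = relay.var("x", shape=(1, 2, 3), dtype="float32")
y = relay.var("y", shape=(3, 2, 3), dtype="float32")
out = relay.broadcast_to_like(x, y)  # broadcasts x to y's shape, (3, 2, 3)
func = relay.Function([x, y], out)
print(func)

The attribute check in _mx_broadcast_like reflects that Relay's broadcast_to_like has no per-axis form, so MXNet's lhs_axes/rhs_axes variants are rejected rather than silently mistranslated.
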
18 changes: 18 additions & 0 deletions tests/python/frontend/mxnet/test_forward.py
@@ -754,6 +754,24 @@ def verify(input_shape, shape):
verify((4, 1, 32, 32), (4, 8, 32, 32))


@tvm.testing.uses_gpu
def test_forward_broadcast_like():
def verify(input_shape, like_shape):
x_np = np.random.uniform(size=input_shape).astype("float32")
y_np = np.random.uniform(size=like_shape).astype("float32")
ref_res = mx.nd.broadcast_like(mx.nd.array(x_np), mx.nd.array(y_np))
mx_sym = mx.sym.broadcast_like(mx.sym.var("x"), mx.sym.var("y"))
mod, _ = relay.frontend.from_mxnet(mx_sym, {"x": input_shape, "y": like_shape})
for target, ctx in tvm.testing.enabled_targets():
for kind in ["graph", "debug"]:
intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
op_res = intrp.evaluate()(x_np, y_np)
tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy())

verify((1, 2, 3), (3, 2, 3))
verify((4, 1, 32, 32), (4, 8, 32, 32))


@tvm.testing.uses_gpu
def test_forward_logical_not():
a_shape = (3, 4, 5)
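
As a complementary sketch, also not part of the commit, the rejected-attribute path added in mxnet.py can be exercised as follows (shapes and axis values are illustrative; this assumes MXNet and TVM are both installed):

import mxnet as mx
import tvm
from tvm import relay

# broadcast_like with lhs_axes/rhs_axes has no direct broadcast_to_like
# equivalent, so the frontend raises OpAttributeUnImplemented while converting.
sym = mx.sym.broadcast_like(
    mx.sym.var("x"), mx.sym.var("y"), lhs_axes=(0,), rhs_axes=(1,)
)
try:
    relay.frontend.from_mxnet(sym, {"x": (1, 3), "y": (5, 1)})
except tvm.error.OpAttributeUnImplemented as err:
    print(err)
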
