Skip to content

Commit

Permalink
Temporarily remove prelu from generated_examples_zip_test
Browse files Browse the repository at this point in the history
PiperOrigin-RevId: 192453411
  • Loading branch information
tensorflower-gardener committed Apr 11, 2018
1 parent 77548a7 commit 8e1b323
Show file tree
Hide file tree
Showing 2 changed files with 0 additions and 49 deletions.
1 change: 0 additions & 1 deletion tensorflow/contrib/lite/testing/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,6 @@ gen_zipped_test_files(
"minimum.zip",
"mul.zip",
"pad.zip",
"prelu.zip",
"relu.zip",
"relu1.zip",
"relu6.zip",
Expand Down
48 changes: 0 additions & 48 deletions tensorflow/contrib/lite/testing/generate_examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -630,54 +630,6 @@ def build_inputs(parameters, sess, inputs, outputs):
make_zip_of_tests(zip_path, test_parameters, build_graph, build_inputs)


def make_prelu_tests(zip_path):
  """Make a set of tests to do PReLU."""

  # The canonical image-processing setup is a 4D NHWC `input` with
  # `shared_axes`=[1, 2], so there is one alpha parameter per channel.
  test_parameters = [{
      "input_shape": [[1, 10, 10, 3], [3, 3, 3, 3]],
      "shared_axes": [[1, 2], [1]],
  }]

  def build_graph(parameters):
    """Apply a Keras PReLU layer to a float32 placeholder."""
    inp = tf.placeholder(
        dtype=tf.float32, name="input", shape=parameters["input_shape"])
    layer = tf.keras.layers.PReLU(shared_axes=parameters["shared_axes"])
    return [inp], [layer(inp)]

  def build_inputs(parameters, sess, inputs, outputs):
    """Feed random input data and assign random values to the PReLU alpha."""
    shape = parameters["input_shape"]
    data = create_tensor_data(
        np.float32, shape, min_value=-10, max_value=10)

    # Any dimension listed in shared_axes collapses to size 1 in the alpha
    # shape; the batch dimension (index 0) is always excluded.
    axes = parameters["shared_axes"]
    alpha_shape = [
        1 if dim in axes else shape[dim] for dim in range(1, len(shape))
    ]
    alpha_values = create_tensor_data(np.float32, alpha_shape)

    # The Keras layer created its alpha variable under "p_re_lu/alpha";
    # reuse the scope so we can overwrite it with our random values.
    with tf.variable_scope("", reuse=True):
      alpha = tf.get_variable("p_re_lu/alpha")
      sess.run(alpha.assign(alpha_values))

    return [data], sess.run(
        outputs, feed_dict=dict(zip(inputs, [data])))

  make_zip_of_tests(
      zip_path,
      test_parameters,
      build_graph,
      build_inputs,
      use_frozen_graph=True)


# This function tests various TensorFlow functions that generate Const ops,
# including `tf.ones`, `tf.zeros` and random functions.
def make_constant_tests(zip_path):
Expand Down

0 comments on commit 8e1b323

Please sign in to comment.