fix function-redefined 1 (#34507)
lelelelelez authored Jul 30, 2021
1 parent 2ad1e4c commit 06b55ea
Showing 16 changed files with 19 additions and 73 deletions.
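This commit addresses pylint's function-redefined check (E0102): a def or class that reuses a name already bound in the same scope. In Python the later definition silently replaces the earlier one, so a duplicated unittest method means the first test is never collected or run. The sketch below illustrates the problem and the renaming fix; the class and method names are hypothetical and do not come from the changed files.

import unittest


class ExampleTest(unittest.TestCase):
    def test_feature(self):
        self.assertEqual(1 + 1, 2)   # silently shadowed by the definition below; never runs

    def test_feature(self):          # pylint E0102: function already defined above
        self.assertEqual(2 + 2, 4)


class ExampleTestFixed(unittest.TestCase):
    # The pattern used throughout this commit: give the second definition a
    # distinct name (for example a "_1" suffix) so both tests are collected.
    def test_feature(self):
        self.assertEqual(1 + 1, 2)

    def test_feature_1(self):
        self.assertEqual(2 + 2, 4)


if __name__ == "__main__":
    unittest.main()

With the rename in place, test discovery picks up both methods, which is why most hunks in this diff change only a trailing suffix in a test or class name.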
@@ -678,7 +678,7 @@ def test_residual_block_skip_pattern(self):
self.residual_block_quant(
quantizable_op_type, skip_pattern='skip_quant', for_ci=True)

def test_residual_block_skip_pattern(self):
def test_residual_block_skip_pattern_1(self):
quantizable_op_type = ['elementwise_add', 'pool2d', 'mul', 'matmul']
self.residual_block_quant(
quantizable_op_type,
@@ -406,7 +406,7 @@ def set_test_func(self):
self.dygraph_func = for_enumerate_var_numpy_with_break


class TestForEnumerateVarNumpyWithBreak(TestForIterVarNumpy):
class TestForEnumerateVarNumpyWithContinue(TestForIterVarNumpy):
def set_test_func(self):
self.dygraph_func = for_enumerate_var_numpy_with_continue

@@ -416,7 +416,7 @@ def set_test_func(self):
self.dygraph_func = for_enumerate_var_numpy_with_start_break


class TestForEnumerateVarNumpyWithStartAndBreak(TestForIterVarNumpy):
class TestForEnumerateVarNumpyWithStartAndContinue(TestForIterVarNumpy):
def set_test_func(self):
self.dygraph_func = for_enumerate_var_numpy_with_start_continue

@@ -80,7 +80,7 @@ def setUp(self):
}


class TestDropoutOpInput1d(TestDropoutOp):
class TestDropoutOpInput1d_1(TestDropoutOp):
# the input is 1-D
def setUp(self):
self.op_type = "dropout"
@@ -73,7 +73,7 @@ def initParameters(self):
self.lod = [3, 0, 6, 3]


class TestSequenceReverse3(TestSequenceReverseBase):
class TestSequenceReverse4(TestSequenceReverseBase):
def initParameters(self):
self.size = (12, 10)
self.lod = [0, 2, 10, 0]
@@ -167,7 +167,7 @@ def init_activation(self):
self.activation = 'identity'


class TestIdentityActivation(TestConv2DFusionOp):
class TestIdentityActivation1(TestConv2DFusionOp):
def init_activation(self):
self.activation = 'identity'
self.add_residual_data = False
2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/unittests/test_conv_nn_grad.py
@@ -52,7 +52,7 @@ def test_grad(self):
self.func(p)


class TestConvDoubleGradCheck(unittest.TestCase):
class TestConvDoubleGradCheckTest0(unittest.TestCase):
@prog_scope()
def func(self, place):
shape = [2, 4, 3, 3]
20 changes: 0 additions & 20 deletions python/paddle/fluid/tests/unittests/test_dist_fleet_grad_clip.py
@@ -50,26 +50,6 @@ def _setup_config(self):
self._sync_mode = False
self._grad_clip_mode = 2

def check_with_place(self,
model_file,
delta=1e-3,
check_error_log=False,
need_envs={}):
required_envs = {
"PATH": os.getenv("PATH", ""),
"PYTHONPATH": os.getenv("PYTHONPATH", ""),
"LD_LIBRARY_PATH": os.getenv("LD_LIBRARY_PATH", ""),
"FLAGS_rpc_deadline": "5000", # 5sec to fail fast
"http_proxy": ""
}
required_envs.update(need_envs)

tr0_losses, tr1_losses = self._run_cluster(model_file, required_envs)

def test_dist_train(self):
self.check_with_place(
"dist_fleet_ctr.py", delta=1e-5, check_error_log=True)


class TestDistASyncClipByValue(TestFleetBase):
def _setup_config(self):
4 changes: 2 additions & 2 deletions python/paddle/fluid/tests/unittests/test_eye_op.py
@@ -149,10 +149,10 @@ def test_num_columns_type_check():

self.assertRaises(TypeError, test_num_columns_type_check)

def test_num_columns_type_check():
def test_num_columns_type_check1():
paddle.eye(10, num_columns=10, dtype="int8")

self.assertRaises(TypeError, test_num_columns_type_check)
self.assertRaises(TypeError, test_num_columns_type_check1)


if __name__ == "__main__":
@@ -77,40 +77,6 @@ def test_recompute_optimizer_backward_optimize(self):
]
self.assertIn('subprog', ''.join(outs))

def test_recompute_optimizer_backward(self):
""" test recompute optimizer backward """
train_prog, startup_prog = fluid.Program(), fluid.Program()
avg_cost, strategy = self.net(train_prog, startup_prog)

self.set_strategy(strategy, 'recompute')
opt = fluid.optimizer.MomentumOptimizer(
learning_rate=0.001, momentum=0.9)
opt = RecomputeOptimizer(opt)
opt.user_defined_strategy = strategy
params_grads = opt.backward(avg_cost, startup_prog)

outs = [
op.output('Out')[0] for op in avg_cost.block.ops if op.type == 'mul'
]
self.assertIn('subprog', ''.join(outs))

def test_recompute_optimizer_backward(self):
""" test recompute optimizer backward """
train_prog, startup_prog = fluid.Program(), fluid.Program()
avg_cost, strategy = self.net(train_prog, startup_prog)

self.set_strategy(strategy, 'recompute')
opt = fluid.optimizer.MomentumOptimizer(
learning_rate=0.001, momentum=0.9)
opt = RecomputeOptimizer(opt)
opt.user_defined_strategy = strategy
params_grads = opt.backward(avg_cost, startup_prog)

outs = [
op.output('Out')[0] for op in avg_cost.block.ops if op.type == 'mul'
]
self.assertIn('subprog', ''.join(outs))

def test_recompute_optimizer(self):
train_prog, startup_prog = fluid.Program(), fluid.Program()
avg_cost, strategy = self.net(train_prog, startup_prog)
4 changes: 2 additions & 2 deletions python/paddle/fluid/tests/unittests/test_gather_op.py
@@ -275,10 +275,10 @@ def test_axis_dtype():

self.assertRaises(TypeError, test_axis_dtype)

def test_axis_dtype():
def test_axis_dtype1():
paddle.gather(x, index, axis=axis)

self.assertRaises(TypeError, test_axis_dtype)
self.assertRaises(TypeError, test_axis_dtype1)

def test_error2(self):
with fluid.program_guard(fluid.Program(), fluid.Program()):
2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/unittests/test_huber_loss_op.py
@@ -76,7 +76,7 @@ def set_shape(self):
return (6, 6)


def TestHuberLossOp2(TestHuberLossOp):
def TestHuberLossOp3(TestHuberLossOp):
def set_shape(self):
return (6, 6, 1)

2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/unittests/test_meshgrid_op.py
@@ -165,7 +165,7 @@ def test_api_with_dygraph_list_input(self):
assert np.array_equal(res_4.shape, [100, 200])


class TestMeshgridOp7(unittest.TestCase):
class TestMeshgridOp8(unittest.TestCase):
def test_api_with_dygraph_tuple_input(self):
input_3 = np.random.randint(0, 100, [100, ]).astype('int32')
input_4 = np.random.randint(0, 100, [200, ]).astype('int32')
2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/unittests/test_onnx_export.py
@@ -52,7 +52,7 @@ def test_with_tensor(self):
paddle.onnx.export(model, 'linear_net', input_spec=[self.x_spec])


class TestExportWithTensor(unittest.TestCase):
class TestExportWithTensor1(unittest.TestCase):
def setUp(self):
self.x = paddle.to_tensor(np.random.random((1, 128)))

6 changes: 3 additions & 3 deletions python/paddle/fluid/tests/unittests/test_random_seed.py
@@ -221,7 +221,7 @@ def test_generator_randint_dygraph(self):
self.assertTrue(np.allclose(x1_np, x2_np))
self.assertTrue(np.allclose(x_np, x3_np))

def test_generator_uniform_random_static(self):
def test_generator_uniform_random_static_1(self):
fluid.disable_dygraph()

gen = paddle.seed(123123143)
@@ -255,7 +255,7 @@ def test_generator_uniform_random_static(self):
self.assertTrue(np.allclose(out1_res2, out2_res2))
self.assertTrue(not np.allclose(out1_res2, out1_res1))

def test_generator_randint_dygraph(self):
def test_generator_randint_dygraph_1(self):
"""Test Generator seed."""
fluid.enable_dygraph()

@@ -405,7 +405,7 @@ def test_generator_sampling_id_dygraph(self):
self.assertTrue(np.allclose(x1_np, x2_np))
self.assertTrue(np.allclose(x_np, x3_np))

def test_generator_randperm_static(self):
def test_generator_randperm_static_1(self):

fluid.disable_dygraph()

2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/unittests/test_scatter_nd_op.py
@@ -294,7 +294,7 @@ def test_dygraph(self):
shape = [3, 5, 9, 10]
output = paddle.scatter_nd(index, updates, shape)

def test_dygraph(self):
def test_dygraph_1(self):
with fluid.dygraph.guard(fluid.CPUPlace()):
x = paddle.rand(shape=[3, 5, 9, 10], dtype='float32')
updates = paddle.rand(shape=[3, 9, 10], dtype='float32')
2 changes: 1 addition & 1 deletion tools/codestyle/test_docstring_checker.py
@@ -38,7 +38,7 @@ def test():
assert len(got) == 1
assert 'W9001' == got[0][0]

def test_one_line(self):
def test_one_line_1(self):
func_node = astroid.extract_node('''
def test():
"""get news"""