Port test_desc_clone
velconia committed Aug 20, 2018
1 parent 50d66a0 commit 5a83776
Showing 2 changed files with 22 additions and 44 deletions.
14 changes: 7 additions & 7 deletions python/paddle/fluid/tests/unittests/test_desc_clone.py
@@ -110,7 +110,7 @@ def get_transpiler(trainer_id, main_program, pserver_endpoints, trainers):
 
 
 def operator_equal(a, b):
-    for k, v in a.__dict__.iteritems():
+    for k, v in six.iteritems(a.__dict__):
         if isinstance(v, fluid.framework.Program) or \
                 isinstance(v, fluid.framework.Block):
             continue
@@ -120,8 +120,8 @@ def operator_equal(a, b):
            raise ValueError("In operator_equal not equal:{0}\n".format(k))
 
         elif isinstance(v, collections.OrderedDict):
-            v0 = sorted(v.iteritems(), key=lambda x: x[0])
-            v1 = sorted(b.__dict__[k].iteritems(), key=lambda x: x[0])
+            v0 = sorted(six.iteritems(v), key=lambda x: x[0])
+            v1 = sorted(six.iteritems(b.__dict__[k]), key=lambda x: x[0])
 
             if v0 != v1:
                 raise ValueError("In operator_equal not equal:{0}\n".format(k))
@@ -133,7 +133,7 @@ def operator_equal(a, b):
 
 
 def block_equal(a, b):
-    for k, v in a.__dict__.iteritems():
+    for k, v in six.iteritems(a.__dict__):
         if isinstance(v, core.ProgramDesc) or isinstance(
                 v, fluid.framework.Program) or isinstance(v, core.BlockDesc):
             continue
@@ -145,8 +145,8 @@ def block_equal(a, b):
            assert (len(a.ops) == len(b.ops))
 
         elif isinstance(v, collections.OrderedDict):
-            v0 = sorted(v.iteritems(), key=lambda x: x[0])
-            v1 = sorted(b.__dict__[k].iteritems(), key=lambda x: x[0])
+            v0 = sorted(six.iteritems(v), key=lambda x: x[0])
+            v1 = sorted(six.iteritems(b.__dict__[k]), key=lambda x: x[0])
 
             if v0 != v1:
                 raise ValueError("In block_equal not equal:{0}\n".format(k))
@@ -158,7 +158,7 @@ def block_equal(a, b):
 
 
 def program_equal(a, b):
-    for k, v in a.__dict__.iteritems():
+    for k, v in six.iteritems(a.__dict__):
         if isinstance(v, core.ProgramDesc):
             continue
 
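The substance of this file's change is mechanical Python 3 porting: dict.iteritems() exists only in Python 2, so every call site goes through six.iteritems(), which dispatches to iteritems() on Python 2 and items() on Python 3. A minimal standalone sketch of the same sorted-comparison pattern the test uses (the helper name and sample dicts are invented for illustration):

    import collections

    import six

    def ordered_dicts_equal(a, b):
        # six.iteritems(d) calls d.iteritems() on Python 2 and d.items()
        # on Python 3, so this comparison runs unchanged on both.
        v0 = sorted(six.iteritems(a), key=lambda x: x[0])
        v1 = sorted(six.iteritems(b), key=lambda x: x[0])
        return v0 == v1

    d1 = collections.OrderedDict([('b', 2), ('a', 1)])
    d2 = collections.OrderedDict([('a', 1), ('b', 2)])
    print(ordered_dicts_equal(d1, d2))  # True: sorting normalizes key order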
52 changes: 15 additions & 37 deletions python/paddle/fluid/tests/unittests/test_prelu_op.py
@@ -21,9 +21,6 @@
 
 class PReluTest(OpTest):
     def setUp(self):
-        print('setUp')
-        import sys
-        sys.stdout.flush()
         self.op_type = "prelu"
         self.initTestCase()
         x_np = np.random.normal(size=(3, 5, 5, 10)).astype("float32")
@@ -48,39 +45,19 @@ def setUp(self):
         assert out_np is not self.inputs['X']
         self.outputs = {'Out': out_np}
 
-    def tearDown(self):
-        print('tearDown')
-        import sys
-        sys.stdout.flush()
-        del self.outputs
-        del self.inputs
-
     def initTestCase(self):
         self.attrs = {'mode': "channel"}
 
-    def test_check_4_output(self):
-        print('test_check_0_output')
-        import sys
-        sys.stdout.flush()
+    def test_check_output(self):
         self.check_output()
 
-    def test_check_0_grad_2_ignore_x(self):
-        print('test_check_2_grad_2_ignore_x')
-        import sys
-        sys.stdout.flush()
-        self.check_grad(['Alpha'], 'Out', no_grad_set=set('X'))
-
-    # TODO(minqiyang): remove the order of tests
-    def test_check_1_grad_1(self):
-        print('test_check_1_grad_1')
-        import sys
-        sys.stdout.flush()
+    def test_check_grad(self):
         self.check_grad(['X', 'Alpha'], 'Out')
 
-    def test_check_3_grad_3_ignore_alpha(self):
-        print('test_check_3_grad_3_ignore_alpha')
-        import sys
-        sys.stdout.flush()
+    def test_check_grad_ignore_x(self):
+        self.check_grad(['Alpha'], 'Out', no_grad_set=set('X'))
+
+    def test_check_grad_ignore_alpha(self):
         self.check_grad(['X'], 'Out', no_grad_set=set('Alpha'))
 
 
@@ -89,14 +66,15 @@ def initTestCase(self):
         self.attrs = {'mode': "all"}
 
 
-#class TestCase2(PReluTest):
-#    def initTestCase(self):
-#        self.attrs = {'mode': "channel"}
-#
-#
-#class TestCase3(PReluTest):
-#    def initTestCase(self):
-#        self.attrs = {'mode': "element"}
+class TestCase2(PReluTest):
+    def initTestCase(self):
+        self.attrs = {'mode': "channel"}
+
+
+class TestCase3(PReluTest):
+    def initTestCase(self):
+        self.attrs = {'mode': "element"}
 
 
 if __name__ == "__main__":
     unittest.main()
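The renames in this file undo a test-ordering workaround: unittest's default TestLoader sorts a TestCase's methods alphabetically by name, so the old numeric infixes (test_check_0_..., test_check_1_..., flagged by the TODO(minqiyang) comment) pinned an execution order, and the print/flush scaffolding traced it. A minimal standalone illustration of that loader behavior (class and method names invented for the example):

    import unittest

    class OrderDemo(unittest.TestCase):
        def test_1_later(self):
            self.assertTrue(True)

        # Defined second, but the default TestLoader sorts method names
        # as strings, so this runs first.
        def test_0_early(self):
            self.assertTrue(True)

    if __name__ == '__main__':
        # verbosity=2 prints each test name; test_0_early is listed
        # first regardless of definition order.
        unittest.main(verbosity=2)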
