Commit d476117
CodeStyle Task No.5 - Enable Ruff PLR1701 rule in python/paddle and t…
linzeyang authored Sep 18, 2023
1 parent 64b7e2e commit d476117
Showing 18 changed files with 71 additions and 91 deletions.
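
For context: Ruff's PLR1701 ("repeated-isinstance-calls", the counterpart of pylint's consider-merging-isinstance, R1701) flags isinstance checks on the same object chained with "or", because isinstance already accepts a tuple of types. A minimal before/after sketch of the pattern this commit rewrites (the "value" name is hypothetical):

    # Flagged by PLR1701: two isinstance checks on one object, chained with `or`.
    def is_sequence_before(value):
        return isinstance(value, list) or isinstance(value, tuple)

    # Preferred form: a single isinstance call with a tuple of types.
    def is_sequence_after(value):
        return isinstance(value, (list, tuple))

Both forms return True when the object is an instance of any listed type; the rewrite only makes the check shorter and easier to read.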
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -69,7 +69,7 @@ select = [
     "PLC3002",
     "PLR0206",
     "PLR0402",
-    # "PLR1701",
+    "PLR1701",
     # "PLR1711",  # Confirmation required
     "PLR1722",
     "PLW3301",  # Confirmation required
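
With PLR1701 now un-commented in the select list, any remaining violations can be surfaced with a run along the lines of "ruff check --select PLR1701 python/paddle test" (a plausible invocation, not part of this commit).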
6 changes: 3 additions & 3 deletions python/paddle/base/compiler.py
@@ -206,9 +206,9 @@ def _compile_data_parallel(self, places, use_device, scope=None):
             assert scope is not None, ""
             self._local_scopes = []
 
-        assert isinstance(places, tuple) or isinstance(
-            places, list
-        ), "Currently , The places type can only be list or tuple, but the input type is {}.".format(
+        assert isinstance(
+            places, (list, tuple)
+        ), "Currently, The places type can only be list or tuple, but the input type is {}.".format(
            type(places)
        )
 
20 changes: 8 additions & 12 deletions python/paddle/base/executor.py
@@ -495,8 +495,8 @@ def _add_feed_fetch_ops(
         global_block, fetch_list, fetch_var_name, fetch_op
     ):
         for i, var in enumerate(fetch_list):
-            assert isinstance(var, Variable) or isinstance(
-                var, str
+            assert isinstance(
+                var, (Variable, str)
             ), "Wrong type for fetch_list[%s]: %s" % (i, type(var))
             global_block.append_op(
                 type=fetch_op,
@@ -664,7 +664,7 @@ def _get_program_cache_key(feed, fetch_list):
     feed_var_names = []
     if isinstance(feed, dict):
         feed_var_names = list(feed.keys())
-    elif isinstance(feed, list) or isinstance(feed, tuple):
+    elif isinstance(feed, (list, tuple)):
         for i, each in enumerate(feed):
             feed_var_names += list(each.keys())
     fetch_var_names = list(map(_to_name_str, fetch_list))
@@ -1295,11 +1295,7 @@ def _get_targets(_optimize_ops, _fetch_list, item):
                 raise TypeError(
                     "The operator in fetch_list is not an optimize_op"
                 )
-            elif (
-                isinstance(item, Variable)
-                or isinstance(item, str)
-                or isinstance(item, str)
-            ):
+            elif isinstance(item, (Variable, str)):
                 _fetch_list.append(item)
             else:
                 raise TypeError(
@@ -1365,7 +1361,7 @@ def _prune_program(
     feed_names = []
     if isinstance(feed, dict):
         feed_names = list(feed.keys())
-    elif isinstance(feed, list) or isinstance(feed, tuple):
+    elif isinstance(feed, (list, tuple)):
         for i, each in enumerate(feed):
             feed_names += list(each.keys())
 
@@ -1426,7 +1422,7 @@ def _update_feed(cls, program, feed):
                     % feed_name
                 )
 
-    elif isinstance(feed, list) or isinstance(feed, tuple):
+    elif isinstance(feed, (list, tuple)):
         for i, each in enumerate(feed):
             for feed_name in list(each.keys()):
                 if not global_block.has_var(feed_name):
@@ -2805,8 +2801,8 @@ def _add_fetch_ops(
         global_block, fetch_list, fetch_var_name, fetch_op
     ):
         for i, var in enumerate(fetch_list):
-            assert isinstance(var, Variable) or isinstance(
-                var, str
+            assert isinstance(
+                var, (Variable, str)
             ), "Wrong type for fetch_list[%s]: %s" % (i, type(var))
             global_block.append_op(
                 type=fetch_op,
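
Worth noting in the _get_targets hunk above: the original chain even contained a literally duplicated "or isinstance(item, str)" check; the merged isinstance(item, (Variable, str)) form removes that redundancy along with the chain.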
12 changes: 3 additions & 9 deletions python/paddle/base/framework.py
@@ -3430,9 +3430,7 @@ def _update_desc_attr(self, name, val):
             self.desc.set_block_attr(name, val.desc)
         elif isinstance(val, list) and val and _all_is_type(val, Block):
             self.desc.set_blocks_attr(name, [v.desc for v in val])
-        elif isinstance(val, core.BlockDesc) or isinstance(
-            val, core.ProgramDesc
-        ):
+        elif isinstance(val, (core.BlockDesc, core.ProgramDesc)):
             self.desc.set_serialized_attr(name, val.serialize_to_string())
         else:
             self._update_desc_plain_attr(name, val)
@@ -5068,9 +5066,7 @@ def _update_desc_attr(self, name, val):
             desc.set_block_attr(name, val.desc)
         elif isinstance(val, list) and val and _all_is_type(val, Block):
             desc.set_blocks_attr(name, [v.desc for v in val])
-        elif isinstance(val, core.BlockDesc) or isinstance(
-            val, core.ProgramDesc
-        ):
+        elif isinstance(val, (core.BlockDesc, core.ProgramDesc)):
             desc.set_serialized_attr(name, val.serialize_to_string())
         else:
             desc._set_attr(name, val)
@@ -5547,9 +5543,7 @@ def _update_desc_attr(self, desc, name, val):
             desc.set_block_attr(name, val.desc)
         elif isinstance(val, list) and val and _all_is_type(val, Block):
             desc.set_blocks_attr(name, [v.desc for v in val])
-        elif isinstance(val, core.BlockDesc) or isinstance(
-            val, core.ProgramDesc
-        ):
+        elif isinstance(val, (core.BlockDesc, core.ProgramDesc)):
             desc.set_serialized_attr(name, val.serialize_to_string())
         else:
             desc._set_attr(name, val)
2 changes: 1 addition & 1 deletion python/paddle/base/layer_helper.py
@@ -45,7 +45,7 @@ def append_op(self, *args, **kwargs):
     def multiple_input(self, input_param_name='input'):
         inputs = self.kwargs.get(input_param_name, [])
         ret = []
-        if isinstance(inputs, list) or isinstance(inputs, tuple):
+        if isinstance(inputs, (list, tuple)):
             for inp in inputs:
                 ret.append(self.to_variable(inp))
         else:
6 changes: 1 addition & 5 deletions python/paddle/base/layers/layer_function_generator.py
@@ -235,11 +235,7 @@ def func(*args, **kwargs):
         outputs = dict()
         out = kwargs.pop(_convert_(o_name), [])
         if out:
-            out_var = (
-                out[0]
-                if (isinstance(out, list) or isinstance(out, tuple))
-                else out
-            )
+            out_var = out[0] if (isinstance(out, (list, tuple))) else out
         else:
             out_var = helper.create_variable_for_type_inference(dtype=dtype)
         outputs[o_name] = [out_var]
2 changes: 1 addition & 1 deletion python/paddle/base/param_attr.py
@@ -174,7 +174,7 @@ def _to_attr(arg):
     """
     if arg is None:
         return ParamAttr()
-    elif isinstance(arg, list) or isinstance(arg, tuple):
+    elif isinstance(arg, (list, tuple)):
         return [ParamAttr._to_attr(a) for a in arg]
     elif isinstance(arg, ParamAttr):
         return arg
4 changes: 2 additions & 2 deletions python/paddle/incubate/optimizer/recompute.py
@@ -109,8 +109,8 @@ def _set_checkpoints(self, checkpoints):
             checkpoints, list
         ), "_checkpoints should be a list of Variable or a list of String"
         for ckpt in checkpoints:
-            assert isinstance(ckpt, str) or isinstance(
-                ckpt, Variable
+            assert isinstance(
+                ckpt, (Variable, str)
             ), "_checkpoints should be a list of Variable or a list of String"
         self._checkpoints = checkpoints
 
2 changes: 1 addition & 1 deletion python/paddle/io/dataloader/dataloader_iter.py
@@ -711,7 +711,7 @@ def _get_data(self):
                 )
 
             # get(timeout) will call _poll(timeout) and may raise IOError
-            if isinstance(e, queue.Empty) or isinstance(e, IOError):
+            if isinstance(e, (IOError, queue.Empty)):
                 # continue on timeout to keep getting data from queue
                 continue
 
4 changes: 1 addition & 3 deletions python/paddle/tensor/manipulation.py
@@ -3607,9 +3607,7 @@ def reshape(x, shape, name=None):
             out = x
         else:
             out = _C_ops.reshape(x, new_shape)
-    elif isinstance(shape, core.eager.Tensor) or isinstance(
-        shape, paddle.ir.OpResult
-    ):
+    elif isinstance(shape, (core.eager.Tensor, paddle.ir.OpResult)):
         shape.stop_gradient = True
         out = _C_ops.reshape(x, shape)
     else:
4 changes: 1 addition & 3 deletions test/ir/inference/auto_scan_test.py
@@ -772,9 +772,7 @@ def random_to_skip():
                 if isinstance(threshold, float):
                     atol = threshold
                     rtol = 1e-8
-                elif isinstance(threshold, list) or isinstance(
-                    threshold, tuple
-                ):
+                elif isinstance(threshold, (list, tuple)):
                     atol = threshold[0]
                     rtol = threshold[1]
                 else:
4 changes: 1 addition & 3 deletions test/legacy_test/nets.py
@@ -222,9 +222,7 @@ def img_conv_group(
             pool_stride=2)
     """
     tmp = input
-    assert isinstance(conv_num_filter, list) or isinstance(
-        conv_num_filter, tuple
-    )
+    assert isinstance(conv_num_filter, (list, tuple))
 
     def __extend_list__(obj):
         if not hasattr(obj, '__len__'):
12 changes: 6 additions & 6 deletions test/legacy_test/test_bicubic_interp_v2_op.py
@@ -104,12 +104,12 @@ def bicubic_interp_test(
     align_corners=True,
     align_mode=0,
 ):
-    if isinstance(scale, float) or isinstance(scale, int):
+    if isinstance(scale, (float, int)):
         scale_list = []
         for _ in range(len(x.shape) - 2):
             scale_list.append(scale)
         scale = list(map(float, scale_list))
-    elif isinstance(scale, list) or isinstance(scale, tuple):
+    elif isinstance(scale, (list, tuple)):
         scale = list(map(float, scale))
     if SizeTensor is not None:
         if not isinstance(SizeTensor, list) and not isinstance(
@@ -266,7 +266,7 @@ def setUp(self):
         in_w = self.input_shape[2]
 
         if self.scale:
-            if isinstance(self.scale, float) or isinstance(self.scale, int):
+            if isinstance(self.scale, (float, int)):
                 if self.scale > 0.0:
                     scale_h = scale_w = float(self.scale)
             if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -305,7 +305,7 @@ def setUp(self):
             'data_layout': self.data_layout,
         }
         if self.scale:
-            if isinstance(self.scale, float) or isinstance(self.scale, int):
+            if isinstance(self.scale, (float, int)):
                 if self.scale > 0.0:
                     self.scale = [self.scale]
             if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -433,7 +433,7 @@ def setUp(self):
         in_w = self.input_shape[2]
 
         if self.scale:
-            if isinstance(self.scale, float) or isinstance(self.scale, int):
+            if isinstance(self.scale, (float, int)):
                 if self.scale > 0.0:
                     scale_h = scale_w = float(self.scale)
             if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -472,7 +472,7 @@ def setUp(self):
             'data_layout': self.data_layout,
         }
         if self.scale:
-            if isinstance(self.scale, float) or isinstance(self.scale, int):
+            if isinstance(self.scale, (float, int)):
                 if self.scale > 0.0:
                     self.scale = [self.scale]
             if isinstance(self.scale, list) and len(self.scale) == 1:
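
The rewritten scale checks above are behavior-preserving; a small sanity check along these lines (the sample values are hypothetical) shows the merged tuple form agreeing with the original or-chain:

    # Hypothetical sample values covering both matching and non-matching cases.
    for scale in (2, 1.5, [2.0], (1.0, 2.0), "2", None):
        merged = isinstance(scale, (float, int))
        chained = isinstance(scale, float) or isinstance(scale, int)
        assert merged == chained  # identical result for every input
    print("merged isinstance form matches the or-chain")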
20 changes: 10 additions & 10 deletions test/legacy_test/test_bilinear_interp_v2_op.py
@@ -121,12 +121,12 @@ def bilinear_interp_test(
     align_corners=True,
     align_mode=0,
 ):
-    if isinstance(scale, float) or isinstance(scale, int):
+    if isinstance(scale, (float, int)):
         scale_list = []
         for _ in range(len(x.shape) - 2):
             scale_list.append(scale)
         scale = list(map(float, scale_list))
-    elif isinstance(scale, list) or isinstance(scale, tuple):
+    elif isinstance(scale, (list, tuple)):
         scale = list(map(float, scale))
     if SizeTensor is not None:
         if not isinstance(SizeTensor, list) and not isinstance(
@@ -254,7 +254,7 @@ def setUp(self):
         scale_h = 0
         scale_w = 0
         if self.scale:
-            if isinstance(self.scale, float) or isinstance(self.scale, int):
+            if isinstance(self.scale, (float, int)):
                 if self.scale > 0.0:
                     scale_h = scale_w = float(self.scale)
             if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -295,7 +295,7 @@ def setUp(self):
             'data_layout': self.data_layout,
         }
         if self.scale:
-            if isinstance(self.scale, float) or isinstance(self.scale, int):
+            if isinstance(self.scale, (float, int)):
                 if self.scale > 0.0:
                     self.scale = [self.scale]
             if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -463,7 +463,7 @@ def setUp(self):
         scale_h = 0
         scale_w = 0
         if self.scale:
-            if isinstance(self.scale, float) or isinstance(self.scale, int):
+            if isinstance(self.scale, (float, int)):
                 if self.scale > 0.0:
                     scale_h = scale_w = float(self.scale)
             if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -504,7 +504,7 @@ def setUp(self):
             'data_layout': self.data_layout,
         }
         if self.scale:
-            if isinstance(self.scale, float) or isinstance(self.scale, int):
+            if isinstance(self.scale, (float, int)):
                 if self.scale > 0.0:
                     self.scale = [self.scale]
             if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -604,7 +604,7 @@ def setUp(self):
         ).astype("uint8")
 
         if self.scale:
-            if isinstance(self.scale, float) or isinstance(self.scale, int):
+            if isinstance(self.scale, (float, int)):
                 if self.scale > 0:
                     scale_h = scale_w = float(self.scale)
             if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -641,7 +641,7 @@ def setUp(self):
             'align_mode': self.align_mode,
         }
         if self.scale:
-            if isinstance(self.scale, float) or isinstance(self.scale, int):
+            if isinstance(self.scale, (float, int)):
                 if self.scale > 0:
                     self.scale = [self.scale]
             if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -778,7 +778,7 @@ def setUp(self):
         if self.scale_by_1Dtensor:
             self.inputs['Scale'] = np.array([self.scale]).astype("float32")
         elif self.scale:
-            if isinstance(self.scale, float) or isinstance(self.scale, int):
+            if isinstance(self.scale, (float, int)):
                 if self.scale > 0:
                     scale_h = scale_w = float(self.scale)
             if isinstance(self.scale, list) and len(self.scale) == 1:
@@ -805,7 +805,7 @@ def setUp(self):
         self.attrs['out_h'] = self.out_h
         self.attrs['out_w'] = self.out_w
         if self.scale:
-            if isinstance(self.scale, float) or isinstance(self.scale, int):
+            if isinstance(self.scale, (float, int)):
                 if self.scale > 0:
                     self.scale = [self.scale]
             if isinstance(self.scale, list) and len(self.scale) == 1:
(Diffs for the remaining 4 changed files are not rendered.)
