Skip to content

Commit

Permalink
[CodeStyle][task 1] enable Ruff UP032 rule in python/paddle/base (PaddlePaddle#57408)
Browse files Browse the repository at this point in the history

* base up032

* update up032

* Apply suggestions from code review

Loading branch information
Liyulingyue authored and jiahy0825 committed Oct 16, 2023
1 parent 9d95b8f commit 354328a
Show file tree
Hide file tree
Showing 17 changed files with 135 additions and 241 deletions.
1 change: 0 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,6 @@ ignore = [

# Temporarily ignored
"python/paddle/base/**" = [
"UP032",
"UP031",
"C408",
"UP030",
Expand Down
4 changes: 2 additions & 2 deletions python/paddle/base/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,10 +188,10 @@ def __bootstrap__():

if num_threads > 1:
print(
'WARNING: OMP_NUM_THREADS set to {0}, not 1. The computation '
f'WARNING: OMP_NUM_THREADS set to {num_threads}, not 1. The computation '
'speed will not be optimized if you use data parallel. It will '
'fail if this PaddlePaddle binary is compiled with OpenBlas since'
' OpenBlas does not support multi-threads.'.format(num_threads),
' OpenBlas does not support multi-threads.',
file=sys.stderr,
)
print('PLEASE USE OMP_NUM_THREADS WISELY.', file=sys.stderr)
Expand Down
12 changes: 4 additions & 8 deletions python/paddle/base/backward.py
Original file line number Diff line number Diff line change
Expand Up @@ -995,9 +995,7 @@ def _append_backward_ops_with_checkpoints_(
segments.append([min_idx, max_idx + 1])
else:
_logger.info(
"Could not recompute op range [{}] - [{}] ".format(
min_idx, max_idx + 1
)
f"Could not recompute op range [{min_idx}] - [{max_idx + 1}] "
)

start_idx += 1
Expand All @@ -1008,7 +1006,7 @@ def _append_backward_ops_with_checkpoints_(
recompute_segments = segments

for i, (idx1, idx2) in enumerate(recompute_segments):
_logger.info("recompute segment[{}]".format(i))
_logger.info(f"recompute segment[{i}]")
_logger.info(
"segment start op: [{}]: [{}]".format(
ops[idx1].desc.type(), ops[idx1].desc.input_arg_names()
Expand All @@ -1019,7 +1017,7 @@ def _append_backward_ops_with_checkpoints_(
ops[idx2 - 1].desc.type(), ops[idx2 - 1].desc.input_arg_names()
)
)
_logger.info("recompute segment[{}]".format(i))
_logger.info(f"recompute segment[{i}]")
_logger.info(
"segment start op: [{}]: [{}]".format(
ops[idx1].desc.type(), ops[idx1].desc.input_arg_names()
Expand Down Expand Up @@ -2193,9 +2191,7 @@ def append_backward(
grad_block = grad_info[1]
if not grad_block.has_var(grad_info[0]):
raise ValueError(
"grad block[{0}] did not have grad var {1}".format(
grad_info[1], grad_info[0]
)
f"grad block[{grad_info[1]}] did not have grad var {grad_info[0]}"
)
# Get the param var from the global block
param_var = program.global_block().var(param)
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/base/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,7 @@ def load_dso(dso_absolute_path):

cdll.LoadLibrary(dso_absolute_path)
except:
warnings.warn("Load {} failed".format(dso_absolute_path))
warnings.warn(f"Load {dso_absolute_path} failed")


def pre_load(dso_name):
Expand Down
8 changes: 4 additions & 4 deletions python/paddle/base/dygraph/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -747,19 +747,19 @@ def test_dygraph_grad(grad_outputs=None):
return gradients(outputs, inputs, grad_outputs, no_grad_vars)

def check_in_out(in_out_list, name):
assert in_out_list is not None, "{} should not be None".format(name)
assert in_out_list is not None, f"{name} should not be None"

if isinstance(in_out_list, (list, tuple)):
assert len(in_out_list) > 0, "{} cannot be empty".format(name)
assert len(in_out_list) > 0, f"{name} cannot be empty"
for each_var in in_out_list:
assert isinstance(
each_var, core.eager.Tensor
), "Elements of {} must be Tensor".format(name)
), f"Elements of {name} must be Tensor"
return in_out_list
else:
assert isinstance(
in_out_list, core.eager.Tensor
), "{} must be Tensor or list of Tensor".format(name)
), f"{name} must be Tensor or list of Tensor"
return [in_out_list]

outputs = check_in_out(outputs, 'outputs')
Expand Down
14 changes: 4 additions & 10 deletions python/paddle/base/executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -340,9 +340,7 @@ def has_feed_operators(block, feed_targets, feed_holder_name):
feed_target_name = op.desc.output('Out')[0]
if feed_target_name not in feed_targets:
raise Exception(
"'feed_targets' does not have {} variable".format(
feed_target_name
)
f"'feed_targets' does not have {feed_target_name} variable"
)
else:
break
Expand Down Expand Up @@ -387,9 +385,7 @@ def has_fetch_operators(
var.desc.name() for var in fetch_targets
]:
raise Exception(
"'fetch_targets' does not have {} variable".format(
fetch_target_name
)
f"'fetch_targets' does not have {fetch_target_name} variable"
)
idx = op.desc.attr('col')
assert fetch_target_name == fetch_targets[idx].desc.name()
Expand Down Expand Up @@ -710,9 +706,7 @@ def _as_lodtensor(data, place, dtype=None):
data = data.astype(dtype)
else:
raise TypeError(
"Convert data of type {} to Tensor is not supported".format(
type(data)
)
f"Convert data of type {type(data)} to Tensor is not supported"
)

# convert numpy.ndarray to tensor
Expand Down Expand Up @@ -752,7 +746,7 @@ def __init__(self, var_dict=None, period_secs=60):
def handler(self, res_dict):
for key in res_dict:
if type(res_dict[key]) is np.ndarray:
sys.stdout.write("{}[0]: {} ".format(key, res_dict[key][0]))
sys.stdout.write(f"{key}[0]: {res_dict[key][0]} ")
sys.stdout.write("\n")

@staticmethod
Expand Down
Loading

0 comments on commit 354328a

Please sign in to comment.