From 354328a422525fb378a60739055df4d721d1e964 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BC=A0=E6=98=A5=E4=B9=94?= <83450930+Liyulingyue@users.noreply.github.com> Date: Fri, 22 Sep 2023 10:16:06 +0800 Subject: [PATCH] [CodeStyle][task 1] enable Ruff UP032 rule in `python/paddle/base` (#57408) * base up032 * update up032 * Apply suggestions from code review --- pyproject.toml | 1 - python/paddle/base/__init__.py | 4 +- python/paddle/base/backward.py | 12 +- python/paddle/base/core.py | 2 +- python/paddle/base/dygraph/base.py | 8 +- python/paddle/base/executor.py | 14 +- python/paddle/base/framework.py | 139 +++++++----------- .../incubate/checkpoint/auto_checkpoint.py | 68 ++++----- .../incubate/checkpoint/checkpoint_saver.py | 22 +-- python/paddle/base/layer_helper.py | 2 +- python/paddle/base/layer_helper_base.py | 12 +- .../base/layers/layer_function_generator.py | 26 ++-- python/paddle/base/layers/math_op_patch.py | 16 +- python/paddle/base/param_attr.py | 2 +- python/paddle/base/reader.py | 8 +- python/paddle/base/trainer_factory.py | 8 +- python/paddle/base/variable_index.py | 32 ++-- 17 files changed, 135 insertions(+), 241 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9b247f4a738a9..372895a3f02a1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -105,7 +105,6 @@ ignore = [ # Temporarily ignored "python/paddle/base/**" = [ - "UP032", "UP031", "C408", "UP030", diff --git a/python/paddle/base/__init__.py b/python/paddle/base/__init__.py index acc6f9f51ae2f..857d6d58e4718 100644 --- a/python/paddle/base/__init__.py +++ b/python/paddle/base/__init__.py @@ -188,10 +188,10 @@ def __bootstrap__(): if num_threads > 1: print( - 'WARNING: OMP_NUM_THREADS set to {0}, not 1. The computation ' + f'WARNING: OMP_NUM_THREADS set to {num_threads}, not 1. The computation ' 'speed will not be optimized if you use data parallel. 
It will ' 'fail if this PaddlePaddle binary is compiled with OpenBlas since' - ' OpenBlas does not support multi-threads.'.format(num_threads), + ' OpenBlas does not support multi-threads.', file=sys.stderr, ) print('PLEASE USE OMP_NUM_THREADS WISELY.', file=sys.stderr) diff --git a/python/paddle/base/backward.py b/python/paddle/base/backward.py index 1f3f67a98b640..a6786848ddb85 100755 --- a/python/paddle/base/backward.py +++ b/python/paddle/base/backward.py @@ -995,9 +995,7 @@ def _append_backward_ops_with_checkpoints_( segments.append([min_idx, max_idx + 1]) else: _logger.info( - "Could not recompute op range [{}] - [{}] ".format( - min_idx, max_idx + 1 - ) + f"Could not recompute op range [{min_idx}] - [{max_idx + 1}] " ) start_idx += 1 @@ -1008,7 +1006,7 @@ def _append_backward_ops_with_checkpoints_( recompute_segments = segments for i, (idx1, idx2) in enumerate(recompute_segments): - _logger.info("recompute segment[{}]".format(i)) + _logger.info(f"recompute segment[{i}]") _logger.info( "segment start op: [{}]: [{}]".format( ops[idx1].desc.type(), ops[idx1].desc.input_arg_names() @@ -1019,7 +1017,7 @@ def _append_backward_ops_with_checkpoints_( ops[idx2 - 1].desc.type(), ops[idx2 - 1].desc.input_arg_names() ) ) - _logger.info("recompute segment[{}]".format(i)) + _logger.info(f"recompute segment[{i}]") _logger.info( "segment start op: [{}]: [{}]".format( ops[idx1].desc.type(), ops[idx1].desc.input_arg_names() @@ -2193,9 +2191,7 @@ def append_backward( grad_block = grad_info[1] if not grad_block.has_var(grad_info[0]): raise ValueError( - "grad block[{0}] did not have grad var {1}".format( - grad_info[1], grad_info[0] - ) + f"grad block[{grad_info[1]}] did not have grad var {grad_info[0]}" ) # Get the param var from the global block param_var = program.global_block().var(param) diff --git a/python/paddle/base/core.py b/python/paddle/base/core.py index 285a9f1b1a61b..158e556cd1afe 100644 --- a/python/paddle/base/core.py +++ b/python/paddle/base/core.py @@ -210,7 
+210,7 @@ def load_dso(dso_absolute_path): cdll.LoadLibrary(dso_absolute_path) except: - warnings.warn("Load {} failed".format(dso_absolute_path)) + warnings.warn(f"Load {dso_absolute_path} failed") def pre_load(dso_name): diff --git a/python/paddle/base/dygraph/base.py b/python/paddle/base/dygraph/base.py index d85fc8ca25bf7..22a63ff3c0190 100644 --- a/python/paddle/base/dygraph/base.py +++ b/python/paddle/base/dygraph/base.py @@ -747,19 +747,19 @@ def test_dygraph_grad(grad_outputs=None): return gradients(outputs, inputs, grad_outputs, no_grad_vars) def check_in_out(in_out_list, name): - assert in_out_list is not None, "{} should not be None".format(name) + assert in_out_list is not None, f"{name} should not be None" if isinstance(in_out_list, (list, tuple)): - assert len(in_out_list) > 0, "{} cannot be empty".format(name) + assert len(in_out_list) > 0, f"{name} cannot be empty" for each_var in in_out_list: assert isinstance( each_var, core.eager.Tensor - ), "Elements of {} must be Tensor".format(name) + ), f"Elements of {name} must be Tensor" return in_out_list else: assert isinstance( in_out_list, core.eager.Tensor - ), "{} must be Tensor or list of Tensor".format(name) + ), f"{name} must be Tensor or list of Tensor" return [in_out_list] outputs = check_in_out(outputs, 'outputs') diff --git a/python/paddle/base/executor.py b/python/paddle/base/executor.py index 0921d7b79d14b..9fd421f54cb4c 100755 --- a/python/paddle/base/executor.py +++ b/python/paddle/base/executor.py @@ -340,9 +340,7 @@ def has_feed_operators(block, feed_targets, feed_holder_name): feed_target_name = op.desc.output('Out')[0] if feed_target_name not in feed_targets: raise Exception( - "'feed_targets' does not have {} variable".format( - feed_target_name - ) + f"'feed_targets' does not have {feed_target_name} variable" ) else: break @@ -387,9 +385,7 @@ def has_fetch_operators( var.desc.name() for var in fetch_targets ]: raise Exception( - "'fetch_targets' does not have {} variable".format( - 
fetch_target_name - ) + f"'fetch_targets' does not have {fetch_target_name} variable" ) idx = op.desc.attr('col') assert fetch_target_name == fetch_targets[idx].desc.name() @@ -710,9 +706,7 @@ def _as_lodtensor(data, place, dtype=None): data = data.astype(dtype) else: raise TypeError( - "Convert data of type {} to Tensor is not supported".format( - type(data) - ) + f"Convert data of type {type(data)} to Tensor is not supported" ) # convert numpy.ndarray to tensor @@ -752,7 +746,7 @@ def __init__(self, var_dict=None, period_secs=60): def handler(self, res_dict): for key in res_dict: if type(res_dict[key]) is np.ndarray: - sys.stdout.write("{}[0]: {} ".format(key, res_dict[key][0])) + sys.stdout.write(f"{key}[0]: {res_dict[key][0]} ") sys.stdout.write("\n") @staticmethod diff --git a/python/paddle/base/framework.py b/python/paddle/base/framework.py index 83f3ee734b8f2..3c99297c20875 100644 --- a/python/paddle/base/framework.py +++ b/python/paddle/base/framework.py @@ -1235,9 +1235,7 @@ def _debug_string_(proto, throw_on_error=True): error_fields = list() if not proto.IsInitialized(error_fields) and throw_on_error: raise ValueError( - "{0} are not initialized.\nThe message is {1}:\n".format( - error_fields, proto - ) + f"{error_fields} are not initialized.\nThe message is {proto}:\n" ) return proto.__str__() @@ -1296,7 +1294,7 @@ def wrap_as_scalar(number): # it is a numpy scalar return core.Scalar(number.item()) else: - raise TypeError("Cannot wrap {} as core.Scalar".format(number)) + raise TypeError(f"Cannot wrap {number} as core.Scalar") def wrap_as_scalars(array): @@ -1486,9 +1484,9 @@ def __init__( self.desc.set_type(type) elif self.desc.type() != type: raise ValueError( - "Variable '{0}' has been created before. The " - "previous type is {1}, the new type is {2}. They" - " are not matched".format(self.name, self.desc.type(), type) + f"Variable '{self.name}' has been created before. The " + f"previous type is {self.desc.type()}, the new type is {type}. 
They" + " are not matched" ) if shape is not None: @@ -1499,9 +1497,9 @@ def __init__( shape = tuple(shape) if shape != old_shape: raise ValueError( - "Variable '{0}' has been created before. The previous " - "shape is {1}, the new shape is {2}. They are not " - "matched.".format(self.name, old_shape, shape) + f"Variable '{self.name}' has been created before. The previous " + f"shape is {old_shape}, the new shape is {shape}. They are not " + "matched." ) if dtype is not None: if is_new_var: @@ -1510,10 +1508,10 @@ def __init__( old_dtype = self.dtype if dtype != old_dtype: raise ValueError( - "Variable '{0}' has been created before. " - "The previous data type is {1}, the new " - "data type is {2}. They are not " - "matched.".format(self.name, old_dtype, dtype) + f"Variable '{self.name}' has been created before. " + f"The previous data type is {old_dtype}, the new " + f"data type is {dtype}. They are not " + "matched." ) if lod_level is not None: @@ -1522,10 +1520,10 @@ def __init__( else: if lod_level != self.lod_level: raise ValueError( - "Variable '{0}' has been created before. " - "The previous lod_level is {1}, the new " - "lod_level is {2}. They are not " - "matched".format(self.name, self.lod_level, lod_level) + f"Variable '{self.name}' has been created before. " + f"The previous lod_level is {self.lod_level}, the new " + f"lod_level is {lod_level}. They are not " + "matched" ) if persistable is not None: if is_new_var: @@ -1533,11 +1531,9 @@ def __init__( else: if persistable != self.persistable: raise ValueError( - "Variable '{0}' has been created before." - "The previous persistable is {1}, the new " - "persistable is {2}. They are not matched".format( - self.name, self.persistable, persistable - ) + f"Variable '{self.name}' has been created before." + f"The previous persistable is {self.persistable}, the new " + f"persistable is {persistable}. 
They are not matched" ) if need_check_feed and is_new_var: @@ -1832,7 +1828,7 @@ def _to_readable_code(self): stop_gradient=self.stop_gradient, ) else: - var_str = "{name} : {type})".format(name=self.name, type=type_str) + var_str = f"{self.name} : {type_str})" if self.is_parameter: if self.trainable: @@ -2549,7 +2545,7 @@ def get_value(self, scope=None): var_temp = scope.find_var(self.name) if var_temp is None: raise ValueError( - "Can not find Variable '{}' in the Scope.".format(self.name) + f"Can not find Variable '{self.name}' in the Scope." ) t = var_temp.get_tensor() return t @@ -2624,7 +2620,7 @@ def set_value(self, value, scope=None): var_temp = scope.find_var(self.name) if var_temp is None: raise ValueError( - "Can not find Variable '{}' in the Scope.".format(self.name) + f"Can not find Variable '{self.name}' in the Scope." ) t = var_temp.get_tensor() @@ -2982,13 +2978,9 @@ def __init__( op_attrs[callstack_var_name] = [] for frame in traceback.extract_stack(): op_attrs[callstack_var_name].append( - ' File "{}", line {}, in {}'.format( - frame[0], frame[1], frame[2] - ) - ) - op_attrs[callstack_var_name].append( - ' {}'.format(frame[3]) + f' File "{frame[0]}", line {frame[1]}, in {frame[2]}' ) + op_attrs[callstack_var_name].append(f' {frame[3]}') self.desc.set_type(type) proto = OpProtoHolder.instance().get_op_proto(type) @@ -3036,7 +3028,7 @@ def find_name(var_list, name): found = find_name(inputs, in_proto.name) assert ( found or in_proto.dispensable - ), "Input {} not found".format(in_proto.name) + ), f"Input {in_proto.name} not found" if found: in_args = inputs[in_proto.name] if not isinstance(in_args, (list, tuple)): @@ -3249,18 +3241,18 @@ def _to_readable_code(self, skip_op_callstack=True): ) outputs_str = "{" for i in range(0, len(self.output_names)): - outputs_str += "{name}=".format(name=self.output_names[i]) + outputs_str += f"{self.output_names[i]}=" o = self.output(self.output_names[i]) - outputs_str += "{value}".format(value=o) + outputs_str 
+= f"{o}" if i != len(self.output_names) - 1: outputs_str += ", " outputs_str += "}" inputs_str = "{" for i in range(0, len(self.input_names)): - inputs_str += "{name}=".format(name=self.input_names[i]) + inputs_str += f"{self.input_names[i]}=" o = self.input(self.input_names[i]) - inputs_str += "{value}".format(value=o) + inputs_str += f"{o}" if i != len(self.input_names) - 1: inputs_str += ", " @@ -3276,9 +3268,7 @@ def _to_readable_code(self, skip_op_callstack=True): attr_type = self.desc.attr_type(name, True) if attr_type == core.AttrType.VAR: attr_var_name = self.desc.attr(name, True).name() - a = "{name} = Var['{value}']".format( - name=name, value=attr_var_name - ) + a = f"{name} = Var['{attr_var_name}']" attrs_str += a if i != len(attr_names) - 1: attrs_str += ", " @@ -3297,18 +3287,14 @@ def _to_readable_code(self, skip_op_callstack=True): continue if attr_type == core.AttrType.BLOCK: - a = "{name} = block[{value}]".format( - name=name, value=self._block_attr_id(name) - ) + a = f"{name} = block[{self._block_attr_id(name)}]" attrs_str += a if i != len(attr_names) - 1: attrs_str += ", " continue if attr_type == core.AttrType.BLOCKS: - a = "{name} = blocks{value}".format( - name=name, value=self._blocks_attr_ids(name) - ) + a = f"{name} = blocks{self._blocks_attr_ids(name)}" attrs_str += a if i != len(attr_names) - 1: attrs_str += ", " @@ -3331,7 +3317,7 @@ def _to_readable_code(self, skip_op_callstack=True): else: value = self.desc.attr(name) - a = "{name} = {value}".format(name=name, value=value) + a = f"{name} = {value}" attrs_str += a if i != len(attr_names) - 1: @@ -3349,16 +3335,11 @@ def _to_readable_code(self, skip_op_callstack=True): ) if outputs_str != "{}": - op_str = "{outputs} = {op_type}(inputs={inputs}, {attrs})".format( - outputs=outputs_str, - op_type=self.type, - inputs=inputs_str, - attrs=attrs_str, + op_str = ( + f"{outputs_str} = {self.type}(inputs={inputs_str}, {attrs_str})" ) else: - op_str = "{op_type}(inputs={inputs}, 
{attrs})".format( - op_type=self.type, inputs=inputs_str, attrs=attrs_str - ) + op_str = f"{self.type}(inputs={inputs_str}, {attrs_str})" return op_str def __str__(self): @@ -3641,9 +3622,7 @@ def _var_attr(self, name): attr_type = self.desc.attr_type(name, True) assert ( attr_type == core.AttrType.VAR - ), "Required type attr({}) is Variable, but received {}".format( - name, attr_type - ) + ), f"Required type attr({name}) is Variable, but received {attr_type}" attr_var_name = self.desc.attr(name, True).name() return self.block._var_recursive(attr_var_name) @@ -3660,9 +3639,7 @@ def _vars_attr(self, name): attr_type = self.desc.attr_type(name, True) assert ( attr_type == core.AttrType.VARS - ), "Required type attr({}) is list[Variable], but received {}".format( - name, attr_type - ) + ), f"Required type attr({name}) is list[Variable], but received {attr_type}" attr_vars = [ self.block._var_recursive(var.name()) for var in self.desc.attr(name, True) @@ -4033,14 +4010,12 @@ def _to_readable_code(self, skip_op_callstack=True): type(skip_op_callstack) ) block_str = "{ // block " - block_str += "{}\n".format(self.idx) + block_str += f"{self.idx}\n" for var in list(self.vars.values()): - block_str += " {}\n".format(var._to_readable_code()) + block_str += f" {var._to_readable_code()}\n" block_str += "\n" for op in self.ops: - block_str += " {}\n".format( - op._to_readable_code(skip_op_callstack) - ) + block_str += f" {op._to_readable_code(skip_op_callstack)}\n" block_str += "}" return block_str @@ -4194,7 +4169,7 @@ def _var_recursive(self, name): if var: return var else: - raise ValueError("Var {0} is not found recursively".format(name)) + raise ValueError(f"Var {name} is not found recursively") def all_parameters(self): return list(self.iter_parameters()) @@ -5547,9 +5522,7 @@ def _convert_to_pdf(dot_file_path): ) if exited_code != 0: print('The dot command is needed for creating pdf files.') - print( - 'The {} is saved as the dot filetype.'.format(dot_file_path) - ) + 
print(f'The {dot_file_path} is saved as the dot filetype.') remove_ctr_vars = set() if remove_ctr_var: @@ -5557,7 +5530,7 @@ def _convert_to_pdf(dot_file_path): if node.is_ctrl_var(): remove_ctr_vars.add(node) self.safe_remove_nodes(remove_ctr_vars) - print('Total ops num = {}.'.format(len(self.all_op_nodes()))) + print(f'Total ops num = {len(self.all_op_nodes())}.') if marked_nodes is not None: if not isinstance(marked_nodes, set): @@ -7124,9 +7097,7 @@ def state_dict(self, mode='all', scope=None): if not isinstance(mode, str): raise TypeError( - "Type of `mode` should be string, but received {}.".format( - type(mode) - ) + f"Type of `mode` should be string, but received {type(mode)}." ) def is_parameter(var): @@ -7219,9 +7190,7 @@ def set_state_dict(self, state_dict, scope=None): if not isinstance(state_dict, dict): raise TypeError( - "Type of `state_dict` should be dict, but received {}.".format( - type(state_dict) - ) + f"Type of `state_dict` should be dict, but received {type(state_dict)}." ) vars_dict = {var.name: var for var in self.list_vars()} @@ -7238,18 +7207,12 @@ def set_state_dict(self, state_dict, scope=None): try: vars_dict[name].set_value(value, scope) except ValueError as err: - warnings.warn( - "Skip loading for '{}'. ".format(name) + str(err) - ) + warnings.warn(f"Skip loading for '{name}'. " + str(err)) except TypeError as err: - warnings.warn( - "Skip loading for '{}'. ".format(name) + str(err) - ) + warnings.warn(f"Skip loading for '{name}'. " + str(err)) else: warnings.warn( - "Skip loading for '{0}'. Because '{0}' not in the program.".format( - name - ) + f"Skip loading for '{name}'. Because '{name}' not in the program." 
) @@ -7512,9 +7475,7 @@ def __str__(self): [-0.70368278, 0.52986908, -0.68742192], [-0.54217887, 0.48439729, 0.34082305]]) """ - return "Parameter containing:\n{tensor}".format( - tensor=super().__str__() - ) + return f"Parameter containing:\n{super().__str__()}" def __deepcopy__(self, memo): """ diff --git a/python/paddle/base/incubate/checkpoint/auto_checkpoint.py b/python/paddle/base/incubate/checkpoint/auto_checkpoint.py index e8f75f3a4ed55..9bf737fb055dc 100644 --- a/python/paddle/base/incubate/checkpoint/auto_checkpoint.py +++ b/python/paddle/base/incubate/checkpoint/auto_checkpoint.py @@ -115,21 +115,17 @@ def __init__(self): ), "hdfs environ must set" except Exception as e: - logger.fatal("exception:{}".format(e)) + logger.fatal(f"exception:{e}") sys.exit(1) def get_range_checkpoint_path(self, name): - return "{}/{}/range/{}".format( - self.hdfs_checkpoint_path, self.job_id, name - ) + return f"{self.hdfs_checkpoint_path}/{self.job_id}/range/{name}" def get_exe_checkpoint_path(self, name): - return "{}/{}/exe/{}".format( - self.hdfs_checkpoint_path, self.job_id, name - ) + return f"{self.hdfs_checkpoint_path}/{self.job_id}/exe/{name}" def get_job_path(self): - return "{}/{}".format(self.hdfs_checkpoint_path, self.job_id) + return f"{self.hdfs_checkpoint_path}/{self.job_id}" @property def save_checkpoint_inter(self): @@ -235,7 +231,7 @@ def __ne__(self, t): return not self == t def serialize(self, path): - file_name = "{}/{}".format(path, self._file_name) + file_name = f"{path}/{self._file_name}" with open(file_name, 'w') as f: s = self._serialize() f.write(s) @@ -248,7 +244,7 @@ def _serialize(self, pop_keys=["restored_from"]): def deserialize(self, path): d = None - file_name = "{}/{}".format(path, self._file_name) + file_name = f"{path}/{self._file_name}" with open(file_name, 'r') as f: s = f.read() self._deserialize(s) @@ -297,7 +293,7 @@ def __init__( self._save_checkpoint_inter = self._checker.save_checkpoint_inter assert ( self._save_checkpoint_inter 
>= 0 - ), "checkpointer:{} must >=0".format(self._save_checkpoint_inter) + ), f"checkpointer:{self._save_checkpoint_inter} must >=0" self._last_checkpoint_time = time.time() self._load_cp_nos = None @@ -344,7 +340,7 @@ def _look_for_valid(self, cp_nos): local_cache_path=self._checker._fs_cache, ) cps.append(t) - logger.debug("look for valid:{} t:{}".format(i, t._serialize())) + logger.debug(f"look for valid:{i} t:{t._serialize()}") if epoch_no < 0: epoch_no = t._epoch_no else: @@ -354,7 +350,7 @@ def _look_for_valid(self, cp_nos): def _get_last_valid_checkpoint(self): self._load_cp_nos = self._cper.get_checkpoint_no(self._checkpoint_path) - logger.info("find checkpoint nos:{}".format(self._load_cp_nos)) + logger.info(f"find checkpoint nos:{self._load_cp_nos}") if len(self._load_cp_nos) < 1: self._restored_from = CONST_MEMORYINIT @@ -371,9 +367,7 @@ def _get_last_valid_checkpoint(self): self._restored_from = CONST_CHECKPOINT self._checkpoint_epoch_no = self._epoch_no - logger.info( - "load tain_epoch_range checkpoint:{}".format(self._serialize()) - ) + logger.info(f"load tain_epoch_range checkpoint:{self._serialize()}") elif g_acp_type == CONST_DACP_TYPE: t, i = self._look_for_valid(self._load_cp_nos) @@ -391,11 +385,9 @@ def _get_last_valid_checkpoint(self): self._restored_from = CONST_CHECKPOINT self._checkpoint_epoch_no = self._epoch_no - logger.info( - "load tain_epoch_range checkpoint:{}".format(self._serialize()) - ) + logger.info(f"load tain_epoch_range checkpoint:{self._serialize()}") else: - raise AssertionError("not supported acp_type:{}".format(g_acp_type)) + raise AssertionError(f"not supported acp_type:{g_acp_type}") def _to_dict(self): d = { @@ -416,7 +408,7 @@ def name(self): return self._name def serialize(self, path): - file_name = "{}/{}".format(path, self._file_name) + file_name = f"{path}/{self._file_name}" with open(file_name, 'w') as f: s = self._serialize() f.write(s) @@ -440,7 +432,7 @@ def restored_from(self): def deserialize(self, path): d 
= None - file_name = "{}/{}".format(path, self._file_name) + file_name = f"{path}/{self._file_name}" with open(file_name, 'r') as f: d = json.load(f) @@ -463,16 +455,14 @@ def next(self): if self._max_epoch_num < 0: self._max_epoch_num = sys.maxint - assert self._epoch_no >= -1, "self._epoch_no:{} must >=-1".format( - self._epoch_no - ) + assert ( + self._epoch_no >= -1 + ), f"self._epoch_no:{self._epoch_no} must >=-1" self._last_checkpoint_time = time.time() start = self._epoch_no + 1 logger.info( - "started epoch_no:{} max_epoch_num:{}".format( - start, self._max_epoch_num - ) + f"started epoch_no:{start} max_epoch_num:{self._max_epoch_num}" ) for i in range(start, self._max_epoch_num): @@ -501,9 +491,7 @@ def save_checkpoint(self): elif g_acp_type == CONST_DACP_TYPE: self._save_checkpoint() else: - raise AssertionError( - "not supported acp_type:{}".format(g_acp_type) - ) + raise AssertionError(f"not supported acp_type:{g_acp_type}") self._last_checkpoint_time = time.time() def _save_checkpoint(self): @@ -531,7 +519,7 @@ def _save_checkpoint(self): e[t._key] = t - logger.debug("save executor checkpoint:{}".format(t._serialize())) + logger.debug(f"save executor checkpoint:{t._serialize()}") if len(self._exe_status) > 0: self._cper.save_checkpoint( @@ -540,7 +528,7 @@ def _save_checkpoint(self): local_cache_path=self._checker._fs_cache, ) logger.info( - "save train_epoch_range checkpoint:{}".format(self._serialize()) + f"save train_epoch_range checkpoint:{self._serialize()}" ) self._generate_flag() @@ -607,9 +595,7 @@ def _can_auto_checkpoint(prog): g_program_attr[program._auto_checkpoint_name] = ret if not ret: logger.debug( - "program {} need't to auto checkpoint".format( - program._auto_checkpoint_name - ) + f"program {program._auto_checkpoint_name} need't to auto checkpoint" ) return False @@ -617,7 +603,7 @@ def _get_running_key(exe_name, program_name): - return "{}_{}".format(exe_name, program_name) + return 
f"{exe_name}_{program_name}" def _get_checker(): @@ -653,7 +639,7 @@ def train_epoch_range(max_epoch_num, save_checkpoint_inter=None): return g_acp_type = CONST_ACP_TYPE - logger.info("acp_type:{}".format(g_acp_type)) + logger.info(f"acp_type:{g_acp_type}") global g_train_epoch_range try: @@ -694,9 +680,7 @@ def _auto_checkpoint(exe, prog): if g_train_epoch_range.restored_from == CONST_CHECKPOINT: assert ( key in exe_status - ), "when restored key:{} must be in train_epoch_range:{}".format( - key, g_train_epoch_range - ) + ), f"when restored key:{key} must be in train_epoch_range:{g_train_epoch_range}" t = None if key in exe_status: @@ -712,7 +696,7 @@ def _auto_checkpoint(exe, prog): local_cache_path=g_checker._fs_cache, ) t._restored_from = CONST_CHECKPOINT - logger.info("load executor checkpoint {}".format(t)) + logger.info(f"load executor checkpoint {t}") t._exe = exe t._program = program t._epoch_no = g_train_epoch_range.get() diff --git a/python/paddle/base/incubate/checkpoint/checkpoint_saver.py b/python/paddle/base/incubate/checkpoint/checkpoint_saver.py index 0b113c2b87fc8..b597cf9c37f2f 100644 --- a/python/paddle/base/incubate/checkpoint/checkpoint_saver.py +++ b/python/paddle/base/incubate/checkpoint/checkpoint_saver.py @@ -69,17 +69,15 @@ def save_checkpoint( if not self._fs.is_exist(path): self._fs.mkdirs(path) else: - assert self._fs.is_dir(path), "path:{} must be a directory".format( - path - ) + assert self._fs.is_dir(path), f"path:{path} must be a directory" max_no = self._get_last_checkpoint_no(path) if max_no < 0: max_no = -1 max_no += 1 - real_path = "{}/{}.{}".format(path, self._checkpoint_prefix, max_no) - tmp_path = "{}.tmp".format(real_path) + real_path = f"{path}/{self._checkpoint_prefix}.{max_no}" + tmp_path = f"{real_path}.tmp" saved_path = tmp_path from paddle.distributed.fleet.utils.fs import LocalFS @@ -93,14 +91,14 @@ def save_checkpoint( ) if trainer_id is not None: - cache_path = "{}.{}".format(cache_path, trainer_id) + cache_path = 
f"{cache_path}.{trainer_id}" if not local_fs.is_exist(cache_path): local_fs.mkdirs(cache_path) else: assert local_fs.is_dir( cache_path - ), "cache path:{} must be a directory".format(cache_path) + ), f"cache path:{cache_path} must be a directory" saved_path = cache_path @@ -151,16 +149,14 @@ def load_checkpoint( ) if trainer_id is not None: - cache_path = "{}.{}".format(cache_path, trainer_id) + cache_path = f"{cache_path}.{trainer_id}" if not local_fs.is_exist(local_cache_path): local_fs.mkdirs(local_cache_path) if local_fs.is_exist(cache_path): local_fs.delete(cache_path) - real_path = "{}/{}.{}".format( - path, self._checkpoint_prefix, checkpoint_no - ) + real_path = f"{path}/{self._checkpoint_prefix}.{checkpoint_no}" load_path = real_path if self._fs.need_upload_download(): self._fs.download(real_path, cache_path) @@ -225,9 +221,7 @@ def clean_redundant_checkpoints(self, root_path, reserved=[]): try: n = int(g[1]) if n not in s: - path = "{}/{}.{}".format( - root_path, self._checkpoint_prefix, n - ) + path = f"{root_path}/{self._checkpoint_prefix}.{n}" self._fs.delete(path) except Exception as e: print(e) diff --git a/python/paddle/base/layer_helper.py b/python/paddle/base/layer_helper.py index 312eaf67a3320..333b176337a95 100644 --- a/python/paddle/base/layer_helper.py +++ b/python/paddle/base/layer_helper.py @@ -56,7 +56,7 @@ def multiple_input(self, input_param_name='input'): def input(self, input_param_name='input'): inputs = self.multiple_input(input_param_name) if len(inputs) != 1: - raise "{0} layer only takes one input".format(self.layer_type) + raise f"{self.layer_type} layer only takes one input" return inputs[0] @property diff --git a/python/paddle/base/layer_helper_base.py b/python/paddle/base/layer_helper_base.py index 6c047c08766fe..51680a1abbc4e 100644 --- a/python/paddle/base/layer_helper_base.py +++ b/python/paddle/base/layer_helper_base.py @@ -291,12 +291,12 @@ def __weight_normalize(g, v, dim): g_param = 
self.startup_program.global_block().create_parameter( dtype=dtype, shape=g_param_shape, - **g_param_attr._to_kwargs(with_initializer=False) + **g_param_attr._to_kwargs(with_initializer=False), ) v_param = self.startup_program.global_block().create_parameter( dtype=dtype, shape=v_param_shape, - **v_param_attr._to_kwargs(with_initializer=True) + **v_param_attr._to_kwargs(with_initializer=True), ) __norm_except_dim( x=v_param, @@ -354,7 +354,7 @@ def create_parameter( for i, size in enumerate(shape): assert size > 0, ( "Expected every dim's size to be larger than 0, " - "but the size of the {}-th dim is {}".format(i, size) + f"but the size of the {i}-th dim is {size}" ) # set global dtype if not dtype: @@ -430,20 +430,20 @@ def create_parameter( shape=shape, type=type, stop_gradient=stop_gradient, - **attr._to_kwargs(with_initializer=True) + **attr._to_kwargs(with_initializer=True), ) else: if in_pir_mode(): return paddle.ir.core.create_parameter( dtype=dtype, shape=shape, - **attr._to_kwargs(with_initializer=True) + **attr._to_kwargs(with_initializer=True), ) self.startup_program.global_block().create_parameter( dtype=dtype, shape=shape, type=type, - **attr._to_kwargs(with_initializer=True) + **attr._to_kwargs(with_initializer=True), ) return self.main_program.global_block().create_parameter( dtype=dtype, shape=shape, type=type, **attr._to_kwargs() diff --git a/python/paddle/base/layers/layer_function_generator.py b/python/paddle/base/layers/layer_function_generator.py index bd11a412ffc5b..82db72f7c4ce5 100644 --- a/python/paddle/base/layers/layer_function_generator.py +++ b/python/paddle/base/layers/layer_function_generator.py @@ -87,7 +87,7 @@ def _generate_doc_string_( buf.write(escape_math(op_proto.comment)) buf.write('\nArgs:\n') for each_input in op_proto.inputs: - line_begin = ' {0}'.format(_convert_(each_input.name)) + line_begin = f' {_convert_(each_input.name)}' buf.write(line_begin) buf.write(" (Tensor): ") buf.write(escape_math(each_input.comment)) @@ 
-156,7 +156,7 @@ def generate_layer_fn(op_type): if len(not_intermediate_outputs) != 1: raise ValueError( "Only one non intermediate output operator can be", - "automatically generated. {0}".format(op_type), + f"automatically generated. {op_type}", ) if not_intermediate_outputs[0].duplicable: @@ -193,9 +193,7 @@ def infer_and_check_dtype(op_proto, *args, **kwargs): for each in val: if not isinstance(each, Variable): - raise ValueError( - "input of {0} must be variable".format(op_type) - ) + raise ValueError(f"input of {op_type} must be variable") if dtype is None: dtype = each.dtype @@ -402,23 +400,17 @@ def __impl__(func): args = {"comment": trim_ending_dot(comment)} for each_input in op_proto.inputs: input_name = _convert_(each_input.name) - args["{0}_comment".format(input_name)] = trim_ending_dot( - each_input.comment - ) - args["{0}_type".format(input_name)] = "Variable" + args[f"{input_name}_comment"] = trim_ending_dot(each_input.comment) + args[f"{input_name}_type"] = "Variable" for each_attr in op_proto.attrs: input_name = _convert_(each_attr.name) - args["{0}_comment".format(input_name)] = trim_ending_dot( - each_attr.comment - ) - args["{0}_type".format(input_name)] = _type_to_str_(each_attr.type) + args[f"{input_name}_comment"] = trim_ending_dot(each_attr.comment) + args[f"{input_name}_type"] = _type_to_str_(each_attr.type) for each_opt in op_proto.outputs: output_name = _convert_(each_opt.name) - args["{0}_comment".format(output_name)] = trim_ending_dot( - each_opt.comment - ) - args["{0}_type".format(output_name)] = "Variable" + args[f"{output_name}_comment"] = trim_ending_dot(each_opt.comment) + args[f"{output_name}_type"] = "Variable" func.__doc__ = tmpl.substitute(args) return func diff --git a/python/paddle/base/layers/math_op_patch.py b/python/paddle/base/layers/math_op_patch.py index 53f35939b1f3a..cba6b9a3b55de 100644 --- a/python/paddle/base/layers/math_op_patch.py +++ b/python/paddle/base/layers/math_op_patch.py @@ -314,9 +314,7 @@ def 
append(self, var): var = to_tensor(var) else: raise TypeError( - "Required input var should be Variable, but received {}".format( - type(var) - ) + f"Required input var should be Variable, but received {type(var)}" ) if self.type != core.VarDesc.VarType.LOD_TENSOR_ARRAY: raise TypeError( @@ -336,9 +334,7 @@ def _item(self): """ if len(self.shape) > 1: raise TypeError( - "Required input var should be 1-D Variable, but received {}".format( - self.shape - ) + f"Required input var should be 1-D Variable, but received {self.shape}" ) return self @@ -575,17 +571,15 @@ def __impl__(self, other_var): comment = OpProtoHolder.instance().get_op_proto(op_type).comment - __impl__.__doc__ = """ - {0} + __impl__.__doc__ = f""" + {comment} Args: self(Variable): left hand variable other_var(Variable|float|int): right hand variable Returns: Variable - """.format( - comment - ) + """ __impl__.__name__ = method_name return __impl__ diff --git a/python/paddle/base/param_attr.py b/python/paddle/base/param_attr.py index 674c4ad4328c5..75064a449db38 100644 --- a/python/paddle/base/param_attr.py +++ b/python/paddle/base/param_attr.py @@ -187,7 +187,7 @@ def _to_attr(arg): elif isinstance(arg, bool): return ParamAttr._to_attr(None) if arg else False else: - raise TypeError("{0} cast to ParamAttr".format(type(arg))) + raise TypeError(f"{type(arg)} cast to ParamAttr") def _to_kwargs(self, with_initializer=False): """ diff --git a/python/paddle/base/reader.py b/python/paddle/base/reader.py index c3a65721db275..4ec5d3c4a9607 100644 --- a/python/paddle/base/reader.py +++ b/python/paddle/base/reader.py @@ -1623,9 +1623,7 @@ def __init__(self, dataset, places, drop_last): if dataset.thread_num != 0 and dataset.thread_num != thread_num: logging.warn( - 'thread_num {} which is set in Dataset is ignored'.format( - dataset.thread_num - ) + f'thread_num {dataset.thread_num} which is set in Dataset is ignored' ) dataset._set_thread(thread_num) @@ -1637,9 +1635,7 @@ def __init__(self, dataset, places, 
drop_last): and dataset.queue_num > thread_num ): logging.warn( - "queue_num {} which is set in Dataset is ignored".format( - dataset.queue_num - ) + f"queue_num {dataset.queue_num} which is set in Dataset is ignored" ) dataset._set_queue_num(thread_num) diff --git a/python/paddle/base/trainer_factory.py b/python/paddle/base/trainer_factory.py index 75351872d73d6..e5c5fa48b7155 100644 --- a/python/paddle/base/trainer_factory.py +++ b/python/paddle/base/trainer_factory.py @@ -181,9 +181,7 @@ def handler_launch_func(self, scope, handler): if isinstance(fetch_instance.var_dict[key], Variable): var_name_to_key[fetch_instance.var_dict[key].name] = key else: - local_logger.warning( - "the value of {} is not a Variable".format(key) - ) + local_logger.warning(f"the value of {key} is not a Variable") var_name_to_key["None.var"] = key elapsed_secs = 0 while True: @@ -202,9 +200,7 @@ def handler_launch_func(self, scope, handler): fetch_dict[key] = var if var is None: local_logger.warning( - "{} value currently not available".format( - var_name_to_key[key] - ) + f"{var_name_to_key[key]} value currently not available" ) res_dict = {} for key in fetch_dict: diff --git a/python/paddle/base/variable_index.py b/python/paddle/base/variable_index.py index dcc87b74ea658..6d034b80c8d9c 100644 --- a/python/paddle/base/variable_index.py +++ b/python/paddle/base/variable_index.py @@ -97,9 +97,7 @@ def update(self, index): self.pre_shape = self.indexes[-1].shape else: raise ValueError( - "Index should be list/tuple of int or Tensor, but received {}.".format( - index - ) + f"Index should be list/tuple of int or Tensor, but received {index}." ) def shape_stride(self, shape): @@ -116,9 +114,7 @@ def get_offset_stride(self, tensor_shape): for index in self.indexes: if not isinstance(index, paddle.base.Variable): raise ValueError( - "only support list/tensor index, but received {}.".format( - type(index) - ) + f"only support list/tensor index, but received {type(index)}." 
) if len(self.indexes) <= len(tensor_shape) or len(self.indexes) == 1: @@ -182,9 +178,7 @@ def set_item(self, tensor_origin, value): or value_dims_bd[i] == 1 ): raise ValueError( - "{} can not broadcast into {}".format( - value.shape, gather_tensor_shape - ) + f"{value.shape} can not broadcast into {gather_tensor_shape}" ) value_broadcast = paddle.broadcast_to(value, gather_tensor_shape) @@ -324,7 +318,7 @@ def get_value_for_bool_tensor(var, item): raise IndexError( "The dims of bool index doesn't match indexed array, " "the dims of bool index except to be equal or less " - "than {}, but received {}.".format(len(var.shape), len(item.shape)) + f"than {len(var.shape)}, but received {len(item.shape)}." ) i = 0 item_shape = item.shape @@ -433,7 +427,7 @@ def _setitem_impl_(var, item, value): if not isinstance(step, Variable) and step == 0: raise ValueError( "When assign a value to a paddle.Tensor, step can not be 0, " - "but received step is {}.".format(step) + f"but received step is {step}." 
) if isinstance(step, Variable) and (start is None or end is None): @@ -454,9 +448,7 @@ def _setitem_impl_(var, item, value): for i in slice_item: if not isinstance(i, bool): - raise TypeError( - "Doesn't support {} in index list.".format(type(i)) - ) + raise TypeError(f"Doesn't support {type(i)} in index list.") if len(item) != 1: raise IndexError( @@ -543,9 +535,7 @@ def _setitem_impl_(var, item, value): else: raise TypeError( "Only support to assign an integer, float, numpy.ndarray or " - "paddle.Tensor to a paddle.Tensor, but received {}".format( - type(value) - ) + f"paddle.Tensor to a paddle.Tensor, but received {type(value)}" ) if paddle.in_dynamic_mode(): @@ -587,7 +577,7 @@ def set_value_for_bool_tensor(var, item, value): raise IndexError( "The dims of bool index doesn't match indexed array, " "the dims of bool index except to be equal or less " - "than {}, but received {}.".format(len(var.shape), len(item.shape)) + f"than {len(var.shape)}, but received {len(item.shape)}." ) for i, dim_len in enumerate(item.shape): if dim_len != -1 and var.shape[i] != -1 and dim_len != var.shape[i]: @@ -895,9 +885,7 @@ def _setitem_static(x, indices, values): else: raise TypeError( "Only support to assign an integer, float, numpy.ndarray or " - "paddle.Tensor to a paddle.Tensor, but received {}".format( - type(values) - ) + f"paddle.Tensor to a paddle.Tensor, but received {type(values)}" ) # step3.1: Only basic indexing, use OP set_value to set value. @@ -908,7 +896,7 @@ def _setitem_static(x, indices, values): StartsTensorList, EndsTensorList, StepsTensorList, - *itertools.chain.from_iterable(attrs.items()) + *itertools.chain.from_iterable(attrs.items()), ) else: helper = paddle.base.layer_helper.LayerHelper(