From 985669317faf7efb1e8f7e839b67cb04fd287a6f Mon Sep 17 00:00:00 2001
From: gouzil <66515297+gouzil@users.noreply.github.com>
Date: Fri, 3 Nov 2023 10:42:47 +0800
Subject: [PATCH] [CodeStyle][task 35-37] enable Flake8 E712, E266, E714 rule
 in `python/paddle/base` (#58319)

* [CodeStyle] fix E712

* [CodeStyle] fix E266

* [CodeStyle] fix E714
---
 .flake8                               |  6 ------
 python/paddle/base/backward.py        |  2 +-
 python/paddle/base/device_worker.py   |  4 ++--
 python/paddle/base/dygraph/base.py    |  2 +-
 python/paddle/base/executor.py        | 16 ++++++++--------
 python/paddle/base/framework.py       |  6 +++---
 python/paddle/base/trainer_desc.py    |  2 +-
 python/paddle/base/trainer_factory.py |  2 +-
 8 files changed, 17 insertions(+), 23 deletions(-)

diff --git a/.flake8 b/.flake8
index 91137a006d0885..5187a0cdefe032 100644
--- a/.flake8
+++ b/.flake8
@@ -28,9 +28,3 @@ per-file-ignores =
 
     # Ignore compare with True in sot unittest
     test/sot/test_dup_top.py:E712
-
-    # temp ignore base directory
-    python/paddle/base/*:
-        E712,
-        E266,
-        E714
diff --git a/python/paddle/base/backward.py b/python/paddle/base/backward.py
index 876db0abc3aa70..e62a5b9245a1b6 100755
--- a/python/paddle/base/backward.py
+++ b/python/paddle/base/backward.py
@@ -2348,7 +2348,7 @@ def _find_op_path_(
     # If block is while block, dealing with op specifically again.
     # TODO(liym27): Consider special types of ops.
     for i, op in reversed(list(enumerate(block.ops))):
-        if relevant_op_flags[i] == False and _some_in_set_(
+        if relevant_op_flags[i] is False and _some_in_set_(
             op.desc.output_arg_names(), output_names
         ):
             relevant_op_flags[i] = True
diff --git a/python/paddle/base/device_worker.py b/python/paddle/base/device_worker.py
index 755f7257b735ad..c20677f6acd5e4 100644
--- a/python/paddle/base/device_worker.py
+++ b/python/paddle/base/device_worker.py
@@ -450,7 +450,7 @@ def _gen_worker_desc(self, trainer_desc):
             if (
                 opt_info["use_cvm"]
                 or "no_cvm" in opt_info
-                and opt_info["no_cvm"] == True
+                and opt_info["no_cvm"] is True
             ):
                 sparse_table.emb_dim = self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
                     i
@@ -560,7 +560,7 @@ def _gen_worker_desc(self, trainer_desc):
             if (
                 opt_info["use_cvm"]
                 or "no_cvm" in opt_info
-                and opt_info["no_cvm"] == True
+                and opt_info["no_cvm"] is True
             ):
                 sparse_table.emb_dim = self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
                     i
diff --git a/python/paddle/base/dygraph/base.py b/python/paddle/base/dygraph/base.py
index cadb6bcb089377..69ee71395b9aa1 100644
--- a/python/paddle/base/dygraph/base.py
+++ b/python/paddle/base/dygraph/base.py
@@ -928,7 +928,7 @@ def to_variable(value, name=None, zero_copy=None, dtype=None):
         # (2): when used in flask framework, it may result in hang.
         # Details: https://github.com/PaddlePaddle/Paddle/issues/26635
         # So, we temporally diable the zero_copy strategy.
-        if zero_copy == True:
+        if zero_copy is True:
             warnings.warn(
                 "Currently, zero_copy is not supported, and it will be discarded."
             )
diff --git a/python/paddle/base/executor.py b/python/paddle/base/executor.py
index 40f1471788278c..7dc9f8d5f9848f 100755
--- a/python/paddle/base/executor.py
+++ b/python/paddle/base/executor.py
@@ -1711,7 +1711,7 @@ def _run_impl(
         if isinstance(program, Program) and program._heter_pipeline_opt:
             # print("program._heter_pipeline_opt: {}".format(
             #     program._heter_pipeline_opt))
-            ## change default executor
+            # change default executor
             heter_place = program._heter_pipeline_opt["heter_place"]
             heter_place = framework._get_paddle_place(heter_place)
             p = core.Place()
@@ -1868,12 +1868,12 @@ def _run_impl(
                 varobj = global_block.vars[varname]
 
                 if (
-                    vardesc.persistable() == False
+                    vardesc.persistable() is False
                     and vardesc.type() == core.VarDesc.VarType.LOD_TENSOR
-                    and vardesc.need_check_feed() == True
-                    and varobj.stop_gradient == True
-                    and varobj.is_data == True
-                    and varobj.belong_to_optimizer == False
+                    and vardesc.need_check_feed() is True
+                    and varobj.stop_gradient is True
+                    and varobj.is_data is True
+                    and varobj.belong_to_optimizer is False
                     and varname not in feed
                 ):
                     raise ValueError('Need feed data for variable %s' % varname)
@@ -2161,7 +2161,7 @@ def _prepare_trainer(
     ):
         is_heter = 0
         use_ps_gpu = 0
-        if not program._fleet_opt is None:
+        if program._fleet_opt is not None:
            if program._fleet_opt.get("worker_class", "") == "HeterCpuWorker":
                is_heter = 1
            if program._fleet_opt.get("trainer", "") == "HeterXpuTrainer":
@@ -2287,7 +2287,7 @@ def _run_from_dataset(
                 raise RuntimeError(
                     "dataset is need and should be initialized"
                 )
-            ## change default executor
+            # change default executor
             heter_place = framework._get_paddle_place(heter_place)
             p = core.Place()
             p.set_place(heter_place)
diff --git a/python/paddle/base/framework.py b/python/paddle/base/framework.py
index 98e374ffced3a9..f3d766bdfaf936 100644
--- a/python/paddle/base/framework.py
+++ b/python/paddle/base/framework.py
@@ -2993,7 +2993,7 @@ def __init__(
                 if (
                     type == 'less_than'
                     and op_attrs['force_cpu'] is not None
-                ) or op_attrs['force_cpu'] != False:
+                ) or op_attrs['force_cpu'] is not False:
                     warnings.warn(
                         "The Attr(force_cpu) of Op(%s) will be deprecated in the future, "
                         "please use 'device_guard' instead. 'device_guard' has higher priority when they are "
@@ -4264,7 +4264,7 @@ def _rename_var(self, name, new_name):
         return var
 
     def _remove_var(self, name, sync=True):
-        if sync == True:
+        if sync is True:
             self._sync_with_cpp()
         self.desc._remove_var(name.encode())
         del self.vars[name]
@@ -4453,7 +4453,7 @@ def _remove_op(self, index, sync=True):
         Returns:
             None
         """
-        if sync == True:
+        if sync is True:
             self._sync_with_cpp()
         self.desc._remove_op(index, index + 1)
         del self.ops[index]
diff --git a/python/paddle/base/trainer_desc.py b/python/paddle/base/trainer_desc.py
index 255ddf05a580a7..3d6c947db484e9 100644
--- a/python/paddle/base/trainer_desc.py
+++ b/python/paddle/base/trainer_desc.py
@@ -112,7 +112,7 @@ def _set_infer(self, infer):
 
     def _set_fleet_desc(self, fleet_desc):
         self._fleet_desc = fleet_desc
-        ## serialize fleet_desc
+        # serialize fleet_desc
         from google.protobuf import text_format
 
         fleet_desc_str = text_format.MessageToString(fleet_desc)
diff --git a/python/paddle/base/trainer_factory.py b/python/paddle/base/trainer_factory.py
index c5743ca22a29e2..c8b61fdf7c1121 100644
--- a/python/paddle/base/trainer_factory.py
+++ b/python/paddle/base/trainer_factory.py
@@ -186,7 +186,7 @@ def handler_launch_func(self, scope, handler):
         elapsed_secs = 0
         while True:
             self.running_lock.acquire()
-            if self.running == False:
+            if self.running is False:
                 break
             if elapsed_secs < period_secs:
                 # TODO(guru4elephant): needs customized condition
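
Reviewer note (not part of the patch): the three Flake8 codes enabled here are E712 (equality comparison against True/False with `==`/`!=`), E266 (block comment starting with `##` instead of a single `#`), and E714 (identity test written as `not x is y` instead of `x is not y`). Below is a minimal Python sketch of all three fixes on a hypothetical helper, mirroring the style of the changes above; the function and its arguments are illustrative only and do not exist in Paddle.

# Illustration only: toy function, not Paddle code.
def build_worker_desc(opt_info, fleet_opt=None):
    # E266 fix: block comments use a single leading '#', not '##'.
    # serialize the worker description
    if fleet_opt is not None:  # E714 fix: was 'if not fleet_opt is None:'
        opt_info = {**opt_info, **fleet_opt}
    # E712 fix: was 'if opt_info.get("no_cvm") == True:'
    if opt_info.get("no_cvm") is True:
        return "cvm disabled"
    return "cvm enabled"


print(build_worker_desc({"no_cvm": True}))  # -> cvm disabled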