From bd98227741b09a9e4d86d2c84892465a348a3884 Mon Sep 17 00:00:00 2001
From: Bo Zhang <105368690+zhangbopd@users.noreply.github.com>
Date: Fri, 22 Sep 2023 14:29:37 +0800
Subject: [PATCH] Add test_dropout_nd (#1673)

* add test_dropout_nd

* PR comment
---
 api/tests/dropout_nd.py              | 65 +++++++++++++++++++++++++++
 api/tests_v2/configs/dropout_nd.json | 66 ++++++++++++++++++++++++++++
 2 files changed, 131 insertions(+)
 create mode 100644 api/tests/dropout_nd.py
 create mode 100644 api/tests_v2/configs/dropout_nd.json

diff --git a/api/tests/dropout_nd.py b/api/tests/dropout_nd.py
new file mode 100644
index 0000000000..96bc30a36a
--- /dev/null
+++ b/api/tests/dropout_nd.py
@@ -0,0 +1,65 @@
+# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from common_import import *
+import paddle
+from paddle import _legacy_C_ops
+
+
+# no dropout_nd in pytorch
+def dropout_nd(x,
+               p=0.5,
+               axis=None,
+               training=True,
+               mode="upscale_in_train",
+               name=None):
+    drop_axes = [axis] if isinstance(axis, int) else list(axis)
+    seed = None
+    mode = ('downgrade_in_infer'
+            if mode == 'downscale_in_infer' else mode)  # semantic transfer
+    out = _legacy_C_ops.dropout_nd(
+        x,
+        'dropout_prob',
+        p,
+        'is_test',
+        not training,
+        'fix_seed',
+        seed is not None,
+        'seed',
+        seed if seed is not None else 0,
+        'dropout_implementation',
+        mode,
+        'axis',
+        drop_axes, )
+    return out
+
+
+@benchmark_registry.register("dropout_nd")
+class PaddleDropoutNdConfig(APIConfig):
+    def __init__(self):
+        super(PaddleDropoutNdConfig, self).__init__('dropout_nd')
+        self.run_torch = False
+
+
+@benchmark_registry.register("dropout_nd")
+class PaddleDropoutNd(PaddleOpBenchmarkBase):
+    def build_graph(self, config):
+        x = self.variable(name='x', shape=config.x_shape, dtype=config.x_dtype)
+        result = dropout_nd(
+            x=x, p=config.p, axis=config.axis, mode=config.mode)
+
+        self.feed_list = [x]
+        self.fetch_list = [result]
+        if config.backward:
+            self.append_gradients(result[0], self.feed_list)
diff --git a/api/tests_v2/configs/dropout_nd.json b/api/tests_v2/configs/dropout_nd.json
new file mode 100644
index 0000000000..3d0f15509e
--- /dev/null
+++ b/api/tests_v2/configs/dropout_nd.json
@@ -0,0 +1,66 @@
+[{
+  "op": "dropout_nd",
+  "param_info": {
+    "mode": {
+      "type": "string",
+      "value": "upscale_in_train"
+    },
+    "p": {
+      "type": "float",
+      "value": "0.5"
+    },
+    "x": {
+      "dtype": "float32",
+      "shape": "[-1L, 16L, -1L]",
+      "type": "Variable"
+    },
+    "axis": {
+      "type": "list",
+      "value": "[1]"
+    }
+  },
+  "repeat": 2000
+}, {
+  "op": "dropout_nd",
+  "param_info": {
+    "mode": {
+      "type": "string",
+      "value": "downscale_in_infer"
+    },
+    "p": {
+      "type": "float",
+      "value": "0.5"
+    },
+    "x": {
+      "dtype": "float32",
+      "shape": "[-1L, 16L, -1L, -1L]",
+      "type": "Variable"
+    },
+    "axis": {
+      "type": "list",
+      "value": "[0,1]"
+    }
+  },
+  "repeat": 2000
+}, {
+  "op": "dropout_nd",
+  "param_info": {
+    "mode": {
+      "type": "string",
+      "value": "upscale_in_train"
+    },
+    "p": {
+      "type": "float",
+      "value": "0.1"
+    },
+    "x": {
+      "dtype": "float32",
+      "shape": "[32L, 128L, 768L]",
+      "type": "Variable"
+    },
+    "axis": {
+      "type": "list",
+      "value": "[0]"
+    }
+  }
+}]