Update COPY-FROM No. 3: autograd (#5984)
gouzil authored Jul 6, 2023
1 parent eb2ead6 commit 3354d86
Showing 3 changed files with 11 additions and 298 deletions.
154 changes: 6 additions & 148 deletions docs/api/paddle/autograd/PyLayerContext_cn.rst
@@ -11,25 +11,7 @@ PyLayerContext
Code Example
::::::::::::

.. code-block:: python

    import paddle
    from paddle.autograd import PyLayer

    class cus_tanh(PyLayer):
        @staticmethod
        def forward(ctx, x):
            # ctx is an object of PyLayerContext.
            y = paddle.tanh(x)
            ctx.save_for_backward(y)
            return y

        @staticmethod
        def backward(ctx, dy):
            # ctx is an object of PyLayerContext.
            y, = ctx.saved_tensor()
            grad = dy * (1 - paddle.square(y))
            return grad

COPY-FROM: paddle.autograd.PyLayerContext
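
For reference, a minimal usage sketch — this is an assumption, not part of the diff; it simply mirrors the ``apply``/``backward`` calls from the ``PyLayer`` example later in this commit — of the ``cus_tanh`` layer defined above:

.. code-block:: python

    import paddle

    data = paddle.randn([2, 3], dtype="float64")
    data.stop_gradient = False
    # apply() runs forward(), where ctx.save_for_backward(y) stashes y.
    z = cus_tanh.apply(data)
    # backward() is triggered here; ctx.saved_tensor() returns the stashed (y,).
    z.mean().backward()
    print(data.grad)

The same call pattern applies to the ``save_for_backward`` and ``saved_tensor`` examples below.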


Methods
@@ -53,26 +35,7 @@ None

**Code Example**

.. code-block:: python

    import paddle
    from paddle.autograd import PyLayer

    class cus_tanh(PyLayer):
        @staticmethod
        def forward(ctx, x):
            # ctx is a context object that stores some objects for backward.
            y = paddle.tanh(x)
            # Pass tensors to backward.
            ctx.save_for_backward(y)
            return y

        @staticmethod
        def backward(ctx, dy):
            # Get the tensors passed by forward.
            y, = ctx.saved_tensor()
            grad = dy * (1 - paddle.square(y))
            return grad

COPY-FROM: paddle.autograd.PyLayerContext.save_for_backward


saved_tensor()
@@ -87,26 +50,7 @@ saved_tensor()

**Code Example**

.. code-block:: python

    import paddle
    from paddle.autograd import PyLayer

    class cus_tanh(PyLayer):
        @staticmethod
        def forward(ctx, x):
            # ctx is a context object that stores some objects for backward.
            y = paddle.tanh(x)
            # Pass tensors to backward.
            ctx.save_for_backward(y)
            return y

        @staticmethod
        def backward(ctx, dy):
            # Get the tensors passed by forward.
            y, = ctx.saved_tensor()
            grad = dy * (1 - paddle.square(y))
            return grad

COPY-FROM: paddle.autograd.PyLayerContext.saved_tensor


mark_not_inplace(self, *tensors)
@@ -129,33 +73,7 @@ None

**Code Example**

.. code-block:: python

    import paddle

    class Exp(paddle.autograd.PyLayer):
        @staticmethod
        def forward(ctx, x):
            ctx.mark_not_inplace(x)
            return x

        @staticmethod
        def backward(ctx, grad_output):
            out = grad_output.exp()
            return out

    x = paddle.randn((1, 1))
    x.stop_gradient = False
    attn_layers = []
    for idx in range(0, 2):
        attn_layers.append(Exp())

    for step in range(0, 2):
        a = x
        for j in range(0, 2):
            a = attn_layers[j].apply(x)
        a.backward()

COPY-FROM: paddle.autograd.PyLayerContext.mark_not_inplace

mark_non_differentiable(self, *tensors)
'''''''''
@@ -179,32 +97,7 @@ None

**Code Example**

.. code-block:: python

    import os
    os.environ['FLAGS_enable_eager_mode'] = '1'
    import paddle
    from paddle.autograd import PyLayer
    import numpy as np

    class Tanh(PyLayer):
        @staticmethod
        def forward(ctx, x):
            a = x + x
            b = x + x + x
            ctx.mark_non_differentiable(a)
            return a, b

        @staticmethod
        def backward(ctx, grad_a, grad_b):
            assert np.equal(grad_a.numpy(), paddle.zeros([1]).numpy())
            assert np.equal(grad_b.numpy(), paddle.ones([1], dtype="float64").numpy())
            return grad_b

    x = paddle.ones([1], dtype="float64")
    x.stop_gradient = False
    a, b = Tanh.apply(x)
    b.sum().backward()

COPY-FROM: paddle.autograd.PyLayerContext.mark_non_differentiable

set_materialize_grads(self, value)
'''''''''
@@ -227,39 +120,4 @@ None

**Code Example**

.. code-block:: python

    import os
    os.environ['FLAGS_enable_eager_mode'] = '1'
    import paddle
    from paddle.autograd import PyLayer
    import numpy as np

    class Tanh(PyLayer):
        @staticmethod
        def forward(ctx, x):
            return x + x + x, x + x

        @staticmethod
        def backward(ctx, grad, grad2):
            assert np.equal(grad2.numpy(), paddle.zeros([1]).numpy())
            return grad

    class Tanh2(PyLayer):
        @staticmethod
        def forward(ctx, x):
            ctx.set_materialize_grads(False)
            return x + x + x, x + x

        @staticmethod
        def backward(ctx, grad, grad2):
            assert grad2 is None
            return grad

    x = paddle.ones([1], dtype="float64")
    x.stop_gradient = False
    Tanh.apply(x)[0].backward()

    x2 = paddle.ones([1], dtype="float64")
    x2.stop_gradient = False
    Tanh2.apply(x2)[0].backward()

COPY-FROM: paddle.autograd.PyLayerContext.set_materialize_grads
100 changes: 4 additions & 96 deletions docs/api/paddle/autograd/PyLayer_cn.rst
@@ -19,38 +19,7 @@ Paddle implements Python-side custom operators by subclassing ``PyLayer``
Code Example
::::::::::::

.. code-block:: python

    import paddle
    from paddle.autograd import PyLayer

    # Inherit from PyLayer
    class cus_tanh(PyLayer):
        @staticmethod
        def forward(ctx, x, func1, func2=paddle.square):
            # ctx is a context object that stores some objects for backward.
            ctx.func = func2
            y = func1(x)
            # Pass tensors to backward.
            ctx.save_for_backward(y)
            return y

        # forward has only one output, so there is only one gradient in the input of backward.
        @staticmethod
        def backward(ctx, dy):
            # Get the tensors passed by forward.
            y, = ctx.saved_tensor()
            grad = dy * (1 - ctx.func(y))
            # forward has only one input, so only one gradient tensor is returned.
            return grad

    data = paddle.randn([2, 3], dtype="float64")
    data.stop_gradient = False
    z = cus_tanh.apply(data, func1=paddle.tanh)
    z.mean().backward()

    print(data.grad)

COPY-FROM: paddle.autograd.PyLayer


Methods
@@ -71,25 +40,7 @@ A Tensor, or a list/tuple containing at least one Tensor

**Code Example**

.. code-block:: python

    import paddle
    from paddle.autograd import PyLayer

    class cus_tanh(PyLayer):
        @staticmethod
        def forward(ctx, x):
            y = paddle.tanh(x)
            # Pass tensors to backward.
            ctx.save_for_backward(y)
            return y

        @staticmethod
        def backward(ctx, dy):
            # Get the tensors passed by forward.
            y, = ctx.saved_tensor()
            grad = dy * (1 - paddle.square(y))
            return grad

COPY-FROM: paddle.autograd.PyLayer.forward


backward(ctx, *args, **kwargs)
@@ -108,25 +59,7 @@ backward(ctx, *args, **kwargs)

**Code Example**

.. code-block:: python

    import paddle
    from paddle.autograd import PyLayer

    class cus_tanh(PyLayer):
        @staticmethod
        def forward(ctx, x):
            y = paddle.tanh(x)
            # Pass tensors to backward.
            ctx.save_for_backward(y)
            return y

        @staticmethod
        def backward(ctx, dy):
            # Get the tensors passed by forward.
            y, = ctx.saved_tensor()
            grad = dy * (1 - paddle.square(y))
            return grad

COPY-FROM: paddle.autograd.PyLayer.backward


apply(cls, *args, **kwargs)
@@ -145,29 +78,4 @@ A Tensor, or a list/tuple containing at least one Tensor

**Code Example**

.. code-block:: python

    import paddle
    from paddle.autograd import PyLayer

    class cus_tanh(PyLayer):
        @staticmethod
        def forward(ctx, x, func1, func2=paddle.square):
            ctx.func = func2
            y = func1(x)
            # Pass tensors to backward.
            ctx.save_for_backward(y)
            return y

        @staticmethod
        def backward(ctx, dy):
            # Get the tensors passed by forward.
            y, = ctx.saved_tensor()
            grad = dy * (1 - ctx.func(y))
            return grad

    data = paddle.randn([2, 3], dtype="float64")
    data.stop_gradient = False
    # run custom Layer.
    z = cus_tanh.apply(data, func1=paddle.tanh)

COPY-FROM: paddle.autograd.PyLayer
55 changes: 1 addition & 54 deletions docs/api/paddle/autograd/saved_tensors_hooks_cn.rst
@@ -21,57 +21,4 @@ saved_tensors_hooks is used in dynamic graph mode to register a pair of pack / unpack hooks, which are used to
Code Example
::::::::::::

.. code-block:: python

    # Example 1
    import paddle

    def pack_hook(x):
        print("Packing", x)
        return x.numpy()

    def unpack_hook(x):
        print("UnPacking", x)
        return paddle.to_tensor(x)

    a = paddle.ones([3, 3])
    b = paddle.ones([3, 3]) * 2
    a.stop_gradient = False
    b.stop_gradient = False

    with paddle.autograd.saved_tensors_hooks(pack_hook, unpack_hook):
        y = paddle.multiply(a, b)
    y.sum().backward()

    # Example 2
    import paddle
    from paddle.autograd import PyLayer

    class cus_tanh(PyLayer):
        @staticmethod
        def forward(ctx, a, b):
            y = paddle.multiply(a, b)
            ctx.save_for_backward(a, b)
            return y

        @staticmethod
        def backward(ctx, dy):
            a, b = ctx.saved_tensor()
            grad_a = dy * a
            grad_b = dy * b
            return grad_a, grad_b

    def pack_hook(x):
        print("Packing", x)
        return x.numpy()

    def unpack_hook(x):
        print("UnPacking", x)
        return paddle.to_tensor(x)

    a = paddle.ones([3, 3])
    b = paddle.ones([3, 3]) * 2
    a.stop_gradient = False
    b.stop_gradient = False

    with paddle.autograd.saved_tensors_hooks(pack_hook, unpack_hook):
        y = cus_tanh.apply(a, b)
    y.sum().backward()

COPY-FROM: paddle.autograd.saved_tensors_hooks
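
The hooks above round-trip every saved tensor through numpy. As a rough sketch of the same contract — ``pack_hook`` is called once for each tensor saved for backward and may return any object, while ``unpack_hook`` must map that object back to a tensor usable in its place — a purely illustrative pair of identity hooks that only count calls (the ``counts`` dict is an invention of this sketch, not part of the API) could look like:

.. code-block:: python

    import paddle

    counts = {"pack": 0, "unpack": 0}

    def pack_hook(x):
        # Called once per tensor saved for backward; may return any object.
        counts["pack"] += 1
        return x

    def unpack_hook(obj):
        # Must return a tensor usable in place of the originally saved one.
        counts["unpack"] += 1
        return obj

    a = paddle.ones([3, 3])
    b = paddle.ones([3, 3]) * 2
    a.stop_gradient = False
    b.stop_gradient = False

    with paddle.autograd.saved_tensors_hooks(pack_hook, unpack_hook):
        y = paddle.multiply(a, b)
    y.sum().backward()
    print(counts)  # how many tensors were packed / unpacked for this graph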
