[xdoctest] reformat example code with google style in No.344-No.352 (#57134)

* fix sample codes

* fix bug

* fix bug
longranger2 authored Sep 11, 2023
1 parent 7cf2c45 commit 02a579a
Showing 6 changed files with 410 additions and 389 deletions.
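For reference, the google style applied throughout this commit prefixes each example statement with ">>> " and each continuation line with "... ", and places expected output on unprefixed lines, so the snippets double as executable doctests. A minimal sketch of the convention (generic Python, not taken from the diff):

    >>> # Statements carry the ">>> " prompt; expected output carries no prefix.
    >>> total = 2 + 3
    >>> print(total)
    5

Snippets in this form can be checked by xdoctest (for example via "python -m xdoctest <file>", assuming xdoctest is installed; the exact invocation may differ per setup).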
16 changes: 9 additions & 7 deletions python/paddle/base/lod_tensor.py
@@ -62,10 +62,10 @@ def create_lod_tensor(data, recursive_seq_lens, place):
         .. code-block:: python

-            import paddle.base as base
-            import numpy as np
+            >>> import paddle.base as base
+            >>> import numpy as np

-            t = base.create_lod_tensor(np.ndarray([5, 30]), [[2, 3]], base.CPUPlace())
+            >>> t = base.create_lod_tensor(np.ndarray([5, 30]), [[2, 3]], base.CPUPlace())
     """
     if isinstance(data, core.LoDTensor):
         return create_lod_tensor(np.array(data), recursive_seq_lens, place)
@@ -154,13 +154,15 @@ def create_random_int_lodtensor(
     is inside [low, high].

     Examples:
         .. code-block:: python

-            import paddle.base as base
+            >>> import paddle.base as base

-            t = base.create_random_int_lodtensor(recursive_seq_lens=[[2, 3]],
-                  base_shape=[30], place=base.CPUPlace(), low=0, high=10)
-            print(t.shape()) # [5, 30]
+            >>> t = base.create_random_int_lodtensor(recursive_seq_lens=[[2, 3]],
+            ...       base_shape=[30], place=base.CPUPlace(), low=0, high=10)
+            >>> print(t.shape())
+            [5, 30]
     """
     assert isinstance(base_shape, list), "base_shape should be a list"
     # append the total number of basic elements to the front of its shape
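The expected output [5, 30] in the converted doctest follows from the comment above: the leading dimension is the total number of basic elements, i.e. sum(recursive_seq_lens[0]) = 2 + 3 = 5, prepended to base_shape. A minimal sketch of that arithmetic in plain Python (illustrative, not part of the diff):

    >>> recursive_seq_lens = [[2, 3]]
    >>> base_shape = [30]
    >>> [sum(recursive_seq_lens[0])] + base_shape  # the shape reported by t.shape()
    [5, 30]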
9 changes: 7 additions & 2 deletions python/paddle/base/log_helper.py
@@ -32,10 +32,15 @@ def get_logger(name, level, fmt=None):
     logging.Logger: logging logger with given settings

     Examples:
         .. code-block:: python

-            logger = log_helper.get_logger(__name__, logging.INFO,
-                            fmt='%(asctime)s-%(levelname)s: %(message)s')
+            >>> import paddle
+            >>> import logging
+            >>> from paddle.base import log_helper
+            >>> logger = log_helper.get_logger(__name__, logging.INFO,
+            ...                                fmt='%(asctime)s-%(levelname)s: %(message)s')
     """

     logger = logging.getLogger(name)
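To show the converted get_logger example end to end, a hedged usage sketch (the log message is illustrative and not taken from the diff; get_logger returns a standard logging.Logger, so the usual logging methods apply):

    >>> import logging
    >>> from paddle.base import log_helper
    >>> logger = log_helper.get_logger(__name__, logging.INFO,
    ...                                fmt='%(asctime)s-%(levelname)s: %(message)s')
    >>> logger.info("training started")  # emitted with the timestamp/level format above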
47 changes: 24 additions & 23 deletions python/paddle/base/param_attr.py
@@ -61,14 +61,15 @@ class ParamAttr:
         .. code-block:: python

-            import paddle
+            >>> import paddle

-            weight_attr = paddle.ParamAttr(name="weight",
-                                           learning_rate=0.5,
-                                           regularizer=paddle.regularizer.L2Decay(1.0),
-                                           trainable=True)
-            print(weight_attr.name) # "weight"
-            paddle.nn.Linear(3, 4, weight_attr=weight_attr)
+            >>> weight_attr = paddle.ParamAttr(name="weight",
+            ...                                learning_rate=0.5,
+            ...                                regularizer=paddle.regularizer.L2Decay(1.0),
+            ...                                trainable=True)
+            >>> print(weight_attr.name)
+            weight
+            >>> paddle.nn.Linear(3, 4, weight_attr=weight_attr)
     """

     def __init__(
@@ -259,24 +260,24 @@ class WeightNormParamAttr(ParamAttr):
         .. code-block:: python

-            import paddle
+            >>> import paddle

-            paddle.enable_static()
+            >>> paddle.enable_static()

-            data = paddle.static.data(name="data", shape=[3, 32, 32], dtype="float32")
-            fc = paddle.static.nn.fc(x=data,
-                                     size=1000,
-                                     weight_attr=paddle.static.WeightNormParamAttr(
-                                         dim=None,
-                                         name='weight_norm_param',
-                                         initializer=paddle.nn.initializer.Constant(1.0),
-                                         learning_rate=1.0,
-                                         regularizer=paddle.regularizer.L2Decay(0.1),
-                                         trainable=True,
-                                         do_model_average=False,
-                                         need_clip=True))
+            >>> data = paddle.static.data(name="data", shape=[3, 32, 32], dtype="float32")
+            >>> fc = paddle.static.nn.fc(x=data,
+            ...                          size=1000,
+            ...                          weight_attr=paddle.static.WeightNormParamAttr(
+            ...                              dim=None,
+            ...                              name='weight_norm_param',
+            ...                              initializer=paddle.nn.initializer.Constant(1.0),
+            ...                              learning_rate=1.0,
+            ...                              regularizer=paddle.regularizer.L2Decay(0.1),
+            ...                              trainable=True,
+            ...                              do_model_average=False,
+            ...                              need_clip=True))
+            ...
     """
     # List to record the parameters reparameterized by weight normalization.
     # If these parameters are treated as Variable rather than Parameter,
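To exercise the converted ParamAttr example end to end, a hedged dynamic-mode sketch (the weight-shape check is illustrative and not part of the diff; in Paddle, a Linear layer stores its weight as [in_features, out_features]):

    >>> import paddle
    >>> weight_attr = paddle.ParamAttr(name="weight",
    ...                                learning_rate=0.5,
    ...                                regularizer=paddle.regularizer.L2Decay(1.0),
    ...                                trainable=True)
    >>> linear = paddle.nn.Linear(3, 4, weight_attr=weight_attr)
    >>> print(linear.weight.shape)
    [3, 4]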