diff --git a/mindspore/nn/graph_kernels/graph_kernels.py b/mindspore/nn/graph_kernels/graph_kernels.py
index b022c18d46d..93e63dcd793 100644
--- a/mindspore/nn/graph_kernels/graph_kernels.py
+++ b/mindspore/nn/graph_kernels/graph_kernels.py
@@ -365,6 +365,7 @@ class ReLU(GraphKernel):
         >>> input_x = Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]]), mindspore.float32)
         >>> relu = ReLU()
         >>> result = relu(input_x)
+        >>> print(result)
         [[0, 4.0, 0.0], [2.0, 0.0, 9.0]]
     """
     def __init__(self):
@@ -682,7 +683,8 @@ class LogSoftmax(GraphKernel):
     Examples:
         >>> input_x = Tensor(np.array([1, 2, 3, 4, 5]), mindspore.float32)
         >>> log_softmax = LogSoftmax()
-        >>> log_softmax(input_x)
+        >>> result = log_softmax(input_x)
+        >>> print(result)
         [-4.4519143, -3.4519143, -2.4519143, -1.4519144, -0.4519144]
     """

@@ -739,7 +741,8 @@ class Tanh(GraphKernel):
     Examples:
         >>> input_x = Tensor(np.array([1, 2, 3, 4, 5]), mindspore.float32)
         >>> tanh = Tanh()
-        >>> tanh(input_x)
+        >>> result = tanh(input_x)
+        >>> print(result)
         [0.7615941, 0.9640276, 0.9950548, 0.9993293, 0.99990916]
     """
     def __init__(self):
diff --git a/mindspore/nn/layer/quant.py b/mindspore/nn/layer/quant.py
index e6a752f9881..c9e2c4f0109 100644
--- a/mindspore/nn/layer/quant.py
+++ b/mindspore/nn/layer/quant.py
@@ -378,7 +378,8 @@ class Conv2dBnFoldQuantOneConv(Cell):
         >>>                                 quant_config=qconfig)
         >>> input = Tensor(np.random.randint(-2, 2, (2, 1, 3, 3)), mindspore.float32)
         >>> result = conv2d_bnfold(input)
-        >>> result.shape
+        >>> output = result.shape
+        >>> print(output)
         (2, 6, 2, 2)
     """
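For reference, the doctest style this diff converges on (bind the result, then print it explicitly) corresponds to a standalone script along the following lines. This is a minimal sketch, not part of the patch: the `from mindspore.nn.graph_kernels import ReLU` import path is an assumption inferred from the file this diff touches, and graph kernel ops may additionally require graph kernel fusion to be enabled in the execution context depending on the backend.

# Minimal sketch of the updated ReLU doctest as a runnable script.
# Assumption: the import path mirrors mindspore/nn/graph_kernels/graph_kernels.py.
import numpy as np
import mindspore
from mindspore import Tensor
from mindspore.nn.graph_kernels import ReLU

# Same input as the docstring example in the diff above.
input_x = Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]]), mindspore.float32)
relu = ReLU()
result = relu(input_x)
# Printing explicitly (rather than relying on the REPL echo) is the whole
# point of the change: it makes the doctest output deterministic.
print(result)  # expected, per the docstring: [[0, 4.0, 0.0], [2.0, 0.0, 9.0]]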