forked from mindspore-Ecosystem/mindspore
add tensor dim(), numpy(), param copy
parent 10d6e6535e
commit 3d3254a990
@@ -0,0 +1,6 @@
mindspore.Tensor.numpy
======================

.. py:method:: mindspore.Tensor.numpy()

    Refer to `Tensor.asnumpy() <https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.asnumpy.html>`_.
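
A minimal usage sketch of the alias documented above (standalone illustration, not part of this commit):

    import numpy as np
    import mindspore as ms

    t = ms.Tensor(np.array([[1.0, 2.0], [3.0, 4.0]], dtype=np.float32))
    arr = t.numpy()               # alias for t.asnumpy()
    print(type(arr), arr.shape)   # <class 'numpy.ndarray'> (2, 2)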
@@ -37,6 +37,13 @@
    Returns:
        Parameter, the new cloned parameter.

.. py:method:: copy()

    Copy the parameter.

    Returns:
        Parameter, the new copied parameter.

.. py:method:: comm_fusion
    :property:
@@ -5,7 +5,7 @@ mindspore.nn.Rprop

Implements the Resilient backpropagation (Rprop) algorithm.

-See the paper `A Direct Adaptive Method for Faster Backpropagation Learning <http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.21.1417>`_.
+See the paper `A Direct Adaptive Method for Faster Backpropagation Learning <https://ieeexplore.ieee.org/document/298623>`_.

The update formulas are as follows:
@@ -337,6 +337,13 @@ BuiltInTypeMap &GetMethodMap() {
{"div", std::string("div")},        // div()
{"equal", std::string("equal")},    // equal()
{"expm1", std::string("expm1")},    // expm1()
{"dim", prim::kPrimRank},           // P.Rank()
}},
{kObjectTypeRowTensorType,
{
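
The new "dim" entry maps Tensor.dim() onto P.Rank(), i.e. the number of dimensions. A minimal sketch of the resulting behavior (standalone illustration, not part of this commit):

    import mindspore as ms

    x = ms.Tensor([[1, 2, 3], [4, 5, 6]])
    print(x.dim())   # 2 -- rank of a 2-D tensor, same as len(x.shape)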
@@ -34,7 +34,7 @@ class JitConfig:
        **kwargs (dict): A dictionary of keyword arguments that the class needs.

    Examples:
-        >>> from mindspore.common.jit_config import JitConfig
+        >>> from mindspore import JitConfig
        >>>
        >>> jitconfig = JitConfig(jit_level="O1")
        >>> net = LeNet5()
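
The docstring example is cut off by the hunk; presumably it goes on to attach the config to the network. A hedged sketch of that step (set_jit_config usage assumed, not shown in this diff):

    >>> net.set_jit_config(jitconfig)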
@@ -331,6 +331,15 @@ class Parameter(Tensor_):
        self.init_in_server = init_in_server
        self.param_info.init_in_server = init_in_server

    def copy(self):
        """
        Copy the parameter.

        Returns:
            Parameter, a new parameter.
        """
        return self.clone(init='same')

    def set_param_fl(self, push_to_server=False, pull_from_server=False, requires_aggr=True):
        """
        Set the way of parameter and server interaction.
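
copy() is a thin alias for clone(init='same'), so the result holds the same data in a distinct Parameter object. A minimal usage sketch (standalone illustration, not part of this commit):

    import numpy as np
    from mindspore import Parameter, Tensor

    p1 = Parameter(Tensor(np.array([1.0, 2.0])), name="w")
    p2 = p1.copy()
    assert np.all(p1.data.asnumpy() == p2.data.asnumpy())  # same values
    assert p2 is not p1                                     # distinct object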
@@ -616,6 +616,13 @@ class Tensor(Tensor_):
        self._init_check()
        return Tensor_.asnumpy(self)

    def numpy(self):
        """
        Refer to `Tensor.asnumpy() \
        <https://www.mindspore.cn/docs/en/master/api_python/mindspore/Tensor/mindspore.Tensor.asnumpy.html>`_.
        """
        return self.asnumpy()

    def is_persistent_data(self):
        """
        Check whether the tensor is so large that its data needs to be saved to persistent storage.
@@ -30,7 +30,7 @@ class Rprop(Optimizer):
    Implements Resilient backpropagation.

    Further information about this implementation can be found at `A Direct Adaptive Method for Faster Backpropagation
-    Learning: The RPROP Algorithm <http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.21.1417>`_.
+    Learning: The RPROP Algorithm <https://ieeexplore.ieee.org/document/298623>`_.

    The updating formulas are as follows:
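
The hunk cuts off before the formulas themselves; for reference, the standard Rprop update from the cited paper has this shape (a sketch, not copied from the file):

    \Delta_t^{(i)} =
    \begin{cases}
    \min(\eta_{+} \Delta_{t-1}^{(i)}, \Delta_{\max}), & g_t^{(i)} g_{t-1}^{(i)} > 0 \\
    \max(\eta_{-} \Delta_{t-1}^{(i)}, \Delta_{\min}), & g_t^{(i)} g_{t-1}^{(i)} < 0 \\
    \Delta_{t-1}^{(i)}, & \text{otherwise}
    \end{cases}
    \qquad
    w_{t+1}^{(i)} = w_t^{(i)} - \mathrm{sign}(g_t^{(i)}) \, \Delta_t^{(i)}

where g_t is the gradient and \eta_{+} > 1 > \eta_{-} are the step-size increase and decrease factors.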
@@ -96,3 +96,33 @@ def test_margin_ranking_loss_mean(mode):
    output = loss(input1, input2, target)
    expect_output = np.array(1.2293333)
    assert np.allclose(output.asnumpy(), expect_output)


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_arm_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
@pytest.mark.parametrize('mode', [ms.GRAPH_MODE])
def test_tensor_dim(mode):
    """
    Feature: test tensor dim
    Description: Verify the result of dim.
    Expectation: expect correct forward result.
    """
    class Net(nn.Cell):
        def __init__(self):
            super(Net, self).__init__()
            self.tensor = Tensor([[1, 2, 3], [4, 5, 6]])

        def construct(self, x):
            return x.dim(), self.tensor.dim()

    net = Net()
    input11 = Tensor([[1, 2, 3], [4, 5, 6]])
    input22 = Tensor([[[1, 2, 3], [4, 5, 6]]])
    assert net(input11) == (2, 2)  # 2-D input, 2-D attribute tensor
    assert net(input22) == (3, 2)  # 3-D input, 2-D attribute tensor
@@ -277,3 +277,15 @@ def test_parameter_init_from_tensor():
    assert np.allclose(param.asnumpy(), np.array([1]))
    tensor.asnumpy()[0] = 2
    assert np.allclose(param.asnumpy(), np.array([2]))


def test_parameter_copy():
    """
    Feature: Parameter copy.
    Description: Copy a Parameter and compare the copy's data with the original's.
    Expectation: The two Parameters' data are the same.
    """
    tensor = Tensor(np.array([[1, 2, 3], [2, 3, 4]]))
    param1 = Parameter(tensor, name="testParameter")
    param2 = param1.copy()
    assert np.all(param1.data.asnumpy() == param2.data.asnumpy())