!44558 Add tensor.det() and tensor.ndimension()
Merge pull request !44558 from shaojunsong/feature/tensor1025
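Both additions are thin aliases over existing Tensor APIs: `det()` reuses `matrix_determinant()` and `ndimension()` reports the same value as `ndim`. A minimal usage sketch of the new methods (illustrative only; output comments are approximate):

import numpy as np
import mindspore as ms
from mindspore import Tensor

x = Tensor(np.array([[1.5, 2.0], [3.0, 4.6]]), ms.float32)
print(x.ndimension())   # 2, identical to x.ndim
print(x.det())          # same result as x.matrix_determinant()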
commit bf99cc4fa1
@@ -0,0 +1,6 @@
+mindspore.Tensor.det
+====================
+
+.. py:method:: mindspore.Tensor.det()
+
+    Refer to `Tensor.matrix_determinant() <https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.matrix_determinant.html>`_.
@@ -0,0 +1,6 @@
+mindspore.Tensor.ndimension
+===========================
+
+.. py:method:: mindspore.Tensor.ndimension()
+
+    Refer to `Tensor.ndim() <https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.ndim.html>`_.
@@ -156,6 +156,7 @@ Reduction Methods
     mindspore.Tensor.ger
     mindspore.Tensor.log_matrix_determinant
     mindspore.Tensor.matrix_determinant
+    mindspore.Tensor.det
 
 Tensor Operation Methods
 ------------------------
@@ -215,6 +216,7 @@ Array Methods
     mindspore.Tensor.minimum
     mindspore.Tensor.nbytes
     mindspore.Tensor.ndim
+    mindspore.Tensor.ndimension
     mindspore.Tensor.nonzero
     mindspore.Tensor.narrow
     mindspore.Tensor.ptp
@@ -161,6 +161,7 @@ Linear Algebraic Methods
     mindspore.Tensor.ger
     mindspore.Tensor.log_matrix_determinant
     mindspore.Tensor.matrix_determinant
+    mindspore.Tensor.det
 
 Tensor Operation Methods
 ------------------------
@@ -220,6 +221,7 @@ Array Methods
     mindspore.Tensor.minimum
     mindspore.Tensor.nbytes
     mindspore.Tensor.ndim
+    mindspore.Tensor.ndimension
     mindspore.Tensor.nonzero
     mindspore.Tensor.narrow
     mindspore.Tensor.ptp
@@ -232,7 +232,9 @@ BuiltInTypeMap &GetMethodMap() {
       {"log1p", std::string("log1p")},                                    // P.Log1p()
      {"logit", std::string("logit")},                                    // Logit()
      {"log_matrix_determinant", std::string("log_matrix_determinant")},  // log_matrix_determinant()
-      {"matrix_determinant", std::string("matrix_determinant")},          // log_matrix_determinant()
+      {"matrix_determinant", std::string("matrix_determinant")},          // matrix_determinant()
+      {"det", std::string("matrix_determinant")},                         // det()
+      {"ndimension", std::string("ndim_")},                               // ndimension()
      {"max", std::string("max")},                                        // P.reduce_max()
      {"min", std::string("min")},                                        // P.reduce_min()
      {"pow", std::string("pow")},                                        // P.Pow()
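The method map above is what makes the aliases work in graph mode without new kernels: `det` is registered against the existing `matrix_determinant` handler and `ndimension` against `ndim_`. A quick equivalence check one could run (a sketch, not part of this change):

import numpy as np
from mindspore import Tensor

x = Tensor(np.array([[2.0, 1.0], [0.5, 3.0]], dtype=np.float32))
# The aliases should agree with the methods they are registered against.
assert np.allclose(x.det().asnumpy(), x.matrix_determinant().asnumpy())
assert x.ndimension() == x.ndim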
@@ -92,6 +92,11 @@ def mean(x, axis=(), keep_dims=False):
     return reduce_mean(x, axis)
 
 
+def ndimension(x):
+    """Return the number of tensor dimensions."""
+    return len(x.shape)
+
+
 def prod(x, axis=(), keep_dims=False):
     """
     Reduces a dimension of a tensor by product all elements in the dimension.
@@ -1068,6 +1073,11 @@ def resize(x, *new_shape):
     return res.reshape(new_shape)
 
 
+def det(x):
+    """Computes the determinant of one or more square matrices."""
+    return F.matrix_determinant(x)
+
+
 def diagonal(x, offset=0, axis1=0, axis2=1):
     """
     Returns specified diagonals.
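Because `det` is documented as handling one or more square matrices, a batched cross-check against NumPy is a convenient sanity test; the batch below mirrors the example input used later in the tensor.py docstring (a sketch, assuming `matrix_determinant` matches `np.linalg.det` on stacked 2x2 matrices):

import numpy as np
from mindspore import Tensor

batch = np.array([[[-4.5, -1.5], [7.0, 6.0]],
                  [[2.5, 0.5], [3.0, 9.0]]], dtype=np.float32)
x = Tensor(batch)
# One determinant per 2x2 matrix in the batch.
assert np.allclose(x.det().asnumpy(), np.linalg.det(batch), rtol=1e-5)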
@@ -627,6 +627,13 @@ class Tensor(Tensor_):
         """
         return Tensor(data, dtype=dtype)
 
+    def ndimension(self):
+        r"""
+        Refer to `Tensor.ndim()
+        <https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.ndim.html>`_.
+        """
+        return len(self._shape)
+
     def set_const_arg(self, const_arg=True):
         """
         Specify whether the tensor is a constant when it is used for the argument of a network.
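Since the new method simply returns `len(self._shape)`, it stays consistent with the existing `ndim` property and with `len(t.shape)`; a small hedged check:

import numpy as np
from mindspore import Tensor

t = Tensor(np.zeros((4, 3, 2), dtype=np.float32))
assert t.ndimension() == t.ndim == len(t.shape) == 3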
@@ -2011,7 +2018,7 @@ class Tensor(Tensor_):
             ValueError: If the dimension of self tensor is less than 2.
 
         Supported Platforms:
-            ``Ascend`` ``GPU`` ``CPU``
+            ``GPU`` ``CPU``
 
         Examples:
             >>> input_x = Tensor(np.array([[[-4.5, -1.5], [7.0, 6.0]], [[2.5, 0.5], [3.0, 9.0]]]), mindspore.float32)
@@ -4244,6 +4251,15 @@ class Tensor(Tensor_):
         res = flattened[:new_size]
         return res.reshape(new_shape)
 
+    def det(self):
+        r"""
+        Refer to `Tensor.matrix_determinant()
+        <https://www.mindspore.cn/docs/zh-CN/master/api_python/mindspore/Tensor/mindspore.Tensor.matrix_determinant.html
+        >`_.
+        """
+        self._init_check()
+        return tensor_operator_registry.get('matrix_determinant')(self)
+
     def diagonal(self, offset=0, axis1=0, axis2=1):
         """
         Return specified diagonals.
@@ -0,0 +1,31 @@
+import numpy as np
+import pytest
+import mindspore.common.dtype as mstype
+import mindspore.nn as nn
+from mindspore import Tensor
+from mindspore import context
+
+
+class Net(nn.Cell):
+    def construct(self, x):
+        return x.det()
+
+
+@pytest.mark.level0
+@pytest.mark.platform_x86_cpu
+@pytest.mark.platform_arm_cpu
+@pytest.mark.platform_x86_gpu_training
+@pytest.mark.env_onecard
+@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
+def test_det(mode):
+    """
+    Feature: tensor.det()
+    Description: Verify the result of tensor.det()
+    Expectation: success
+    """
+    context.set_context(mode=mode)
+    net = Net()
+    x = Tensor([[1.5, 2.0], [3, 4.6]], dtype=mstype.float32)
+    output = net(x)
+    expected = np.array(0.9)
+    assert np.allclose(output.asnumpy(), expected)
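The expected value follows from the 2x2 determinant formula ad - bc: 1.5 * 4.6 - 2.0 * 3.0 = 6.9 - 6.0 = 0.9, which is what the `np.allclose` comparison encodes. The same number can be reproduced with NumPy alone:

import numpy as np
# ad - bc = 1.5 * 4.6 - 2.0 * 3.0 = 0.9
print(np.linalg.det(np.array([[1.5, 2.0], [3.0, 4.6]])))  # ~0.9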
@@ -0,0 +1,32 @@
+import pytest
+import mindspore.common.dtype as mstype
+import mindspore.nn as nn
+from mindspore import Tensor
+from mindspore import context
+
+
+class Net(nn.Cell):
+    def construct(self, x):
+        return x.ndimension()
+
+
+@pytest.mark.level0
+@pytest.mark.platform_x86_cpu
+@pytest.mark.platform_arm_cpu
+@pytest.mark.platform_x86_gpu_training
+@pytest.mark.platform_arm_ascend_training
+@pytest.mark.platform_x86_ascend_training
+@pytest.mark.env_onecard
+@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
+def test_ndimension(mode):
+    """
+    Feature: tensor.ndimension()
+    Description: Verify the result of tensor.ndimension
+    Expectation: success
+    """
+    context.set_context(mode=mode)
+    net = Net()
+    x = Tensor([[1.5, 2.0], [3, 4.6], [0.3, 4.6]], dtype=mstype.float32)
+    output = net(x)
+    expected = 2
+    assert output == expected
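Here the input has shape (3, 2), so `ndimension()` returns 2 in both graph and PyNative modes and the assertion is a plain integer comparison. The NumPy equivalent, for reference:

import numpy as np
# shape (3, 2) -> 2 dimensions, matching Tensor.ndimension() / Tensor.ndim
print(np.array([[1.5, 2.0], [3.0, 4.6], [0.3, 4.6]]).ndim)  # 2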