add tensor.logdet & ops.logdet

This commit is contained in:
shaojunsong 2022-11-25 16:20:06 +08:00
parent 4118dbcd4d
commit 8f28ca73e1
12 changed files with 160 additions and 2 deletions

View File

@ -211,6 +211,7 @@ mindspore.ops
mindspore.ops.log1p
mindspore.ops.logaddexp
mindspore.ops.logaddexp2
mindspore.ops.logdet
mindspore.ops.logical_and
mindspore.ops.logical_not
mindspore.ops.logical_or

View File

@ -0,0 +1,6 @@
mindspore.Tensor.logdet
========================
.. py:method:: mindspore.Tensor.logdet()
For details, please refer to :func:`mindspore.ops.logdet`.

View File

@ -0,0 +1,15 @@
mindspore.ops.logdet
=====================
.. py:function:: mindspore.ops.logdet(x)
Calculates the log determinant of a square matrix or batches of square matrices.
Parameters:
- **x** (Tensor) - Input Tensor of any dimension.
Returns:
Tensor, the log determinant of `x`. If the matrix determinant is smaller than 0, nan is returned. If the matrix determinant equals 0, -inf is returned.
Raises:
- **TypeError** - If dtype of `x` is not float32, float64, Complex64 or Complex128.
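As a quick sanity check on the example used in the English docstring later in this commit: for the batch [[[8, 9], [1, 2]], [[5, 6], [3, 4]]], the determinants are 8*2 - 9*1 = 7 and 5*4 - 6*3 = 2, so logdet returns approximately [log(7), log(2)] ≈ [1.9459, 0.6931], matching the documented output.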

View File

@ -212,6 +212,7 @@ Element-by-Element Operations
mindspore.ops.log1p
mindspore.ops.logaddexp
mindspore.ops.logaddexp2
mindspore.ops.logdet
mindspore.ops.logical_and
mindspore.ops.logical_not
mindspore.ops.logical_or

View File

@ -243,6 +243,7 @@ BuiltInTypeMap &GetMethodMap() {
{"log1p", std::string("log1p")}, // P.Log1p()
{"logit", std::string("logit")}, // Logit()
{"negative", std::string("negative")}, // neg()
{"logdet", std::string("logdet")}, // logdet()
{"log_matrix_determinant", std::string("log_matrix_determinant")}, // log_matrix_determinant()
{"matrix_determinant", std::string("matrix_determinant")}, // matrix_determinant()
{"det", std::string("matrix_determinant")}, // det()

View File

@ -3115,6 +3115,11 @@ def logit(x, eps=None):
return F.logit(x, eps)
def logdet(x):
"""Returns the log determinant of one or batches of square matrices."""
return F.logdet(x)
def log_matrix_determinant(x):
"""Computes the sign and the log of the absolute value of the determinant of one or more square matrices."""
return F.log_matrix_determinant(x)

View File

@ -1443,6 +1443,13 @@ class Tensor(Tensor_):
"""
return tensor_operator_registry.get('logsumexp')(self, dim, keepdim)
def logdet(self):
r"""
For details, please refer to :func:`mindspore.ops.logdet`.
"""
self._init_check()
return tensor_operator_registry.get('logdet')(self)
def log_matrix_determinant(self):
r"""
For details, please refer to :func:`mindspore.ops.log_matrix_determinant`.
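
A minimal usage sketch for the new Tensor method (it dispatches to ops.logdet through the operator registry entry added later in this commit); the sample values reuse the ops.logdet docstring example:

    import numpy as np
    import mindspore as ms

    a = ms.Tensor(np.array([[[8, 9], [1, 2]], [[5, 6], [3, 4]]]), ms.float32)
    print(a.logdet())  # expected roughly [1.9459091 0.6931454], as in the ops.logdet example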

View File

@ -194,6 +194,7 @@ from .math_func import (
heaviside,
gcd,
log,
logdet,
log_matrix_determinant,
matrix_determinant,
linspace,

View File

@ -712,8 +712,8 @@ def subtract(x, other, *, alpha=1):
output[i] = x[i] - alpha * y[i]
Args:
x (Union[Tensor, number.Number]): The tensor or number to be subtracted.
other (Union[Tensor, number.Number]): The tensor or number to subtract.
x (Union[Tensor, number.Number]): Tensor or Number involved in subtraction.
other (Union[Tensor, number.Number]): Tensor or Number involved in subtraction.
Keyword Args:
alpha (Number): The multiplier for `other`. Default: 1.
@ -1107,6 +1107,33 @@ def log(x):
return log_(x)
def logdet(x):
r"""
Calculates the log determinant of a square matrix or batches of square matrices.
Args:
x (Tensor): Input Tensor of any dimension.
Returns:
Tensor, the log determinant of `x`. If the matrix determinant is smaller than 0, nan will be returned. If the
matrix determinant is 0, -inf will be returned.
Raises:
TypeError: If dtype of `x` is not float32, float64, Complex64 or Complex128.
Supported Platforms:
``GPU`` ``CPU``
Examples:
>>> a = Tensor([[[8, 9], [1, 2]], [[5, 6], [3, 4]]], mindspore.float32)
>>> output = ops.logdet(a)
>>> print(output)
[1.9459091 0.6931454]
"""
det = matrix_determinant(x)
return log_(det)
def floor(x):
r"""
Rounds a tensor down to the closest integer element-wise.
@ -7446,6 +7473,7 @@ __all__ = [
'isclose',
'isreal',
'log',
'logdet',
'log_matrix_determinant',
'matrix_determinant',
'linspace',
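
Since the new function is implemented as log(matrix_determinant(x)), its output can be sanity-checked against plain NumPy. A minimal sketch (NumPy only; the input reuses the matrices from the new tests below):

    import numpy as np

    x = np.array([[[1., 2.], [-4., 5.]], [[7., 8.], [-10., 11.]]], dtype=np.float32)
    dets = np.linalg.det(x)   # [13., 157.]
    print(np.log(dets))       # approximately [2.5649, 5.0562], matching the tests' expected values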

View File

@ -184,6 +184,7 @@ tensor_operator_registry.register('mirror_pad', P.MirrorPad)
tensor_operator_registry.register('minimum', P.Minimum)
tensor_operator_registry.register('matrix_determinant', matrix_determinant)
tensor_operator_registry.register('log1p', log1p)
tensor_operator_registry.register('logdet', logdet)
tensor_operator_registry.register('log_matrix_determinant', log_matrix_determinant)
tensor_operator_registry.register('ceil', P.Ceil)
tensor_operator_registry.register('fill', P.Fill)

View File

@ -0,0 +1,47 @@
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore as ms
import mindspore.nn as nn
import mindspore.ops as ops
class Net(nn.Cell):
def construct(self, x):
output = ops.logdet(x)
return output
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_arm_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
@pytest.mark.parametrize('mode', [ms.GRAPH_MODE, ms.PYNATIVE_MODE])
def test_logdet(mode):
"""
Feature: ops.logdet
Description: Verify the result of ops.logdet
Expectation: success
"""
ms.set_context(mode=mode)
x = ms.Tensor(np.array([[[1, 2], [-4, 5]], [[7, 8], [-10, 11]]]), ms.float32)
net = Net()
output = net(x)
expected = np.array([2.564947, 5.0562468])
assert np.allclose(output.asnumpy(), expected)
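
For reference, the expected values follow directly from the definition: det([[1, 2], [-4, 5]]) = 1*5 - 2*(-4) = 13 and det([[7, 8], [-10, 11]]) = 7*11 - 8*(-10) = 157, so the results are log(13) ≈ 2.5649 and log(157) ≈ 5.0562; the same expectations are used in the Tensor.logdet test below.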

View File

@ -0,0 +1,45 @@
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore as ms
import mindspore.nn as nn
class Net(nn.Cell):
def construct(self, x):
return x.logdet()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_arm_cpu
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
@pytest.mark.parametrize('mode', [ms.GRAPH_MODE, ms.PYNATIVE_MODE])
def test_logdet(mode):
"""
Feature: tensor.logdet
Description: Verify the result of tensor.logdet
Expectation: success
"""
ms.set_context(mode=mode)
x = ms.Tensor(np.array([[[1, 2], [-4, 5]], [[7, 8], [-10, 11]]]), ms.float32)
net = Net()
output = net(x)
expected = np.array([2.564947, 5.0562468])
assert np.allclose(output.asnumpy(), expected)