!49540 fix input type bug of logical_not

Merge pull request !49540 from wtcheng/master
This commit is contained in:
i-robot 2023-03-01 01:24:48 +00:00 committed by Gitee
commit aa2b53887d
No known key found for this signature in database
GPG Key ID: 173E9B9CA92EEF8F
4 changed files with 3 additions and 32 deletions

View File

@@ -3,6 +3,6 @@
.. py:class:: mindspore.ops.LogicalNot
逐元素计算个Tensor的逻辑非运算。
逐元素计算一个Tensor的逻辑非运算。
详情请查看 :func:`mindspore.ops.logical_not`

View File

@@ -3,7 +3,7 @@ mindspore.ops.logical_not
.. py:function:: mindspore.ops.logical_not(x)
逐元素计算个Tensor的逻辑非运算。
逐元素计算一个Tensor的逻辑非运算。
.. math::
out_{i} = \neg x_{i}

View File

@@ -54,34 +54,8 @@ bool LogicalNotGpuKernelMod::LaunchKernel(const std::vector<AddressPtr> &inputs,
// Returns the dtype-dispatch table for the LogicalNot GPU kernel: each entry
// pairs a KernelAttr (one supported input dtype, output always kNumberTypeBool)
// with the matching LaunchKernel<T> template instantiation.
// NOTE(review): this listing is taken from a rendered diff ("fix input type bug
// of logical_not", 3 additions / 32 deletions) whose +/- markers were stripped;
// most of these non-bool entries were presumably REMOVED by the commit (the
// accompanying test now parametrizes only np.bool_) — confirm which entries
// survive against the merged file before relying on this table.
const std::vector<std::pair<KernelAttr, LogicalNotGpuKernelMod::KernelRunFunc>> &LogicalNotGpuKernelMod::GetFuncList()
const {
// Function-local static: the table is built once on first call and shared by
// every subsequent call; returning it by const reference avoids a copy.
static const std::vector<std::pair<KernelAttr, LogicalNotGpuKernelMod::KernelRunFunc>> func_list = {
{KernelAttr().AddInputAttr(kNumberTypeFloat64).AddOutputAttr(kNumberTypeBool),
&LogicalNotGpuKernelMod::LaunchKernel<double>},
{KernelAttr().AddInputAttr(kNumberTypeFloat32).AddOutputAttr(kNumberTypeBool),
&LogicalNotGpuKernelMod::LaunchKernel<float>},
{KernelAttr().AddInputAttr(kNumberTypeFloat16).AddOutputAttr(kNumberTypeBool),
&LogicalNotGpuKernelMod::LaunchKernel<half>},
{KernelAttr().AddInputAttr(kNumberTypeBool).AddOutputAttr(kNumberTypeBool),
&LogicalNotGpuKernelMod::LaunchKernel<bool>},
{KernelAttr().AddInputAttr(kNumberTypeInt8).AddOutputAttr(kNumberTypeBool),
&LogicalNotGpuKernelMod::LaunchKernel<int8_t>},
{KernelAttr().AddInputAttr(kNumberTypeInt16).AddOutputAttr(kNumberTypeBool),
&LogicalNotGpuKernelMod::LaunchKernel<int16_t>},
{KernelAttr().AddInputAttr(kNumberTypeInt32).AddOutputAttr(kNumberTypeBool),
&LogicalNotGpuKernelMod::LaunchKernel<int32_t>},
{KernelAttr().AddInputAttr(kNumberTypeInt64).AddOutputAttr(kNumberTypeBool),
&LogicalNotGpuKernelMod::LaunchKernel<int64_t>},
{KernelAttr().AddInputAttr(kNumberTypeUInt8).AddOutputAttr(kNumberTypeBool),
&LogicalNotGpuKernelMod::LaunchKernel<uint8_t>},
{KernelAttr().AddInputAttr(kNumberTypeUInt16).AddOutputAttr(kNumberTypeBool),
&LogicalNotGpuKernelMod::LaunchKernel<uint16_t>},
{KernelAttr().AddInputAttr(kNumberTypeUInt32).AddOutputAttr(kNumberTypeBool),
&LogicalNotGpuKernelMod::LaunchKernel<uint32_t>},
{KernelAttr().AddInputAttr(kNumberTypeUInt64).AddOutputAttr(kNumberTypeBool),
&LogicalNotGpuKernelMod::LaunchKernel<uint64_t>},
{KernelAttr().AddInputAttr(kNumberTypeComplex64).AddOutputAttr(kNumberTypeBool),
&LogicalNotGpuKernelMod::LaunchKernel<utils::Complex<float>>},
{KernelAttr().AddInputAttr(kNumberTypeComplex128).AddOutputAttr(kNumberTypeBool),
&LogicalNotGpuKernelMod::LaunchKernel<utils::Complex<double>>},
};
return func_list;
}

View File

@@ -87,10 +87,7 @@ def test_logicalor():
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
@pytest.mark.parametrize("dtype", [np.bool_, np.int8, np.int16, np.int32, np.int64,
np.uint8, np.uint16, np.uint32, np.uint64,
np.float16, np.float32, np.float64,
np.complex64, np.complex128])
@pytest.mark.parametrize("dtype", [np.bool_])
@pytest.mark.parametrize("mode", [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_logicalnot(dtype, mode):
"""