add testcases of Tensor

huangbingjian 2021-12-10 17:54:15 +08:00
parent 9e37e77dc2
commit 038cbe43de
3 changed files with 232 additions and 30 deletions

View File

@@ -160,3 +160,19 @@ def test_np_fallback_func_tensor_index():
    output = np_fallback_func_tensor_index(x)
    output_expect = Tensor(6, mstype.float32)
    assert output == output_expect


@pytest.mark.skip(reason='Not supported in graph jit fallback feature yet')
def test_np_calculate():
    """
    Feature: Fallback feature.
    Description: Support numpy calculation.
    Expectation: No exception.
    """
    @ms_function
    def np_calculate():
        x = np.array([3, 1, 2, 4, 5])
        y = x % 2
        z = Tensor(y)
        return z
    assert np.all(np_calculate().asnumpy() == np.array([1, 1, 0, 0, 1]))
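
For reference, a minimal eager-mode sketch of the same computation (plain numpy plus Tensor construction, no ms_function or graph compilation; the expected values follow directly from the modulo operation):

# Reference sketch, not part of the committed diff: the same values computed eagerly.
import numpy as np
from mindspore import Tensor

x = np.array([3, 1, 2, 4, 5])
y = x % 2                                   # array([1, 1, 0, 0, 1])
z = Tensor(y)
assert np.all(z.asnumpy() == np.array([1, 1, 0, 0, 1]))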

View File

@@ -54,21 +54,6 @@ def test_use_monad():
    print(use_monad(x, y))


@ms_function
def use_tensor_with_mstype():
    me_x = Tensor(1, mstype.int32)
    return me_x


def test_tensor_with_mstype():
    """
    Feature: JIT Fallback
    Description: Test tensor with mstype in graph mode.
    Expectation: No exception.
    """
    print(use_tensor_with_mstype())


@ms_function
def use_tuple_of_tensor():
    me_x = (Tensor(1), Tensor(1))
@@ -244,11 +229,7 @@ def test_np_tensor_add():
print("tensor_list:", tensor_list)
assert tensor_list[-1] == 11
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_binop_new_tensor():
"""
Feature: Fallback feature
@@ -268,11 +249,6 @@ def test_binop_new_tensor():
    print(net())


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_fallback_tensor_compare():
    """
    Feature: Fallback feature
@@ -293,11 +269,6 @@ def test_fallback_tensor_compare():
    print(compare_net())


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_fallback_tensor_not():
    """
    Feature: Fallback feature

View File

@@ -0,0 +1,215 @@
import pytest
import numpy as np
from mindspore import Tensor, ms_function, context
import mindspore.common.dtype as mstype
from mindspore.common.initializer import One

context.set_context(mode=context.GRAPH_MODE)


def test_tensor():
    """
    Feature: JIT Fallback
    Description: Test Tensor() in graph mode.
    Expectation: No exception.
    """
    @ms_function
    def foo():
        me_x = Tensor(1)
        return me_x
    print(foo())


def test_tensor_bool():
    """
    Feature: JIT Fallback
    Description: Test Tensor(bool) in graph mode.
    Expectation: No exception.
    """
    @ms_function
    def foo():
        me_x = Tensor([True, True, False])
        return me_x
    print(foo())


def test_tensor_array():
    """
    Feature: JIT Fallback
    Description: Test Tensor(array) in graph mode.
    Expectation: No exception.
    """
    @ms_function
    def foo():
        me_x = Tensor([1])
        return me_x
    print(foo())


def test_tensor_with_mstype():
    """
    Feature: JIT Fallback
    Description: Test Tensor() with mstype in graph mode.
    Expectation: No exception.
    """
    @ms_function
    def foo():
        me_x = Tensor(1, mstype.int32)
        return me_x
    print(foo())


def test_tensor_array_with_mstype():
    """
    Feature: JIT Fallback
    Description: Test Tensor(array) with mstype in graph mode.
    Expectation: No exception.
    """
    @ms_function
    def foo():
        me_x = Tensor([1], mstype.int32)
        return me_x
    print(foo())


@pytest.mark.skip(reason='Not supported in graph jit fallback feature yet')
def test_tensor_array_astype():
    """
    Feature: JIT Fallback
    Description: Test Tensor(array) with astype() in graph mode.
    Expectation: No exception.
    """
    @ms_function
    def foo():
        me_x = Tensor([1.1, -2.1]).astype("float32")
        return me_x
    print(foo())


def test_tensor_with_numpy():
    """
    Feature: JIT Fallback
    Description: Test Tensor() with numpy in graph mode.
    Expectation: No exception.
    """
    @ms_function
    def foo():
        me_x = Tensor(np.zeros([1, 2, 3]), mstype.float32)
        return me_x
    print(foo())


def test_tensor_with_init():
    """
    Feature: JIT Fallback
    Description: Test Tensor() with init in graph mode.
    Expectation: No exception.
    """
    @ms_function
    def foo():
        me_x = Tensor(shape=(1, 3), dtype=mstype.float32, init=One())
        return me_x
    print(foo())
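
The init= form builds the Tensor lazily, so here is a short eager-mode sketch of what test_tensor_with_init exercises (assuming Tensor.init_data() is the call that materializes the initializer; the committed test only runs the construction through ms_function and prints the result):

# Eager-mode sketch of the init=One() construction, outside the committed diff.
from mindspore import Tensor
import mindspore.common.dtype as mstype
from mindspore.common.initializer import One

t = Tensor(shape=(1, 3), dtype=mstype.float32, init=One())
t = t.init_data()   # assumption: materializes the lazily initialized values
print(t)            # expected: a (1, 3) tensor of ones, [[1. 1. 1.]]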


def test_tensor_reshape():
    """
    Feature: JIT Fallback
    Description: Test Tensor() with reshape() in graph mode.
    Expectation: No exception.
    """
    @ms_function
    def foo():
        me_x = Tensor(np.arange(10, 16).reshape(2, 3).astype("float32"))
        return me_x
    print(foo())


@pytest.mark.skip(reason='Not supported in graph jit fallback feature yet')
def test_tensor_abs():
    """
    Feature: JIT Fallback
    Description: Test Tensor.abs() in graph mode.
    Expectation: No exception.
    """
    @ms_function
    def foo():
        a = Tensor([1.1, -2.1]).astype("float32")
        out = a.abs()
        return out
    print(foo())


@pytest.mark.skip(reason='Not supported in graph jit fallback feature yet')
def test_tensor_all():
    """
    Feature: JIT Fallback
    Description: Test Tensor.all() in graph mode.
    Expectation: No exception.
    """
    @ms_function
    def foo():
        a = Tensor([True, True, False])
        out = a.all()
        return out
    print(foo())


@pytest.mark.skip(reason='Not supported in graph jit fallback feature yet')
def test_tensor_any():
    """
    Feature: JIT Fallback
    Description: Test Tensor.any() in graph mode.
    Expectation: No exception.
    """
    @ms_function
    def foo():
        a = Tensor([True, True, False])
        out = a.any()
        return out
    print(foo())


@pytest.mark.skip(reason='Not supported in graph jit fallback feature yet')
def test_tensor_argmax():
    """
    Feature: JIT Fallback
    Description: Test Tensor.argmax() in graph mode.
    Expectation: No exception.
    """
    @ms_function
    def foo():
        a = Tensor(np.arange(10, 16).reshape(2, 3).astype("float32"))
        out = a.argmax()
        return out
    print(foo())


@pytest.mark.skip(reason='Not supported in graph jit fallback feature yet')
def test_tensor_argmin():
    """
    Feature: JIT Fallback
    Description: Test Tensor.argmin() in graph mode.
    Expectation: No exception.
    """
    @ms_function
    def foo():
        a = Tensor(np.arange(10, 16).reshape(2, 3).astype("float32"))
        out = a.argmin()
        return out
    print(foo())


@pytest.mark.skip(reason='Not supported in graph jit fallback feature yet')
def test_tensor_astype():
    """
    Feature: JIT Fallback
    Description: Test Tensor.astype() in graph mode.
    Expectation: No exception.
    """
    @ms_function
    def foo():
        a = Tensor(np.ones((1, 2, 2, 1), dtype=np.float32))
        out = a.astype("int32")
        return out
    print(foo())
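
Most of these tests only print the graph result. As a hedged sketch, one way a test like test_tensor_with_numpy could be tightened with explicit assertions (a hypothetical variant, not part of the commit, reusing the imports at the top of the new file):

# Hypothetical assertion-based variant: check shape and values of the graph
# output instead of printing it.
@ms_function
def foo():
    return Tensor(np.zeros([1, 2, 3]), mstype.float32)

out = foo()
assert out.shape == (1, 2, 3)
assert np.all(out.asnumpy() == np.zeros([1, 2, 3]))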