forked from mindspore-Ecosystem/mindspore
!29104 Fix some CPU operator whitelists
Merge pull request !29104 from chenweitao_295/doc_issue_2
commit 8304f50738
@@ -82,7 +82,7 @@ void SearchSortedCPUKernel<S, T>::CheckParam(const std::vector<AddressPtr> &inpu
   if (outputs[0]->size / sizeof(T) != inputs[1]->size / sizeof(S)) {
     MS_LOG(EXCEPTION) << "For '" << kernel_name_
-                      << "', the dimensions of input and output should be matched, but got the dimension of input "
+                      << "', the dimension of `v` and output should be equal, but got the dimension of `v` "
                       << inputs[1]->size << " and the dimension of output " << outputs[0]->size;
   }
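A minimal, self-contained sketch of what the check above enforces, for readers outside the codebase: SearchSorted expects the output buffer to hold exactly one element per element of the second input `v`, so each byte size is divided by its element width before comparing. CheckElementCountMatch and the raw byte-size arguments are hypothetical simplifications, not MindSpore APIs.

// Standalone sketch, not the MindSpore kernel: compare element counts, not byte sizes.
#include <cstddef>
#include <iostream>
#include <stdexcept>
#include <string>

template <typename S, typename T>
void CheckElementCountMatch(std::size_t v_bytes, std::size_t out_bytes) {
  // Mirrors: outputs[0]->size / sizeof(T) != inputs[1]->size / sizeof(S)
  if (out_bytes / sizeof(T) != v_bytes / sizeof(S)) {
    throw std::runtime_error("For 'SearchSorted', the dimension of `v` and output should be equal, but got " +
                             std::to_string(v_bytes) + " and " + std::to_string(out_bytes));
  }
}

int main() {
  // 8 float elements in `v`, 8 int32 indices in the output: byte sizes differ, element counts match.
  CheckElementCountMatch<float, int>(8 * sizeof(float), 8 * sizeof(int));
  std::cout << "element counts match\n";
  return 0;
}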
@@ -156,8 +156,8 @@ bool SliceCPUKernel::Launch(const std::vector<kernel::AddressPtr> &inputs, const
     if (input_shape[i] < LongToSize(begin[i] + size[i])) {
       MS_LOG(EXCEPTION) << "For '" << kernel_name_
                         << "', slice shape should be not greater than origin shape. But in dimension i=" << i
-                        << ", origin shape 'input_shape[i]' is " << input_shape[i]
-                        << " and slice shape 'LongToSize(begin[i] + size[i])' is " << LongToSize(begin[i] + size[i]);
+                        << ", origin shape 'input_shape[i]' is " << input_shape[i] << " and slice shape is "
+                        << LongToSize(begin[i] + size[i]);
     }
   }
   InitSliceParam(input_shape, begin, size);
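The slice check above can be read as a per-dimension bounds rule: begin[i] + size[i] must not exceed input_shape[i]. Below is a standalone sketch under the assumption that begin and size have already been validated as non-negative; ValidateSlice is a hypothetical helper, not the kernel itself.

// Standalone sketch: reject slices that run past the end of any input dimension.
#include <cstddef>
#include <stdexcept>
#include <string>
#include <vector>

void ValidateSlice(const std::vector<std::size_t> &input_shape, const std::vector<long long> &begin,
                   const std::vector<long long> &size) {
  for (std::size_t i = 0; i < input_shape.size(); ++i) {
    // Analogue of LongToSize(begin[i] + size[i]); assumes begin[i] and size[i] are non-negative.
    const std::size_t end = static_cast<std::size_t>(begin[i] + size[i]);
    if (input_shape[i] < end) {
      throw std::runtime_error("slice shape should be not greater than origin shape. But in dimension i=" +
                               std::to_string(i) + ", origin shape is " + std::to_string(input_shape[i]) +
                               " and slice shape is " + std::to_string(end));
    }
  }
}

int main() {
  ValidateSlice({4, 6}, {1, 2}, {3, 4});  // ok: 1+3 <= 4 and 2+4 <= 6
  return 0;
}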
@@ -111,7 +111,7 @@ void SparseApplyAdamCPUKernel::InitKernel(const CNodePtr &kernel_node) {
   std::vector<size_t> indices_shape = AnfAlgo::GetPrevNodeOutputInferShape(kernel_node, 10);
   if (var_shape.empty()) {
     MS_LOG(EXCEPTION) << "For '" << kernel_name_
-                      << "', the dimension of 'var' should be at least 1-D, but got empty tensor.";
+                      << "', the dimension of 'var' should be at least 1-D, but got scalar or None.";
   }
   if (!IsSameShape(var_shape, m_shape)) {
     MS_LOG(EXCEPTION) << "For '" << kernel_name_
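This hunk, and the three SparseApply* hunks that follow, rely on the same convention: a shape is stored as a std::vector<size_t>, so an empty vector denotes a 0-D value, which is why the reworded message says "scalar or None". A minimal sketch of that rank check; RequireAtLeast1D is a hypothetical name, not a MindSpore API.

// Standalone sketch: an empty shape vector means a 0-D (scalar) input, which is rejected for 'var'.
#include <cstddef>
#include <stdexcept>
#include <vector>

void RequireAtLeast1D(const std::vector<std::size_t> &var_shape) {
  if (var_shape.empty()) {
    throw std::runtime_error("the dimension of 'var' should be at least 1-D, but got scalar or None.");
  }
}

int main() {
  RequireAtLeast1D({128, 16});  // ok: a 2-D parameter tensor
  return 0;
}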
@@ -95,7 +95,7 @@ void SparseApplyFtrlCPUKernel::InitKernel(const CNodePtr &kernel_node) {
   std::vector<size_t> grad_shape = AnfAlgo::GetPrevNodeOutputInferShape(kernel_node, 3);
   std::vector<size_t> indices_shape = AnfAlgo::GetPrevNodeOutputInferShape(kernel_node, 4);
   if (var_shape.empty()) {
-    MS_LOG(EXCEPTION) << "For '" << kernel_name_ << "', the 'var' should be at least 1-D, but got empty tensor.";
+    MS_LOG(EXCEPTION) << "For '" << kernel_name_ << "', the 'var' should be at least 1-D, but got scalar or None.";
   }
   if (!IsSameShape(var_shape, accum_shape)) {
     MS_LOG(EXCEPTION) << "For '" << kernel_name_
@@ -91,7 +91,7 @@ void SparseApplyLazyAdamCPUKernel::InitKernel(const CNodePtr &kernel_node) {
   std::vector<size_t> indices_shape = AnfAlgo::GetPrevNodeOutputInferShape(kernel_node, 10);
   if (var_shape.empty()) {
     MS_LOG(EXCEPTION) << "For '" << kernel_name_
-                      << "', the dimension of 'var' should be at least 1-D, but got empty tensor.";
+                      << "', the dimension of 'var' should be at least 1-D, but got scalar or None.";
   }
   if (!IsSameShape(var_shape, m_shape)) {
     MS_LOG(EXCEPTION) << "For '" << kernel_name_
@@ -93,7 +93,7 @@ void SparseApplyProximalAdagradCPUKernel::InitKernel(const CNodePtr &kernel_node
   std::vector<size_t> indices_shape = AnfAlgo::GetPrevNodeOutputInferShape(kernel_node, 6);
   if (var_shape.empty()) {
     MS_LOG(EXCEPTION) << "For '" << kernel_name_
-                      << "', the dimension of 'var' should be at least 1-D, but got empty tensor.";
+                      << "', the dimension of 'var' should be at least 1-D, but got scalar or None.";
   }
   if (!IsSameShape(var_shape, accum_shape)) {
     MS_LOG(EXCEPTION) << "For '" << kernel_name_
@@ -129,15 +129,18 @@ void SparseApplyProximalAdagradCPUKernel::InitKernel(const CNodePtr &kernel_node
   }
   if (!lr_shape.empty()) {
     MS_LOG(EXCEPTION) << "For '" << kernel_name_
-                      << "', 'lr' should be a scalar, but got the dimension of 'lr': " << Vector2Str(lr_shape);
+                      << "', 'lr' should be a scalar,and dimension of 'lr' should be 0,but got the dimension of 'lr': "
+                      << Vector2Str(lr_shape);
   }
   if (!l1_shape.empty()) {
     MS_LOG(EXCEPTION) << "For '" << kernel_name_
-                      << "', 'l1' should be a scalar, but got the dimension of 'l1': " << Vector2Str(l1_shape);
+                      << "', 'l1' should be a scalar,and dimension of 'l1' should be 0,but got the dimension of 'l1': "
+                      << Vector2Str(l1_shape);
   }
   if (!l2_shape.empty()) {
     MS_LOG(EXCEPTION) << "For '" << kernel_name_
-                      << "', 'l2' should be a scalar, but got the dimension of 'l2': " << Vector2Str(l2_shape);
+                      << "', 'l2' should be a scalar,and dimension of 'l2' should be 0,but got the dimension of 'l2': "
+                      << Vector2Str(l2_shape);
   }
   indices_data_type_ = AnfAlgo::GetInputDeviceDataType(kernel_node, 6);
 }
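The lr/l1/l2 checks above use the inverse convention: a hyperparameter passed as a true scalar must have an empty (0-D) shape, so any non-empty shape vector is rejected. A standalone sketch with hypothetical names (RequireScalar, and ShapeToStr standing in for Vector2Str):

// Standalone sketch: scalar hyperparameters must have an empty (0-D) shape vector.
#include <cstddef>
#include <sstream>
#include <stdexcept>
#include <string>
#include <vector>

std::string ShapeToStr(const std::vector<std::size_t> &shape) {  // rough stand-in for Vector2Str
  std::ostringstream oss;
  oss << "[";
  for (std::size_t i = 0; i < shape.size(); ++i) {
    oss << (i ? ", " : "") << shape[i];
  }
  oss << "]";
  return oss.str();
}

void RequireScalar(const char *name, const std::vector<std::size_t> &shape) {
  if (!shape.empty()) {
    throw std::runtime_error(std::string("'") + name + "' should be a scalar, and its dimension should be 0, " +
                             "but got the dimension of '" + name + "': " + ShapeToStr(shape));
  }
}

int main() {
  RequireScalar("lr", {});  // ok: 0-D learning rate
  RequireScalar("l1", {});  // ok
  return 0;
}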