Modify ReduceMinD and ReduceMaxD IR

yanghaoran 2020-04-02 17:15:41 +08:00 committed by mxm
parent c24252b2cc
commit b53c974513
5 changed files with 75 additions and 101 deletions
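The substance of this commit is a set of IR field renames in the GE operator adapters: the compile-time ("D") reduce ops (ReduceMinD, ReduceMaxD, and the other ReduceD variants) now map the MindSpore `axis` attribute onto the GE attribute `axes`, alongside similar input/output renames for the updated graphengine IR. Below is a minimal, hypothetical C++ sketch of that rename-only adaptation; the struct and function names are illustrative only, not the MindSpore or graphengine APIs (the real mapping is expressed through the INPUT_MAP/ATTR_MAP/OUTPUT_MAP macros in the hunks that follow).

#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <vector>

// Framework-side node attributes (illustrative stand-in, not the MindSpore API).
struct AnfNodeAttrs {
  std::map<std::string, std::vector<int64_t>> ints;
};

// GE-side operator attributes (illustrative stand-in, not the graphengine API).
struct GeOpAttrs {
  std::map<std::string, std::vector<int64_t>> ints;
};

// Rename-only adaptation: copy the MindSpore "axis" attribute onto the GE
// operator under the field name "axes" that the updated IR expects.
GeOpAttrs AdaptReduceD(const AnfNodeAttrs &node) {
  GeOpAttrs op;
  auto it = node.ints.find("axis");
  if (it != node.ints.end()) {
    op.ints["axes"] = it->second;
  }
  return op;
}

int main() {
  AnfNodeAttrs node;
  node.ints["axis"] = {0, 2};
  GeOpAttrs op = AdaptReduceD(node);
  for (int64_t a : op.ints["axes"]) {
    std::cout << a << " ";  // prints: 0 2
  }
  std::cout << std::endl;
  return 0;
}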

.gitmodules

@ -12,4 +12,4 @@
url = https://github.com/protocolbuffers/protobuf.git
[submodule "graphengine"]
path = graphengine
url = https://gitee.com/mindspore/graphengine.git
url = https://gitee.com/ms-incubator/graphengine.git

graphengine

@ -1 +1 @@
Subproject commit 5f763679fa33de1608d07f7651c6f16012b953ea
Subproject commit 092c7a1f6548cac7d40e677af3498c3c49ea2bfd


@ -189,7 +189,7 @@ std::unordered_map<std::string, OpAdapterDescPtr> &DfGraphConvertor::get_adpt_ma
{string(kNameApplyMomentum), ADPT_DESC(ApplyMomentum)},
{string(kNameMaxPool), ADPT_DESC(MaxPool)},
{string(kNameAvgPool), ADPT_DESC(AvgPool)},
{string(kNameTopK), ADPT_DESC(TopKV2)},
{string(kNameTopK), ADPT_DESC(TopK)},
{string(kNamePack), ADPT_DESC(Pack)},
{string(kNameSplitD), ADPT_DESC(SplitD)},
{string(kNameAllReduce), ADPT_DESC(HcomAllReduce)},
@ -310,7 +310,7 @@ std::unordered_map<std::string, OpAdapterDescPtr> &DfGraphConvertor::get_adpt_ma
{prim::kPrimMinimum->name(), ADPT_DESC(Minimum)},
{prim::kPrimSelect->name(), ADPT_DESC(Select)},
{string(kNameLessEqual), ADPT_DESC(LessEqual)},
{prim::kPrimLogSoftmax->name(), ADPT_DESC(LogSoftmax)},
{prim::kPrimLogSoftmax->name(), ADPT_DESC(LogSoftmaxV2)},
{string(kNameTruncatedNormal), ADPT_DESC(TruncatedNormal)},
{string(kNameStridedSliceGrad), ADPT_DESC(StridedSliceGrad)},
{prim::kPrimGelu->name(), ADPT_DESC(Gelu)},
@ -343,7 +343,7 @@ std::unordered_map<std::string, OpAdapterDescPtr> &DfGraphConvertor::get_adpt_ma
{prim::kPrimMatMul->name(), ADPT_DESC(MatMul)},
{string(kNameConst), ADPT_DESC(Constant, Const)},
{string(kNameSoftmax), ADPT_DESC(Softmax)},
{string(kNameSoftmax), ADPT_DESC(SoftmaxV2)},
{string(kNameSoftmaxGrad), ADPT_DESC(SoftmaxGrad)},
{string(kNameParam), ADPT_DESC(Data)},
{string(kNameROIAlign), ADPT_DESC(ROIAlign)},
@ -1017,8 +1017,8 @@ DfGraphConvertor &DfGraphConvertor::BuildGraph() {
}
}
// set up dependices
MS_LOG(DEBUG) << "set up dependices";
// set up dependencies
MS_LOG(DEBUG) << "set up dependencies";
std::vector<AnfNodePtr> nodes = ::mindspore::TopoSort(anf_graph_->get_return());
for (auto &it : nodes) {
SetNodeInput(it);
@ -1115,8 +1115,8 @@ void DfGraphConvertor::UpdateDataOpDesc(const AnfNodePtr &it, const OperatorPtr
if (desc == nullptr) {
MS_LOG(ERROR) << "Update data op descriptor failed! TensorDesc is null.";
} else {
(void)std::static_pointer_cast<Data>(op)->update_input_desc_data(*desc);
(void)std::static_pointer_cast<Data>(op)->update_output_desc_out(*desc);
(void)std::static_pointer_cast<Data>(op)->update_input_desc_x(*desc);
(void)std::static_pointer_cast<Data>(op)->update_output_desc_y(*desc);
}
}


@ -138,11 +138,10 @@ OUTPUT_MAP(ApplyMomentum) = {{0, OUTPUT_DESC(var)}};
INPUT_MAP(Summary) = {{2, INPUT_DESC(x)}};
ATTR_MAP(Summary) = EMPTY_ATTR_MAP;
// data
// Data
INPUT_MAP(Data) = EMPTY_INPUT_MAP;
ATTR_MAP(Data) = EMPTY_ATTR_MAP;
// resnet ops in ge
// BatchNorm
INPUT_MAP(BatchNorm) = {{1, INPUT_DESC(x)},
{2, INPUT_DESC(scale)},
@ -194,9 +193,9 @@ OUTPUT_MAP(PRelu) = {{0, OUTPUT_DESC(y)}};
// PReluGrad
INPUT_MAP(PReluGrad) = {
{1, INPUT_DESC(input_gradients)}, {2, INPUT_DESC(input_features)}, {3, INPUT_DESC(input_weights)}};
{1, INPUT_DESC(grads)}, {2, INPUT_DESC(features)}, {3, INPUT_DESC(weights)}};
ATTR_MAP(PReluGrad) = EMPTY_ATTR_MAP;
OUTPUT_MAP(PReluGrad) = {{0, OUTPUT_DESC(output_backprops_dx)}, {1, OUTPUT_DESC(output_backprops_da)}};
OUTPUT_MAP(PReluGrad) = {{0, OUTPUT_DESC(dx)}, {1, OUTPUT_DESC(da)}};
// Sigmoid
INPUT_MAP(Sigmoid) = {{1, INPUT_DESC(x)}};
@ -241,12 +240,12 @@ ATTR_MAP(CumsumD) = {{"exclusive", ATTR_DESC(exclusive, AnyTraits<bool>())},
{"reverse", ATTR_DESC(reverse, AnyTraits<bool>())}};
OUTPUT_MAP(CumsumD) = {{0, OUTPUT_DESC(y)}};
// softmax
INPUT_MAP(Softmax) = {{1, INPUT_DESC(x)}};
ATTR_MAP(Softmax) = {
{"axis", ATTR_DESC(axis, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
// SoftmaxV2
INPUT_MAP(SoftmaxV2) = {{1, INPUT_DESC(x)}};
ATTR_MAP(SoftmaxV2) = {
{"axis", ATTR_DESC(axes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
};
OUTPUT_MAP(Softmax) = {{0, OUTPUT_DESC(y)}};
OUTPUT_MAP(SoftmaxV2) = {{0, OUTPUT_DESC(y)}};
// SoftmaxGrad
INPUT_MAP(SoftmaxGrad) = {{1, INPUT_DESC(softmax)}, {2, INPUT_DESC(grad_softmax)}};
@ -269,21 +268,21 @@ ATTR_MAP(GatherV2) = EMPTY_ATTR_MAP;
OUTPUT_MAP(GatherV2) = {{0, OUTPUT_DESC(y)}};
// ReduceSum
INPUT_MAP(ReduceSum) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(axis)}};
INPUT_MAP(ReduceSum) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(axes)}};
ATTR_MAP(ReduceSum) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
OUTPUT_MAP(ReduceSum) = {{0, OUTPUT_DESC(y)}};
// ReduceSumD
INPUT_MAP(ReduceSumD) = {{1, INPUT_DESC(x)}};
INPUT_ATTR_MAP(ReduceSumD) = {
{2, ATTR_DESC(axis, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
{2, ATTR_DESC(axes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
ATTR_MAP(ReduceSumD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
OUTPUT_MAP(ReduceSumD) = {{0, OUTPUT_DESC(y)}};
// ReduceProdD
INPUT_MAP(ReduceProdD) = {{1, INPUT_DESC(x)}};
INPUT_ATTR_MAP(ReduceProdD) = {
{2, ATTR_DESC(axis, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
{2, ATTR_DESC(axes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
ATTR_MAP(ReduceProdD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
OUTPUT_MAP(ReduceProdD) = {{0, OUTPUT_DESC(y)}};
@ -294,7 +293,7 @@ ATTR_MAP(CumprodD) = {{"exclusive", ATTR_DESC(exclusive, AnyTraits<bool>())},
{"reverse", ATTR_DESC(reverse, AnyTraits<bool>())}};
OUTPUT_MAP(CumprodD) = {{0, OUTPUT_DESC(y)}};
// SoftmaxCrossEntropyWithLogits/
// SoftmaxCrossEntropyWithLogits
INPUT_MAP(SoftmaxCrossEntropyWithLogits) = {{1, INPUT_DESC(features)}, {2, INPUT_DESC(labels)}};
ATTR_MAP(SoftmaxCrossEntropyWithLogits) = EMPTY_ATTR_MAP;
OUTPUT_MAP(SoftmaxCrossEntropyWithLogits) = {{0, OUTPUT_DESC(loss)}, {1, OUTPUT_DESC(backprop)}};
@ -306,7 +305,7 @@ INPUT_ATTR_MAP(MeanGrad) = {{2, ATTR_DESC(mean_grad_output_shape_value, kOpForma
ATTR_MAP(MeanGrad) = {{"mode", ATTR_DESC(mode, AnyTraits<int64_t>())}};
INPUT_MAP(SliceD) = {{1, INPUT_DESC(x)}};
INPUT_ATTR_MAP(SliceD) = {{2, ATTR_DESC(begin, AnyTraits<int>(), AnyTraits<std::vector<int64_t>>())},
INPUT_ATTR_MAP(SliceD) = {{2, ATTR_DESC(offsets, AnyTraits<int>(), AnyTraits<std::vector<int64_t>>())},
{3, ATTR_DESC(size, AnyTraits<int>(), AnyTraits<std::vector<int64_t>>())}};
ATTR_MAP(SliceD) = EMPTY_ATTR_MAP;
OUTPUT_MAP(SliceD) = {{0, OUTPUT_DESC(y)}};
@ -401,42 +400,10 @@ ATTR_MAP(BoundingBoxDecode) = {
};
OUTPUT_MAP(BoundingBoxDecode) = {{0, OUTPUT_DESC(bboxes)}};
#ifdef VALID_CODE
// Less
INPUT_MAP(Less) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(y)}};
ATTR_MAP(Less) = EMPTY_ATTR_MAP;
OUTPUT_MAP(Less) = {{0, OUTPUT_DESC(z)}};
// Cast
INPUT_MAP(Cast) = {{1, INPUT_DESC(x)}};
INPUT_ATTR_MAP(Cast) = {{2, ATTR_DESC(dst_type, AnyTraits<GEType>())}};
ATTR_MAP(Cast) = {{"Truncate", ATTR_DESC(truncate, AnyTraits<bool>())}};
OUTPUT_MAP(Cast) = {{0, OUTPUT_DESC(y)}};
// Minimum
INPUT_MAP(Minimum) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(y)}};
ATTR_MAP(Minimum) = {{"alpha", ATTR_DESC(alpha, AnyTraits<float>())}, {"beta", ATTR_DESC(beta, AnyTraits<float>())}};
OUTPUT_MAP(Minimum) = {{0, OUTPUT_DESC(z)}};
// Sub
INPUT_MAP(Sub) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}};
ATTR_MAP(Sub) = {{"alpha", ATTR_DESC(alpha, AnyTraits<float>())}, {"beta", ATTR_DESC(beta, AnyTraits<float>())}};
#endif
// TopKV2
INPUT_MAP(TopKV2) = {
{1, INPUT_DESC(input)},
{2, INPUT_DESC(k)},
};
ATTR_MAP(TopKV2) = {{"T", ATTR_DESC(T, AnyTraits<GEType>())}, {"sorted", ATTR_DESC(sorted, AnyTraits<bool>())}};
OUTPUT_MAP(TopKV2) = {
{0, OUTPUT_DESC(values)},
{1, OUTPUT_DESC(indices)},
};
// TopK
INPUT_MAP(TopK) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(k)}};
ATTR_MAP(TopK) = {{"sorted", ATTR_DESC(sorted, AnyTraits<bool>())}};
OUTPUT_MAP(TopK) = {{0, OUTPUT_DESC(values)}, {1, OUTPUT_DESC(indices)}};
// Multiply
INPUT_MAP(Multiply) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(y)}};
@ -476,7 +443,7 @@ ATTR_MAP(Iou) = {{"mode", ATTR_DESC(mode, AnyTraits<std::string>())}};
OUTPUT_MAP(Iou) = {{0, OUTPUT_DESC(overlap)}};
// ResizeNearestNeighborD
INPUT_MAP(ResizeNearestNeighborD) = {{1, INPUT_DESC(images)}};
INPUT_MAP(ResizeNearestNeighborD) = {{1, INPUT_DESC(x)}};
ATTR_MAP(ResizeNearestNeighborD) = {
{"size", ATTR_DESC(size, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
{"align_corners", ATTR_DESC(align_corners, AnyTraits<bool>())}};
@ -506,17 +473,17 @@ ATTR_MAP(Relu6) = EMPTY_ATTR_MAP;
OUTPUT_MAP(Relu6) = {{0, OUTPUT_DESC(activations)}};
// Relu6Grad
INPUT_MAP(Relu6Grad) = {{1, INPUT_DESC(dy)}, {2, INPUT_DESC(y)}};
INPUT_MAP(Relu6Grad) = {{1, INPUT_DESC(features)}, {2, INPUT_DESC(gradients)}};
ATTR_MAP(Relu6Grad) = EMPTY_ATTR_MAP;
OUTPUT_MAP(Relu6Grad) = {{0, OUTPUT_DESC(z)}};
OUTPUT_MAP(Relu6Grad) = {{0, OUTPUT_DESC(backprops)}};
// ResizeBilinearGrad
INPUT_MAP(ResizeBilinearGrad) = {{1, INPUT_DESC(grads)}, {2, INPUT_DESC(original_image)}};
ATTR_MAP(ResizeBilinearGrad) = {{"align_corners", ATTR_DESC(align_corners, AnyTraits<bool>())}};
OUTPUT_MAP(ResizeBilinearGrad) = {{0, OUTPUT_DESC(y)}};
// ResizeBilinear
INPUT_MAP(ResizeBilinearD) = {{1, INPUT_DESC(images)}};
// ResizeBilinearD
INPUT_MAP(ResizeBilinearD) = {{1, INPUT_DESC(x)}};
ATTR_MAP(ResizeBilinearD) = {
{"size", ATTR_DESC(size, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
{"align_corners", ATTR_DESC(align_corners, AnyTraits<bool>())}};
@ -539,9 +506,9 @@ OUTPUT_MAP(NMSWithMask) = {
{0, OUTPUT_DESC(selected_boxes)}, {1, OUTPUT_DESC(selected_idx)}, {2, OUTPUT_DESC(selected_mask)}};
// Unpack
INPUT_MAP(Unpack) = {{1, INPUT_DESC(value)}};
INPUT_MAP(Unpack) = {{1, INPUT_DESC(x)}};
ATTR_MAP(Unpack) = {{"axis", ATTR_DESC(axis, AnyTraits<int>())}, {"num", ATTR_DESC(num, AnyTraits<int>())}};
DYN_OUTPUT_MAP(Unpack) = {{0, DYN_OUTPUT_DESC(output)}};
DYN_OUTPUT_MAP(Unpack) = {{0, DYN_OUTPUT_DESC(y)}};
// ScatterNdUpdate
INPUT_MAP(ScatterNdUpdate) = {{1, INPUT_DESC(var)}, {2, INPUT_DESC(indices)}, {3, INPUT_DESC(updates)}};
@ -574,8 +541,8 @@ INPUT_MAP(SigmoidCrossEntropyWithLogitsGrad) = {
ATTR_MAP(SigmoidCrossEntropyWithLogitsGrad) = EMPTY_ATTR_MAP;
OUTPUT_MAP(SigmoidCrossEntropyWithLogitsGrad) = {{0, OUTPUT_DESC(gradient)}};
// ScatterNd
INPUT_MAP(ScatterNdD) = {{1, INPUT_DESC(indices)}, {2, INPUT_DESC(updates)}};
// ScatterNdD
INPUT_MAP(ScatterNdD) = {{1, INPUT_DESC(indices)}, {2, INPUT_DESC(x)}};
INPUT_ATTR_MAP(ScatterNdD) = {
{3, ATTR_DESC(shape, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
ATTR_MAP(ScatterNdD) = EMPTY_ATTR_MAP;
@ -587,7 +554,7 @@ ATTR_MAP(PadD) = {{"paddings", ATTR_DESC(paddings, AnyTraits<std::vector<std::ve
OUTPUT_MAP(PadD) = {{0, OUTPUT_DESC(y)}};
// GatherNd
INPUT_MAP(GatherNd) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}};
INPUT_MAP(GatherNd) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(indices)}};
ATTR_MAP(GatherNd) = EMPTY_ATTR_MAP;
OUTPUT_MAP(GatherNd) = {{0, OUTPUT_DESC(y)}};
@ -612,13 +579,13 @@ ATTR_MAP(ROIAlignGrad) = {
// ArgMaxD
INPUT_MAP(ArgMaxD) = {{1, INPUT_DESC(x)}};
ATTR_MAP(ArgMaxD) = {{"axis", ATTR_DESC(dimension, AnyTraits<int>())},
{"output_type", ATTR_DESC(output_type, AnyTraits<GEType>())}};
{"output_type", ATTR_DESC(dtype, AnyTraits<GEType>())}};
OUTPUT_MAP(ArgMaxD) = {{0, OUTPUT_DESC(y)}};
// ArgMinD
INPUT_MAP(ArgMinD) = {{1, INPUT_DESC(x)}};
ATTR_MAP(ArgMinD) = {{"axis", ATTR_DESC(dimension, AnyTraits<int>())},
{"output_type", ATTR_DESC(output_type, AnyTraits<GEType>())}};
{"output_type", ATTR_DESC(dtype, AnyTraits<GEType>())}};
OUTPUT_MAP(ArgMinD) = {{0, OUTPUT_DESC(y)}};
// ArgMaxWithValue
@ -634,14 +601,14 @@ ATTR_MAP(ArgMinWithValue) = {{"axis", ATTR_DESC(dimension, AnyTraits<int>())},
OUTPUT_MAP(ArgMinWithValue) = {{0, OUTPUT_DESC(indice)}, {1, OUTPUT_DESC(values)}};
// ReduceAll
INPUT_MAP(ReduceAll) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(axis)}};
INPUT_MAP(ReduceAll) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(axes)}};
ATTR_MAP(ReduceAll) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
OUTPUT_MAP(ReduceAll) = {{0, OUTPUT_DESC(y)}};
// ReduceMeanD
INPUT_MAP(ReduceMeanD) = {{1, INPUT_DESC(x)}};
INPUT_ATTR_MAP(ReduceMeanD) = {
{2, ATTR_DESC(axis, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
{2, ATTR_DESC(axes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
ATTR_MAP(ReduceMeanD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
OUTPUT_MAP(ReduceMeanD) = {{0, OUTPUT_DESC(y)}};
@ -708,11 +675,12 @@ INPUT_MAP(BiasAddGrad) = {{1, INPUT_DESC(x)}};
ATTR_MAP(BiasAddGrad) = {{"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
OUTPUT_MAP(BiasAddGrad) = {{0, OUTPUT_DESC(y)}};
// maxpoolgrad
// MaxPoolGrad
INPUT_MAP(MaxPoolGrad) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}, {3, INPUT_DESC(grad)}};
ATTR_MAP(MaxPoolGrad) = {{"ksize", ATTR_DESC(ksize, AnyTraits<int>(), AnyTraits<std::vector<int64_t>>())},
{"strides", ATTR_DESC(strides, AnyTraits<int>(), AnyTraits<std::vector<int64_t>>())},
{"padding", ATTR_DESC(padding, AnyTraits<std::string>())}};
{"padding", ATTR_DESC(padding, AnyTraits<std::string>())},
{"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
OUTPUT_MAP(MaxPoolGrad) = {{0, OUTPUT_DESC(y)}};
// avgpoolgrad
@ -739,28 +707,34 @@ ATTR_MAP(Conv2D) = {
{"stride", ATTR_DESC(strides, "pad", AnyTraits<std::vector<int64_t>>())},
{"pad_list", ATTR_DESC(pads, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
{"dilation", ATTR_DESC(dilations, "pad", AnyTraits<std::vector<int64_t>>())},
{"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())},
{"group", ATTR_DESC(groups, AnyTraits<int>())}
};
OUTPUT_MAP(Conv2D) = {{0, OUTPUT_DESC(y)}};
// Conv2DBackpropInputD
INPUT_MAP(Conv2DBackpropInputD) = {{1, INPUT_DESC(out_backprop)}, {2, INPUT_DESC(filters)}};
INPUT_MAP(Conv2DBackpropInputD) = {{1, INPUT_DESC(out_backprop)}, {2, INPUT_DESC(filter)}};
INPUT_ATTR_MAP(Conv2DBackpropInputD) = {
{3, ATTR_DESC(input_sizes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
{3, ATTR_DESC(input_size, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
ATTR_MAP(Conv2DBackpropInputD) = {
{"pad_list", ATTR_DESC(pads, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
{"stride", ATTR_DESC(strides, "strides", AnyTraits<std::vector<int64_t>>())},
{"stride", ATTR_DESC(strides, "pad", AnyTraits<std::vector<int64_t>>())},
{"dilation", ATTR_DESC(dilations, "pad", AnyTraits<std::vector<int64_t>>())},
{"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())},
{"group", ATTR_DESC(groups, AnyTraits<int>())}
};
OUTPUT_MAP(Conv2DBackpropInputD) = {{0, OUTPUT_DESC(y)}};
// Conv2DBackpropFilterD
INPUT_MAP(Conv2DBackpropFilterD) = {{1, INPUT_DESC(out_backprop)}, {2, INPUT_DESC(x)}};
INPUT_ATTR_MAP(Conv2DBackpropFilterD) = {
{3, ATTR_DESC(filter_sizes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
{3, ATTR_DESC(filter_size, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
ATTR_MAP(Conv2DBackpropFilterD) = {
{"pad_list", ATTR_DESC(pads, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
{"stride", ATTR_DESC(strides, "strides", AnyTraits<std::vector<int64_t>>())},
{"stride", ATTR_DESC(strides, "pad", AnyTraits<std::vector<int64_t>>())},
{"dilation", ATTR_DESC(dilations, "pad", AnyTraits<std::vector<int64_t>>())},
{"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())},
{"group", ATTR_DESC(groups, AnyTraits<int>())}
};
OUTPUT_MAP(Conv2DBackpropFilterD) = {{0, OUTPUT_DESC(y)}};
@ -798,8 +772,8 @@ OUTPUT_MAP(DepthwiseConv2DBackpropFilterD) = {{0, OUTPUT_DESC(filter_grad)}};
// MatMul
INPUT_MAP(MatMul) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}};
ATTR_MAP(MatMul) = {{"transpose_a", ATTR_DESC(transpose_a, AnyTraits<bool>())},
{"transpose_b", ATTR_DESC(transpose_b, AnyTraits<bool>())}};
ATTR_MAP(MatMul) = {{"transpose_a", ATTR_DESC(transpose_x1, AnyTraits<bool>())},
{"transpose_b", ATTR_DESC(transpose_x2, AnyTraits<bool>())}};
OUTPUT_MAP(MatMul) = {{0, OUTPUT_DESC(y)}};
// Merge
@ -846,10 +820,10 @@ ATTR_MAP(Sub) = EMPTY_ATTR_MAP;
OUTPUT_MAP(Sub) = {{0, OUTPUT_DESC(y)}};
// SplitD
INPUT_MAP(SplitD) = {{1, INPUT_DESC(value)}};
INPUT_MAP(SplitD) = {{1, INPUT_DESC(x)}};
ATTR_MAP(SplitD) = {{"axis", ATTR_DESC(split_dim, AnyTraits<int>())},
{"output_num", ATTR_DESC(num_split, AnyTraits<int>())}};
DYN_OUTPUT_MAP(SplitD) = {{0, DYN_OUTPUT_DESC(output)}};
DYN_OUTPUT_MAP(SplitD) = {{0, DYN_OUTPUT_DESC(y)}};
// Neg
INPUT_MAP(Neg) = {{1, INPUT_DESC(x)}};
@ -876,12 +850,12 @@ OUTPUT_MAP(Pack) = {{0, OUTPUT_DESC(y)}};
// ConcatD
INPUT_MAP(ConcatD) = EMPTY_INPUT_MAP;
DYN_INPUT_MAP(ConcatD) = {{1, DYN_INPUT_DESC(input_values)}};
DYN_INPUT_MAP(ConcatD) = {{1, DYN_INPUT_DESC(x)}};
ATTR_MAP(ConcatD) = {
{"axis", ATTR_DESC(concat_dim, AnyTraits<int>())},
{"inputNums", ATTR_DESC(N, AnyTraits<int>())},
};
OUTPUT_MAP(ConcatD) = {{0, OUTPUT_DESC(output_data)}};
OUTPUT_MAP(ConcatD) = {{0, OUTPUT_DESC(y)}};
// Less
INPUT_MAP(Less) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}};
@ -916,14 +890,14 @@ OUTPUT_MAP(TanhGrad) = {{0, OUTPUT_DESC(z)}};
// ReduceMinD
INPUT_MAP(ReduceMinD) = {{1, INPUT_DESC(x)}};
INPUT_ATTR_MAP(ReduceMinD) = {
{2, ATTR_DESC(axis, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
{2, ATTR_DESC(axes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
ATTR_MAP(ReduceMinD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
OUTPUT_MAP(ReduceMinD) = {{0, OUTPUT_DESC(y)}};
// ReduceMaxD
INPUT_MAP(ReduceMaxD) = {{1, INPUT_DESC(x)}};
INPUT_ATTR_MAP(ReduceMaxD) = {
{2, ATTR_DESC(axis, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
{2, ATTR_DESC(axes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
ATTR_MAP(ReduceMaxD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
OUTPUT_MAP(ReduceMaxD) = {{0, OUTPUT_DESC(y)}};
@ -1008,11 +982,11 @@ INPUT_MAP(LessEqual) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}};
ATTR_MAP(LessEqual) = EMPTY_ATTR_MAP;
OUTPUT_MAP(LessEqual) = {{0, OUTPUT_DESC(y)}};
// LogSoftmax
INPUT_MAP(LogSoftmax) = {{1, INPUT_DESC(logits)}};
ATTR_MAP(LogSoftmax) = {
{"axis", ATTR_DESC(axis, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
OUTPUT_MAP(LogSoftmax) = {{0, OUTPUT_DESC(logsoftmax)}};
// LogSoftmaxV2
INPUT_MAP(LogSoftmaxV2) = {{1, INPUT_DESC(logits)}};
ATTR_MAP(LogSoftmaxV2) = {
{"axis", ATTR_DESC(axes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
OUTPUT_MAP(LogSoftmaxV2) = {{0, OUTPUT_DESC(logsoftmax)}};
// RandomChoiceWithMask
INPUT_MAP(RandomChoiceWithMask) = {{1, INPUT_DESC(x)}};
@ -1094,8 +1068,8 @@ OUTPUT_MAP(LayerNormGrad) = {{0, OUTPUT_DESC(pd_x)}, {1, OUTPUT_DESC(pd_gamma)},
// BatchMatMul
INPUT_MAP(BatchMatMul) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}};
ATTR_MAP(BatchMatMul) = {{"transpose_x1", ATTR_DESC(adj_x, AnyTraits<bool>())},
{"transpose_x2", ATTR_DESC(adj_y, AnyTraits<bool>())}};
ATTR_MAP(BatchMatMul) = {{"transpose_x1", ATTR_DESC(adj_x1, AnyTraits<bool>())},
{"transpose_x2", ATTR_DESC(adj_x2, AnyTraits<bool>())}};
OUTPUT_MAP(BatchMatMul) = {{0, OUTPUT_DESC(y)}};
// DropoutDoMask


@ -209,8 +209,8 @@ DECLARE_OP_USE_OUTPUT(Merge)
DECLARE_OP_ADAPTER(Switch)
DECLARE_OP_USE_OUTPUT(Switch)
DECLARE_OP_ADAPTER(TopKV2)
DECLARE_OP_USE_OUTPUT(TopKV2)
DECLARE_OP_ADAPTER(TopK)
DECLARE_OP_USE_OUTPUT(TopK)
DECLARE_OP_ADAPTER(RealDiv)
DECLARE_OP_USE_OUTPUT(RealDiv)
@ -260,8 +260,8 @@ DECLARE_OP_ADAPTER(Select)
DECLARE_OP_USE_OUTPUT(Select)
DECLARE_OP_ADAPTER(LessEqual)
DECLARE_OP_USE_OUTPUT(LessEqual)
DECLARE_OP_ADAPTER(LogSoftmax)
DECLARE_OP_USE_OUTPUT(LogSoftmax)
DECLARE_OP_ADAPTER(LogSoftmaxV2)
DECLARE_OP_USE_OUTPUT(LogSoftmaxV2)
DECLARE_OP_ADAPTER(TruncatedNormal)
DECLARE_OP_USE_OUTPUT(TruncatedNormal)
DECLARE_OP_ADAPTER(StridedSliceGrad)
@ -391,8 +391,8 @@ DECLARE_OP_ADAPTER(Sigmoid)
DECLARE_OP_USE_OUTPUT(Sigmoid)
DECLARE_OP_ADAPTER(SigmoidGrad)
DECLARE_OP_USE_OUTPUT(SigmoidGrad)
DECLARE_OP_ADAPTER(Softmax)
DECLARE_OP_USE_OUTPUT(Softmax)
DECLARE_OP_ADAPTER(SoftmaxV2)
DECLARE_OP_USE_OUTPUT(SoftmaxV2)
DECLARE_OP_ADAPTER(SoftmaxGrad)
DECLARE_OP_USE_OUTPUT(SoftmaxGrad)
DECLARE_OP_ADAPTER(Greater)