From 7570ee6db5b9b5e785366fa90d6465e5f2674c29 Mon Sep 17 00:00:00 2001
From: hangangqiang
Date: Tue, 12 Oct 2021 10:21:34 +0800
Subject: [PATCH] add attention op doc

---
 .../cpu/nnacl/fp32_sparse/matmul_sparse_x1_fp32.c | 3 ++-
 mindspore/core/ops/attention.h                    | 5 ++++-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/mindspore/ccsrc/backend/kernel_compiler/cpu/nnacl/fp32_sparse/matmul_sparse_x1_fp32.c b/mindspore/ccsrc/backend/kernel_compiler/cpu/nnacl/fp32_sparse/matmul_sparse_x1_fp32.c
index 57aa74a851a..67fedc4f34f 100644
--- a/mindspore/ccsrc/backend/kernel_compiler/cpu/nnacl/fp32_sparse/matmul_sparse_x1_fp32.c
+++ b/mindspore/ccsrc/backend/kernel_compiler/cpu/nnacl/fp32_sparse/matmul_sparse_x1_fp32.c
@@ -23,7 +23,7 @@ void MatMulSparse8x8(const float *a, const float *b, const uint32_t *nnz, const
                      const float *bias, ActType act_type, int out_stride) {
 #ifndef ENABLE_ARM64
   return;
-#endif
+#else
   // mul-acc
   for (int oc = 0; oc < 8; oc++) {
     uint32_t cur_nnz = nnz[oc];
@@ -50,4 +50,5 @@ void MatMulSparse8x8(const float *a, const float *b, const uint32_t *nnz, const
     *(c + 6 * out_stride + oc) = vacc2[2];
     *(c + 7 * out_stride + oc) = vacc2[3];
   }
+#endif
 }
diff --git a/mindspore/core/ops/attention.h b/mindspore/core/ops/attention.h
index 5e204f3b008..c7276233951 100644
--- a/mindspore/core/ops/attention.h
+++ b/mindspore/core/ops/attention.h
@@ -26,16 +26,19 @@ namespace mindspore {
 namespace ops {
 constexpr auto kNameAttention = "Attention";
-// Attention MultiHeadAttention
+/// \brief MultiHead-Attention op in MindIR.
 class MS_CORE_API Attention : public PrimitiveC {
  public:
+  /// \brief Constructor.
   Attention() : PrimitiveC(kNameAttention) {
     InitIOName(
       {"q", "k", "v", "weight_q", "weight_k", "weight_v", "weight_o", "bias_q", "bias_k", "bias_v", "bias_o", "mask"},
       {"output"});
   }
+  /// \brief Destructor.
   ~Attention() override = default;
   MS_DECLARE_PARENT(Attention, PrimitiveC);
+  /// \brief Initialize Attention op.
   void Init() {}
 };
 } // namespace ops
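
Note on the first hunk (illustration only, not part of the patch): with the old `#ifndef ENABLE_ARM64 ... return; #endif` guard, non-ARM64 builds only skipped the kernel at runtime, while the ARM64-specific body that follows was still compiled. Changing `#endif` to `#else` and closing the block at the end of the function keeps that body out of non-ARM64 compilation entirely. The following is a minimal, self-contained C sketch of the same guard pattern; the macro ENABLE_ARM64_DEMO, DemoKernel, and its body are made-up stand-ins, not MindSpore code.

#include <stdio.h>

/* Toggle the "target-specific" branch at build time,
 * e.g. gcc -DENABLE_ARM64_DEMO demo.c */
void DemoKernel(const float *a, float *c, int n) {
#ifndef ENABLE_ARM64_DEMO
  /* Generic build: nothing after this branch is compiled, so
   * target-specific intrinsics cannot break the build. */
  (void)a;
  (void)c;
  (void)n;
  return;
#else
  /* Target build: the real kernel body lives only in this branch. */
  for (int i = 0; i < n; i++) {
    c[i] = a[i] * 2.0f;
  }
#endif
}

int main(void) {
  float a[4] = {1.0f, 2.0f, 3.0f, 4.0f};
  float c[4] = {0.0f, 0.0f, 0.0f, 0.0f};
  DemoKernel(a, c, 4);
  printf("%f %f %f %f\n", c[0], c[1], c[2], c[3]);
  return 0;
}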