!24715 add attention op doc

Merge pull request !24715 from hangq/fuzz
i-robot 2021-10-12 06:12:03 +00:00 committed by Gitee
commit b289f70cb0
2 changed files with 6 additions and 2 deletions

@@ -23,7 +23,7 @@ void MatMulSparse8x8(const float *a, const float *b, const uint32_t *nnz, const
                      const float *bias, ActType act_type, int out_stride) {
 #ifndef ENABLE_ARM64
   return;
-#endif
+#else
   // mul-acc
   for (int oc = 0; oc < 8; oc++) {
     uint32_t cur_nnz = nnz[oc];
@@ -50,4 +50,5 @@ void MatMulSparse8x8(const float *a, const float *b, const uint32_t *nnz, const
     *(c + 6 * out_stride + oc) = vacc2[2];
     *(c + 7 * out_stride + oc) = vacc2[3];
   }
+#endif
 }
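
The hunk above fixes the guard for non-ARM64 builds: previously the function took an early return, but the NEON-based multiply-accumulate body after #endif still had to compile on every platform; with the #else/#endif pair the body is excluded by the preprocessor entirely. A minimal sketch of the same pattern, using a hypothetical kernel and standard arm_neon.h intrinsics (none of this is the actual nnacl code):

#ifdef ENABLE_ARM64
#include <arm_neon.h>
#endif

// Hypothetical 4-lane scale kernel illustrating the guard pattern.
void Scale4(const float *a, float scale, float *out) {
#ifndef ENABLE_ARM64
  // Non-ARM64 builds: no NEON, so bail out; the body below is never compiled.
  return;
#else
  float32x4_t va = vld1q_f32(a);            // load 4 floats
  float32x4_t vr = vmulq_n_f32(va, scale);  // multiply each lane by scale
  vst1q_f32(out, vr);                       // store 4 floats
#endif
}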

@@ -26,16 +26,19 @@
 namespace mindspore {
 namespace ops {
 constexpr auto kNameAttention = "Attention";
-// Attention MultiHeadAttention
+/// \brief MultiHead-Attention op in MindIR.
 class MS_CORE_API Attention : public PrimitiveC {
  public:
+  /// \brief Constructor.
   Attention() : PrimitiveC(kNameAttention) {
     InitIOName(
       {"q", "k", "v", "weight_q", "weight_k", "weight_v", "weight_o", "bias_q", "bias_k", "bias_v", "bias_o", "mask"},
       {"output"});
   }
+  /// \brief Destructor.
   ~Attention() override = default;
   MS_DECLARE_PARENT(Attention, PrimitiveC);
+  /// \brief Initialize Attention op.
   void Init() {}
 };
 }  // namespace ops
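
This hunk adds the Doxygen doc comments named in the commit title. For context, a hypothetical usage sketch based only on the interface visible above; the include path is an assumption, and Init() is a no-op because the op declares no attributes:

#include "ops/attention.h"  // assumed header path for the class above

int main() {
  // The constructor registers the twelve inputs
  // (q, k, v, per-projection weights and biases, mask) and the single output.
  mindspore::ops::Attention attention;
  attention.Init();  // sets no attributes for this op
  return 0;
}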