!49811 fix Ascend int8 Clip & BatchNorm precision issue

Merge pull request !49811 from yeyunpeng2020/master_ci
i-robot 2023-03-06 06:26:28 +00:00 committed by Gitee
commit 02c441a9e4
4 changed files with 17 additions and 4 deletions

File: tools/converter/adapter/acl/src/acl_pass_impl.cc

@@ -48,6 +48,7 @@
 #include "tools/optimizer/common/pass_manager_extends.h"
 #include "tools/optimizer/graph/clip_convert_activation_pass.h"
 #include "tools/optimizer/fusion/transpose_fusion.h"
+#include "tools/optimizer/fusion/batchnorm_to_scale_fusion.h"
 #include "tools/converter/quantizer/full_quant_quantizer.h"
 #include "tools/converter/quantizer/insert_quant_node_manager.h"
 #include "tools/converter/parser/unify_format.h"
@@ -768,6 +769,8 @@ STATUS AclPassImpl::PreQuantization(const FuncGraphPtr &func_graph) {
   auto fusion_pm = std::make_shared<opt::LitePassManager>("anf fusion pass manager", false);
   CHECK_NULL_RETURN(fusion_pm);
   std::vector<opt::PassPtr> fusions{
+    std::make_shared<opt::ClipConvertActivationPass>(true),
+    std::make_shared<opt::BatchNormToScaleFusion>(),
     std::make_shared<opt::ConvBiasaddFusion>(),
     std::make_shared<opt::ConvBatchNormFusion>(param_->fmk_type),
     std::make_shared<opt::ConvScaleFusion>(param_->fmk_type),

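Why these two passes run before full quantization: Clip(x, min = 0, max = 6) is mathematically identical to ReLU6, and Clip(x, min = 0, max = FLT_MAX) to ReLU, so rewriting only those Clips (the new only_relu = true argument) loses nothing; folding BatchNorm into a Scale presumably lets the quantizer calibrate the folded parameters directly. A standalone sketch of the Clip equivalences (illustration only, not repo code):

#include <algorithm>
#include <cassert>
#include <cfloat>

float Clip(float x, float lo, float hi) { return std::min(std::max(x, lo), hi); }
float Relu(float x) { return std::max(x, 0.0f); }
float Relu6(float x) { return std::min(std::max(x, 0.0f), 6.0f); }

int main() {
  for (float x : {-3.0f, 0.0f, 2.5f, 7.0f}) {
    assert(Clip(x, 0.0f, 6.0f) == Relu6(x));    // Clip(0, 6) == ReLU6
    assert(Clip(x, 0.0f, FLT_MAX) == Relu(x));  // Clip(0, +inf) == ReLU
  }
  return 0;
}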
File: tools/converter/quantizer/full_quant_quantizer.cc

@@ -514,7 +514,7 @@ void FullQuantQuantizer::InitAscendConfig() {
   init_param_.activation_symmetric_ = false;
   init_param_.weight_channel_symmetric_ = true;
   init_param_.weight_layer_symmetric_ = false;
-  support_int8_ops_ = {prim::kPrimConv2DFusion, prim::kPrimFullConnection};
+  support_int8_ops_ = {prim::kPrimConv2DFusion};
   per_channel_ops_ = {prim::kPrimConv2DFusion};
 }

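Dropping prim::kPrimFullConnection from support_int8_ops_ means FullConnection layers on Ascend are no longer int8-quantized and keep their original precision; only Conv2DFusion remains, still quantized per channel. A minimal sketch, assuming support_int8_ops_ is consulted as a whitelist (the helper below is hypothetical, not the quantizer's real API):

#include <algorithm>
#include <string>
#include <vector>

// Hypothetical whitelist check: only ops listed in support_int8_ops are int8-quantized.
bool IsSupportedInt8Op(const std::vector<std::string> &support_int8_ops, const std::string &op_type) {
  return std::find(support_int8_ops.begin(), support_int8_ops.end(), op_type) != support_int8_ops.end();
}

// After this change, only "Conv2DFusion" would pass the check on Ascend:
//   IsSupportedInt8Op({"Conv2DFusion"}, "FullConnection") -> false (kept at original precision)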
File: tools/optimizer/graph/clip_convert_activation_pass.cc

@@ -80,15 +80,20 @@ bool ClipConvertActivationPass::Run(const FuncGraphPtr &graph) {
       max = *reinterpret_cast<float *>(max_tensor_info->data_c());
     }
   }
+  bool is_relu6 = min == 0 && max == kValueThreshold6;
+  bool is_relu = lite::FloatCompare(min) && lite::FloatCompare(max, FLT_MAX);
+  if (only_relu_ && !(is_relu6 || is_relu)) {
+    return false;
+  }
   auto manager = graph->manager();
   MS_ASSERT(manager != nullptr);
   auto primitive_c = std::make_shared<mindspore::ops::Activation>();
   MS_CHECK_TRUE_MSG(primitive_c != nullptr, false, "primitive_c is nullptr");
   primitive_c->Init(0, min, max, mindspore::HARD_TANH);
-  if (min == 0 && max == kValueThreshold6) {
+  if (is_relu6) {
     primitive_c->set_activation_type(mindspore::RELU6);
   }
-  if (lite::FloatCompare(min) && lite::FloatCompare(max, FLT_MAX)) {
+  if (is_relu) {
     primitive_c->set_activation_type(mindspore::RELU);
   }
   auto primitive = primitive_c->GetPrim();

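The refactor hoists both pattern tests into is_relu6/is_relu so the pass can return early when only_relu_ is set and the Clip matches neither pattern; any other (min, max) pair still becomes HARD_TANH with those bounds. A condensed standalone sketch of the decision (the enum is illustrative; lite::FloatCompare presumably does tolerant float equality, approximated here with ==, and kValueThreshold6 == 6 is assumed):

#include <cfloat>

enum ActType { HARD_TANH, RELU, RELU6, SKIP };  // illustrative, not MindSpore's enum

ActType DecideActivation(float min, float max, bool only_relu) {
  const bool is_relu6 = (min == 0.0f && max == 6.0f);
  const bool is_relu = (min == 0.0f && max == FLT_MAX);
  if (only_relu && !(is_relu6 || is_relu)) return SKIP;  // leave the Clip node untouched
  if (is_relu6) return RELU6;
  if (is_relu) return RELU;
  return HARD_TANH;  // generic clip: hard-tanh with (min, max) bounds
}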
File: tools/optimizer/graph/clip_convert_activation_pass.h

@@ -22,9 +22,14 @@
 namespace mindspore::opt {
 class ClipConvertActivationPass : public Pass {
  public:
-  ClipConvertActivationPass() : Pass("clip_convert_activation_pass") {}
+  explicit ClipConvertActivationPass(bool only_relu = false) : Pass("clip_convert_activation_pass") {
+    only_relu_ = only_relu;
+  }
   ~ClipConvertActivationPass() override = default;
   bool Run(const FuncGraphPtr &graph) override;
+
+ private:
+  bool only_relu_ = false;
 };
 }  // namespace mindspore::opt
 #endif  // MINDSPORE_LITE_TOOLS_OPTIMIZER_GRAPH_CLIP_CONVERT_ACTIVATION_PASS_H_
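
With the explicit constructor, existing call sites that default-construct the pass keep the old convert-every-Clip behavior, while the Ascend pre-quantization path above opts into the restricted mode; explicit also blocks accidental implicit conversion from bool. A usage sketch (assuming only the header above):

#include <memory>
#include "tools/optimizer/graph/clip_convert_activation_pass.h"

void Example() {
  // Default: converts any Clip to an activation (previous behavior).
  auto generic = std::make_shared<mindspore::opt::ClipConvertActivationPass>();
  // only_relu = true: converts only Clips equivalent to ReLU / ReLU6 (as in PreQuantization above).
  auto relu_only = std::make_shared<mindspore::opt::ClipConvertActivationPass>(true);
}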