Merge pull request !31504 from liujunzhu/master
commit e06649fc51
Author: i-robot (committed by Gitee)
Date: 2022-03-19 07:37:49 +00:00
12 changed files with 20 additions and 14 deletions


@@ -201,6 +201,7 @@ Status MultiProcess::ReceiveMsg(const CreateBufferCall &create_buffer_call) {
       msg_len = receive_msg_->msg_total_len;
       msg_buffer = create_buffer_call(msg_len);
     }
+    MS_EXCEPTION_IF_NULL(msg_buffer);
     auto ret = memcpy_s(msg_buffer + cur_offset, msg_len - cur_offset, shmat_data_addr_, receive_msg_->msg_len);
     if (ret != EOK) {
       MS_LOG(INFO) << "memcpy_s failed, ret = " << ret;
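
Note: the added MS_EXCEPTION_IF_NULL keeps a failed allocation from reaching memcpy_s. A minimal standalone sketch of the same guard, with std::memcpy and a plain exception standing in for the securec memcpy_s and the MindSpore macro (the function name here is illustrative):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <functional>
    #include <stdexcept>

    using CreateBufferCall = std::function<uint8_t *(std::size_t)>;

    // Copy len bytes from src into a buffer obtained from the callback,
    // refusing to write through a null result if the allocation failed.
    void ReceiveInto(const CreateBufferCall &create_buffer_call, const uint8_t *src, std::size_t len) {
      uint8_t *msg_buffer = create_buffer_call(len);
      if (msg_buffer == nullptr) {  // the check MS_EXCEPTION_IF_NULL performs
        throw std::runtime_error("create_buffer_call returned null");
      }
      std::memcpy(msg_buffer, src, len);
    }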


@@ -39,7 +39,7 @@ class AscendMemAdapter {
   uint8_t *MallocStaticDevMem(size_t size, const std::string &tag = "");
   uint8_t *MallocDynamicDevMem(size_t size, const std::string &tag = "");
-  bool FreeStaticDevMem(void *devPtr) const { return true; }
+  bool FreeStaticDevMem(void *) const { return true; }
   void ResetDynamicMemory();
   [[nodiscard]] uint64_t FreeDevMemSize() const { return static_mem_offset_ - max_dynamic_mem_offset_; }
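
Note: dropping the parameter name on a stub that ignores its argument silences unused-parameter warnings without changing the signature. A sketch of the idiom (the class is a stand-in, not the real AscendMemAdapter):

    class MemAdapterStub {
     public:
      // The pointer is part of the interface but unused in this trivial
      // implementation, so it is left unnamed to avoid -Wunused-parameter.
      bool FreeStaticDevMem(void *) const { return true; }
    };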


@@ -41,7 +41,7 @@ void AscendMemoryPool::Init() {
     if (task_sink) {
       SetMemAllocUintSize(ASCEND_COMMON_POOL_ALLOC_UNIT_SIZE_FOR_GRAPH, ASCEND_COMMON_POOL_ALLOC_UNIT_SIZE_FOR_GRAPH);
     } else {
-      SetMemAllocUintSize(total_size * kCommonMemoryRatio, total_size * kPersistMemoryRatio);
+      SetMemAllocUintSize(FloatToSize(total_size * kCommonMemoryRatio), FloatToSize(total_size * kPersistMemoryRatio));
     }
   }
 }
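
Note: wrapping the products in FloatToSize makes the float-to-size_t narrowing explicit instead of implicit. MindSpore's helper is not shown in this diff; a plausible standalone equivalent (an assumption, not the project's actual implementation):

    #include <cstddef>
    #include <stdexcept>

    // Hypothetical stand-in for FloatToSize: convert a non-negative
    // floating-point byte count to size_t, rejecting negative input.
    inline std::size_t FloatToSize(float value) {
      if (value < 0.0f) {
        throw std::out_of_range("FloatToSize: negative value");
      }
      return static_cast<std::size_t>(value);
    }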


@@ -32,7 +32,7 @@ int MpiPycc::GetDeviceID() { return GetDeviceId(); }
 int MpiPycc::GetRankId(const std::string &group) { return GetRankIdByGroup(group); }
 int MpiPycc::GetRankSize(const std::string &group) { return GetGroupSize(group); }
 void MpiPycc::CreateGroup(const std::string &group, const std::vector<unsigned int> &ranks) {
-  CreateCommForGroup(group, ranks);
+  (void)CreateCommForGroup(group, ranks);
 }
 // cppcheck-suppress syntaxError
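
Note: the (void) cast marks the discarded return value of CreateCommForGroup as an intentional discard, which satisfies linters that flag ignored results. A sketch, with an illustrative stand-in for the real function:

    // Stand-in for CreateCommForGroup, which returns a status value.
    bool CreateCommForGroupStub() { return true; }

    void CreateGroup() {
      // The caller has no use for the status here; the cast documents
      // that ignoring it is deliberate rather than an oversight.
      (void)CreateCommForGroupStub();
    }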
// cppcheck-suppress syntaxError


@@ -99,7 +99,7 @@ bool AicpuExtInfoHandler::ParseExtShapeType(AicpuExtInfo *aicpu_ext_info) {
   auto type = reinterpret_cast<const int32_t *>(aicpu_ext_info->infoMsg);
-  if (*type != unknown_type_) {
+  if (*type != static_cast<int32_t>(unknown_type_)) {
     MS_LOG(ERROR) << "Node:" << node_name_ << " parse ext shape type failed as need:" << unknown_type_
                   << " but got:" << *type;
     return false;
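
Note: the static_cast aligns the two sides of the comparison: *type is a signed int32_t read from the message, while unknown_type_ has a different type (likely an enum or unsigned field), so comparing them directly can draw sign-compare or implicit-conversion warnings. An illustrative sketch with assumed types:

    #include <cstdint>

    enum class ShapeType : uint32_t { kKnownShape = 0, kUnknownShape = 3 };

    // Cast the enum to the message's signed type before comparing, so the
    // compiler need not apply an implicit (warned-about) conversion.
    bool TypeMatches(int32_t type_from_msg, ShapeType expected) {
      return type_from_msg == static_cast<int32_t>(expected);
    }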
@@ -230,8 +230,9 @@ bool AicpuExtInfoHandler::UpdateShapeAndType(const std::vector<int64_t> &shape,
   return true;
 }
 
-void AicpuExtInfoHandler::GetShapeAndType(NotNull<const AicpuShapeAndType *> shape_and_type,
-                                          NotNull<std::vector<int64_t> *> shape, NotNull<TypeId *> data_type) {
+void AicpuExtInfoHandler::GetShapeAndType(const NotNull<const AicpuShapeAndType *> &shape_and_type,
+                                          const NotNull<std::vector<int64_t> *> &shape,
+                                          const NotNull<TypeId *> &data_type) {
   for (int64_t tmpDim : shape_and_type->dims) {
     if (tmpDim == kDimEndFlag) {
       break;
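
Note: GetShapeAndType now takes its NotNull wrappers by const reference rather than by value. A minimal sketch of the shape of that change; the NotNull below is a simplified stand-in for MindSpore's wrapper, and the stated motivation (lint rules against passing non-trivial class types by value) is an inference:

    #include <cstdint>
    #include <vector>

    // Simplified stand-in for MindSpore's NotNull pointer wrapper.
    template <typename T>
    class NotNull {
     public:
      explicit NotNull(T ptr) : ptr_(ptr) {}
      T operator->() const { return ptr_; }
      T get() const { return ptr_; }

     private:
      T ptr_;
    };

    // Taking the wrapper by const reference avoids constructing a copy at
    // each call and satisfies linters that flag by-value class parameters.
    void FillShape(const NotNull<std::vector<int64_t> *> &shape) {
      shape->push_back(1);
    }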


@@ -69,8 +69,8 @@ class AicpuExtInfoHandler {
   [[nodiscard]] static bool UpdateShapeAndType(const std::vector<int64_t> &shape,
                                                NotNull<AicpuShapeAndType *> shape_and_type);
 
-  static void GetShapeAndType(NotNull<const AicpuShapeAndType *> shape_and_type, NotNull<std::vector<int64_t> *> shape,
-                              NotNull<TypeId *> data_type);
+  static void GetShapeAndType(const NotNull<const AicpuShapeAndType *> &shape_and_type,
+                              const NotNull<std::vector<int64_t> *> &shape, const NotNull<TypeId *> &data_type);
 
   const std::string node_name_;
   const uint32_t input_num_;


@@ -23,7 +23,7 @@ namespace mindspore::kernel {
 using ANodeFusionDataTypeMap = std::map<const AnfNodePtr, tbe::FusionDataType>;
 
 class FusionBuildTbeJsonCreator : public TbeJsonCreator {
  public:
-  FusionBuildTbeJsonCreator() : optional_index_(0) {}
+  FusionBuildTbeJsonCreator() : TbeJsonCreator(), optional_index_(0) {}
   ~FusionBuildTbeJsonCreator() override = default;
   bool GenJson(const FusionScopeInfo &fusion_scope_info, nlohmann::json *fusion_json) override;
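
Note: spelling out TbeJsonCreator() in the initializer list makes the base-class construction explicit; some static analyzers (cppcheck among them) warn when a derived constructor initializes members but leaves the base implicit. A sketch with stand-in class names:

    #include <cstddef>

    class TbeJsonCreatorStub {  // stand-in for TbeJsonCreator
     public:
      TbeJsonCreatorStub() = default;
      virtual ~TbeJsonCreatorStub() = default;
    };

    class FusionCreatorStub : public TbeJsonCreatorStub {
     public:
      // Naming the base class alongside the member keeps the whole
      // initialization order visible and quiets the analyzer.
      FusionCreatorStub() : TbeJsonCreatorStub(), optional_index_(0) {}

     private:
      std::size_t optional_index_;
    };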


@@ -16,6 +16,8 @@
 #ifndef MINDSPORE_CCSRC_BACKEND_OPTIMIZER_ASCEND_BUFFER_FUSION_PASS_BATCHMATMUL_DROPOUTDOMASKV3_FUSION_PASS_H_
 #define MINDSPORE_CCSRC_BACKEND_OPTIMIZER_ASCEND_BUFFER_FUSION_PASS_BATCHMATMUL_DROPOUTDOMASKV3_FUSION_PASS_H_
+#include <utility>
+#include "utils/hash_set.h"
 #include "plugin/device/ascend/optimizer/buffer_fusion/fusion_base_pass.h"
 #include "ir/anf.h"
@@ -30,7 +32,7 @@ namespace opt {
 class BatchMatmulDropoutDoMaskV3FusionPass : public FusionBasePass {
  public:
   explicit BatchMatmulDropoutDoMaskV3FusionPass(FusionIdAllocatorPtr idAllocator)
-      : FusionBasePass("BatchMatmulDropoutDoMaskV3FusionPass", idAllocator) {}
+      : FusionBasePass("BatchMatmulDropoutDoMaskV3FusionPass", std::move(idAllocator)) {}
   ~BatchMatmulDropoutDoMaskV3FusionPass() override = default;
   void MatchSingleFusionPattern(const session::KernelGraph &kernel_graph, FusedNodeRecord *candidate_fusion) override;
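
Note: idAllocator is taken by value, so forwarding it with std::move transfers the shared_ptr into the base class instead of copying it, saving an atomic refcount round trip; the new #include <utility> supplies std::move. A sketch with stand-in types:

    #include <memory>
    #include <string>
    #include <utility>

    struct FusionIdAllocator {};
    using FusionIdAllocatorPtr = std::shared_ptr<FusionIdAllocator>;

    class FusionBasePassStub {  // stand-in for FusionBasePass
     public:
      FusionBasePassStub(std::string name, FusionIdAllocatorPtr alloc)
          : name_(std::move(name)), alloc_(std::move(alloc)) {}

     private:
      std::string name_;
      FusionIdAllocatorPtr alloc_;
    };

    class ExamplePass : public FusionBasePassStub {
     public:
      // Moving the by-value shared_ptr avoids an extra atomic
      // increment/decrement of its reference count.
      explicit ExamplePass(FusionIdAllocatorPtr idAllocator)
          : FusionBasePassStub("ExamplePass", std::move(idAllocator)) {}
    };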


@@ -16,6 +16,8 @@
 #ifndef MINDSPORE_CCSRC_BACKEND_OPTIMIZER_ASCEND_BUFFER_FUSION_PASS_DEPTHWISECONV_ELTWISE_FUSION_PASS_H_
 #define MINDSPORE_CCSRC_BACKEND_OPTIMIZER_ASCEND_BUFFER_FUSION_PASS_DEPTHWISECONV_ELTWISE_FUSION_PASS_H_
+#include <utility>
+#include "utils/hash_set.h"
 #include "plugin/device/ascend/optimizer/buffer_fusion/fusion_base_pass.h"
 #include "ir/anf.h"
@@ -30,7 +32,7 @@ namespace opt {
 class DepthwiseConvEltwiseFusionPass : public FusionBasePass {
  public:
   explicit DepthwiseConvEltwiseFusionPass(FusionIdAllocatorPtr idAllocator)
-      : FusionBasePass("DepthwiseConvEltwiseFusionPass", idAllocator) {}
+      : FusionBasePass("DepthwiseConvEltwiseFusionPass", std::move(idAllocator)) {}
   ~DepthwiseConvEltwiseFusionPass() override = default;
   void MatchSingleFusionPattern(const session::KernelGraph &kernel_graph, FusedNodeRecord *candidate_fusion) override;


@@ -25,7 +25,7 @@ namespace mindspore {
 namespace opt {
 class BnSplit : public PatternProcessPass {
  public:
-  explicit BnSplit(string name = "bn_split", bool multigraph = true) : PatternProcessPass(name, multigraph) {}
+  explicit BnSplit(const string &name = "bn_split", bool multigraph = true) : PatternProcessPass(name, multigraph) {}
   ~BnSplit() override = default;
   const BaseRef DefinePattern() const override;
   const AnfNodePtr Process(const FuncGraphPtr &, const AnfNodePtr &, const EquivPtr &) const override;
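
Note: changing the parameter from string to const string& avoids copying the name at every construction while keeping the default literal usable (a const reference may bind to the temporary a default argument creates). A sketch with a stand-in base class:

    #include <string>

    class PatternProcessPassStub {  // stand-in for PatternProcessPass
     public:
      PatternProcessPassStub(const std::string &name, bool multigraph)
          : name_(name), multigraph_(multigraph) {}

     private:
      std::string name_;
      bool multigraph_;
    };

    class BnSplitStub : public PatternProcessPassStub {
     public:
      // The const reference binds to the default literal and to caller
      // strings alike, deferring the single copy to the base's member.
      explicit BnSplitStub(const std::string &name = "bn_split", bool multigraph = true)
          : PatternProcessPassStub(name, multigraph) {}
    };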


@@ -27,7 +27,7 @@
 namespace mindspore {
 namespace opt {
 namespace {
-bool NeedFusion(const AnfNodePtr &sum_anf, const AnfNodePtr &input0, const AnfNodePtr &input1) {
+bool NeedFusion(const AnfNodePtr &sum_anf, const AnfNodePtr &input0, const AnfNodePtr &) {
   if (sum_anf == nullptr || !sum_anf->isa<CNode>()) {
     MS_LOG(WARNING) << "Matched ReduceSum is not a CNode!";
     return false;


@@ -49,7 +49,7 @@ class InputToOutputRegistry {
   void Register(const InputToOutputRegister &reg);
   void Register(
     const std::string &op_name, const std::vector<size_t> &input_indices,
-    const PreCheckFunc &pre_check_func = [](const CNodePtr &node) { return true; });
+    const PreCheckFunc &pre_check_func = [](const CNodePtr &) { return true; });
   bool GetRegisterByOpName(const std::string &op_name, InputToOutputRegister *reg) const;
 
  private:
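
Note: the same unused-parameter cleanup applied to a lambda: the default pre-check approves every node, so its parameter can be unnamed. A sketch with simplified stand-in types (the real PreCheckFunc takes MindSpore's CNodePtr):

    #include <functional>
    #include <memory>

    struct CNode {};  // stand-in for the real CNode
    using CNodePtr = std::shared_ptr<CNode>;
    using PreCheckFunc = std::function<bool(const CNodePtr &)>;

    // A default predicate that approves every node; the unnamed lambda
    // parameter documents that the argument is intentionally unused.
    const PreCheckFunc kAlwaysPass = [](const CNodePtr &) { return true; };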