From 143ccea93f3bcc9b8d626d15fb26ace0bedd9b83 Mon Sep 17 00:00:00 2001
From: zhoufeng
Date: Mon, 6 Mar 2023 17:43:38 +0800
Subject: [PATCH] delete export macro in plugin

Signed-off-by: zhoufeng
---
 .../ccsrc/plugin/device/ascend/hal/device/ascend_data_queue.h | 2 +-
 .../device/ascend/hal/device/distribute/ascend_collective.h   | 2 +-
 .../plugin/device/ascend/hal/profiler/memory_profiling.h      | 4 ++--
 .../device/ascend/hal/profiler/parallel_strategy_profiling.h  | 4 ++--
 .../optimizer/enhancer/add_placeholder_for_dynamic_rnn.h      | 2 +-
 .../plugin/device/ascend/optimizer/ge/clip_by_norm_fission.h  | 2 +-
 .../ascend/optimizer/ir_fission/ascend_clip_by_norm_fission.h | 2 +-
 7 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/mindspore/ccsrc/plugin/device/ascend/hal/device/ascend_data_queue.h b/mindspore/ccsrc/plugin/device/ascend/hal/device/ascend_data_queue.h
index 5ccdcb38274..de22faec460 100644
--- a/mindspore/ccsrc/plugin/device/ascend/hal/device/ascend_data_queue.h
+++ b/mindspore/ccsrc/plugin/device/ascend/hal/device/ascend_data_queue.h
@@ -33,7 +33,7 @@
 
 namespace mindspore {
 namespace device {
-class BACKEND_EXPORT AscendDataQueueDynamic : public DataQueue {
+class AscendDataQueueDynamic : public DataQueue {
  public:
   explicit AscendDataQueueDynamic(const std::string &channel_name, const size_t capacity);
   ~AscendDataQueueDynamic() override = default;
diff --git a/mindspore/ccsrc/plugin/device/ascend/hal/device/distribute/ascend_collective.h b/mindspore/ccsrc/plugin/device/ascend/hal/device/distribute/ascend_collective.h
index 67387f792ca..ff1f2b4c1df 100644
--- a/mindspore/ccsrc/plugin/device/ascend/hal/device/distribute/ascend_collective.h
+++ b/mindspore/ccsrc/plugin/device/ascend/hal/device/distribute/ascend_collective.h
@@ -39,7 +39,7 @@
 ORIGIN_METHOD(GetDeviceId, int);
 ORIGIN_METHOD(CreateCommForGroup, bool, const std::string &, const std::vector<unsigned int> &);
 ORIGIN_METHOD(DestroyHcclComm, void);
-class BACKEND_EXPORT HcclCollectiveGroup {
+class HcclCollectiveGroup {
  public:
   HcclCollectiveGroup(HcclCollectiveGroup const &) = delete;
   HcclCollectiveGroup &operator=(const HcclCollectiveGroup &) = delete;
diff --git a/mindspore/ccsrc/plugin/device/ascend/hal/profiler/memory_profiling.h b/mindspore/ccsrc/plugin/device/ascend/hal/profiler/memory_profiling.h
index a2986a62d73..b32b9dea279 100644
--- a/mindspore/ccsrc/plugin/device/ascend/hal/profiler/memory_profiling.h
+++ b/mindspore/ccsrc/plugin/device/ascend/hal/profiler/memory_profiling.h
@@ -108,8 +108,8 @@ class MemoryProfiling {
     return instance;
   }
 
-  BACKEND_EXPORT std::shared_ptr<GraphMemory> AddGraphMemoryNode(uint32_t graph_id);
-  BACKEND_EXPORT std::shared_ptr<GraphMemory> GetGraphMemoryNode(uint32_t graph_id) const;
+  std::shared_ptr<GraphMemory> AddGraphMemoryNode(uint32_t graph_id);
+  std::shared_ptr<GraphMemory> GetGraphMemoryNode(uint32_t graph_id) const;
   void SetDeviceMemSize(uint64_t size) { device_mem_size_ = size; }
   bool MemoryToPB();
   void SaveMemoryProfiling();
diff --git a/mindspore/ccsrc/plugin/device/ascend/hal/profiler/parallel_strategy_profiling.h b/mindspore/ccsrc/plugin/device/ascend/hal/profiler/parallel_strategy_profiling.h
index efd203bca13..80220b0a284 100644
--- a/mindspore/ccsrc/plugin/device/ascend/hal/profiler/parallel_strategy_profiling.h
+++ b/mindspore/ccsrc/plugin/device/ascend/hal/profiler/parallel_strategy_profiling.h
@@ -31,10 +31,10 @@
 namespace profiler {
 namespace ascend {
 class ParallelStrategy {
  public:
-  BACKEND_EXPORT static std::shared_ptr<ParallelStrategy> &GetInstance();
+  static std::shared_ptr<ParallelStrategy> &GetInstance();
   ParallelStrategy() = default;
   ~ParallelStrategy() {}
-  BACKEND_EXPORT void DumpProfileParallelStrategy(const FuncGraphPtr &func_graph);
+  void DumpProfileParallelStrategy(const FuncGraphPtr &func_graph);
   void SaveParallelStrategyToFile();
   std::string GetParallelStrategyForReport();
diff --git a/mindspore/ccsrc/plugin/device/ascend/optimizer/enhancer/add_placeholder_for_dynamic_rnn.h b/mindspore/ccsrc/plugin/device/ascend/optimizer/enhancer/add_placeholder_for_dynamic_rnn.h
index f320d6d84c9..81c7b5f1374 100644
--- a/mindspore/ccsrc/plugin/device/ascend/optimizer/enhancer/add_placeholder_for_dynamic_rnn.h
+++ b/mindspore/ccsrc/plugin/device/ascend/optimizer/enhancer/add_placeholder_for_dynamic_rnn.h
@@ -22,7 +22,7 @@
 
 namespace mindspore {
 namespace opt {
-class BACKEND_EXPORT InsertPlaceholderForDynamicRNN : public PatternProcessPass {
+class InsertPlaceholderForDynamicRNN : public PatternProcessPass {
  public:
   explicit InsertPlaceholderForDynamicRNN(bool multigraph = true)
       : PatternProcessPass("add_placeholder_for_dynamic_rnn", multigraph) {}
diff --git a/mindspore/ccsrc/plugin/device/ascend/optimizer/ge/clip_by_norm_fission.h b/mindspore/ccsrc/plugin/device/ascend/optimizer/ge/clip_by_norm_fission.h
index b6efc63a010..a94c4a4554c 100644
--- a/mindspore/ccsrc/plugin/device/ascend/optimizer/ge/clip_by_norm_fission.h
+++ b/mindspore/ccsrc/plugin/device/ascend/optimizer/ge/clip_by_norm_fission.h
@@ -26,7 +26,7 @@
 // This pass will split `ClipByNorm` op to smaller ops, such as `square`, `sqrt`, `reducesum` to achieve same function
 namespace mindspore {
 namespace opt {
-class BACKEND_EXPORT ClipByNormFissionGe : public PatternProcessPass {
+class ClipByNormFissionGe : public PatternProcessPass {
  public:
   explicit ClipByNormFissionGe(bool multigraph = true) : PatternProcessPass("clip_by_norm_fission_ge", multigraph) {}
   ~ClipByNormFissionGe() override = default;
diff --git a/mindspore/ccsrc/plugin/device/ascend/optimizer/ir_fission/ascend_clip_by_norm_fission.h b/mindspore/ccsrc/plugin/device/ascend/optimizer/ir_fission/ascend_clip_by_norm_fission.h
index ca1d1580e77..28970287823 100644
--- a/mindspore/ccsrc/plugin/device/ascend/optimizer/ir_fission/ascend_clip_by_norm_fission.h
+++ b/mindspore/ccsrc/plugin/device/ascend/optimizer/ir_fission/ascend_clip_by_norm_fission.h
@@ -25,7 +25,7 @@
 
 // This pass will split `ClipByNorm` op to smaller ops, such as `square`, `sqrt`, `reducesum` to achieve same function
 namespace mindspore::opt {
-class BACKEND_EXPORT AscendClipByNormFission : public PatternProcessPass {
+class AscendClipByNormFission : public PatternProcessPass {
  public:
   explicit AscendClipByNormFission(bool multigraph = true)
       : PatternProcessPass("ascend_clip_by_norm_fission", multigraph) {}
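
Background on the macro being removed (a minimal sketch, assuming the usual
shared-library visibility pattern; BUILD_BACKEND_DLL below is a hypothetical
build flag, not necessarily MindSpore's, and the project's actual definition
lives in its own backend headers): BACKEND_EXPORT-style macros typically mark
symbols that must be visible across a shared-library boundary. Classes that
live entirely inside a dynamically loaded plugin are reached through dlopen()
rather than link-time symbol resolution, so the annotation is unnecessary
there and can simply be deleted, as this patch does.

    // Sketch of the typical definition behind an export macro of this kind.
    // Illustrative only; not the verbatim MindSpore definition.
    #if defined(_WIN32)
    #  if defined(BUILD_BACKEND_DLL)  // hypothetical: defined while building the DLL itself
    #    define BACKEND_EXPORT __declspec(dllexport)
    #  else
    #    define BACKEND_EXPORT __declspec(dllimport)
    #  endif
    #else
    // On ELF platforms, overrides -fvisibility=hidden for this one symbol.
    #  define BACKEND_EXPORT __attribute__((visibility("default")))
    #endif

When a plugin is compiled with -fvisibility=hidden, its classes stay internal
unless something explicitly exports them, so dropping the macro here shrinks
the plugin's exported-symbol surface without changing behavior.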