!49845 delete export macro in plugin

Merge pull request !49845 from zhoufeng/delete-export-macro-in-plugin
i-robot 2023-03-08 06:13:25 +00:00 committed by Gitee
commit 5279728e6c
7 changed files with 9 additions and 9 deletions
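
For context, BACKEND_EXPORT is a symbol-visibility (export) macro, and the classes touched by this change appear to live entirely inside the Ascend plugin rather than in the backend's exported interface, so the annotation adds nothing. The sketch below shows how a visibility macro of this kind is commonly defined; it is an illustration only, and the guard name BACKEND_DLL as well as the exact form of MindSpore's real definition are assumptions, not taken from this change.

// Illustrative definition of an export/visibility macro (not the exact MindSpore source).
#if defined(_WIN32)
#  ifdef BACKEND_DLL                     // set while building the backend DLL itself (assumed name)
#    define BACKEND_EXPORT __declspec(dllexport)
#  else
#    define BACKEND_EXPORT __declspec(dllimport)
#  endif
#else
#  define BACKEND_EXPORT __attribute__((visibility("default")))  // GCC/Clang: mark the symbol as exported
#endif

Under hidden-by-default visibility builds, only declarations carrying such a macro are exported from the shared object; dropping it from plugin-internal classes keeps their symbols local to the plugin without changing behavior.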

View File

@@ -33,7 +33,7 @@
 namespace mindspore {
 namespace device {
-class BACKEND_EXPORT AscendDataQueueDynamic : public DataQueue {
+class AscendDataQueueDynamic : public DataQueue {
 public:
 explicit AscendDataQueueDynamic(const std::string &channel_name, const size_t capacity);
 ~AscendDataQueueDynamic() override = default;

View File

@@ -39,7 +39,7 @@ ORIGIN_METHOD(GetDeviceId, int);
 ORIGIN_METHOD(CreateCommForGroup, bool, const std::string &, const std::vector<unsigned int> &);
 ORIGIN_METHOD(DestroyHcclComm, void);
-class BACKEND_EXPORT HcclCollectiveGroup {
+class HcclCollectiveGroup {
 public:
 HcclCollectiveGroup(HcclCollectiveGroup const &) = delete;
 HcclCollectiveGroup &operator=(const HcclCollectiveGroup &) = delete;

View File

@@ -108,8 +108,8 @@ class MemoryProfiling {
 return instance;
 }
-BACKEND_EXPORT std::shared_ptr<GraphMemory> AddGraphMemoryNode(uint32_t graph_id);
-BACKEND_EXPORT std::shared_ptr<GraphMemory> GetGraphMemoryNode(uint32_t graph_id) const;
+std::shared_ptr<GraphMemory> AddGraphMemoryNode(uint32_t graph_id);
+std::shared_ptr<GraphMemory> GetGraphMemoryNode(uint32_t graph_id) const;
 void SetDeviceMemSize(uint64_t size) { device_mem_size_ = size; }
 bool MemoryToPB();
 void SaveMemoryProfiling();

View File

@@ -31,10 +31,10 @@ namespace profiler {
 namespace ascend {
 class ParallelStrategy {
 public:
-BACKEND_EXPORT static std::shared_ptr<ParallelStrategy> &GetInstance();
+static std::shared_ptr<ParallelStrategy> &GetInstance();
 ParallelStrategy() = default;
 ~ParallelStrategy() {}
-BACKEND_EXPORT void DumpProfileParallelStrategy(const FuncGraphPtr &func_graph);
+void DumpProfileParallelStrategy(const FuncGraphPtr &func_graph);
 void SaveParallelStrategyToFile();
 std::string GetParallelStrategyForReport();

View File

@@ -22,7 +22,7 @@
 namespace mindspore {
 namespace opt {
-class BACKEND_EXPORT InsertPlaceholderForDynamicRNN : public PatternProcessPass {
+class InsertPlaceholderForDynamicRNN : public PatternProcessPass {
 public:
 explicit InsertPlaceholderForDynamicRNN(bool multigraph = true)
 : PatternProcessPass("add_placeholder_for_dynamic_rnn", multigraph) {}

View File

@@ -26,7 +26,7 @@
 // This pass will split `ClipByNorm` op to smaller ops, such as `square`, `sqrt`, `reducesum` to achieve same function
 namespace mindspore {
 namespace opt {
-class BACKEND_EXPORT ClipByNormFissionGe : public PatternProcessPass {
+class ClipByNormFissionGe : public PatternProcessPass {
 public:
 explicit ClipByNormFissionGe(bool multigraph = true) : PatternProcessPass("clip_by_norm_fission_ge", multigraph) {}
 ~ClipByNormFissionGe() override = default;

View File

@@ -25,7 +25,7 @@
 // This pass will split `ClipByNorm` op to smaller ops, such as `square`, `sqrt`, `reducesum` to achieve same function
 namespace mindspore::opt {
-class BACKEND_EXPORT AscendClipByNormFission : public PatternProcessPass {
+class AscendClipByNormFission : public PatternProcessPass {
 public:
 explicit AscendClipByNormFission(bool multigraph = true)
 : PatternProcessPass("ascend_clip_by_norm_fission", multigraph) {}