!18761 [MSLITE] Fix bug of nnie compilation.
Merge pull request !18761 from wangshaocong/nnie_to_master
Commit bea0eb7bd2
@@ -89,7 +89,7 @@ class Collected {
 template <typename T>
 inline Future<std::list<T>> Collect(const std::list<Future<T>> &futures) {
   if (futures.empty()) {
-    return Future(std::list<T>());
+    return Future<std::list<T>>(std::list<T>());
   }

   Promise<std::list<T>> *promise = new (std::nothrow) Promise<std::list<T>>();

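The recurring theme in this change set appears to be that the NNIE (HIMIX200) toolchain builds MindSpore Lite with a pre-C++17 compiler: class template argument deduction (used above in return Future(std::list<T>())), if-statements with initializers, and copy-initialization of std::atomic are C++17-only and fail there, so the patch rewrites them in C++14-compatible form. A minimal sketch of the CTAD case, using a hypothetical Box template that is not part of the code base:

    #include <string>
    #include <utility>

    template <typename T>
    struct Box {
      explicit Box(T v) : value(std::move(v)) {}
      T value;
    };

    Box<std::string> MakeBox() {
      // return Box(std::string("id"));            // CTAD: well-formed only since C++17
      return Box<std::string>(std::string("id"));  // explicit argument: also builds as C++14
    }

    int main() { return MakeBox().value.empty() ? 1 : 0; }
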
@@ -46,7 +46,7 @@ Option<uuid> uuid::FromBytes(const std::string &s) {
   uuid u;
   memcpy(&u.uuidData, s.data(), s.size());

-  return Option(u);
+  return Option<uuid>(u);
 }

 Option<unsigned char> uuid::GetValue(char c) {

@@ -106,7 +106,7 @@ Option<uuid> uuid::FromString(const std::string &s) {
     MS_LOG(ERROR) << "No } end or leng invalid";
     return Option<uuid>(MindrtNone());
   }
-  return Option(u);
+  return Option<uuid>(u);
 }

 // To check whether uuid looks like 0000000-000-000-000-000000000000000

@@ -30,7 +30,7 @@ void ActorWorker::CreateThread(ActorThreadPool *pool, ThreadPolicy policy) {

 void ActorWorker::RunWithSpin() {
 #ifndef __APPLE__
-  static std::atomic_int index = 0;
+  static std::atomic_int index = {0};
   pthread_setname_np(pthread_self(), ("ActorThread_" + std::to_string(index++)).c_str());
 #endif
   while (alive_) {

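static std::atomic_int index = 0; is copy-initialization: it builds a temporary std::atomic_int from 0 and then initializes index from it, which needs the deleted copy constructor of std::atomic unless the compiler implements C++17's guaranteed copy elision. The braced form = {0} calls the atomic(int) constructor directly and is accepted from C++11 on. A standalone sketch:

    #include <atomic>

    // std::atomic_int a = 0;       // copy-initialization: rejected by pre-C++17 compilers
    //                              // because std::atomic's copy constructor is deleted
    static std::atomic_int b = {0};  // list-initialization calls atomic(int) in place

    int main() { return b.load(); }
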
@@ -48,7 +48,7 @@ void ActorWorker::RunWithSpin() {

 void ActorWorker::RunWithWait() {
 #ifndef __APPLE__
-  static std::atomic_int index = 0;
+  static std::atomic_int index = {0};
   pthread_setname_np(pthread_self(), ("ActorThread_" + std::to_string(index++)).c_str());
 #endif
   while (alive_) {

@@ -33,7 +33,7 @@ void Worker::CreateThread() { thread_ = std::thread(&Worker::Run, this); }

 void Worker::Run() {
 #ifndef __APPLE__
-  static std::atomic_int index = 0;
+  static std::atomic_int index = {0};
   pthread_setname_np(pthread_self(), ("KernelThread_" + std::to_string(index++)).c_str());
 #endif
   while (alive_) {

@@ -128,7 +128,7 @@ int ThreadPool::ParallelLaunch(const Func &func, Content content, int task_num)
   // distribute task to the KernelThread and the idle ActorThread,
   // if the task num is greater than the KernelThread num
   THREAD_INFO("launch: %d", task_num);
-  Task task = Task(func, content);
+  Task task = {func, content};

   DistributeTask(&task, task_num);
   // synchronization

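Task task = Task(func, content); materializes a temporary Task and then copies or moves it into task; if Task carries non-copyable members (for example std::atomic progress counters, as thread-pool task records often do), a pre-C++17 compiler rejects that copy, while the braced form initializes task in place. A sketch with hypothetical Func/Content/Task stand-ins, not the real mindrt definitions:

    #include <atomic>
    #include <functional>
    #include <utility>

    using Func = std::function<int(void *, int)>;
    using Content = void *;

    struct Task {
      Task(Func f, Content c) : func(std::move(f)), content(c) {}
      Func func;
      Content content;
      std::atomic_int finished{0};  // non-copyable member: Task's copy constructor is deleted
    };

    int main() {
      Func f = [](void *, int) { return 0; };
      // Task t = Task(f, nullptr);  // temporary + copy/move: fails before C++17
      Task t = {f, nullptr};         // list-initialization constructs t directly
      return t.func(t.content, 0);
    }
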
@@ -164,7 +164,9 @@ include(${TOP_DIR}/cmake/utils.cmake)
 include(${TOP_DIR}/cmake/dependency_utils.cmake)
 include(${TOP_DIR}/cmake/dependency_securec.cmake)
 include(${TOP_DIR}/cmake/external_libs/flatbuffers.cmake)
-include(${TOP_DIR}/cmake/external_libs/openssl.cmake)
+if(NOT TARGET_HIMIX200)
+    include(${TOP_DIR}/cmake/external_libs/openssl.cmake)
+endif()
 if(MSLITE_GPU_BACKEND STREQUAL opencl)
     include(${TOP_DIR}/cmake/external_libs/opencl.cmake)
 endif()

@@ -145,16 +145,6 @@ if(ENABLE_MINDRT)
         ${CMAKE_CURRENT_SOURCE_DIR}/lite_mindrt.cc
         ${CMAKE_CURRENT_SOURCE_DIR}/mindrt_executor.cc
     )
-elseif(TARGET_HIMIX200)
-    include_directories(${CORE_DIR}/mindrt)
-    include_directories(${CORE_DIR}/mindrt/include)
-    include_directories(${CORE_DIR}/mindrt/src)
-    set(LITE_SRC
-        ${LITE_SRC}
-        ${CORE_DIR}/mindrt/src/thread/core_affinity.cc
-        ${CORE_DIR}/mindrt/src/thread/actor_threadpool.cc
-        ${CORE_DIR}/mindrt/src/thread/threadpool.cc
-    )
 endif()

 add_subdirectory(ops)

@@ -181,20 +181,24 @@ int AffineFp32CPUKernel::Init() {
     return RET_PARAM_INVALID;
   }
   if (affine_parameter_->activation_type_ != schema::ActivationType::ActivationType_NO_ACTIVATION) {
-    if (auto ret = CheckActivationValid(); ret != RET_OK) {
+    auto ret = CheckActivationValid();
+    if (ret != RET_OK) {
       MS_LOG(ERROR) << "CheckActivationValid failed";
       return ret;
     }
   }
-  if (auto ret = ReSize(); ret != RET_OK) {
+  auto ret = ReSize();
+  if (ret != RET_OK) {
     MS_LOG(ERROR) << "ReSize failed";
     return ret;
   }
-  if (auto ret = FullRunInit(); ret != RET_OK) {
+  ret = FullRunInit();
+  if (ret != RET_OK) {
     MS_LOG(ERROR) << "FullRunInit failed";
     return ret;
   }
-  if (auto ret = IncrementInit(); ret != RET_OK) {
+  ret = IncrementInit();
+  if (ret != RET_OK) {
     MS_LOG(ERROR) << "IncrementInit failed";
     return ret;
   }

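if (auto ret = ...; ret != RET_OK) is the C++17 if-statement with initializer; the compiler used for the HIMIX200/NNIE target apparently does not accept it, so the patch hoists the initializer into a separate statement and reuses one ret variable. A minimal sketch, where RET_OK and DoStep are illustrative stand-ins:

    #include <cstdio>

    constexpr int RET_OK = 0;
    int DoStep() { return RET_OK; }

    int Init() {
      // C++17 only:
      //   if (auto ret = DoStep(); ret != RET_OK) { return ret; }
      // Pre-C++17 equivalent, as used in the patch:
      auto ret = DoStep();
      if (ret != RET_OK) {
        std::printf("DoStep failed\n");
        return ret;
      }
      return RET_OK;
    }

    int main() { return Init(); }
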
@@ -214,7 +218,8 @@ int AffineFp32CPUKernel::ReSize() {
     }
     full_mult_kernel_->set_name(this->name_);
   }
-  if (auto ret = full_mult_kernel_->ReSize(); ret != RET_OK) {
+  auto ret = full_mult_kernel_->ReSize();
+  if (ret != RET_OK) {
     return ret;
   }

@@ -227,7 +232,8 @@ int AffineFp32CPUKernel::ReSize() {
     }
     increment_mult_kernel_->set_name(this->name_);
   }
-  if (auto ret = increment_mult_kernel_->ReSize(); ret != RET_OK) {
+  ret = increment_mult_kernel_->ReSize();
+  if (ret != RET_OK) {
     return ret;
   }
   return RET_OK;

@@ -372,7 +378,8 @@ int AffineFp32CPUKernel::IncrementSplice() {
 }

 int AffineFp32CPUKernel::IncrementMatmulRun() {
-  if (auto ret = IncrementSplice(); ret != RET_OK) {
+  auto ret = IncrementSplice();
+  if (ret != RET_OK) {
     MS_LOG(ERROR) << "IncrementSplice failed.";
     return ret;
   }

@@ -381,7 +388,8 @@ int AffineFp32CPUKernel::IncrementMatmulRun() {
     MS_LOG(ERROR) << "increment_mult_kernel_ is null, can't call increment_mult_kernel_->Run().";
     return RET_NULL_PTR;
   }
-  if (auto ret = increment_mult_kernel_->Run(); ret != RET_OK) {
+  ret = increment_mult_kernel_->Run();
+  if (ret != RET_OK) {
     MS_LOG(ERROR) << "increment_mult_kernel_->Run() failed";
     return ret;
   }

@@ -392,7 +400,8 @@ int AffineFp32CPUKernel::IncrementMatmulRun() {
   }

   if (affine_parameter_->activation_type_ != schema::ActivationType::ActivationType_NO_ACTIVATION) {
-    if (auto ret = DoActivation(increment_output_) != RET_OK) {
+    ret = DoActivation(increment_output_);
+    if (ret != RET_OK) {
       MS_LOG(ERROR) << "DoActivation() failed";
       return ret;
     }

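This hunk does more than drop the C++17 syntax: in the old line the missing ; means ret is declared from the whole comparison DoActivation(increment_output_) != RET_OK, so it is a bool and the real return code is discarded before return ret;. The rewritten form keeps the actual error code. A standalone sketch of the difference, with RET_OK/RET_ERROR/DoActivation as stand-ins:

    #include <cstdio>

    constexpr int RET_OK = 0;
    constexpr int RET_ERROR = -2;
    int DoActivation() { return RET_ERROR; }

    int main() {
      // Old form: ret binds to the comparison, i.e. a bool, so a caller would see 1.
      if (auto ret = DoActivation() != RET_OK) {
        std::printf("ret from comparison: %d\n", static_cast<int>(ret));
      }
      // Patched form: the real code (-2) is preserved and can be propagated.
      int ret = DoActivation();
      if (ret != RET_OK) {
        std::printf("ret from call: %d\n", ret);
      }
      return 0;
    }
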
@@ -431,7 +440,8 @@ int AffineFp32CPUKernel::FullSpliceRun() {

 int AffineFp32CPUKernel::FullMatmulRun() {
   // Run Splice
-  if (auto ret = FullSpliceRun(); ret != RET_OK) {
+  auto ret = FullSpliceRun();
+  if (ret != RET_OK) {
     MS_LOG(ERROR) << "run FullSplice failed";
     return ret;
   }

@@ -440,13 +450,15 @@ int AffineFp32CPUKernel::FullMatmulRun() {
     MS_LOG(ERROR) << "full_mult_kernel_ is null, can't call full_mult_kernel_->Run().";
     return RET_NULL_PTR;
   }
-  if (auto ret = full_mult_kernel_->Run(); ret != RET_OK) {
+  ret = full_mult_kernel_->Run();
+  if (ret != RET_OK) {
     MS_LOG(ERROR) << "full_mult_kernel_->Run() failed";
     return ret;
   }

   if (affine_parameter_->activation_type_ != schema::ActivationType::ActivationType_NO_ACTIVATION) {
-    if (auto ret = DoActivation(out_tensors_.at(kOutputIndex)) != RET_OK) {
+    ret = DoActivation(out_tensors_.at(kOutputIndex));
+    if (ret != RET_OK) {
       MS_LOG(ERROR) << "DoActivation() failed";
       return ret;
     }

@@ -17,6 +17,7 @@
 #ifndef MINDSPORE_LITE_SRC_TENSOR_H_
 #define MINDSPORE_LITE_SRC_TENSOR_H_

+#include <math.h>
 #include <memory>
 #include <vector>
 #include <string>

@@ -216,7 +217,7 @@ class Tensor : public mindspore::tensor::MSTensor {

   void set_scale(float scale) { this->scale_ = scale; }

-  bool IsScale() const { return (std::abs(this->scale_ - 1.0f) > 1.0e-05); }
+  bool IsScale() const { return (std::fabs(this->scale_ - 1.0f) > 1.0e-05); }

  private:
   template <typename T>

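std::abs for floating-point arguments is only guaranteed after including <cmath>, which tensor.h did not pull in; depending on what other headers happen to declare, the call can fail to compile or quietly pick an integer overload on older toolchains. The patch switches to the unambiguous floating-point fabs and adds the matching #include <math.h>. A minimal sketch of the check in its patched form:

    #include <math.h>

    // Sketch of the IsScale predicate with the explicit floating-point fabs.
    bool IsScale(float scale) { return fabs(scale - 1.0f) > 1.0e-05; }

    int main() { return IsScale(1.0f) ? 1 : 0; }
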
|
Loading…
Reference in New Issue