!6318 fix cmake compile options

Merge pull request !6318 from hexia/master
commit 9c3a494ea7
Author: mindspore-ci-bot
Date: 2020-09-16 15:44:53 +08:00 (committed by Gitee)
16 changed files with 2 additions and 25 deletions


@@ -1,3 +1,5 @@
+set(libevent_CFLAGS "-fstack-protector-all -D_FORTIFY_SOURCE=2 -O2")
+set(libevent_LDFLAGS "-Wl,-z,now")
 mindspore_add_pkg(libevent
         VER 2.1.12
         LIBS event event_pthreads
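
For context, not part of the commit: -fstack-protector-all inserts a stack canary into every function; -D_FORTIFY_SOURCE=2 has glibc swap memory/string calls for bounds-checked variants, which requires optimization, hence the -O2; -Wl,-z,now makes the dynamic linker resolve all symbols at load time. A minimal C++ illustration of the kind of bug fortification turns into a clean abort:

    // Build with: g++ -O2 -D_FORTIFY_SOURCE=2 -fstack-protector-all demo.cc
    #include <cstring>

    int main() {
      char dst[4];
      const char src[8] = "overrun";
      // The destination size is known at compile time, so glibc routes this
      // call through __memcpy_chk; copying 8 bytes into a 4-byte buffer
      // aborts at runtime instead of silently smashing the stack.
      std::memcpy(dst, src, sizeof(src));
      return dst[0];
    }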


@@ -29,7 +29,6 @@
namespace mindspore {
namespace inference {
class AclSession : public InferSession {
public:
AclSession();


@@ -23,7 +23,6 @@
namespace mindspore {
namespace inference {
DvppProcess::DvppProcess() {}
DvppProcess::~DvppProcess() {}
@@ -1134,6 +1133,5 @@ Status DvppProcess::InitWithJsonConfig(const std::string &json_config) {
}
return SUCCESS;
}
} // namespace inference
} // namespace mindspore


@@ -22,7 +22,6 @@
namespace mindspore {
namespace inference {
Status ModelProcess::PreInitModelResource() {
model_desc_ = aclmdlCreateDesc();
aclError acl_ret = aclmdlGetDesc(model_desc_, model_id_);
@@ -428,6 +427,5 @@ size_t ModelProcess::GetBatchSize() const {
}
return static_cast<size_t>(input_infos_[0].dims[0]);
}
} // namespace inference
} // namespace mindspore
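
For context, not part of the commit: the lines above use the Ascend ACL model API (aclmdlCreateDesc, aclmdlGetDesc). A minimal sketch of that pattern, with a hypothetical helper name and simplified error handling:

    #include "acl/acl_mdl.h"

    // Hypothetical helper: read the batch dimension (dims[0] of input 0)
    // of a loaded model, mirroring PreInitModelResource/GetBatchSize above.
    size_t QueryBatchSize(uint32_t model_id) {
      aclmdlDesc *desc = aclmdlCreateDesc();
      if (desc == nullptr) return 0;
      if (aclmdlGetDesc(desc, model_id) != ACL_ERROR_NONE) {
        aclmdlDestroyDesc(desc);
        return 0;
      }
      aclmdlIODims dims;
      aclError ret = aclmdlGetInputDims(desc, 0, &dims);
      size_t batch = (ret == ACL_ERROR_NONE && dims.dimCount > 0)
                         ? static_cast<size_t>(dims.dims[0]) : 0;
      aclmdlDestroyDesc(desc);
      return batch;
    }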


@@ -25,7 +25,6 @@
namespace mindspore {
namespace inference {
struct AclTensorInfo {
void *device_data;
size_t buffer_size;
@@ -78,7 +77,6 @@ class ModelProcess {
void DestroyInputsBuffer();
void DestroyOutputsBuffer();
};
} // namespace inference
} // namespace mindspore


@@ -32,7 +32,6 @@ using nlohmann::json;
namespace mindspore {
namespace serving {
const int BUF_MAX = 0x7FFFFFFF;
static constexpr char HTTP_DATA[] = "data";
static constexpr char HTTP_TENSOR[] = "tensor";
@@ -550,6 +549,5 @@ void http_handler_msg(struct evhttp_request *const req, void *const arg) {
evbuffer_free(retbuff);
MSI_TIME_STAMP_END(TotalRestfulPredict)
}
} // namespace serving
} // namespace mindspore
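
For context, not part of the commit: http_handler_msg builds its JSON reply in an evbuffer, and HTTP_DATA / HTTP_TENSOR name the accepted request keys. A hedged sketch of parsing such a body with nlohmann::json; the "data" key comes from the hunk, the payload shape is an assumption:

    #include <nlohmann/json.hpp>
    #include <iostream>
    #include <string>

    using nlohmann::json;

    int main() {
      // Assumed request shape: {"data": [[1.0, 2.0], [3.0, 4.0]]}
      const std::string body = R"({"data": [[1.0, 2.0], [3.0, 4.0]]})";
      // allow_exceptions=false: a malformed body yields a discarded value
      json msg = json::parse(body, nullptr, /*allow_exceptions=*/false);
      if (msg.is_discarded() || !msg.contains("data") || !msg["data"].is_array()) {
        std::cerr << "bad request\n";
        return 1;
      }
      for (const auto &instance : msg["data"]) {
        std::cout << "instance with " << instance.size() << " values\n";
      }
      return 0;
    }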


@@ -40,7 +40,6 @@ using ms_serving::PredictRequest;
namespace mindspore {
namespace serving {
namespace {
static const uint32_t uint32max = 0x7FFFFFFF;
std::promise<void> exit_requested;
@@ -66,7 +65,6 @@ grpc::Status CreatGRPCStatus(const Status &status) {
return grpc::Status::CANCELLED;
}
}
} // namespace
// Service Implement
@@ -114,7 +112,6 @@ static std::pair<struct evhttp *, struct event_base *> NewHttpServer() {
auto listener =
evconnlistener_new_bind(eb, nullptr, nullptr, LEV_OPT_REUSEABLE | LEV_OPT_CLOSE_ON_EXEC | LEV_OPT_CLOSE_ON_FREE, -1,
reinterpret_cast<struct sockaddr *>(&sin), sizeof(sin));
if (listener == nullptr) {
MSI_LOG_ERROR << "Serving Error: RESTful server start failed, create http listener failed, port " << http_port;
std::cout << "Serving Error: RESTful server start failed, create http listener failed, port " << http_port
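
For context, not part of the commit: NewHttpServer binds a raw evconnlistener; libevent's higher-level evhttp API expresses the same kind of server in a few calls. A self-contained sketch (handler body and port are assumptions, not the serving code):

    #include <event2/buffer.h>
    #include <event2/event.h>
    #include <event2/http.h>

    // Assumed handler: reply 200 OK to every request.
    static void Handler(struct evhttp_request *req, void *) {
      struct evbuffer *reply = evbuffer_new();
      evbuffer_add_printf(reply, "ok\n");
      evhttp_send_reply(req, HTTP_OK, "OK", reply);
      evbuffer_free(reply);
    }

    int main() {
      struct event_base *eb = event_base_new();
      struct evhttp *http = evhttp_new(eb);
      evhttp_set_gencb(http, Handler, nullptr);
      if (evhttp_bind_socket(http, "0.0.0.0", 5500) != 0) {
        // same failure mode as the hunk above: could not bind the port
        return 1;
      }
      event_base_dispatch(eb);  // blocks, dispatching requests to Handler
      evhttp_free(http);
      event_base_free(eb);
      return 0;
    }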


@@ -19,7 +19,6 @@
#include "util/status.h"
namespace mindspore {
namespace serving {
class Server {
public:
Server() = default;


@@ -27,7 +27,6 @@ using std::vector;
namespace mindspore {
namespace serving {
using inference::DataType;
using inference::InferTensorBase;
@@ -191,6 +190,5 @@ const inference::InferImagesBase *ServingImagesRequest::operator[](size_t index)
}
return &(cache_[index]);
}
} // namespace serving
} // namespace mindspore


@@ -25,7 +25,6 @@
namespace mindspore {
namespace serving {
class MS_API ServingTensor : public inference::InferTensorBase {
public:
// the other's lifetime must longer than this object
@@ -101,7 +100,6 @@ class ServingImagesRequest : public inference::ImagesRequestBase {
const ms_serving::PredictRequest &request_;
std::vector<ServingImages> cache_;
};
} // namespace serving
} // namespace mindspore
#endif // MINDSPORE_SERVING_TENSOR_H_


@@ -35,7 +35,6 @@ using ms_serving::PredictRequest;
namespace mindspore {
namespace serving {
Status Session::CreatDeviceSession(const std::string &device, uint32_t device_id) {
session_ = inference::InferSession::CreateSession(device, device_id);
if (session_ == nullptr) {
@@ -151,6 +150,5 @@ Status Session::GetModelInputsInfo(std::vector<inference::InferTensor> &tensor_l
}
return ret;
}
} // namespace serving
} // namespace mindspore


@@ -28,7 +28,6 @@
namespace mindspore {
namespace serving {
using inference::FAILED;
using inference::INVALID_INPUTS;
using inference::Status;
@@ -57,7 +56,6 @@ class Session {
Status PredictInner(const PredictRequest &request, PredictReply &reply);
};
} // namespace serving
} // namespace mindspore
#endif // MINDSPORE_SERVER_H


@@ -29,7 +29,6 @@
namespace mindspore {
namespace serving {
bool DirOrFileExist(const std::string &file_path) {
int ret = access(file_path.c_str(), 0);
return (ret == -1) ? false : true;
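
For context, not part of the commit: the literal 0 passed to access above is POSIX F_OK (an existence test). An equivalent, more explicit formulation with a hypothetical name:

    #include <unistd.h>
    #include <string>

    // Hypothetical rewrite of DirOrFileExist using the named constant:
    // access() returns 0 when the path exists, -1 otherwise.
    bool PathExists(const std::string &file_path) {
      return access(file_path.c_str(), F_OK) == 0;
    }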


@@ -23,7 +23,6 @@
namespace mindspore {
namespace serving {
bool StartWith(const std::string &str, const std::string &expected) {
return expected.empty() ||
(str.size() >= expected.size() && memcmp(str.data(), expected.data(), expected.size()) == 0);
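
For context, not part of the commit: StartWith treats an empty prefix as matching everything and uses memcmp to avoid allocating a substring. A small usage sketch restating the function from the hunk, with assumed example inputs:

    #include <cassert>
    #include <cstring>
    #include <string>

    // Restated from the hunk above.
    bool StartWith(const std::string &str, const std::string &expected) {
      return expected.empty() ||
             (str.size() >= expected.size() &&
              memcmp(str.data(), expected.data(), expected.size()) == 0);
    }

    int main() {
      assert(StartWith("model_v1", "model_"));  // prefix present
      assert(StartWith("anything", ""));        // empty prefix matches all
      assert(!StartWith("mod", "model_"));      // shorter than the prefix
      return 0;
    }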


@@ -19,7 +19,6 @@
namespace mindspore {
namespace serving {
MindSporeModel::MindSporeModel(const std::string &model_name, const std::string &model_path,
const std::string &model_version, const time_t &last_update_time)
: model_name_(model_name),


@@ -25,7 +25,6 @@
namespace mindspore {
namespace serving {
volatile bool stop_poll = false;
std::string GetVersionFromPath(const std::string &path) {