serving: codex

xuyongfei 2020-09-08 16:30:38 +08:00
parent 2a9c458870
commit 116535b206
8 changed files with 51 additions and 31 deletions

View File

@@ -107,6 +107,7 @@ class InferTensor : public InferTensorBase {
  public:
   InferTensor() = default;
+  ~InferTensor() = default;
   InferTensor(DataType type, std::vector<int64_t> shape, const void *data, size_t data_len) {
     set_data_type(type);
     set_shape(shape);
@@ -131,6 +132,8 @@ class InferTensor : public InferTensorBase {
 
 class InferImagesBase {
  public:
+  InferImagesBase() = default;
+  virtual ~InferImagesBase() = default;
   virtual size_t batch_size() const = 0;
   virtual bool get(size_t index, const void *&pic_buffer, uint32_t &pic_size) const = 0;
   virtual size_t input_index() const = 0;  // the index of images as input in model
@@ -138,18 +141,24 @@ class InferImagesBase {
 class RequestBase {
  public:
+  RequestBase() = default;
+  virtual ~RequestBase() = default;
   virtual size_t size() const = 0;
   virtual const InferTensorBase *operator[](size_t index) const = 0;
 };
 
 class ImagesRequestBase {
  public:
+  ImagesRequestBase() = default;
+  virtual ~ImagesRequestBase() = default;
   virtual size_t size() const = 0;
   virtual const InferImagesBase *operator[](size_t index) const = 0;
 };
 
 class ReplyBase {
  public:
+  ReplyBase() = default;
+  virtual ~ReplyBase() = default;
   virtual size_t size() const = 0;
   virtual InferTensorBase *operator[](size_t index) = 0;
   virtual const InferTensorBase *operator[](size_t index) const = 0;
@@ -160,6 +169,7 @@ class ReplyBase {
 
 class VectorInferTensorWrapReply : public ReplyBase {
  public:
   explicit VectorInferTensorWrapReply(std::vector<InferTensor> &tensor_list) : tensor_list_(tensor_list) {}
+  ~VectorInferTensorWrapReply() = default;
   size_t size() const { return tensor_list_.size(); }
   InferTensorBase *operator[](size_t index) {
@@ -187,6 +197,7 @@ class VectorInferTensorWrapReply : public ReplyBase {
 
 class VectorInferTensorWrapRequest : public RequestBase {
  public:
   explicit VectorInferTensorWrapRequest(const std::vector<InferTensor> &tensor_list) : tensor_list_(tensor_list) {}
+  ~VectorInferTensorWrapRequest() = default;
   size_t size() const { return tensor_list_.size(); }
   const InferTensorBase *operator[](size_t index) const {
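The hunks above give each concrete class an explicitly defaulted destructor and each abstract interface (InferImagesBase, RequestBase, ImagesRequestBase, ReplyBase) a defaulted constructor plus a virtual destructor. A minimal sketch, with hypothetical names not taken from this commit, of why a base class that is deleted through a base-class pointer wants a virtual destructor:

    #include <memory>

    class Base {
     public:
      Base() = default;
      virtual ~Base() = default;  // without this, deleting a Derived via Base* is undefined behavior
      virtual int size() const = 0;
    };

    class Derived : public Base {
     public:
      ~Derived() = default;
      int size() const override { return 1; }
    };

    int main() {
      std::unique_ptr<Base> obj = std::make_unique<Derived>();  // ~Derived() runs via the virtual ~Base()
      return obj->size() == 1 ? 0 : 1;
    }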

View File

@@ -33,6 +33,8 @@ class Status {
   Status() : status_code_(FAILED) {}
   Status(enum StatusCode status_code, const std::string &status_msg = "")
       : status_code_(status_code), status_msg_(status_msg) {}
+  ~Status() = default;
   bool IsSuccess() const { return status_code_ == SUCCESS; }
   enum StatusCode StatusCode() const { return status_code_; }
   std::string StatusMessage() const { return status_msg_; }

View File

@@ -233,7 +233,7 @@ Status DvppProcess::InputInputBuffer(const void *pic_buffer, size_t pic_buffer_s
 }
 
 static void JpegErrorExitCustom(j_common_ptr cinfo) {
-  char jpeg_last_error_msg[JMSG_LENGTH_MAX];
+  char jpeg_last_error_msg[JMSG_LENGTH_MAX] = {0};
   if (cinfo != nullptr && cinfo->err != nullptr && cinfo->err->format_message != nullptr) {
     (*(cinfo->err->format_message))(cinfo, jpeg_last_error_msg);
   }
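The change above zero-initializes the error-message buffer, so jpeg_last_error_msg is a valid, NUL-terminated (empty) string even when the guarded format_message call does not run. A short sketch of the same idea, using hypothetical names rather than the libjpeg types:

    #include <cstdio>

    void ReportDecodeError(bool have_formatter) {
      char msg[200] = {0};  // zero-initialized: reading it is safe even if nothing is written below
      if (have_formatter) {
        snprintf(msg, sizeof(msg), "decode failed with code %d", 42);
      }
      printf("jpeg error: %s\n", msg);  // prints an empty message instead of reading uninitialized bytes
    }

    int main() {
      ReportDecodeError(false);
      return 0;
    }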

View File

@@ -42,7 +42,7 @@ enum HTTP_DATA_TYPE { HTTP_DATA_NONE, HTTP_DATA_INT, HTTP_DATA_FLOAT };
 static const std::map<inference::DataType, HTTP_DATA_TYPE> infer_type2_http_type{
     {inference::DataType::kMSI_Int32, HTTP_DATA_INT}, {inference::DataType::kMSI_Float32, HTTP_DATA_FLOAT}};
 
-Status GetPostMessage(struct evhttp_request *req, std::string *buf) {
+Status GetPostMessage(struct evhttp_request *const req, std::string *const buf) {
   Status status(SUCCESS);
   size_t post_size = evbuffer_get_length(req->input_buffer);
   if (post_size == 0) {
@@ -57,7 +57,8 @@ Status GetPostMessage(struct evhttp_request *req, std::string *buf) {
     return status;
   }
 }
-Status CheckRequestValid(struct evhttp_request *http_request) {
+
+Status CheckRequestValid(struct evhttp_request *const http_request) {
   Status status(SUCCESS);
   switch (evhttp_request_get_command(http_request)) {
     case EVHTTP_REQ_POST:
@@ -68,7 +69,7 @@ Status CheckRequestValid(struct evhttp_request *http_request) {
   }
 }
 
-void ErrorMessage(struct evhttp_request *req, Status status) {
+void ErrorMessage(struct evhttp_request *const req, Status status) {
   json error_json = {{"error_message", status.StatusMessage()}};
   std::string out_error_str = error_json.dump();
   struct evbuffer *retbuff = evbuffer_new();
@@ -77,7 +78,7 @@ void ErrorMessage(struct evhttp_request *req, Status status) {
   evbuffer_free(retbuff);
 }
 
-Status CheckMessageValid(const json &message_info, HTTP_TYPE *type) {
+Status CheckMessageValid(const json &message_info, HTTP_TYPE *const type) {
   Status status(SUCCESS);
   int count = 0;
   if (message_info.find(HTTP_DATA) != message_info.end()) {
@@ -95,7 +96,7 @@ Status CheckMessageValid(const json &message_info, HTTP_TYPE *type) {
   return status;
 }
 
-Status GetDataFromJson(const json &json_data_array, ServingTensor *request_tensor, size_t data_index,
+Status GetDataFromJson(const json &json_data_array, ServingTensor *const request_tensor, size_t data_index,
                        HTTP_DATA_TYPE type) {
   Status status(SUCCESS);
   auto type_name = [](const json &json_data) -> std::string {
@@ -133,7 +134,7 @@ Status GetDataFromJson(const json &json_data_array, ServingTensor *request_tenso
   return SUCCESS;
 }
 
-Status RecusiveGetTensor(const json &json_data, size_t depth, ServingTensor *request_tensor, size_t data_index,
+Status RecusiveGetTensor(const json &json_data, size_t depth, ServingTensor *const request_tensor, size_t data_index,
                          HTTP_DATA_TYPE type) {
   Status status(SUCCESS);
   std::vector<int64_t> required_shape = request_tensor->shape();
@@ -185,7 +186,7 @@ std::vector<int64_t> GetJsonArrayShape(const json &json_array) {
   return json_shape;
 }
 
-Status TransDataToPredictRequest(const json &message_info, PredictRequest *request) {
+Status TransDataToPredictRequest(const json &message_info, PredictRequest *const request) {
   Status status = SUCCESS;
   auto tensors = message_info.find(HTTP_DATA);
   if (tensors == message_info.end()) {
@@ -241,7 +242,7 @@ Status TransDataToPredictRequest(const json &message_info, PredictRequest *reque
   return SUCCESS;
 }
 
-Status TransTensorToPredictRequest(const json &message_info, PredictRequest *request) {
+Status TransTensorToPredictRequest(const json &message_info, PredictRequest *const request) {
   Status status(SUCCESS);
   auto tensors = message_info.find(HTTP_TENSOR);
   if (tensors == message_info.end()) {
@@ -291,7 +292,8 @@ Status TransTensorToPredictRequest(const json &message_info, PredictRequest *req
   return status;
 }
 
-Status TransHTTPMsgToPredictRequest(struct evhttp_request *http_request, PredictRequest *request, HTTP_TYPE *type) {
+Status TransHTTPMsgToPredictRequest(struct evhttp_request *const http_request, PredictRequest *const request,
+                                    HTTP_TYPE *const type) {
   Status status = CheckRequestValid(http_request);
   if (status != SUCCESS) {
     return status;
@@ -352,7 +354,7 @@ Status TransHTTPMsgToPredictRequest(struct evhttp_request *http_request, Predict
   return status;
 }
 
-Status GetJsonFromTensor(const ms_serving::Tensor &tensor, int len, int *pos, json *out_json) {
+Status GetJsonFromTensor(const ms_serving::Tensor &tensor, int len, int *const pos, json *const out_json) {
   Status status(SUCCESS);
   switch (tensor.tensor_type()) {
     case ms_serving::MS_INT32: {
@@ -366,7 +368,7 @@ Status GetJsonFromTensor(const ms_serving::Tensor &tensor, int len, int *pos, js
     case ms_serving::MS_FLOAT32: {
       auto data = reinterpret_cast<const float *>(tensor.data().data()) + *pos;
       std::vector<float> result_tensor(len);
-      memcpy_s(result_tensor.data(), result_tensor.size() * sizeof(float), data, len * sizeof(float));
+      (void)memcpy_s(result_tensor.data(), result_tensor.size() * sizeof(float), data, len * sizeof(float));
       *out_json = std::move(result_tensor);
       *pos += len;
       break;
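memcpy_s reports failure through its return value, and the added (void) cast records that the result is deliberately discarded here (the destination vector is sized to fit immediately before the call). A self-contained sketch of the same checked-copy idea, using a hypothetical SafeCopy helper rather than the securec memcpy_s API:

    #include <cstddef>
    #include <cstring>
    #include <vector>

    // Hypothetical stand-in for memcpy_s: refuses to copy more bytes than the destination holds.
    bool SafeCopy(void *dest, std::size_t dest_max, const void *src, std::size_t count) {
      if (dest == nullptr || src == nullptr || count > dest_max) return false;
      std::memcpy(dest, src, count);
      return true;
    }

    int main() {
      std::vector<float> src{1.0f, 2.0f, 3.0f};
      std::vector<float> dst(src.size());
      if (!SafeCopy(dst.data(), dst.size() * sizeof(float), src.data(), src.size() * sizeof(float))) {
        return 1;  // a real caller would surface a FAILED Status here instead of ignoring the error
      }
      return 0;
    }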
@@ -378,7 +380,7 @@ Status GetJsonFromTensor(const ms_serving::Tensor &tensor, int len, int *pos, js
   return status;
 }
 
-Status TransPredictReplyToData(const PredictReply &reply, json *out_json) {
+Status TransPredictReplyToData(const PredictReply &reply, json *const out_json) {
   Status status(SUCCESS);
   for (int i = 0; i < reply.result_size(); i++) {
     (*out_json)["data"].push_back(json());
@@ -396,7 +398,7 @@ Status TransPredictReplyToData(const PredictReply &reply, json *out_json) {
   return status;
 }
 
-Status RecusiveGetJson(const ms_serving::Tensor &tensor, int depth, int *pos, json *out_json) {
+Status RecusiveGetJson(const ms_serving::Tensor &tensor, int depth, int *const pos, json *const out_json) {
   Status status(SUCCESS);
   if (depth >= 10) {
     ERROR_INFER_STATUS(status, FAILED, "result tensor shape dims is larger than 10");
@@ -420,7 +422,7 @@ Status RecusiveGetJson(const ms_serving::Tensor &tensor, int depth, int *pos, js
   return status;
 }
 
-Status TransPredictReplyToTensor(const PredictReply &reply, json *out_json) {
+Status TransPredictReplyToTensor(const PredictReply &reply, json *const out_json) {
   Status status(SUCCESS);
   for (int i = 0; i < reply.result_size(); i++) {
     (*out_json)["tensor"].push_back(json());
@@ -434,7 +436,7 @@ Status TransPredictReplyToTensor(const PredictReply &reply, json *out_json) {
   return status;
 }
 
-Status TransPredictReplyToHTTPMsg(const PredictReply &reply, const HTTP_TYPE &type, struct evbuffer *buf) {
+Status TransPredictReplyToHTTPMsg(const PredictReply &reply, const HTTP_TYPE &type, struct evbuffer *const buf) {
   Status status(SUCCESS);
   json out_json;
   switch (type) {
@@ -454,7 +456,7 @@ Status TransPredictReplyToHTTPMsg(const PredictReply &reply, const HTTP_TYPE &ty
   return status;
 }
 
-Status HttpHandleMsgDetail(struct evhttp_request *req, void *arg, struct evbuffer *retbuff) {
+Status HttpHandleMsgDetail(struct evhttp_request *const req, void *const arg, struct evbuffer *const retbuff) {
   PredictRequest request;
   PredictReply reply;
   HTTP_TYPE type;
@@ -482,7 +484,7 @@ Status HttpHandleMsgDetail(struct evhttp_request *req, void *arg, struct evbuffe
   return SUCCESS;
 }
 
-void http_handler_msg(struct evhttp_request *req, void *arg) {
+void http_handler_msg(struct evhttp_request *const req, void *const arg) {
   MSI_TIME_STAMP_START(TotalRestfulPredict)
   struct evbuffer *retbuff = evbuffer_new();
   if (retbuff == nullptr) {
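Most of the signature changes in this file add a top-level const to pointer parameters, for example PredictRequest *request becomes PredictRequest *const request: the function can still modify what the pointer refers to, but can no longer reseat the pointer itself. A minimal sketch with hypothetical names:

    struct Reply {
      int code;
    };

    // *const: the pointee stays writable, the pointer itself does not.
    void FillReply(Reply *const reply) {
      reply->code = 200;   // allowed: modifies the pointed-to object
      // reply = nullptr;  // would not compile: reply is a const pointer
    }

    int main() {
      Reply r{0};
      FillReply(&r);
      return r.code == 200 ? 0 : 1;
    }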

View File

@@ -44,6 +44,7 @@ namespace serving {
 namespace {
 static const uint32_t uint32max = 0x7FFFFFFF;
 std::promise<void> exit_requested;
+static const char kServerHttpIp[] = "0.0.0.0";
 
 void ClearEnv() { Session::Instance().Clear(); }
 void HandleSignal(int sig) { exit_requested.set_value(); }
@@ -195,7 +196,7 @@ Status Server::BuildAndStart() {
     return res;
   }
   auto option_args = Options::Instance().GetArgs();
-  std::string server_address = "0.0.0.0:" + std::to_string(option_args->grpc_port);
+  std::string server_address = std::string(kServerHttpIp) + ":" + std::to_string(option_args->grpc_port);
 
   auto http_server_new_ret = NewHttpServer();
   struct evhttp *http_server = http_server_new_ret.first;
@@ -211,7 +212,7 @@ Status Server::BuildAndStart() {
     event_base_free(eb);
   };
 
   int32_t http_port = option_args->rest_api_port;
-  std::string http_addr = "0.0.0.0";
+  std::string http_addr = kServerHttpIp;
   evhttp_set_timeout(http_server, 60);
   evhttp_set_gencb(http_server, http_handler_msg, nullptr);
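With kServerHttpIp defined once in the anonymous namespace, both the gRPC address and the RESTful bind address reuse the same constant instead of repeating the "0.0.0.0" literal. A small sketch of the pattern, with hypothetical names:

    #include <string>

    namespace {
    constexpr char kBindIp[] = "0.0.0.0";  // single definition of the listen address
    }  // namespace

    std::string MakeAddress(int port) { return std::string(kBindIp) + ":" + std::to_string(port); }

    int main() { return MakeAddress(5500) == "0.0.0.0:5500" ? 0 : 1; }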

View File

@@ -50,6 +50,7 @@ class MS_API ServingTensor : public inference::InferTensorBase {
 class ServingImages : public inference::InferImagesBase {
  public:
   explicit ServingImages(const ms_serving::Images &images);
+  ~ServingImages() = default;
 
   size_t batch_size() const override;
   bool get(size_t index, const void *&pic_buffer, uint32_t &pic_size) const override;
@@ -62,6 +63,7 @@ class ServingImages : public inference::InferImagesBase {
 class ServingRequest : public inference::RequestBase {
  public:
   explicit ServingRequest(const ms_serving::PredictRequest &request);
+  ~ServingRequest() = default;
 
   size_t size() const override;
   const inference::InferTensorBase *operator[](size_t index) const override;
@@ -74,6 +76,7 @@ class ServingRequest : public inference::RequestBase {
 class ServingReply : public inference::ReplyBase {
  public:
   explicit ServingReply(ms_serving::PredictReply &reply) : reply_(reply) {}
+  ~ServingReply() = default;
 
   size_t size() const override;
   inference::InferTensorBase *operator[](size_t index) override;
@@ -89,6 +92,7 @@ class ServingReply : public inference::ReplyBase {
 class ServingImagesRequest : public inference::ImagesRequestBase {
  public:
   explicit ServingImagesRequest(const ms_serving::PredictRequest &request);
+  ~ServingImagesRequest() = default;
 
   size_t size() const override;
   const inference::InferImagesBase *operator[](size_t index) const override;

View File

@@ -29,13 +29,13 @@ bool StartWith(const std::string &str, const std::string &expected) {
          (str.size() >= expected.size() && memcmp(str.data(), expected.data(), expected.size()) == 0);
 }
 
-bool RemovePrefix(std::string *str, const std::string &prefix) {
+bool RemovePrefix(std::string *const str, const std::string &prefix) {
   if (!StartWith(*str, prefix)) return false;
   str->replace(str->begin(), str->begin() + prefix.size(), "");
   return true;
 }
 
-bool Option::ParseInt32(std::string *arg) {
+bool Option::ParseInt32(std::string *const arg) {
   if (RemovePrefix(arg, "--") && RemovePrefix(arg, name_) && RemovePrefix(arg, "=")) {
     int32_t parsed_value;
     try {
@@ -50,7 +50,7 @@ bool Option::ParseInt32(std::string *arg) {
   return false;
 }
 
-bool Option::ParseBool(std::string *arg) {
+bool Option::ParseBool(std::string *const arg) {
   if (RemovePrefix(arg, "--") && RemovePrefix(arg, name_) && RemovePrefix(arg, "=")) {
     if (*arg == "true") {
       *bool_default_ = true;
@@ -66,7 +66,7 @@ bool Option::ParseBool(std::string *arg) {
   return false;
 }
 
-bool Option::ParseString(std::string *arg) {
+bool Option::ParseString(std::string *const arg) {
   if (RemovePrefix(arg, "--") && RemovePrefix(arg, name_) && RemovePrefix(arg, "=")) {
     *string_default_ = *arg;
     return true;
@@ -74,7 +74,7 @@ bool Option::ParseString(std::string *arg) {
   return false;
 }
 
-bool Option::ParseFloat(std::string *arg) {
+bool Option::ParseFloat(std::string *const arg) {
   if (RemovePrefix(arg, "--") && RemovePrefix(arg, name_) && RemovePrefix(arg, "=")) {
     float parsed_value;
     try {
@@ -89,7 +89,7 @@ bool Option::ParseFloat(std::string *arg) {
   return false;
 }
 
-Option::Option(const std::string &name, int32_t *default_point, const std::string &usage)
+Option::Option(const std::string &name, int32_t *const default_point, const std::string &usage)
     : name_(name),
       type_(MS_TYPE_INT32),
       int32_default_(default_point),
@@ -98,7 +98,7 @@ Option::Option(const std::string &name, int32_t *default_point, const std::strin
       float_default_(nullptr),
       usage_(usage) {}
 
-Option::Option(const std::string &name, bool *default_point, const std::string &usage)
+Option::Option(const std::string &name, bool *const default_point, const std::string &usage)
     : name_(name),
       type_(MS_TYPE_BOOL),
       int32_default_(nullptr),
@@ -107,7 +107,7 @@ Option::Option(const std::string &name, bool *default_point, const std::string &
      float_default_(nullptr),
       usage_(usage) {}
 
-Option::Option(const std::string &name, std::string *default_point, const std::string &usage)
+Option::Option(const std::string &name, std::string *const default_point, const std::string &usage)
     : name_(name),
       type_(MS_TYPE_STRING),
       int32_default_(nullptr),
@@ -116,7 +116,7 @@ Option::Option(const std::string &name, std::string *default_point, const std::s
       float_default_(nullptr),
       usage_(usage) {}
 
-Option::Option(const std::string &name, float *default_point, const std::string &usage)
+Option::Option(const std::string &name, float *const default_point, const std::string &usage)
     : name_(name),
       type_(MS_TYPE_FLOAT),
       int32_default_(nullptr),
@@ -125,7 +125,7 @@ Option::Option(const std::string &name, float *default_point, const std::string
       float_default_(default_point),
       usage_(usage) {}
 
-bool Option::Parse(std::string *arg) {
+bool Option::Parse(std::string *const arg) {
   bool result = false;
   switch (type_) {
     case MS_TYPE_BOOL:

View File

@@ -93,7 +93,7 @@ int main(int argc, char **argv) {
     size_t start_pos = arg_val.find(arg_target_str);
     if (start_pos != std::string::npos) {
       start_pos += arg_target_str.size();
-      if (arg_val[start_pos] == '=') {
+      if (start_pos < arg_val.size() && arg_val[start_pos] == '=') {
         target_str = arg_val.substr(start_pos + 1);
       } else {
         std::cout << "The only correct argument syntax is --target=" << std::endl;
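The added start_pos < arg_val.size() guard makes sure the '=' check only indexes into the string when there is a character left to read, for example when the argument is exactly --target with nothing after it. A stand-alone sketch of the guarded parse, with hypothetical names:

    #include <iostream>
    #include <string>

    // Hypothetical helper mirroring the --target= handling shown above.
    bool ParseTarget(const std::string &arg_val, std::string *const target_str) {
      const std::string arg_target_str = "--target";
      auto start_pos = arg_val.find(arg_target_str);
      if (start_pos == std::string::npos) return false;
      start_pos += arg_target_str.size();
      if (start_pos < arg_val.size() && arg_val[start_pos] == '=') {  // bounds check before operator[]
        *target_str = arg_val.substr(start_pos + 1);
        return true;
      }
      std::cout << "The only correct argument syntax is --target=" << std::endl;
      return false;
    }

    int main() {
      std::string target;
      return ParseTarget("--target=ascend", &target) && target == "ascend" ? 0 : 1;
    }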