forked from mindspore-Ecosystem/mindspore

update serving cpp_client compiler; update serving acl build option

commit 3554cad325 (parent 10015ad9b2)

@@ -51,8 +51,6 @@ include_directories(${CMAKE_CURRENT_SOURCE_DIR})
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/third_party/flatbuffers/include)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/third_party/flatbuffers/include/flatbuffers)

if (NOT ENABLE_ACL)

include(${CMAKE_SOURCE_DIR}/cmake/dependency_utils.cmake)
find_package(Python3 3.7 COMPONENTS Interpreter Development)
if(Python3_FOUND)

@@ -103,13 +101,9 @@ if (ENABLE_TESTCASES)
add_subdirectory(tests)
endif()

endif() # NOT ENABLE_ACL

if (ENABLE_SERVING)
add_subdirectory(serving)
add_subdirectory(serving/example/cpp_client)
endif()

if (NOT ENABLE_ACL)
include(cmake/package.cmake)
endif() # NOT ENABLE_ACL

build.sh

@@ -270,6 +270,8 @@ checkopts()
echo "enable serving"
;;
w)
ENABLE_SERVING="on"
echo "enable serving"
ENABLE_ACL="on"
echo "enable acl"
;;

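A usage note, not from the diff itself: given the surrounding checkopts() case statement, the w) branch shown above makes a single -w option to build.sh switch on both ENABLE_SERVING and ENABLE_ACL.
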
@@ -3,7 +3,6 @@ cmake_minimum_required(VERSION 3.5.1)
project(MSClient C CXX)

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
add_compile_definitions(_GLIBCXX_USE_CXX11_ABI=0)

find_package(Threads REQUIRED)

@@ -18,7 +17,8 @@ if(GRPC_PATH)
set(protobuf_MODULE_COMPATIBLE TRUE)
find_package(Protobuf CONFIG REQUIRED)
message(STATUS "Using protobuf ${protobuf_VERSION}, CMAKE_PREFIX_PATH : ${CMAKE_PREFIX_PATH}")
elseif(NOT GRPC_PATH)
elseif(NOT GRPC_PATH AND grpc_ROOT)
add_compile_definitions(_GLIBCXX_USE_CXX11_ABI=0)
if (EXISTS ${grpc_ROOT}/lib64)
set(gRPC_DIR "${grpc_ROOT}/lib64/cmake/grpc")
elseif(EXISTS ${grpc_ROOT}/lib)

@@ -28,12 +28,9 @@ elseif(NOT GRPC_PATH)
add_executable(protobuf::libprotoc ALIAS protobuf::protoc)
message(STATUS "serving using grpc_DIR : " ${gRPC_DIR})
elseif(NOT gRPC_DIR AND NOT GRPC_PATH)
message("please check gRPC. If the client is compiled separately,you can use the command: cmake -D GRPC_PATH=xxx")
message("XXX is the gRPC installation path")
message(FATAL_ERROR "please check gRPC. If the client is compiled separately,you can use the command: cmake -D GRPC_PATH=xxx\n" "XXX is the gRPC installation path")
endif()

set(_PROTOBUF_LIBPROTOBUF protobuf::libprotobuf)
set(_REFLECTION gRPC::grpc++_reflection)
if(CMAKE_CROSSCOMPILING)
find_program(_PROTOBUF_PROTOC protoc)
else()

@@ -45,7 +42,6 @@ endif()
find_package(gRPC CONFIG REQUIRED)
message(STATUS "Using gRPC ${gRPC_VERSION}")

set(_GRPC_GRPCPP gRPC::grpc++)
if(CMAKE_CROSSCOMPILING)
find_program(_GRPC_CPP_PLUGIN_EXECUTABLE grpc_cpp_plugin)
else()

@@ -75,13 +71,10 @@ add_custom_command(
include_directories("${CMAKE_CURRENT_BINARY_DIR}")

# Targets greeter_[async_](client|server)
foreach(_target
ms_client)
add_executable(${_target} "${_target}.cc"
${hw_proto_srcs}
${hw_grpc_srcs})
target_link_libraries(${_target}
${_REFLECTION}
${_GRPC_GRPCPP}
${_PROTOBUF_LIBPROTOBUF})
endforeach()
add_executable(ms_client "ms_client.cc"
${hw_proto_srcs}
${hw_grpc_srcs})
target_link_libraries(ms_client
gRPC::grpc++_reflection
gRPC::grpc++
protobuf::libprotobuf)

@@ -36,7 +36,7 @@ y = np.ones(4).astype(np.float32)
def export_net():
    add = Net()
    output = add(Tensor(x), Tensor(y))
    export(add, Tensor(x), Tensor(y), file_name='tensor_add.pb', file_format='MINDIR')
    export(add, Tensor(x), Tensor(y), file_name='tensor_add.mindir', file_format='MINDIR')
    print(x)
    print(y)
    print(output.asnumpy())

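For context, a minimal self-contained sketch of what the export script around this hunk looks like after the rename to tensor_add.mindir. Only Net(), Tensor, export, the np.ones inputs, and the file_name/file_format arguments appear in the diff itself; the imports and the body of Net are assumptions (older MindSpore releases spell the add operator ops.operations.TensorAdd rather than ops.Add).

# Hypothetical sketch of the export script; the Net body and imports are assumptions.
import numpy as np
import mindspore.nn as nn
import mindspore.ops as ops
from mindspore import Tensor, export

x = np.ones(4).astype(np.float32)
y = np.ones(4).astype(np.float32)

class Net(nn.Cell):
    """Element-wise addition of its two inputs."""
    def __init__(self):
        super(Net, self).__init__()
        self.add = ops.Add()

    def construct(self, a, b):
        return self.add(a, b)

def export_net():
    add = Net()
    output = add(Tensor(x), Tensor(y))
    # The exported file now carries the .mindir suffix, matching file_format='MINDIR'.
    export(add, Tensor(x), Tensor(y), file_name='tensor_add.mindir', file_format='MINDIR')
    print(x)
    print(y)
    print(output.asnumpy())

if __name__ == '__main__':
    export_net()
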
@@ -48,7 +48,6 @@ def run():

    try:
        result = stub.Predict(request)
        print(result)
        result_np = np.frombuffer(result.result[0].data, dtype=np.float32).reshape(result.result[0].tensor_shape.dims)
        print("ms client received: ")
        print(result_np)

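To make the frombuffer/reshape line above easier to follow, here is a small hypothetical helper that performs the same conversion. The field names result, data, and tensor_shape.dims are taken from that line; the helper name reply_to_numpy and the omitted gRPC channel/stub setup are assumptions.

# Hypothetical helper mirroring the conversion done in this hunk;
# `reply` stands for the object returned by stub.Predict(request).
import numpy as np

def reply_to_numpy(reply):
    first = reply.result[0]                             # first output tensor in the reply
    flat = np.frombuffer(first.data, dtype=np.float32)  # raw bytes -> flat float32 array
    return flat.reshape(first.tensor_shape.dims)        # restore the original tensor shape

# For the tensor_add example exported above, printing reply_to_numpy(result)
# would give something like [2. 2. 2. 2.].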