fix compilation warnings

Signed-off-by: zhoufeng <zhoufeng54@huawei.com>
zhoufeng 2020-08-21 09:07:17 +08:00
parent 0fe88f07f6
commit 22ec976ef7
11 changed files with 14 additions and 13 deletions


@@ -23,7 +23,7 @@ endif()
set(CMAKE_CXX_FLAGS_DEBUG "$ENV{CXXFLAGS} -O0 -g2 -ggdb -fno-inline-functions -fno-omit-frame-pointer -Wl,--allow-shlib-undefined -D_LIBCPP_INLINE_VISIBILITY='' -D_LIBCPP_DISABLE_EXTERN_TEMPLATE=1 -DHALF_ENABLE_CPP11_USER_LITERALS=0 -D_FORTIFY_SOURCE=2 -Wno-cpp")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -I/usr/local/include -std=c++17 -Werror -Wall -Wno-deprecated-declarations -fPIC")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -I/usr/local/include -std=c++17 -Werror -Wall -fPIC")
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
set(PYBIND11_CPP_STANDARD -std=c++17)
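
Note: with -Wno-deprecated-declarations removed while -Werror -Wall stay in place, any remaining use of a deprecated declaration now breaks the build. A minimal standalone sketch of the effect (not MindSpore code; OldApi and NewApi are illustrative names):

// Compiled with -Wall -Werror and without -Wno-deprecated-declarations,
// the call below is rejected: -Wdeprecated-declarations becomes an error.
[[deprecated("use NewApi() instead")]]
int OldApi() { return 0; }

int NewApi() { return 1; }

int main() { return OldApi(); }  // error: 'OldApi' is deprecated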


@@ -1818,7 +1818,7 @@ class IrParser {
py::object tensor_obj = LoadObject(lexer_.GetTokenText());
py::array tensor_data = py::cast<py::array>(tensor_obj);
-if (tensor_data == nullptr) {
+if (!tensor_data) {
return TOK_ERROR;
}
*val_ptr = TensorPy::MakeTensor(tensor_data, TypeIdToType(type));
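
Note: this is the null-check pattern used throughout the commit. A pybind11 handle wraps a PyObject*, and comparing it against nullptr goes through handle comparison operators that pybind11 has deprecated; the explicit operator bool() reports whether the handle holds an object. A minimal standalone sketch (not MindSpore code):

#include <pybind11/embed.h>
namespace py = pybind11;

int main() {
  py::scoped_interpreter guard;
  py::object empty;                 // default-constructed: no underlying PyObject*
  py::object value = py::int_(42);  // holds an object
  bool has_empty = static_cast<bool>(empty);  // false
  bool has_value = static_cast<bool>(value);  // true
  return (!has_empty && has_value) ? 0 : 1;
}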


@@ -49,7 +49,8 @@ void MultitypeFuncGraph::Register(const TypePtrList &types, specialize_fn s_fn)
}
void MultitypeFuncGraph::Register(const TypePtrList &types, const py::function &py_fn) {
MS_LOG(DEBUG) << "Register type (" << ::mindspore::ToString(types) << ", " << std::string(py_fn.str()) << ").";
MS_LOG(DEBUG) << "Register type (" << ::mindspore::ToString(types) << ", " << py::str(py_fn.cast<py::object>())
<< ").";
auto fn = fn_cache_.find(types);
if (fn != fn_cache_.end()) {
MS_LOG(EXCEPTION) << "Cannot register as (" << ::mindspore::ToString(types) << ", already registered.";
@@ -116,7 +117,7 @@ FuncGraphPtr MultitypeFuncGraph::GenerateFromTypes(const TypePtrList &types) {
auto py_fn = SignMatch(types);
std::ostringstream buffer;
buffer << types;
-if (py_fn != py::none()) {
+if (!py_fn.is_none()) {
FuncGraphPtr func_graph = parse::ParsePythonCode(py_fn);
if (func_graph == nullptr) {
MS_LOG(EXCEPTION) << "Fail to parse overload function " << buffer.str();


@@ -50,7 +50,7 @@ FuncGraphPtr KPrim::GetBprop(const PrimitivePtr &prim) {
grad_op_child_scope_prefix + prim->name());
ScopeGuard scope_guard(scope);
py::function fn = prim->is_base() ? GetBpropFunction(prim->name()) : prim->cast<PrimitivePyPtr>()->GetBpropFunction();
-if (fn == nullptr || py::isinstance<py::none>(fn)) {
+if (!fn || py::isinstance<py::none>(fn)) {
MS_LOG(DEBUG) << "Fail to find bprop function for " << prim->name() << ".";
return nullptr;
}
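
Note: a callable fetched from Python can be missing in two ways: the lookup may return an empty handle, or it may return an object that is Python's None. The rewritten condition covers both without comparing against nullptr. A hedged sketch of the same check (HasUsableBprop is an illustrative name, not repository code):

#include <pybind11/pybind11.h>
namespace py = pybind11;

// True only if fn holds a real callable (neither an empty handle nor None).
bool HasUsableBprop(const py::function &fn) {
  return fn && !py::isinstance<py::none>(fn);
}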


@@ -285,7 +285,7 @@ Status BatchOp::MapColumns(std::pair<std::unique_ptr<TensorQTable>, CBatchInfo>
Status BatchOp::GetBatchSize(int32_t *batch_size, CBatchInfo info) {
#ifdef ENABLE_PYTHON
-if (batch_size_func_ != nullptr) {
+if (batch_size_func_) {
RETURN_IF_NOT_OK(InvokeBatchSizeFunc(batch_size, info));
} else {
(*batch_size) = start_batch_size_;


@@ -36,7 +36,7 @@ namespace parse {
FuncGraphPtr ParsePythonCode(const py::object &obj, const std::string &python_mod_get_parse_method) {
(void)python_adapter::set_python_scoped();
-if (obj == nullptr || py::isinstance<py::none>(obj)) {
+if (!obj || py::isinstance<py::none>(obj)) {
MS_LOG(ERROR) << "Parse the python code failed, obj is nullptr or none";
return nullptr;
}
@@ -265,7 +265,7 @@ FunctionBlockPtr Parser::ParseFunction(const py::object &node, const FunctionBlo
}
bool set_flag = UpdateFuncGraphFlags(ast_->function(), current_fg);
-if (ast_->obj() != ast_->function()) {
+if (!ast_->obj().is(ast_->function())) {
set_flag = set_flag && UpdateFuncGraphFlags(ast_->obj(), current_fg);
}
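
Note: for object-to-object comparisons the commit switches to .is(), which, like Python's "is", asks whether two handles refer to the same PyObject; value equality would be .equal(). A standalone sketch of the difference (not repository code):

#include <pybind11/embed.h>
namespace py = pybind11;

int main() {
  py::scoped_interpreter guard;
  py::list a;
  py::list b = a;  // another handle to the same list
  py::list c;      // a distinct, equally empty list
  bool same_object = a.is(b);     // true: identical PyObject*
  bool other_object = a.is(c);    // false: different objects
  bool equal_value = a.equal(c);  // true: Python == comparison
  return (same_object && !other_object && equal_value) ? 0 : 1;
}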


@@ -190,7 +190,7 @@ bool AddDFGraph(const std::map<std::string, ExecutorInfoPtr> &info, const py::di
ConvertObjectToTensors(init_params, &init_tensors);
(void)convertor.ConvertAllNode().InitParam(init_tensors).BuildGraph();
-if (broadcast_params != py::none()) {
+if (!broadcast_params.is_none()) {
if (!py::isinstance<py::dict>(broadcast_params)) {
MS_LOG(ERROR) << "Invalid broadcast params, it must be py::dict type";
return false;


@@ -103,7 +103,7 @@ py::tuple check_bprop_out(const py::object &grads_obj, const py::tuple &py_args)
py::object grad_dtype = grads[i].attr("dtype");
py::tuple arg_shape = py_args[i].attr("shape");
py::object arg_dtype = py_args[i].attr("dtype");
-if (!grad_shape.equal(arg_shape) || grad_dtype != arg_dtype) {
+if (!grad_shape.equal(arg_shape) || !grad_dtype.is(arg_dtype)) {
MS_EXCEPTION(ValueError) << "For user define net bprop, the gradient of the " << i
<< "th arg should have the same shape and dtype as the " << i << "th arg, but the "
<< i << "th arg shape: " << py::cast<py::str>(arg_shape)


@@ -63,7 +63,7 @@ class PrimitivePy : public Primitive {
const py::object &GetPyObj() const { return python_obj_; }
py::dict RunInfer(const py::tuple &args);
bool ObjHasAttr(const char *attr_name) { return py::hasattr(python_obj_, attr_name); }
-bool HasPyObj() { return python_obj_ != nullptr; }
+bool HasPyObj() { return python_obj_.operator bool(); }
PrimitivePtr Clone() override;
bool is_tuple_input_ = false;
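
Note: py::object's operator bool() is explicit, so a bool-returning member cannot simply return the object; the conversion has to be spelled out, either as the operator call used above or as a static_cast. A minimal sketch (Holder is an illustrative class, not PrimitivePy):

#include <pybind11/pybind11.h>
#include <utility>
namespace py = pybind11;

class Holder {
 public:
  explicit Holder(py::object obj) : obj_(std::move(obj)) {}
  // "return obj_;" would not compile here: the bool conversion is explicit.
  bool HasObj() const { return static_cast<bool>(obj_); }
 private:
  py::object obj_;
};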


@@ -24,5 +24,5 @@ bool PyObjectRef::operator==(const BaseRef &other) const {
return *this == utils::cast<PyObjectRef>(other);
}
-bool PyObjectRef::operator==(const PyObjectRef &other) const { return object_ == other.object_; }
+bool PyObjectRef::operator==(const PyObjectRef &other) const { return object_.is(other.object_); }
} // namespace mindspore


@@ -61,7 +61,7 @@ void PrintMeTensor(MeTensor* tensor) {
py::array tensor_data = TensorPy::AsNumpy(*tensor);
std::cout << std::string(py::str(tensor_data)) << std::endl;
std::cout << "tensor dtype is: " << std::string(tensor_data.dtype().str()) << std::endl;
std::cout << "tensor dtype is: " << py::str(tensor_data.dtype()) << std::endl;
}
FuncGraphPtr MakeFuncGraph(const PrimitivePtr prim, unsigned int nparam) {