code warning fix

l00591931 2022-01-17 16:38:29 +08:00
parent dcf3095302
commit 5c739625f6
13 changed files with 43 additions and 42 deletions


@@ -50,7 +50,7 @@ class TileEliminater : public AnfVisitor {
auto fn = [this]() -> size_t {
auto x_shape_base = x_->Shape();
- auto x_size = 0;
+ uint64_t x_size = 0;
ShapePtr x_shape;
if (x_shape_base && (x_shape = x_shape_base->cast<ShapePtr>())) {
x_size = x_shape->shape().size();
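
The hunk above is the usual fix for an integer-conversion warning: `auto x_size = 0;` deduces `int`, so assigning `shape().size()` (a `size_t`) and returning it through a `size_t` lambda both narrow or change signedness. A minimal, self-contained sketch of the same pattern (function and variable names are hypothetical, not from the MindSpore sources):

```cpp
#include <cstdint>
#include <vector>

// Hypothetical reduction of the warning fixed above.
uint64_t RankOf(const std::vector<int64_t> &shape) {
  // Before: `auto rank = 0;` deduces int, and `rank = shape.size()` then
  // converts size_t -> int (-Wconversion / MSVC C4267).
  uint64_t rank = 0;  // After: a 64-bit unsigned counter, as in the diff.
  if (!shape.empty()) {
    rank = shape.size();
  }
  return rank;
}
```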


@@ -153,7 +153,7 @@ class Optimizer : public std::enable_shared_from_this<Optimizer> {
return func_graph;
}
// Optimizer step counter;
- int64_t counter = 1;
+ int counter = 1;
bool changes = true;
// If no changes since last renormalization, then no need to do the renormalization again.
// Set the initial value to true, so the renormalization can be executed once if it's the
@@ -180,7 +180,7 @@ class Optimizer : public std::enable_shared_from_this<Optimizer> {
if (is_untyped_generated_) {
std::transform(func_graph->parameters().begin(), func_graph->parameters().end(),
std::back_inserter(maybe_new_args_spec),
- [](AnfNodePtr param) -> AbstractBasePtr { return param->abstract(); });
+ [](const AnfNodePtr &param) -> AbstractBasePtr { return param->abstract(); });
func_graph = pipeline::Renormalize(resource_ptr, func_graph, maybe_new_args_spec);
clear_is_untyped_generated();
} else {
@@ -189,7 +189,7 @@ class Optimizer : public std::enable_shared_from_this<Optimizer> {
} else {
std::transform(func_graph->parameters().begin(), func_graph->parameters().end(),
std::back_inserter(maybe_new_args_spec),
- [](AnfNodePtr param) -> AbstractBasePtr { return param->abstract(); });
+ [](const AnfNodePtr &param) -> AbstractBasePtr { return param->abstract(); });
func_graph = pipeline::Renormalize(resource_ptr, func_graph, maybe_new_args_spec);
}
}
@@ -241,12 +241,12 @@ class Optimizer : public std::enable_shared_from_this<Optimizer> {
void enable_watch_renormalize() { is_watch_renormalize_ = true; }
void disable_watch_renormalize() { is_watch_renormalize_ = false; }
- bool is_watch_renormalize() { return is_watch_renormalize_; }
+ bool is_watch_renormalize() const { return is_watch_renormalize_; }
void set_enable(bool enable) { is_enable_ = enable; }
- bool traverse_nodes_first() { return traverse_nodes_first_; }
+ bool traverse_nodes_first() const { return traverse_nodes_first_; }
- bool is_first_order_j() { return is_first_order_j_; }
+ bool is_first_order_j() const { return is_first_order_j_; }
void set_is_first_order_j(bool is_first_order_j) { is_first_order_j_ = is_first_order_j; }
struct {
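
Two patterns recur in the Optimizer hunks: read-only accessors gain a `const` qualifier, and lambda parameters holding `AnfNodePtr` (a `std::shared_ptr`) are taken by const reference instead of by value, which avoids an atomic refcount bump per element. A small sketch of both, with hypothetical names:

```cpp
#include <algorithm>
#include <iterator>
#include <memory>
#include <vector>

struct Node {
  int abstract = 0;
};
using NodePtr = std::shared_ptr<Node>;

class Collector {
 public:
  // const-qualified accessor: usable on const objects and satisfies
  // const-correctness checkers.
  bool enabled() const { return enabled_; }

  // Taking the shared_ptr by const reference avoids copying it per element.
  static std::vector<int> Abstracts(const std::vector<NodePtr> &nodes) {
    std::vector<int> out;
    std::transform(nodes.begin(), nodes.end(), std::back_inserter(out),
                   [](const NodePtr &node) { return node->abstract; });
    return out;
  }

 private:
  bool enabled_{false};
};
```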


@@ -36,20 +36,20 @@ class PassGroup {
// Delete graph pass before the pass manager is freed.
bool DeletePass(const std::string &pass_name);
// Run passes added in pass manager on the input graph
- // @param [inout] graph The graph to be optimized
+ // @param [in out] graph The graph to be optimized
// @return true, graph changed
// @return false, graph not changed
bool Run(const FuncGraphPtr &func_graph) const;
// Run the given graph passes on the input graph
- // @param [inout] func_graph The graph to be optimized
+ // @param [in out] func_graph The graph to be optimized
// @param [in] passes The given graph passes
- // @param [inout] res MatchResult used to collect all matched patterns and nodes
+ // @param [in out] res MatchResult used to collect all matched patterns and nodes
// @return true, graph changed
// @return false, graph not changed
bool Run(const FuncGraphPtr &func_graph, const std::vector<PythonPassPtr> &passes, const MatchResultPtr &res) const;
std::string name() const { return name_; }
void SetRunOnlyOnce(bool run_only_once) { run_only_once_ = run_only_once; }
- size_t size() { return passes_.size(); }
+ size_t size() const { return passes_.size(); }
private:
const std::string name_;


@@ -88,7 +88,7 @@ class Prim final : public Pattern {
public:
Prim() { unique_name_ = std::to_string(g_id_++); }
~Prim() = default;
- Prim(vector<py::object> prim_objs, string name) : name_(name) {
+ Prim(const vector<py::object> &prim_objs, const string &name) : name_(name) {
unique_name_ = std::to_string(g_id_++) + "Prim_" + name;
for (auto &prim_obj : prim_objs) {
if (py::isinstance<PrimitivePyAdapter>(prim_obj)) {
@@ -123,7 +123,7 @@ class Call final : public Pattern {
public:
Call() { unique_name_ = std::to_string(g_id_++); }
~Call() = default;
- Call(PatternPtr prim_pattern, vector<PatternPtr> inputs) {
+ Call(const PatternPtr &prim_pattern, const vector<PatternPtr> &inputs) {
// NOTE: should_replace is ignored in this case, since each sub-pattern has its own setting
prim_pattern_ = prim_pattern;
unique_name_ = std::to_string(g_id_++) + "Call_" + prim_pattern->unique_name();
@@ -200,7 +200,7 @@ class NewTensor final : public Pattern {
public:
NewTensor() { unique_name_ = std::to_string(g_id_++); }
~NewTensor() = default;
- explicit NewTensor(tensor::TensorPtr input_tensor) : input_tensor_(input_tensor) {
+ explicit NewTensor(const tensor::TensorPtr &input_tensor) : input_tensor_(input_tensor) {
unique_name_ = std::to_string(g_id_++) + "NewTensor";
}
MS_DECLARE_PARENT(NewTensor, Pattern);
@@ -216,7 +216,8 @@ class NewTensor final : public Pattern {
class NewParameter final : public Pattern {
public:
NewParameter() { unique_name_ = std::to_string(g_id_++); }
- explicit NewParameter(string para_name, tensor::TensorPtr default_tensor, bool requires_grad, bool layerwise_parallel)
+ explicit NewParameter(const string &para_name, tensor::TensorPtr default_tensor, bool requires_grad,
+                       bool layerwise_parallel)
: para_name_(para_name), requires_grad_(requires_grad), layerwise_parallel_(layerwise_parallel) {
unique_name_ = std::to_string(g_id_++) + "NewParameter_" + para_name;
default_tensor_ = std::make_shared<tensor::Tensor>(*default_tensor.get());
@@ -227,22 +228,22 @@ class NewParameter final : public Pattern {
MatchResultPtr match(const AnfNodePtr &node) override {
MS_LOG(EXCEPTION) << "Find NewParameter in pattern, NewParameter should only appear in the target.\n";
}
- string para_name() { return para_name_; }
- tensor::TensorPtr default_tensor() { return default_tensor_; }
- bool requires_grad() { return requires_grad_; }
- bool layerwise_parallel() { return layerwise_parallel_; }
- bool built() { return built_; }
+ const string &para_name() const { return para_name_; }
+ tensor::TensorPtr default_tensor() const { return default_tensor_; }
+ bool requires_grad() const { return requires_grad_; }
+ bool layerwise_parallel() const { return layerwise_parallel_; }
+ bool built() const { return built_; }
void set_built(bool built) { built_ = built; }
void reset() override { built_ = false; }
- bool should_last() { return last_across_passes_; }
+ bool should_last() const { return last_across_passes_; }
void set_last(bool last) { last_across_passes_ = last; }
private:
string para_name_;
- bool requires_grad_;
- bool layerwise_parallel_;
+ bool requires_grad_{false};
+ bool layerwise_parallel_{false};
bool last_across_passes_{false};
- bool built_;
+ bool built_{false};
tensor::TensorPtr default_tensor_;
};
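
Besides the const accessors, the NewParameter hunk gives the plain `bool` members in-class default initializers (`bool built_{false};`), so they hold a defined value regardless of which constructor runs and uninitialized-member warnings go away. A reduced sketch, with a hypothetical class name:

```cpp
// Hypothetical reduction of the default-member-initializer change above.
class BuildFlag {
 public:
  BuildFlag() = default;  // built_ already has a defined value
  bool built() const { return built_; }
  void set_built(bool built) { built_ = built; }

 private:
  // Before: `bool built_;` stays indeterminate until some constructor sets it.
  // After: the brace initializer supplies a defined default.
  bool built_{false};
};
```
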
@@ -253,7 +254,7 @@ class Imm final : public Pattern {
~Imm() = default;
MS_DECLARE_PARENT(Imm, Pattern);
MatchResultPtr match(const AnfNodePtr &node) override;
- int value() { return value_; }
+ int value() const { return value_; }
private:
int64_t value_;
@@ -263,8 +264,8 @@ class MatchResult {
public:
MatchResult() {}
~MatchResult() = default;
- void add_entry(PatternPtr pattern, AnfNodePtr node) { match_result_[pattern] = node; }
- const PatternNodeMap &result() { return match_result_; }
+ void add_entry(const PatternPtr &pattern, const AnfNodePtr &node) { match_result_[pattern] = node; }
+ const PatternNodeMap &result() const { return match_result_; }
AnfNodePtr get_node(const PatternPtr &pattern);
void merge(const MatchResultPtr &other_result);
void clear() { match_result_.clear(); }


@@ -59,11 +59,11 @@ class PyPassManager {
PassGroupPtr GetPassGroup(Phase phase);
MatchResultPtr GetMatchResult() { return res_; }
void SetRenorm(bool should_renorm) { should_renorm_ = should_renorm; }
- bool ShouldRenorm() { return should_renorm_; }
+ bool ShouldRenorm() const { return should_renorm_; }
void SetReOpt(bool should_reopt) { should_reopt_ = should_reopt; }
- bool ShouldReOpt() { return should_reopt_; }
- void SetResource(pipeline::ResourcePtr resource) { resource_ = resource; }
- pipeline::ResourcePtr GetResource() { return resource_; }
+ bool ShouldReOpt() const { return should_reopt_; }
+ void SetResource(const pipeline::ResourcePtr &resource) { resource_ = resource; }
+ pipeline::ResourcePtr GetResource() const { return resource_; }
void ClearRes();
void ClearPipelineRes() {
resource_ = nullptr;


@@ -675,7 +675,7 @@ bool GeOptimizeAction(const ResourcePtr &res) { return OptimizeAction(res, kGePa
bool VmOptimizeAction(const ResourcePtr &res) {
#if ((defined ENABLE_CPU) && (!defined _WIN32))
if (ps::PSContext::instance()->is_ps_mode()) {
- kVmPasses.push_back({"server_communication_op_fusion", ps::Util::FuseServerCommOps});
+ kVmPasses.push_back(PassItem("server_communication_op_fusion", ps::Util::FuseServerCommOps));
}
#endif
auto ret = OptimizeAction(res, kVmPasses);


@@ -136,8 +136,9 @@ AnfNodePtr FunctionBlock::ReadVariable(const std::string &var_name) {
static const auto use_fallback = (parser_.support_fallback() != "0");
if (use_fallback) {
MS_LOG(DEBUG) << "Update global params of block: " << ToString()
<< ", with previous block: " << block->ToString() << ",\nCurrent: " << py::str(global_py_params())
<< "\nInsert: " << py::str(block->global_py_params());
<< ", with previous block: " << block->ToString()
<< ",\nCurrent: " << py::str(const_cast<py::dict &>(global_py_params()))
<< "\nInsert: " << py::str(const_cast<py::dict &>(block->global_py_params()));
UpdateGlobalPyParam(block->global_py_params());
}
return res;


@@ -87,7 +87,7 @@ class FunctionBlock : public std::enable_shared_from_this<FunctionBlock> {
bool is_dead_block() const { return is_dead_block_; }
void SetAsDeadBlock();
- py::dict &global_py_params() { return global_py_params_; }
+ const py::dict &global_py_params() { return global_py_params_; }
void set_global_py_params(const py::dict &symbols) { global_py_params_ = symbols; }
void AddGlobalPyParam(const std::string &name, const py::object &obj) { global_py_params_[py::str(name)] = obj; }
void UpdateGlobalPyParam(const py::dict &symbols) {


@@ -79,7 +79,7 @@ class LoopContext {
LoopContext(std::stack<Loop> *loops, const FunctionBlockPtr &header, const AnfNodePtr &iterator) : loops_(loops) {
loops_->emplace(header, iterator, nullptr);
}
- ~LoopContext() { loops_->pop(); }
+ ~LoopContext() noexcept { loops_->pop(); }
const FunctionBlockPtr &EndBlock() const { return loops_->top().end; }
private:
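
Destructors have been implicitly `noexcept` since C++11, so spelling it out on `~LoopContext()` does not change behavior; it documents the guarantee and quiets linters that flag destructors whose bodies call other functions. A tiny RAII sketch of the same idea, with hypothetical names:

```cpp
#include <stack>

// Hypothetical guard mirroring LoopContext: push a frame on construction,
// pop it on destruction.
class LoopGuard {
 public:
  explicit LoopGuard(std::stack<int> *loops) : loops_(loops) { loops_->push(0); }
  // Explicit noexcept is redundant with the implicit rule, but states intent.
  ~LoopGuard() noexcept { loops_->pop(); }

 private:
  std::stack<int> *loops_;
};
```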


@@ -246,8 +246,7 @@ bool DynamicParser::IsDynamicCell(const py::object &cell) {
// get the name of input args as the initialize of dynamic_variables
ParseInputArgs(ast, fn_node);
// parse body context
- bool ret = false;
- ret = ParseBodyContext(ast, fn_node);
+ bool ret = ParseBodyContext(ast, fn_node);
cell_input_args_.clear();
return ret;
}


@@ -163,9 +163,9 @@ class SymbolResolver {
// resolve symbol in namespace and save it in result_;
bool Resolve();
- SymbolPtr symbol() { return symbol_; }
+ SymbolPtr symbol() const { return symbol_; }
- const py::object &result() { return result_; }
+ const py::object &result() const { return result_; }
private:
// namespace where the symbol locates


@@ -1447,7 +1447,7 @@ py::object GraphExecutorPy::Run(const py::tuple &args, const py::object &phase_o
} // namespace pipeline
FuncGraphPtr GraphExecutorPy::BuildGraph(const py::dict &init_params, const std::string &phase,
- const py::object &broadcast_params) {
+ const py::object &broadcast_params) const {
#ifdef ENABLE_D
return BuildDFGraph(info_, init_params, phase, broadcast_params);
#else


@@ -97,7 +97,7 @@ class GraphExecutorPy : public std::enable_shared_from_this<GraphExecutorPy> {
bool HasCompiled(const std::string &phase) const;
FuncGraphPtr BuildGraph(const py::dict &init_params, const std::string &phase,
- const py::object &broadcast_params = {});
+ const py::object &broadcast_params = {}) const;
void UpdataParamNodeDefaultInput(const std::string &phase,
const std::unordered_map<std::string, tensor::TensorPtr> &params);
void RunInitGraph(const py::dict &init_params, const std::string &phase) const;
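
The last two hunks belong together: once `BuildGraph` is const-qualified, the qualifier has to appear on both the declaration in the header and the out-of-line definition, otherwise the definition no longer matches the declaration and fails to compile. A minimal illustration with a hypothetical class:

```cpp
// Hypothetical pair: const must match between declaration and definition.
class Executor {
 public:
  int BuildGraph(int phase) const;  // declaration carries const
};

int Executor::BuildGraph(int phase) const {  // definition repeats it
  return phase + 1;
}
```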