fix codex pclint and code check2.0
commit 3bca7466dd
parent 66fe07a009
@@ -762,7 +762,7 @@ bool KPynativeCellImpl::BackPropagateOneCNodeWithBPropFuncGraph(const CNodePtr &
     (void)node_list.insert(node_list.end(), k_node_list.begin(), k_node_list.end());
     // out;
     node_list.push_back(adjoint->k_node());
-    // dout;
+    // dout
     node_list.push_back(adjoint->RealDout());
   }
   // Back propagate process
@@ -226,7 +226,7 @@ ValuePtr ConvertDataClass(const py::object &obj) {
   MS_LOG(DEBUG) << "Converting dataclass";
   // Maybe the obj is dataclass define
   auto desc = py::cast<std::string>(python_adapter::CallPyObjMethod(obj, PYTHON_GET_OBJ_DESC, obj));
-  // desc has format "<class xxxx>", strip the '<' and '>' by offset 1;
+  // desc has format "<class xxxx>", strip the '<' and '>' by offset 1
   auto converted = std::make_shared<ClassObject>(obj, std::string(desc.begin() + 1, desc.end() - 1));
   return converted;
 }
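For reference, a minimal standalone sketch of the iterator-offset trimming that the hunk above documents; the sample string and main() are illustrative, not part of the repository:

#include <iostream>
#include <string>

int main() {
  // desc has the format "<class xxxx>"; building the new string from
  // begin() + 1 and end() - 1 drops the leading '<' and the trailing '>'.
  std::string desc = "<class SampleDataClass>";
  std::string stripped(desc.begin() + 1, desc.end() - 1);
  std::cout << stripped << std::endl;  // prints: class SampleDataClass
  return 0;
}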
@@ -349,7 +349,10 @@ OptPassGroupMap GetOptPassesA(const opt::irpass::OptimizeIRPassLib &irpass) {
 
 OptPassGroupMap GetA1A2(const opt::irpass::OptimizeIRPassLib &irpass) {
   auto opt_a = GetOptPassesA(irpass);
-  OptPassGroupMap a1_a2({opt_a[0], opt_a[1], opt_a[2]});
+  constexpr auto opt_a1_index = 0;
+  constexpr auto parameter_eliminate = 1;
+  constexpr auto opt_a2_index = 2;
+  OptPassGroupMap a1_a2({opt_a[opt_a1_index], opt_a[parameter_eliminate], opt_a[opt_a2_index]});
   return a1_a2;
 }
 
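The change above only swaps the bare 0/1/2 subscripts for named constants so that the pass each index selects is visible at the use site. A minimal standalone sketch of the same pattern, using an illustrative std::array rather than the real OptPassGroupMap:

#include <array>
#include <iostream>
#include <string>

int main() {
  // Named constexpr indices document which element each subscript refers to.
  constexpr size_t opt_a1_index = 0;
  constexpr size_t parameter_eliminate = 1;
  constexpr size_t opt_a2_index = 2;
  std::array<std::string, 3> passes = {"opt_a1", "parameter_eliminate", "opt_a2"};
  std::cout << passes[opt_a1_index] << ", " << passes[parameter_eliminate] << ", " << passes[opt_a2_index] << std::endl;
  return 0;
}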
@@ -26,7 +26,6 @@
 
 namespace mindspore::pipeline {
 namespace {
-
 class OrderEnforcer {
  public:
   explicit OrderEnforcer(const FuncGraphPtr &func_graph) : func_graph_(func_graph), manager_(func_graph->manager()) {
@@ -573,6 +573,5 @@ AbstractBasePtr InferImplTransData(const AnalysisEnginePtr &, const PrimitivePtr
   MS_EXCEPTION_IF_NULL(output);
   return output;
 }
-
 }  // namespace abstract
 }  // namespace mindspore
@@ -525,10 +525,10 @@ void FuncGraph::SetDefaultValues(const std::vector<std::string> &name_list, cons
 void FuncGraph::ClearDefaultValues() { parameter_default_value_.clear(); }
 
 size_t FuncGraph::GetDefaultValueCount() {
-  int null_count =
+  int64_t null_count =
     std::count_if(parameter_default_value_.begin(), parameter_default_value_.end(),
                   [](const std::pair<std::string, AnfNodePtr> &pair) { return IsValueNode<Null>(pair.second); });
-  return parameter_default_value_.size() - IntToSize(null_count);
+  return parameter_default_value_.size() - LongToSize(null_count);
 }
 
 AnfNodePtr FuncGraph::GetVariableArgParameter() {
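The two edits in this hunk widen the counter to the type std::count_if actually returns and switch to the matching size conversion. A minimal standalone sketch of the same pattern over a plain std::map; LongToSize here is a hypothetical stand-in for MindSpore's helper:

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <map>
#include <string>

// Hypothetical stand-in for the LongToSize helper used in the diff above.
size_t LongToSize(int64_t v) { return v < 0 ? 0 : static_cast<size_t>(v); }

int main() {
  std::map<std::string, int> defaults = {{"a", 0}, {"b", 1}, {"c", 0}};
  // std::count_if returns the container's difference_type (a 64-bit signed
  // integer on common platforms), so an int64_t local avoids the narrowing
  // that static checkers flag when the result is stored in a plain int.
  int64_t null_count = std::count_if(defaults.begin(), defaults.end(),
                                     [](const std::pair<const std::string, int> &p) { return p.second == 0; });
  std::cout << defaults.size() - LongToSize(null_count) << std::endl;  // prints: 1
  return 0;
}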
@@ -19,7 +19,8 @@
 namespace mindspore {
 namespace ops {
 namespace {
-int64_t get_good_ld(const int64_t dim, const int64_t type_size) {
+constexpr int64_t type_size = 4;
+int64_t get_good_ld(const int64_t dim) {
   int64_t ld = ((dim + (64 / type_size) - 1) / (64 / type_size)) * (64 / type_size);
   if (ld * 256 == 0) {
     return ld + 64 / type_size;
@@ -54,9 +55,8 @@ AbstractBasePtr LstmInfer(const PrimitivePtr &primitive, const std::vector<Abstr
 
   std::vector<int64_t> y_shape = {x_input_shape[0], x_input_shape[1], hidden_size * num_directions};
 
-  int64_t type_size = 4;
-  int64_t gates_ws_ld = get_good_ld(hidden_size * 4, type_size);
-  int64_t states_ws_ld = get_good_ld(std::max(hidden_size, input_size), type_size);
+  int64_t gates_ws_ld = get_good_ld(hidden_size * 4);
+  int64_t states_ws_ld = get_good_ld(std::max(hidden_size, input_size));
   int64_t ws_gates_size = num_layers * num_directions * x_input_shape[0] * x_input_shape[1] * gates_ws_ld * type_size;
   int64_t ws_states_size =
     (num_layers + 1) * num_directions * (x_input_shape[0] + 1) * x_input_shape[1] * states_ws_ld * type_size;
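These two LSTM hunks fold the 4-byte (float32) type_size into a file-local constexpr instead of threading it through every get_good_ld call. A minimal sketch of the rounding arithmetic get_good_ld performs; the helper name RoundUpToBlock is illustrative, not from the repository, and the extra branch visible in the hunk above is omitted:

#include <cstdint>
#include <iostream>

constexpr int64_t type_size = 4;  // bytes per element, hard-coded for float32 after this change

// ((dim + block - 1) / block) * block rounds dim up to the next multiple of
// block, where block = 64 / type_size elements, i.e. a 64-byte boundary.
int64_t RoundUpToBlock(int64_t dim) {
  const int64_t block = 64 / type_size;  // 16 float32 elements per 64-byte line
  return ((dim + block - 1) / block) * block;
}

int main() {
  std::cout << RoundUpToBlock(50) << std::endl;   // 64: next multiple of 16 above 50
  std::cout << RoundUpToBlock(400) << std::endl;  // 400: already a multiple of 16
  return 0;
}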