forked from mindspore-Ecosystem/mindspore
fix some log bugs
commit 28607c906e
parent 49f012ad74
@@ -20,16 +20,24 @@
#include "backend/kernel_compiler/oplib/oplib.h"
#include "backend/kernel_compiler/common_utils.h"
#include "backend/session/anf_runtime_algorithm.h"
#include "base/core_ops.h"

namespace mindspore {
namespace kernel {
constexpr auto kDynamicShape = "DynamicShape";
static const std::set<std::string> host_kernel = {prim::kPrimDynamicShape->name(),
                                                  prim::kPrimDynamicBroadcastGradientArgs->name()};

void HostMetadataInfo(const CNodePtr &kernel_node, std::vector<std::shared_ptr<KernelBuildInfo>> *kernel_info_list) {
  MS_LOG(INFO) << "HostMetadataInfo.";
  MS_EXCEPTION_IF_NULL(kernel_node);
  MS_EXCEPTION_IF_NULL(kernel_info_list);

  std::string op_name = AnfAlgo::GetCNodeName(kernel_node);
  if (host_kernel.find(op_name) == host_kernel.end()) {
    MS_LOG(DEBUG) << "Host does not have op [" << op_name << "]";
    return;
  }

  std::vector<std::string> inputs_format{};
  std::vector<TypeId> inputs_type{};
  size_t input_num = AnfAlgo::GetInputTensorNum(kernel_node);
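Functionally, the hunk above turns host-side kernel selection into a set-membership dispatch: build-info metadata is produced only for the few ops the host backend implements, and every other op is logged at DEBUG level and skipped. A minimal Python sketch of the same pattern, with hypothetical names standing in for the MindSpore internals:

import logging

# Ops the "host backend" supports in this sketch (illustrative values
# mirroring the host_kernel set in the hunk above).
HOST_KERNELS = {"DynamicShape", "DynamicBroadcastGradientArgs"}

def host_metadata_info(op_name):
    """Return build metadata for op_name, or None if the host cannot run it."""
    if op_name not in HOST_KERNELS:
        logging.debug("Host does not have op [%s]", op_name)
        return None
    # For supported ops, collect per-input format/type info (elided here).
    return {"op": op_name, "inputs_format": [], "inputs_type": []}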
@@ -61,6 +61,7 @@ constexpr auto kTile = "Tile";
constexpr auto kBiasAddGrad = "BiasAddGrad";

// Arrays
+constexpr auto kDynamicShape = "DynamicShape";
constexpr auto kStack = "Stack";
constexpr auto kUnstack = "Unstack";
constexpr auto kTupleGetItem = "TupleGetItem";
@@ -167,7 +168,7 @@ inline const PrimitivePtr kPrimSparseGatherV2 = std::make_shared<Primitive>("Spa
inline const PrimitivePtr kPrimSparseToDense = std::make_shared<Primitive>("SparseToDense");
inline const PrimitivePtr kPrimShape = std::make_shared<Primitive>("Shape");
inline const PrimitivePtr kPrimStridedSlice = std::make_shared<Primitive>(kStridedSlice);
-inline const PrimitivePtr kPrimDynamicShape = std::make_shared<Primitive>("DynamicShape");
+inline const PrimitivePtr kPrimDynamicShape = std::make_shared<Primitive>(kDynamicShape);
inline const PrimitivePtr kPrimEmbeddingLookup = std::make_shared<Primitive>("EmbeddingLookup");
inline const PrimitivePtr kPrimEmbeddingLookupCommGrad = std::make_shared<Primitive>("EmbeddingLookupCommGrad");
inline const PrimitivePtr kPrimSize = std::make_shared<Primitive>("Size");
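Taken together, the two core_ops hunks above are a deduplication fix: the op name now lives in a single constant (kDynamicShape) and the Primitive definition references that constant, so a typo in one copy of the string can no longer produce a silently mismatched op name. A hypothetical Python sketch of the same idea (names invented for illustration, not MindSpore API):

# Route an op-name string through one constant so registration
# and lookup can never drift apart.
K_DYNAMIC_SHAPE = "DynamicShape"

REGISTRY = {}

def register(name):
    def deco(fn):
        REGISTRY[name] = fn
        return fn
    return deco

@register(K_DYNAMIC_SHAPE)  # not register("DynamicShape")
def dynamic_shape(x):
    return len(x)

assert REGISTRY[K_DYNAMIC_SHAPE] is dynamic_shape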
@@ -266,7 +266,7 @@ class Conv2d(_Conv):
    def extend_repr(self):
        s = 'input_channels={}, output_channels={}, kernel_size={},' \
            'stride={}, pad_mode={}, padding={}, dilation={}, ' \
-           'group={}, has_bias={}' \
+           'group={}, has_bias={}, ' \
            'weight_init={}, bias_init={}, format={}'.format(
                self.in_channels,
                self.out_channels,
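The Conv2d fix is easy to miss: Python concatenates adjacent string literals at compile time, so without the trailing ', ' the repr ran two fields together. A quick standalone demonstration (the values are invented):

# Adjacent string literals are glued together before .format() runs.
before = 'group={}, has_bias={}' \
         'weight_init={}'.format(1, False, 'normal')
after = 'group={}, has_bias={}, ' \
        'weight_init={}'.format(1, False, 'normal')

print(before)  # group=1, has_bias=Falseweight_init=normal  <- fields run together
print(after)   # group=1, has_bias=False, weight_init=normal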
@@ -35,7 +35,7 @@ class LearningRateScheduler(Callback):
        learning_rate_function (Function): The function about how to change the learning rate during training.

    Examples:
-        >>> from _lr_scheduler_callback import LearningRateScheduler
+        >>> from mindspore.train.callback import LearningRateScheduler
        >>> import mindspore.nn as nn
        >>> from mindspore.train import Model
        ...
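The corrected import matters in practice: _lr_scheduler_callback is a private module, so user code should import the callback from the public mindspore.train.callback path. To make the truncated docstring example concrete, here is a hedged sketch of typical usage; Net() and dataset are user-supplied placeholders, and the (lr, cur_step_num) signature of the schedule function is an assumption based on this callback's documented contract:

>>> def learning_rate_function(lr, cur_step_num):
...     # Assumed contract: receives the current lr and global step number,
...     # returns the (possibly decayed) lr. Here: decay 10x every 1000 steps.
...     if cur_step_num % 1000 == 0:
...         lr = lr * 0.1
...     return lr
...
>>> net = Net()  # placeholder: any user-defined nn.Cell
>>> optim = nn.Momentum(net.trainable_params(), learning_rate=0.1, momentum=0.9)
>>> model = Model(net, loss_fn=nn.SoftmaxCrossEntropyWithLogits(), optimizer=optim)
>>> model.train(1, dataset,  # placeholder: a user-created dataset
...             callbacks=[LearningRateScheduler(learning_rate_function)])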