clean pylint

jinyaohui 2020-05-18 16:42:35 +08:00
parent f23bfe0d71
commit 5a914994ba
607 changed files with 2425 additions and 2036 deletions
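Most of the hunks below apply the import layout that pylint's wrong-import-order (C0411) and ungrouped-imports (C0412) checks expect: standard-library modules first, then third-party packages, then first-party (mindspore) and local modules. Within the mindspore group the changed files also put plain import statements ahead of from-imports and sort them by module path, which pylint itself does not require. A minimal sketch of that layout (the module names are illustrative, not taken from any single file in this commit):

# standard library
import os
import random
# third-party
import numpy as np
import pytest
# first-party: plain imports first, then from-imports, sorted by module path
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P

Running pylint on a file whose imports are grouped differently reports C0411/C0412 messages; reordering the block as above, as these hunks do, silences them.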


@ -14,6 +14,7 @@
# ============================================================================
'''Remove after MindData merge to MindSpore '''
import numpy as np
from mindspore import Tensor


@ -19,13 +19,14 @@
import math
import numpy as np
import mindspore.common.dtype as mstype
import mindspore.ops.functional as F
from mindspore import nn
from mindspore.common.initializer import TruncatedNormal
from mindspore.common.tensor import Tensor
from mindspore.model_zoo.Bert_NEZHA.bert_model import SaturateCast, RelaPosEmbeddingsGenerator
from mindspore.ops import operations as P
class BertAttentionQueryKeyMul(nn.Cell):


@ -17,22 +17,19 @@
import numpy as np
import mindspore.common.dtype as mstype
from mindspore import context, nn
from mindspore.model_zoo.Bert_NEZHA import GetNextSentenceOutput, BertNetworkWithLoss
from mindspore.model_zoo.Bert_NEZHA.bert_model import BertConfig, \
EmbeddingLookup, EmbeddingPostprocessor, BertOutput, \
BertAttention, BertSelfAttention, SaturateCast, TruncatedNormal, \
BertEncoderCell, BertTransformer, CreateAttentionMaskFromInputMask, BertModel
from .bert_attention_submodules import BertAttentionQueryKeyMul, BertAttentionRelativePositionKeys, \
BertAttentionMaskBackward, BertAttentionSoftmax, BertAttentionRelativePositionValues, BertDense
from ..mindspore_test import mindspore_test
from ..pipeline.gradient.compare_gradient import \
pipeline_for_compare_inputs_grad_with_numerical_diff_for_group_by_group_config, \
pipeline_for_compare_params_grad_with_numerical_diff_for_group_by_group_config
verification_set = {
'inputs': [


@ -16,6 +16,7 @@
"""Test bert compare with npy."""
import numpy as np
import mindspore.common.dtype as mstype
from mindspore import context
from mindspore.model_zoo.Bert_NEZHA.bert_model import BertAttention, SaturateCast, \
@ -25,12 +26,11 @@ from mindspore.model_zoo.Bert_NEZHA.bert_model import BertAttention, SaturateCas
BertSelfAttention, CreateAttentionMaskFromInputMask, \
RelaPosMatrixGenerator, BertOutput, \
RelaPosEmbeddingsGenerator
from .bert_attention_submodules import BertAttentionQueryKeyMul, BertAttentionRelativePositionKeys, BertAttentionMask, \
BertAttentionSoftmax, BertAttentionRelativePositionValues, BertDense
from ..mindspore_test import mindspore_test
from ..pipeline.forward.compare_forward import \
pipeline_for_compare_forward_with_npy_for_group_by_group_config_using_group_policy
verification_set = {
'inputs': [


@ -17,11 +17,10 @@
from mindspore import context
from mindspore.ops import operations as P
from ..mindspore_test import mindspore_test
from ..pipeline.gradient.compare_gradient import \
pipeline_for_compare_inputs_grad_with_numerical_diff_for_group_by_group_config, \
pipeline_for_compare_inputs_jacobian_with_numerical_diff_for_group_by_group_config
# from ...vm_impl import *


@ -15,20 +15,20 @@
"""Test bert submodules."""
import os
import numpy as np
from mindspore import Tensor
from mindspore import nn, context
from mindspore.model_zoo.Bert_NEZHA import EmbeddingLookup, GetMaskedLMOutput, \
BertConfig, BertPreTraining, BertNetworkWithLoss
from mindspore.model_zoo.Bert_NEZHA.bert_model import BertModel
from ..mindspore_test import mindspore_test
from ..pipeline.forward.compile_forward import pipeline_for_compile_forward_anf_graph_for_case_by_case_config, \
pipeline_for_compile_forward_ge_graph_for_case_by_case_config
from ..pipeline.gradient.compile_gradient import pipeline_for_compile_grad_anf_graph_for_case_by_case_config, \
pipeline_for_compile_grad_ge_graph_for_case_by_case_config
from ..utils.block_util import get_output_cell
from ...dataset_mock import MindData
# pylint: disable=missing-docstring, W0612, arguments-differ


@ -18,9 +18,9 @@
from mindspore import context
from mindspore.nn.optim import Lamb
from ..mindspore_test import mindspore_test
from ..pipeline.gradient.check_training import pipeline_for_check_model_loss_for_case_by_case_config
from ..utils.model_util import Linreg
from ..utils.model_util import SquaredLoss
network = Linreg(2)
num_epochs = 1000


@ -19,7 +19,6 @@ import numpy as np
from mindspore import context
from mindspore.ops import operations as P
from ..mindspore_test import mindspore_test
from ..pipeline.gradient.compare_gradient import pipeline_for_compare_inputs_grad_with_npy_for_case_by_case_config
from ...vm_impl import *


@ -15,8 +15,8 @@
"""Component that Check if the function raises the expected Exception."""
import sys
import pytest
from ...components.icomponent import IExectorComponent
from ...utils import keyword


@ -16,8 +16,8 @@
"""Component that verify if the model can converge to expected loss."""
from ...components.icomponent import IExectorComponent
from ...utils import keyword
from ...utils.model_util import Model
class LossVerifierEC(IExectorComponent):


@ -18,8 +18,8 @@
import numpy as np
from ...components.icomponent import IFacadeComponent
from ...utils import keyword
from ...utils.facade_util import get_block_config, fill_block_config
class MeFacadeFC(IFacadeComponent):


@ -16,8 +16,8 @@
"""Component that generate dataset for linear regression."""
from ...components.icomponent import IDataComponent
from ...utils import keyword
from ...utils.dataset_util import generate_dataset_for_linear_regression
class GenerateDataSetForLRDC(IDataComponent):


@ -16,12 +16,12 @@
"""Component that generate inputs for specified shape type."""
import numpy as np
from mindspore.common.tensor import Tensor
from ...components.icomponent import IDataComponent
from ...utils import keyword
from ...utils.config_util import get_input_config
from ...utils.other_util import shape2tensor
class GenerateFromShapeDC(IDataComponent):


@ -16,8 +16,8 @@
"""Component that load inputs from npy file."""
from ...components.icomponent import IDataComponent
from ...utils import keyword
from ...utils.npy_util import load_data_from_npy_or_shape
class LoadFromNpyDC(IDataComponent):


@ -16,8 +16,8 @@
"""Component that comparing forward result with baseline functions."""
from ...components.icomponent import IVerifierComponent
from ...utils import keyword
from ...utils.compare_util import compare
class CompareWithVC(IVerifierComponent):


@ -16,8 +16,8 @@
"""Component that comparing gradients with baseline functions."""
from ...components.icomponent import IVerifierComponent
from ...utils import keyword
from ...utils.compare_util import compare
class CompareGradientWithVC(IVerifierComponent):


@ -16,11 +16,12 @@
"""Component that comparing results with expectation serialized as npy file."""
import numpy as np
from ...components.icomponent import IVerifierComponent
from ...utils import keyword
from ...utils.npy_util import load_data_from_npy_or_shape
from ...utils.other_util import to_numpy_list, to_numpy
from ...utils.verifier_util import tolerance_assert
class LoadFromNpyVC(IVerifierComponent):


@ -16,8 +16,8 @@
"""Component that verify shape and type."""
from ...components.icomponent import IVerifierComponent
from ...utils import keyword
from ...utils.other_util import to_numpy_list
class ShapeTypeVC(IVerifierComponent):


@ -17,6 +17,7 @@
import logging
import pytest
from .components.icomponent import IDataComponent, IBuilderComponent, IExectorComponent, \
IVerifierComponent, IFIPolicyComponent, IERPolicyComponent, IComponent, \
IFacadeComponent


@ -15,18 +15,18 @@
"""Pipelines for forward comparison."""
from ...components.executor.exec_forward import IdentityEC
from ...components.expect_result_policy.cartesian_product_on_id_for_expect_result import IdCartesianProductERPC
from ...components.facade.me_facade import MeFacadeFC
from ...components.function.init_params_with_rand_and_run_block import RunBlockWithRandParamBC
from ...components.function.run_block import RunBlockBC
from ...components.function_inputs_policy.cartesian_product_on_group_for_function_inputs import \
GroupCartesianProductFIPC
from ...components.function_inputs_policy.cartesian_product_on_id_for_function_inputs import IdCartesianProductFIPC
from ...components.inputs.generate_inputs_from_shape import GenerateFromShapeDC
from ...components.inputs.load_inputs_from_npy import LoadFromNpyDC
from ...components.verifier.compare_forward import CompareWithVC
from ...components.verifier.verify_expect_from_npy import LoadFromNpyVC
# pylint: disable=W0105
"""


@ -15,13 +15,13 @@
"""Pipelines for forward computing."""
from ...components.executor.check_exceptions import CheckExceptionsEC
from ...components.executor.exec_forward import IdentityEC
from ...components.facade.me_facade import MeFacadeFC
from ...components.function.compile_block import CompileBlockBC
from ...components.function.run_block import RunBlockBC
from ...components.function_inputs_policy.cartesian_product_on_id_for_function_inputs import IdCartesianProductFIPC
from ...components.inputs.generate_inputs_from_shape import GenerateFromShapeDC
# pylint: disable=W0105
"""


@ -16,10 +16,10 @@
"""Pipelines for exception checking."""
from ...components.executor.check_exceptions import CheckExceptionsEC
from ...components.facade.me_facade import MeFacadeFC
from ...components.function.get_function_from_config import IdentityBC
from ...components.function_inputs_policy.cartesian_product_on_id_for_function_inputs import IdCartesianProductFIPC
from ...components.inputs.get_inputs_from_config import IdentityDC
# pylint: disable=W0105
"""


@ -15,13 +15,13 @@
"""Pipelines for shape and type checking."""
from ...components.executor.exec_forward import IdentityEC
from ...components.expect_result_policy.cartesian_product_on_group_for_expect_result \
import GroupCartesianProductERPC
from ...components.function.get_function_from_config import IdentityBC
from ...components.function_inputs_policy.cartesian_product_on_group_for_function_inputs \
import GroupCartesianProductFIPC
from ...components.inputs.get_inputs_from_config import IdentityDC
from ...components.verifier.verify_shapetype import ShapeTypeVC
# pylint: disable=W0105


@ -15,11 +15,11 @@
"""Pipelines for loss checking."""
from ...components.executor.exec_and_verify_model_loss import LossVerifierEC
from ...components.facade.me_facade import MeFacadeFC
from ...components.function.get_function_from_config import IdentityBC
from ...components.function_inputs_policy.cartesian_product_on_id_for_function_inputs import IdCartesianProductFIPC
from ...components.inputs.generate_dataset_for_linear_regression import GenerateDataSetForLRDC
# pylint: disable=W0105
"""


@ -15,20 +15,20 @@
"""Pipelines for gradients comparison."""
from ...components.executor.check_gradient_wrt_inputs import CheckGradientWrtInputsEC
from ...components.executor.check_gradient_wrt_params import CheckGradientWrtParamsEC
from ...components.executor.check_jacobian_wrt_inputs import CheckJacobianWrtInputsEC
from ...components.executor.exec_gradient import IdentityBackwardEC
from ...components.expect_result_policy.cartesian_product_on_id_for_expect_result import IdCartesianProductERPC
from ...components.facade.me_facade import MeFacadeFC
from ...components.function.get_function_from_config import IdentityBC
from ...components.function.run_gradient_wrt_inputs import RunBackwardBlockWrtInputsBC
from ...components.function.run_gradient_wrt_params import RunBackwardBlockWrtParamsBC
from ...components.function_inputs_policy.cartesian_product_on_id_for_function_inputs import IdCartesianProductFIPC
from ...components.inputs.generate_inputs_from_shape import GenerateFromShapeDC
from ...components.inputs.load_inputs_from_npy import LoadFromNpyDC
from ...components.verifier.compare_gradient import CompareGradientWithVC
from ...components.verifier.verify_expect_from_npy import LoadFromNpyVC
# pylint: disable=W0105
"""


@ -15,12 +15,12 @@
"""Pipelines for gradients."""
from ...components.executor.exec_gradient import IdentityBackwardEC
from ...components.facade.me_facade import MeFacadeFC
from ...components.function.compile_gradient_wrt_inputs import CompileBackwardBlockWrtInputsBC
from ...components.function.run_gradient_wrt_inputs import RunBackwardBlockWrtInputsBC
from ...components.function_inputs_policy.cartesian_product_on_id_for_function_inputs import IdCartesianProductFIPC
from ...components.inputs.generate_inputs_from_shape import GenerateFromShapeDC
# pylint: disable=W0105
"""


@ -19,13 +19,13 @@
import numpy as np
from mindspore import ParameterTuple
from mindspore import nn, context
from mindspore.common.api import _executor, ms_function
from mindspore.common.tensor import Tensor
from mindspore.ops import functional as F
from mindspore.ops import operations as P
from mindspore.ops.composite import GradOperation
from . import keyword


@ -15,11 +15,11 @@
"""Utils for computing gradients."""
from mindspore import context
from mindspore.common import ParameterTuple
from mindspore.common.api import ms_function
from mindspore.nn import Cell
from mindspore.ops.composite.base import GradOperation
class Bprop(Cell):


@ -16,14 +16,15 @@
"""Implementation of Numerical gradients checking."""
# pylint: disable=missing-docstring
from typing import Callable, List, Any
import numpy as np
from mindspore import ParameterTuple
from mindspore import Tensor
from mindspore import context
from mindspore.common.api import ms_function
import mindspore._c_expression as _c_expression
from mindspore.ops.composite import GradOperation
from .block_util import get_output_cell, gen_net, gen_grad_net, \
get_uniform_with_shape, set_block_phase, get_output_reduce_cell, set_block_param_with_rand


@ -16,8 +16,9 @@
"""Utils for baseline comparison test."""
import numpy as np
from . import keyword
from .other_util import to_numpy_list
def compare(expect, func_result, baseline):


@ -15,8 +15,9 @@
"""Dataset utils."""
import random
import numpy as np
from mindspore import Tensor


@ -18,6 +18,7 @@
# pylint: disable=missing-docstring, unused-argument
import logging
import mindspore.nn as nn
from mindspore.ops import operations as P
from mindspore.ops._grad.grad_base import bprop_getters


@ -18,11 +18,12 @@
# pylint: disable=arguments-differ
import numpy as np
import mindspore.nn as nn
from mindspore import Parameter, ParameterTuple, Tensor
from mindspore.ops import composite as C
from mindspore.ops import functional as F
from mindspore.ops import operations as P
class SquaredLoss(nn.Cell):


@ -18,9 +18,8 @@
import numpy as np
from mindspore.common.tensor import Tensor
from .config_util import get_expect_config
from .other_util import shape2tensor
def load_npy(p):


@ -15,9 +15,9 @@
"""Other utils."""
import numpy as np
import mindspore._c_expression as _c_expression
from mindspore.common.tensor import Tensor


@ -14,6 +14,7 @@
# ============================================================================
""" test ops """
import numpy as np
import mindspore.nn as nn
import mindspore.ops.composite as C
import mindspore.ops.functional as F


@ -13,9 +13,8 @@
# limitations under the License.
# ============================================================================
"""test dataset performance about mindspore.MindDataset, mindspore.TFRecordDataset, tf.data.TFRecordDataset"""
import time
import tensorflow as tf
import mindspore.dataset as ds
from mindspore.mindrecord import FileReader


@ -13,15 +13,16 @@
# limitations under the License.
# ============================================================================
import os
import pytest
import numpy as np
import mindspore as ms
import mindspore.communication.management as distributedTool
import mindspore.context as context
from mindspore.common.tensor import Tensor
from mindspore.nn import Cell
from mindspore.ops import operations as P
device_num = 2
device_id = int(os.getenv('DEVICE_ID'))


@ -13,22 +13,23 @@
# limitations under the License.
# ============================================================================
import os
import pytest
import numpy as np
from numpy import allclose
import mindspore as ms
import mindspore.communication.management as distributedTool
from mindspore import context
from mindspore.common import dtype as mstype
from mindspore.common.parameter import ParameterTuple, Parameter
from mindspore.common.tensor import Tensor
from mindspore.nn import Cell
from mindspore.nn.optim.momentum import Momentum
from mindspore.ops import composite as C
from mindspore.ops import functional as F
from mindspore.ops import operations as P
from mindspore.train import Model, ParallelMode
from mindspore.train.callback import Callback
np.set_printoptions(threshold=np.inf)


@ -13,22 +13,23 @@
# limitations under the License.
# ============================================================================
import os
import numpy as np
import pytest
import mindspore.common.dtype as mstype
import mindspore.context as context
import mindspore.nn as nn
import mindspore.ops.functional as F
from mindspore import Tensor
from mindspore.common.initializer import One
from mindspore.communication.management import init
from mindspore.nn.loss.loss import _Loss
from mindspore.nn.optim.momentum import Momentum
from mindspore.ops import operations as P
from mindspore.parallel import set_algo_parameters
from mindspore.train.callback import Callback
from mindspore.train.model import Model, ParallelMode
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
context.set_context(device_id=int(os.getenv('DEVICE_ID')))


@ -13,10 +13,11 @@
# limitations under the License.
# ============================================================================
""" test_cont_break """
import numpy as np
import pytest
from mindspore import Tensor, Model, context
from mindspore.nn import Cell
def run_test(netclass, count, dev):


@ -14,10 +14,11 @@
# ============================================================================
""" test_multigraph_sink """
import pytest
import mindspore.context as context
from mindspore.common import dtype as mstype
from mindspore.common import ms_function
from mindspore.common.tensor import Tensor
def setup_module(module):


@ -13,6 +13,7 @@
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor, ms_function


@ -13,9 +13,10 @@
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.common.dtype as mstype
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor, ms_function
from mindspore.ops import operations as P


@ -13,13 +13,14 @@
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor, Parameter, Model, ms_function
from mindspore.common.initializer import initializer
from mindspore.nn.loss import SoftmaxCrossEntropyWithLogits
from mindspore.nn.optim import Momentum
from mindspore.ops import operations as P
context.set_context(device_target="Ascend")


@ -12,8 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor


@ -12,8 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor


@ -13,6 +13,7 @@
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor


@ -12,8 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor


@ -14,13 +14,13 @@
# ============================================================================
"""Aggregator."""
import mindspore.nn as nn
from mindspore import Tensor, Parameter
from mindspore._checkparam import check_int_positive, check_bool
from mindspore._extends import cell_attr_register
from mindspore.common.initializer import initializer
from mindspore.nn.layer.activation import get_activation
from mindspore.ops import functional as F
from mindspore.ops import operations as P
class GNNFeatureTransform(nn.Cell):


@ -14,13 +14,13 @@
# ============================================================================
"""test gnn aggregator."""
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
import mindspore.ops.composite as C
from mindspore import Tensor
from mindspore.common.api import _executor
from aggregator import MeanAggregator, AttentionHead, AttentionAggregator
context.set_context(mode=context.GRAPH_MODE)


@ -12,8 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import filecmp
import os
curr_path = os.path.abspath(os.curdir)
file_memreuse = curr_path + "/mem_reuse_check/memreuse.ir"


@ -13,11 +13,12 @@
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common import dtype as mstype
from mindspore.common.initializer import initializer
from mindspore.ops import operations as P
def weight_variable(shape):


@ -13,24 +13,25 @@
# limitations under the License.
# ============================================================================
import argparse
import os
import numpy as np
import random
import mindspore.common.dtype as mstype
import mindspore.context as context
import mindspore.dataset as de
import mindspore.dataset.transforms.c_transforms as C
import mindspore.dataset.transforms.vision.c_transforms as vision
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.communication.management import init
from resnet import resnet50
from mindspore.nn.optim.momentum import Momentum
from mindspore.ops import functional as F
from mindspore.ops import operations as P
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor
from mindspore.train.model import Model, ParallelMode
from mindspore.train.serialization import load_checkpoint, load_param_into_net
random.seed(1)
np.random.seed(1)


@ -13,24 +13,25 @@
# limitations under the License.
# ============================================================================
import argparse
import os
import numpy as np
import random
import mindspore.common.dtype as mstype
import mindspore.context as context
import mindspore.dataset as de
import mindspore.dataset.transforms.c_transforms as C
import mindspore.dataset.transforms.vision.c_transforms as vision
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.communication.management import init
from resnet import resnet50
from mindspore.nn.optim.momentum import Momentum
from mindspore.ops import functional as F
from mindspore.ops import operations as P
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor
from mindspore.train.model import Model, ParallelMode
from mindspore.train.serialization import load_checkpoint, load_param_into_net
random.seed(1)
np.random.seed(1)


@ -13,13 +13,14 @@
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.communication.management import init, NCCL_WORLD_COMM_GROUP, get_rank, get_group_size
from mindspore.ops import operations as P
context.set_context(mode=context.GRAPH_MODE, device_target='GPU')


@ -13,13 +13,14 @@
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.communication.management import init, NCCL_WORLD_COMM_GROUP, get_rank, get_group_size
from mindspore.ops import operations as P
context.set_context(mode=context.GRAPH_MODE, device_target='GPU')


@ -14,14 +14,15 @@
# ============================================================================
import datetime
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common import dtype as mstype
from mindspore.communication.management import init, get_rank, get_group_size
from mindspore.nn import TrainOneStepCell, WithLossCell
from mindspore.nn.optim import Momentum
from mindspore.ops import operations as P
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
init('nccl')


@ -13,13 +13,14 @@
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.communication.management import init, NCCL_WORLD_COMM_GROUP, get_rank, get_group_size
from mindspore.ops import operations as P
context.set_context(mode=context.GRAPH_MODE, device_target='GPU')


@ -13,8 +13,8 @@
# limitations under the License.
# ============================================================================
import mindspore.nn as nn
from mindspore.nn import Dense
from mindspore.ops import operations as P
class AlexNet(nn.Cell):


@ -15,21 +15,22 @@
"""train bert network without lossscale"""
import os
import pytest
import numpy as np
from numpy import allclose
import mindspore.common.dtype as mstype
import mindspore.dataset.engine.datasets as de
import mindspore.dataset.transforms.c_transforms as C
from mindspore import context
from mindspore import log as logger
from mindspore.common.tensor import Tensor
from mindspore.model_zoo.Bert_NEZHA import BertConfig, BertNetworkWithLoss, BertTrainOneStepWithLossScaleCell
from mindspore.nn.optim import Momentum
from mindspore.train.callback import Callback, LossMonitor
from mindspore.train.loss_scale_manager import DynamicLossScaleManager
from mindspore.train.model import Model
_current_dir = os.path.dirname(os.path.realpath(__file__))
DATA_DIR = ["/home/workspace/mindspore_dataset/bert/example/examples.tfrecord"]


@ -13,10 +13,11 @@
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.nn import Dense
from mindspore.ops import operations as P
class LeNet(nn.Cell):


@ -13,6 +13,7 @@
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P


@ -12,8 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor


@ -17,15 +17,16 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.initializer import initializer
from mindspore.nn import TrainOneStepCell, WithLossCell
from mindspore.nn.optim import Momentum
from mindspore.ops import operations as P
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")


@ -13,26 +13,26 @@
# limitations under the License.
# ============================================================================
import os
import pytest
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common import dtype as mstype
from mindspore.common.initializer import initializer
import mindspore.dataset as ds
import mindspore.dataset.transforms.vision.c_transforms as CV
import mindspore.dataset.transforms.c_transforms as C
from mindspore.dataset.transforms.vision import Inter
from mindspore.model_zoo.lenet import LeNet5
from mindspore.nn import Dense, TrainOneStepCell, WithLossCell
from mindspore.nn.metrics import Accuracy
from mindspore.nn.optim import Momentum
from mindspore.ops import operations as P
from mindspore.train import Model
from mindspore.train.callback import LossMonitor
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")


@ -13,17 +13,18 @@
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.nn import Dense
from mindspore.nn import TrainOneStepCell, WithLossCell
from mindspore.nn.optim import Momentum
from mindspore.ops import operations as P
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")


@ -17,22 +17,23 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore import amp
from mindspore.nn import Dense
from mindspore.nn import TrainOneStepCell, WithLossCell
from mindspore.nn.cell import Cell
from mindspore.nn.layer.basic import Flatten
from mindspore.nn.layer.conv import Conv2d
from mindspore.nn.layer.normalization import BatchNorm2d
from mindspore.nn.layer.pooling import MaxPool2d
from mindspore.nn.optim import Momentum
from mindspore.ops import operations as P
from mindspore.ops.operations import TensorAdd
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")


@ -18,18 +18,19 @@ Function:
Usage:
python test_network_main.py --net lenet --target Ascend
"""
import argparse
import os
import time
import numpy as np
from models.alexnet import AlexNet
from models.lenet import LeNet
from models.resnetv1_5 import resnet50
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.nn import TrainOneStepCell, WithLossCell
from mindspore.nn.optim import Momentum
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


@ -12,11 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


@ -12,12 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.ops import operations as P
context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")


@ -12,11 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P
context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")


@ -12,12 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.ops import operations as P
context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")


@ -12,12 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.ops import operations as P
context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")


@ -12,11 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P
context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")


@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


@ -12,12 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.ops import operations as P
context.set_context(device_target="Ascend")


@ -12,15 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
from mindspore.ops.operations import _grad_ops as G
context.set_context(device_target="Ascend")


@ -12,15 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
from mindspore.ops.operations import _grad_ops as G
context.set_context(device_target="Ascend")


@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
context.set_context(device_target="Ascend")


@ -12,15 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
from mindspore.ops.operations import _grad_ops as G
context.set_context(device_target="Ascend")


@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
from mindspore.ops.composite import GradOperation
context.set_context(device_target="Ascend")


@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
context.set_context(device_target="Ascend")


@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
from mindspore.ops.composite import GradOperation
context.set_context(device_target="Ascend")


@ -12,11 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P
context.set_context(mode=context.GRAPH_MODE,
device_target="Ascend")


@ -12,11 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


@ -12,12 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.ops import operations as P
context.set_context(device_target="Ascend")


@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
from mindspore.ops.composite import GradOperation
# context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


@ -13,9 +13,10 @@
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.common.dtype as mstype
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function


@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
context.set_context(device_target="Ascend")


@ -12,12 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.ops import operations as P
context.set_context(device_target="Ascend")


@ -12,12 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.ops import operations as P
from mindspore.ops.composite import GradOperation
context.set_context(device_target="Ascend")


@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
context.set_context(device_target="Ascend")


@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
from mindspore.ops.composite import GradOperation
context.set_context(device_target="Ascend")


@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
context.set_context(device_target="Ascend")


@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
from mindspore.ops.composite import GradOperation
context.set_context(device_target="Ascend")


@ -12,12 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.ops import operations as P
context.set_context(device_target="Ascend")


@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops import operations as P
context.set_context(device_target="Ascend")

Some files were not shown because too many files have changed in this diff.