forked from mindspore-Ecosystem/mindspore
use standard "import" in cases
parent cea9e119e1
commit aff2399bf9
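Every hunk below applies the same change: Model, the callbacks (Callback, LossMonitor, TimeMonitor, ModelCheckpoint, CheckpointConfig, ...) and the metrics move from deep submodule paths (mindspore.train.model, mindspore.train.callback, mindspore.train.metrics, mindspore.train.train_thor) to the names re-exported by the top-level mindspore.train package. A minimal sketch of the pattern, using only import paths that appear in the diff; the usage lines are hypothetical and commented out:

# Old style, removed by this commit:
#   from mindspore.train.model import Model
#   from mindspore.train.callback import Callback, LossMonitor
#   from mindspore.train.metrics import Accuracy
# New style, the standard top-level imports:
from mindspore.train import Model, Callback, LossMonitor, Accuracy

# Hypothetical usage, unchanged by the import migration:
# model = Model(network, loss_fn=loss, optimizer=opt, metrics={"Accuracy": Accuracy()})
# model.train(epochs, train_dataset, callbacks=[LossMonitor()])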
@@ -21,7 +21,7 @@ from mindspore.nn import Cell
 from mindspore import context, Tensor
 from mindspore.common.parameter import Parameter
 from mindspore.common.initializer import initializer
-from mindspore.train.model import Model
+from mindspore.train import Model
 from mindspore.ops.composite import GradOperation
 from mindspore.common import ParameterTuple
 from tests.security_utils import security_off_wrap
@@ -26,8 +26,7 @@ from mindspore.communication.management import init
 from mindspore.nn.loss.loss import LossBase
 from mindspore.nn.optim.momentum import Momentum
 from mindspore.ops import operations as P
-from mindspore.train.callback import Callback
-from mindspore.train.model import Model
+from mindspore.train import Callback, Model
 from mindspore.context import ParallelMode
 import mindspore.dataset as ds

@@ -23,8 +23,8 @@ from mindspore.nn import MultiFieldEmbeddingLookup as embedding
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train import Model
-from mindspore.train.callback import CheckpointConfig
-from mindspore.train.callback import ModelCheckpoint
+from mindspore.train import CheckpointConfig
+from mindspore.train import ModelCheckpoint
 from mindspore.train.serialization import load_checkpoint
 from mindspore.train.serialization import load_param_into_net
 from mindspore.communication.management import init
@@ -27,8 +27,8 @@ from mindspore.nn import Flatten
 from mindspore.nn import Momentum
 import mindspore.ops.operations as P
 from mindspore.train.serialization import load_param_into_net
-from mindspore.train.callback import CheckpointConfig
-from mindspore.train.callback import ModelCheckpoint
+from mindspore.train import CheckpointConfig
+from mindspore.train import ModelCheckpoint
 from mindspore.train.serialization import load_checkpoint

 from mindspore.nn import SoftmaxCrossEntropyWithLogits
@@ -26,8 +26,8 @@ from mindspore.nn import Dense
 from mindspore.nn import Softmax
 import mindspore.ops.operations as P
 from mindspore.train.serialization import load_param_into_net
-from mindspore.train.callback import CheckpointConfig
-from mindspore.train.callback import ModelCheckpoint
+from mindspore.train import CheckpointConfig
+from mindspore.train import ModelCheckpoint
 from mindspore.train.serialization import load_checkpoint
 from mindspore.nn import Momentum
 from mindspore.nn import SoftmaxCrossEntropyWithLogits
@@ -27,8 +27,8 @@ from mindspore.nn.loss.loss import LossBase
 from mindspore.nn.optim.momentum import Momentum
 from mindspore.ops import operations as P
 from mindspore.parallel import set_algo_parameters
-from mindspore.train.callback import Callback
-from mindspore.train.model import Model
+from mindspore.train import Callback
+from mindspore.train import Model
 from mindspore.context import ParallelMode

 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@@ -27,8 +27,7 @@ from mindspore.nn import Cell
 from mindspore.nn.optim.momentum import Momentum
 from mindspore.ops import functional as F
 from mindspore.ops import operations as P
-from mindspore.train import Model
-from mindspore.train.callback import Callback
+from mindspore.train import Model, Callback

 np.set_printoptions(threshold=np.inf)
 device_num = 2
@@ -20,9 +20,8 @@ import numpy as np
 import mindspore.communication.management as distributedTool
 import mindspore.nn as nn
 from mindspore import context
-from mindspore.train.metrics import Accuracy
-from mindspore.train import Model
-from mindspore.train.callback import LossMonitor, TimeMonitor
+from mindspore.train import Accuracy
+from mindspore.train import Model, LossMonitor, TimeMonitor
 from tests.models.official.cv.lenet.src.dataset import create_dataset
 from tests.models.official.cv.lenet.src.lenet import LeNet5

@@ -16,10 +16,9 @@ import pytest

 from mindspore.ops import operations as P
 import mindspore.nn as nn
-from mindspore.train import Model
+from mindspore.train import Model, Callback
 from mindspore.common import set_seed
 import mindspore.dataset as ds
-from mindspore.train.callback import Callback
 from mindspore import log as logger

 set_seed(1)
@@ -33,7 +33,7 @@ from mindspore.nn import Dense
 from mindspore.nn import SoftmaxCrossEntropyWithLogits
 from mindspore.nn import WithLossCell
 from mindspore import dataset as ds
-from mindspore import Model
+from mindspore.train import Model
 from dump_test_utils import generate_dump_json, generate_statistic_dump_json, check_dump_structure
 from tests.security_utils import security_off_wrap

@@ -19,8 +19,7 @@ import pytest
 from mindspore import ops, nn, Tensor, Parameter, ParameterTuple, context, set_seed
 from mindspore.common.initializer import initializer, XavierUniform
 import mindspore.dataset as ds
-from mindspore.train.callback import Callback
-from mindspore.train import Model
+from mindspore.train import Callback, Model
 from mindspore.common import dtype as mstype
 import mindspore as ms

@@ -35,8 +35,7 @@ from mindspore.nn.wrap.grad_reducer import DistributedGradReducer
 from mindspore.ops import composite as C, functional as F
 from mindspore.ops.functional import stop_gradient
 from mindspore.parallel._utils import _get_parallel_mode, _get_device_num, _get_gradients_mean
-from mindspore.train import Model
-from mindspore.train.callback._callback import Callback
+from mindspore.train import Model, Callback

 logging.basicConfig(level=logging.DEBUG, format="%(asctime)s %(levelname)s %(message)s")

@@ -21,9 +21,8 @@ import pytest
 import mindspore as ms
 from mindspore.common.tensor import Tensor
 from mindspore.nn.optim import Adam
-from mindspore.train.model import Model
 from mindspore.train.loss_scale_manager import DynamicLossScaleManager
-from mindspore.train.callback import Callback, TimeMonitor
+from mindspore.train import Callback, TimeMonitor, Model
 from mindspore.common import set_seed
 import mindspore.dataset as de
 from transformer.transformer_for_train import TransformerNetworkWithLoss, TransformerTrainOneStepWithLossScaleCell
@@ -39,8 +39,7 @@ from mindspore.nn.wrap.loss_scale import DynamicLossScaleUpdateCell
 from mindspore.nn.wrap.grad_reducer import DistributedGradReducer
 from mindspore.nn.optim import Adam

-from mindspore.train import Model
-from mindspore.train.callback._callback import Callback
+from mindspore.train import Model, Callback
 from mindspore import context, ParameterTuple, set_seed
 from mindspore.context import ParallelMode
 from mindspore.communication.management import get_group_size
@@ -18,7 +18,7 @@ import numpy as np
 from mindspore.train import Model
 from mindspore import Tensor
 import mindspore.dataset as ds
-from mindspore.train.callback import LossMonitor
+from mindspore.train import LossMonitor
 import mindspore as ms
 import mindspore.nn as nn
 import mindspore.communication as comm
@@ -22,7 +22,7 @@ import mindspore.dataset as ds
 import mindspore.dataset.transforms as C
 import mindspore.dataset.vision as CV
 import mindspore.nn as nn
-from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, Callback
+from mindspore.train import ModelCheckpoint, CheckpointConfig, Callback
 from mindspore import load_checkpoint
 from mindspore.common import dtype as mstype
 from mindspore.dataset.vision import Inter
@@ -23,9 +23,8 @@ import mindspore.dataset.vision as CV
 import mindspore.nn as nn
 from mindspore.common import dtype as mstype
 from mindspore.dataset.vision import Inter
-from mindspore.train.metrics import Accuracy
-from mindspore.train import Model
-from mindspore.train.callback import LossMonitor
+from mindspore.train import Accuracy
+from mindspore.train import Model, LossMonitor
 from mindspore.common.initializer import TruncatedNormal
 from mindspore.communication.management import init

@@ -28,9 +28,8 @@ from mindspore.experimental import MapParameter
 from mindspore.nn import Cell, Flatten, Dense
 from mindspore.nn import SoftmaxCrossEntropyWithLogits
 from mindspore.nn import Adam
-from mindspore.train import Model
-from mindspore.train.callback import CheckpointConfig, ModelCheckpoint
-from mindspore.train.metrics import Accuracy
+from mindspore.train import Model, CheckpointConfig, ModelCheckpoint
+from mindspore.train import Accuracy
 from mindspore.common import set_seed


@@ -29,8 +29,7 @@ from mindspore.communication.management import init
 from mindspore.nn.optim.momentum import Momentum
 from mindspore.ops import functional as F
 from mindspore.ops import operations as P
-from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor
-from mindspore.train.model import Model
+from mindspore.train import ModelCheckpoint, CheckpointConfig, LossMonitor, Model
 from mindspore.context import ParallelMode

 random.seed(1)
@@ -29,8 +29,7 @@ from mindspore.communication.management import init
 from mindspore.nn.optim.momentum import Momentum
 from mindspore.ops import functional as F
 from mindspore.ops import operations as P
-from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor
-from mindspore.train.model import Model
+from mindspore.train import ModelCheckpoint, CheckpointConfig, LossMonitor, Model
 from mindspore.context import ParallelMode

 random.seed(1)
@@ -24,7 +24,7 @@ from mindspore import amp
 from mindspore import Tensor
 from mindspore import context
 from mindspore.train.loss_scale_manager import FixedLossScaleManager
-from mindspore.train.model import Model
+from mindspore.train import Model
 from utils import FakeData
 from utils import allclose_nparray
 from utils import FakeDataInitMode
@@ -16,7 +16,7 @@
 Defined callback for DeepFM.
 """
 import time
-from mindspore.train.callback import Callback
+from mindspore.train import Callback


 def add_write(file_path, out_str):
@@ -22,10 +22,10 @@ from mindspore.ops import composite as C
 from mindspore.ops import operations as P
 from mindspore.nn import Dropout
 from mindspore.nn.optim import Adam
-from mindspore.train.metrics import Metric
+from mindspore.train import Metric
 from mindspore import nn, Tensor, ParameterTuple, Parameter
 from mindspore.common.initializer import Uniform, initializer
-from mindspore.train.callback import ModelCheckpoint, CheckpointConfig
+from mindspore.train import ModelCheckpoint, CheckpointConfig
 from mindspore.parallel._utils import _get_device_num, _get_parallel_mode, _get_gradients_mean
 from mindspore.context import ParallelMode
 from mindspore.nn.wrap.grad_reducer import DistributedGradReducer
@@ -17,7 +17,7 @@ import os
 import pytest

 from mindspore import context
-from mindspore.train.model import Model
+from mindspore.train import Model
 from mindspore.common import set_seed

 from src.deepfm import ModelBuilder, AUCMetric
@@ -17,9 +17,8 @@ import argparse
 import time
 from mindspore import context
 from mindspore.nn.optim.momentum import Momentum
-from mindspore import Model
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
-from mindspore.train.callback import Callback
+from mindspore.train import Callback, Model
 from src.md_dataset import create_dataset
 from src.losses import OhemLoss
 from src.deeplabv3 import deeplabv3_resnet50
@@ -21,9 +21,8 @@ import numpy as np
 import mindspore.common.dtype as mstype
 from mindspore.common.tensor import Tensor
 from mindspore.nn.optim import Adam
-from mindspore.train.model import Model
+from mindspore.train import Model, Callback
 from mindspore.train.loss_scale_manager import DynamicLossScaleManager
-from mindspore.train.callback import Callback
 import mindspore.dataset as ds
 import mindspore.dataset.transforms as deC
 from mindspore import context
@@ -15,7 +15,7 @@
 callbacks
 """
 import time
-from mindspore.train.callback import Callback
+from mindspore.train import Callback
 from mindspore import context
 from mindspore.context import ParallelMode
 from mindspore.communication.management import get_rank
@@ -18,7 +18,7 @@
 import os
 import sys
 from mindspore import Model, context
-from mindspore.train.callback import TimeMonitor
+from mindspore.train import TimeMonitor
 from mindspore.context import ParallelMode
 from mindspore.communication.management import get_rank, get_group_size, init
 from mindspore.nn.wrap.cell_wrapper import VirtualDatasetCellTriple
@@ -18,7 +18,7 @@ import os
 import sys
 import numpy as np
 from mindspore import Model, context
-from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, TimeMonitor
+from mindspore.train import ModelCheckpoint, CheckpointConfig, TimeMonitor
 from mindspore.context import ParallelMode
 from mindspore.communication.management import get_rank, get_group_size, init

@@ -28,9 +28,8 @@ import pytest
 import numpy as np
 import mindspore.nn as nn
 from mindspore import context, Tensor
-from mindspore.train import Model
+from mindspore.train import Model, Callback
 from mindspore.common.initializer import initializer
-from mindspore.train.callback import Callback

 from src.yolov3 import yolov3_resnet18, YoloWithLossCell, TrainingWrapper
 from src.dataset import create_yolo_dataset
@@ -27,7 +27,7 @@ import numpy as np

 from mindspore import context, Tensor
 from mindspore.common.initializer import initializer
-from mindspore.train.callback import Callback
+from mindspore.train import Callback
 from mindspore.context import ParallelMode
 from mindspore.nn.optim.momentum import Momentum
 import mindspore as ms
@@ -26,9 +26,8 @@ from mindspore import log as logger
 from mindspore.common import dtype as mstype
 from mindspore.common.tensor import Tensor
 from mindspore.nn.learning_rate_schedule import LearningRateSchedule, PolynomialDecayLR, WarmUpLR
-from mindspore.train.metrics import Metric
 from mindspore.ops import operations as P
-from mindspore.train.callback import Callback
+from mindspore.train import Callback, Metric


 class CrossEntropyCalculation(nn.Cell):
@@ -25,8 +25,7 @@ from mindspore import log as logger
 from mindspore.ops import operations as P
 from mindspore.common.tensor import Tensor
 from mindspore.nn.optim import Lamb
-from mindspore.train.callback import Callback
-from mindspore.train.model import Model
+from mindspore.train import Callback, Model
 import mindspore.nn.learning_rate_schedule as lr_schedules
 from tests.models.official.nlp.bert.src.bert_for_pre_training import BertNetworkWithLoss
 from tests.models.official.nlp.bert.src.bert_for_pre_training import BertTrainOneStepCell
@@ -27,9 +27,9 @@ from mindspore import log as logger
 from mindspore.ops import operations as P
 from mindspore.common.tensor import Tensor
 from mindspore.nn.optim import Lamb
-from mindspore.train.callback import Callback
+from mindspore.train import Callback
 from mindspore.train.loss_scale_manager import DynamicLossScaleManager
-from mindspore.train.model import Model
+from mindspore.train import Model
 import mindspore.nn.learning_rate_schedule as lr_schedules
 from tests.models.official.nlp.bert.src.bert_for_pre_training import BertNetworkWithLoss
 from tests.models.official.nlp.bert.src.bert_for_pre_training import BertTrainOneStepWithLossScaleCell
@@ -25,12 +25,11 @@ import mindspore.common.dtype as mstype
 import mindspore.communication.management as D
 from mindspore import context
 from mindspore import log as logger
-from mindspore.train.callback import Callback
+from mindspore.train import Callback
 from mindspore.context import ParallelMode
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
 from mindspore.nn.optim import thor
-from mindspore.train.model import Model
-from mindspore.train.train_thor import ConvertModelUtils
+from mindspore.train import Model, ConvertModelUtils
 import mindspore.dataset.transforms as C

 from tests.st.networks.models.bert.bert_performance.src.bert_for_pre_training import BertNetworkWithLoss, \
@@ -27,9 +27,8 @@ from mindspore import log as logger
 from mindspore.ops import operations as P
 from mindspore.common.tensor import Tensor
 from mindspore.nn.optim import Lamb
-from mindspore.train.callback import Callback
 from mindspore.train.loss_scale_manager import DynamicLossScaleManager
-from mindspore.train.model import Model
+from mindspore.train import Model, Callback
 import mindspore.nn.learning_rate_schedule as lr_schedules
 from tests.models.official.nlp.bert.src.bert_for_pre_training import BertNetworkWithLoss
 from tests.models.official.nlp.bert.src.bert_for_pre_training import BertTrainOneStepWithLossScaleCell
@@ -14,7 +14,7 @@
 # ============================================================================
 """mIou."""
 import numpy as np
-from mindspore.train.metrics import Metric
+from mindspore.train import Metric


 def confuse_matrix(target, pred, n):
@@ -20,7 +20,7 @@ import numpy as np
 from mindspore import context, Tensor
 from mindspore.nn.optim.momentum import Momentum
 from mindspore import Model
-from mindspore.train.callback import Callback
+from mindspore.train import Callback
 from src.md_dataset import create_dataset
 from src.losses import OhemLoss
 from src.deeplabv3 import deeplabv3_resnet50
@@ -16,7 +16,7 @@
 import time
 import numpy as np
 import mindspore as ms
-from mindspore.train.callback import Callback
+from mindspore.train import Callback


 class LossGet(Callback):
@@ -22,9 +22,9 @@ import numpy as np
 from mindspore import context
 from mindspore.common.tensor import Tensor
 import mindspore.communication.management as D
-from mindspore.train.model import Model
+from mindspore.train import Model
 from mindspore.context import ParallelMode
-from mindspore.train.callback import Callback
+from mindspore.train import Callback
 from mindspore.train.loss_scale_manager import FixedLossScaleManager
 import mindspore.nn as nn
 import mindspore.dataset as ds
@@ -17,7 +17,7 @@ import os
 import numpy as np
 import mindspore as ms
 from mindspore import nn
-from mindspore.train.train_thor import ConvertModelUtils
+from mindspore.train import ConvertModelUtils
 from tests.st.networks.models.resnet50.src.callback import LossGet
 from tests.st.networks.models.resnet50.src_thor.config import config as thor_config
 from tests.st.networks.models.resnet50.src_thor.dataset import create_dataset2 as create_dataset_thor
@@ -27,12 +27,11 @@ from mindspore import Tensor, ParameterTuple
 from mindspore.common import dtype as mstype
 from mindspore.dataset.vision import Inter
 from mindspore.nn import Dense, TrainOneStepCell, WithLossCell, ForwardValueAndGrad
-from mindspore.train.metrics import Accuracy
+from mindspore.train import Accuracy
 from mindspore.nn.optim import Momentum
 from mindspore.ops import operations as P
 from mindspore.ops import functional as F
-from mindspore.train import Model
-from mindspore.train.callback import LossMonitor
+from mindspore.train import Model, LossMonitor
 from mindspore.common.initializer import TruncatedNormal

 context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
@@ -18,7 +18,8 @@ import mindspore.context as context
 import mindspore.nn as nn
 import mindspore.dataset as ds
 from mindspore.ops import operations as P
-from mindspore import Model, Tensor
+from mindspore import Tensor
+from mindspore.train import Model
 import mindspore as ms

 context.set_context(mode=context.GRAPH_MODE,
@@ -18,7 +18,7 @@ from mindspore import context
 from mindspore.common.tensor import Tensor
 from mindspore.nn import Cell
 from mindspore.ops import operations as P
-from mindspore.train.model import Model
+from mindspore.train import Model

 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

@@ -18,7 +18,7 @@ import pytest
 from mindspore import context
 from mindspore.common.tensor import Tensor
 from mindspore.nn import FastGelu
-from mindspore.train.model import Model
+from mindspore.train import Model

 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

@@ -19,7 +19,7 @@ from mindspore import context
 from mindspore import log as logger
 from mindspore.common.tensor import Tensor
 from mindspore.nn import GELU
-from mindspore.train.model import Model
+from mindspore.train import Model

 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

@@ -20,7 +20,7 @@ from mindspore import log as logger
 from mindspore.common.tensor import Tensor
 from mindspore.nn import Cell
 from mindspore.ops import operations as P
-from mindspore.train.model import Model
+from mindspore.train import Model

 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

@@ -20,7 +20,7 @@ from mindspore import log as logger
 from mindspore.common.tensor import Tensor
 from mindspore.nn import Cell
 from mindspore.nn import LayerNorm
-from mindspore.train.model import Model
+from mindspore.train import Model

 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

@@ -19,7 +19,7 @@ import mindspore.context as context
 import mindspore.nn as nn
 from mindspore import Tensor
 from mindspore.ops import operations as P
-from mindspore.train.model import Model
+from mindspore.train import Model

 context.set_context(device_target="Ascend")

@@ -19,7 +19,7 @@ import mindspore.context as context
 import mindspore.nn as nn
 from mindspore import Tensor
 from mindspore.ops import operations as P
-from mindspore.train.model import Model
+from mindspore.train import Model

 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

@@ -19,7 +19,7 @@ import mindspore.context as context
 from mindspore import Tensor
 from mindspore.nn import Cell
 from mindspore.ops import operations as P
-from mindspore.train.model import Model
+from mindspore.train import Model

 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

@@ -21,7 +21,7 @@ from mindspore import Tensor
 from mindspore.nn import Cell
 import mindspore.ops as ops
 from mindspore.ops import operations as P
-from mindspore.train.model import Model
+from mindspore.train import Model

 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

@@ -18,7 +18,7 @@ import mindspore.context as context
 import mindspore.ops.operations as P
 from mindspore.common.tensor import Tensor
 from mindspore.nn import Cell
-from mindspore.train.model import Model
+from mindspore.train import Model

 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

@@ -18,7 +18,7 @@ import mindspore.context as context
 import mindspore.nn as nn
 from mindspore import Tensor
 from mindspore.ops import operations as P
-from mindspore.train.model import Model
+from mindspore.train import Model

 context.set_context(device_target="Ascend")

@@ -18,7 +18,7 @@ import mindspore.context as context
 import mindspore.nn as nn
 from mindspore import Tensor
 from mindspore.ops.operations import _grad_ops as G
-from mindspore.train.model import Model
+from mindspore.train import Model

 context.set_context(device_target="Ascend")

@@ -18,7 +18,7 @@ import mindspore.context as context
 import mindspore.nn as nn
 from mindspore import Tensor
 from mindspore.ops import operations as P
-from mindspore.train.model import Model
+from mindspore.train import Model

 context.set_context(device_target="Ascend")

@@ -15,9 +15,8 @@

 import mindspore.context as context
 from mindspore import set_seed
-from mindspore.train.metrics import Accuracy
-from mindspore.train import Model
-from mindspore.train.callback import LossMonitor, TimeMonitor
+from mindspore.train import Accuracy
+from mindspore.train import Model, LossMonitor, TimeMonitor
 from mindspore.communication.management import init

 from src.lenet import Net, get_optimizer, get_loss, get_dataset
@@ -15,9 +15,9 @@

 import mindspore.context as context
 from mindspore import set_seed
-from mindspore.train.metrics import Accuracy
+from mindspore.train import Accuracy
 from mindspore.train import Model
-from mindspore.train.callback import LossMonitor, TimeMonitor
+from mindspore.train import LossMonitor, TimeMonitor
 from mindspore.communication.management import init, get_rank

 from src.net import SplitRefWithoutOptimNet, SplitOptimNet, get_optimizer, get_loss, get_dataset
@@ -15,7 +15,7 @@

 import mindspore.context as context
 from mindspore import set_seed
-from mindspore.train.metrics import Accuracy
+from mindspore.train import Accuracy
 from mindspore.train import Model
 from mindspore.communication.management import init, get_rank

@@ -15,9 +15,8 @@

 import mindspore.context as context
 from mindspore import set_seed
-from mindspore.train.metrics import Accuracy
-from mindspore.train import Model
-from mindspore.train.callback import TimeMonitor
+from mindspore.train import Accuracy
+from mindspore.train import Model, TimeMonitor
 from mindspore.communication.management import init

 from src.dynamic_embedding import Net, get_optimizer, get_loss, get_dataset
@@ -23,8 +23,7 @@ import mindspore.nn as nn

 from mindspore import context
 from mindspore import Tensor
-from mindspore.train import Model
-from mindspore.train.callback import LossMonitor
+from mindspore.train import Model, LossMonitor
 from mindspore.ops import operations as P
 from mindspore.common import dtype as mstype

@@ -17,7 +17,7 @@ from argparse import ArgumentParser

 from mindspore import dataset as ds
 from mindspore import nn, Tensor, context
-from mindspore.train.metrics import Accuracy
+from mindspore.train import Accuracy
 from mindspore.nn.optim import Momentum
 from mindspore.dataset.transforms import transforms as C
 from mindspore.dataset.vision import transforms as CV
@@ -22,14 +22,13 @@ import pytest

 from mindspore import dataset as ds
 from mindspore import nn, Tensor, context
-from mindspore.train.metrics import Accuracy
 from mindspore.nn.optim import Momentum
 from mindspore.dataset.transforms import transforms as C
 from mindspore.dataset.vision import transforms as CV
 from mindspore.dataset.vision import Inter
 from mindspore.common import dtype as mstype
 from mindspore.common.initializer import TruncatedNormal
-from mindspore.train import Model
+from mindspore.train import Model, Accuracy
 from mindspore import Profiler


@@ -20,9 +20,8 @@ import mindspore
 from mindspore.nn import Cell, Flatten, Dense
 from mindspore.nn import EmbeddingLookup, SoftmaxCrossEntropyWithLogits
 from mindspore.nn import Adam
-from mindspore.train import Model
-from mindspore.train.callback import CheckpointConfig, ModelCheckpoint
-from mindspore.train.metrics import Accuracy
+from mindspore.train import Model, CheckpointConfig, ModelCheckpoint
+from mindspore.train import Accuracy
 from mindspore.common import set_seed
 from mindspore.communication.management import get_rank
 import mindspore.ops.operations as op
@@ -23,9 +23,7 @@ import mindspore.dataset.vision as CV
 import mindspore.nn as nn
 from mindspore.common import dtype as mstype
 from mindspore.dataset.vision import Inter
-from mindspore.train.metrics import Accuracy
-from mindspore.train import Model
-from mindspore.train.callback import LossMonitor
+from mindspore.train import Model, LossMonitor, Accuracy
 from mindspore.common.initializer import TruncatedNormal
 from mindspore.communication.management import init

@@ -21,7 +21,7 @@ from mindspore import Tensor, context
 from mindspore.nn import LayerNorm, Cell
 from mindspore.common import ParameterTuple
 from mindspore.ops.composite import GradOperation
-from mindspore.train.model import Model
+from mindspore.train import Model

 class _Grad(Cell):
     def __init__(self, grad, network, wrt_params=False, real_inputs_count=None):
@@ -35,10 +35,10 @@ from mindspore.nn import Cell
 from mindspore.ops import operations as P
 from mindspore.ops import composite as CP
 from mindspore.nn.optim.momentum import Momentum
-from mindspore.train.callback import Callback
+from mindspore.train import Callback
 from mindspore.nn.loss import SoftmaxCrossEntropyWithLogits
 from mindspore.train.loss_scale_manager import FixedLossScaleManager
-from mindspore.train.model import Model
+from mindspore.train import Model
 from mindspore.context import ParallelMode
 import mindspore.communication.management as D
 MINDSPORE_HCCL_CONFIG_PATH = "/home/workspace/mindspore_config/hccl/rank_table_8p.json"
@@ -25,12 +25,11 @@ import pytest
 from mindspore.common import set_seed
 from mindspore import nn, Tensor, context
 from mindspore.common.initializer import Normal
-from mindspore.train.metrics import Loss
+from mindspore.train import Loss
 from mindspore.nn.optim import Momentum
 from mindspore.ops import operations as P
 from mindspore.train import Model
-from mindspore.train.callback import SummaryCollector, SummaryLandscape
-from mindspore.train.summary.summary_record import SummaryRecord
+from mindspore import SummaryCollector, SummaryLandscape, SummaryRecord
 from tests.st.summary.dataset import create_mnist_dataset
 from tests.summary_utils import SummaryReader
 from tests.security_utils import security_off_wrap
@@ -22,7 +22,7 @@ import pytest

 from mindspore import nn, Tensor, context
 from mindspore.common.initializer import Normal
-from mindspore.train.metrics import Loss
+from mindspore.train import Loss
 from mindspore.nn.optim import Momentum
 from mindspore.ops import operations as P
 from mindspore.train import Model
@@ -20,7 +20,7 @@ from resnet import resnet50

 import mindspore.common.dtype as mstype
 import mindspore.ops.functional as F
-from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor
+from mindspore.train import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
 import mindspore.dataset as ds
 import mindspore.dataset.transforms as C
@@ -31,7 +31,7 @@ from mindspore import context
 from mindspore.communication.management import init
 from mindspore.nn.optim.momentum import Momentum
 from mindspore.ops import operations as P
-from mindspore.train.model import Model
+from mindspore.train import Model
 from mindspore.context import ParallelMode

 random.seed(1)
@@ -28,8 +28,8 @@ from mindspore import Tensor
 from mindspore import context
 from mindspore.nn.optim.momentum import Momentum
 from mindspore.ops import operations as P
-from mindspore.train.callback import Callback
-from mindspore.train.model import Model
+from mindspore.train import Callback
+from mindspore.train import Model

 random.seed(1)
 np.random.seed(1)
@@ -30,8 +30,7 @@ from mindspore import context
 from mindspore.communication.management import init
 from mindspore.nn.optim.momentum import Momentum
 from mindspore.ops import operations as P
-from mindspore.train.callback import Callback
-from mindspore.train.model import Model
+from mindspore.train import Callback, Model
 from mindspore.context import ParallelMode

 random.seed(1)
@@ -23,8 +23,8 @@ from mindspore import nn, Model
 from mindspore import dataset as ds
 from mindspore.nn.optim import Momentum
 from mindspore.common.initializer import Normal
-from mindspore.train.callback import RunContext, _InternalCallbackParam, \
-    _CallbackManager, ReduceLROnPlateau, EarlyStopping
+from mindspore.train import RunContext, ReduceLROnPlateau, EarlyStopping
+from mindspore.train.callback import _InternalCallbackParam, _CallbackManager


 def get_data(num, w=4.0, b=5.0):
@@ -17,9 +17,9 @@

 import pytest
 import numpy as np
-from mindspore import Model, nn, Tensor
+from mindspore import nn, Tensor
 from mindspore.common.initializer import Normal
-from mindspore.train.callback import Callback, TimeMonitor, LossMonitor
+from mindspore.train import Callback, TimeMonitor, LossMonitor, Model
 from mindspore import dataset as ds


@@ -23,11 +23,10 @@ from multiprocessing import Process
 import numpy as np
 import pytest

-from mindspore import Model
 from mindspore import nn, context
 from mindspore import dataset as ds
 from mindspore.common.initializer import TruncatedNormal
-from mindspore.train.callback import Callback, OnRequestExit, LossMonitor
+from mindspore.train import Callback, OnRequestExit, LossMonitor, Model


 def conv(in_channels, out_channels, kernel_size, stride=1, padding=0):
@@ -24,7 +24,7 @@ import numpy as np
 import mindspore.nn as nn
 from mindspore.common.tensor import Tensor
 from mindspore.nn.optim import Momentum
-from mindspore.train.model import Model
+from mindspore.train import Model

 log = logging.getLogger("test")
 log.setLevel(level=logging.ERROR)
@@ -23,7 +23,7 @@ from mindspore.common.api import jit, _cell_graph_executor
 from mindspore.common.parameter import Parameter
 from mindspore.common.tensor import Tensor
 from mindspore.ops import functional as F
-from mindspore.train.model import Model
+from mindspore.train import Model
 from tests.ut.python.model.resnet import resnet50


@@ -20,8 +20,7 @@ import pytest
 from mindspore import context
 from mindspore import log as logger
 from mindspore.dataset.callback import DSCallback, WaitedDSCallback
-from mindspore.train import Model
-from mindspore.train.callback import Callback
+from mindspore.train import Model, Callback

 import mindspore.dataset as ds
 import mindspore.nn as nn
@@ -18,7 +18,7 @@ import numpy as np
 import pytest

 from mindspore import Tensor
-from mindspore.train.metrics import Accuracy
+from mindspore.train import Accuracy


 def test_classification_accuracy():
@@ -17,7 +17,7 @@
 import math
 import numpy as np
 from mindspore import Tensor
-from mindspore.train.metrics import ROC, auc
+from mindspore.train import ROC, auc


 def test_auc():
@@ -15,7 +15,7 @@
 """test_bleu_score"""
 import math
 import pytest
-from mindspore.train.metrics import BleuScore
+from mindspore.train import BleuScore


 def test_bleu_score():
@@ -16,7 +16,7 @@
 import numpy as np
 import pytest
 from mindspore import Tensor
-from mindspore.train.metrics import ConfusionMatrix
+from mindspore.train import ConfusionMatrix


 def test_confusion_matrix():
@@ -16,7 +16,7 @@
 import numpy as np
 import pytest
 from mindspore import Tensor
-from mindspore.train.metrics import ConfusionMatrixMetric
+from mindspore.train import ConfusionMatrixMetric


 def test_confusion_matrix_metric():
@@ -16,7 +16,7 @@
 import pytest
 import numpy as np
 from sklearn.metrics import pairwise
-from mindspore.train.metrics import CosineSimilarity
+from mindspore.train import CosineSimilarity


 def test_cosine_similarity():
@@ -17,7 +17,7 @@ import math
 import numpy as np
 import pytest
 from mindspore import Tensor
-from mindspore.train.metrics import get_metric_fn, Dice
+from mindspore.train import get_metric_fn, Dice


 def test_classification_dice():
@@ -18,7 +18,7 @@ import numpy as np
 import pytest

 from mindspore import Tensor
-from mindspore.train.metrics import MAE, MSE
+from mindspore.train import MAE, MSE


 def test_MAE():
@@ -17,7 +17,7 @@ import numpy as np
 import pytest

 from mindspore import Tensor
-from mindspore.train.metrics import get_metric_fn, Fbeta
+from mindspore.train import get_metric_fn, Fbeta


 def test_classification_fbeta():
@@ -18,7 +18,7 @@ import math
 import numpy as np
 import pytest
 from mindspore import Tensor
-from mindspore.train.metrics import get_metric_fn, HausdorffDistance
+from mindspore.train import get_metric_fn, HausdorffDistance


 def test_hausdorff_distance():
@@ -17,7 +17,7 @@ import numpy as np
 import pytest

 from mindspore import Tensor
-from mindspore.train.metrics import Loss
+from mindspore.train import Loss


 def test_loss_inputs_error():
@@ -18,7 +18,7 @@ import math
 import numpy as np
 import pytest
 from mindspore import Tensor
-from mindspore.train.metrics import get_metric_fn, MeanSurfaceDistance
+from mindspore.train import get_metric_fn, MeanSurfaceDistance


 def test_mean_surface_distance():
@@ -17,7 +17,7 @@ import math
 import numpy as np

 from mindspore import Tensor
-from mindspore.train.metrics import get_metric_fn, rearrange_inputs
+from mindspore.train import get_metric_fn, rearrange_inputs


 def test_classification_accuracy():
@@ -17,7 +17,7 @@ import pytest
 import numpy as np
 from mindspore import nn, context
 from mindspore.common.tensor import Tensor
-from mindspore.train.metrics import OcclusionSensitivity
+from mindspore.train import OcclusionSensitivity
 context.set_context(mode=context.GRAPH_MODE)

 class DenseNet(nn.Cell):
@@ -18,7 +18,7 @@ import math
 import numpy as np
 import pytest
 from mindspore import Tensor
-from mindspore.train.metrics import get_metric_fn, Perplexity
+from mindspore.train import get_metric_fn, Perplexity


 def test_perplexity():
@@ -18,7 +18,7 @@ import numpy as np
 import pytest

 from mindspore import Tensor
-from mindspore.train.metrics import Precision
+from mindspore.train import Precision


 def test_classification_precision():
@@ -18,7 +18,7 @@ import numpy as np
 import pytest

 from mindspore import Tensor
-from mindspore.train.metrics import Recall
+from mindspore.train import Recall


 def test_classification_recall():
@@ -17,7 +17,7 @@
 import numpy as np
 import pytest
 from mindspore import Tensor
-from mindspore.train.metrics import ROC
+from mindspore.train import ROC


 def test_roc():
@@ -18,7 +18,7 @@ import math
 import numpy as np
 import pytest
 from mindspore import Tensor
-from mindspore.train.metrics import get_metric_fn, RootMeanSquareDistance
+from mindspore.train import get_metric_fn, RootMeanSquareDistance


 def test_root_mean_square_distance():
@@ -18,7 +18,7 @@ import numpy as np
 import pytest

 from mindspore import Tensor
-from mindspore.train.metrics import TopKCategoricalAccuracy, Top1CategoricalAccuracy, Top5CategoricalAccuracy
+from mindspore.train import TopKCategoricalAccuracy, Top1CategoricalAccuracy, Top5CategoricalAccuracy


 def test_type_topk():
@@ -27,7 +27,7 @@ from mindspore import context
 from mindspore.nn.wrap.cell_wrapper import _VirtualDatasetCell
 from mindspore.parallel import set_algo_parameters
 from mindspore.nn.layer.activation import get_activation
-from mindspore.train.model import Model
+from mindspore.train import Model
 from mindspore.common.api import _cell_graph_executor
 from tests.dataset_mock import MindData
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@@ -29,7 +29,7 @@ from mindspore.ops import operations as P
 from mindspore.parallel import _cost_model_context as cost_model_context
 from mindspore.parallel import set_algo_parameters
 from mindspore.parallel._utils import _reset_op_id as resset_op_id
-from mindspore.train.model import Model
+from mindspore.train import Model
 from mindspore.context import ParallelMode
 from mindspore.communication._comm_helper import GlobalComm

Some files were not shown because too many files have changed in this diff.