forked from mindspore-Ecosystem/mindspore
fix nasnet scripts
parent 7f390467e9
commit 789296949b
@@ -23,7 +23,7 @@ import mindspore.ops.functional as F
 import mindspore.ops.composite as C
 import mindspore.common.dtype as mstype
 from mindspore.nn.wrap.grad_reducer import DistributedGradReducer
-from mindspore.train.parallel_utils import ParallelMode
+from mindspore.context import ParallelMode
 from mindspore.parallel._utils import _get_device_num, _get_parallel_mode, _get_gradients_mean
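Both hunks make the same fix: ParallelMode now lives in mindspore.context, so the stale import paths (mindspore.train.parallel_utils above, the top-level mindspore namespace below) are replaced. As a minimal sketch of how this hunk's imports typically fit together in a data-parallel train cell; the helper function and its name are illustrative assumptions, not the commit's actual code:

from mindspore.nn.wrap.grad_reducer import DistributedGradReducer
from mindspore.context import ParallelMode  # new import path applied by this commit
from mindspore.parallel._utils import _get_device_num, _get_parallel_mode, _get_gradients_mean

def build_grad_reducer(parameters):
    # Only wrap gradients with a reducer when training across multiple devices.
    if _get_parallel_mode() in (ParallelMode.DATA_PARALLEL, ParallelMode.HYBRID_PARALLEL):
        mean = _get_gradients_mean()   # average (True) or sum (False) gradients across devices
        degree = _get_device_num()     # number of devices in the communication group
        return DistributedGradReducer(parameters, mean, degree)
    return None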
@@ -18,7 +18,7 @@ import os
 from mindspore import Tensor
 from mindspore import context
-from mindspore import ParallelMode
+from mindspore.context import ParallelMode
 from mindspore.communication.management import init, get_rank, get_group_size
 from mindspore.nn.optim.rmsprop import RMSProp
 from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor
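And the corresponding setup in the train script, where the corrected import configures distributed training. This is a sketch of the usual pattern, not the nasnet script itself; the device target and argument values are assumptions:

from mindspore import context
from mindspore.context import ParallelMode  # fixed import path
from mindspore.communication.management import init, get_rank, get_group_size

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")  # target is an assumption
init()                          # initialize the collective-communication backend
rank_id = get_rank()            # rank of this process
device_num = get_group_size()   # total number of devices in the job
context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL,
                                  gradients_mean=True,
                                  device_num=device_num)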