forked from mindspore-Ecosystem/mindspore
commit 428927bdff
@@ -14,19 +14,20 @@
# ============================================================================

import math
import os
import random
import math
import cv2
import pyclipper
import numpy as np
from PIL import Image

import Polygon as plg
import cv2
import numpy as np
import pyclipper
from PIL import Image
from src.config import config

import mindspore.dataset.engine as de
import mindspore.dataset.vision.py_transforms as py_transforms

from src.config import config

__all__ = ['train_dataset_creator', 'test_dataset_creator']

def get_img(img_path):
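
The hunk above interleaves the import lines from before and after the cleanup, which is why several modules appear twice. Read as the reordered result, the imports follow the usual grouping of standard library first, then third-party packages, then framework and local modules. A minimal sketch of that grouping, using only the modules named in the hunk, is:

    # Standard library imports.
    import math
    import os
    import random

    # Third-party packages (uppercase module names sort before lowercase ones).
    import Polygon as plg
    import cv2
    import numpy as np
    import pyclipper
    from PIL import Image

    # Framework imports, then the local application import last.
    import mindspore.dataset.engine as de
    import mindspore.dataset.vision.py_transforms as py_transforms

    from src.config import config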
@@ -15,14 +15,16 @@

import time

import numpy as np

import mindspore.nn as nn
from mindspore.ops import functional as F
from mindspore.ops import composite as C
from mindspore import ParameterTuple
from mindspore.common.tensor import Tensor
from mindspore.train.callback import Callback
from mindspore.nn.wrap.grad_reducer import DistributedGradReducer
import numpy as np
from mindspore.ops import composite as C
from mindspore.ops import functional as F
from mindspore.train.callback import Callback

__all__ = ['LossCallBack', 'WithLossCell', 'TrainOneStepCell']
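
The __all__ list above exports three training helpers. For orientation, here is a minimal sketch of the kind of loss-wrapper cell that a WithLossCell export usually denotes in MindSpore model zoo code; the class body is illustrative only (the file's actual implementation is outside this hunk), and the backbone, loss_fn, data and label names are assumptions:

    import mindspore.nn as nn

    class SimpleWithLossCell(nn.Cell):
        """Illustrative wrapper that fuses a backbone network with its loss function."""
        def __init__(self, backbone, loss_fn):
            super(SimpleWithLossCell, self).__init__(auto_prefix=False)
            self._backbone = backbone
            self._loss_fn = loss_fn

        def construct(self, data, label):
            # Forward pass through the backbone, then reduce to a scalar loss.
            output = self._backbone(data)
            return self._loss_fn(output, label)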
@@ -13,12 +13,12 @@
# limitations under the License.
# ============================================================================
""" test dynamic shape """
from mindspore import Tensor, context, nn, Parameter
from mindspore.ops import operations as P
from mindspore import dtype as mstype

import numpy as np

from mindspore import Tensor, context, nn, Parameter
from mindspore import dtype as mstype
from mindspore.ops import operations as P

context.set_context(mode=context.GRAPH_MODE, save_graphs=False)
@@ -32,6 +32,7 @@ def test_sparse_apply_proximal_ada_grad():
        self.lr = 0.01
        self.l1 = 0.0
        self.l2 = 0.0

    def construct(self, grad, indices):
        out = self.sparse_apply_proximal_adagrad(self.var, self.accum, self.lr, self.l1, self.l2, grad, indices)
        return out[0]
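
The hunk above shows the hyperparameters and the construct method of the test network for SparseApplyProximalAdagrad. A self-contained sketch of how such a test net is typically assembled and invoked is shown below; the construct body mirrors the hunk, while the class name, parameter shapes and input values are assumptions made for illustration:

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Parameter, Tensor, context
    from mindspore import dtype as mstype
    from mindspore.ops import operations as P

    context.set_context(mode=context.GRAPH_MODE)

    class SparseProximalAdagradNet(nn.Cell):
        def __init__(self):
            super(SparseProximalAdagradNet, self).__init__()
            self.sparse_apply_proximal_adagrad = P.SparseApplyProximalAdagrad()
            self.var = Parameter(Tensor(np.ones([3, 3]).astype(np.float32)), name="var")
            self.accum = Parameter(Tensor(np.ones([3, 3]).astype(np.float32)), name="accum")
            self.lr = 0.01
            self.l1 = 0.0
            self.l2 = 0.0

        def construct(self, grad, indices):
            # The op updates var/accum and returns them; out[0] is the updated var.
            out = self.sparse_apply_proximal_adagrad(self.var, self.accum, self.lr,
                                                     self.l1, self.l2, grad, indices)
            return out[0]

    grad = Tensor(np.random.rand(3, 3).astype(np.float32))
    indices = Tensor(np.array([0, 1, 2]), mstype.int32)
    print(SparseProximalAdagradNet()(grad, indices))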