@@ -14,19 +14,20 @@
 # ============================================================================
+import math
 import os
 import random
-import math
-import Polygon as plg
 import cv2
-import pyclipper
 import numpy as np
+import pyclipper
 from PIL import Image
-from src.config import config
+import Polygon as plg
 import mindspore.dataset.engine as de
 import mindspore.dataset.vision.py_transforms as py_transforms
+from src.config import config

 __all__ = ['train_dataset_creator', 'test_dataset_creator']

 def get_img(img_path):
@@ -15,14 +15,16 @@
 import time
+import numpy as np
 import mindspore.nn as nn
-from mindspore.ops import functional as F
-from mindspore.ops import composite as C
 from mindspore import ParameterTuple
 from mindspore.common.tensor import Tensor
-from mindspore.train.callback import Callback
 from mindspore.nn.wrap.grad_reducer import DistributedGradReducer
-import numpy as np
+from mindspore.ops import composite as C
+from mindspore.ops import functional as F
+from mindspore.train.callback import Callback

 __all__ = ['LossCallBack', 'WithLossCell', 'TrainOneStepCell']
@@ -13,11 +13,11 @@
 # limitations under the License.
 # ============================================================================
 """ test dynamic shape """
+import numpy as np
 from mindspore import Tensor, context, nn, Parameter
-from mindspore.ops import operations as P
 from mindspore import dtype as mstype
-import numpy as np
+from mindspore.ops import operations as P

 context.set_context(mode=context.GRAPH_MODE, save_graphs=False)
@@ -32,6 +32,7 @@ def test_sparse_apply_proximal_ada_grad():
             self.lr = 0.01
             self.l1 = 0.0
             self.l2 = 0.0
+
         def construct(self, grad, indices):
             out = self.sparse_apply_proximal_adagrad(self.var, self.accum, self.lr, self.l1, self.l2, grad, indices)
             return out[0]
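For context, here is a minimal, self-contained sketch of the kind of cell the last hunk touches: an `nn.Cell` wrapping `P.SparseApplyProximalAdagrad` that applies a sparse gradient to its parameters. The `var`/`accum` shapes, the input values, and the standalone `Net` wrapper are illustrative assumptions added here, not part of the diff.

```python
# Hypothetical standalone example (assumed shapes/values, not from the diff):
# exercising a Cell that wraps SparseApplyProximalAdagrad, as in the hunk above.
import numpy as np
from mindspore import Tensor, context, nn, Parameter
from mindspore.ops import operations as P

context.set_context(mode=context.GRAPH_MODE)


class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.sparse_apply_proximal_adagrad = P.SparseApplyProximalAdagrad()
        # Parameters updated in place by the op; shapes are illustrative.
        self.var = Parameter(Tensor(np.ones([3, 3]).astype(np.float32)), name="var")
        self.accum = Parameter(Tensor(np.ones([3, 3]).astype(np.float32)), name="accum")
        # Scalar hyperparameters, mirroring the attributes set in the hunk above.
        self.lr = 0.01
        self.l1 = 0.0
        self.l2 = 0.0

    def construct(self, grad, indices):
        # The op returns (var, accum); only the updated var is returned here.
        out = self.sparse_apply_proximal_adagrad(self.var, self.accum, self.lr,
                                                 self.l1, self.l2, grad, indices)
        return out[0]


# grad rows correspond to the parameter rows selected by indices.
net = Net()
grad = Tensor(np.random.rand(2, 3).astype(np.float32))
indices = Tensor(np.array([0, 2], dtype=np.int32))
print(net(grad, indices))
```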