From: @zhao_ting_v  Reviewed-by: @wuxuejian, @liangchenghui  Signed-off-by: @wuxuejian  (pull/14894/MERGE)

@@ -28,8 +28,8 @@ from __future__ import print_function
 import os
 import numpy as np
-import pycocotools.coco as coco
 import cv2
+import pycocotools.coco as coco
 class CenterfaceDataset():
 """

@@ -16,6 +16,8 @@
 Data operations, will be used in train.py and eval.py
 """
 import os
+import cv2
+import numpy as np
 import mindspore.dataset as ds
 import mindspore.dataset.vision.c_transforms as C

@@ -24,9 +26,6 @@ from src.dataset_utils import lucky, noise_blur, noise_speckle, noise_gamma, noi
 randcrop, resize, rdistort, rgeometry, rotate_about_center, whole_rdistort, warp_perspective, random_contrast, \
 unify_img_label
-import cv2
-import numpy as np
 cv2.setNumThreads(0)
 image_height = None
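
Most of the Python hunks in this change are the same fix repeated: imports are regrouped so that standard-library modules come first, third-party packages next, and project-local src.* modules last, which is the layout pylint's wrong-import-order check expects. A minimal sketch of the target grouping (module names are illustrative, not taken from any single file):

    # Standard library imports first.
    import os

    # Third-party packages next (NumPy, OpenCV, MindSpore, ...).
    import cv2
    import numpy as np
    import mindspore.dataset as ds

    # Project-local imports last.
    from src.config import config  # hypothetical local module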

@@ -16,11 +16,6 @@
 import argparse
 import os
 import random
-from src.cnn_direction_model import CNNDirectionModel
-from src.config import config1 as config
-from src.dataset import create_dataset_train
 import numpy as np
 import mindspore as ms

@@ -35,6 +30,10 @@ from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMoni
 from mindspore.train.model import Model, ParallelMode
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
+from src.cnn_direction_model import CNNDirectionModel
+from src.config import config1 as config
+from src.dataset import create_dataset_train
 parser = argparse.ArgumentParser(description='Image classification')
 parser.add_argument('--run_distribute', type=bool, default=False, help='Run distribute')
 parser.add_argument('--device_num', type=int, default=1, help='Device num.')

@@ -15,11 +15,11 @@
 """Dataset preprocessing."""
 import os
 import numpy as np
+from PIL import Image, ImageFile
 import mindspore.common.dtype as mstype
 import mindspore.dataset as ds
 import mindspore.dataset.transforms.c_transforms as C
 import mindspore.dataset.vision.c_transforms as vc
-from PIL import Image, ImageFile
 from src.config import config1, label_dict
 from src.ic03_dataset import IC03Dataset
 from src.ic13_dataset import IC13Dataset

@@ -17,7 +17,7 @@
 if [ ! -d out ]; then
 mkdir out
 fi
-cd out
+cd out || exit
 cmake .. \
 -DMINDSPORE_PATH="`pip show mindspore-ascend | grep Location | awk '{print $2"/mindspore"}' | xargs realpath`"
 make
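
The recurring shell change appends "|| exit" to every "cd" (and splits "mkdir out && cd out" into two statements) so that a failed cd aborts the script instead of letting the commands that follow run in the wrong directory; this is the guard ShellCheck suggests in its SC2164 warning. For comparison, a small Python sketch of the same idea (paths are illustrative only): os.chdir raises on failure rather than continuing silently.

    import os
    import subprocess

    # os.chdir raises FileNotFoundError / NotADirectoryError if "out" is missing,
    # so the build step below can never run from the wrong directory.
    os.chdir("out")
    subprocess.run(["make"], check=True)  # check=True likewise stops on a failed build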

@@ -67,13 +67,13 @@ do
 cp ../*.py ./eval
 cp *.sh ./eval
 cp -r ../src ./eval
-cd ./eval
+cd ./eval || exit
 env > env.log
 CHECKPOINT_FILE_PATH=$file
 echo "start eval for checkpoint file: ${CHECKPOINT_FILE_PATH}"
 python eval.py --device_id=$DEVICE_ID --image_path=$IMAGE_PATH --dataset_path=$DATASET_PATH --checkpoint_path=$CHECKPOINT_FILE_PATH &> log
 echo "end eval for checkpoint file: ${CHECKPOINT_FILE_PATH}"
-cd ./submit
+cd ./submit || exit
 file_base_name=$(basename $file)
 zip -r ../../submit_${file_base_name%.*}.zip *.txt
 cd ../../

@@ -63,7 +63,7 @@ fi
 function compile_app()
 {
-cd ../ascend310_infer
+cd ../ascend310_infer || exit
 if [ -f "Makefile" ]; then
 make clean
 fi

@@ -113,9 +113,9 @@ function cal_acc()
 if [ -f "ubmit.zip" ]; then
 rm -f submit.zip
 fi
-cd output
+cd output || exit
 zip -r ../submit.zip *.txt
-cd -
+cd - || exit
 }
 compile_app

@@ -17,7 +17,8 @@ if [ -d out ]; then
 rm -rf out
 fi
-mkdir out && cd out
+mkdir out
+cd out || exit
 if [ -f "Makefile" ]; then
 make clean

@@ -60,13 +60,13 @@ fi
 function compile_app()
 {
-cd ../ascend310_infer
+cd ../ascend310_infer || exit
 bash build.sh &> build.log
 }
 function infer()
 {
-cd -
+cd - || exit
 if [ -d result_Files ]; then
 rm -rf ./result_Files
 fi

@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # ============================================================================
-path_cur=$(cd "`dirname $0`"; pwd)
+path_cur=$(cd "`dirname $0`" || exit; pwd)
 build_type="Release"
 function preparePath() {

@@ -67,9 +67,9 @@ function air_to_om()
 function compile_app()
 {
-cd ../ascend310_infer/src
+cd ../ascend310_infer/src || exit
 sh build.sh &> build.log
-cd -
+cd - || exit
 }
 function infer()

@@ -15,9 +15,9 @@
 """coco eval for fasterrcnn"""
 import json
 import numpy as np
+import mmcv
 from pycocotools.coco import COCO
 from pycocotools.cocoeval import COCOeval
-import mmcv
 _init_value = np.array(0.0)
 summary_init = {

@@ -18,7 +18,7 @@ rm -rf device
 mkdir device
 cp ./*.py ./device
 cp -r ./src ./device
-cd ./device
+cd ./device || exit
 DATA_DIR=$1

@@ -18,7 +18,7 @@ rm -rf evaluation
 mkdir evaluation
 cp ./*.py ./evaluation
 cp -r ./src ./evaluation
-cd ./evaluation
+cd ./evaluation || exit
 export DEVICE_ID=0
 export RANK_SIZE=1

@@ -13,13 +13,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # ============================================================================
-path_cur=$(cd "`dirname $0`"; pwd)
+path_cur=$(cd "`dirname $0`" || exit; pwd)
 build_type="Release"
 function preparePath() {
 rm -rf $1
 mkdir -p $1
-cd $1
+cd $1 || exit
 }
 function buildA300() {

@@ -19,13 +19,13 @@ from __future__ import division
 import os
 import numpy as np
 from numpy import random
+import cv2
 import mmcv
 import mindspore.dataset as de
 import mindspore.dataset.vision.c_transforms as C
 from mindspore.mindrecord import FileWriter
 from src.config import config
-import cv2
 def bbox_overlaps(bboxes1, bboxes2, mode='iou'):
 """Calculate the ious between each bbox of bboxes1 and bboxes2.

@@ -15,10 +15,10 @@
 """coco eval for maskrcnn"""
 import json
 import numpy as np
+import mmcv
 from pycocotools.coco import COCO
 from pycocotools.cocoeval import COCOeval
 from pycocotools import mask as maskUtils
-import mmcv
 from src.config import config

@@ -21,6 +21,7 @@ import mmcv
 import cv2
 import numpy as np
 from numpy import random
+
 import mindspore.dataset as de
 import mindspore.dataset.vision.c_transforms as C
 from mindspore.mindrecord import FileWriter

@@ -18,9 +18,9 @@ import argparse
 import warnings
 import sys
 import numpy as np
-from tqdm import tqdm
 import cv2
 from scipy.ndimage.filters import gaussian_filter
+from tqdm import tqdm
 from pycocotools.coco import COCO as LoadAnn
 from pycocotools.cocoeval import COCOeval as MapEval

@@ -21,6 +21,7 @@
 #include <iostream>
 #include <queue>
 #include <vector>
+#include <utility>
 #include <opencv2/opencv.hpp>
 #include <opencv2/core/core.hpp>
 #include <opencv2/highgui/highgui.hpp>

@@ -76,7 +77,7 @@ namespace pse_adaptor {
 int dx[] = {-1, 1, 0, 0};
 int dy[] = {0, 0, -1, 1};
-for (int kernal_id = kernels.size() - 2; kernal_id >= 0; --kernal_id) {
+for (int kernel_id = kernels.size() - 2; kernel_id >= 0; --kernel_id) {
 while (!queue.empty()) {
 Point point = queue.front();
 queue.pop();

@@ -90,7 +91,7 @@ namespace pse_adaptor {
 if (tmp_x < 0 || tmp_x >= static_cast<int>(text_line->size())) continue;
 if (tmp_y < 0 || tmp_y >= static_cast<int>(text_line->at(1).size())) continue;
-if (kernels[kernal_id].at<char>(tmp_x, tmp_y) == 0) continue;
+if (kernels[kernel_id].at<char>(tmp_x, tmp_y) == 0) continue;
 if (text_line->at(tmp_x)[tmp_y] > 0) continue;
 Point point_tmp(tmp_x, tmp_y);

@@ -17,17 +17,17 @@
 import math
 import os
 import random
-import Polygon as plg
 import cv2
-from PIL import Image
 import numpy as np
+import Polygon as plg
 import pyclipper
+from PIL import Image
-from src.config import config
 import mindspore.dataset as ds
 import mindspore.dataset.vision.py_transforms as py_transforms
+from src.config import config
 __all__ = ['train_dataset_creator', 'test_dataset_creator']

@@ -15,12 +15,12 @@
 """ResNet."""
 import math
 import numpy as np
+from scipy.stats import truncnorm
 import mindspore.nn as nn
 import mindspore.common.dtype as mstype
 from mindspore.ops import operations as P
 from mindspore.ops import functional as F
 from mindspore.common.tensor import Tensor
-from scipy.stats import truncnorm
 def _conv_variance_scaling_initializer(in_channel, out_channel, kernel_size):

@@ -14,11 +14,11 @@
 # ============================================================================
 """ResNet."""
 import numpy as np
+from scipy.stats import truncnorm
 import mindspore.nn as nn
 import mindspore.common.dtype as mstype
 from mindspore.ops import operations as P
 from mindspore.common.tensor import Tensor
-from scipy.stats import truncnorm
 format_ = "NHWC"
 # tranpose shape to NCHW, default init is NHWC.

@@ -69,7 +69,7 @@ do
 cp ../*.py ./device$i
 cp *.sh ./device$i
 cp -r ../src ./device$i
-cd ./device$i
+cd ./device$i || exit
 export DEVICE_ID=$i
 export RANK_ID=$i
 echo "start training for device $i"

@@ -60,7 +60,7 @@ mkdir ./train
 cp ../*.py ./train
 cp *.sh ./train
 cp -r ../src ./train
-cd ./train
+cd ./train || exit
 echo "start training for device $DEVICE_ID"
 env > env.log
 if [ $# == 1 ]

@@ -1,12 +1,12 @@
 """ResNet"""
 import numpy as np
+from scipy.stats import truncnorm
 import mindspore.nn as nn
 import mindspore.common.dtype as mstype
 from mindspore.ops import operations as P
 from mindspore.ops import functional as F
 from mindspore.common.tensor import Tensor
-from scipy.stats import truncnorm
 def _conv_variance_scaling_initializer(in_channel, out_channel, kernel_size):

@@ -16,11 +16,11 @@
 dataset processing.
 """
 import os
+from PIL import Image, ImageFile
 from mindspore.common import dtype as mstype
 import mindspore.dataset as de
 import mindspore.dataset.transforms.c_transforms as C
 import mindspore.dataset.vision.c_transforms as V_C
-from PIL import Image, ImageFile
 from src.utils.sampler import DistributedSampler
 ImageFile.LOAD_TRUNCATED_IMAGES = True

@@ -16,11 +16,11 @@
 dataset processing.
 """
 import os
+from PIL import Image, ImageFile
 from mindspore.common import dtype as mstype
 import mindspore.dataset as de
 import mindspore.dataset.transforms.c_transforms as C
 import mindspore.dataset.vision.c_transforms as V_C
-from PIL import Image, ImageFile
 from src.utils.sampler import DistributedSampler
 ImageFile.LOAD_TRUNCATED_IMAGES = True

@@ -17,7 +17,8 @@ if [ -d out ]; then
 rm -rf out
 fi
-mkdir out && cd out
+mkdir out
+cd out || exit
 if [ -f "Makefile" ]; then
 make clean

@@ -58,13 +58,13 @@ fi
 function compile_app()
 {
-cd ../ascend310_infer
+cd ../ascend310_infer || exit
 bash build.sh &> build.log
 }
 function infer()
 {
-cd -
+cd - || exit
 if [ -d result_Files ]; then
 rm -rf ./result_Files
 fi

@@ -19,5 +19,5 @@ rm -rf ./train_single
 mkdir ./train_single
 cp -r ./src ./train_single
 cp ./train.py ./train_single
-cd ./train_single
+cd ./train_single || exit
 python ./train.py --device_id=$DEVICE_ID > ./train.log 2>&1 &

@@ -21,7 +21,6 @@ import numpy as np
 from src.data_loader import create_dataset, create_cell_nuclei_dataset
 from src.config import cfg_unet
-
 class dice_coeff():
 def __init__(self):
 self.clear()

@@ -79,7 +79,7 @@ function compile_app()
 function infer()
 {
-cd -
+cd - || exit
 if [ -d result_Files ]; then
 rm -rf ./result_Files
 fi

@@ -75,7 +75,7 @@ mkdir ./eval
 cp ../*.py ./eval
 cp *.sh ./eval
 cp -r ../src ./eval
-cd ./eval
+cd ./eval || exit
 echo "start eval for checkpoint file: ${CHECKPOINT_FILE_PATH}"
 python eval.py --data_url=$IMAGE_PATH --seg_url=$SEG_PATH --ckpt_path=$CHECKPOINT_FILE_PATH > eval.log 2>&1 &
 echo "end eval for checkpoint file: ${CHECKPOINT_FILE_PATH}"

@@ -16,11 +16,11 @@
 dataset processing.
 """
 import os
+from PIL import Image, ImageFile
 from mindspore.common import dtype as mstype
 import mindspore.dataset as de
 import mindspore.dataset.transforms.c_transforms as C
 import mindspore.dataset.vision.c_transforms as vision
-from PIL import Image, ImageFile
 from src.utils.sampler import DistributedSampler
 ImageFile.LOAD_TRUNCATED_IMAGES = True

@@ -16,11 +16,11 @@
 import os
 import math as m
 import numpy as np
+from PIL import Image
 import mindspore.common.dtype as mstype
 import mindspore.dataset as ds
 import mindspore.dataset.transforms.c_transforms as c
 import mindspore.dataset.vision.c_transforms as vc
-from PIL import Image
 from src.config import config as cf

@@ -69,7 +69,7 @@ function compile_app()
 function infer()
 {
-cd -
+cd - || exit
 if [ -d result_Files ]; then
 rm -rf ./result_Files
 fi

@@ -53,7 +53,7 @@ for((i=0;i<=7;i++));
 do
 rm -rf ${current_exec_path}/device$i
 mkdir ${current_exec_path}/device$i
-cd ${current_exec_path}/device$i
+cd ${current_exec_path}/device$i || exit
 cp ../../*.py ./
 cp -r ../../src ./
 cp -r ../*.sh ./

@@ -61,6 +61,6 @@ do
 export DEVICE_ID=$i
 echo "start training for rank $i, device $DEVICE_ID"
 python ../../train.py --data_path $DATASET --data_name $DATANAME > log_fasttext.log 2>&1 &
-cd ${current_exec_path}
+cd ${current_exec_path} || exit
 done
-cd ${current_exec_path}
+cd ${current_exec_path} || exit

@@ -14,11 +14,12 @@
 # ============================================================================
 """Data operations, will be used in train.py."""
+import numpy as np
 import mindspore.common.dtype as mstype
 import mindspore.dataset as de
 import mindspore.dataset.transforms.c_transforms as deC
 from src.config import config
-import numpy as np
 de.config.set_seed(1)
 def random_teacher_force(source_ids, target_ids, target_mask):

@@ -26,7 +26,7 @@ CKPT_FILE=$3
 rm -rf eval
 mkdir -p eval
-cd eval
+cd eval || exit
 mkdir -p ms_log
 CUR_DIR=`pwd`
 export GLOG_log_dir=${CUR_DIR}/ms_log

@@ -25,7 +25,7 @@ ACLIMDB_DIR=$2
 GLOVE_DIR=$3
 mkdir -p train
-cd train
+cd train || exit
 mkdir -p ms_log
 CUR_DIR=`pwd`
 export GLOG_log_dir=${CUR_DIR}/ms_log

@@ -17,7 +17,7 @@
 if [ ! -d out ]; then
 mkdir out
 fi
-cd out
+cd out || exit
 export CXXFLAGS=-D_GLIBCXX_USE_CXX11_ABI=0
 cmake .. -DCMAKE_CXX_COMPILER=g++ -DCMAKE_SKIP_RPATH=TRUE
 make

@@ -78,7 +78,7 @@ function air_to_om()
 function compile_app()
 {
-cd ../ascend310_infer
+cd ../ascend310_infer || exit
 if [ -f "Makefile" ]; then
 make clean
 fi

@@ -88,7 +88,7 @@ function compile_app()
 echo "compile app code failed"
 exit 1
 fi
-cd -
+cd - || exit
 }
 function infer()

@@ -17,9 +17,9 @@
 Defined callback for DeepSpeech.
 """
 import time
+import numpy as np
 from mindspore.train.callback import Callback
 from mindspore import Tensor
-import numpy as np
 class TimeMonitor(Callback):

@@ -17,10 +17,10 @@ import os
 from os.path import join
 import argparse
 import glob
-from hparams import hparams, hparams_debug_string
-import audio
 import numpy as np
 from scipy.io import wavfile
+from hparams import hparams, hparams_debug_string
+import audio
 from tqdm import tqdm
 from mindspore import context, Tensor
 from mindspore.train.serialization import load_checkpoint, load_param_into_net

@@ -17,12 +17,12 @@ import json
 from os.path import join
 import argparse
 from warnings import warn
+import numpy as np
 from hparams import hparams, hparams_debug_string
 from mindspore import context, Tensor
 from mindspore.train.serialization import load_checkpoint, load_param_into_net, export
 from wavenet_vocoder import WaveNet
 from wavenet_vocoder.util import is_mulaw_quantize, is_scalar_input
-import numpy as np
 from src.loss import PredictNet
 parser = argparse.ArgumentParser(description='TTS training')

@@ -15,9 +15,9 @@
 Defined callback for DeepFM.
 """
 import time
+import numpy as np
 from mindspore.train.callback import Callback
 from mindspore import Tensor
-import numpy as np
 class TimeMonitor(Callback):

@@ -15,11 +15,11 @@
 """Extended Conv1D."""
 import math
+import numpy as np
 from mindspore import nn, Tensor
 from mindspore.ops import operations as P
 import mindspore.common.dtype as mstype
 from mindspore import context
-import numpy as np
 class Conv1d(nn.Conv1d):
 """

@@ -69,7 +69,7 @@ for((i=0;i<=$RANK_SIZE-1;i++));
 do
 echo 'start rank '$i
 mkdir ${current_exec_path}/device$i
-cd ${current_exec_path}/device$i
+cd ${current_exec_path}/device$i || exit
 export RANK_ID=$i
 dev=`expr $i + 0`
 export DEVICE_ID=$dev

@@ -61,7 +61,7 @@ export RANK_ID=0
 rm -rf ${current_exec_path}/device$USE_DEVICE_ID
 echo 'start device '$USE_DEVICE_ID
 mkdir ${current_exec_path}/device$USE_DEVICE_ID
-cd ${current_exec_path}/device$USE_DEVICE_ID
+cd ${current_exec_path}/device$USE_DEVICE_ID || exit
 dev=`expr $USE_DEVICE_ID + 0`
 export DEVICE_ID=$dev
 python ${dirname_path}/${SCRIPT_NAME} \

@@ -61,7 +61,7 @@ export RANK_ID=0
 rm -rf ${current_exec_path}/device$USE_DEVICE_ID
 echo 'start device '$USE_DEVICE_ID
 mkdir ${current_exec_path}/device$USE_DEVICE_ID
-cd ${current_exec_path}/device$USE_DEVICE_ID
+cd ${current_exec_path}/device$USE_DEVICE_ID || exit
 dev=`expr $USE_DEVICE_ID + 0`
 export DEVICE_ID=$dev
 python ${dirname_path}/${SCRIPT_NAME} \

@@ -66,7 +66,7 @@ export RANK_ID=0
 rm -rf ${current_exec_path}/device$USE_DEVICE_ID
 echo 'start device '$USE_DEVICE_ID
 mkdir ${current_exec_path}/device$USE_DEVICE_ID
-cd ${current_exec_path}/device$USE_DEVICE_ID
+cd ${current_exec_path}/device$USE_DEVICE_ID || exit
 dev=`expr $USE_DEVICE_ID + 0`
 export DEVICE_ID=$dev
 python ${dirname_path}/${SCRIPT_NAME} \

@@ -17,8 +17,8 @@ import os
 import warnings
 import argparse
 import numpy as np
-from tqdm import tqdm
 import cv2
+from tqdm import tqdm
 import mindspore.nn as nn
 from mindspore import Tensor

@@ -75,7 +75,7 @@ for((i=0;i<=$RANK_SIZE-1;i++));
 do
 echo 'start rank '$i
 mkdir ${current_exec_path}/device$i
-cd ${current_exec_path}/device$i
+cd ${current_exec_path}/device$i || exit
 export RANK_ID=$i
 dev=`expr $i + 0`
 export DEVICE_ID=$dev

@@ -61,7 +61,7 @@ export RANK_ID=0
 rm -rf ${current_exec_path}/device$USE_DEVICE_ID
 echo 'start device '$USE_DEVICE_ID
 mkdir ${current_exec_path}/device$USE_DEVICE_ID
-cd ${current_exec_path}/device$USE_DEVICE_ID
+cd ${current_exec_path}/device$USE_DEVICE_ID || exit
 dev=`expr $USE_DEVICE_ID + 0`
 export DEVICE_ID=$dev
 python ${dirname_path}/${SCRIPT_NAME} \

@@ -72,7 +72,7 @@ export RANK_ID=0
 rm -rf ${current_exec_path}/device$USE_DEVICE_ID
 echo 'start device '$USE_DEVICE_ID
 mkdir ${current_exec_path}/device$USE_DEVICE_ID
-cd ${current_exec_path}/device$USE_DEVICE_ID
+cd ${current_exec_path}/device$USE_DEVICE_ID || exit
 dev=`expr $USE_DEVICE_ID + 0`
 export DEVICE_ID=$dev
 python ${dirname_path}/${SCRIPT_NAME} \

@@ -102,7 +102,7 @@ def main(args):
 else:
 param_dict_new[key] = values
 load_param_into_net(network, param_dict_new)
-cfg.logger.info('load model {} success'.format(cfg.pretrained))
+cfg.logger.info('load model %s success' % str(cfg.pretrained))
 # optimizer and lr scheduler
 lr = warmup_step(cfg, gamma=0.9)
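
The logger call above moves from str.format to %-style formatting with an explicit str() conversion. If cfg.logger wraps Python's standard logging module (an assumption; the logger here is project-specific), the form pylint's logging checks ultimately prefer is lazy interpolation, where the arguments are passed to the call and only rendered when the record is actually emitted. A small sketch with placeholder values:

    import logging

    logger = logging.getLogger(__name__)
    pretrained = "/path/to/model.ckpt"  # placeholder value

    # Eager: the message string is built even when INFO logging is disabled.
    logger.info('load model %s success' % str(pretrained))

    # Lazy: logging interpolates the argument only when the record is emitted.
    logger.info('load model %s success', pretrained)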

@@ -328,6 +328,6 @@ if __name__ == '__main__':
 log_path = os.path.join(arg.ckpt_path, 'logs')
 arg.logger = get_logger(log_path, arg.local_rank)
-arg.logger.info('Config\n\n%s\n' % pformat(arg))
+arg.logger.info('Config\n\n%s\n' % str(pformat(arg)))
 main(arg)

@@ -42,7 +42,7 @@ export RANK_SIZE=8
 export RANK_TABLE_FILE=$PATH1
 EXECUTE_PATH=$(pwd)
-echo *******************EXECUTE_PATH= $EXECUTE_PATH
+echo *******************EXECUTE_PATH=$EXECUTE_PATH
 if [ -d "${EXECUTE_PATH}/log_parallel_graph" ]; then
 echo "[INFO] Delete old data_parallel log files"
 rm -rf ${EXECUTE_PATH}/log_parallel_graph

@@ -53,7 +53,7 @@ for((i=0;i<=7;i++));
 do
 rm -rf ${EXECUTE_PATH}/data_parallel_log_$i
 mkdir -p ${EXECUTE_PATH}/data_parallel_log_$i
-cd ${EXECUTE_PATH}/data_parallel_log_$i
+cd ${EXECUTE_PATH}/data_parallel_log_$i || exit
 export RANK_ID=$i
 export DEVICE_ID=$i
 echo "start training for rank $RANK_ID, device $DEVICE_ID"

@@ -42,7 +42,7 @@ export RANK_SIZE=8
 export RANK_TABLE_FILE=$PATH1
 EXECUTE_PATH=$(pwd)
-echo *******************EXECUTE_PATH= $EXECUTE_PATH
+echo *******************EXECUTE_PATH=$EXECUTE_PATH
 if [ -d "${EXECUTE_PATH}/log_parallel_graph" ]; then
 echo "[INFO] Delete old data_parallel log files"
 rm -rf ${EXECUTE_PATH}/log_parallel_graph

@@ -53,7 +53,7 @@ for((i=0;i<=7;i++));
 do
 rm -rf ${EXECUTE_PATH}/data_parallel_log_$i
 mkdir -p ${EXECUTE_PATH}/data_parallel_log_$i
-cd ${EXECUTE_PATH}/data_parallel_log_$i
+cd ${EXECUTE_PATH}/data_parallel_log_$i || exit
 export RANK_ID=$i
 export DEVICE_ID=$i
 echo "start training for rank $RANK_ID, device $DEVICE_ID"

@@ -39,7 +39,7 @@ if [ -d "${EXECUTE_PATH}/log_inference" ]; then
 fi
 mkdir ${EXECUTE_PATH}/log_inference
-cd ${EXECUTE_PATH}/log_inference
+cd ${EXECUTE_PATH}/log_inference || exit
 env > ${EXECUTE_PATH}/log_inference/face_recognition.log
 python ${EXECUTE_PATH}/../eval.py &> ${EXECUTE_PATH}/log_inference/face_recognition.log &

@@ -61,7 +61,7 @@ export RANK_ID=0
 rm -rf ${current_exec_path}/device$USE_DEVICE_ID
 echo 'start device '$USE_DEVICE_ID
 mkdir ${current_exec_path}/device$USE_DEVICE_ID
-cd ${current_exec_path}/device$USE_DEVICE_ID
+cd ${current_exec_path}/device$USE_DEVICE_ID || exit
 dev=`expr $USE_DEVICE_ID + 0`
 export DEVICE_ID=$dev
 python ${dirname_path}/${SCRIPT_NAME} \

@@ -75,7 +75,7 @@ for((i=0;i<=$RANK_SIZE-1;i++));
 do
 echo 'start rank '$i
 mkdir ${current_exec_path}/device$i
-cd ${current_exec_path}/device$i
+cd ${current_exec_path}/device$i || exit
 export RANK_ID=$i
 dev=`expr $i + 0`
 export DEVICE_ID=$dev

@@ -61,7 +61,7 @@ export RANK_ID=0
 rm -rf ${current_exec_path}/device$USE_DEVICE_ID
 echo 'start device '$USE_DEVICE_ID
 mkdir ${current_exec_path}/device$USE_DEVICE_ID
-cd ${current_exec_path}/device$USE_DEVICE_ID
+cd ${current_exec_path}/device$USE_DEVICE_ID || exit
 dev=`expr $USE_DEVICE_ID + 0`
 export DEVICE_ID=$dev
 python ${dirname_path}/${SCRIPT_NAME} \

@@ -61,7 +61,7 @@ export RANK_ID=0
 rm -rf ${current_exec_path}/device$USE_DEVICE_ID
 echo 'start device '$USE_DEVICE_ID
 mkdir ${current_exec_path}/device$USE_DEVICE_ID
-cd ${current_exec_path}/device$USE_DEVICE_ID
+cd ${current_exec_path}/device$USE_DEVICE_ID || exit
 dev=`expr $USE_DEVICE_ID + 0`
 export DEVICE_ID=$dev
 python ${dirname_path}/${SCRIPT_NAME} \

@@ -72,7 +72,7 @@ export RANK_ID=0
 rm -rf ${current_exec_path}/device$USE_DEVICE_ID
 echo 'start device '$USE_DEVICE_ID
 mkdir ${current_exec_path}/device$USE_DEVICE_ID
-cd ${current_exec_path}/device$USE_DEVICE_ID
+cd ${current_exec_path}/device$USE_DEVICE_ID || exit
 dev=`expr $USE_DEVICE_ID + 0`
 export DEVICE_ID=$dev
 python ${dirname_path}/${SCRIPT_NAME} \

@@ -17,11 +17,9 @@ import os
 import glob
 import random
 import pickle
-from src.data import common
 import numpy as np
 import imageio
+from src.data import common
 def search(root, target="JPEG"):

@@ -34,7 +34,7 @@ def calc_psnr(sr, hr, scale, rgb_range, y_only=False, dataset=None):
 gray_coeffs = np.array([65.738, 129.057, 25.064]
 ).reshape((1, 3, 1, 1)) / 256
 diff = np.multiply(diff, gray_coeffs).sum(1)
-if hr.size == 1:
+if np.size(hr) == 1:
 return 0
 if scale != 1:
 shave = scale
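
np.size(hr) is the functional form of ndarray.size; unlike the attribute it also works on plain Python scalars and sequences, which is presumably the reason for the rewrite, since calc_psnr may be handed a degenerate hr. A small illustration with made-up values:

    import numpy as np

    hr_array = np.zeros((1, 3, 8, 8))
    hr_scalar = 0.5  # e.g. a result collapsed to a single value

    print(np.size(hr_array))   # 192, identical to hr_array.size
    print(np.size(hr_scalar))  # 1; hr_scalar.size would raise AttributeError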

@@ -37,10 +37,10 @@ then
 else
 echo "NMS module was not found, install it now..."
 git clone https://github.com/xingyizhou/CenterNet.git
-cd CenterNet/src/lib/external/
+cd CenterNet/src/lib/external/ || exit
 make
 python setup.py install
-cd -
+cd - || exit
 rm -rf CenterNet
 fi

@@ -36,10 +36,10 @@ then
 else
 echo "NMS module was not found, install it now..."
 git clone https://github.com/xingyizhou/CenterNet.git
-cd CenterNet/src/lib/external/
+cd CenterNet/src/lib/external/ || exit
 make
 python setup.py install
-cd -
+cd - || exit
 rm -rf CenterNet
 fi

@@ -13,9 +13,9 @@
 # limitations under the License.
 # ============================================================================
 """hub config."""
-from src.resnet_imgnet import resnet50
-from mindspore import Tensor
 import numpy as np
+from mindspore import Tensor
+from src.resnet_imgnet import resnet50
 def get_index(filename):

@@ -15,7 +15,7 @@
 # ============================================================================
 mkdir -p ms_log
-PROJECT_DIR=$(cd "$(dirname "$0")"; pwd)
+PROJECT_DIR=$(cd "$(dirname "$0")" || exit; pwd)
 CUR_DIR=`pwd`
 export GLOG_log_dir=${CUR_DIR}/ms_log
 export GLOG_logtostderr=0

@@ -15,8 +15,8 @@
 """config script"""
-import mindspore.common.dtype as mstype
 from easydict import EasyDict as edict
+import mindspore.common.dtype as mstype
 from .tinybert_model import BertConfig
 from .assessment_method import Accuracy, F1, Pearsonr, Matthews

@@ -15,8 +15,8 @@
 """dataset api"""
 import os
 from itertools import chain
+import gensim
 import numpy as np
-import gensim
 from mindspore.mindrecord import FileWriter
 import mindspore.dataset as ds

@@ -18,8 +18,8 @@ from collections import defaultdict
 import random
 from time import time
 import json
-from tqdm import tqdm
 import numpy as np
+from tqdm import tqdm
 from transformers import AlbertTokenizer

@@ -26,9 +26,9 @@ import gzip
 import string
 import pickle
 import sqlite3
+import numpy as np
 from tqdm import tqdm
-import numpy as np
 from transformers import BasicTokenizer

@@ -18,8 +18,8 @@ import json
 import random
 from collections import defaultdict
 from time import time
-from tqdm import tqdm
 import numpy as np
+from tqdm import tqdm
 from mindspore import Tensor, ops
 from mindspore import dtype as mstype