You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

imgprocess.py 6.6 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189
  1. """
  2. /**
  3. * Copyright 2020 Zhejiang Lab. All Rights Reserved.
  4. *
  5. * Licensed under the Apache License, Version 2.0 (the "License");
  6. * you may not use this file except in compliance with the License.
  7. * You may obtain a copy of the License at
  8. *
  9. * http://www.apache.org/licenses/LICENSE-2.0
  10. *
  11. * Unless required by applicable law or agreed to in writing, software
  12. * distributed under the License is distributed on an "AS IS" BASIS,
  13. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. * See the License for the specific language governing permissions and
  15. * limitations under the License.
  16. * =============================================================
  17. */
  18. """
  19. # !/usr/bin/env python3
  20. # -*- coding: utf-8 -*-
  21. from datetime import datetime
  22. import sched
  23. import os
  24. import cv2
  25. import numpy as np
  26. import logging
  27. import time
  28. import json
  29. import argparse
  30. import sys
  31. import codecs
  32. import shutil
  33. import luascript.delaytaskscript as delay_script
  34. import common.config as config
  35. from common.augment_utils.ACE import ACE_color
  36. from common.augment_utils.dehaze import deHaze, addHaze
  37. from common.augment_utils.hist_equalize import adaptive_hist_equalize
  38. from common.log_config import setup_log
# Shared scheduler used by the delay-task keepalive thread (delayKeyThread).
schedule = sched.scheduler(time.time, time.sleep)
# Quoted id of the task currently being processed; read by delaySchduled.
delayId = ""
# Payload pushed to the finish/failed redis queues; set in image_enhance_process.
finish_key = {}
# Current task id; set in image_enhance_process, read by its except path.
re_task_id = {}
# Force UTF-8 on stdout so non-ASCII paths/logs print without errors.
sys.stdout = codecs.getwriter("utf-8")(sys.stdout.detach())
# task url suffix
img_pro_url = 'api/data/datasets/'
# arguments
parser = argparse.ArgumentParser(description="config for image augmentation server")
parser.add_argument("-m", "--mode", type=str, default="test", required=False)
args = parser.parse_args()
# url concat(ip + port + suffix): ./common/config/url.json maps mode -> base url
url_json = './common/config/url.json'
with open(url_json) as f:
    url_dict = json.loads(f.read())
img_pro_url = url_dict[args.mode] + img_pro_url
# create task queue
base_path = "/nfs/"
# create log path and file
des_folder = os.path.join('./log', args.mode)
if not os.path.exists(des_folder):
    os.makedirs(des_folder)
# NOTE(review): this deliberately shadows the stdlib `logging` module with a
# configured logger; every `logging.info/error` below goes through setup_log.
logging = setup_log(args.mode, 'enhance-' + args.mode + '.log')
enhanceTaskId = ""
  63. def start_enhance_task(enhanceTaskId, redisClient):
  64. """Enhance task method.
  65. Args:
  66. enhanceTaskId: enhance task id.
  67. redisClient: redis client.
  68. """
  69. global delayId
  70. detailKey = 'imgProcess:' + eval(str(enhanceTaskId[0], encoding="utf-8"))
  71. delayId = "\"" + eval(str(enhanceTaskId[0], encoding="utf-8")) + "\""
  72. print(detailKey)
  73. taskParameters = json.loads(redisClient.get(detailKey).decode())
  74. dataset_id = taskParameters['id']
  75. img_save_path = taskParameters['enhanceFilePath']
  76. ann_save_path = taskParameters["enhanceAnnotationPath"]
  77. file_list = taskParameters['fileDtos']
  78. nums_, img_path_list, ann_path_list = img_ann_list_gen(file_list)
  79. process_type = taskParameters['type']
  80. re_task_id = eval(str(enhanceTaskId[0], encoding="utf-8"))
  81. img_process_config = [dataset_id, img_save_path,
  82. ann_save_path, img_path_list,
  83. ann_path_list, process_type, re_task_id]
  84. image_enhance_process(img_process_config, redisClient)
  85. logging.info(str(nums_) + ' images for augment')
  86. def img_ann_list_gen(file_list):
  87. """Analyze the json request and convert to list"""
  88. nums_ = len(file_list)
  89. img_list = []
  90. ann_list = []
  91. for i in range(nums_):
  92. img_list.append(file_list[i]['filePath'])
  93. ann_list.append(file_list[i]['annotationPath'])
  94. return nums_, img_list, ann_list
  95. def image_enhance_process(img_task, redisClient):
  96. """The implementation of image augmentation thread"""
  97. global img_pro_url
  98. global finish_key
  99. global re_task_id
  100. logging.info('img_process server start'.center(66, '-'))
  101. logging.info(img_pro_url)
  102. try:
  103. dataset_id = img_task[0]
  104. img_save_path = img_task[1]
  105. ann_save_path = img_task[2]
  106. img_list = img_task[3]
  107. ann_list = img_task[4]
  108. method = img_task[5]
  109. re_task_id = img_task[6]
  110. suffix = '_enchanced_' + re_task_id
  111. logging.info("dataset_id " + str(dataset_id))
  112. finish_key = {"processKey": re_task_id}
  113. finish_data = {"id": re_task_id,
  114. "suffix": suffix}
  115. for j in range(len(ann_list)):
  116. img_path = img_list[j]
  117. ann_path = ann_list[j]
  118. img_process(suffix, img_path, ann_path,
  119. img_save_path, ann_save_path, method)
  120. redisClient.lpush(config.imgProcessFinishQueue, json.dumps(finish_key, separators=(',', ':')))
  121. redisClient.set("imgProcess:finished:" + re_task_id, json.dumps(finish_data))
  122. redisClient.zrem(config.imgProcessStartQueue, "\"" + re_task_id + "\"")
  123. logging.info('suffix:' + suffix)
  124. logging.info("End img_process of dataset:" + str(dataset_id))
  125. except Exception as e:
  126. redisClient.lpush(config.imgProcessFailedQueue, json.dumps(finish_key, separators=(',', ':')))
  127. redisClient.zrem(config.imgProcessStartQueue, "\"" + re_task_id + "\"")
  128. logging.info(img_pro_url)
  129. logging.error("Error imgProcess")
  130. logging.error(e)
  131. time.sleep(0.01)
  132. def img_process(suffix, img_path, ann_path, img_save_path, ann_save_path, method_ind):
  133. """Process images and save in specified path"""
  134. inds2method = {1: deHaze, 2: addHaze, 3: ACE_color, 4: adaptive_hist_equalize}
  135. method = inds2method[method_ind]
  136. img_raw = cv2.imdecode(np.fromfile(img_path.encode('utf-8'), dtype=np.uint8), 1)
  137. img_suffix = os.path.splitext(img_path)[-1]
  138. ann_name = ann_path.replace(ann_save_path, '')
  139. if method_ind <= 3:
  140. processed_img = method(img_raw / 255.0) * 255
  141. else:
  142. processed_img = method(img_raw)
  143. cv2.imwrite(img_save_path + ann_name + suffix + img_suffix,
  144. processed_img.astype(np.uint8))
  145. shutil.copyfile(ann_path.encode('utf-8'), (ann_path + suffix).encode('utf-8'))
  146. def delaySchduled(inc, redisClient):
  147. """Delay task method.
  148. Args:
  149. inc: scheduled task time.
  150. redisClient: redis client.
  151. """
  152. try:
  153. logging.info("delay:" + datetime.now().strftime("B%Y-%m-%d %H:%M:%S") + ":" + delayId)
  154. redisClient.eval(delay_script.delayTaskLua, 1, config.imgProcessStartQueue, delayId, int(time.time()))
  155. schedule.enter(inc, 0, delaySchduled, (inc, redisClient))
  156. except Exception as e:
  157. print("delay error" + e)
  158. def delayKeyThread(redisClient):
  159. """Delay task thread.
  160. Args:
  161. redisClient: redis client.
  162. """
  163. schedule.enter(0, 0, delaySchduled, (5, redisClient))
  164. schedule.run()

一站式算法开发平台、高性能分布式深度学习框架、先进算法模型库、视觉模型炼知平台、数据可视化分析平台等一系列平台及工具,在模型高效分布式训练、数据处理和可视分析、模型炼知和轻量化等技术上形成独特优势,目前已在产学研等各领域近千家单位及个人提供AI应用赋能

Contributors (1)