lingcai.wl yingda.chen 3 years ago
parent
commit
01c8735efa
2 changed files with 60 additions and 10 deletions
  1. modelscope/pipelines/cv/image_style_transfer_pipeline.py (+7 -1)
  2. modelscope/utils/demo_utils.py (+53 -9)

+7 -1  modelscope/pipelines/cv/image_style_transfer_pipeline.py

@@ -61,7 +61,13 @@ class ImageStyleTransferPipeline(Pipeline):
     def _sanitize_parameters(self, **pipeline_parameters):
         return pipeline_parameters, {}, {}
 
-    def preprocess(self, content: Input, style: Input) -> Dict[str, Any]:
+    def preprocess(self,
+                   content: Input,
+                   style: Input = None) -> Dict[str, Any]:
+        if type(content) is dict:  # for demo service
+            style = content['style']
+            content = content['content']
+
         content = LoadImage.convert_to_ndarray(content)
         if len(content.shape) == 2:
             content = cv2.cvtColor(content, cv2.COLOR_GRAY2BGR)
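For context, a minimal sketch of the call path this change enables: the demo service sends a single dict, and the new preprocess() branch splits it back into content and style. The model id and image paths below are placeholders, not values taken from this commit.

    from modelscope.pipelines import pipeline
    from modelscope.utils.constant import Tasks

    # Placeholder model id and local image paths, for illustration only.
    style_transfer = pipeline(Tasks.image_style_transfer, model='<style-transfer-model-id>')

    # Demo-service style input: one dict, unpacked by the new preprocess() branch.
    result = style_transfer({'content': 'content.jpg', 'style': 'style.jpg'})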


+53 -9  modelscope/utils/demo_utils.py

@@ -123,7 +123,7 @@ INPUT_EXAMPLES = {
         'urlPaths': {
             'outUrls': [{
                 'outputKey': OutputKeys.OUTPUT_PCM,
-                'fileType': 'wav'
+                'fileType': 'pcm'
             }]
         }
     },
@@ -134,7 +134,7 @@ INPUT_EXAMPLES = {
         'urlPaths': {
             'outUrls': [{
                 'outputKey': OutputKeys.OUTPUT_PCM,
-                'fileType': 'wav'
+                'fileType': 'pcm'
             }]
         }
     },
@@ -147,7 +147,13 @@ INPUT_EXAMPLES = {
             'http://xingchen-data.oss-cn-zhangjiakou.aliyuncs.com/maas/visual-grounding/visual_grounding.png',
             'a blue turtle-like pokemon with round head'
         ],
-        'urlPaths': {}
+        'urlPaths': {
+            'inUrls': [{
+                'name': 'image'
+            }, {
+                'name': 'text'
+            }]
+        }
     },
     TasksIODescriptions.visual_question_answering: {
         'task':
@@ -156,7 +162,16 @@ INPUT_EXAMPLES = {
             'http://225252-file.oss-cn-hangzhou-zmf.aliyuncs.com/maas_demo/visual_question_answering.png',
             'what is grown on the plant?'
         ],
-        'urlPaths': {}
+        'urlPaths': {
+            'inUrls': [{
+                'name': 'image'
+            }, {
+                'name': 'text'
+            }],
+            'outUrls': [{
+                'outputKey': 'text'
+            }]
+        }
     },
     TasksIODescriptions.visual_entailment: {
         'task':
@@ -165,7 +180,14 @@ INPUT_EXAMPLES = {
             'http://xingchen-data.oss-cn-zhangjiakou.aliyuncs.com/maas/visual-entailment/visual_entailment.jpg',
             'there are two birds.', 'test'
         ],
-        'urlPaths': {}
+        'urlPaths': {
+            'inUrls': [{
+                'name': 'image'
+            }, {
+                'name': 'text'
+            }],
+            'outUrls': [{}]
+        }
     },
     TasksIODescriptions.generative_multi_modal_embedding: {
         'task':
@@ -174,7 +196,14 @@ INPUT_EXAMPLES = {
             'http://clip-multimodal.oss-cn-beijing.aliyuncs.com/lingchen/demo/dogs.jpg',
             'dogs playing in the grass'
         ],
-        'urlPaths': {}
+        'urlPaths': {
+            'inUrls': [{
+                'name': 'image'
+            }, {
+                'name': 'text'
+            }],
+            'outUrls': [{}]
+        }
     },
 }

@@ -192,7 +221,13 @@ class DemoCompatibilityCheck(object):
         print('testing demo: ', self.task, self.model_id)
         test_pipline = pipeline(self.task, self.model_id)
         req = INPUT_EXAMPLES[TASKS_INPUT_TEMPLATES[self.task]]
-        output = test_pipline(preprocess(req))
+        inputs = preprocess(req)
+        params = req.get('parameters', {})
+        # maas inference
+        if params != {}:
+            output = test_pipline(inputs, **params)
+        else:
+            output = test_pipline(inputs)
         json.dumps(output, cls=NumpyEncoder)
         result = postprocess(req, output)
         print(result)
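A sketch of the request shape this new branch targets: when an example carries a 'parameters' dict it is splatted into the pipeline call, otherwise the plain single-argument call is kept. The entry below is hypothetical, not one of the examples in this file, and test_pipline refers to the pipeline built in the method above.

    # Hypothetical INPUT_EXAMPLES-style entry with extra pipeline parameters.
    req = {
        'inputs': ['hello world'],
        'urlPaths': {},
        'parameters': {'max_length': 64},
    }
    inputs = preprocess(req)
    params = req.get('parameters', {})
    # Mirrors the branch above: splat the parameters only when they are present.
    output = test_pipline(inputs, **params) if params != {} else test_pipline(inputs)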
@@ -215,11 +250,21 @@ class NumpyEncoder(json.JSONEncoder):


 def preprocess(req):
+    in_urls = req.get('urlPaths').get('inUrls')
     if len(req['inputs']) == 1:
         inputs = req['inputs'][0]
     else:
         inputs = tuple(req['inputs'])
-    return inputs
+    if in_urls is None or len(in_urls) == 0:
+        return inputs
+
+    inputs_dict = {}
+    for i, in_url in enumerate(in_urls):
+        input_name = in_url.get('name')
+        if input_name is None or input_name == '':
+            return inputs
+        inputs_dict[input_name] = req['inputs'][i]
+    return inputs_dict
 
 
 def postprocess(req, resp):
@@ -242,4 +287,3 @@ def postprocess(req, resp):
         out_mem_file = io.BytesIO()
         out_mem_file.write(new_resp.get(output_key))
         return type(out_mem_file)
-    # TODO(lingcai.wl): support more file type
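Putting the reworked preprocess() together: when an example names its inUrls, the positional inputs are returned as a dict keyed by those names; otherwise the old single-value/tuple behaviour is kept. The request below is a hypothetical illustration, not an entry from this file.

    req = {
        'inputs': ['http://example.com/cat.png', 'what is in the picture?'],
        'urlPaths': {'inUrls': [{'name': 'image'}, {'name': 'text'}]},
    }
    # -> {'image': 'http://example.com/cat.png', 'text': 'what is in the picture?'}
    print(preprocess(req))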
