From 5a2634610a3e1efca692327ab31988313574156d Mon Sep 17 00:00:00 2001
From: "suluyan.sly"
Date: Fri, 2 Sep 2022 20:03:19 +0800
Subject: [PATCH] [to #42322933] skip sbert_en&bert_ch to save ci time
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

![](https://cn-hangzhou.oss-cdn.aliyun-inc.com/git/force/uploads/comment/251924/40165669611078357/image.png)

The fill-mask pipeline tests take too long: this task currently tests four models. From the standpoint of verifying code correctness, it is enough to cover one bert-style model (e.g. the Chinese sbert) and one roberta-style model (veco). Reducing the number of tested models shortens the test time.

Link: https://code.alibaba-inc.com/Ali-MaaS/MaaS-lib/codereview/10006556

* skip sbert_en&bert_ch to save ci time
---
 tests/pipelines/test_fill_mask.py | 38 ++-----------------------------
 1 file changed, 2 insertions(+), 36 deletions(-)

diff --git a/tests/pipelines/test_fill_mask.py b/tests/pipelines/test_fill_mask.py
index 1b709e27..6b37f6df 100644
--- a/tests/pipelines/test_fill_mask.py
+++ b/tests/pipelines/test_fill_mask.py
@@ -43,7 +43,7 @@ class FillMaskTest(unittest.TestCase):
     @unittest.skipUnless(test_level() >= 2, 'skip test in current test level')
     def test_run_by_direct_model_download(self):
         # sbert
-        for language in ['zh', 'en']:
+        for language in ['zh']:
             model_dir = snapshot_download(self.model_id_sbert[language])
             preprocessor = FillMaskPreprocessor(
                 model_dir, first_sequence='sentence', second_sequence=None)
@@ -74,24 +74,10 @@ class FillMaskTest(unittest.TestCase):
                 f'{pipeline1(test_input)}\npipeline2: {pipeline2(test_input)}\n'
             )
 
-        # zh bert
-        language = 'zh'
-        model_dir = snapshot_download(self.model_id_bert)
-        preprocessor = FillMaskPreprocessor(
-            model_dir, first_sequence='sentence', second_sequence=None)
-        model = BertForMaskedLM.from_pretrained(model_dir)
-        pipeline1 = FillMaskPipeline(model, preprocessor)
-        pipeline2 = pipeline(
-            Tasks.fill_mask, model=model, preprocessor=preprocessor)
-        ori_text = self.ori_texts[language]
-        test_input = self.test_inputs[language]
-        print(f'\nori_text: {ori_text}\ninput: {test_input}\npipeline1: '
-              f'{pipeline1(test_input)}\npipeline2: {pipeline2(test_input)}\n')
-
     @unittest.skipUnless(test_level() >= 0, 'skip test in current test level')
     def test_run_with_model_from_modelhub(self):
         # sbert
-        for language in ['zh', 'en']:
+        for language in ['zh']:
             print(self.model_id_sbert[language])
             model = Model.from_pretrained(self.model_id_sbert[language])
             preprocessor = FillMaskPreprocessor(
@@ -121,20 +107,6 @@ class FillMaskTest(unittest.TestCase):
                 f'\nori_text: {ori_text}\ninput: {test_input}\npipeline: '
                 f'{pipeline_ins(test_input)}\n')
 
-        # zh bert
-        model = Model.from_pretrained(self.model_id_bert)
-        preprocessor = FillMaskPreprocessor(
-            model.model_dir, first_sequence='sentence', second_sequence=None)
-        pipeline_ins = pipeline(
-            Tasks.fill_mask, model=model, preprocessor=preprocessor)
-        language = 'zh'
-        ori_text = self.ori_texts[language]
-        test_input = self.test_inputs[language]
-        with self.regress_tool.monitor_module_single_forward(
-                pipeline_ins.model, 'fill_mask_bert_zh'):
-            print(f'\nori_text: {ori_text}\ninput: {test_input}\npipeline: '
-                  f'{pipeline_ins(test_input)}\n')
-
     @unittest.skipUnless(test_level() >= 0, 'skip test in current test level')
     def test_run_with_model_name(self):
         # veco
@@ -153,12 +125,6 @@ class FillMaskTest(unittest.TestCase):
                 f'\nori_text: {self.ori_texts[language]}\ninput: {self.test_inputs[language]}\npipeline: '
                 f'{pipeline_ins(self.test_inputs[language])}\n')
 
-        # bert
-        pipeline_ins = pipeline(task=Tasks.fill_mask, model=self.model_id_bert)
-        print(
-            f'\nori_text: {self.ori_texts[language]}\ninput: {self.test_inputs[language]}\npipeline: '
-            f'{pipeline_ins(self.test_inputs[language])}\n')
-
     @unittest.skipUnless(test_level() >= 2, 'skip test in current test level')
     def test_run_with_default_model(self):
         pipeline_ins = pipeline(task=Tasks.fill_mask)
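
For reference, below is a minimal, self-contained sketch of the test_level() gating pattern the patched tests rely on: level-0 cases run on every CI pass, while slower paths (such as the direct-model-download test kept behind test_level() >= 2) only run when the level is raised. The test_level() helper here is a hypothetical stand-in, not MaaS-lib's implementation; its environment variable name and default value are assumptions.

import os
import unittest


def test_level() -> int:
    # Hypothetical stand-in for the project's gating helper:
    # a higher TEST_LEVEL enables more (and slower) test cases.
    return int(os.environ.get('TEST_LEVEL', 0))


class GatingSketch(unittest.TestCase):

    @unittest.skipUnless(test_level() >= 0, 'skip test in current test level')
    def test_fast_path(self):
        # Always runs: stands in for the cheap sbert-zh / veco coverage kept by the patch.
        self.assertTrue(True)

    @unittest.skipUnless(test_level() >= 2, 'skip test in current test level')
    def test_slow_path(self):
        # Runs only when TEST_LEVEL >= 2, like the direct-model-download case.
        self.assertTrue(True)


if __name__ == '__main__':
    unittest.main()

With the default level only test_fast_path executes; raising TEST_LEVEL (e.g. to 2) before invoking unittest enables the slow path as well, which mirrors how the heavier fill-mask cases stay out of routine CI runs.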