From 39a309b6554070e68741a36593211ab47910a293 Mon Sep 17 00:00:00 2001 From: Yingda Chen Date: Sat, 3 Sep 2022 12:18:29 +0800 Subject: [PATCH] [to #42322933] reduce train epochs from 3 to 2 --- tests/trainers/test_finetune_mplug.py | 2 +- tests/trainers/test_finetune_token_classificatin.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/trainers/test_finetune_mplug.py b/tests/trainers/test_finetune_mplug.py index 351600c6..b46dbf45 100644 --- a/tests/trainers/test_finetune_mplug.py +++ b/tests/trainers/test_finetune_mplug.py @@ -35,7 +35,7 @@ class TestFinetuneMPlug(unittest.TestCase): }).rename_column('image:FILE', 'image').rename_column('answer:Value', 'answer')) - self.max_epochs = 3 + self.max_epochs = 2 def tearDown(self): shutil.rmtree(self.tmp_dir) diff --git a/tests/trainers/test_finetune_token_classificatin.py b/tests/trainers/test_finetune_token_classificatin.py index c34410be..9bdab9b7 100644 --- a/tests/trainers/test_finetune_token_classificatin.py +++ b/tests/trainers/test_finetune_token_classificatin.py @@ -92,7 +92,7 @@ class TestFinetuneTokenClassification(unittest.TestCase): } } cfg['preprocessor'] = {'type': 'token-cls-tokenizer'} - cfg.train.max_epochs = 3 + cfg.train.max_epochs = 2 cfg.train.lr_scheduler = { 'type': 'LinearLR', 'start_factor': 1.0,