Browse Source

Add the model name to BaseModel; use the model name as the statistics tag

master
jiangyu.xzy 3 years ago
parent
commit
cc76d900bc
4 changed files with 25 additions and 6 deletions
  1. +16
    -0
      modelscope/hub/t_jy.py
  2. +2
    -0
      modelscope/models/base/base_model.py
  3. +3
    -2
      modelscope/pipelines/base.py
  4. +4
    -4
      modelscope/trainers/trainer.py

+ 16
- 0
modelscope/hub/t_jy.py View File

@@ -0,0 +1,16 @@
def dec(param1):
    """Decorator factory.

    Prints ``param1`` once at decoration time, then returns a decorator
    that wraps the target function transparently.

    Args:
        param1: Value printed when the decorator is applied.

    Returns:
        A decorator that forwards all calls to the wrapped function.
    """
    print(param1)

    def in_dec(func):
        # Generalized from a single fixed positional parameter to any
        # signature, so the decorator works on arbitrary functions.
        def in_func(*args, **kwargs):
            return func(*args, **kwargs)
        return in_func
    return in_dec


@dec("dec1")
def aa(param):
    """Smoke-test target for the ``dec`` decorator: echoes its argument."""
    print(param)


aa("heell")

+ 2
- 0
modelscope/models/base/base_model.py View File

@@ -131,6 +131,8 @@ class Model(ABC):

if not hasattr(model, 'cfg'):
model.cfg = cfg

model.name = model_name_or_path
return model

def save_pretrained(self,


+ 3
- 2
modelscope/pipelines/base.py View File

@@ -152,8 +152,9 @@ class Pipeline(ABC):
**kwargs) -> Union[Dict[str, Any], Generator]:
# model provider should leave it as it is
# modelscope library developer will handle this function
model_name = self.cfg.model.type
create_library_statistics("pipeline", model_name, None)
for single_model in self.models:
if hasattr(single_model, 'name'):
create_library_statistics("pipeline", single_model.name, None)
# place model to cpu or gpu
if (self.model or (self.has_multiple_models and self.models[0])):
if not self._model_prepare:


+ 4
- 4
modelscope/trainers/trainer.py View File

@@ -437,8 +437,8 @@ class EpochBasedTrainer(BaseTrainer):

def train(self, checkpoint_path=None, *args, **kwargs):
self._mode = ModeKeys.TRAIN
model_name = self.cfg.model.type
create_library_statistics("train", model_name, None)
if hasattr(self.model, 'name'):
create_library_statistics("train", self.model.name, None)

if self.train_dataset is None:
self.train_dataloader = self.get_train_dataloader()
@@ -459,8 +459,8 @@ class EpochBasedTrainer(BaseTrainer):
self.train_loop(self.train_dataloader)

def evaluate(self, checkpoint_path=None):
model_name = self.cfg.model.type
create_library_statistics("evaluate", model_name, None)
if hasattr(self.model, 'name'):
create_library_statistics("evaluate", self.model.name, None)
if checkpoint_path is not None and os.path.isfile(checkpoint_path):
from modelscope.trainers.hooks import CheckpointHook
CheckpointHook.load_checkpoint(checkpoint_path, self)


Loading…
Cancel
Save