diff --git a/modelscope/hub/t_jy.py b/modelscope/hub/t_jy.py
new file mode 100644
index 00000000..baf84f46
--- /dev/null
+++ b/modelscope/hub/t_jy.py
@@ -0,0 +1,16 @@
+def dec(param1):
+    print(param1)
+
+    def in_dec(func):
+        def in_func(name):
+            return func(name)
+        return in_func
+    return in_dec
+
+
+@dec("dec1")
+def aa(param):
+    print(param)
+    return
+
+aa("heell")
\ No newline at end of file
diff --git a/modelscope/models/base/base_model.py b/modelscope/models/base/base_model.py
index 1ca7e030..721478c3 100644
--- a/modelscope/models/base/base_model.py
+++ b/modelscope/models/base/base_model.py
@@ -131,6 +131,8 @@ class Model(ABC):
 
         if not hasattr(model, 'cfg'):
             model.cfg = cfg
+
+        model.name = model_name_or_path
         return model
 
     def save_pretrained(self,
diff --git a/modelscope/pipelines/base.py b/modelscope/pipelines/base.py
index 9280cc09..b9a4a25c 100644
--- a/modelscope/pipelines/base.py
+++ b/modelscope/pipelines/base.py
@@ -152,8 +152,9 @@ class Pipeline(ABC):
                 **kwargs) -> Union[Dict[str, Any], Generator]:
         # model provider should leave it as it is
         # modelscope library developer will handle this function
-        model_name = self.cfg.model.type
-        create_library_statistics("pipeline", model_name, None)
+        for single_model in self.models:
+            if hasattr(single_model, 'name'):
+                create_library_statistics("pipeline", single_model.name, None)
         # place model to cpu or gpu
         if (self.model or (self.has_multiple_models and self.models[0])):
             if not self._model_prepare:
diff --git a/modelscope/trainers/trainer.py b/modelscope/trainers/trainer.py
index 522405ff..2e79667f 100644
--- a/modelscope/trainers/trainer.py
+++ b/modelscope/trainers/trainer.py
@@ -437,8 +437,8 @@ class EpochBasedTrainer(BaseTrainer):
 
     def train(self, checkpoint_path=None, *args, **kwargs):
         self._mode = ModeKeys.TRAIN
-        model_name = self.cfg.model.type
-        create_library_statistics("train", model_name, None)
+        if hasattr(self.model, 'name'):
+            create_library_statistics("train", self.model.name, None)
         if self.train_dataset is None:
             self.train_dataloader = self.get_train_dataloader()
 
@@ -459,8 +459,8 @@
         self.train_loop(self.train_dataloader)
 
     def evaluate(self, checkpoint_path=None):
-        model_name = self.cfg.model.type
-        create_library_statistics("evaluate", model_name, None)
+        if hasattr(self.model, 'name'):
+            create_library_statistics("evaluate", self.model.name, None)
         if checkpoint_path is not None and os.path.isfile(checkpoint_path):
             from modelscope.trainers.hooks import CheckpointHook
             CheckpointHook.load_checkpoint(checkpoint_path, self)
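
Note for reviewers: the patch applies one pattern in three places. `Model.from_pretrained` stamps the loaded instance with `model.name = model_name_or_path`, and each statistics call site (`pipeline`, `train`, `evaluate`) then reads that attribute behind a `hasattr` guard instead of reaching into `self.cfg.model.type`, so a model without that config field skips the report rather than raising. Below is a minimal, self-contained sketch of that pattern, not part of the patch; every name in it is a hypothetical stand-in (`record_stat` for `create_library_statistics`, `DummyModel` and the model id for real ModelScope models).

# Sketch of the name-tagging pattern used in this patch. Everything here
# is a hypothetical stand-in: record_stat() plays the role of
# create_library_statistics(), DummyModel the role of a real Model.

def record_stat(event: str, name: str) -> None:
    """Placeholder statistics reporter."""
    print(f"[stat] {event}: {name}")


class DummyModel:
    """Stand-in for a loaded model object."""


def from_pretrained(model_name_or_path: str) -> DummyModel:
    model = DummyModel()
    # Tag the instance at load time, mirroring the base_model.py hunk:
    # downstream code can then report which model actually ran.
    model.name = model_name_or_path
    return model


def run_pipeline(models: list) -> None:
    # Mirror the pipelines/base.py hunk: guard with hasattr(), since a
    # model built directly (not via from_pretrained) carries no name.
    for single_model in models:
        if hasattr(single_model, 'name'):
            record_stat("pipeline", single_model.name)


if __name__ == "__main__":
    tagged = from_pretrained("org/some_model")  # hypothetical model id
    run_pipeline([tagged])        # prints: [stat] pipeline: org/some_model
    run_pipeline([DummyModel()])  # no name attribute, report is skipped

The `hasattr` guard trades silent under-reporting for robustness; an alternative would be `getattr(single_model, 'name', None)` with an explicit fallback value so every invocation is still counted.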