@@ -646,29 +646,6 @@ class HubApi:
     def check_local_cookies(self, use_cookies) -> CookieJar:
         return self._check_cookie(use_cookies=use_cookies)
 
-    def create_library_statistics(self,
-                                  method: str,
-                                  name: str,
-                                  cn_name: Optional[str]):
-        """
-        create library statistics. called by train()/evaluate()/pipeline()
-        Args:
-            method (str): called methed name,i.e train/evaluate/pipeline
-            name (str): model name, for example: damo/cv_unet_person-image-cartoon_compound-models
-            cn_name (str): model name in chinese, for example: 达摩卡通化模型
-        Raises:
-            ValueError: If user_cookies is True, but no local cookie.
-        Returns:
-            None
-        """
-        path = f'{self.endpoint}/api/v1/statistics/library'
-        headers = {'user-agent': ModelScopeConfig.get_user_agent()}
-        params = {"Method": method, "Name": name, "CnName": cn_name}
-        r = requests.post(path, params=params, headers=headers)
-        r.raise_for_status()
-        return
-
 class ModelScopeConfig:
@@ -4,6 +4,7 @@ import hashlib
 import os
 from datetime import datetime
 from typing import Optional
+import requests
 
 from modelscope.hub.constants import (DEFAULT_MODELSCOPE_DOMAIN,
                                       DEFAULT_MODELSCOPE_GROUP,
@@ -12,6 +13,7 @@ from modelscope.hub.constants import (DEFAULT_MODELSCOPE_DOMAIN,
 from modelscope.hub.errors import FileIntegrityError
 from modelscope.utils.file_utils import get_default_cache_dir
 from modelscope.utils.logger import get_logger
+from modelscope.hub.api import ModelScopeConfig
 
 logger = get_logger()
@@ -85,3 +87,14 @@ def file_integrity_validation(file_path, expected_sha256):
         msg = 'File %s integrity check failed, the download may be incomplete, please try again.' % file_path
         logger.error(msg)
         raise FileIntegrityError(msg)
+
+
+def create_library_statistics(method: str,
+                              name: str,
+                              cn_name: Optional[str]):
+    path = f'{get_endpoint()}/api/v1/statistics/library'
+    headers = {'user-agent': ModelScopeConfig.get_user_agent()}
+    params = {"Method": method, "Name": name, "CnName": cn_name}
+    r = requests.post(path, params=params, headers=headers)
+    r.raise_for_status()
+    return
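With this hunk the statistics call becomes a plain module-level helper, so callers no longer need a HubApi instance. A minimal usage sketch of the new call style, reusing the model name from the removed docstring (cn_name may simply be None):

    from modelscope.hub.utils.utils import create_library_statistics

    # Posts one 'pipeline' usage event to <endpoint>/api/v1/statistics/library;
    # raise_for_status() turns a non-2xx response into requests.exceptions.HTTPError.
    create_library_statistics('pipeline',
                              'damo/cv_unet_person-image-cartoon_compound-models',
                              None)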
@@ -23,7 +23,7 @@ from modelscope.utils.hub import read_config, snapshot_download
 from modelscope.utils.import_utils import is_tf_available, is_torch_available
 from modelscope.utils.logger import get_logger
 from modelscope.utils.torch_utils import _find_free_port, _is_free_port
-from modelscope.hub.api import HubApi
+from modelscope.hub.utils.utils import create_library_statistics
 from .util import is_model, is_official_hub_path
 
 if is_torch_available():
@@ -152,9 +152,8 @@ class Pipeline(ABC):
                  **kwargs) -> Union[Dict[str, Any], Generator]:
         # model provider should leave it as it is
         # modelscope library developer will handle this function
-        _api = HubApi()
         model_name = self.cfg.task
-        _api.create_library_statistics("pipeline", model_name, None)
+        create_library_statistics("pipeline", model_name, None)
         # place model to cpu or gpu
         if (self.model or (self.has_multiple_models and self.models[0])):
             if not self._model_prepare:
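From the caller's side nothing changes: any pipeline invocation still records a 'pipeline' event, now through the free function instead of a throwaway HubApi instance. A hedged sketch of the path that triggers it (the task and model ids below are illustrative assumptions, not taken from this diff):

    from modelscope.pipelines import pipeline

    # Pipeline.__call__ internally runs
    # create_library_statistics('pipeline', self.cfg.task, None) before inference.
    img_cartoon = pipeline('image-portrait-stylization',
                           model='damo/cv_unet_person-image-cartoon_compound-models')
    result = img_cartoon('input.png')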
@@ -39,7 +39,7 @@ from modelscope.utils.logger import get_logger
 from modelscope.utils.registry import build_from_cfg
 from modelscope.utils.torch_utils import (get_dist_info, get_local_rank,
                                           init_dist, set_random_seed)
-from modelscope.hub.api import HubApi
+from modelscope.hub.utils.utils import create_library_statistics
 from .base import BaseTrainer
 from .builder import TRAINERS
 from .default_config import merge_cfg
@@ -437,9 +437,8 @@ class EpochBasedTrainer(BaseTrainer):
     def train(self, checkpoint_path=None, *args, **kwargs):
         self._mode = ModeKeys.TRAIN
-        _api = HubApi()
         model_name = self.cfg.task
-        _api.create_library_statistics("train", model_name, None)
+        create_library_statistics("train", model_name, None)
 
         if self.train_dataset is None:
             self.train_dataloader = self.get_train_dataloader()
@@ -460,9 +459,8 @@ class EpochBasedTrainer(BaseTrainer):
             self.train_loop(self.train_dataloader)
 
     def evaluate(self, checkpoint_path=None):
-        _api = HubApi()
         model_name = self.cfg.task
-        _api.create_library_statistics("evaluate", model_name, None)
+        create_library_statistics("evaluate", model_name, None)
 
         if checkpoint_path is not None and os.path.isfile(checkpoint_path):
             from modelscope.trainers.hooks import CheckpointHook
             CheckpointHook.load_checkpoint(checkpoint_path, self)
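The trainer paths mirror the pipeline change: train() and evaluate() now call the module-level helper directly, reporting cfg.task as the name. A rough usage sketch; the model id and work_dir are placeholders, and a real run would also need train/eval datasets supplied via default_args or the model's configuration:

    from modelscope.trainers import build_trainer

    # Illustrative assumption: a model whose configuration carries trainer defaults.
    trainer = build_trainer(default_args=dict(
        model='damo/nlp_structbert_sentence-similarity_chinese-base',
        work_dir='./work_dir'))
    trainer.train()      # reports create_library_statistics('train', cfg.task, None)
    trainer.evaluate()   # reports create_library_statistics('evaluate', cfg.task, None)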