api tagging for pipeline/train/evaluate

Author: jiangyu.xzy
Date:   2022-11-01 11:22:46 +08:00
parent 40b6770956
commit f451ff8905
3 changed files with 35 additions and 1 deletion


@@ -646,6 +646,30 @@ class HubApi:
    def check_local_cookies(self, use_cookies) -> CookieJar:
        return self._check_cookie(use_cookies=use_cookies)

    def create_library_statistics(self,
                                  method: str,
                                  name: str,
                                  cn_name: Optional[str]):
        """Create a library statistics record. Called by train()/evaluate()/pipeline().

        Args:
            method (str): Name of the calling method, i.e. train/evaluate/pipeline.
            name (str): Model name, for example: damo/cv_unet_person-image-cartoon_compound-models.
            cn_name (Optional[str]): Model name in Chinese, for example: 达摩卡通化模型.

        Raises:
            requests.HTTPError: If the statistics request fails.

        Returns:
            None
        """
        path = f'{self.endpoint}/api/v1/statistics/library'
        headers = {'user-agent': ModelScopeConfig.get_user_agent()}
        params = {'Method': method, 'Name': name, 'CnName': cn_name}
        r = requests.post(path, params=params, headers=headers)
        r.raise_for_status()
        return
class ModelScopeConfig:
    path_credential = expanduser(DEFAULT_CREDENTIALS_PATH)
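
For reference, the new method can also be called directly. A minimal usage sketch, assuming the default public hub endpoint is reachable; the model names are the examples from the docstring above:

from modelscope.hub.api import HubApi

# Direct call to the new statistics API; cn_name may be None when the
# model has no Chinese display name.
api = HubApi()
api.create_library_statistics(
    method='pipeline',
    name='damo/cv_unet_person-image-cartoon_compound-models',
    cn_name='达摩卡通化模型')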


@@ -23,6 +23,7 @@ from modelscope.utils.hub import read_config, snapshot_download
from modelscope.utils.import_utils import is_tf_available, is_torch_available
from modelscope.utils.logger import get_logger
from modelscope.utils.torch_utils import _find_free_port, _is_free_port
from modelscope.hub.api import HubApi
from .util import is_model, is_official_hub_path
if is_torch_available():
@@ -151,7 +152,9 @@ class Pipeline(ABC):
                 **kwargs) -> Union[Dict[str, Any], Generator]:
        # model provider should leave it as it is
        # modelscope library developer will handle this function

        # report a 'pipeline' usage record (tagged with the configured task name)
        _api = HubApi()
        model_name = self.cfg.task
        _api.create_library_statistics('pipeline', model_name, None)

        # place model to cpu or gpu
        if (self.model or (self.has_multiple_models and self.models[0])):
            if not self._model_prepare:
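
With this change, every pipeline invocation reports a 'pipeline' record transparently. A hedged end-to-end sketch; the task name and model id below are placeholders, not taken from this commit:

from modelscope.pipelines import pipeline

# Building a pipeline and calling it; the statistics record is sent
# inside Pipeline.__call__, tagged with cfg.task.
p = pipeline(task='image-portrait-stylization',
             model='damo/cv_unet_person-image-cartoon_compound-models')
result = p('path/to/input.png')  # -> create_library_statistics('pipeline', ...)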


@@ -39,6 +39,7 @@ from modelscope.utils.logger import get_logger
from modelscope.utils.registry import build_from_cfg
from modelscope.utils.torch_utils import (get_dist_info, get_local_rank,
                                          init_dist, set_random_seed)
from modelscope.hub.api import HubApi
from .base import BaseTrainer
from .builder import TRAINERS
from .default_config import merge_cfg
@@ -436,6 +437,9 @@ class EpochBasedTrainer(BaseTrainer):
    def train(self, checkpoint_path=None, *args, **kwargs):
        self._mode = ModeKeys.TRAIN

        # report a 'train' usage record (tagged with the configured task name)
        _api = HubApi()
        model_name = self.cfg.task
        _api.create_library_statistics('train', model_name, None)

        if self.train_dataset is None:
            self.train_dataloader = self.get_train_dataloader()
@@ -456,6 +460,9 @@ class EpochBasedTrainer(BaseTrainer):
        self.train_loop(self.train_dataloader)

    def evaluate(self, checkpoint_path=None):
        # report an 'evaluate' usage record (tagged with the configured task name)
        _api = HubApi()
        model_name = self.cfg.task
        _api.create_library_statistics('evaluate', model_name, None)

        if checkpoint_path is not None and os.path.isfile(checkpoint_path):
            from modelscope.trainers.hooks import CheckpointHook
            CheckpointHook.load_checkpoint(checkpoint_path, self)
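
The trainer gets the same treatment for both entry points. A hedged sketch of where the records fire, assuming a trainer built with build_trainer; the model id and work_dir are illustrative only:

from modelscope.trainers import build_trainer

# Illustrative arguments; any EpochBasedTrainer instance behaves the same way.
trainer = build_trainer(default_args=dict(
    model='damo/cv_unet_person-image-cartoon_compound-models',
    work_dir='./work_dir'))
trainer.train()     # -> create_library_statistics('train', cfg.task, None)
trainer.evaluate()  # -> create_library_statistics('evaluate', cfg.task, None)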