diff --git a/modelscope/pipelines/builder.py b/modelscope/pipelines/builder.py
index 7e5cc6b5..5fb66178 100644
--- a/modelscope/pipelines/builder.py
+++ b/modelscope/pipelines/builder.py
@@ -263,13 +263,14 @@ def external_engine_for_llm_checker(model: Union[str, List[str], Model,
                                     kwargs: Dict[str, Any]) -> Optional[str]:
     from .nlp.llm_pipeline import ModelTypeHelper, LLMAdapterRegistry
     from ..hub.check_model import get_model_id_from_cache
-    from swift.llm import get_model_info_meta
 
     if isinstance(model, list):
         model = model[0]
     if not isinstance(model, str):
         model = model.model_dir
-    if kwargs.get('llm_framework') == 'swift':
+    llm_framework = kwargs.get('llm_framework', '')
+    if llm_framework == 'swift':
+        from swift.llm import get_model_info_meta
         # check if swift supports
         if os.path.exists(model):
             model_id = get_model_id_from_cache(model)
@@ -280,9 +281,8 @@ def external_engine_for_llm_checker(model: Union[str, List[str], Model,
             info = get_model_info_meta(model_id)
             model_type = info[0].model_type
         except Exception as e:
-            logger.warning(
-                f'Cannot using llm_framework with {model_id}, '
-                f'ignoring llm_framework={self.llm_framework} : {e}')
+            logger.warning(f'Cannot using llm_framework with {model_id}, '
+                           f'ignoring llm_framework={llm_framework} : {e}')
             model_type = None
         if model_type:
             return 'llm'
diff --git a/modelscope/utils/hf_util/patcher.py b/modelscope/utils/hf_util/patcher.py
index b31b6e1d..6a41a5ce 100644
--- a/modelscope/utils/hf_util/patcher.py
+++ b/modelscope/utils/hf_util/patcher.py
@@ -345,7 +345,7 @@ def _patch_pretrained_class(all_imported_modules, wrap=False):
                 else:
                     all_available_modules.append(
                         get_wrapped_class(var, **ignore_file_pattern_kwargs))
-            except Exception:
+            except:  # noqa
                 all_available_modules.append(var)
         else:
             if has_from_pretrained and not hasattr(var,
@@ -370,10 +370,9 @@ def _patch_pretrained_class(all_imported_modules, wrap=False):
             if has_get_config_dict and not hasattr(var,
                                                    '_get_config_dict_origin'):
                 var._get_config_dict_origin = var.get_config_dict
-                var.get_config_dict = partial(
-                    patch_pretrained_model_name_or_path,
-                    ori_func=var._get_config_dict_origin,
-                    **ignore_file_pattern_kwargs)
+                var.get_config_dict = classmethod(
+                    partial(patch_get_config_dict,
+                            **ignore_file_pattern_kwargs))
             all_available_modules.append(var)
 
     return all_available_modules
@@ -619,11 +618,6 @@ def _patch_hub():
     # Patch repocard.validate
     from huggingface_hub import repocard
     if not hasattr(repocard.RepoCard, '_validate_origin'):
-
-        def load(*args, **kwargs):  # noqa
-            from huggingface_hub.errors import EntryNotFoundError
-            raise EntryNotFoundError(message='API not supported.')
-
         repocard.RepoCard._validate_origin = repocard.RepoCard.validate
         repocard.RepoCard.validate = lambda *args, **kwargs: None
         repocard.RepoCard._load_origin = repocard.RepoCard.load
diff --git a/modelscope/utils/hf_util/pipeline_builder.py b/modelscope/utils/hf_util/pipeline_builder.py
index 20ae0535..82fe7fbf 100644
--- a/modelscope/utils/hf_util/pipeline_builder.py
+++ b/modelscope/utils/hf_util/pipeline_builder.py
@@ -33,7 +33,6 @@ def hf_pipeline(
     **kwargs,
 ) -> 'transformers.Pipeline':
     from transformers import pipeline
-
     if isinstance(model, str):
         if not os.path.exists(model):
             model = snapshot_download(model)
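
Note: below is a minimal, self-contained sketch of the classmethod(partial(...)) rebinding pattern used in the get_config_dict hunk of patcher.py above. DemoConfig and patched_get_config_dict are hypothetical stand-ins, not the actual ModelScope patch_get_config_dict implementation; the sketch only illustrates the mechanics under those assumptions.

# Sketch only: DemoConfig / patched_get_config_dict are hypothetical stand-ins.
from functools import partial


class DemoConfig:

    @classmethod
    def get_config_dict(cls, pretrained_model_name_or_path, **kwargs):
        return {'path': pretrained_model_name_or_path}


def patched_get_config_dict(cls, pretrained_model_name_or_path,
                            ignore_file_pattern=None, **kwargs):
    # Delegate to the saved original classmethod; the extra keyword argument
    # baked in by partial() is available here for any pre-processing.
    config = cls._get_config_dict_origin(pretrained_model_name_or_path,
                                         **kwargs)
    config['ignore_file_pattern'] = ignore_file_pattern
    return config


# Save the original so the patch can delegate to it, then rebind the attribute.
if not hasattr(DemoConfig, '_get_config_dict_origin'):
    DemoConfig._get_config_dict_origin = DemoConfig.get_config_dict
    DemoConfig.get_config_dict = classmethod(
        partial(patched_get_config_dict, ignore_file_pattern=[r'.*\.bin']))

print(DemoConfig.get_config_dict('my-org/my-model'))
# {'path': 'my-org/my-model', 'ignore_file_pattern': ['.*\\.bin']}

The design point: a bare partial assigned as a class attribute is not a descriptor, so the owning class would not be passed to it implicitly; wrapping it in classmethod() restores the cls argument that get_config_dict-style classmethods expect.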