Mirror of https://github.com/modelscope/modelscope.git (synced 2025-12-16 08:17:45 +01:00)

Merge branch 'master' of github.com:modelscope/modelscope into add_ci_test_header

@@ -263,13 +263,14 @@ def external_engine_for_llm_checker(model: Union[str, List[str], Model,
                                    kwargs: Dict[str, Any]) -> Optional[str]:
    from .nlp.llm_pipeline import ModelTypeHelper, LLMAdapterRegistry
    from ..hub.check_model import get_model_id_from_cache
-   from swift.llm import get_model_info_meta
    if isinstance(model, list):
        model = model[0]
    if not isinstance(model, str):
        model = model.model_dir

-   if kwargs.get('llm_framework') == 'swift':
+   llm_framework = kwargs.get('llm_framework', '')
+   if llm_framework == 'swift':
+       from swift.llm import get_model_info_meta
        # check if swift supports
        if os.path.exists(model):
            model_id = get_model_id_from_cache(model)

@@ -280,9 +281,8 @@ def external_engine_for_llm_checker(model: Union[str, List[str], Model,
            info = get_model_info_meta(model_id)
            model_type = info[0].model_type
        except Exception as e:
-           logger.warning(
-               f'Cannot using llm_framework with {model_id}, '
-               f'ignoring llm_framework={self.llm_framework} : {e}')
+           logger.warning(f'Cannot using llm_framework with {model_id}, '
+                          f'ignoring llm_framework={llm_framework} : {e}')
            model_type = None
    if model_type:
        return 'llm'

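The two hunks above move the swift import inside the 'swift' branch, read the framework hint into a local llm_framework, and make the warning reference that local instead of self.llm_framework (which does not exist in a module-level function). The snippet below is a minimal, standalone sketch of that pattern, not modelscope code: resolve_llm_framework is a hypothetical name, and only get_model_info_meta comes from the diff.

import importlib
import logging
from typing import Any, Dict, Optional

logger = logging.getLogger(__name__)


def resolve_llm_framework(model_id: str, kwargs: Dict[str, Any]) -> Optional[str]:
    # Read the hint once into a local so later messages can reference it.
    llm_framework = kwargs.get('llm_framework', '')
    if llm_framework != 'swift':
        return None
    try:
        # Lazy import: callers that never ask for swift do not need it installed.
        swift_llm = importlib.import_module('swift.llm')
        info = swift_llm.get_model_info_meta(model_id)
        model_type = info[0].model_type
    except Exception as e:  # missing dependency or unsupported model
        logger.warning(f'Cannot use llm_framework with {model_id}, '
                       f'ignoring llm_framework={llm_framework} : {e}')
        return None
    return 'llm' if model_type else None
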
@@ -345,7 +345,7 @@ def _patch_pretrained_class(all_imported_modules, wrap=False):
                else:
                    all_available_modules.append(
                        get_wrapped_class(var, **ignore_file_pattern_kwargs))
-           except Exception:
+           except:  # noqa
                all_available_modules.append(var)
        else:
            if has_from_pretrained and not hasattr(var,

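The changed line above broadens the fallback from `except Exception` to a bare `except`. A small sketch of that wrap-or-fallback loop, with a stand-in get_wrapped_class rather than modelscope's helper:

def wrap_all(modules, get_wrapped_class):
    all_available_modules = []
    for var in modules:
        try:
            all_available_modules.append(get_wrapped_class(var))
        except:  # noqa: E722 - deliberately broad, mirroring the diff
            # Introspecting third-party classes can fail in surprising ways;
            # keep the original object instead of failing the whole patch.
            all_available_modules.append(var)
    return all_available_modules
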
@@ -370,10 +370,9 @@ def _patch_pretrained_class(all_imported_modules, wrap=False):
            if has_get_config_dict and not hasattr(var,
                                                   '_get_config_dict_origin'):
                var._get_config_dict_origin = var.get_config_dict
-               var.get_config_dict = partial(
-                   patch_pretrained_model_name_or_path,
-                   ori_func=var._get_config_dict_origin,
-                   **ignore_file_pattern_kwargs)
+               var.get_config_dict = classmethod(
+                   partial(patch_get_config_dict,
+                           **ignore_file_pattern_kwargs))

            all_available_modules.append(var)
    return all_available_modules

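This hunk rebinds get_config_dict with classmethod(partial(...)) instead of a bare partial. The wrapper matters because get_config_dict is a classmethod on the pretrained-config classes: only a classmethod descriptor passes cls through when the attribute is looked up on the class. Below is a hedged, self-contained illustration with toy names (Config, patched_get_config_dict); it is not the transformers or modelscope implementation.

from functools import partial


class Config:
    @classmethod
    def get_config_dict(cls, name, **kwargs):
        return {'name': name, 'cls': cls.__name__, **kwargs}


def patched_get_config_dict(cls, name, **extra):
    # Inject extra keyword arguments before delegating to the saved original.
    return cls._get_config_dict_origin(name, **extra)


if not hasattr(Config, '_get_config_dict_origin'):
    Config._get_config_dict_origin = Config.get_config_dict
    Config.get_config_dict = classmethod(
        partial(patched_get_config_dict, ignore_file_pattern=['*.bin']))

print(Config.get_config_dict('demo'))
# -> {'name': 'demo', 'cls': 'Config', 'ignore_file_pattern': ['*.bin']}
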
@@ -619,11 +618,6 @@ def _patch_hub():
    # Patch repocard.validate
    from huggingface_hub import repocard
    if not hasattr(repocard.RepoCard, '_validate_origin'):
-
-       def load(*args, **kwargs):  # noqa
-           from huggingface_hub.errors import EntryNotFoundError
-           raise EntryNotFoundError(message='API not supported.')
-
        repocard.RepoCard._validate_origin = repocard.RepoCard.validate
        repocard.RepoCard.validate = lambda *args, **kwargs: None
        repocard.RepoCard._load_origin = repocard.RepoCard.load

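The removed `load` stub aside, the surviving lines show the patch idiom _patch_hub uses on huggingface_hub's RepoCard: stash the original under a *_origin attribute, guard with hasattr so the patch is applied only once, and replace the method with a no-op. A toy sketch of that idiom, where Card stands in for repocard.RepoCard:

class Card:
    def validate(self, *args, **kwargs):
        raise RuntimeError('pretend this needs network access')


def patch_validate():
    if not hasattr(Card, '_validate_origin'):  # never double-patch
        Card._validate_origin = Card.validate
        Card.validate = lambda *args, **kwargs: None  # offline no-op


def unpatch_validate():
    if hasattr(Card, '_validate_origin'):
        Card.validate = Card._validate_origin
        del Card._validate_origin


patch_validate()
Card().validate()   # now a no-op instead of an error
unpatch_validate()
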
@@ -33,7 +33,6 @@ def hf_pipeline(
    **kwargs,
) -> 'transformers.Pipeline':
    from transformers import pipeline

    if isinstance(model, str):
        if not os.path.exists(model):
            model = snapshot_download(model)

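For context, hf_pipeline resolves a model id to a local directory before handing it to transformers.pipeline. A hedged sketch of that resolve-then-build flow; build_hf_pipeline is a made-up name, and it assumes the model id is hosted on ModelScope so modelscope.snapshot_download applies:

import os

from modelscope import snapshot_download
from transformers import pipeline


def build_hf_pipeline(task: str, model: str, **kwargs):
    if isinstance(model, str) and not os.path.exists(model):
        # Resolve a hub id to a local snapshot directory (cached on reuse).
        model = snapshot_download(model)
    return pipeline(task=task, model=model, **kwargs)
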