Mirror of https://github.com/modelscope/modelscope.git (synced 2025-12-25 20:49:37 +01:00)
Merge branch 'merge_master-github_1023' into master

Title: Merge branch 'master-github' into merge_master-github_1023
Link: https://code.alibaba-inc.com/Ali-MaaS/MaaS-lib/codereview/14398134
@@ -32,6 +32,7 @@ RUN pip install --no-cache-dir mpi4py paint_ldm \
    mmcls>=0.21.0 mmdet>=2.25.0 decord>=0.6.0 pai-easycv ms_swift \
    ipykernel fasttext fairseq deepspeed -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html

ARG USE_GPU
# for cpu install cpu version faiss, faiss depends on blas lib, we install libopenblas TODO rename gpu or cpu version faiss
RUN if [ "$USE_GPU" = "True" ] ; then \
    pip install --no-cache-dir funtextprocessing kwsbp==0.0.6 faiss==1.7.2 safetensors typeguard==2.13.3 scikit-learn librosa==0.9.2 funasr -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html; \
@@ -45,10 +46,14 @@ COPY examples /modelscope/examples
# for pai-easycv setup compatiblity issue
ENV SETUPTOOLS_USE_DISTUTILS=stdlib

RUN CUDA_HOME=/usr/local/cuda TORCH_CUDA_ARCH_LIST="6.0 6.1 7.0 7.5 8.0 8.6" pip install --no-cache-dir 'git+https://github.com/facebookresearch/detectron2.git'
RUN if [ "$USE_GPU" = "True" ] ; then \
    CUDA_HOME=/usr/local/cuda TORCH_CUDA_ARCH_LIST="6.0 6.1 7.0 7.5 8.0 8.6" pip install --no-cache-dir 'git+https://github.com/facebookresearch/detectron2.git'; \
else \
    echo 'cpu unsupport detectron2'; \
fi

# torchmetrics==0.11.4 for ofa
RUN pip install --no-cache-dir tiktoken torchmetrics==0.11.4 transformers_stream_generator 'protobuf<=3.20.0' bitsandbytes basicsr
RUN pip install --no-cache-dir jupyterlab torchmetrics==0.11.4 tiktoken transformers_stream_generator 'protobuf<=3.20.0' bitsandbytes basicsr
COPY docker/scripts/install_flash_attension.sh /tmp/install_flash_attension.sh
RUN if [ "$USE_GPU" = "True" ] ; then \
    bash /tmp/install_flash_attension.sh; \
@@ -1,6 +1,4 @@
git clone -b v1.0.8 https://github.com/Dao-AILab/flash-attention && \
cd flash-attention && pip install . && \
pip install csrc/layer_norm && \
pip install csrc/rotary && \
git clone -b v2.3.2 https://github.com/Dao-AILab/flash-attention && \
cd flash-attention && python setup.py install && \
cd .. && \
rm -rf flash-attention
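(A hedged sanity check, not part of this commit: after building the GPU image, one can confirm that the wheel built from the v2.3.2 tag is the one importable at runtime. The module name flash_attn is the package's usual import name, assumed here rather than stated in the diff.)

# Assumed post-build check; run inside the freshly built GPU image.
import flash_attn
print(flash_attn.__version__)  # expected to report a 2.3.x version after this change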
@@ -134,12 +134,13 @@ class Model(ABC):
                ignore_file_pattern=ignore_file_pattern)
        logger.info(f'initialize model from {local_model_dir}')

        configuration_path = osp.join(local_model_dir, ModelFile.CONFIGURATION)
        cfg = None
        if cfg_dict is not None:
            cfg = cfg_dict
        else:
            cfg = Config.from_file(
                osp.join(local_model_dir, ModelFile.CONFIGURATION))
        task_name = cfg.task
        elif os.path.exists(configuration_path):
            cfg = Config.from_file(configuration_path)
        task_name = getattr(cfg, 'task', None)
        if 'task' in kwargs:
            task_name = kwargs.pop('task')
        model_cfg = getattr(cfg, 'model', ConfigDict())
@@ -162,6 +163,9 @@ class Model(ABC):
            model = model.to(device)
            return model
        # use ms
        if cfg is None:
            raise FileNotFoundError(
                f'`{ModelFile.CONFIGURATION}` file not found.')
        model_cfg.model_dir = local_model_dir

        # install and import remote repos before build
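(For context, a hedged sketch of what the two hunks above enable: loading a Hugging Face-format repo that ships only a config.json and no configuration.json. The model id and task below are placeholders, not values taken from this commit.)

# Assumed usage: the repo has no configuration.json, so cfg stays None and
# loading can fall through to the HF AutoModel path instead of failing on the
# missing file; the error above is only raised if the ModelScope path is needed.
from modelscope.models import Model

model = Model.from_pretrained('some-org/some-hf-only-model', task='chat')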
@@ -326,6 +326,20 @@ TASK_INPUTS = {

    # ============ nlp tasks ===================
    Tasks.chat: {
        # An input example for `messages` format (Dict[str, List[Dict[str, str]]]):
        # {'messages': [{
        #     'role': 'system',
        #     'content': 'You are a helpful assistant.'
        # }, {
        #     'role': 'user',
        #     'content': 'Hello! Where is the capital of Zhejiang?'
        # }, {
        #     'role': 'assistant',
        #     'content': 'Hangzhou is the capital of Zhejiang.'
        # }, {
        #     'role': 'user',
        #     'content': 'Tell me something about HangZhou?'
        # }]}
        'messages': InputType.LIST
    },
    Tasks.text_classification: [
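(A hedged usage sketch of the new `messages` input registered above; the model id is a placeholder. The same call pattern appears in the updated test at the end of this diff.)

# Assumed example; mirrors the input format documented in the comment above.
from modelscope.pipelines import pipeline
from modelscope.utils.constant import Tasks

chat_pipe = pipeline(task=Tasks.chat, model='some-org/some-chat-llm')
result = chat_pipe({'messages': [
    {'role': 'system', 'content': 'You are a helpful assistant.'},
    {'role': 'user', 'content': 'Hello! Where is the capital of Zhejiang?'},
]})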
@@ -216,7 +216,7 @@ def llm_first_checker(model: Union[str, List[str], Model, List[Model]],
        except Exception:
            return None

    def parse_model_type(file: Optional[str], pattern: str) -> Optional[str]:
    def parse_and_get(file: Optional[str], pattern: str) -> Optional[str]:
        if file is None or not osp.exists(file):
            return None
        return Config.from_file(file).safe_get(pattern)
@@ -224,15 +224,22 @@ def llm_first_checker(model: Union[str, List[str], Model, List[Model]],
    def get_model_type(model: str, revision: Optional[str]) -> Optional[str]:
        cfg_file = get_file_name(model, ModelFile.CONFIGURATION, revision)
        hf_cfg_file = get_file_name(model, ModelFile.CONFIG, revision)
        cfg_model_type = parse_model_type(cfg_file, 'model.type')
        hf_cfg_model_type = parse_model_type(hf_cfg_file, 'model_type')
        cfg_model_type = parse_and_get(cfg_file, 'model.type')
        hf_cfg_model_type = parse_and_get(hf_cfg_file, 'model_type')
        return cfg_model_type or hf_cfg_model_type

    def get_adapter_type(model: str, revision: Optional[str]) -> Optional[str]:
        cfg_file = get_file_name(model, ModelFile.CONFIGURATION, revision)
        model = parse_and_get(cfg_file, 'adapter_cfg.model_id_or_path')
        revision = parse_and_get(cfg_file, 'adapter_cfg.model_revision')
        return None if model is None else get_model_type(model, revision)

    if isinstance(model, list):
        model = model[0]
    if not isinstance(model, str):
        model = model.model_dir
    model_type = get_model_type(model, revision)
    model_type = get_model_type(model, revision) \
        or get_adapter_type(model, revision)
    if model_type is not None:
        model_type = model_type.lower().split('-')[0]
        if model_type in LLM_FORMAT_MAP:
@@ -9,11 +9,13 @@ from transformers import PreTrainedTokenizer

from modelscope import (AutoModelForCausalLM, AutoTokenizer, Pipeline,
                        snapshot_download)
from modelscope.hub.file_download import model_file_download
from modelscope.models.base import Model
from modelscope.models.nlp import ChatGLM2Tokenizer, Llama2Tokenizer
from modelscope.outputs import OutputKeys
from modelscope.pipelines.builder import PIPELINES
from modelscope.pipelines.util import is_model, is_official_hub_path
from modelscope.utils.config import Config
from modelscope.utils.constant import Invoke, ModelFile, Tasks
from modelscope.utils.logger import get_logger
@@ -27,6 +29,22 @@ class LLMPipeline(Pipeline):
    def initiate_single_model(self, model):
        if isinstance(model, str):
            logger.info(f'initiate model from {model}')
        if self._is_swift_model(model):
            from swift import Swift

            base_model = self.cfg.safe_get('adapter_cfg.model_id_or_path')
            assert base_model is not None, 'Cannot get adapter_cfg.model_id_or_path from configuration.json file.'
            revision = self.cfg.safe_get('adapter_cfg.model_revision',
                                         'master')
            base_model = Model.from_pretrained(
                base_model,
                revision,
                invoked_by=Invoke.PIPELINE,
                device_map=self.device_map,
                torch_dtype=self.torch_dtype,
                trust_remote_code=True)
            swift_model = Swift.from_pretrained(base_model, model_id=model)
            return swift_model
        if isinstance(model, str) and is_official_hub_path(model):
            logger.info(f'initiate model from location {model}.')
            if is_model(model):
@@ -50,6 +68,20 @@ class LLMPipeline(Pipeline):
        else:
            return model

    def _is_swift_model(self, model: Union[str, Any]) -> bool:
        if not isinstance(model, str):
            return False
        if os.path.exists(model):
            cfg_file = os.path.join(model, ModelFile.CONFIGURATION)
        else:
            try:
                cfg_file = model_file_download(model, ModelFile.CONFIGURATION)
            except Exception:
                return False

        self.cfg = Config.from_file(cfg_file)
        return self.cfg.safe_get('adapter_cfg.tuner_backend') == 'swift'

    def __init__(self,
                 format_messages: Union[Callable, str] = None,
                 format_output: Callable = None,
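(For orientation, a hedged sketch of the adapter_cfg section this code expects in a tuned model's configuration.json; only the key names come from the code above, the values are placeholders.)

# Assumed configuration.json fragment, shown as a Python dict for readability.
adapter_cfg = {
    'model_id_or_path': 'some-org/some-base-llm',  # base model to load first
    'model_revision': 'master',                    # optional, defaults to 'master'
    'tuner_backend': 'swift',                      # marks the repo as a Swift adapter
}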
@@ -181,8 +181,20 @@ class EpochBasedTrainer(BaseTrainer):
                compile_options = {}
            self.model = compile_model(self.model, **compile_options)

        if 'work_dir' in kwargs:
        if kwargs.get('work_dir', None) is not None:
            self.work_dir = kwargs['work_dir']
            if 'train' not in self.cfg:
                self.cfg['train'] = ConfigDict()
            self.cfg['train']['work_dir'] = self.work_dir
            if 'checkpoint' in self.cfg['train']:
                if 'period' in self.cfg['train']['checkpoint']:
                    self.cfg['train']['checkpoint']['period'][
                        'save_dir'] = self.work_dir
                if 'best' in self.cfg['train']['checkpoint']:
                    self.cfg['train']['checkpoint']['best'][
                        'save_dir'] = self.work_dir
            if 'logging' in self.cfg['train']:
                self.cfg['train']['logging']['out_dir'] = self.work_dir
        else:
            self.work_dir = self.cfg.train.get('work_dir', './work_dir')
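(A hedged sketch of the behaviour added above: an explicit work_dir passed to the trainer now also overrides the checkpoint and logging output directories in the loaded config. The model id is a placeholder, and whether these minimal default_args are sufficient depends on the model's own training config.)

# Assumed usage; the cfg rewrites happen inside EpochBasedTrainer.__init__.
from modelscope.trainers import build_trainer

trainer = build_trainer(default_args={
    'model': 'some-org/some-trainable-model',
    'work_dir': './my_work_dir',
})
# After construction, cfg.train.work_dir, cfg.train.checkpoint.*.save_dir and
# cfg.train.logging.out_dir (where those sections exist) all point to ./my_work_dir.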
@@ -6,8 +6,11 @@ from modelscope.utils.ast_utils import INDEX_KEY
from modelscope.utils.import_utils import LazyImportModule


def can_load_by_ms(model_dir: str, tast_name: str, model_type: str) -> bool:
    if ('MODELS', tast_name,
def can_load_by_ms(model_dir: str, task_name: Optional[str],
                   model_type: Optional[str]) -> bool:
    if model_type is None or task_name is None:
        return False
    if ('MODELS', task_name,
            model_type) in LazyImportModule.AST_INDEX[INDEX_KEY]:
        return True
    ms_wrapper_path = os.path.join(model_dir, 'ms_wrapper.py')
@@ -25,11 +28,27 @@ def _can_load_by_hf_automodel(automodel_class: type, config) -> bool:
    return False


def get_hf_automodel_class(model_dir: str, task_name: str) -> Optional[type]:
    from modelscope import (AutoConfig, AutoModel, AutoModelForCausalLM,
                            AutoModelForSeq2SeqLM,
                            AutoModelForTokenClassification,
                            AutoModelForSequenceClassification)
def get_default_automodel(config) -> Optional[type]:
    import modelscope.utils.hf_util as hf_util
    if not hasattr(config, 'auto_map'):
        return None
    auto_map = config.auto_map
    automodel_list = [k for k in auto_map.keys() if k.startswith('AutoModel')]
    if len(automodel_list) == 1:
        return getattr(hf_util, automodel_list[0])
    if len(automodel_list) > 1 and len(
            set([auto_map[k] for k in automodel_list])) == 1:
        return getattr(hf_util, automodel_list[0])
    return None


def get_hf_automodel_class(model_dir: str,
                           task_name: Optional[str]) -> Optional[type]:
    from modelscope.utils.hf_util import (AutoConfig, AutoModel,
                                          AutoModelForCausalLM,
                                          AutoModelForSeq2SeqLM,
                                          AutoModelForTokenClassification,
                                          AutoModelForSequenceClassification)
    automodel_mapping = {
        Tasks.backbone: AutoModel,
        Tasks.chat: AutoModelForCausalLM,
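(A hedged illustration of the config.json auto_map shape that get_default_automodel() inspects; the module and class names are placeholders. A single AutoModel* entry, or several entries all pointing at the same class, resolves to the corresponding auto class.)

# Assumed remote-code config fragment, shown as a Python dict.
auto_map = {
    'AutoConfig': 'configuration_custom.CustomConfig',
    'AutoModelForCausalLM': 'modeling_custom.CustomForCausalLM',
}
# get_default_automodel(config) would return hf_util.AutoModelForCausalLM here.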
@@ -37,19 +56,18 @@ def get_hf_automodel_class(model_dir: str, task_name: str) -> Optional[type]:
        Tasks.text_classification: AutoModelForSequenceClassification,
        Tasks.token_classification: AutoModelForTokenClassification,
    }
    automodel_class = automodel_mapping.get(task_name, None)
    if automodel_class is None:
        return None
    config_path = os.path.join(model_dir, 'config.json')
    if not os.path.exists(config_path):
        return None
    try:
    try:
        config = AutoConfig.from_pretrained(
            model_dir, trust_remote_code=True)
    except (FileNotFoundError, ValueError):
        return None
        config = AutoConfig.from_pretrained(model_dir, trust_remote_code=True)
    if task_name is None:
        automodel_class = get_default_automodel(config)
    else:
        automodel_class = automodel_mapping.get(task_name, None)

    if automodel_class is None:
        return None
    if _can_load_by_hf_automodel(automodel_class, config):
        return automodel_class
    if (automodel_class is AutoModelForCausalLM
@@ -71,14 +89,5 @@ def try_to_load_hf_model(model_dir: str, task_name: str,
    model = None
    if automodel_class is not None:
        # use hf
        device_map = kwargs.get('device_map', None)
        torch_dtype = kwargs.get('torch_dtype', None)
        config = kwargs.get('config', None)

        model = automodel_class.from_pretrained(
            model_dir,
            device_map=device_map,
            torch_dtype=torch_dtype,
            config=config,
            trust_remote_code=True)
        model = automodel_class.from_pretrained(model_dir, **kwargs)
    return model
@@ -21,7 +21,7 @@ from transformers.models.auto.tokenization_auto import (
    TOKENIZER_MAPPING_NAMES, get_tokenizer_config)

from modelscope import snapshot_download
from modelscope.utils.constant import Invoke
from modelscope.utils.constant import DEFAULT_MODEL_REVISION, Invoke

try:
    from transformers import GPTQConfig as GPTQConfigHF
@@ -84,69 +84,6 @@ patch_tokenizer_base()
patch_model_base()


def check_hf_code(model_dir: str, auto_class: type,
                  trust_remote_code: bool) -> None:
    config_path = os.path.join(model_dir, 'config.json')
    if not os.path.exists(config_path):
        raise FileNotFoundError(f'{config_path} is not found')
    config_dict = PretrainedConfig.get_config_dict(config_path)[0]
    auto_class_name = auto_class.__name__
    if auto_class is AutoTokenizerHF:
        tokenizer_config = get_tokenizer_config(model_dir)
    # load from repo
    if trust_remote_code:
        has_remote_code = False
        if auto_class is AutoTokenizerHF:
            auto_map = tokenizer_config.get('auto_map', None)
            if auto_map is not None:
                module_name = auto_map.get(auto_class_name, None)
                if module_name is not None:
                    module_name = module_name[0]
                    has_remote_code = True
        else:
            auto_map = config_dict.get('auto_map', None)
            if auto_map is not None:
                module_name = auto_map.get(auto_class_name, None)
                has_remote_code = module_name is not None

        if has_remote_code:
            module_path = os.path.join(model_dir,
                                       module_name.split('.')[0] + '.py')
            if not os.path.exists(module_path):
                raise FileNotFoundError(f'{module_path} is not found')
            return

    # trust_remote_code is False or has_remote_code is False
    model_type = config_dict.get('model_type', None)
    if model_type is None:
        raise ValueError(f'`model_type` key is not found in {config_path}.')

    trust_remote_code_info = '.'
    if not trust_remote_code:
        trust_remote_code_info = ', You can try passing `trust_remote_code=True`.'
    if auto_class is AutoConfigHF:
        if model_type not in CONFIG_MAPPING:
            raise ValueError(
                f'{model_type} not found in HF `CONFIG_MAPPING`{trust_remote_code_info}'
            )
    elif auto_class is AutoTokenizerHF:
        tokenizer_class = tokenizer_config.get('tokenizer_class')
        if tokenizer_class is not None:
            return
        if model_type not in TOKENIZER_MAPPING_NAMES:
            raise ValueError(
                f'{model_type} not found in HF `TOKENIZER_MAPPING_NAMES`{trust_remote_code_info}'
            )
    else:
        mapping_names = [
            m.model_type for m in auto_class._model_mapping.keys()
        ]
        if model_type not in mapping_names:
            raise ValueError(
                f'{model_type} not found in HF `auto_class._model_mapping`{trust_remote_code_info}'
            )


def get_wrapped_class(module_class, ignore_file_pattern=[], **kwargs):
    """Get a custom wrapper class for auto classes to download the models from the ModelScope hub
    Args:
@@ -166,7 +103,7 @@ def get_wrapped_class(module_class, ignore_file_pattern=[], **kwargs):
            ignore_file_pattern = kwargs.pop('ignore_file_pattern',
                                             default_ignore_file_pattern)
            if not os.path.exists(pretrained_model_name_or_path):
                revision = kwargs.pop('revision', None)
                revision = kwargs.pop('revision', DEFAULT_MODEL_REVISION)
                model_dir = snapshot_download(
                    pretrained_model_name_or_path,
                    revision=revision,
@@ -175,9 +112,6 @@ def get_wrapped_class(module_class, ignore_file_pattern=[], **kwargs):
            else:
                model_dir = pretrained_model_name_or_path

            if module_class is not GenerationConfigHF:
                trust_remote_code = kwargs.get('trust_remote_code', False)
                check_hf_code(model_dir, module_class, trust_remote_code)
            module_obj = module_class.from_pretrained(model_dir, *model_args,
                                                      **kwargs)
@@ -1,5 +1,5 @@
# Make sure to modify __release_datetime__ to release time when making official release.
__version__ = '1.9.1'
__version__ = '1.9.3'
# default release datetime for branches under active development is set
# to be a time far-far-away-into-the-future
__release_datetime__ = '2099-09-06 00:00:00'
@@ -166,12 +166,24 @@ class CustomPipelineTest(unittest.TestCase):
                return inputs

            def postprocess(self, out, **kwargs):
                return {'response': 'xxx', 'history': []}
                return {'message': {'role': 'assistant', 'content': 'xxx'}}

        pipe = pipeline(
            task=Tasks.chat, pipeline_name=dummy_module, model=self.model_dir)
        pipe('text')
        inputs = {'text': 'aaa', 'history': [('dfd', 'fds')]}
        inputs = {
            'messages': [{
                'role': 'user',
                'content': 'dfd'
            }, {
                'role': 'assistant',
                'content': 'fds'
            }, {
                'role': 'user',
                'content': 'aaa'
            }]
        }
        pipe(inputs)

    def test_custom(self):