Mirror of https://github.com/modelscope/modelscope.git (synced 2025-12-25 04:29:22 +01:00)
More automodel (#1098)
* add more hf alias

Co-authored-by: Yingda Chen <yingda.chen@alibaba-inc.com>
@@ -36,9 +36,12 @@ if TYPE_CHECKING:
     from .utils.hf_util import (
         AutoModel, AutoModelForCausalLM, AutoModelForSeq2SeqLM,
         AutoModelForSequenceClassification,
-        AutoModelForTokenClassification, AutoModelForImageSegmentation,
-        AutoTokenizer, GenerationConfig, AutoImageProcessor, BatchFeature,
-        T5EncoderModel)
+        AutoModelForTokenClassification, AutoModelForImageClassification,
+        AutoModelForImageToImage, AutoModelForImageSegmentation,
+        AutoModelForQuestionAnswering, AutoModelForMaskedLM, AutoTokenizer,
+        AutoModelForMaskGeneration, AutoModelForPreTraining,
+        AutoModelForTextEncoding, GenerationConfig, AutoImageProcessor,
+        BatchFeature, T5EncoderModel)
 else:
     print(
         'transformer is not installed, please install it if you want to use related modules'
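In practice, the wider import block means the new auto classes resolve straight from the top-level package when transformers is installed. A hypothetical usage sketch (the model id is a placeholder, not something this commit pins down):

# Placeholder hub id; any image-classification checkpoint would do.
from modelscope import AutoImageProcessor, AutoModelForImageClassification

processor = AutoImageProcessor.from_pretrained('some-org/some-model')
model = AutoModelForImageClassification.from_pretrained('some-org/some-model')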
@@ -96,8 +99,13 @@ else:
         'AwqConfig', 'BitsAndBytesConfig', 'AutoModelForCausalLM',
         'AutoModelForSeq2SeqLM', 'AutoTokenizer',
         'AutoModelForSequenceClassification',
-        'AutoModelForTokenClassification', 'AutoModelForImageSegmentation',
-        'AutoImageProcessor', 'BatchFeature', 'T5EncoderModel'
+        'AutoModelForTokenClassification',
+        'AutoModelForImageClassification', 'AutoModelForImageToImage',
+        'AutoModelForQuestionAnswering', 'AutoModelForMaskedLM',
+        'AutoModelForMaskGeneration', 'AutoModelForPreTraining',
+        'AutoModelForTextEncoding', 'AutoModelForTokenClassification',
+        'AutoModelForImageSegmentation', 'AutoImageProcessor',
+        'BatchFeature', 'T5EncoderModel'
     ]

 import sys
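The quoted names extend the export list that ModelScope's lazy loader consumes, so importing modelscope stays cheap until one of these attributes is first touched. A minimal sketch of the underlying technique (PEP 562 module-level __getattr__, illustrative rather than the project's actual loader):

# Illustrative PEP 562 lazy loading for a package __init__; ModelScope's
# real loader differs in detail, but the effect on import cost is the same.
import importlib

_LAZY_ATTRS = {'AutoModelForMaskedLM': '.utils.hf_util'}


def __getattr__(name):
    if name in _LAZY_ATTRS:
        module = importlib.import_module(_LAZY_ATTRS[name], __package__)
        return getattr(module, name)
    raise AttributeError(f'module {__name__!r} has no attribute {name!r}')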
@@ -9,11 +9,21 @@ from transformers import AutoFeatureExtractor as AutoFeatureExtractorHF
 from transformers import AutoImageProcessor as AutoImageProcessorHF
 from transformers import AutoModel as AutoModelHF
 from transformers import AutoModelForCausalLM as AutoModelForCausalLMHF
+from transformers import \
+    AutoModelForImageClassification as AutoModelForImageClassificationHF
 from transformers import \
     AutoModelForImageSegmentation as AutoModelForImageSegmentationHF
+from transformers import AutoModelForImageToImage as AutoModelForImageToImageHF
+from transformers import AutoModelForMaskedLM as AutoModelForMaskedLMHF
+from transformers import \
+    AutoModelForMaskGeneration as AutoModelForMaskGenerationHF
+from transformers import AutoModelForPreTraining as AutoModelForPreTrainingHF
+from transformers import \
+    AutoModelForQuestionAnswering as AutoModelForQuestionAnsweringHF
 from transformers import AutoModelForSeq2SeqLM as AutoModelForSeq2SeqLMHF
 from transformers import \
     AutoModelForSequenceClassification as AutoModelForSequenceClassificationHF
+from transformers import AutoModelForTextEncoding as AutoModelForTextEncodingHF
 from transformers import \
     AutoModelForTokenClassification as AutoModelForTokenClassificationHF
 from transformers import AutoProcessor as AutoProcessorHF
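Note the convention: every transformers class arrives under an HF-suffixed alias so the wrapped export can reuse the canonical name without losing access to the original. The same pattern in miniature (illustrative, not code from this commit):

# The alias keeps the pristine class reachable after the package publishes
# a patched object under the original name.
from transformers import AutoModel as AutoModelHF


class AutoModel(AutoModelHF):  # stand-in for the get_wrapped_class result
    pass


assert AutoModel is not AutoModelHF
assert issubclass(AutoModel, AutoModelHF)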
@@ -272,7 +282,7 @@ def get_wrapped_class(module_class,
             ignore_file_pattern = kwargs.pop('ignore_file_pattern',
                                              default_ignore_file_pattern)
             subfolder = kwargs.pop('subfolder', default_file_filter)

             file_filter = None
             if subfolder:
                 file_filter = f'{subfolder}/*'
             if not os.path.exists(pretrained_model_name_or_path):
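The hunk above sits inside the from_pretrained override that get_wrapped_class builds: when the given path is not a local directory, the id is resolved through the ModelScope hub before delegating to the original transformers class. A minimal sketch of that wrapping idea, assuming modelscope's public snapshot_download helper (the real get_wrapped_class covers more cases):

# Illustrative wrapper factory; not the actual implementation.
import os

from modelscope.hub.snapshot_download import snapshot_download


def wrap_auto_class(module_class, ignore_file_pattern=None):

    class ClassWrapper(module_class):

        @classmethod
        def from_pretrained(cls, pretrained_model_name_or_path, *args,
                            **kwargs):
            # Resolve a hub id to a local snapshot, skipping files that
            # match the ignore patterns, then delegate to transformers.
            if not os.path.exists(pretrained_model_name_or_path):
                pretrained_model_name_or_path = snapshot_download(
                    pretrained_model_name_or_path,
                    ignore_file_pattern=ignore_file_pattern)
            return module_class.from_pretrained(
                pretrained_model_name_or_path, *args, **kwargs)

    ClassWrapper.__name__ = module_class.__name__
    return ClassWrapper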
@@ -315,25 +325,48 @@ AutoModelForTokenClassification = get_wrapped_class(
     AutoModelForTokenClassificationHF)
 AutoModelForImageSegmentation = get_wrapped_class(
     AutoModelForImageSegmentationHF)
+AutoModelForImageClassification = get_wrapped_class(
+    AutoModelForImageClassificationHF)
+AutoModelForImageToImage = get_wrapped_class(AutoModelForImageToImageHF)
+AutoModelForQuestionAnswering = get_wrapped_class(
+    AutoModelForQuestionAnsweringHF)
+AutoModelForMaskedLM = get_wrapped_class(AutoModelForMaskedLMHF)
+AutoModelForMaskGeneration = get_wrapped_class(AutoModelForMaskGenerationHF)
+AutoModelForPreTraining = get_wrapped_class(AutoModelForPreTrainingHF)
+AutoModelForTextEncoding = get_wrapped_class(AutoModelForTextEncodingHF)
 T5EncoderModel = get_wrapped_class(T5EncoderModelHF)
+
 AutoTokenizer = get_wrapped_class(
     AutoTokenizerHF,
     ignore_file_pattern=[
-        r'\w+\.bin', r'\w+\.safetensors', r'\w+\.pth', r'\w+\.pt'
+        r'\w+\.bin', r'\w+\.safetensors', r'\w+\.pth', r'\w+\.pt', r'\w+\.h5'
     ])
+AutoProcessor = get_wrapped_class(
+    AutoProcessorHF,
+    ignore_file_pattern=[
+        r'\w+\.bin', r'\w+\.safetensors', r'\w+\.pth', r'\w+\.pt', r'\w+\.h5'
+    ])
 AutoConfig = get_wrapped_class(
     AutoConfigHF,
     ignore_file_pattern=[
-        r'\w+\.bin', r'\w+\.safetensors', r'\w+\.pth', r'\w+\.pt'
+        r'\w+\.bin', r'\w+\.safetensors', r'\w+\.pth', r'\w+\.pt', r'\w+\.h5'
     ])
 GenerationConfig = get_wrapped_class(
     GenerationConfigHF,
     ignore_file_pattern=[
-        r'\w+\.bin', r'\w+\.safetensors', r'\w+\.pth', r'\w+\.pt'
+        r'\w+\.bin', r'\w+\.safetensors', r'\w+\.pth', r'\w+\.pt', r'\w+\.h5'
     ])
+BitsAndBytesConfig = get_wrapped_class(
+    BitsAndBytesConfigHF,
+    ignore_file_pattern=[
+        r'\w+\.bin', r'\w+\.safetensors', r'\w+\.pth', r'\w+\.pt', r'\w+\.h5'
+    ])
+AutoImageProcessor = get_wrapped_class(
+    AutoImageProcessorHF,
+    ignore_file_pattern=[
+        r'\w+\.bin', r'\w+\.safetensors', r'\w+\.pth', r'\w+\.pt', r'\w+\.h5'
+    ])

 GPTQConfig = GPTQConfigHF
 AwqConfig = AwqConfigHF
-BitsAndBytesConfig = BitsAndBytesConfigHF
-AutoImageProcessor = get_wrapped_class(AutoImageProcessorHF)
 BatchFeature = get_wrapped_class(BatchFeatureHF)
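The repeated ignore_file_pattern list is what lets tokenizer, processor, and config wrappers download only lightweight files, and this commit adds r'\w+\.h5' so TensorFlow weights are skipped as well; the subfolder kwarg from the earlier hunk narrows the fetch to one directory. A runnable sketch of both filters together (pure illustration, not the hub client):

import fnmatch
import re

IGNORE = [r'\w+\.bin', r'\w+\.safetensors', r'\w+\.pth', r'\w+\.pt', r'\w+\.h5']


def select_files(all_files, subfolder=None, ignore_file_pattern=IGNORE):
    # subfolder -> glob filter, mirroring file_filter = f'{subfolder}/*'
    file_filter = f'{subfolder}/*' if subfolder else None
    kept = []
    for name in all_files:
        if file_filter and not fnmatch.fnmatch(name, file_filter):
            continue  # outside the requested subfolder
        if any(re.search(p, name) for p in ignore_file_pattern):
            continue  # weight file, skipped for tokenizer/config loads
        kept.append(name)
    return kept


files = ['tokenizer_config.json', 'pytorch_model.bin', 'tf_model.h5']
assert select_files(files) == ['tokenizer_config.json']  # .h5 now filtered too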