Mirror of https://github.com/modelscope/modelscope.git
add model type check and give easy-to-understand error prompts
Link: https://code.alibaba-inc.com/Ali-MaaS/MaaS-lib/codereview/11498502
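The change is mechanical and identical across the touched pipelines: right after super().__init__(...) resolves the model argument, the constructor asserts that self.model is a Model instance, so a model directory without a readable configuration file fails immediately with an understandable hint instead of a later, harder-to-trace attribute error. Below is a minimal sketch of the pattern; ExamplePipeline and its use of Preprocessor.from_pretrained are illustrative stand-ins and not part of this commit.

from modelscope.models.base.base_model import Model
from modelscope.pipelines.base import Pipeline
from modelscope.preprocessors import Preprocessor
from modelscope.utils.constant import ModelFile


class ExamplePipeline(Pipeline):
    """Illustrative only: shows the type check this commit adds to each pipeline."""

    def __init__(self, model, preprocessor=None, **kwargs):
        # The base class resolves `model` (model id, local dir, or Model instance).
        super().__init__(model=model, preprocessor=preprocessor, **kwargs)

        # If no valid config (ModelFile.CONFIGURATION, i.e. configuration.json) was
        # found, `self.model` is not a Model instance; fail fast with a clear hint.
        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'

        # Only after the check is it safe to rely on Model attributes such as model_dir.
        if preprocessor is None:
            self.preprocessor = Preprocessor.from_pretrained(
                self.model.model_dir, **kwargs)

With the check in place, a missing or broken configuration surfaces at pipeline construction time as the 'please check whether model config exists in configuration.json' message rather than as an unrelated error deeper in the call stack.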
@@ -7,6 +7,7 @@ import numpy as np
import torch

from modelscope.metainfo import Pipelines
from modelscope.models.base.base_model import Model
from modelscope.outputs import OutputKeys
from modelscope.pipelines.base import Input, Pipeline
from modelscope.pipelines.builder import PIPELINES

@@ -44,6 +45,10 @@ class ImageDefrcnDetectionPipeline(Pipeline):
        """
        super().__init__(model=model, auto_collate=False, **kwargs)

        assert isinstance(
            self.model, Model
        ), f'please check whether model config exists in {ModelFile.CONFIGURATION}'

        model_path = os.path.join(self.model.model_dir,
                                  ModelFile.TORCH_MODEL_FILE)
        self.model.model = self._load_pretrained(

@@ -8,6 +8,7 @@ import torch
from PIL import Image

from modelscope.metainfo import Pipelines
from modelscope.models.base.base_model import Model
from modelscope.models.cv.image_instance_segmentation import (
    CascadeMaskRCNNSwinModel, get_img_ins_seg_result)
from modelscope.pipelines.base import Input, Pipeline

@@ -40,6 +41,8 @@ class ImageInstanceSegmentationPipeline(Pipeline):
        super().__init__(model=model, preprocessor=preprocessor, **kwargs)

        if preprocessor is None:
            assert isinstance(self.model, Model), \
                f'please check whether model config exists in {ModelFile.CONFIGURATION}'
            config_path = os.path.join(self.model.model_dir,
                                       ModelFile.CONFIGURATION)
            cfg = Config.from_file(config_path)

@@ -14,6 +14,7 @@ import torch
from PIL import Image

from modelscope.metainfo import Pipelines
from modelscope.models.base.base_model import Model
from modelscope.models.cv.language_guided_video_summarization import \
    ClipItVideoSummarization
from modelscope.models.cv.language_guided_video_summarization.summarizer import (

@@ -44,8 +45,9 @@ class LanguageGuidedVideoSummarizationPipeline(Pipeline):
        """
        super().__init__(model=model, auto_collate=False, **kwargs)
        logger.info(f'loading model from {model}')
        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'
        self.model_dir = self.model.model_dir

        self.tmp_dir = kwargs.get('tmp_dir', None)
        if self.tmp_dir is None:
            self.tmp_dir = tempfile.TemporaryDirectory().name

@@ -10,7 +10,7 @@ from modelscope.pipelines.builder import PIPELINES
from modelscope.pipelines.util import batch_process
from modelscope.preprocessors import (MPlugPreprocessor, OfaPreprocessor,
                                      Preprocessor)
from modelscope.utils.constant import Tasks
from modelscope.utils.constant import ModelFile, Tasks
from modelscope.utils.logger import get_logger

logger = get_logger()

@@ -31,7 +31,10 @@ class ImageCaptioningPipeline(Pipeline):
        """
        super().__init__(model=model, preprocessor=preprocessor, **kwargs)
        self.model.eval()
        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'
        if preprocessor is None:
            if isinstance(self.model, OfaForAllTasks):
                self.preprocessor = OfaPreprocessor(self.model.model_dir)
            elif isinstance(self.model, MPlugForAllTasks):

@@ -8,7 +8,7 @@ from modelscope.outputs import OutputKeys
from modelscope.pipelines.base import Model, Pipeline
from modelscope.pipelines.builder import PIPELINES
from modelscope.preprocessors import MPlugPreprocessor, Preprocessor
from modelscope.utils.constant import Tasks
from modelscope.utils.constant import ModelFile, Tasks
from modelscope.utils.logger import get_logger

logger = get_logger()

@@ -30,6 +30,8 @@ class ImageTextRetrievalPipeline(Pipeline):
        """
        super().__init__(model=model, preprocessor=preprocessor, **kwargs)
        self.model.eval()
        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'
        if preprocessor is None:
            self.preprocessor = MPlugPreprocessor(self.model.model_dir)

@@ -11,7 +11,7 @@ from modelscope.outputs import OutputKeys
from modelscope.pipelines.base import Pipeline
from modelscope.pipelines.builder import PIPELINES
from modelscope.preprocessors import Preprocessor
from modelscope.utils.constant import Tasks
from modelscope.utils.constant import ModelFile, Tasks

__all__ = ['MGeoRankingPipeline']

@@ -46,6 +46,8 @@ class MGeoRankingPipeline(Pipeline):
            device=device,
            auto_collate=auto_collate)

        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'
        if preprocessor is None:
            self.preprocessor = Preprocessor.from_pretrained(
                self.model.model_dir,

@@ -8,7 +8,7 @@ from modelscope.outputs import OutputKeys
from modelscope.pipelines.base import Pipeline
from modelscope.pipelines.builder import PIPELINES
from modelscope.preprocessors import Preprocessor
from modelscope.utils.constant import Tasks
from modelscope.utils.constant import ModelFile, Tasks

__all__ = ['FaqQuestionAnsweringPipeline']

@@ -38,6 +38,8 @@ class FaqQuestionAnsweringPipeline(Pipeline):
            config_file=config_file,
            device=device,
            auto_collate=auto_collate)
        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'
        if preprocessor is None:
            self.preprocessor = Preprocessor.from_pretrained(
                self.model.model_dir, **kwargs)

@@ -55,6 +55,8 @@ class FeatureExtractionPipeline(Pipeline):
            device=device,
            auto_collate=auto_collate)

        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'
        if preprocessor is None:
            self.preprocessor = Preprocessor.from_pretrained(
                self.model.model_dir,

@@ -10,7 +10,7 @@ from modelscope.outputs import OutputKeys
from modelscope.pipelines.base import Pipeline, Tensor
from modelscope.pipelines.builder import PIPELINES
from modelscope.preprocessors import Preprocessor
from modelscope.utils.constant import Tasks
from modelscope.utils.constant import ModelFile, Tasks

__all__ = ['FillMaskPipeline']

@@ -61,6 +61,8 @@ class FillMaskPipeline(Pipeline):
            device=device,
            auto_collate=auto_collate)

        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'
        if preprocessor is None:
            self.preprocessor = Preprocessor.from_pretrained(
                self.model.model_dir,

@@ -9,7 +9,7 @@ from modelscope.models import Model
from modelscope.pipelines.base import Pipeline
from modelscope.pipelines.builder import PIPELINES
from modelscope.preprocessors import Preprocessor
from modelscope.utils.constant import Tasks
from modelscope.utils.constant import ModelFile, Tasks

__all__ = ['InformationExtractionPipeline']

@@ -45,6 +45,9 @@ class InformationExtractionPipeline(Pipeline):
            device=device,
            auto_collate=auto_collate)

        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'

        if self.preprocessor is None:
            self.preprocessor = Preprocessor.from_pretrained(
                self.model.model_dir,

@@ -7,7 +7,7 @@ from modelscope.models import Model
from modelscope.pipelines.builder import PIPELINES
from modelscope.pipelines.nlp import TokenClassificationPipeline
from modelscope.preprocessors import Preprocessor
from modelscope.utils.constant import Tasks
from modelscope.utils.constant import ModelFile, Tasks

__all__ = ['NamedEntityRecognitionPipeline']

@@ -55,6 +55,10 @@ class NamedEntityRecognitionPipeline(TokenClassificationPipeline):
            config_file=config_file,
            device=device,
            auto_collate=auto_collate)

        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'

        if preprocessor is None:
            self.preprocessor = Preprocessor.from_pretrained(
                self.model.model_dir,

@@ -10,7 +10,7 @@ from modelscope.outputs import OutputKeys
from modelscope.pipelines.base import Pipeline
from modelscope.pipelines.builder import PIPELINES
from modelscope.preprocessors import Preprocessor
from modelscope.utils.constant import Tasks
from modelscope.utils.constant import ModelFile, Tasks

__all__ = ['SentenceEmbeddingPipeline']

@@ -42,6 +42,10 @@ class SentenceEmbeddingPipeline(Pipeline):
            config_file=config_file,
            device=device,
            auto_collate=auto_collate)

        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'

        if preprocessor is None:
            self.preprocessor = Preprocessor.from_pretrained(
                self.model.model_dir,

@@ -53,6 +53,9 @@ class TableQuestionAnsweringPipeline(Pipeline):
            device=device,
            auto_collate=auto_collate)

        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'

        if preprocessor is None:
            self.preprocessor = TableQuestionAnsweringPreprocessor(
                self.model.model_dir, **kwargs)

@@ -11,7 +11,7 @@ from modelscope.pipelines.base import Pipeline
from modelscope.pipelines.builder import PIPELINES
from modelscope.pipelines.util import batch_process
from modelscope.preprocessors import Preprocessor
from modelscope.utils.constant import Fields, Tasks
from modelscope.utils.constant import Fields, ModelFile, Tasks
from modelscope.utils.logger import get_logger

logger = get_logger()

@@ -63,6 +63,9 @@ class TextClassificationPipeline(Pipeline):
            device=device,
            auto_collate=auto_collate)

        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'

        if preprocessor is None:
            if self.model.__class__.__name__ == 'OfaForAllTasks':
                self.preprocessor = Preprocessor.from_pretrained(

@@ -11,7 +11,7 @@ from modelscope.outputs import OutputKeys
from modelscope.pipelines.base import Pipeline, Tensor
from modelscope.pipelines.builder import PIPELINES
from modelscope.preprocessors import Preprocessor
from modelscope.utils.constant import Tasks
from modelscope.utils.constant import ModelFile, Tasks

__all__ = ['TextErrorCorrectionPipeline']

@@ -49,6 +49,8 @@ class TextErrorCorrectionPipeline(Pipeline):
            config_file=config_file,
            device=device,
            auto_collate=auto_collate)
        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'
        if preprocessor is None:
            self.preprocessor = Preprocessor.from_pretrained(
                self.model.model_dir, **kwargs)

@@ -13,7 +13,7 @@ from modelscope.pipelines.base import Pipeline, Tensor
from modelscope.pipelines.builder import PIPELINES
from modelscope.preprocessors import Preprocessor
from modelscope.utils.chinese_utils import remove_space_between_chinese_chars
from modelscope.utils.constant import Tasks
from modelscope.utils.constant import ModelFile, Tasks
from modelscope.utils.hub import Config, read_config

__all__ = ['TextGenerationPipeline', 'TextGenerationT5Pipeline']

@@ -60,6 +60,8 @@ class TextGenerationPipeline(Pipeline):
            device=device,
            auto_collate=auto_collate)

        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'
        if preprocessor is None:
            self.preprocessor = Preprocessor.from_pretrained(
                self.model.model_dir, first_sequence=first_sequence, **kwargs)

@@ -11,7 +11,7 @@ from modelscope.pipelines.base import Pipeline
from modelscope.pipelines.builder import PIPELINES
from modelscope.preprocessors import (Preprocessor,
                                      TextRankingTransformersPreprocessor)
from modelscope.utils.constant import Tasks
from modelscope.utils.constant import ModelFile, Tasks

__all__ = ['TextRankingPipeline']

@@ -45,6 +45,9 @@ class TextRankingPipeline(Pipeline):
            device=device,
            auto_collate=auto_collate)

        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'

        if preprocessor is None:
            self.preprocessor = Preprocessor.from_pretrained(
                self.model.model_dir,

@@ -11,7 +11,7 @@ from modelscope.outputs import OutputKeys
from modelscope.pipelines.base import Pipeline
from modelscope.pipelines.builder import PIPELINES
from modelscope.preprocessors import Preprocessor
from modelscope.utils.constant import Tasks
from modelscope.utils.constant import ModelFile, Tasks
from modelscope.utils.tensor_utils import (torch_nested_detach,
                                           torch_nested_numpify)

@@ -53,6 +53,9 @@ class TokenClassificationPipeline(Pipeline):
            device=device,
            auto_collate=auto_collate)

        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'

        if preprocessor is None:
            self.preprocessor = Preprocessor.from_pretrained(
                self.model.model_dir,

@@ -46,6 +46,8 @@ class TranslationEvaluationPipeline(Pipeline):

        self.eval_mode = eval_mode
        self.checking_eval_mode()
        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'

        self.preprocessor = TranslationEvaluationPreprocessor(
            self.model.model_dir,

@@ -38,6 +38,9 @@ class TranslationPipeline(Pipeline):
            model: A Model instance.
        """
        super().__init__(model=model, **kwargs)
        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'

        model = self.model.model_dir
        tf.reset_default_graph()

@@ -13,7 +13,7 @@ from modelscope.pipelines.nlp import TokenClassificationPipeline
from modelscope.preprocessors import (
    Preprocessor, TokenClassificationTransformersPreprocessor,
    WordSegmentationPreprocessorThai)
from modelscope.utils.constant import Tasks
from modelscope.utils.constant import ModelFile, Tasks
from modelscope.utils.tensor_utils import (torch_nested_detach,
                                           torch_nested_numpify)

@@ -100,6 +100,10 @@ class WordSegmentationThaiPipeline(MultilingualWordSegmentationPipeline):
            config_file=config_file,
            device=device,
            auto_collate=auto_collate)

        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'

        if preprocessor is None:
            self.preprocessor = WordSegmentationPreprocessorThai(
                self.model.model_dir,

@@ -11,7 +11,7 @@ from modelscope.outputs import OutputKeys
from modelscope.pipelines.base import Pipeline
from modelscope.pipelines.builder import PIPELINES
from modelscope.preprocessors import Preprocessor
from modelscope.utils.constant import Tasks
from modelscope.utils.constant import ModelFile, Tasks

__all__ = ['ZeroShotClassificationPipeline']

@@ -69,6 +69,10 @@ class ZeroShotClassificationPipeline(Pipeline):
            auto_collate=auto_collate)
        self.entailment_id = 0
        self.contradiction_id = 2

        assert isinstance(self.model, Model), \
            f'please check whether model config exists in {ModelFile.CONFIGURATION}'

        if preprocessor is None:
            sequence_length = kwargs.pop('sequence_length', 512)
            self.preprocessor = Preprocessor.from_pretrained(