diff --git a/modelscope/pipelines/nlp/dialog_intent_prediction_pipeline.py b/modelscope/pipelines/nlp/dialog_intent_prediction_pipeline.py
index f53f186c..fa7b23b8 100644
--- a/modelscope/pipelines/nlp/dialog_intent_prediction_pipeline.py
+++ b/modelscope/pipelines/nlp/dialog_intent_prediction_pipeline.py
@@ -40,7 +40,8 @@ class DialogIntentPredictionPipeline(Pipeline):
             preprocessor=preprocessor,
             config_file=config_file,
             device=device,
-            auto_collate=auto_collate)
+            auto_collate=auto_collate,
+            **kwargs)
         if preprocessor is None:
             self.preprocessor = DialogIntentPredictionPreprocessor(
                 self.model.model_dir, **kwargs)
diff --git a/modelscope/pipelines/nlp/dialog_state_tracking_pipeline.py b/modelscope/pipelines/nlp/dialog_state_tracking_pipeline.py
index 207b4f81..75f12a67 100644
--- a/modelscope/pipelines/nlp/dialog_state_tracking_pipeline.py
+++ b/modelscope/pipelines/nlp/dialog_state_tracking_pipeline.py
@@ -42,7 +42,9 @@ class DialogStateTrackingPipeline(Pipeline):
             preprocessor=preprocessor,
             config_file=config_file,
             device=device,
-            auto_collate=auto_collate)
+            auto_collate=auto_collate,
+            compile=kwargs.pop('compile', False),
+            compile_options=kwargs.pop('compile_options', {}))

         if preprocessor is None:
             self.preprocessor = DialogStateTrackingPreprocessor(
diff --git a/modelscope/pipelines/nlp/document_grounded_dialog_generate_pipeline.py b/modelscope/pipelines/nlp/document_grounded_dialog_generate_pipeline.py
index 5fc1a193..8c773dfe 100644
--- a/modelscope/pipelines/nlp/document_grounded_dialog_generate_pipeline.py
+++ b/modelscope/pipelines/nlp/document_grounded_dialog_generate_pipeline.py
@@ -46,7 +46,8 @@ class DocumentGroundedDialogGeneratePipeline(Pipeline):
             preprocessor=preprocessor,
             config_file=config_file,
             device=device,
-            auto_collate=auto_collate)
+            auto_collate=auto_collate,
+            **kwargs)

         if preprocessor is None:
             self.preprocessor = DocumentGroundedDialogGeneratePreprocessor(
diff --git a/modelscope/pipelines/nlp/document_grounded_dialog_rerank_pipeline.py b/modelscope/pipelines/nlp/document_grounded_dialog_rerank_pipeline.py
index d72366e9..8fdef380 100644
--- a/modelscope/pipelines/nlp/document_grounded_dialog_rerank_pipeline.py
+++ b/modelscope/pipelines/nlp/document_grounded_dialog_rerank_pipeline.py
@@ -64,7 +64,8 @@ class DocumentGroundedDialogRerankPipeline(Pipeline):
             config_file=config_file,
             device=device,
             auto_collate=auto_collate,
-            seed=seed)
+            seed=seed,
+            **kwarg)
         self.model = model
         self.preprocessor = preprocessor
         self.device = device
diff --git a/modelscope/pipelines/nlp/document_grounded_dialog_retrieval_pipeline.py b/modelscope/pipelines/nlp/document_grounded_dialog_retrieval_pipeline.py
index e3461b09..c3fb1a32 100644
--- a/modelscope/pipelines/nlp/document_grounded_dialog_retrieval_pipeline.py
+++ b/modelscope/pipelines/nlp/document_grounded_dialog_retrieval_pipeline.py
@@ -55,7 +55,8 @@ class DocumentGroundedDialogRetrievalPipeline(Pipeline):
             preprocessor=preprocessor,
             config_file=config_file,
             device=device,
-            auto_collate=auto_collate)
+            auto_collate=auto_collate,
+            **kwargs)

         if preprocessor is None:
             self.preprocessor = DocumentGroundedDialogRetrievalPreprocessor(
diff --git a/modelscope/pipelines/nlp/document_segmentation_pipeline.py b/modelscope/pipelines/nlp/document_segmentation_pipeline.py
index 6e2121c3..6e195ed0 100644
--- a/modelscope/pipelines/nlp/document_segmentation_pipeline.py
+++ b/modelscope/pipelines/nlp/document_segmentation_pipeline.py
@@ -48,8 +48,14 @@ class DocumentSegmentationPipeline(Pipeline):
             preprocessor=preprocessor,
             config_file=config_file,
             device=device,
-            auto_collate=auto_collate)
+            auto_collate=auto_collate,
+            **kwargs)
+        kwargs = kwargs
+        if 'compile' in kwargs.keys():
+            kwargs.pop('compile')
+        if 'compile_options' in kwargs.keys():
+            kwargs.pop('compile_options')

         self.model_dir = self.model.model_dir
         self.model_cfg = self.model.model_cfg
         if preprocessor is None:
diff --git a/modelscope/pipelines/nlp/extractive_summarization_pipeline.py b/modelscope/pipelines/nlp/extractive_summarization_pipeline.py
index 1581690e..c01f28fc 100644
--- a/modelscope/pipelines/nlp/extractive_summarization_pipeline.py
+++ b/modelscope/pipelines/nlp/extractive_summarization_pipeline.py
@@ -41,7 +41,14 @@ class ExtractiveSummarizationPipeline(Pipeline):
             preprocessor=preprocessor,
             config_file=config_file,
             device=device,
-            auto_collate=auto_collate)
+            auto_collate=auto_collate,
+            **kwargs)
+
+        kwargs = kwargs
+        if 'compile' in kwargs.keys():
+            kwargs.pop('compile')
+        if 'compile_options' in kwargs.keys():
+            kwargs.pop('compile_options')

         self.model_dir = self.model.model_dir
         self.model_cfg = self.model.model_cfg
diff --git a/modelscope/pipelines/nlp/feature_extraction_pipeline.py b/modelscope/pipelines/nlp/feature_extraction_pipeline.py
index 6131fa61..0f6979ba 100644
--- a/modelscope/pipelines/nlp/feature_extraction_pipeline.py
+++ b/modelscope/pipelines/nlp/feature_extraction_pipeline.py
@@ -53,7 +53,8 @@ class FeatureExtractionPipeline(Pipeline):
             preprocessor=preprocessor,
             config_file=config_file,
             device=device,
-            auto_collate=auto_collate)
+            auto_collate=auto_collate,
+            **kwargs)

         assert isinstance(self.model, Model), \
             f'please check whether model config exists in {ModelFile.CONFIGURATION}'
diff --git a/modelscope/pipelines/nlp/fill_mask_pipeline.py b/modelscope/pipelines/nlp/fill_mask_pipeline.py
index dc12efa7..6bc7622f 100644
--- a/modelscope/pipelines/nlp/fill_mask_pipeline.py
+++ b/modelscope/pipelines/nlp/fill_mask_pipeline.py
@@ -62,7 +62,8 @@ class FillMaskPipeline(Pipeline):
             preprocessor=preprocessor,
             config_file=config_file,
             device=device,
-            auto_collate=auto_collate)
+            auto_collate=auto_collate,
+            **kwargs)

         assert isinstance(self.model, Model), \
             f'please check whether model config exists in {ModelFile.CONFIGURATION}'
diff --git a/modelscope/pipelines/nlp/named_entity_recognition_pipeline.py b/modelscope/pipelines/nlp/named_entity_recognition_pipeline.py
index ba174bae..2cf30037 100644
--- a/modelscope/pipelines/nlp/named_entity_recognition_pipeline.py
+++ b/modelscope/pipelines/nlp/named_entity_recognition_pipeline.py
@@ -55,7 +55,8 @@ class NamedEntityRecognitionPipeline(TokenClassificationPipeline):
             preprocessor=preprocessor,
             config_file=config_file,
             device=device,
-            auto_collate=auto_collate)
+            auto_collate=auto_collate,
+            **kwargs)

         assert isinstance(self.model, Model), \
             f'please check whether model config exists in {ModelFile.CONFIGURATION}'
diff --git a/modelscope/pipelines/nlp/sentence_embedding_pipeline.py b/modelscope/pipelines/nlp/sentence_embedding_pipeline.py
index 7aaa073b..4e01397d 100644
--- a/modelscope/pipelines/nlp/sentence_embedding_pipeline.py
+++ b/modelscope/pipelines/nlp/sentence_embedding_pipeline.py
@@ -42,7 +42,8 @@ class SentenceEmbeddingPipeline(Pipeline):
             preprocessor=preprocessor,
             config_file=config_file,
             device=device,
-            auto_collate=auto_collate)
+            auto_collate=auto_collate,
+            **kwargs)

         assert isinstance(self.model, Model), \
             f'please check whether model config exists in {ModelFile.CONFIGURATION}'
diff --git a/modelscope/pipelines/nlp/siamese_uie_pipeline.py b/modelscope/pipelines/nlp/siamese_uie_pipeline.py
index 21582900..cdbd9119 100644
--- a/modelscope/pipelines/nlp/siamese_uie_pipeline.py
+++ b/modelscope/pipelines/nlp/siamese_uie_pipeline.py
@@ -67,7 +67,8 @@ class SiameseUiePipeline(Pipeline):
             preprocessor=preprocessor,
             config_file=config_file,
             device=device,
-            auto_collate=auto_collate)
+            auto_collate=auto_collate,
+            **kwargs)

         assert isinstance(self.model, Model), \
             f'please check whether model config exists in {ModelFile.CONFIGURATION}'
diff --git a/modelscope/pipelines/nlp/table_question_answering_pipeline.py b/modelscope/pipelines/nlp/table_question_answering_pipeline.py
index 365c6c6c..0472ecb8 100644
--- a/modelscope/pipelines/nlp/table_question_answering_pipeline.py
+++ b/modelscope/pipelines/nlp/table_question_answering_pipeline.py
@@ -51,7 +51,8 @@ class TableQuestionAnsweringPipeline(Pipeline):
             preprocessor=preprocessor,
             config_file=config_file,
             device=device,
-            auto_collate=auto_collate)
+            auto_collate=auto_collate,
+            **kwargs)

         assert isinstance(self.model, Model), \
             f'please check whether model config exists in {ModelFile.CONFIGURATION}'
diff --git a/modelscope/pipelines/nlp/text_generation_pipeline.py b/modelscope/pipelines/nlp/text_generation_pipeline.py
index 61f3a421..2b851dc4 100644
--- a/modelscope/pipelines/nlp/text_generation_pipeline.py
+++ b/modelscope/pipelines/nlp/text_generation_pipeline.py
@@ -58,7 +58,8 @@ class TextGenerationPipeline(Pipeline):
             preprocessor=preprocessor,
             config_file=config_file,
             device=device,
-            auto_collate=auto_collate)
+            auto_collate=auto_collate,
+            **kwargs)

         assert isinstance(self.model, Model), \
             f'please check whether model config exists in {ModelFile.CONFIGURATION}'
diff --git a/modelscope/pipelines/nlp/text_ranking_pipeline.py b/modelscope/pipelines/nlp/text_ranking_pipeline.py
index 1b313acc..a42baaa2 100644
--- a/modelscope/pipelines/nlp/text_ranking_pipeline.py
+++ b/modelscope/pipelines/nlp/text_ranking_pipeline.py
@@ -43,7 +43,8 @@ class TextRankingPipeline(Pipeline):
             preprocessor=preprocessor,
             config_file=config_file,
             device=device,
-            auto_collate=auto_collate)
+            auto_collate=auto_collate,
+            **kwargs)

         assert isinstance(self.model, Model), \
             f'please check whether model config exists in {ModelFile.CONFIGURATION}'
diff --git a/modelscope/pipelines/nlp/translation_evaluation_pipeline.py b/modelscope/pipelines/nlp/translation_evaluation_pipeline.py
index 1f8ba79a..a9e929d1 100644
--- a/modelscope/pipelines/nlp/translation_evaluation_pipeline.py
+++ b/modelscope/pipelines/nlp/translation_evaluation_pipeline.py
@@ -42,7 +42,7 @@ class TranslationEvaluationPipeline(Pipeline):
                 `"EvaluationMode.SRC"`, `"EvaluationMode.REF"`. Aside from hypothesis, the
                 source/reference/source+reference can be presented during evaluation.
         """
-        super().__init__(model=model, preprocessor=preprocessor)
+        super().__init__(model=model, preprocessor=preprocessor, **kwargs)

         self.eval_mode = eval_mode
         self.checking_eval_mode()
diff --git a/modelscope/pipelines/nlp/user_satisfaction_estimation_pipeline.py b/modelscope/pipelines/nlp/user_satisfaction_estimation_pipeline.py
index fc55dc84..76fcd7a8 100644
--- a/modelscope/pipelines/nlp/user_satisfaction_estimation_pipeline.py
+++ b/modelscope/pipelines/nlp/user_satisfaction_estimation_pipeline.py
@@ -26,7 +26,8 @@ class UserSatisfactionEstimationPipeline(Pipeline):
                  preprocessor: DialogueClassificationUsePreprocessor = None,
                  config_file: str = None,
                  device: str = 'gpu',
-                 auto_collate=True):
+                 auto_collate=True,
+                 **kwargs):
         """The inference pipeline for the user satisfaction estimation task.

         Args:
@@ -49,7 +50,8 @@ class UserSatisfactionEstimationPipeline(Pipeline):
             preprocessor=preprocessor,
             config_file=config_file,
             device=device,
-            auto_collate=auto_collate)
+            auto_collate=auto_collate,
+            **kwargs)

         if hasattr(self.preprocessor, 'id2label'):
             self.id2label = self.preprocessor.id2label
diff --git a/modelscope/pipelines/nlp/zero_shot_classification_pipeline.py b/modelscope/pipelines/nlp/zero_shot_classification_pipeline.py
index 5bc611fb..9cd27adc 100644
--- a/modelscope/pipelines/nlp/zero_shot_classification_pipeline.py
+++ b/modelscope/pipelines/nlp/zero_shot_classification_pipeline.py
@@ -66,7 +66,8 @@ class ZeroShotClassificationPipeline(Pipeline):
             preprocessor=preprocessor,
             config_file=config_file,
             device=device,
-            auto_collate=auto_collate)
+            auto_collate=auto_collate,
+            **kwargs)

         self.entailment_id = 0
         self.contradiction_id = 2