From 1f2bb82ea6f6c0f72918ecaef11e2d79a3db98cb Mon Sep 17 00:00:00 2001
From: leway
Date: Sun, 7 May 2023 14:51:47 +0800
Subject: [PATCH] Update fid_dialogue_pipeline.py
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

修复在cpu下推理时的错误,这里不应该检查cuda是否可用。这里的逻辑似乎有错误

---
 modelscope/pipelines/nlp/fid_dialogue_pipeline.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/modelscope/pipelines/nlp/fid_dialogue_pipeline.py b/modelscope/pipelines/nlp/fid_dialogue_pipeline.py
index 702f303a..da802868 100644
--- a/modelscope/pipelines/nlp/fid_dialogue_pipeline.py
+++ b/modelscope/pipelines/nlp/fid_dialogue_pipeline.py
@@ -191,8 +191,8 @@ class FidDialoguePipeline(Pipeline):
 
     def postprocess(self, inputs: TokenGeneratorOutput,
                     **postprocess_params) -> Dict[str, Any]:
-        if torch.cuda.is_available():
-            hypotheses = inputs.sequences.detach().cpu().tolist()
+        # if torch.cuda.is_available():
+        hypotheses = inputs.sequences.detach().cpu().tolist()
         response = self.preprocessor_tokenizer.decode(
             hypotheses[0], skip_special_tokens=self.is_t5)