From 04002bd8c232d5dbca7930bfdfa1220a4fa102c6 Mon Sep 17 00:00:00 2001 From: "xingjun.wang" Date: Sun, 22 Oct 2023 20:20:11 +0800 Subject: [PATCH] llm_pipeline: debug-log CausalLMOutputWithPast logits and their shape --- modelscope/pipelines/nlp/llm_pipeline.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/modelscope/pipelines/nlp/llm_pipeline.py b/modelscope/pipelines/nlp/llm_pipeline.py index 3dac9783..9aef29a4 100644 --- a/modelscope/pipelines/nlp/llm_pipeline.py +++ b/modelscope/pipelines/nlp/llm_pipeline.py @@ -6,6 +6,7 @@ from typing import Any, Callable, Dict, Iterator, List, Tuple, Union import json import torch from transformers import PreTrainedTokenizer
+from transformers.modeling_outputs import CausalLMOutputWithPast from modelscope import (AutoModelForCausalLM, AutoTokenizer, Pipeline, snapshot_download) @@ -125,11 +126,12 @@ class LLMPipeline(Pipeline): outputs = self.model.generate(**tokens, **forward_params) print(f'>>>self.model.generate: {self.model.generate}')
- outputs_new = self.model(tokens['inputs'])
+ outputs_new: CausalLMOutputWithPast = self.model(tokens['inputs'])
print(
f'\n\n>>outputs_new in _process_single for llm_pipe model call: '
f'\n>data: {outputs_new}'
- f'\n>shape: {outputs_new.shape}'
+ f'\n>logits: {outputs_new.logits}'
+ f'\n>logits shape: {outputs_new.logits.shape}'
f'\n>type: {type(outputs_new)}\n\n')
elif hasattr(self.model, 'model') and hasattr(self.model.model,