From 4cf929f63568f577de166f32a79fbc4f82cff365 Mon Sep 17 00:00:00 2001 From: "xingjun.wang" Date: Sun, 22 Oct 2023 15:34:16 +0800 Subject: [PATCH] Add temporary debug prints to ChatGLM2 forward and LLM pipeline output --- modelscope/models/nlp/chatglm2/text_generation.py | 8 ++++++++ modelscope/pipelines/nlp/llm_pipeline.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/modelscope/models/nlp/chatglm2/text_generation.py b/modelscope/models/nlp/chatglm2/text_generation.py index 21323e64..02fe397c 100644 --- a/modelscope/models/nlp/chatglm2/text_generation.py +++ b/modelscope/models/nlp/chatglm2/text_generation.py @@ -1088,6 +1088,10 @@ class ChatGLM2ForConditionalGeneration(ChatGLMPreTrainedModel): return_dict=return_dict, ) + print( + f'\n>>transformer_outputs in ChatGLM2ForConditionalGeneration forward:\n {transformer_outputs}' + ) + hidden_states = transformer_outputs[0] if return_last_logit: hidden_states = hidden_states[-1:] @@ -1115,6 +1119,10 @@ class ChatGLM2ForConditionalGeneration(ChatGLMPreTrainedModel): output = (lm_logits, ) + transformer_outputs[1:] return ((loss, ) + output) if loss is not None else output + print( + f'\n>>lm_logits in ChatGLM2ForConditionalGeneration forward:\n {lm_logits}' + ) + return CausalLMOutputWithPast( loss=loss, logits=lm_logits, diff --git a/modelscope/pipelines/nlp/llm_pipeline.py b/modelscope/pipelines/nlp/llm_pipeline.py index 7df98398..d509fcc0 100644 --- a/modelscope/pipelines/nlp/llm_pipeline.py +++ b/modelscope/pipelines/nlp/llm_pipeline.py @@ -143,7 +143,7 @@ class LLMPipeline(Pipeline): print(f'>>response in _process_single for llm_pipe: {response}') - print(f'\n>>self.model:\n {self.model}') return response