From 9a489b13fe2fa535365d40690524f133dc633533 Mon Sep 17 00:00:00 2001
From: mushenL <125954878+mushenL@users.noreply.github.com>
Date: Wed, 9 Aug 2023 11:07:09 +0800
Subject: [PATCH] llama2 max_length change (#452)

default max_length set to 2048

---
 modelscope/pipelines/nlp/llama2_text_generation_pipeline.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modelscope/pipelines/nlp/llama2_text_generation_pipeline.py b/modelscope/pipelines/nlp/llama2_text_generation_pipeline.py
index 3a9d3d44..d366ec9c 100644
--- a/modelscope/pipelines/nlp/llama2_text_generation_pipeline.py
+++ b/modelscope/pipelines/nlp/llama2_text_generation_pipeline.py
@@ -65,7 +65,7 @@ class Llama2TaskPipeline(TextGenerationPipeline):
     def forward(self,
                 inputs,
-                max_length=50,
+                max_length=2048,
                 do_sample=True,
                 top_p=0.85,
                 temperature=1.0,