From cba1156f0da09ab78fe5ec63d6a8fe5d4efa12e4 Mon Sep 17 00:00:00 2001 From: wenmeng zhou Date: Tue, 8 Aug 2023 14:53:29 +0800 Subject: [PATCH] fix missing rope_ratio in chatglm2b config (#440) --- modelscope/models/nlp/chatglm2/configuration.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/modelscope/models/nlp/chatglm2/configuration.py b/modelscope/models/nlp/chatglm2/configuration.py index ab40de0e..ec056cce 100644 --- a/modelscope/models/nlp/chatglm2/configuration.py +++ b/modelscope/models/nlp/chatglm2/configuration.py @@ -35,6 +35,7 @@ class ChatGLM2Config(PretrainedConfig): quantization_bit=0, pre_seq_len=None, prefix_projection=False, + rope_ratio=1.0, **kwargs): self.num_layers = num_layers self.vocab_size = padded_vocab_size @@ -61,4 +62,5 @@ class ChatGLM2Config(PretrainedConfig): self.quantization_bit = quantization_bit self.pre_seq_len = pre_seq_len self.prefix_projection = prefix_projection + self.rope_ratio = rope_ratio super().__init__(**kwargs)