From 66430171ae3618dc8e86fc39910aa75da54f781f Mon Sep 17 00:00:00 2001
From: "mulin.lyh"
Date: Tue, 17 Oct 2023 22:15:54 +0800
Subject: [PATCH] fix chatglm2 can't find tokenizer issue

Link: https://code.alibaba-inc.com/Ali-MaaS/MaaS-lib/codereview/14335080

* fix chatglm2 can't find tokenizer issue
---
 modelscope/models/nlp/chatglm2/tokenization.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modelscope/models/nlp/chatglm2/tokenization.py b/modelscope/models/nlp/chatglm2/tokenization.py
index 7014dc9c..4523dcdd 100644
--- a/modelscope/models/nlp/chatglm2/tokenization.py
+++ b/modelscope/models/nlp/chatglm2/tokenization.py
@@ -72,7 +72,6 @@ class ChatGLM2Tokenizer(PreTrainedTokenizer):
     model_input_names = ['input_ids', 'attention_mask', 'position_ids']
 
     def __init__(self, vocab_file, padding_side='left', **kwargs):
-        super().__init__(padding_side=padding_side, **kwargs)
         self.name = 'GLMTokenizer'
 
         self.vocab_file = vocab_file
@@ -82,6 +81,7 @@ class ChatGLM2Tokenizer(PreTrainedTokenizer):
             '<eos>': self.tokenizer.eos_id,
             '<pad>': self.tokenizer.pad_id
         }
+        super().__init__(padding_side=padding_side, **kwargs)
 
     def get_command(self, token):
         if token in self.special_tokens:
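
Note: the likely reason this reordering fixes the "can't find tokenizer" error is that newer transformers releases (roughly 4.34 and later) have PreTrainedTokenizer.__init__ call back into the subclass's get_vocab()/added-token handling, which for ChatGLM2Tokenizer requires self.tokenizer and self.special_tokens to already be set; calling super().__init__() first therefore raises an AttributeError before those attributes exist. The sketch below is a minimal, self-contained illustration of that ordering under this assumption; _FakeBase, _FakeSP, BrokenOrder and FixedOrder are hypothetical stand-ins, not modelscope or transformers APIs.

# Minimal sketch (not the modelscope file) of why the call order matters.
# _FakeBase mimics the assumed behaviour of newer transformers, whose
# PreTrainedTokenizer.__init__ calls back into the subclass's get_vocab();
# _FakeSP is a hypothetical stand-in for SPTokenizer.

class _FakeSP:
    eos_id, pad_id = 2, 0


class _FakeBase:
    def __init__(self, **kwargs):
        # The base __init__ uses the subclass's vocab machinery, so every
        # attribute that get_vocab() touches must already exist on `self`.
        self._vocab_at_init = self.get_vocab()


class BrokenOrder(_FakeBase):
    """Old order: super().__init__() runs before self.tokenizer exists."""

    def __init__(self):
        super().__init__()          # AttributeError: no self.tokenizer yet
        self.tokenizer = _FakeSP()

    def get_vocab(self):
        # Like the real get_vocab(): requires self.tokenizer.
        return {'<eos>': self.tokenizer.eos_id, '<pad>': self.tokenizer.pad_id}


class FixedOrder(_FakeBase):
    """Patched order: set up self.tokenizer first, call super().__init__() last."""

    def __init__(self):
        self.tokenizer = _FakeSP()
        super().__init__()          # get_vocab() now succeeds

    def get_vocab(self):
        return {'<eos>': self.tokenizer.eos_id, '<pad>': self.tokenizer.pad_id}


if __name__ == '__main__':
    try:
        BrokenOrder()
    except AttributeError as err:
        print('old order fails:', err)
    print('new order works:', FixedOrder().get_vocab())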