Files
modelscope/tests/utils/test_hf_util.py
mulin.lyh d30ef8b202 fix huggingface position_ids compatible issue
Link: https://code.alibaba-inc.com/Ali-MaaS/MaaS-lib/codereview/14406558
* fix compatibility issues

* fix transformer compatible issue

* skip case for huggingface link issue

* fix hf autotokenizer case

* Merge branch 'fix_ci_issue' of http://gitlab.alibaba-inc.com/Ali-MaaS/MaaS-lib into fix_ci_issue
2023-10-24 15:18:55 +08:00

60 lines
1.9 KiB
Python

# Copyright (c) Alibaba, Inc. and its affiliates.
import unittest
from transformers import LlamaForCausalLM, LlamaTokenizer
from modelscope import (AutoConfig, AutoModel, AutoModelForCausalLM,
AutoTokenizer, GenerationConfig)
class HFUtilTest(unittest.TestCase):
    """Tests for modelscope's HuggingFace-compatible auto classes.

    Verifies that ``AutoTokenizer``/``AutoModel*``/``AutoConfig``/
    ``GenerationConfig`` re-exported by ``modelscope`` can load hub models
    (including ``trust_remote_code`` models) and that the transformers
    patching still lets vanilla ``Llama*`` classes work.

    NOTE: these tests download model assets from the ModelScope hub, so
    they require network access.
    """

    # The empty setUp/tearDown overrides were dead code; unittest already
    # provides no-op defaults, so they were removed.

    def test_auto_tokenizer(self):
        """A remote-code tokenizer loads and exposes the expected config."""
        tokenizer = AutoTokenizer.from_pretrained(
            'baichuan-inc/Baichuan2-7B-Chat',
            trust_remote_code=True,
            revision='v1.0.3')
        self.assertEqual(tokenizer.vocab_size, 125696)
        self.assertEqual(tokenizer.model_max_length, 4096)
        # Remote-code tokenizers are pure-Python implementations, so the
        # fast (Rust) path must not be active.
        self.assertFalse(tokenizer.is_fast)

    def test_quantization_import(self):
        """Quantization config classes are importable from modelscope."""
        from modelscope import GPTQConfig, BitsAndBytesConfig
        self.assertIsNotNone(BitsAndBytesConfig)
        # Previously GPTQConfig was imported but never checked.
        self.assertIsNotNone(GPTQConfig)

    def test_auto_model(self):
        """A remote-code causal-LM model loads successfully."""
        # NOTE(review): unlike the other tests this one pins no revision,
        # so it follows the default branch — consider pinning for
        # reproducibility once a known-good tag is confirmed.
        model = AutoModelForCausalLM.from_pretrained(
            'baichuan-inc/baichuan-7B', trust_remote_code=True)
        self.assertIsNotNone(model)

    def test_auto_config(self):
        """Model and generation configs load with the expected fields."""
        config = AutoConfig.from_pretrained(
            'baichuan-inc/Baichuan-13B-Chat',
            trust_remote_code=True,
            revision='v1.0.3')
        self.assertEqual(config.model_type, 'baichuan')
        gen_config = GenerationConfig.from_pretrained(
            'baichuan-inc/Baichuan-13B-Chat',
            trust_remote_code=True,
            revision='v1.0.3')
        self.assertEqual(gen_config.assistant_token_id, 196)

    def test_transformer_patch(self):
        """Plain transformers Llama classes still resolve hub models."""
        tokenizer = LlamaTokenizer.from_pretrained(
            'skyline2006/llama-7b', revision='v1.0.1')
        self.assertIsNotNone(tokenizer)
        model = LlamaForCausalLM.from_pretrained(
            'skyline2006/llama-7b', revision='v1.0.1')
        self.assertIsNotNone(model)
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()