Yingda Chen
2024-11-22 00:48:01 +08:00
parent 023149b552
commit d5c8ffa044
4 changed files with 37 additions and 15 deletions


@@ -110,7 +110,7 @@ def pipeline(task: str = None,
raise ValueError('task or pipeline_name is required')
prefer_llm_pipeline = kwargs.get('llm_first')
if task is not None and task.lower() in [
-    Tasks.text_generation, Tasks.text2text_generation, Tasks.chat
+    Tasks.text_generation, Tasks.chat
]:
# if not specified, prefer llm pipeline for aforementioned tasks
if prefer_llm_pipeline is None:
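
After this change, only text_generation and chat default to the LLM pipeline when llm_first is not given; text2text_generation no longer does. A minimal usage sketch of the llm_first switch (the model id below is a placeholder, not taken from this commit):

    from modelscope.pipelines import pipeline
    from modelscope.utils.constant import Tasks

    # Opt out of the LLM pipeline explicitly, as the updated tests below do;
    # omit llm_first to accept the default routing for the task.
    pipe = pipeline(Tasks.text_generation, model='<model-id>', llm_first=False)
    print(pipe('hello'))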


@@ -262,7 +262,7 @@ class LLMPipeline(Pipeline, PipelineStreamingOutputMixin):
return dict(system=system, prompt=prompt, history=history)
assert model_id in SWIFT_MODEL_ID_MAPPING,\
-    f'Invalid model id {model_id} or Swift framework does not this model.'
+    f'Invalid model id {model_id} or Swift framework does not support this model.'
args = InferArguments(model_type=SWIFT_MODEL_ID_MAPPING[model_id])
model, template = prepare_model_template(
args, device_map=self.device_map)
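
For reference, a hedged standalone sketch of the check this message guards, using the names visible in the hunk above (the helper function and the swift.llm import are assumptions for illustration, not part of this commit):

    from swift.llm import InferArguments, prepare_model_template

    def load_swift_model(model_id, model_id_mapping, device_map=None):
        # Fail fast with a readable error when Swift has no model_type for this id.
        if model_id not in model_id_mapping:
            raise ValueError(
                f'Invalid model id {model_id} or Swift framework does not support this model.')
        args = InferArguments(model_type=model_id_mapping[model_id])
        # Returns the loaded model together with its chat template.
        return prepare_model_template(args, device_map=device_map)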


@@ -19,24 +19,28 @@ class TextGPT3GenerationTest(unittest.TestCase):
@unittest.skipUnless(test_level() >= 0, 'skip test in current test level')
def test_gpt3_1_3B(self):
-pipe = pipeline(Tasks.text_generation, model=self.model_id_1_3B)
+pipe = pipeline(
+    Tasks.text_generation, model=self.model_id_1_3B, llm_first=False)
print(pipe(self.input))
@unittest.skipUnless(test_level() >= 0, 'skip test in current test level')
def test_gpt3_1_3B_with_streaming(self):
-pipe = pipeline(Tasks.text_generation, model=self.model_id_1_3B)
+pipe = pipeline(
+    Tasks.text_generation, model=self.model_id_1_3B, llm_first=False)
for output in pipe.stream_generate(self.input, max_length=64):
print(output, end='\r')
print()
@unittest.skipUnless(test_level() >= 2, 'skip test in current test level')
def test_gpt3_2_7B(self):
-pipe = pipeline(Tasks.text_generation, model=self.model_id_2_7B)
+pipe = pipeline(
+    Tasks.text_generation, model=self.model_id_2_7B, llm_first=False)
print(pipe(self.input))
@unittest.skipUnless(test_level() >= 0, 'skip test in current test level')
def test_gpt3_1_3B_with_args(self):
-pipe = pipeline(Tasks.text_generation, model=self.model_id_1_3B)
+pipe = pipeline(
+    Tasks.text_generation, model=self.model_id_1_3B, llm_first=False)
print(pipe(self.input, top_p=0.9, temperature=0.9, max_length=32))
@unittest.skip('distributed gpt3 13B, skipped')
@@ -62,7 +66,8 @@ class TextGPT3GenerationTest(unittest.TestCase):
|_ mp_rank_06_model_states.pt
|_ mp_rank_07_model_states.pt
"""
-pipe = pipeline(Tasks.text_generation, model=self.model_dir_13B)
+pipe = pipeline(
+    Tasks.text_generation, model=self.model_dir_13B, llm_first=False)
print(pipe(self.input))


@@ -56,7 +56,10 @@ class TextGenerationTest(unittest.TestCase):
first_sequence='sentence',
second_sequence=None)
pipeline_ins = pipeline(
-    task=Tasks.text_generation, model=model, preprocessor=preprocessor)
+    task=Tasks.text_generation,
+    model=model,
+    preprocessor=preprocessor,
+    llm_first=False)
print(pipeline_ins(input))
def run_pipeline_with_model_id(self,
@@ -64,6 +67,7 @@ class TextGenerationTest(unittest.TestCase):
input,
init_kwargs={},
run_kwargs={}):
+init_kwargs['llm_first'] = False
pipeline_ins = pipeline(
task=Tasks.text_generation, model=model_id, **init_kwargs)
print(pipeline_ins(input, **run_kwargs))
@@ -73,12 +77,14 @@ class TextGenerationTest(unittest.TestCase):
input,
init_kwargs={},
run_kwargs={}):
+init_kwargs['llm_first'] = False
pipeline_ins = pipeline(
task=Tasks.text_generation, model=model_id, **init_kwargs)
# set stream inputs
assert isinstance(pipeline_ins, StreamingOutputMixin)
-for output in pipeline_ins.stream_generate(input, **run_kwargs):
+for output in pipeline_ins.stream_generate(
+        input, **run_kwargs, llm_first=False):
print(output, end='\r')
print()
@@ -256,7 +262,10 @@ class TextGenerationTest(unittest.TestCase):
cache_path, first_sequence='sentence', second_sequence=None)
pipeline1 = TextGenerationPipeline(model, preprocessor)
pipeline2 = pipeline(
-    Tasks.text_generation, model=model, preprocessor=preprocessor)
+    Tasks.text_generation,
+    model=model,
+    preprocessor=preprocessor,
+    llm_first=False)
print(
f'pipeline1: {pipeline1(input)}\npipeline2: {pipeline2(input)}'
)
@@ -272,14 +281,17 @@ class TextGenerationTest(unittest.TestCase):
second_sequence=None)
pipeline1 = TextGenerationPipeline(model, preprocessor)
pipeline2 = pipeline(
-    Tasks.text_generation, model=model, preprocessor=preprocessor)
+    Tasks.text_generation,
+    model=model,
+    preprocessor=preprocessor,
+    llm_first=False)
print(
f'pipeline1: {pipeline1(self.gpt3_input)}\npipeline2: {pipeline2(self.gpt3_input)}'
)
@unittest.skipUnless(test_level() >= 2, 'skip test in current test level')
def test_run_with_default_model(self):
-pipeline_ins = pipeline(task=Tasks.text_generation)
+pipeline_ins = pipeline(task=Tasks.text_generation, llm_first=False)
print(
pipeline_ins(
[self.palm_input_zh, self.palm_input_zh, self.palm_input_zh],
@@ -288,13 +300,17 @@ class TextGenerationTest(unittest.TestCase):
@unittest.skipUnless(test_level() >= 2, 'skip test in current test level')
def test_bloom(self):
pipe = pipeline(
-    task=Tasks.text_generation, model='langboat/bloom-1b4-zh')
+    task=Tasks.text_generation,
+    model='langboat/bloom-1b4-zh',
+    llm_first=False)
print(pipe('中国的首都是'))
@unittest.skipUnless(test_level() >= 2, 'skip test in current test level')
def test_gpt_neo(self):
pipe = pipeline(
-    task=Tasks.text_generation, model='langboat/mengzi-gpt-neo-base')
+    task=Tasks.text_generation,
+    model='langboat/mengzi-gpt-neo-base',
+    llm_first=False)
print(
pipe(
'我是',
@@ -308,7 +324,8 @@ class TextGenerationTest(unittest.TestCase):
def test_gpt2(self):
pipe = pipeline(
task=Tasks.text_generation,
-    model='damo/nlp_gpt2_text-generation_english-base')
+    model='damo/nlp_gpt2_text-generation_english-base',
+    llm_first=False)
print(pipe('My name is Teven and I am'))
@unittest.skip('oom error for 7b model')