feat: all other ollama models (#1174)

* add cases

* new models

---------

Co-authored-by: suluyan <suluyan.sly@alibaba-inc.com>
Author: suluyana
Date: 2025-01-10 15:42:22 +08:00
Committed by: GitHub
Commit: 9aa661118c (parent cc6fbbab5f)
2 changed files with 153 additions and 21 deletions

Changed file 1 of 2 (Ollama template definitions, template_info):

@@ -230,6 +230,10 @@ template_info = [
         modelfile_prefix=
         'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/dolphin-mistral',
     ),
+    TemplateInfo(
+        template_regex=f'.*{cases("dolphin3", "dolphin-3")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/dolphin3'),
     # "phi"
     TemplateInfo(
@@ -251,6 +255,12 @@ template_info = [
         modelfile_prefix=
         'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/phi3',
     ),
+    TemplateInfo(
+        template_regex=
+        f'.*{cases("phi4", "phi-4")}{no_multi_modal()}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/phi4',
+    ),
     TemplateInfo(
         template_regex=
         f'.*{cases("phi")}{no_multi_modal()}.*',
@@ -591,7 +601,7 @@ template_info = [
         template_regex=
         f'.*{cases("deepseek")}.*{cases("v2")}{no("v2.5")}{no_multi_modal()}.*{chat_suffix}.*',
         modelfile_prefix=
-        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/deepseek_v2',
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/deepseek-v2',
     ),
     # deepseek_coder
@@ -623,6 +633,94 @@ template_info = [
         template=TemplateType.telechat_v2,
         template_regex=f'.*{cases("TeleChat")}.*{cases("v2")}.*'),
+    # tulu3
+    TemplateInfo(
+        template_regex=f'.*{cases("tulu3", "tulu-3")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/tulu3'),
+    # athene-v2
+    TemplateInfo(
+        template_regex=f'.*{cases("athene-v2")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/athene-v2'),
+    # granite
+    TemplateInfo(
+        template_regex=f'.*{cases("granite-guardian-3")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/granite3-guardian'),
+    TemplateInfo(
+        template_regex=f'.*{cases("granite")}.*{cases("code")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/granite-code'),
+    TemplateInfo(
+        template_regex=f'.*{cases("granite-3.1")}.*{cases("2b", "8b")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/granite3.1-dense'),
+    TemplateInfo(
+        template_regex=f'.*{cases("granite-3.1")}.*{cases("1b", "3b")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/granite3.1-moe'),
+    TemplateInfo(
+        template_regex=f'.*{cases("granite-embedding")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/granite-embedding'),
+    TemplateInfo(
+        template_regex=f'.*{cases("granite-3")}.*{cases("2b", "8b")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/granite3-dense'),
+    TemplateInfo(
+        template_regex=f'.*{cases("granite-3")}.*{cases("1b", "3b")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/granite3-moe'),
+    # opencoder
+    TemplateInfo(
+        template_regex=f'.*{cases("opencoder")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/opencoder'),
+    # smollm
+    TemplateInfo(
+        template_regex=f'.*{cases("smollm2")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/smollm2'),
+    TemplateInfo(
+        template_regex=f'.*{cases("smollm")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/smollm'),
+    # 'aya'
+    TemplateInfo(
+        template_regex=f'.*{cases("aya-expanse")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/aya-expanse'),
+    TemplateInfo(
+        template_regex=f'.*{cases("aya")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/aya'),
+    # falcon
+    TemplateInfo(
+        template_regex=f'.*{cases("falcon3")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/falcon3'),
+    TemplateInfo(
+        template_regex=f'.*{cases("falcon")}.*{cases("-2")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/falcon2'),
+    TemplateInfo(
+        template_regex=f'.*{cases("falcon")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/falcon'),
+    # smallthinker
+    TemplateInfo(
+        template_regex=f'.*{cases("smallthinker")}.*',
+        modelfile_prefix=
+        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/smallthinker'),
     TemplateInfo(
         template_regex=f'.*{cases("nomic-embed-text")}.*',
         modelfile_prefix=
@@ -651,10 +749,6 @@ template_info = [
         template_regex=f'.*{cases("starcoder")}.*',
         modelfile_prefix=
         'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/starcoder'),
-    TemplateInfo(
-        template_regex=f'.*{cases("granite")}.*{cases("code")}.*',
-        modelfile_prefix=
-        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/granite-code'),
     TemplateInfo(
         template_regex=f'.*{cases("all-minilm")}.*',
         modelfile_prefix=
@@ -663,10 +757,6 @@ template_info = [
         template_regex=f'.*{cases("openchat")}.*',
         modelfile_prefix=
         'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/openchat'),
-    TemplateInfo(
-        template_regex=f'.*{cases("aya")}.*',
-        modelfile_prefix=
-        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/aya'),
     TemplateInfo(
         template_regex=f'.*{cases("openhermes")}.*',
         modelfile_prefix=
@@ -687,10 +777,6 @@ template_info = [
         template_regex=f'.*{cases("xwin")}.*{cases("lm")}.*',
         modelfile_prefix=
         'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/xwinlm'),
-    TemplateInfo(
-        template_regex=f'.*{cases("smollm")}.*',
-        modelfile_prefix=
-        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/smollm'),
     TemplateInfo(
         template_regex=f'.*{cases("sqlcoder")}.*',
         modelfile_prefix=
@@ -699,14 +785,6 @@ template_info = [
         template_regex=f'.*{cases("starling-lm")}.*',
         modelfile_prefix=
         'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/starling-lm'),
-    TemplateInfo(
-        template_regex=f'.*{cases("falcon")}.*{cases("-2")}.*',
-        modelfile_prefix=
-        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/falcon2'),
-    TemplateInfo(
-        template_regex=f'.*{cases("falcon")}.*',
-        modelfile_prefix=
-        'https://modelscope.oss-cn-beijing.aliyuncs.com/llm_template/ollama/falcon'),
     TemplateInfo(
         template_regex=f'.*{cases("solar-pro")}.*',
         modelfile_prefix=
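
The regexes in the hunks above lean on small helpers (cases, no, no_multi_modal) defined elsewhere in the file and not shown in this diff. The sketch below is one plausible way such helpers could behave, included only to make the patterns easier to read; it is an assumption, not the repository's implementation.

import re


# Assumed stand-ins for the helpers used in template_regex above; the real
# implementations in the repository may differ.
def cases(*aliases):
    """Case-insensitive alternation over the given model-name spellings."""
    return '(?i:' + '|'.join(re.escape(a) for a in aliases) + ')'


def no(*words):
    """Negative lookahead: reject names that go on to contain any of these words."""
    return ''.join('(?!.*(?i:{}))'.format(re.escape(w)) for w in words)


def no_multi_modal():
    """Hypothetical exclusion of multimodal variants (vl / vision / audio)."""
    return no('vl', 'vision', 'audio')


# With these stand-ins, the phi4 rule added above becomes an ordinary regex.
pattern = f'.*{cases("phi4", "phi-4")}{no_multi_modal()}.*'
assert re.fullmatch(pattern, 'AI-ModelScope/phi-4')
assert not re.fullmatch(pattern, 'phi-4-vision-GGUF')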

Changed file 2 of 2 (unit tests, class TestToOllama):

@@ -311,6 +311,60 @@ class TestToOllama(unittest.TestCase):
                               'llama3.3')
         _test_check_tmpl_type('bartowski/EXAONE-3.5-7.8B-Instruct-GGUF',
                               'exaone3.5')
+        _test_check_tmpl_type(
+            'QuantFactory/Tulu-3.1-8B-SuperNova-Smart-GGUF',
+            'tulu3',
+            gguf_meta={'general.name': 'Tulu 3.1 8B SuperNova'})
+        _test_check_tmpl_type(
+            'bartowski/Athene-V2-Chat-GGUF',
+            'athene-v2',
+            gguf_meta={'general.name': 'Athene V2 Chat'})
+        _test_check_tmpl_type(
+            'QuantFactory/granite-guardian-3.0-2b-GGUF',
+            'granite3-guardian',
+            gguf_meta={'general.name': 'Models'})
+        _test_check_tmpl_type('lmstudio-community/OpenCoder-8B-Instruct-GGUF',
+                              'opencoder')
+        _test_check_tmpl_type(
+            'QuantFactory/SmolLM2-1.7B-Instruct-GGUF',
+            'smollm2',
+            gguf_meta={'general.name': 'Smollm2 1.7B 8k Mix7 Ep2 v2'})
+        _test_check_tmpl_type(
+            'prithivMLmods/Aya-Expanse-8B-GGUF',
+            'aya-expanse',
+            gguf_meta={'general.name': 'Aya Expanse 8b'})
+        _test_check_tmpl_type('lmstudio-community/Falcon3-7B-Instruct-GGUF',
+                              'falcon3')
+        _test_check_tmpl_type(
+            'lmstudio-community/granite-3.1-8b-instruct-GGUF',
+            'granite3.1-dense',
+            gguf_meta={'general.name': 'Granite 3.1 8b Instruct'})
+        _test_check_tmpl_type(
+            'lmstudio-community/granite-3.1-2b-instruct-GGUF',
+            'granite3.1-dense',
+            gguf_meta={'general.name': 'Granite 3.1 2b Instruct'})
+        _test_check_tmpl_type(
+            'lmstudio-community/granite-embedding-278m-multilingual-GGUF',
+            'granite-embedding',
+            gguf_meta={'general.name': 'Granite Embedding 278m Multilingual'})
+        _test_check_tmpl_type(
+            'QuantFactory/granite-3.1-3b-a800m-instruct-GGUF',
+            'granite3.1-moe',
+            gguf_meta={'general.name': 'Granite 3.1 3b A800M Base'})
+        _test_check_tmpl_type(
+            'bartowski/granite-3.1-1b-a400m-instruct-GGUF',
+            'granite3.1-moe',
+            gguf_meta={'general.name': 'Granite 3.1 1b A400M Instruct'})
+        _test_check_tmpl_type(
+            'bartowski/SmallThinker-3B-Preview-GGUF',
+            'smallthinker',
+            gguf_meta={'general.name': 'SmallThinker 3B Preview'})
+        _test_check_tmpl_type(
+            'bartowski/Dolphin3.0-Llama3.1-8B-GGUF',
+            'dolphin3',
+            gguf_meta={'general.name': 'Dolphin 3.0 Llama 3.1 8B'})
+        _test_check_tmpl_type(
+            'AI-ModelScope/phi-4', 'phi4', gguf_meta={'general.name': 'Phi 4'})
 if __name__ == '__main__':
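
The new test cases above all go through _test_check_tmpl_type, which is defined earlier in the test class and not shown in this hunk. As a rough standalone illustration of the same first-match lookup idea, the snippet below uses hypothetical TEMPLATE_RULES and match_template names and mirrors only a few of the entries added in this commit; the repository's real lookup logic may differ.

import re

# Hypothetical stand-in for the repository's lookup: scan (regex, template)
# pairs in order and return the first template whose pattern matches the name.
# TEMPLATE_RULES mirrors only a handful of the entries added in this commit;
# ordering matters, e.g. granite-3.1 rules must come before plain granite-3.
TEMPLATE_RULES = [
    (r'.*(?i:tulu3|tulu-3).*', 'tulu3'),
    (r'.*(?i:granite-3\.1).*(?i:2b|8b).*', 'granite3.1-dense'),
    (r'.*(?i:granite-3\.1).*(?i:1b|3b).*', 'granite3.1-moe'),
    (r'.*(?i:smallthinker).*', 'smallthinker'),
]


def match_template(repo_name):
    for pattern, template in TEMPLATE_RULES:
        if re.fullmatch(pattern, repo_name):
            return template
    return None


print(match_template('lmstudio-community/granite-3.1-8b-instruct-GGUF'))  # granite3.1-dense
print(match_template('bartowski/SmallThinker-3B-Preview-GGUF'))           # smallthinker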