update readme

This commit is contained in:
wenmeng.zwm
2023-11-10 18:40:37 +08:00
parent b3e8fd0609
commit 331b71092b
5 changed files with 89 additions and 4 deletions

View File

@@ -4,6 +4,7 @@
"Oh No! I'm Surrounded by LLMs!" is an intellectual challenge game. We use LLM to automatically generate corresponding game code based on existing Large Language Model (LLM) dialogue Gradio application codes within the ModelScope community, combined with preset questions from the Zhihu article ["How to Accomplish Tasks with 'Impossible'"](https://zhuanlan.zhihu.com/p/665393240), creating a unique gameplay experience. In this game, players are required to cleverly construct questions that challenge the LLM to provide answers that meet specific conditions.
## News
November 9, 2023 - Added two new questions, and introduced the chatglm-turbo model 🔥🔥🔥
November 7, 2023 - Released the initial demo version 🔥
November 8, 2023 - Segregated level modules and LLM, enabling independent integration of levels and LLM. Pull Requests welcome 🔥 🔥

View File

@@ -5,6 +5,7 @@
## 更新
2023.11.9 新增两道题目, 新增chatglm-turbo模型🔥 🔥🔥
2023.11.7 发布初版demo🔥
2023.11.8 拆分关卡模块和llm,支持关卡独立接入、llm独立接入,欢迎PR 🔥 🔥

View File

@@ -112,9 +112,15 @@ def generate_response(input, model_name):
return ''
def on_submit(input, state):
model_name = os.environ.get('MODEL', 'qwen-plus')
gen_fn = functools.partial(generate_response, model_name=model_name)
def on_submit(input, model_name, state):
# model_name = os.environ.get('MODEL', 'qwen-plus')
name_map = {
'通义千问max': 'qwen-max',
'通义千问plus': 'qwen-plus',
'chatglm-turbo': 'chatglm_turbo',
}
gen_fn = functools.partial(
generate_response, model_name=name_map[model_name])
response = gen_fn(input)
history = [(input, response)]
print(history)
@@ -167,6 +173,11 @@ with block as demo:
你将通过本游戏对大型语言模型产生更深刻的理解。
在本游戏中,你需要构造一个提给一个大型语言模型的问题,使得它回复的答案符合要求。""")
# Model picker for the UI. The default value must be one of the display
# labels in `choices` (not the backend model id): `on_submit` translates the
# selected label through `name_map`, so a non-label value such as
# 'qwen-plus' would make the dropdown default invalid and raise KeyError
# on the first submit.
model_selector = gr.Dropdown(
    label='选择模型',
    choices=['通义千问max', '通义千问plus', 'chatglm-turbo'],
    value='通义千问plus')
question_info = gr.Markdown(
update_question_info(current_chapter_index, current_challenge_index))
challenge_info = gr.Textbox(
@@ -187,7 +198,7 @@ with block as demo:
submit.click(
on_submit,
inputs=[message, state],
inputs=[message, model_selector, state],
outputs=[challenge_result, chatbot, question_info, challenge_info])
shareBtn.click(generate_share_image, inputs=[state], outputs=[shareImg])

View File

@@ -80,6 +80,70 @@ class DashScope:
return ''
class ZhiPu:
    """Chat-completion client backed by the ZhiPu `zhipuai` SDK."""

    def __init__(self, model_name: str = 'chatglm_turbo'):
        """Initializes the ZhiPu instance with a given model name.

        The constructor sets up the model name that will be used for response
        generation and reads the ZhiPu API key from environment variables.

        Args:
            model_name (str): The name of the model to be used.
                Defaults to 'chatglm_turbo'.
        """
        import zhipuai  # Import zhipuai module at runtime

        # Set the API key from the ZHIPU_API_KEY environment variable
        zhipuai.api_key = os.getenv('ZHIPU_API_KEY')
        self.model: str = model_name  # Assign the model name to an instance variable

    def __call__(self, input: Union[str, List[Dict[str, str]]],
                 **kwargs: Any) -> Union[str, None]:
        """Generates a response via `zhipuai.model_api.invoke`.

        Args:
            input: Either a raw user prompt (str) or a pre-built message list
                of ``{'role': ..., 'content': ...}`` dicts.

        Returns:
            The assistant's reply content on success, or '' when the API
            reports an error (the raw response is printed for debugging).

        A successful API response looks like::

            {
                "code":200,
                "msg":"操作成功",
                "data":{
                    "request_id":"8098024428488935671",
                    "task_id":"8098024428488935671",
                    "task_status":"SUCCESS",
                    "choices":[
                        {
                            "role":"assistant",
                            "content":"\" 您好!作为人工智能助手,我很乐意为您提供帮助。请问您有什么问题或者需要解决的事情吗?您可以向我提问,我会尽力为您解答。\""
                        }
                    ],
                    "usage":{
                        "prompt_tokens":2,
                        "completion_tokens":32,
                        "total_tokens":34
                    }
                },
                "success":true
            }
        """
        import zhipuai

        # Normalize a plain string prompt into the message-list format the
        # SDK expects; an already-built message list is passed through as-is.
        if isinstance(input, str):
            messages: List[Dict[str, str]] = [{
                'role': 'user',
                'content': input
            }]
        else:
            messages = input
        response = zhipuai.model_api.invoke(
            model=self.model,
            prompt=messages,
            top_p=0.7,
            temperature=0.9,
        )
        if response['code'] == 200:
            return response['data']['choices'][0]['content']
        else:
            print(f'{self.model} error: ', response)
            return ''
def create_model(model_name: str):
    """Factory function that creates an LLM client based on the model name.

    Args:
        model_name (str): Name of the model. Names starting with 'qwen' are
            served by DashScope; names starting with 'chatglm' by ZhiPu.

    Returns:
        A callable model client (``DashScope`` or ``ZhiPu``) that maps a
        prompt to a response string.

    Raises:
        ValueError: If the model name matches no supported provider prefix.
    """
    if model_name.startswith('qwen'):
        return DashScope(model_name)
    elif model_name.startswith('chatglm'):
        return ZhiPu(model_name)
    else:
        raise ValueError('Other model implementations need to be provided.')


if __name__ == '__main__':
    # Quick manual smoke test (requires ZHIPU_API_KEY to be set).
    model = create_model('chatglm_turbo')
    print(model('输入'))

View File

@@ -2,3 +2,4 @@ dashscope
gradio==3.39.0
pillow
sympy
zhipuai