Started on the CL version

This commit is contained in:
Lukas Nöllemeyer 2025-02-04 06:27:11 +01:00
parent 17b7c3adb4
commit 0aac3aae10
11 changed files with 1181 additions and 3 deletions

View file

@@ -18,7 +18,7 @@ def root():
PROMPT_TEXT_PREFIX = "<|im_start|>system\nYou are a helpful assistant. You only give short answers.<|im_end|>\n<|im_start|>user\n"
PROMPT_TEXT_POSTFIX = "<|im_end|>\n<|im_start|>assistant\n"
MSG_START_TOKEN = "<|im_start|>"
MSG_START_TOKEN = "<|im_start|>" # these work for Qwen, miniCPM and deepseek, but not for chatglm3
MSG_END_TOKEN = "<|im_end|>"
def msg_to_prompt(user, msg):
@@ -146,7 +146,7 @@ if __name__ == "__main__":
global global_abort
global_abort = True
code = rkllm_model.abort()
return {"code":code}, 200 if code is None else 500
return {"code":code}, 200
# Create a function to receive data sent by the user using a request
@app.route('/rkllm_chat', methods=['POST'])