OliveSensorAPI/demo/cli_internlm2.py
HongCheng a22ec59be5 update cli_internlm2.py
three methods to load model

1. download model in openxlab
2. download model in modelscope
3. offline model
2024-03-23 15:43:01 +09:00

33 lines
1.4 KiB
Python
Raw Permalink Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
from openxlab.model import download
from modelscope import snapshot_download
# download model in openxlab
model_name_or_path =download(model_repo='ajupyter/EmoLLM_internlm2_7b_full',
output='EmoLLM_internlm2_7b_full')
# download model in modelscope
model_name_or_path = snapshot_download('chg0901/EmoLLM-InternLM7B-base')
# offline model
# model_name_or_path = "/root/StableCascade/emollm2/EmoLLM/xtuner_config/merged"
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(model_name_or_path, trust_remote_code=True, torch_dtype=torch.bfloat16, device_map='auto')
model = model.eval()
system_prompt = '你是心理健康助手EmoLLM由EmoLLM团队打造。你旨在通过专业心理咨询协助来访者完成心理诊断。请充分利用专业心理学知识与咨询技术一步步帮助来访者解决心理问题。'
messages = [(system_prompt, '')]
print("=============Welcome to InternLM chatbot, type 'exit' to exit.=============")
while True:
input_text = input("User >>> ")
input_text.replace(' ', '')
if input_text == "exit":
break
response, history = model.chat(tokenizer, input_text, history=messages)
messages.append((input_text, response))
print(f"robot >>> {response}")