import dai
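# mount the chatglm_6b_int4 data source; mnt.path points to the local directory holding the model files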
mnt = dai.DataSource("chatglm_6b_int4").mount()
from transformers import AutoTokenizer, AutoModel
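# load the tokenizer and model from the mounted path; run the model in half precision on the GPU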
tokenizer = AutoTokenizer.from_pretrained(mnt.path, trust_remote_code=True)
model = AutoModel.from_pretrained(mnt.path, trust_remote_code=True).half().cuda()
model = model.eval()
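# (per the upstream ChatGLM-6B README, the model can also be loaded with .float() instead of .half().cuda() for CPU-only inference)
# quick smoke test: greet the model and print its reply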
history = []
response, history = model.chat(tokenizer, "你好", history=history)
print(response)
response, history = model.chat(tokenizer, f"中国最好一所大学是?", history=history)
print(response)
response, history = model.chat(tokenizer, f"中国最好的10所大学是?", history=history)
print(response)
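# ChatGLM-6B's remote code also exposes a streaming generator, stream_chat; a minimal sketch,
# assuming the mounted INT4 checkpoint ships the same interface as the official repo.
# Each yield is the full response generated so far, not an incremental delta.
partial = ""
for partial, _ in model.stream_chat(tokenizer, "你好", history=[]):
    pass
print(partial)  # the final, complete reply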
# build a one-line schema description, "cn_stock_bar1d(col1,col2,...)", from the table's metadata
table_fields = ",".join(dai.DataSource("cn_stock_bar1d").metadata["schema"].keys())
table_desc = f"cn_stock_bar1d({table_fields})"
# prompt (Chinese): "You are a professional quant researcher developing factors and strategies on the
# BigQuant AI quant platform. The platform provides the stock bar table {table_desc}; compute each
# day's highest-priced stock, implemented in SQL." A fresh history is passed so earlier turns don't leak in.
response, history = model.chat(tokenizer, f"你是一个专业的量化研究员,使用BigQuant人工智能量化投资平台开发因子和策略。BigQuant平台提供股票行情表 {table_desc},计算每天价格最高的股票,使用SQL实现", history=[])
print(response)
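# For reference, one SQL answer to the task above (illustrative only: it interprets "price" as the
# closing price and assumes cn_stock_bar1d has date, instrument and close columns -- check them
# against table_fields before relying on it):
example_sql = """
SELECT date, instrument, close
FROM (
    SELECT date, instrument, close,
           ROW_NUMBER() OVER (PARTITION BY date ORDER BY close DESC) AS rn
    FROM cn_stock_bar1d
) AS t
WHERE rn = 1
"""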
# best to unmount the data when finished
mnt.unmount()