通过 Ollama 跑本地模型,通过 langchain 进行调用。
# 模型列表参考:https://ollama.com/library
ollama pull [model] # 拉取模型

import { ChatBaiduWenxin } from "@langchain/community/chat_models/baiduwenxin";
// Requires Node >= 18.0.0.
// Instantiate Baidu Wenxin (ERNIE) chat model via LangChain.
// Credentials are read from the environment; both values are required
// by the Wenxin API to obtain an access token.
const ernieTurbo = new ChatBaiduWenxin({
  baiduApiKey: process.env.BAIDU_API_KEY, // In Node.js defaults to process.env.BAIDU_API_KEY
  baiduSecretKey: process.env.BAIDU_SECRET_KEY, // In Node.js defaults to process.env.BAIDU_SECRET_KEY
});