Integrating LangChain with Baidu Qianfan to call the ERNIE Bot (文心一言) model:
A minimal example call:
import os

from langchain_community.chat_models import QianfanChatEndpoint
from langchain_core.messages import HumanMessage
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser

os.environ["QIANFAN_AK"] = "your-api-key"
os.environ["QIANFAN_SK"] = "your-secret-key"

chat = QianfanChatEndpoint(
    model="ERNIE-Lite-8K-0308",  # use the smallest model for testing; it is also free
    streaming=False,
)

prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "你是一个{name}"),
        ("human", "我是{user}请推荐关于{book}这本书的扩展阅读书目"),
    ]
)

# res = chat.invoke([HumanMessage(content="你好,你是谁?")])
# print(res.content)

chain = prompt | chat | StrOutputParser()
res = chain.invoke({"name": "图书管理员", "book": "三国演义", "user": "小学生"})
print(res)
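If you want tokens to arrive incrementally instead of waiting for the full reply, the same chain can be consumed as a stream. The following is only a sketch, assuming the standard LangChain Runnable interface (chain.stream) together with streaming=True on the Qianfan endpoint; it is not part of the original example.

import os

from langchain_community.chat_models import QianfanChatEndpoint
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser

os.environ["QIANFAN_AK"] = "your-api-key"
os.environ["QIANFAN_SK"] = "your-secret-key"

# Assumed variant: enable streaming on the endpoint and consume the chain chunk by chunk.
chat = QianfanChatEndpoint(model="ERNIE-Lite-8K-0308", streaming=True)
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "你是一个{name}"),
        ("human", "我是{user}请推荐关于{book}这本书的扩展阅读书目"),
    ]
)
chain = prompt | chat | StrOutputParser()

# chain.stream() yields plain string chunks because StrOutputParser is the last step.
for chunk in chain.stream({"name": "图书管理员", "book": "三国演义", "user": "小学生"}):
    print(chunk, end="", flush=True)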
Using LangChain's OpenAI integration to call DeepSeek hosted on Baidu Qianfan:
Below is a minimal example call:
from langchain_openai import ChatOpenAI

Model = ChatOpenAI(
    base_url="https://qianfan.baidubce.com/v2",  # Qianfan's OpenAI-compatible endpoint
    api_key="your-api-key",
    model="deepseek-r1-distill-llama-8b",
    max_tokens=2000,
)

res = Model.invoke("你好")
print(res)
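Because ChatOpenAI is a regular LangChain chat model, it drops straight into the same prompt | model | parser pipeline used in the Qianfan example above. The sketch below is illustrative; the prompt text and the {topic} placeholder are hypothetical, and you still need your own api_key.

from langchain_openai import ChatOpenAI
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser

model = ChatOpenAI(
    base_url="https://qianfan.baidubce.com/v2",
    api_key="your-api-key",
    model="deepseek-r1-distill-llama-8b",
    max_tokens=2000,
)

# The DeepSeek model is a drop-in replacement in the usual chain.
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "你是一个{name}"),
        ("human", "请用一句话介绍{topic}"),
    ]
)
chain = prompt | model | StrOutputParser()
print(chain.invoke({"name": "历史老师", "topic": "三国演义"}))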
Calling the official DeepSeek API directly from LangChain
The code is as follows:
# First install the package
pip install -U langchain-deepseek
# Apply for an api-key, then set the environment variable
export DEEPSEEK_API_KEY="your-api-key"
from langchain_deepseek import ChatDeepSeek


def CreateChatDeepSeek():
    llm = ChatDeepSeek(
        model="deepseek-chat",
        temperature=0,
        max_tokens=2048,
        timeout=None,
        max_retries=2,
        api_key="your-api-key",  # can be omitted if DEEPSEEK_API_KEY is set in the environment
    )
    return llm


if __name__ == "__main__":
    llm = CreateChatDeepSeek()
    res = llm.invoke("存值2块钱能用deepseek多少个tokens")
    print(res)
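ChatDeepSeek accepts the same message formats as other LangChain chat models, so it can also be driven with a list of (role, content) tuples or composed into a chain. A minimal sketch, assuming the DEEPSEEK_API_KEY environment variable is already set; the prompt texts here are only examples:

from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_deepseek import ChatDeepSeek

llm = ChatDeepSeek(model="deepseek-chat", temperature=0, max_tokens=2048)

# Direct invocation with role/content tuples.
messages = [
    ("system", "You are a helpful translator. Translate the user sentence to English."),
    ("human", "我喜欢编程。"),
]
print(llm.invoke(messages).content)

# Or compose it into the usual prompt | model | parser chain.
prompt = ChatPromptTemplate.from_messages(
    [("system", "You are a helpful assistant."), ("human", "{question}")]
)
chain = prompt | llm | StrOutputParser()
print(chain.invoke({"question": "What is LangChain?"}))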
For more details, see the official documentation:
https://python.langchain.com/api_reference/deepseek/chat_models/langchain_deepseek.ch