Predibase
Predibase 讓您訓練、微調和部署任何 ML 模型,從線性迴歸到大型語言模型。
此範例示範如何將 Langchain 與部署在 Predibase 上的模型一起使用
設定 (Setup)
若要執行此筆記本,您需要一個 Predibase 帳戶和一個 API 金鑰。
您還需要安裝 Predibase Python 套件
%pip install --upgrade --quiet predibase
import os

# Expose the Predibase API token to the SDK through the environment.
# Replace the placeholder with your actual key.
os.environ.update({"PREDIBASE_API_TOKEN": "{PREDIBASE_API_TOKEN}"})
初始呼叫 (Initial Call)
from langchain_community.llms import Predibase

# Connect LangChain to the "mistral-7b" base model served by Predibase.
pb_api_key = os.environ.get("PREDIBASE_API_TOKEN")
model = Predibase(model="mistral-7b", predibase_api_key=pb_api_key)
API 參考:Predibase
from langchain_community.llms import Predibase

# Base model plus a fine-tuned adapter hosted at Predibase.
# For Predibase-hosted adapters, adapter_version must be specified.
model = Predibase(
    model="mistral-7b",
    predibase_api_key=os.environ.get("PREDIBASE_API_TOKEN"),
    predibase_sdk_version=None,  # optional; defaults to the latest Predibase SDK version
    adapter_id="e2e_nlg",
    adapter_version=1,
    api_token=os.environ.get("HUGGING_FACE_HUB_TOKEN"),
    max_new_tokens=5,  # default is 256
)
API 參考:Predibase
from langchain_community.llms import Predibase

# Base model plus a fine-tuned adapter hosted on HuggingFace.
# adapter_version does not apply to HuggingFace-hosted adapters and is ignored.
model = Predibase(
    model="mistral-7b",
    predibase_api_key=os.environ.get("PREDIBASE_API_TOKEN"),
    predibase_sdk_version=None,  # optional; defaults to the latest Predibase SDK version
    adapter_id="predibase/e2e_nlg",
    api_token=os.environ.get("HUGGING_FACE_HUB_TOKEN"),
    max_new_tokens=5,  # default is 256
)
API 參考:Predibase
# Per-call keyword arguments dynamically override the model's "generate()" settings.
response = model.invoke(
    "Can you recommend me a nice dry wine?",
    temperature=0.5,
    max_new_tokens=1024,
)
print(response)
鏈呼叫設定 (Chain Call Setup)
from langchain_community.llms import Predibase

# Base model for the chain examples below (no adapter attached).
model = Predibase(
    model="mistral-7b",
    predibase_api_key=os.environ.get("PREDIBASE_API_TOKEN"),
    predibase_sdk_version=None,  # optional; defaults to the latest Predibase SDK version
    api_token=os.environ.get("HUGGING_FACE_HUB_TOKEN"),
    max_new_tokens=5,  # default is 256
)
API 參考:Predibase
# Variant: fine-tuned adapter hosted at Predibase (adapter_version is required).
model = Predibase(
    model="mistral-7b",
    predibase_api_key=os.environ.get("PREDIBASE_API_TOKEN"),
    predibase_sdk_version=None,  # optional; defaults to the latest Predibase SDK version
    adapter_id="e2e_nlg",
    adapter_version=1,
    api_token=os.environ.get("HUGGING_FACE_HUB_TOKEN"),
    max_new_tokens=5,  # default is 256
)
# Variant: fine-tuned adapter hosted on HuggingFace
# (adapter_version does not apply and is ignored).
llm = Predibase(
    model="mistral-7b",
    predibase_api_key=os.environ.get("PREDIBASE_API_TOKEN"),
    predibase_sdk_version=None,  # optional; defaults to the latest Predibase SDK version
    adapter_id="predibase/e2e_nlg",
    api_token=os.environ.get("HUGGING_FACE_HUB_TOKEN"),
    max_new_tokens=5,  # default is 256
)
SequentialChain
from langchain.chains import LLMChain
from langchain_core.prompts import PromptTemplate
API 參考:LLMChain | PromptTemplate
# Chain #1: given a play title, draft a synopsis.
synopsis_template = (
    "You are a playwright. Given the title of play, it is your job to write a synopsis for that title.\n"
    "Title: {title}\n"
    "Playwright: This is a synopsis for the above play:"
)
synopsis_prompt = PromptTemplate(input_variables=["title"], template=synopsis_template)
synopsis_chain = LLMChain(llm=llm, prompt=synopsis_prompt)
# Chain #2: given a synopsis, write a critic's review.
review_template = (
    "You are a play critic from the New York Times. Given the synopsis of play, it is your job to write a review for that play.\n"
    "Play Synopsis:\n"
    "{synopsis}\n"
    "Review from a New York Times play critic of the above play:"
)
review_prompt = PromptTemplate(input_variables=["synopsis"], template=review_template)
review_chain = LLMChain(llm=llm, prompt=review_prompt)
# Compose the two chains so the title flows through synopsis into review.
from langchain.chains import SimpleSequentialChain

overall_chain = SimpleSequentialChain(
    chains=[synopsis_chain, review_chain],
    verbose=True,
)
API 參考:SimpleSequentialChain
review = overall_chain.run("Tragedy at sunset on the beach")
微調的 LLM(使用您自己來自 Predibase 的微調 LLM)(Fine-tuned LLM (Use your own fine-tuned LLM from Predibase))
from langchain_community.llms import Predibase

# Use your own fine-tuned LLM served from Predibase.
# Replace "my-base-LLM" with the name of a serverless base model of your
# choice in Predibase. All adapter arguments are optional.
model = Predibase(
    model="my-base-LLM",
    predibase_api_key=os.environ.get("PREDIBASE_API_TOKEN"),
    predibase_sdk_version=None,  # optional; defaults to the latest Predibase SDK version
    adapter_id="my-finetuned-adapter-id",  # Predibase-hosted and HuggingFace-hosted repos both supported.
    adapter_version=1,  # required for Predibase-hosted adapters; ignored for HuggingFace-hosted ones
    api_token=os.environ.get("HUGGING_FACE_HUB_TOKEN"),
    max_new_tokens=5,  # default is 256
)
API 參考:Predibase
# Optionally use `kwargs` to dynamically overwrite "generate()" settings.
# response = model.invoke("Can you help categorize the following emails into positive, negative, and neutral?", **{"temperature": 0.5, "max_new_tokens": 1024})