from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_openai import ChatOpenAI

# Prompt with a single {topic} input variable.
prompt = ChatPromptTemplate.from_template("Tell me a short joke about {topic}")
output_parser = StrOutputParser()
model = ChatOpenAI(model="gpt-3.5-turbo")

# LCEL chain: RunnablePassthrough feeds each raw input string into the
# prompt's "topic" variable, then prompt -> model -> string output parser.
chain = {"topic": RunnablePassthrough()} | prompt | model | output_parser

if __name__ == "__main__":
    # batch() runs the chain over each topic and returns a list of joke strings.
    print(chain.batch(["ice cream", "spaghetti", "dumplings"]))