import os

# Set the OpenAI API key before importing llama-index modules that read it.
# (Placeholder value — replace with a real key before running.)
os.environ["OPENAI_API_KEY"] = "YOUR_OPENAI_API_KEY"
# LlamaIndex components for building the index and the HyDE query pipeline.
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader
from llama_index.core.indices.query.query_transform import HyDEQueryTransform
from llama_index.core.query_engine import TransformQueryEngine
(llamaindex_010) Florian:~ Florian$ python /Users/Florian/Documents/test_hyde.py ---------------------------------------------------------------------------------------------------- 基础查询: Paul Graham在RISD之后回到了纽约的旧生活。他变得富有并继续他的旧习惯,但有了新的机会,比如能够轻松叫到出租车和在迷人的餐厅用餐。他还开始尝试一种新的静物画技法。 ---------------------------------------------------------------------------------------------------- 假设文档: ["在罗德岛设计学院 (RISD) 就读后,Paul Graham 创立了 Viaweb,一个在线商店构建器,后来以 4900 万美元被雅虎收购。Viaweb 的成功之后,Graham 成为科技行业的影响力人物,于 2005 年共同创立了创业加速器 Y Combinator。Y Combinator 已成为世界上最负盛名和最成功的创业加速器之一,帮助启动了 Dropbox、Airbnb 和 Reddit 等公司。Graham 还以其关于技术、创业和创业精神的丰富写作而闻名,他的文章在科技社区中被广泛阅读和尊重。总的来说,Paul Graham 在 RISD 之后的职业生涯以创新、成功和对创业生态系统的重大影响为标志。", 'what did paul graham do after going to RISD'] ---------------------------------------------------------------------------------------------------- 经过HyDEQueryTransform后: 在RISD之后,Paul Graham回到了纽约的旧生活,但现在他变得富有。他继续他的旧习惯,但有了新的机会,比如能够轻松叫到出租车和在迷人的餐厅用餐。他还开始专注于他的绘画,尝试一种新的技法。此外,他开始寻找要购买的公寓,并考虑构建一个用于制作网页应用的网页应用的想法,这最终导致他创立了一家名为Aspra的新公司。
import os

# Set the OpenAI API key before constructing any ChatOpenAI instances.
# (Placeholder value — replace with a real key before running.)
os.environ["OPENAI_API_KEY"] = "YOUR_OPEN_AI_KEY"
# LangChain components for the step-back prompting example (search wrapper,
# prompt templates, runnables, and the OpenAI chat model).
from langchain_community.utilities import DuckDuckGoSearchAPIWrapper
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_openai import ChatOpenAI
import os

# Set the OpenAI API key before constructing any ChatOpenAI instances.
# (Placeholder value — replace with a real key before running.)
os.environ["OPENAI_API_KEY"] = "YOUR_OPEN_AI_KEY"
# LangChain components for the few-shot step-back prompting pipeline.
from langchain_community.utilities import DuckDuckGoSearchAPIWrapper
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, FewShotChatMessagePromptTemplate
from langchain_core.runnables import RunnableLambda
from langchain_openai import ChatOpenAI
# The user question fed to both the base chain and the step-back chain below.
question = "was chatgpt around while trump was president?"
# Prompt used to answer with only the normally-retrieved context.
# The {normal_context} and {question} placeholders are filled by the chain.
base_prompt_template = """You are an expert of world knowledge. I am going to ask you a question. Your response should be comprehensive and not contradicted with the following context if they are relevant. Otherwise, ignore them if they are not relevant.

{normal_context}

Original Question: {question}

Answer:"""

# base_prompt is referenced by base_chain below but was never defined in the
# extracted listing — build it from the template here.
base_prompt = ChatPromptTemplate.from_template(base_prompt_template)
# Chain that answers using only the context retrieved for the original
# question: build the prompt inputs, render the prompt, call the model,
# and parse the reply to a plain string.
base_chain = (
    {
        # Retrieve context using the normal question (only the first 3 results)
        "normal_context": RunnableLambda(lambda x: x["question"]) | retriever,
        # Pass on the question
        "question": lambda x: x["question"],
    }
    | base_prompt
    | ChatOpenAI(temperature=0)
    | StrOutputParser()
)
# NOTE(review): june_print and retriever are defined elsewhere in the full
# listing (not visible in this excerpt) — confirm they are in scope.
june_print('The searched contexts of the original question:', retriever(question))
june_print('The result of base_chain:', base_chain.invoke({"question": question}))
结果如下:
(langchain) Florian:~ Florian$ python /Users/Florian/Documents/test_step_back.py ---------------------------------------------------------------------------------------------------- The searched contexts of the original question: While impressive in many respects, ChatGPT also has some major flaws. ... [President's Name]," refused to write a poem about ex-President Trump, but wrote one about President Biden ... The company said GPT-4 recently passed a simulated law school bar exam with a score around the top 10% of test takers. By contrast, the prior version, GPT-3.5, scored around the bottom 10%. The ... These two moments show how Twitter's choices helped former President Trump. ... With ChatGPT, which launched to the public in late November, users can generate essays, stories and song lyrics ... Donald Trump is asked a question—say, whether he regrets his actions on Jan. 6—and he answers with something like this: " Let me tell you, there's nobody who loves this country more than me ... ---------------------------------------------------------------------------------------------------- The result of base_chain: Yes, ChatGPT was around while Trump was president. ChatGPT is an AI language model developed by OpenAI and was launched to the public in late November. It has the capability to generate essays, stories, and song lyrics. While it may have been used to write a poem about President Biden, it also has the potential to be used in various other contexts, including generating responses from hypothetical scenarios involving former President Trump.
# Few Shot Examples: pairs mapping a specific question to its more generic
# "step-back" paraphrase, used to teach the model the transformation.
examples = [
    {
        "input": "Could the members of The Police perform lawful arrests?",
        "output": "what can the members of The Police do?",
    },
    {
        "input": "Jan Sindel’s was born in what country?",
        "output": "what is Jan Sindel’s personal history?",
    },
]

# We now transform these to example messages (one human/ai turn per example).
example_prompt = ChatPromptTemplate.from_messages(
    [
        ("human", "{input}"),
        ("ai", "{output}"),
    ]
)
few_shot_prompt = FewShotChatMessagePromptTemplate(
    example_prompt=example_prompt,
    examples=examples,
)
# System instruction + few-shot examples + the new question form the
# step-back rewriting prompt.
step_back_prompt = ChatPromptTemplate.from_messages(
    [
        (
            "system",
            """You are an expert at world knowledge. Your task is to step back and paraphrase a question to a more generic step-back question, which is easier to answer. Here are a few examples:""",
        ),
        # Few shot examples
        few_shot_prompt,
        # New question
        ("user", "{question}"),
    ]
)
step_back_question_chain = step_back_prompt | ChatOpenAI(temperature=0) | StrOutputParser()

# Invoke the chain once and reuse the result: each invoke() is a separate LLM
# call, so the original double-invoke was slower, costlier, and could print a
# different step-back question from the one actually used for retrieval.
step_back_question = step_back_question_chain.invoke({"question": question})
june_print('The step-back question:', step_back_question)
june_print('The searched contexts of the step-back question:', retriever(step_back_question))
# Prompt used for the final answer: it receives both the normally-retrieved
# context and the context retrieved with the step-back question.
response_prompt_template = """You are an expert of world knowledge. I am going to ask you a question. Your response should be comprehensive and not contradicted with the following context if they are relevant. Otherwise, ignore them if they are not relevant.

{normal_context}
{step_back_context}

Original Question: {question}

Answer:"""
response_prompt = ChatPromptTemplate.from_template(response_prompt_template)
# Chain that answers using both the normal context and the context retrieved
# for the step-back paraphrase of the question.
step_back_chain = (
    {
        # Retrieve context using the normal question
        "normal_context": RunnableLambda(lambda x: x["question"]) | retriever,
        # Retrieve context using the step-back question
        "step_back_context": step_back_question_chain | retriever,
        # Pass on the question
        "question": lambda x: x["question"],
    }
    | response_prompt
    | ChatOpenAI(temperature=0)
    | StrOutputParser()
)
# Print the final answer produced with both normal and step-back contexts.
june_print('The result of step_back_chain:', step_back_chain.invoke({"question": question}) )