from langchain.vectorstores.faiss import FAISS
from langchain_openai import OpenAIEmbeddings
from langchain.embeddings import CacheBackedEmbeddings
from langchain.storage import LocalFileStore
# Build a FAISS vector store over the loaded documents, wrapping the OpenAI
# embedder in a disk-backed cache so repeated runs don't re-pay embedding calls.
# NOTE(review): `docs` must be defined earlier in the file (e.g. by a document
# loader) — it is read on the FAISS.from_documents line before being reassigned
# below. Not visible in this chunk; confirm against the full file.
cache_dir = LocalFileStore("./cache/")
embeddings = OpenAIEmbeddings()
cached_embeddings = CacheBackedEmbeddings.from_bytes_store(embeddings, cache_dir)
vector_store = FAISS.from_documents(docs, cached_embeddings)

# Fix: variable was misspelled "retriver"; corrected and used consistently.
retriever = vector_store.as_retriever()
# NOTE(review): this rebinds `docs`, clobbering the source-document list above.
# Kept as-is since later cells may rely on the name; consider a distinct name
# (e.g. `retrieved_docs`) if the originals are still needed.
docs = retriever.invoke("nexus")
docs  # bare expression — notebook cell output; a no-op in a plain script
# Chain: retrieve from the vector store and send the result to the LLM
from langchain.prompts import ChatPromptTemplate

# System instruction: answer strictly from the retrieved context and admit
# ignorance rather than hallucinate. `{context}` and `{question}` are filled
# in at chain-invocation time.
SYSTEM_TEMPLATE = """
You are a helpful AI talking to a human, Answer questions using only the following context.
If you don't know the answer just say you don't know, don't make it up:
{context}
"""

prompt = ChatPromptTemplate.from_messages(
    [
        ("system", SYSTEM_TEMPLATE),
        ("human", "{question}"),
    ]
)
from langchain_openai import ChatOpenAI

# Low temperature keeps answers close to the retrieved context (less creative
# drift), which suits a retrieval-grounded QA chain.
llm = ChatOpenAI(temperature=0.1)