from langchain_community.document_loaders import TextLoader
from langchain.indexes import VectorstoreIndexCreator
from langchain_community.llms import Ollama
from langchain.chains import RetrievalQA
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain.prompts import PromptTemplate
# from django.conf import settings

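# Note: these are the langchain_community import paths. On newer LangChain
# releases, Ollama and HuggingFaceEmbeddings may be deprecated here in favour
# of the langchain_ollama (OllamaLLM) and langchain_huggingface packages;
# adjust the imports if you see deprecation warnings.
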
# Load the YAML knowledge-base document
loader = TextLoader("haikal_kb.yaml")

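# Note: TextLoader reads the file as plain text; the YAML is not parsed,
# it is simply split into chunks and embedded as-is.
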
# Create the embeddings model (a sentence-transformers MiniLM model)
embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")

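# Note: HuggingFaceEmbeddings wraps sentence-transformers, so that package
# must be installed; the short model name should resolve to
# sentence-transformers/all-MiniLM-L6-v2.
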
# Create an instance of VectorstoreIndexCreator with the embeddings
index_creator = VectorstoreIndexCreator(embedding=embeddings)

# Then call the from_loaders method on the instance to build the vector index
index = index_creator.from_loaders([loader])

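# Note: depending on the LangChain version, the default vector store here may
# be Chroma (which needs the chromadb package) or an in-memory store; pass
# vectorstore_cls=... to choose one explicitly.
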
# Create the LLM instance (a local model served via Ollama)
llm = Ollama(model="qwen3:8b", temperature=0.3)

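# Note: this assumes a local Ollama server is running and the model has been
# pulled beforehand (e.g. `ollama pull qwen3:8b`); otherwise the chain will
# fail when it is invoked.
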
# Define a custom prompt template for instructional responses
template = """
You are Haikal, an assistant for the car inventory management system.
Your goal is to provide clear step-by-step instructions for users to complete tasks.

Use the following pieces of context to answer the question at the end.
If you don't know the answer, just say you don't know. Don't try to make up an answer.

Context:
{context}

Question: {question}

Provide a clear step-by-step guide with numbered instructions. Include:
1. Where to click in the interface
2. What to enter or select
3. Any buttons to press to complete the action
4. Any alternatives or shortcuts if available

Helpful Step-by-Step Instructions:"""

PROMPT = PromptTemplate(
    template=template,
    input_variables=["context", "question"]
)

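# The input_variables must match the {context} and {question} placeholders
# used in the template above.
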
# Set up the QA chain; "stuff" places all retrieved chunks into a single prompt
qa = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=index.vectorstore.as_retriever(),
    return_source_documents=True,
    chain_type_kwargs={"prompt": PROMPT}
)

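# Optional: limit how many chunks the retriever returns (k is a guess; tune it):
# retriever=index.vectorstore.as_retriever(search_kwargs={"k": 3}),
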
# Function to run a query and print the answer with its sources
def ask_haikal(query):
    response = qa.invoke({"query": query})
    print("\n" + "="*50)
    print(f"Question: {query}")
    print("="*50)
    print("\nAnswer:")
    print(response["result"])
    print("\nSources:")
    for doc in response["source_documents"]:
        print(f"- {doc.metadata.get('source', 'Unknown source')}")
    print("="*50)
    return response["result"]

# Example query
if __name__ == "__main__":
    query = "How do I add a new car to the inventory? answer in Arabic"
    ask_haikal(query)