yopiindra committed on
Commit b937087 · verified
1 Parent(s): 97058f6

Update core/rag_services.py

Files changed (1)
  1. core/rag_services.py +31 -31
core/rag_services.py CHANGED
@@ -1,12 +1,11 @@
  # ==========================================================
- # File: core/rag_services.py (FINAL STABLE – Oct 2025)
+ # File: core/rag_services.py (FINAL PATCH - Oct 2025)
  # ----------------------------------------------------------
  # ✅ Can read PDF, DOCX, TXT
- # ✅ Safe on Hugging Face (use /tmp)
  # ✅ Incremental indexing + manifest
  # ✅ Auto rebuild index
- # ✅ Fix "'dict' object has no attribute 'replace'"
- # ✅ Fix "'ChatPromptValue' object is not subscriptable"
+ # ✅ Fix 'replace' & 'ChatPromptValue' errors
+ # ✅ Manual pipeline (does not depend on LangChain internals)
  # ==========================================================

  import os
@@ -43,8 +42,6 @@ except ImportError:
      from langchain_community.document_loaders import UnstructuredFileLoader as UnstructuredLoader

  from langchain_core.prompts import ChatPromptTemplate
- from langchain_core.output_parsers import StrOutputParser
- from langchain_core.runnables import RunnablePassthrough

  # ===============================
  # Path & Config
@@ -234,7 +231,7 @@ def incremental_index_update(embedding_model):
      return db

  # ===============================
- # Build RAG Chain (Final Fixed)
+ # Build RAG Chain (Manual Pipeline)
  # ===============================
  def get_rag_chain(llm, vector_store):
      if vector_store is None or vector_store.index.ntotal == 0:
@@ -254,30 +251,33 @@ Question:
  Answer:
  """)

-     def _combine_context(input_data):
-         q = input_data.get("question") if isinstance(input_data, dict) else str(input_data)
-         try:
-             docs = retriever.invoke(q)
-             return "\n\n".join([getattr(d, "page_content", "") for d in docs if d])
-         except Exception as e:
-             return f"(Failed to retrieve context: {e})"
-
-     def _extract_question(input_data):
-         if isinstance(input_data, dict):
-             return str(input_data.get("question", ""))
-         return str(input_data)
-
-     chain = (
-         {
-             "context": _combine_context,
-             "question": _extract_question,
-         }
-         | prompt
-         | llm
-         | StrOutputParser()
-     )
-
-     return chain
+     class RAGPipeline:
+         def __init__(self, retriever, prompt, llm):
+             self.retriever = retriever
+             self.prompt = prompt
+             self.llm = llm
+
+         def invoke(self, query):
+             # Normalize the input
+             question = str(query.get("question", "")) if isinstance(query, dict) else str(query)
+
+             try:
+                 docs = self.retriever.invoke(question)
+                 context = "\n\n".join([getattr(d, "page_content", "") for d in docs if d])
+             except Exception as e:
+                 context = f"(Failed to retrieve context: {e})"
+
+             formatted_prompt = self.prompt.format(context=context, question=question)
+
+             try:
+                 output = self.llm.invoke(formatted_prompt)
+                 if hasattr(output, "content"):
+                     return output.content
+                 return str(output)
+             except Exception as e:
+                 return f"[RAG Error] Failed to call LLM: {e}"
+
+     return RAGPipeline(retriever, prompt, llm)

  # ===============================
  # Auto Init at Startup
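
For reference, here is a minimal usage sketch of the manual pipeline pattern this commit adopts. It is not part of the commit: StubDoc, StubRetriever, StubLLM, and the prompt text are invented stand-ins assumed for illustration, and it assumes langchain_core is installed. Only the normalize / retrieve / format / invoke steps mirror what RAGPipeline.invoke in the diff above does.

# ==========================================================
# Usage sketch (illustration only, not part of the commit)
# StubDoc, StubRetriever, and StubLLM are invented stand-ins.
# ==========================================================
from langchain_core.prompts import ChatPromptTemplate

class StubDoc:
    def __init__(self, text):
        self.page_content = text

class StubRetriever:
    def invoke(self, question):
        # Pretend the FAISS index returned one relevant chunk.
        return [StubDoc("The index is rebuilt incrementally at startup.")]

class StubLLM:
    def invoke(self, prompt_text):
        # A real chat model would return a message object with .content.
        return f"(stub answer built from a {len(prompt_text)}-character prompt)"

prompt = ChatPromptTemplate.from_template(
    "Answer using only the context.\n\nContext:\n{context}\n\nQuestion:\n{question}\n\nAnswer:"
)

def run(query, retriever=StubRetriever(), llm=StubLLM()):
    # Same normalization as RAGPipeline.invoke: accept a dict or a bare string.
    question = str(query.get("question", "")) if isinstance(query, dict) else str(query)
    docs = retriever.invoke(question)
    context = "\n\n".join(getattr(d, "page_content", "") for d in docs if d)
    # .format() yields a plain string prompt, sidestepping the ChatPromptValue
    # handling that the old LCEL chain relied on.
    formatted = prompt.format(context=context, question=question)
    output = llm.invoke(formatted)
    return output.content if hasattr(output, "content") else str(output)

print(run({"question": "How is the index built?"}))
print(run("How is the index built?"))  # bare strings work too

With real components, the call pattern is the same: the object returned by get_rag_chain(llm, vector_store) exposes invoke(), which accepts either {"question": "..."} or a plain string and returns the answer text.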