bigwolfe committed
Commit · f4cf7ef
1 Parent(s): 85efd83
bugs
Files changed:
- backend/src/services/rag_index.py +14 -13
- backend/uv.lock +0 -0
backend/src/services/rag_index.py
CHANGED
@@ -21,11 +21,11 @@ from llama_index.core.tools import FunctionTool
 
 # Try to import Gemini, handle missing dependency gracefully
 try:
-    from llama_index.llms.google_genai import GoogleGenAI
-    from llama_index.embeddings.google_genai import GoogleGenAIEmbedding
+    from llama_index.llms.google_genai import GoogleGenAI
+    from llama_index.embeddings.google_genai import GoogleGenAIEmbedding
 except ImportError as e:
-
-
+    GoogleGenAI = None
+    GoogleGenAIEmbedding = None
     logger.warning(f"Could not import google_genai modules: {e}")
 
 from llama_index.core.base.response.schema import Response as LlamaResponse
@@ -61,7 +61,7 @@ class RAGIndexService:
 
     def _setup_gemini(self):
         """Configure global LlamaIndex settings for Gemini."""
-        if not
+        if not GoogleGenAI or not GoogleGenAIEmbedding:
             logger.error("Google GenAI modules not loaded. RAG setup skipped.")
             return
 
@@ -69,7 +69,7 @@ class RAGIndexService:
         if not api_key:
             logger.warning("GOOGLE_API_KEY not set. RAG features will fail.")
             return
-
+
         # Log key status (masked)
         masked_key = f"{api_key[:4]}...{api_key[-4:]}" if len(api_key) > 8 else "***"
         logger.info(f"Configuring Gemini with API key: {masked_key}")
@@ -77,12 +77,12 @@ class RAGIndexService:
         # Set up Gemini
         try:
             # Configure global settings
-            Settings.llm =
-                model="gemini-2.0-flash",
+            Settings.llm = GoogleGenAI(
+                model="gemini-2.0-flash",
                 api_key=self.config.google_api_key
             )
-            Settings.embed_model =
-                model_name="models/text-embedding-004",
+            Settings.embed_model = GoogleGenAIEmbedding(
+                model_name="models/text-embedding-004",
                 api_key=self.config.google_api_key
             )
         except Exception as e:
@@ -319,7 +319,7 @@ class RAGIndexService:
             logger.error("Could not import FunctionAgent. Check llama-index-core version.")
             raise
 
-        #
+        # Create FunctionAgent with tools (0.14.x pattern)
        agent = FunctionAgent(
             tools=all_tools,
             llm=Settings.llm,
@@ -327,8 +327,9 @@ class RAGIndexService:
             verbose=True,
             system_prompt="You are a documentation assistant. Use vault_search to find info. You can create notes and folders."
         )
-
-
+
+        # Use .run() method (not .chat() which doesn't exist in 0.14.x)
+        response = await agent.run(user_msg=query_text)
 
         return self._format_response(response)
 
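Note: two fixes are at work in the hunks above. First, the google_genai imports now fail soft: the names are bound to None on ImportError, so _setup_gemini can check them and skip RAG setup instead of crashing at import time. A minimal standalone sketch of that pattern, assuming the same GOOGLE_API_KEY environment variable; the configure_gemini helper and the __main__ wiring are illustrative, not code from this repo:

# Standalone sketch (not repo code): defensive google_genai imports plus
# global LlamaIndex Settings configuration, mirroring _setup_gemini above.
import logging
import os

from llama_index.core import Settings

logger = logging.getLogger(__name__)

try:
    from llama_index.llms.google_genai import GoogleGenAI
    from llama_index.embeddings.google_genai import GoogleGenAIEmbedding
except ImportError as e:
    # Optional dependency missing: bind the names to None so later
    # "if not GoogleGenAI" checks work instead of raising NameError.
    GoogleGenAI = None
    GoogleGenAIEmbedding = None
    logger.warning(f"Could not import google_genai modules: {e}")


def configure_gemini(api_key: str | None) -> bool:
    """Hypothetical helper; the service does this inside _setup_gemini."""
    if not GoogleGenAI or not GoogleGenAIEmbedding:
        logger.error("Google GenAI modules not loaded. RAG setup skipped.")
        return False
    if not api_key:
        logger.warning("GOOGLE_API_KEY not set. RAG features will fail.")
        return False
    Settings.llm = GoogleGenAI(model="gemini-2.0-flash", api_key=api_key)
    Settings.embed_model = GoogleGenAIEmbedding(
        model_name="models/text-embedding-004", api_key=api_key
    )
    return True


if __name__ == "__main__":
    configure_gemini(os.environ.get("GOOGLE_API_KEY"))

Returning a bool is a liberty taken for the sketch; the service method itself just logs and returns early.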
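Second, the query path now awaits FunctionAgent.run() instead of a .chat() call, matching the llama-index 0.14.x workflow agents. A hedged usage sketch follows; the vault_search stub and the ask wrapper stand in for the tools and service method built elsewhere in rag_index.py, and the llama_index.core.agent.workflow import path is assumed from recent llama-index-core releases:

# Standalone sketch (not repo code): calling the 0.14.x workflow FunctionAgent
# via its awaitable .run() method, as the commit switches to above.
import asyncio

from llama_index.core import Settings
from llama_index.core.agent.workflow import FunctionAgent  # assumed import path
from llama_index.core.tools import FunctionTool


def vault_search(query: str) -> str:
    """Stub tool; the real service searches the indexed vault."""
    return f"No results for {query!r} (stub)."


async def ask(query_text: str) -> str:
    agent = FunctionAgent(
        tools=[FunctionTool.from_defaults(fn=vault_search)],
        llm=Settings.llm,  # assumes a _setup_gemini-style configuration already ran
        system_prompt="You are a documentation assistant. Use vault_search to find info.",
    )
    # .run() drives the agent workflow; awaiting it yields the final output.
    response = await agent.run(user_msg=query_text)
    return str(response)


if __name__ == "__main__":
    print(asyncio.run(ask("Where are the meeting notes stored?")))

Awaiting run() drives the agent workflow to completion; the service then passes the result through _format_response as shown in the diff.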
backend/uv.lock
CHANGED
The diff for this file is too large to render.
See raw diff