tuan3335 committed
Commit 1cf80b8 · 1 Parent(s): ed71eea

Restore HuggingFace imports and create hybrid AI brain with HF primary + Groq fallback

Files changed (1):
  agent.py +37 -12
agent.py CHANGED

@@ -24,7 +24,10 @@ from langgraph.graph import StateGraph, END
 from langgraph.graph.message import add_messages
 from typing_extensions import TypedDict
 
-# Groq imports for free AI model
+# HuggingFace imports
+from huggingface_hub import InferenceClient
+
+# Groq imports for fallback
 from groq import Groq
 
 # Utils system imports
@@ -68,33 +71,55 @@ class AgentState(TypedDict):
 # =============================================================================
 
 class LangChainQwen3Brain:
-    """AI Brain using LangChain + Groq free models"""
+    """AI Brain using LangChain + HuggingFace with Groq fallback"""
 
     def __init__(self):
-        # Use Groq instead of HuggingFace
-        self.client = Groq(
+        # Primary: HuggingFace
+        self.hf_client = InferenceClient(
+            provider="auto",
+            api_key=os.environ.get("HF_TOKEN", "")
+        )
+        self.hf_model = "Qwen/Qwen3-8B"
+
+        # Fallback: Groq
+        self.groq_client = Groq(
            api_key=os.environ.get("GROQ_API_KEY", "")
        )
-        self.model_name = "llama3-8b-8192"  # Free Groq model
+        self.groq_model = "llama3-8b-8192"
 
        # Setup parsers
        self.json_parser = JsonOutputParser()
        self.str_parser = StrOutputParser()
 
-        print("🧠 LangChain Groq Brain initialized")
+        print("🧠 LangChain Hybrid Brain initialized (HF + Groq fallback)")
 
     def _invoke_model(self, messages: List[Dict[str, str]]) -> str:
-        """Invoke model with messages"""
+        """Invoke model with messages - try HF first, fallback to Groq"""
+
+        # Try HuggingFace first
        try:
-            completion = self.client.chat.completions.create(
-                model=self.model_name,
+            completion = self.hf_client.chat.completions.create(
+                model=self.hf_model,
                messages=messages,
                max_tokens=2048,
                temperature=0.7
            )
            return completion.choices[0].message.content
-        except Exception as e:
-            return f"AI Error: {str(e)}"
+        except Exception as hf_error:
+            print(f"⚠️ HuggingFace failed: {str(hf_error)[:100]}...")
+            print("🔄 Falling back to Groq...")
+
+            # Fallback to Groq
+            try:
+                completion = self.groq_client.chat.completions.create(
+                    model=self.groq_model,
+                    messages=messages,
+                    max_tokens=2048,
+                    temperature=0.7
+                )
+                return completion.choices[0].message.content
+            except Exception as groq_error:
+                return f"AI Error: Both HF ({str(hf_error)[:50]}) and Groq ({str(groq_error)[:50]}) failed"
 
     def analyze_question(self, question: str, task_id: str = "") -> Dict[str, Any]:
        """AI analyzes question and decides approach"""
@@ -386,7 +411,7 @@ class LangGraphUtilsAgent:
        self.ai_brain = ai_brain
 
        print("🤖 LangGraph Utils Agent initialized!")
-        print("🧠 AI Brain: Groq free models with LangChain")
+        print("🧠 AI Brain: LangChain + HuggingFace with Groq fallback")
        print("🔧 Tools: YouTube, Image OCR, Audio Transcript, Wikipedia, File Reader, Text Processor")
        print("⚡ Features: AI-driven routing, Smart tool selection, Multimodal processing")
 
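
For quick local verification, a minimal usage sketch of the hybrid brain introduced in this commit. It assumes the class is importable from agent.py as shown in the diff, that HF_TOKEN and/or GROQ_API_KEY are set in the environment, and it calls the internal _invoke_model() helper directly with an illustrative prompt; this is a sketch under those assumptions, not code from the commit.

```python
# Minimal usage sketch (assumes the class and env vars shown in the diff above).
import os

from agent import LangChainQwen3Brain  # module name taken from the changed file

# Primary path needs an HF token; the Groq key is only used when HF fails.
assert os.environ.get("HF_TOKEN") or os.environ.get("GROQ_API_KEY"), \
    "Set HF_TOKEN and/or GROQ_API_KEY before running"

brain = LangChainQwen3Brain()

# _invoke_model() takes OpenAI-style chat messages and returns a plain string,
# falling back to Groq automatically if the HuggingFace call raises.
reply = brain._invoke_model([
    {"role": "system", "content": "You are a concise assistant."},
    {"role": "user", "content": "Say hello in one sentence."},  # illustrative prompt
])
print(reply)
```

Because _invoke_model() catches the HuggingFace exception and retries on Groq inside the except block, a missing or invalid HF_TOKEN degrades gracefully instead of breaking the agent; only when both providers fail does the method return the combined "AI Error" string.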