sproducts committed on
Commit
d325648
·
verified ·
1 Parent(s): f4d8e9f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +98 -20
app.py CHANGED
@@ -1,39 +1,117 @@
 
 
1
  import streamlit as st
2
- import gradio as gr
3
  from huggingface_hub import InferenceClient
4
 
5
# Initialize the model via Hugging Face Inference API
# MODEL_NAME selects the hosted DeepSeek distilled model; InferenceClient
# sends requests to the remote Inference API (no local weights are loaded).
# NOTE(review): authentication comes from the ambient HF token/config — confirm
# it is set in the deployment environment.
MODEL_NAME = "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B"
client = InferenceClient(MODEL_NAME)
8
 
9
# Streamlit UI for displaying project information
# Static page header rendered before the rest of the page content.
st.title("Project Guidance AI Chatbot")
st.subheader("This chatbot is powered by DeepSeek-R1-Distill-Qwen-7B")
13
# Display the roadmap and rules from files
def load_file(filename):
    """Return the full contents of *filename*, or an error string if missing.

    Args:
        filename: Path of the text file to read (UTF-8).

    Returns:
        The file's text, or a human-readable error string when the file
        does not exist (callers display it directly instead of handling
        an exception).
    """
    try:
        with open(filename, "r", encoding="utf-8") as file:
            return file.read()
    except FileNotFoundError:
        # BUG FIX: the f-string had no placeholder, so every error read
        # "(unknown)"; report the actual missing filename instead.
        return f"⚠️ Error: {filename} not found!"
20
 
 
21
# Read the project guidance files once at startup; load_file returns an
# error string (not an exception) if a file is missing, so these never raise.
roadmap_content = load_file("roadmap.txt")
rules_content = load_file("rules.txt")

# Render the roadmap as preformatted plain text.
st.write("### Project Roadmap")
st.text(roadmap_content)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
26
 
27
- st.write("### Project Rules")
28
- st.text(rules_content)
 
 
29
 
30
# Gradio component for model interaction (as part of Streamlit)
def generate_response(user_input):
    """Return a single, non-streaming model reply grounded in the project docs.

    Args:
        user_input: The user's message text.

    Returns:
        The assistant's reply text from the hosted model.
    """
    # Embed the roadmap and rules in the system prompt so the model answers
    # with project context.
    system_message = f"Project Roadmap:\n{roadmap_content}\n\nProject Rules:\n{rules_content}"
    messages = [{"role": "system", "content": system_message},
                {"role": "user", "content": user_input}]

    response = client.chat_completion(messages)
    # BUG FIX: a non-streaming chat_completion returns complete messages;
    # `.delta` only exists on streamed chunks. Read `.message.content`.
    return response.choices[0].message.content
38
 
39
- gr.Interface(fn=generate_response, inputs="text", outputs="text").launch(share=True)
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
  import streamlit as st
 
4
  from huggingface_hub import InferenceClient
5
 
6
# -------------------------------------------------
# 1. Model Selection (DeepSeek-R1-Distill-Qwen-7B)
# -------------------------------------------------
# Hosted inference via the Hugging Face Inference API; no local weights.
# NOTE(review): authentication comes from the ambient HF token/config — confirm
# it is set in the deployment environment.
MODEL_NAME = "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B"
client = InferenceClient(MODEL_NAME)
11
 
12
+ # -------------------------------------------------
13
+ # 2. Load Roadmap and Rules
14
+ # -------------------------------------------------
 
 
15
def load_file(filename):
    """Read a text file and return its content.

    Args:
        filename: Path of the text file to read (UTF-8).

    Returns:
        The file's text, or a human-readable error string when the file
        does not exist (callers display it directly instead of handling
        an exception).
    """
    try:
        with open(filename, "r", encoding="utf-8") as file:
            return file.read()
    except FileNotFoundError:
        # BUG FIX: the f-string had no placeholder, so every error read
        # "(unknown)"; report the actual missing filename instead.
        return f"⚠️ Error: {filename} not found!"
22
 
23
# Load project guidance files
# Both files are expected next to app.py; load_file returns an error string
# (rather than raising) when a file is missing, so these assignments never fail.
roadmap_content = load_file("roadmap.txt")
rules_content = load_file("rules.txt")
26
 
27
# -------------------------------------------------
# 3. Chatbot Response Function (With Project Guidance)
# -------------------------------------------------
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    temperature,
    top_p,
):
    """Stream an AI reply that takes the project roadmap and rules into account.

    Args:
        message: The latest user message.
        history: Prior turns as (user_text, assistant_text) pairs; either
            element may be falsy and is then skipped.
        system_message: Base system prompt; roadmap and rules are appended.
        temperature: Sampling temperature forwarded to the model.
        top_p: Nucleus-sampling cutoff forwarded to the model.

    Yields:
        The accumulated response text after each streamed token, so the
        caller can render a live-updating answer. On failure, yields a
        single error string instead of raising.
    """
    # Ground the model in the project docs by embedding them in the system prompt.
    full_system_message = f"{system_message}\n\nProject Roadmap:\n{roadmap_content}\n\nProject Rules:\n{rules_content}"

    messages = [{"role": "system", "content": full_system_message}]

    for user_text, ai_text in history:
        if user_text:  # User message
            messages.append({"role": "user", "content": user_text})
        if ai_text:  # AI response
            messages.append({"role": "assistant", "content": ai_text})

    messages.append({"role": "user", "content": message})

    response = ""

    try:
        # BUG FIX: the loop variable previously shadowed the `message`
        # parameter; use a distinct name for each streamed chunk.
        for chunk in client.chat_completion(
            messages,
            max_tokens=None,  # 🚀 No token limit
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            token = chunk.choices[0].delta.content

            if token:
                response += token
                yield response  # Stream response live
    except Exception as e:
        # Surface API/network failures to the UI instead of crashing the app.
        yield f"⚠️ Error: {str(e)}"  # Show error messages
70
+
71
# -------------------------------------------------
# 4. Set up the Streamlit UI
# -------------------------------------------------

st.title("Project Guidance AI Chatbot")
st.write("Chat with the AI to get project guidance based on the roadmap and rules.")

# Display the roadmap and rules text
st.subheader("Project Roadmap:")
st.write(roadmap_content)
st.subheader("Project Rules:")
st.write(rules_content)

# -----------------------------------
# Create a chat history state
# -----------------------------------
# History is stored as ("You" | "AI", text) tuples for display purposes.
if "history" not in st.session_state:
    st.session_state.history = []

# -----------------------------------
# Input and Chat Display
# -----------------------------------
user_input = st.text_input("You:", "")

if user_input:
    st.session_state.history.append(("You", user_input))

    # Set system message (helps guide the conversation)
    system_message = "You are an AI assistant helping with project guidance based on a roadmap and rules."

    # BUG FIX: respond() expects history as (user_text, ai_text) pairs, but the
    # session history stores ("You"/"AI", text) display tuples. Previously the
    # raw tuples were passed through, so the literal labels "You"/"AI" were
    # sent to the model as user messages and the real text as assistant
    # messages. Convert the labeled history (excluding the just-appended
    # input) into proper pairs before calling respond().
    paired_history = []
    pending_user = None
    for speaker, text in st.session_state.history[:-1]:
        if speaker == "You":
            pending_user = text
        else:
            paired_history.append((pending_user, text))
            pending_user = None
    if pending_user is not None:
        # A user turn with no AI reply yet; respond() skips falsy slots.
        paired_history.append((pending_user, None))

    # Generate response — consume the stream, keeping only the final text.
    ai_response = ""
    for resp in respond(user_input, paired_history, system_message, temperature=0.7, top_p=0.95):
        ai_response = resp

    # Update chat history with AI response
    st.session_state.history.append(("AI", ai_response))

# -----------------------------------
# Display the chat history
# -----------------------------------
if st.session_state.history:
    for chat in st.session_state.history:
        if chat[0] == "You":
            st.write(f"**You**: {chat[1]}")
        else:
            st.write(f"**AI**: {chat[1]}")