sproducts committed on
Commit
f4d8e9f
·
verified ·
1 Parent(s): 32593c3

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +39 -0
app.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import gradio as gr
3
+ from huggingface_hub import InferenceClient
4
# --- Model setup ---
# NOTE(review): this script drives both Streamlit and Gradio front-ends;
# confirm that is intentional — usually a single framework serves the UI.
MODEL_NAME = "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B"
client = InferenceClient(MODEL_NAME)

# --- Streamlit page header ---
st.title("Project Guidance AI Chatbot")
st.subheader("This chatbot is powered by DeepSeek-R1-Distill-Qwen-7B")
12
+
13
# Display the roadmap and rules from files
def load_file(filename):
    """Read a UTF-8 text file and return its contents.

    Args:
        filename: Path of the file to read.

    Returns:
        The file's full text, or a warning string naming the missing
        file when it does not exist.
    """
    try:
        with open(filename, "r", encoding="utf-8") as file:
            return file.read()
    except FileNotFoundError:
        # Bug fix: the original f-string had no placeholder and always
        # reported "(unknown)"; interpolate the actual filename instead.
        return f"⚠️ Error: {filename} not found!"
20
# Load the guidance documents once at startup; generate_response reads
# these module-level values when building its system prompt.
roadmap_content = load_file("roadmap.txt")
rules_content = load_file("rules.txt")

# Render each document under its own heading, in the same order as before.
for heading, body in (
    ("### Project Roadmap", roadmap_content),
    ("### Project Rules", rules_content),
):
    st.write(heading)
    st.text(body)
29
# Gradio component for model interaction (as part of Streamlit)
def generate_response(user_input):
    """Answer *user_input* using the project roadmap and rules as context.

    Args:
        user_input: The user's free-form question.

    Returns:
        The assistant's reply text from the chat-completion endpoint.
    """
    # Inject the roadmap/rules files as a system prompt so the model stays
    # grounded in the project's own documents.
    system_message = f"Project Roadmap:\n{roadmap_content}\n\nProject Rules:\n{rules_content}"
    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": user_input},
    ]

    response = client.chat_completion(messages)
    # Bug fix: a non-streaming chat_completion returns choices carrying a
    # `.message` attribute; `.delta` exists only on streaming chunks, so the
    # original `.delta.content` raised AttributeError on every call.
    return response.choices[0].message.content
38
# Expose the model through a simple text-in/text-out Gradio interface.
# `share=True` asks Gradio for a public tunnel URL.
demo = gr.Interface(fn=generate_response, inputs="text", outputs="text")
demo.launch(share=True)