mahesh1209 committed (verified)
Commit 5a8a57e · 1 Parent(s): 6f19a4e

Create app.py

Files changed (1)
  1. app.py  +64 -0
app.py ADDED
@@ -0,0 +1,64 @@
# app.py
import os
import glob
from huggingface_hub import snapshot_download
from ctransformers import AutoModelForCausalLM
import gradio as gr

# Model info
MODEL_REPO = "TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF"
MODEL_FILE = "tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf"
MODEL_CACHE_DIR = os.environ.get("MODEL_DIR", "/app/model_cache")

os.makedirs(MODEL_CACHE_DIR, exist_ok=True)

# If the model is not present, download it (public HF repo)
def ensure_model():
    # If the user committed the model locally, prefer that copy
    local_paths = glob.glob(os.path.join(MODEL_CACHE_DIR, "**", MODEL_FILE), recursive=True)
    candidate = local_paths[0] if local_paths else os.path.join(MODEL_CACHE_DIR, MODEL_FILE)
    if os.path.exists(candidate):
        return candidate

    print("Downloading model snapshot from the Hugging Face Hub...")
    repo_path = snapshot_download(repo_id=MODEL_REPO, cache_dir=MODEL_CACHE_DIR)
    matches = glob.glob(os.path.join(repo_path, "**", MODEL_FILE), recursive=True)
    if matches:
        return matches[0]
    # Fallback: check the snapshot root directly
    fallback = os.path.join(repo_path, MODEL_FILE)
    if os.path.exists(fallback):
        return fallback
    raise FileNotFoundError(f"Could not locate {MODEL_FILE} after download (repo_path={repo_path}).")

model_path = ensure_model()
print("Using model file:", model_path)

# Load the model (ctransformers handles GGUF); pass the local file path
# directly so the snapshot downloaded above is reused rather than re-fetched
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    model_type="llama",
    max_new_tokens=256,
    temperature=0.7,
)

# Canned replies for a few known prompts (these bypass the model)
custom_data = {
    "hi": "Hello, good morning! I am Deepika, a chatbot.",
    "about you": "I am the Deepika AI chatbot. How may I help you?",
    "i love you": "I love you too.",
    "age": "45",
}

def chatbot(msg: str):
    if not msg:
        return ""
    key = msg.strip().lower()
    if key in custom_data:
        return custom_data[key]
    # ctransformers returns a plain string for simple calls like this
    return model(f"### Instruction:\n{msg}\n\n### Response:\n")

if __name__ == "__main__":
    port = int(os.environ.get("PORT", 7860))
    gr.Interface(
        fn=chatbot,
        inputs="text",
        outputs="text",
        title="Custom Chatbot - TinyLlama",
    ).launch(server_name="0.0.0.0", server_port=port)
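
Note on the download step: snapshot_download pulls every file in the TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF repo, which contains several quantization variants besides the one Q4_K_M file this app actually loads. A lighter-weight alternative, sketched below and not part of this commit, is huggingface_hub.hf_hub_download, which fetches only the single GGUF file. The wrapper name ensure_model_single_file is hypothetical; the constants mirror the ones defined in app.py.

# Sketch: download only the one quantized file instead of the full snapshot
import os
from huggingface_hub import hf_hub_download

MODEL_REPO = "TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF"
MODEL_FILE = "tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf"
MODEL_CACHE_DIR = os.environ.get("MODEL_DIR", "/app/model_cache")

def ensure_model_single_file() -> str:
    # hf_hub_download caches the file and returns its local path,
    # so repeated calls are cheap once the file is already present
    return hf_hub_download(
        repo_id=MODEL_REPO,
        filename=MODEL_FILE,
        cache_dir=MODEL_CACHE_DIR,
    )

if __name__ == "__main__":
    print("Model file at:", ensure_model_single_file())

If this were swapped in for ensure_model(), the rest of app.py (model loading and the Gradio interface) would stay the same. Whichever download path is used, a Space running this app also needs gradio, ctransformers, and huggingface_hub listed in requirements.txt.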