diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000000000000000000000000000000000000..9a5e027383a6a094f8af59a141ab498c5f1d23a7
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,2 @@
+*.tflite filter=lfs diff=lfs merge=lfs -text
+*.jpg filter=lfs diff=lfs merge=lfs -text
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..77e24df871b26b98391caccd828b1e3727b54559
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,18 @@
+FROM python:3.10-slim
+
+WORKDIR /app
+
+# Install minimal system dependencies
+RUN apt-get update && apt-get install -y \
+ libgl1-mesa-glx \
+ libglib2.0-0 \
+ && rm -rf /var/lib/apt/lists/*
+
+# Copy requirements first for better caching
+COPY requirements.txt .
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Copy application files
+COPY . .
+
+CMD ["streamlit", "run", "app.py", "--server.port=8501", "--server.address=0.0.0.0"]
diff --git a/google_search_tool.py b/google_search_tool.py
new file mode 100644
index 0000000000000000000000000000000000000000..f357319d3a28fbba5b47484fdac53f575b16237e
--- /dev/null
+++ b/google_search_tool.py
@@ -0,0 +1,41 @@
+import os
+import logging
+from googleapiclient.discovery import build
+from dotenv import load_dotenv
+
+load_dotenv()
+
+class GoogleSearch:
+ """Performs Google Custom Search API queries."""
+ def __init__(self):
+ self.api_key = os.getenv("GOOGLE_API_KEY")
+ self.cse_id = os.getenv("GOOGLE_CSE_ID")
+ self.service = None
+ if self.api_key and self.cse_id:
+ try:
+ self.service = build("customsearch", "v1", developerKey=self.api_key)
+ logging.info("✅ Google Custom Search initialized.")
+ except Exception as e:
+ logging.error(f"❌ CSE init failed: {e}")
+ else:
+ logging.warning("⚠️ GOOGLE_API_KEY or GOOGLE_CSE_ID not set; search disabled.")
+
+ def search(self, queries: list, num_results: int = 1) -> list:
+ if not self.service:
+ return []
+ out = []
+ for q in queries:
+ try:
+ resp = self.service.cse().list(q=q, cx=self.cse_id, num=num_results).execute()
+ items = resp.get("items", [])
+ formatted = [
+ {"title": it.get("title"), "link": it.get("link"), "snippet": it.get("snippet")}
+ for it in items
+ ]
+ out.append({"query": q, "results": formatted})
+ except Exception as e:
+ logging.error(f"❌ Search error for '{q}': {e}")
+ out.append({"query": q, "results": []})
+ return out
+
+google_search = GoogleSearch()
diff --git a/README.md b/README.md
index 152c83e3f727caa975abfa7dc16b04e6058073c7..3797697b9342615ceea9b6505e43244930cbf3ca 100644
--- a/README.md
+++ b/README.md
@@ -1,59 +1,14 @@
---
-title: HexAI Demo
-emoji: 🔥
-colorFrom: pink
-colorTo: gray
-sdk: static
-pinned: false
-app_build_command: npm run build
-app_file: dist/index.html
-short_description: Demo with limited version
+title: RxGuard Prescription Validator
+emoji: ⚕️
+colorFrom: blue
+colorTo: indigo
+sdk: streamlit
+sdk_version: 1.36.0
+app_file: app.py
+hf_oauth: true
+hardware:
+ accelerator: T4
+ cpu: 2
+ memory: 16
---
-
-# Svelte + TS + Vite
-
-This template should help get you started developing with Svelte and TypeScript in Vite.
-
-## Recommended IDE Setup
-
-[VS Code](https://code.visualstudio.com/) + [Svelte](https://marketplace.visualstudio.com/items?itemName=svelte.svelte-vscode).
-
-## Need an official Svelte framework?
-
-Check out [SvelteKit](https://github.com/sveltejs/kit#readme), which is also powered by Vite. Deploy anywhere with its serverless-first approach and adapt to various platforms, with out of the box support for TypeScript, SCSS, and Less, and easily-added support for mdsvex, GraphQL, PostCSS, Tailwind CSS, and more.
-
-## Technical considerations
-
-**Why use this over SvelteKit?**
-
-- It brings its own routing solution which might not be preferable for some users.
-- It is first and foremost a framework that just happens to use Vite under the hood, not a Vite app.
-
-This template contains as little as possible to get started with Vite + TypeScript + Svelte, while taking into account the developer experience with regards to HMR and intellisense. It demonstrates capabilities on par with the other `create-vite` templates and is a good starting point for beginners dipping their toes into a Vite + Svelte project.
-
-Should you later need the extended capabilities and extensibility provided by SvelteKit, the template has been structured similarly to SvelteKit so that it is easy to migrate.
-
-**Why `global.d.ts` instead of `compilerOptions.types` inside `jsconfig.json` or `tsconfig.json`?**
-
-Setting `compilerOptions.types` shuts out all other types not explicitly listed in the configuration. Using triple-slash references keeps the default TypeScript setting of accepting type information from the entire workspace, while also adding `svelte` and `vite/client` type information.
-
-**Why include `.vscode/extensions.json`?**
-
-Other templates indirectly recommend extensions via the README, but this file allows VS Code to prompt the user to install the recommended extension upon opening the project.
-
-**Why enable `allowJs` in the TS template?**
-
-While `allowJs: false` would indeed prevent the use of `.js` files in the project, it does not prevent the use of JavaScript syntax in `.svelte` files. In addition, it would force `checkJs: false`, bringing the worst of both worlds: not being able to guarantee the entire codebase is TypeScript, and also having worse typechecking for the existing JavaScript. In addition, there are valid use cases in which a mixed codebase may be relevant.
-
-**Why is HMR not preserving my local component state?**
-
-HMR state preservation comes with a number of gotchas! It has been disabled by default in both `svelte-hmr` and `@sveltejs/vite-plugin-svelte` due to its often surprising behavior. You can read the details [here](https://github.com/rixo/svelte-hmr#svelte-hmr).
-
-If you have state that's important to retain within a component, consider creating an external store which would not be replaced by HMR.
-
-```ts
-// store.ts
-// An extremely simple external store
-import { writable } from "svelte/store";
-export default writable(0);
-```
diff --git a/__pycache__/config.cpython-310.pyc b/__pycache__/config.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..facf05a71f762db4a1098cae11e663390bb2fc07
Binary files /dev/null and b/__pycache__/config.cpython-310.pyc differ
diff --git a/__pycache__/config.cpython-39.pyc b/__pycache__/config.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dda04cee96af3fa2c41b5088c8daae2b25e02d51
Binary files /dev/null and b/__pycache__/config.cpython-39.pyc differ
diff --git a/__pycache__/google_search_tool.cpython-310.pyc b/__pycache__/google_search_tool.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..41c59ad7968ff63fa0a3452cb0c3a901ac3bf1aa
Binary files /dev/null and b/__pycache__/google_search_tool.cpython-310.pyc differ
diff --git a/__pycache__/validate_prescription.cpython-310.pyc b/__pycache__/validate_prescription.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..008dc35fb40e9d306183391cf9bcde1a57e2c4dd
Binary files /dev/null and b/__pycache__/validate_prescription.cpython-310.pyc differ
diff --git a/app.py b/app.py
new file mode 100644
index 0000000000000000000000000000000000000000..2ad1a32c687eeda008284ed47147aa5bfe654b43
--- /dev/null
+++ b/app.py
@@ -0,0 +1,137 @@
+import os
+import streamlit as st
+from PIL import Image
+from config import (
+ STATIC_DIR,
+ UPLOADS_DIR,
+ HF_TOKEN,
+ GOOGLE_API_KEY,
+ GOOGLE_CSE_ID,
+ GEMINI_API_KEY,
+ DEVICE
+)
+
+# ─── App Configuration ────────────────────────────────────────────────────
+st.set_page_config(
+ page_title="RxGuard Prescription Validator",
+ page_icon="⚕️",
+ layout="wide",
+ menu_items={
+ 'Get Help': 'https://github.com/your-repo',
+ 'About': "RxGuard v1.0 - Advanced Prescription Validation"
+ }
+)
+
+# ─── Session State ────────────────────────────────────────────────────────
+if "analysis_result" not in st.session_state:
+ st.session_state.analysis_result = None
+if "uploaded_filename" not in st.session_state:
+ st.session_state.uploaded_filename = None
+
+# ─── UI Components ────────────────────────────────────────────────────────
+def show_service_status():
+ """Displays service connectivity status."""
+ cols = st.columns(4)
+ with cols[0]:
+ st.metric("HuggingFace", "✅" if HF_TOKEN else "❌")
+ with cols[1]:
+ st.metric("Google API", "✅" if GOOGLE_API_KEY else "❌")
+ with cols[2]:
+ st.metric("Gemini", "✅" if GEMINI_API_KEY else "❌")
+ with cols[3]:
+ st.metric("Device", DEVICE.upper())
+
+def display_patient_info(info: dict):
+ """Displays patient information in a formatted card."""
+ with st.container(border=True):
+ st.subheader("👤 Patient Details")
+ cols = st.columns(2)
+ with cols[0]:
+ st.markdown(f"**Name:** {info.get('Name', 'Not detected')}")
+ st.markdown(f"**Age:** {info.get('Age', 'N/A')}")
+ with cols[1]:
+ st.markdown(f"**Date:** {info.get('Date', 'N/A')}")
+ st.markdown(f"**Physician:** {info.get('PhysicianName', 'N/A')}")
+
+def display_medications(medications: list):
+ """Displays medication information with verification."""
+ st.subheader("💊 Medications")
+ if not medications:
+ st.warning("No medications detected in prescription")
+ return
+
+ for med in medications:
+ with st.expander(f"{med.get('drug_raw', 'Unknown Medication')}"):
+ cols = st.columns([1, 2])
+ with cols[0]:
+ st.markdown(f"""
+ **Dosage:** `{med.get('dosage', 'N/A')}`
+ **Frequency:** `{med.get('frequency', 'N/A')}`
+ """)
+
+ with cols[1]:
+ if verification := med.get("verification"):
+ if dosage := verification.get("standard_dosage"):
+ st.success(f"**Standard Dosage:** {dosage}")
+ if side_effects := verification.get("side_effects"):
+ st.warning(f"**Side Effects:** {side_effects}")
+ if interactions := verification.get("interactions"):
+ st.error(f"**Interactions:** {interactions}")
+
+# ─── Main Application ─────────────────────────────────────────────────────
+def main():
+ st.title("⚕️ RxGuard Prescription Validator")
+ st.caption("AI-powered prescription verification system")
+
+ show_service_status()
+
+ # Only enable upload if required services are available
+ if all([HF_TOKEN, GOOGLE_API_KEY, GEMINI_API_KEY]):
+ uploaded_file = st.file_uploader(
+ "Upload prescription image (PNG/JPG/JPEG):",
+ type=["png", "jpg", "jpeg"],
+ help="Clear image of the prescription"
+ )
+
+ if uploaded_file and uploaded_file.name != st.session_state.uploaded_filename:
+ with st.status("Analyzing prescription...", expanded=True) as status:
+ try:
+ # Store the uploaded file
+ st.session_state.uploaded_filename = uploaded_file.name
+ file_path = os.path.join(UPLOADS_DIR, uploaded_file.name)
+
+ with open(file_path, "wb") as f:
+ f.write(uploaded_file.getvalue())
+
+ # Import processing function only when needed
+ from validate_prescription import extract_prescription_info
+ st.session_state.analysis_result = extract_prescription_info(file_path)
+
+ status.update(label="Analysis complete!", state="complete", expanded=False)
+ except Exception as e:
+ st.error(f"Processing failed: {str(e)}")
+ st.session_state.analysis_result = {"error": str(e)}
+ status.update(label="Analysis failed", state="error")
+
+ # Display results if available
+ if st.session_state.analysis_result:
+ result = st.session_state.analysis_result
+
+ if result.get("error"):
+ st.error(f"❌ Error: {result['error']}")
+ else:
+ tab1, tab2 = st.tabs(["Patient Information", "Medication Details"])
+
+ with tab1:
+ if uploaded_file:
+ st.image(uploaded_file, use_column_width=True)
+ display_patient_info(result["info"])
+
+ with tab2:
+ display_medications(result["info"].get("Medications", []))
+
+ if st.toggle("Show technical details"):
+ st.json(result.get("debug_info", {}))
+
+if __name__ == "__main__":
+ main()
diff --git a/packages.txt b/packages.txt
new file mode 100644
index 0000000000000000000000000000000000000000..6b05dfc387ff0e73463dfa959682bf07609450a5
--- /dev/null
+++ b/packages.txt
@@ -0,0 +1,13 @@
+libgl1
+libgl1-mesa-glx
+libglib2.0-0
+tesseract-ocr
+tesseract-ocr-hin
+git
+git-lfs
+curl
+libssl-dev
+libffi-dev
+python3-dev
+build-essential
+libsqlite3-dev
diff --git a/config.py b/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..343c4451a3fc7af328e2367270426c6d8fd8dbbc
--- /dev/null
+++ b/config.py
@@ -0,0 +1,40 @@
+import os
+import torch
+from dotenv import load_dotenv
+
+load_dotenv()
+
+# ─── Directory Configuration ────────────────────────────────────────────────
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+STATIC_DIR = os.path.join(BASE_DIR, 'static')
+os.makedirs(STATIC_DIR, exist_ok=True)
+
+# ─── API Secrets ────────────────────────────────────────────────────────────
+HF_TOKEN = os.getenv("HUGGINGFACE_HUB_TOKEN")  # For Hugging Face models
+GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")  # For Gemini and Custom Search
+GOOGLE_CSE_ID = os.getenv("GOOGLE_CSE_ID")  # For medication verification
+GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")  # Alternative Gemini auth
+HALODOC_API_KEY = os.getenv("HALODOC_API_KEY")  # Future integration
+
+# ─── Model Configuration ────────────────────────────────────────────────────
+HF_MODELS = {
+    "donut": "naver-clova-ix/donut-base-finetuned-cord-v2",
+    "phi3": "microsoft/phi-3-mini-4k-instruct",
+}
+GEMINI_MODEL_NAME = "gemini-1.5-flash"  # Balanced for speed and accuracy
+
+# ─── Processing Parameters ─────────────────────────────────────────────────
+LEV_THRESH = 0.75  # Max normalized Levenshtein distance for a drug-name match
+SIG_THRESH = 0.65  # Signature verification threshold
+
+# ─── File Paths ───────────────────────────────────────────────────────────
+DB_PATH = os.path.join(STATIC_DIR, "rxguard.db")
+UPLOADS_DIR = os.path.join(STATIC_DIR, "uploads")
+os.makedirs(UPLOADS_DIR, exist_ok=True)
+INTERACTIONS_CSV = os.path.join(BASE_DIR, "data", "interactions.csv")
+RXNORM_CSV = os.path.join(BASE_DIR, "data", "rxnorm_names.csv")
+SIA_MODEL_PATH = os.path.join(BASE_DIR, "models", "signature_sia.tflite")
+
+# ─── Hardware Configuration ────────────────────────────────────────────────
+DEVICE = "cpu"  # Force CPU for Hugging Face Spaces compatibility
+USE_GPU = False
diff --git a/data/interactions.csv b/data/interactions.csv
new file mode 100644
index 0000000000000000000000000000000000000000..815ad5af3124eaa748c3d8e2dfa602097405abac
--- /dev/null
+++ b/data/interactions.csv
@@ -0,0 +1,3 @@
+cui1,cui2,severity,advice
+1191,6801,moderate,"Monitor blood glucose closely; aspirin may enhance the hypoglycemic effect of metformin."
+4241,1113,moderate,"Ascorbic acid can increase the absorption of iron. While often beneficial, monitor for iron overload in susceptible patients."
diff --git a/data/rxnorm.db b/data/rxnorm.db
new file mode 100644
index 0000000000000000000000000000000000000000..8dade48909ab53de6764c55a329191edb5f58cf5
Binary files /dev/null and b/data/rxnorm.db differ
diff --git a/data/rxnorm_names.csv b/data/rxnorm_names.csv
new file mode 100644
index 0000000000000000000000000000000000000000..ae175da09cf28ff9d578aedd38a0656bed86b60c
--- /dev/null
+++ b/data/rxnorm_names.csv
@@ -0,0 +1,13 @@
+name,cui
+Aspirin,1191
+Acetaminophen,161
+Amoxicillin,723
+Metformin,6801
+Lisinopril,29046
+Ferrous Sulfate,4241
+Ascorbic Acid,1113
+Calcium Carbonate / Vitamin D3,12345
+Clonazepam,2623
+Meganeuron,54321
+Creatine,9876
+Salbutamol,9648
diff --git a/drug_interaction_detection/__pycache__/interaction_checker.cpython-310.pyc b/drug_interaction_detection/__pycache__/interaction_checker.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..23488f8e8c168e82b9b2a111b51d7cc0e44c4488
Binary files /dev/null and b/drug_interaction_detection/__pycache__/interaction_checker.cpython-310.pyc differ
diff --git a/drug_interaction_detection/interaction_checker.py b/drug_interaction_detection/interaction_checker.py
new file mode 100644
index 0000000000000000000000000000000000000000..a2811c30b90979b4624356423271a102c44db5ba
--- /dev/null
+++ b/drug_interaction_detection/interaction_checker.py
@@ -0,0 +1,34 @@
+# drug_interaction_detection/interaction_checker.py
+import sqlite3
+from config import DB_PATH
+
+class InteractionChecker:
+ def __init__(self, db_path: str = DB_PATH):
+ self.conn = sqlite3.connect(db_path)
+ self.conn.row_factory = sqlite3.Row
+
+ def find(self, cuis: list[str]) -> list[dict]:
+ if len(cuis) < 2: return []
+
+ # Create a list of all unique pairs
+ pairs = []
+ for i in range(len(cuis)):
+ for j in range(i + 1, len(cuis)):
+ # Ensure consistent order for querying (e.g., smaller CUI first)
+ pair = tuple(sorted((cuis[i], cuis[j])))
+ pairs.append(pair)
+
+ if not pairs: return []
+
+ # Query for all pairs at once
+ placeholders = ", ".join(["(?, ?)"] * len(pairs))
+ flat_params = [item for pair in pairs for item in pair]
+
+ query = f"""
+ SELECT cui1, cui2, severity, advice
+ FROM interactions
+ WHERE (cui1, cui2) IN ({placeholders})
+ """
+
+ rows = self.conn.execute(query, flat_params).fetchall()
+ return [{"pair": (r["cui1"], r["cui2"]), "severity": r["severity"], "advice": r["advice"]} for r in rows]
diff --git a/earnest-trilogy-465710-e7-6cc7bbbddb97.json b/earnest-trilogy-465710-e7-6cc7bbbddb97.json
new file mode 100644
index 0000000000000000000000000000000000000000..c6c65f131b35215f28287dd21e763e114032755f
--- /dev/null
+++ b/earnest-trilogy-465710-e7-6cc7bbbddb97.json
@@ -0,0 +1,13 @@
+{
+ "type": "service_account",
+ "project_id": "earnest-trilogy-465710-e7",
+  "private_key_id": "REDACTED-COMPROMISED-KEY-MUST-BE-ROTATED",
+  "private_key": "-----BEGIN PRIVATE KEY-----\nREDACTED: the original key was committed to version control and is compromised; revoke and rotate it in GCP IAM, then load credentials from a secret manager or environment variable instead of the repository\n-----END PRIVATE KEY-----\n",
+ "client_email": "aihackathon@earnest-trilogy-465710-e7.iam.gserviceaccount.com",
+ "client_id": "102524806684057807383",
+ "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+ "token_uri": "https://oauth2.googleapis.com/token",
+ "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+ "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/aihackathon%40earnest-trilogy-465710-e7.iam.gserviceaccount.com",
+ "universe_domain": "googleapis.com"
+}
diff --git a/index.html b/index.html
deleted file mode 100644
index b6c5f0afafd6f2e009ed3d2c5de7351bcf7fb12d..0000000000000000000000000000000000000000
--- a/index.html
+++ /dev/null
@@ -1,13 +0,0 @@
-
-
-
-
-
-
- Vite + Svelte + TS
-
-
-
-
-
-
diff --git a/models/signature_sia.tflite b/models/signature_sia.tflite
new file mode 100644
index 0000000000000000000000000000000000000000..d9fa2bc3485c6b9dc476c2e5a80b4497f6880700
--- /dev/null
+++ b/models/signature_sia.tflite
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9ba80e2f5c688e9fd9fc766a85f266d0a47201def65265ef294168546a2814de
+size 804888
diff --git a/package.json b/package.json
deleted file mode 100644
index 9013b8ca459416df8386e8607bf202977c5442de..0000000000000000000000000000000000000000
--- a/package.json
+++ /dev/null
@@ -1,20 +0,0 @@
-{
- "name": "svelte",
- "private": true,
- "version": "0.0.0",
- "type": "module",
- "scripts": {
- "dev": "vite",
- "build": "vite build",
- "preview": "vite preview",
- "check": "svelte-check --tsconfig ./tsconfig.app.json && tsc -p tsconfig.node.json"
- },
- "devDependencies": {
- "@sveltejs/vite-plugin-svelte": "^5.0.3",
- "@tsconfig/svelte": "^5.0.4",
- "svelte": "^5.28.1",
- "svelte-check": "^4.1.6",
- "typescript": "~5.8.3",
- "vite": "^6.3.5"
- }
-}
diff --git a/prescription_validation/__pycache__/fuzzy_match.cpython-310.pyc b/prescription_validation/__pycache__/fuzzy_match.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..35aaef85b25ee925fe3c78c724014efa37d64a3e
Binary files /dev/null and b/prescription_validation/__pycache__/fuzzy_match.cpython-310.pyc differ
diff --git a/prescription_validation/fuzzy_match.py b/prescription_validation/fuzzy_match.py
new file mode 100644
index 0000000000000000000000000000000000000000..6e3ca2fa5d9bf2bea8c56426d867a5debae49bc5
--- /dev/null
+++ b/prescription_validation/fuzzy_match.py
@@ -0,0 +1,44 @@
+# prescription_validation/fuzzy_match.py
+import sqlite3
+import re
+from rapidfuzz.distance import Levenshtein
+from config import DB_PATH, LEV_THRESH
+
+class RxLookup:
+ def __init__(self, db_path: str = DB_PATH):
+ self.conn = sqlite3.connect(db_path)
+ self.conn.row_factory = sqlite3.Row
+ self.drugs = self.conn.execute("SELECT name, cui FROM drugs").fetchall()
+
+ def _clean_token(self, token: str) -> str:
+ """Removes dosage, form factor, and non-alpha characters."""
+ cleaned = token.lower()
+ cleaned = re.sub(r'(\d+)\s*(mg|ml|mcg|tab|cap|#)', '', cleaned)
+ cleaned = re.sub(r'[^a-z]', '', cleaned)
+ return cleaned
+
+ def match(self, token: str) -> tuple[str | None, str | None]:
+ if not token:
+ return (None, None)
+
+ cleaned_token = self._clean_token(token)
+ if not cleaned_token:
+ return (None, None)
+
+ best_match = None
+ min_distance = float('inf')
+
+ for row in self.drugs:
+ name, cui = row["name"], row["cui"]
+ cleaned_db_name = self._clean_token(name)
+
+ distance = Levenshtein.distance(cleaned_token, cleaned_db_name)
+
+ if distance < min_distance:
+ min_distance = distance
+ best_match = (name, cui)
+
+ if best_match and min_distance / len(cleaned_token) < LEV_THRESH:
+ return best_match
+
+ return (None, None)
diff --git a/prescription_validation/zone_detector.py b/prescription_validation/zone_detector.py
new file mode 100644
index 0000000000000000000000000000000000000000..9c1b69adb7cd2a3bfa329a782793bbf79db95f24
--- /dev/null
+++ b/prescription_validation/zone_detector.py
@@ -0,0 +1,27 @@
+# prescription_validation/zone_detector.py
+import cv2
+import numpy as np
+from ultralytics import YOLO
+
+class ZoneDetector:
+ def __init__(self, model_path: str = "models/signature_model.pt"):
+ try:
+ self.model = YOLO(model_path)
+ print(f"✅ Loaded local YOLO model from '{model_path}'")
+ except Exception as e:
+ self.model = None
+            print(f"❌ Failed to load local YOLO model from '{model_path}': {e}. Please ensure the model file exists.")
+
+ def detect(self, img: np.ndarray) -> list[dict]:
+ if not self.model: return []
+ rgb_img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
+ results = self.model(rgb_img, verbose=False)
+ detections = []
+ for box in results[0].boxes:
+ x1, y1, x2, y2 = box.xyxy[0].tolist()
+ class_id = int(box.cls[0])
+ detections.append({
+ "label": self.model.names[class_id],
+ "bbox": tuple(map(int, [x1, y1, x2, y2])),
+ })
+ return detections
diff --git a/public/vite.svg b/public/vite.svg
deleted file mode 100644
index e7b8dfb1b2a60bd50538bec9f876511b9cac21e3..0000000000000000000000000000000000000000
--- a/public/vite.svg
+++ /dev/null
@@ -1 +0,0 @@
-
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..65260cecff634926fac4fe3ad07852200fc38098
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,29 @@
+# Core
+streamlit==1.36.0
+python-dotenv==1.0.1
+
+# AI & Vision
+# Using Google's recommended versions for Gemini and Vision
+google-generativeai==0.7.1
+google-cloud-vision==3.7.3
+google-api-python-client==2.137.0
+torch==2.3.1
+pillow==10.3.0
+transformers==4.41.0
+huggingface-hub==0.23.4
+tensorflow-cpu==2.16.1
+ultralytics==8.2.48
+
+# OCR
+paddleocr==2.7.3
+# Using the CPU version of paddlepaddle for broader compatibility on HF Spaces
+paddlepaddle==2.6.1
+
+# Utils
+numpy==1.26.4
+requests==2.32.3
+opencv-python-headless==4.10.0.84
+scikit-image==0.22.0
+pytz==2024.1
+rapidfuzz==3.9.4
+Metaphone==0.6
diff --git a/scripts/build_interactions_db.py b/scripts/build_interactions_db.py
new file mode 100644
index 0000000000000000000000000000000000000000..c998337b601e5d6ad1384ced042eeb093615f8b4
--- /dev/null
+++ b/scripts/build_interactions_db.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python3
+import sqlite3, csv, argparse
+import sys, os
+sys.path.append(os.getcwd())
+from config import DB_PATH, INTERACTIONS_CSV
+
+def build_db(db_path: str, csv_path: str):
+ conn = sqlite3.connect(db_path)
+ c = conn.cursor()
+ c.execute("DROP TABLE IF EXISTS interactions")
+ c.execute("CREATE TABLE interactions(cui1 TEXT, cui2 TEXT, severity TEXT, advice TEXT, UNIQUE(cui1, cui2))")
+
+ with open(csv_path, newline='', encoding='utf-8') as f:
+ reader = csv.DictReader(f)
+ for row in reader:
+ # Ensure consistent order to avoid duplicate entries like (A,B) and (B,A)
+ cui1, cui2 = sorted((row['cui1'], row['cui2']))
+ c.execute(
+ "INSERT OR IGNORE INTO interactions(cui1, cui2, severity, advice) VALUES(?,?,?,?)",
+ (cui1, cui2, row['severity'], row['advice'])
+ )
+ conn.commit()
+ conn.close()
+
+if __name__ == "__main__":
+ build_db(DB_PATH, INTERACTIONS_CSV)
+ print(f"✅ Interactions DB built at {DB_PATH}")
diff --git a/scripts/build_rxnorm_db.py b/scripts/build_rxnorm_db.py
new file mode 100644
index 0000000000000000000000000000000000000000..fda5823aade24623c0a86d8bae700ee9dab00389
--- /dev/null
+++ b/scripts/build_rxnorm_db.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python3
+import sqlite3, csv, argparse
+from metaphone import doublemetaphone
+import sys, os
+sys.path.append(os.getcwd())
+from config import DB_PATH, RXNORM_CSV
+
+def build_db(db_path: str, csv_path: str):
+ conn = sqlite3.connect(db_path)
+ c = conn.cursor()
+ c.execute("DROP TABLE IF EXISTS drugs")
+ c.execute("DROP TABLE IF EXISTS metaphone")
+ c.execute("CREATE TABLE drugs(name TEXT PRIMARY KEY, cui TEXT)")
+ c.execute("CREATE TABLE metaphone(meta TEXT, name TEXT, cui TEXT)")
+
+ with open(csv_path, newline='', encoding='utf-8') as f:
+ reader = csv.DictReader(f)
+ for row in reader:
+ nm, cui = row['name'], row['cui']
+ c.execute("INSERT OR IGNORE INTO drugs VALUES(?,?)", (nm, cui))
+ meta = doublemetaphone(nm)[0]
+ c.execute("INSERT INTO metaphone VALUES(?,?,?)", (meta, nm, cui))
+ conn.commit()
+ conn.close()
+
+if __name__ == "__main__":
+ build_db(DB_PATH, RXNORM_CSV)
+ print(f"✅ RxNorm DB built at {DB_PATH}")
diff --git a/scripts/create_dummy_signature_model.py b/scripts/create_dummy_signature_model.py
new file mode 100644
index 0000000000000000000000000000000000000000..06c4305c42693f9e5625dd44401a4461a2b25694
--- /dev/null
+++ b/scripts/create_dummy_signature_model.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python3
+import os
+import tensorflow as tf
+from tensorflow.keras.layers import Input, Flatten, Dense, Subtract
+from tensorflow.keras.models import Model
+import sys
+sys.path.append(os.getcwd())
+from config import SIA_MODEL_PATH
+
+def build_dummy_siamese():
+ inp_a = Input(shape=(224, 224, 1), name="img_a")
+ inp_b = Input(shape=(224, 224, 1), name="img_b")
+ encoder_input = Input(shape=(224, 224, 1))
+ x = Flatten()(encoder_input)
+ x = Dense(16, activation="relu")(x)
+ encoder = Model(encoder_input, x)
+ encoded_a = encoder(inp_a)
+ encoded_b = encoder(inp_b)
+ distance = Subtract()([encoded_a, encoded_b])
+ model = Model(inputs=[inp_a, inp_b], outputs=distance)
+ return model
+
+if __name__ == "__main__":
+ print("Building and converting dummy Siamese model...")
+ model = build_dummy_siamese()
+ converter = tf.lite.TFLiteConverter.from_keras_model(model)
+ converter.optimizations = [tf.lite.Optimize.DEFAULT]
+ tflite_model = converter.convert()
+ os.makedirs(os.path.dirname(SIA_MODEL_PATH), exist_ok=True)
+ with open(SIA_MODEL_PATH, "wb") as f:
+ f.write(tflite_model)
+ print(f"✅ Dummy TFLite signature model saved to '{SIA_MODEL_PATH}'")
diff --git a/signature_verification/__pycache__/signature_detector.cpython-310.pyc b/signature_verification/__pycache__/signature_detector.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..748c4c72e8cc998ca1165e3b5435b2a64365acbe
Binary files /dev/null and b/signature_verification/__pycache__/signature_detector.cpython-310.pyc differ
diff --git a/signature_verification/__pycache__/signature_generator.cpython-310.pyc b/signature_verification/__pycache__/signature_generator.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4fe60963b121626f9e55b0db8a21eb62eb57585a
Binary files /dev/null and b/signature_verification/__pycache__/signature_generator.cpython-310.pyc differ
diff --git a/signature_verification/__pycache__/signature_siamese.cpython-310.pyc b/signature_verification/__pycache__/signature_siamese.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3930e68ce53f1aebf91c7c6d91a13825230b135e
Binary files /dev/null and b/signature_verification/__pycache__/signature_siamese.cpython-310.pyc differ
diff --git a/signature_verification/signature_detector.py b/signature_verification/signature_detector.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a70ee57de4f18d2e8087344cd359432d920176f
--- /dev/null
+++ b/signature_verification/signature_detector.py
@@ -0,0 +1,40 @@
+# signature_verification/signature_detector.py
+import cv2
+import numpy as np
+
+class SignatureDetector:
+ """
+ Detects and crops a signature from a prescription image using refined OpenCV contour analysis.
+ This method is fast, offline, and does not require a pre-trained model.
+ """
+ def crop(self, img: np.ndarray) -> np.ndarray | None:
+ if img is None: return None
+
+ gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
+ blurred = cv2.GaussianBlur(gray, (5, 5), 0)
+ _, thresh = cv2.threshold(blurred, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)
+
+ contours, _ = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
+ if not contours: return None
+
+ height, width = img.shape[:2]
+ significant_contours = []
+ for cnt in contours:
+ x, y, w, h = cv2.boundingRect(cnt)
+ # Filter for contours that are reasonably large and in the bottom 70% of the page
+ if w > 50 and h > 15 and y > height * 0.3:
+ if w < width * 0.8 and h < height * 0.4:
+ significant_contours.append(cnt)
+
+ if not significant_contours: return None
+
+ largest_contour = max(significant_contours, key=cv2.contourArea)
+ x, y, w, h = cv2.boundingRect(largest_contour)
+
+ padding = 15
+ x1 = max(0, x - padding)
+ y1 = max(0, y - padding)
+ x2 = min(width, x + w + padding)
+ y2 = min(height, y + h + padding)
+
+ return img[y1:y2, x1:x2]
diff --git a/signature_verification/signature_generator.py b/signature_verification/signature_generator.py
new file mode 100644
index 0000000000000000000000000000000000000000..1fa10070cec7fe3df3f9ed3aed2e71034d65ee84
--- /dev/null
+++ b/signature_verification/signature_generator.py
@@ -0,0 +1,50 @@
+import os
+import logging
+import numpy as np
+import cv2
+from dotenv import load_dotenv
+from huggingface_hub import InferenceClient, HfFolder
+from PIL import Image
+
+load_dotenv()
+
+# Acquire token
+hf_token = os.getenv("HUGGINGFACE_HUB_TOKEN") or os.getenv("HUGGING_FACE_HUB_TOKEN") or HfFolder.get_token()
+if not hf_token:
+ print("❌ No Hugging Face token found. Signature generation will fail.")
+ CLIENT = None
+else:
+ print("✅ Using Hugging Face token for signature generation.")
+ CLIENT = InferenceClient(token=hf_token)
+
+MODEL_ID = "stabilityai/stable-diffusion-xl-base-1.0"
+
+def generate_signatures(name: str, num_variations: int = 3) -> list:
+ """Generates multiple signature variations for a given name."""
+ if CLIENT is None:
+ return []
+
+ prompts = [
+ f"A clean, elegant, handwritten signature of the name '{name}' on a plain white background. Cursive, professional.",
+ f"Calligraphy signature of '{name}'. Black ink on white paper. Minimalist, artistic.",
+ f"A doctor's signature for '{name}'. Quick, scribbled, but legible. Official-looking script."
+ ]
+ images = []
+ for i in range(num_variations):
+ prompt = prompts[i % len(prompts)]
+ logging.info(f"Generating signature variation {i+1} for '{name}'…")
+ try:
+ pil_img = CLIENT.text_to_image(
+ prompt,
+ model=MODEL_ID,
+ negative_prompt=(
+ "photograph, text, multiple signatures, watermark, blurry, colorful, background"
+ ),
+ guidance_scale=8.0
+ )
+ np_img = np.array(pil_img)
+ cv_img = cv2.cvtColor(np_img, cv2.COLOR_RGB2BGR)
+ images.append(cv_img)
+ except Exception as e:
+ logging.error(f"❌ Signature generation failed: {e}")
+ return images
diff --git a/signature_verification/signature_siamese.py b/signature_verification/signature_siamese.py
new file mode 100644
index 0000000000000000000000000000000000000000..bcc4ec564b895113d4ac65555f1e9b84e52bbe09
--- /dev/null
+++ b/signature_verification/signature_siamese.py
@@ -0,0 +1,34 @@
+# signature_verification/signature_siamese.py
+import cv2
+import numpy as np
+import tensorflow as tf
+from config import SIA_MODEL_PATH, SIG_THRESH
+
+class SignatureSiamese:
+ def __init__(self, model_path: str = SIA_MODEL_PATH):
+ try:
+ self.interp = tf.lite.Interpreter(model_path=model_path)
+ self.interp.allocate_tensors()
+ ids = self.interp.get_input_details()
+ self.in1, self.in2 = ids[0]['index'], ids[1]['index']
+ self.out_idx = self.interp.get_output_details()[0]['index']
+ print(f"✅ Loaded TFLite Siamese model from {model_path}")
+ except Exception as e:
+ self.interp = None
+ print(f"❌ Failed to load Siamese model from {model_path}: {e}")
+
+ def _prep(self, img: np.ndarray) -> np.ndarray:
+ gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) if img.ndim == 3 else img
+ r = cv2.resize(gray, (224, 224))
+ return r.reshape(1, 224, 224, 1).astype(np.float32) / 255.0
+
+ def verify(self, img1: np.ndarray, img2: np.ndarray) -> tuple[bool, float]:
+ if not self.interp:
+ return False, float('inf')
+ inp1, inp2 = self._prep(img1), self._prep(img2)
+ self.interp.set_tensor(self.in1, inp1)
+ self.interp.set_tensor(self.in2, inp2)
+ self.interp.invoke()
+ diff = self.interp.get_tensor(self.out_idx)
+ dist = np.linalg.norm(diff)
+ return dist < SIG_THRESH, dist
diff --git a/src/App.svelte b/src/App.svelte
deleted file mode 100644
index f75b68aa25d9c715d2c7fa808b24516bf89e0f11..0000000000000000000000000000000000000000
--- a/src/App.svelte
+++ /dev/null
@@ -1,47 +0,0 @@
-
-
-
-