# app.py
import os
import time
import re
import requests
import phonenumbers
import pandas as pd
import urllib.parse
from bs4 import BeautifulSoup
import torch
from transformers import (
    AutoTokenizer,
    AutoModelForTokenClassification,
    AutoModelForSeq2SeqLM,
    pipeline
)
import gradio as gr
from concurrent.futures import ThreadPoolExecutor, as_completed
from email.message import EmailMessage
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

# ============================
# CONFIG (ENV VARS recommended)
# ============================
# IMPORTANT: set these as Space "Secrets" (see README below)
API_KEY = os.environ.get("GOOGLE_API_KEY", "YOUR_GOOGLE_API_KEY")
CX = os.environ.get("GOOGLE_CSE_ID", "YOUR_CSE_ID")

DEFAULT_COUNTRY = "Ghana"
RESULTS_PER_QUERY = int(os.environ.get("RESULTS_PER_QUERY", 4))
MAX_SCRAPE_WORKERS = int(os.environ.get("MAX_SCRAPE_WORKERS", 6))
ALLY_AI_NAME = os.environ.get("ALLY_AI_NAME", "Ally AI Assistant")
ALLY_AI_LOGO_URL_DEFAULT = os.environ.get("ALLY_AI_LOGO_URL", "https://i.ibb.co/7nZqz0H/ai-logo.png")

# Optional country maps for search bias & phone parsing
COUNTRY_TLD_MAP = {"Ghana": "gh", "Nigeria": "ng", "Kenya": "ke", "South Africa": "za", "USA": "us", "United Kingdom": "uk"}
COUNTRY_REGION_MAP = {"Ghana": "GH", "Nigeria": "NG", "Kenya": "KE", "South Africa": "ZA", "USA": "US", "United Kingdom": "GB"}

# HTTP + Regex
HEADERS = {"User-Agent": "Mozilla/5.0 (X11; Linux x86_64)"}
EMAIL_REGEX = re.compile(r"[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}")

# ============================
# MODELS (lightweight & CPU-friendly)
# ============================
DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
print("Device set to use", DEVICE)

# NER model (people/orgs/locs)
ner_model_id = "dslim/bert-base-NER"
ner_tokenizer = AutoTokenizer.from_pretrained(ner_model_id)
ner_model = AutoModelForTokenClassification.from_pretrained(ner_model_id)
ner_pipe = pipeline(
    "ner",
    model=ner_model,
    tokenizer=ner_tokenizer,
    aggregation_strategy="simple",
    device=0 if DEVICE == "cuda" else -1,
)

# Summarizer / anonymizer
text_model_id = "google/flan-t5-large"
text_tokenizer = AutoTokenizer.from_pretrained(text_model_id)
text_model = AutoModelForSeq2SeqLM.from_pretrained(text_model_id).to(DEVICE)

# ============================
# TAXONOMY & HELPERS
# ============================
PROFESSION_KEYWORDS = ["lawyer", "therapist", "doctor", "counselor", "social worker",
                       "advocate", "psychologist", "psychiatrist", "consultant", "nurse", "hotline", "gbv"]

PROBLEM_PROFESSION_MAP = {
    "rape": ["lawyer", "therapist", "counselor", "doctor"],
    "sexual assault": ["lawyer", "therapist", "counselor"],
    "domestic violence": ["lawyer", "social worker", "therapist"],
    "abuse": ["counselor", "social worker", "therapist", "lawyer"],
    "trauma": ["therapist", "psychologist", "psychiatrist"],
    "depression": ["therapist", "psychologist", "doctor"],
    "violence": ["lawyer", "counselor", "social worker"],
}


def get_region_for_country(country: str) -> str:
    return COUNTRY_REGION_MAP.get(country, "GH")


def get_tld_for_country(country: str) -> str:
    return COUNTRY_TLD_MAP.get(country, "")


def build_country_biased_query(core: str, country: str) -> str:
    tld = get_tld_for_country(country)
    suffix = f" in {country}"
    if tld:
        return f"{core}{suffix} site:.{tld} OR {country}"
    return f"{core}{suffix}"


def dedup_by_url(items):
    seen, out = set(), []
    for it in items:
        u = it.get("link") or it.get("url")
        if u and u not in seen:
            seen.add(u)
            out.append(it)
    return out
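# Illustrative traces of the helpers above (not executed at import time; the inputs
# are hypothetical and the outputs follow from the maps defined in this file):
#
#   build_country_biased_query("lawyer for GBV", "Ghana")
#   -> "lawyer for GBV in Ghana site:.gh OR Ghana"
#
#   build_country_biased_query("lawyer for GBV", "Rwanda")   # country not in COUNTRY_TLD_MAP
#   -> "lawyer for GBV in Rwanda"
#
#   dedup_by_url([{"link": "https://a.org"}, {"link": "https://a.org"}, {"link": "https://b.org"}])
#   -> [{"link": "https://a.org"}, {"link": "https://b.org"}]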
# ============================
# SEARCH & SCRAPING
# ============================
def google_search(query, num_results=5):
    if not API_KEY or not CX or "YOUR_GOOGLE_API_KEY" in API_KEY or "YOUR_CSE_ID" in CX:
        raise RuntimeError("Google API key and CSE ID must be set as environment variables.")
    url = "https://www.googleapis.com/customsearch/v1"
    params = {"q": query, "key": API_KEY, "cx": CX, "num": num_results}
    r = requests.get(url, params=params, timeout=20)
    r.raise_for_status()
    items = r.json().get("items", []) or []
    return [{"title": i.get("title", ""), "link": i.get("link", ""), "snippet": i.get("snippet", "")} for i in items]


def extract_phones(text, region="GH"):
    phones = []
    for match in phonenumbers.PhoneNumberMatcher(text, region):
        try:
            phones.append(phonenumbers.format_number(match.number, phonenumbers.PhoneNumberFormat.INTERNATIONAL))
        except Exception:
            pass
    return list(set(phones))


def scrape_contacts(url, region="GH"):
    try:
        res = requests.get(url, headers=HEADERS, timeout=12)
        if not res.ok or not res.text:
            return {"emails": [], "phones": []}
        text = BeautifulSoup(res.text, "html.parser").get_text(separator=" ")
        text = " ".join(text.split())[:300000]
        emails = list(set(EMAIL_REGEX.findall(text)))
        phones = extract_phones(text, region)
        return {"emails": emails, "phones": phones}
    except Exception as e:
        print(f"[scrape error] {url} -> {e}")
        return {"emails": [], "phones": []}


# ============================
# NER + STORY → PROFESSIONS
# ============================
def extract_entities(text):
    if not text:
        return [], [], []
    try:
        ner_results = ner_pipe(text)
    except Exception as e:
        print("[ner error]", e)
        return [], [], []
    people = [e["word"] for e in ner_results if e.get("entity_group") == "PER"]
    orgs = [e["word"] for e in ner_results if e.get("entity_group") == "ORG"]
    locs = [e["word"] for e in ner_results if e.get("entity_group") == "LOC"]
    return list(set(people)), list(set(orgs)), list(set(locs))


def professions_from_story(story: str):
    s = (story or "").lower()
    found = set([p for p in PROFESSION_KEYWORDS if p in s])
    for prob, profs in PROBLEM_PROFESSION_MAP.items():
        if prob in s:
            found.update(profs)
    if not found:
        return ["gbv", "counselor"]
    order = ["lawyer", "therapist", "counselor", "social worker", "psychologist",
             "psychiatrist", "doctor", "advocate", "nurse", "hotline", "gbv"]
    return [p for p in order if p in found]


def build_queries(story: str, country: str):
    profs = professions_from_story(story)
    cores = []
    for p in profs:
        if p == "gbv":
            cores += ["GBV support organizations", "gender based violence help"]
        else:
            cores += [f"{p} for GBV", f"{p} for sexual assault"]
    unique_cores, seen = [], set()
    for c in cores:
        if c not in seen:
            unique_cores.append(c)
            seen.add(c)
    return [build_country_biased_query(core, country) for core in unique_cores], profs
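# Illustrative trace of the story-to-query mapping (hypothetical input; the outputs
# follow deterministically from the taxonomy and helpers above):
#
#   professions_from_story("I am facing domestic violence and need legal help")
#   -> ["lawyer", "therapist", "counselor", "social worker"]
#
#   queries, profs = build_queries("I am facing domestic violence and need legal help", "Ghana")
#   queries[:2]
#   -> ["lawyer for GBV in Ghana site:.gh OR Ghana",
#       "lawyer for sexual assault in Ghana site:.gh OR Ghana"]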
# ============================
# TEXT GEN: anonymize + result summary
# ============================
def anonymize_story(story: str, max_sentences: int = 2):
    if not story or not story.strip():
        return ""
    prompt = (
        "Anonymize and shorten the following personal story for contacting professionals. "
        "Remove names, exact ages, dates, locations and any identifying details. "
        f"Keep only the essential problem and the type of help requested. Output <= {max_sentences} sentences.\n\n"
        f"Story: {story}\n\nSummary:"
    )
    # truncation=True guards against stories longer than the model's input window
    inputs = text_tokenizer(prompt, return_tensors="pt", truncation=True).to(DEVICE)
    with torch.no_grad():
        outputs = text_model.generate(**inputs, max_new_tokens=120, temperature=0.2)
    return text_tokenizer.decode(outputs[0], skip_special_tokens=True).strip()


def generate_summary(query, people, orgs, locs):
    prompt = (
        "Write a short, empathetic summary of these search results for a person seeking GBV help.\n"
        f"Query: {query}\nPeople: {', '.join(people) or '—'}\nOrgs: {', '.join(orgs) or '—'}\nLocations: {', '.join(locs) or '—'}\n\n"
        "Explain how the organizations/professionals can help in 3-4 sentences."
    )
    inputs = text_tokenizer(prompt, return_tensors="pt", truncation=True).to(DEVICE)
    with torch.no_grad():
        outputs = text_model.generate(**inputs, max_new_tokens=150, temperature=0.7)
    return text_tokenizer.decode(outputs[0], skip_special_tokens=True).strip()
# ============================
# MAIN PIPELINE
# ============================
def find_professionals_from_story(story, country=DEFAULT_COUNTRY, results_per_query=RESULTS_PER_QUERY):
    region = get_region_for_country(country)
    queries, profs = build_queries(story, country)

    # Search
    search_results = []
    for q in queries:
        try:
            items = google_search(q, num_results=results_per_query)
            for it in items:
                it["query"] = q
            search_results.extend(items)
        except Exception as e:
            print("[search error]", q, e)

    search_results = dedup_by_url(search_results)
    if not search_results:
        return {"summary": "No results found. Try a different country or wording.",
                "professionals": [], "queries_used": queries}

    # NER on titles/snippets
    all_people, all_orgs, all_locs = [], [], []
    for r in search_results:
        ctx = f"{r.get('title','')}. {r.get('snippet','')}"
        p, o, l = extract_entities(ctx)
        all_people += p
        all_orgs += o
        all_locs += l

    # Scrape contacts concurrently
    professionals = []
    with ThreadPoolExecutor(max_workers=MAX_SCRAPE_WORKERS) as ex:
        futures = {ex.submit(scrape_contacts, r["link"], region): r for r in search_results}
        for fut in as_completed(futures):
            r = futures[fut]
            contacts = {"emails": [], "phones": []}
            try:
                contacts = fut.result()
            except Exception as e:
                print("[scrape future error]", r["link"], e)
            professionals.append({
                "title": r.get("title", ""),
                "url": r.get("link", ""),
                "email": contacts["emails"][0] if contacts["emails"] else "Not found",
                "phone": contacts["phones"][0] if contacts["phones"] else "Not found",
                "source_query": r.get("query", "")
            })

    summary = generate_summary("; ".join(queries[:3]) + (" ..." if len(queries) > 3 else ""),
                               list(set(all_people)), list(set(all_orgs)), list(set(all_locs)))

    # Sort by availability of email/phone
    professionals.sort(key=lambda it: (0 if it["email"] != "Not found" else 1,
                                       0 if it["phone"] != "Not found" else 1))

    return {"summary": summary, "professionals": professionals, "queries_used": queries}


# ============================
# DRAFT (mailto + .eml)
# ============================
def build_mailto_and_eml(to_addr, subject, body, default_from="noreply@ally.ai"):
    msg = EmailMessage()
    msg["From"] = default_from
    msg["To"] = to_addr
    msg["Subject"] = subject
    msg.set_content(body)

    # ✅ Save to a writable directory (current working dir or "tmp")
    os.makedirs("tmp", exist_ok=True)
    fname = os.path.join("tmp", f"email_draft_{int(time.time())}.eml")
    with open(fname, "wb") as f:
        f.write(msg.as_bytes())

    # Create mailto link (URL-encode subject and body so spaces and newlines survive)
    mailto = f"mailto:{to_addr}?subject={urllib.parse.quote(subject or '')}&body={urllib.parse.quote(body or '')}"
    return mailto, fname
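# Illustrative example of the draft helper (hypothetical addresses; the .eml filename
# contains a Unix timestamp, shown here as <ts>):
#
#   build_mailto_and_eml("org@example.org", "Request for GBV support", "Hello")
#   -> ("mailto:org@example.org?subject=Request%20for%20GBV%20support&body=Hello",
#       "tmp/email_draft_<ts>.eml")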
# ============================
# SENDER (SMTP) — Ally AI branding
# ============================
def send_ally_ai_email(to_email, subject, body, user_email,
                       sender_email, sender_password,
                       ai_name=ALLY_AI_NAME, logo_url=ALLY_AI_LOGO_URL_DEFAULT):
    """
    Sends an HTML email branded as Ally AI.

    to_email: recipient (organization)
    subject: subject line
    body: main message (already anonymized or full text)
    user_email: survivor's email (included for reply inside body)
    sender_email/sender_password: SMTP credentials (use Gmail App Password with Gmail)
    """
    if not to_email or to_email == "Not found":
        return "❌ No recipient email found — choose a contact with an email."

    msg = MIMEMultipart("alternative")
    msg["Subject"] = subject or "Request for support"
    msg["From"] = f"{ai_name} <{sender_email}>"
    msg["To"] = to_email

    html_content = f"""
    <div style="font-family: Arial, sans-serif;">
        <img src="{logo_url}" alt="{ai_name} Logo" height="60" />
        <p>{body}</p>
        <p>Contact the survivor back at: {user_email}</p>
        <hr>
        <p><small>This message was prepared with the help of {ai_name} — connecting survivors with help safely.</small></p>
    </div>
    """
    msg.attach(MIMEText(html_content, "html"))

    try:
        server = smtplib.SMTP("smtp.gmail.com", 587)
        server.starttls()
        server.login(sender_email, sender_password)  # Gmail App Password recommended
        server.sendmail(sender_email, [to_email], msg.as_string())
        server.quit()
        return f"✅ Email sent successfully to {to_email}"
    except Exception as e:
        return f"❌ Failed to send email: {str(e)}"


# ============================
# GRADIO UI
# ============================
# ------- Replace existing run_search and _on_search with these -------
def run_search(story, country):
    """
    Robust search wrapper: returns (summary, table_records, dropdown_options, anonymized_text).
    Avoids returning gr.update(...) to prevent KeyError during serialization.
    """
    try:
        out = find_professionals_from_story(story, country=country, results_per_query=RESULTS_PER_QUERY)
    except Exception as e:
        err_msg = f"Search failed: {e}"
        placeholder = ["0 — No results (search failed)"]
        return err_msg, [], placeholder, ""

    pros = out.get("professionals", []) or []

    # build table records
    try:
        records = pd.DataFrame(pros).to_dict(orient="records") if pros else []
    except Exception:
        records = []

    # build dropdown options as list of strings (guarantee at least one)
    options = []
    for i, r in enumerate(pros):
        label_contact = r.get("email") if r.get("email") and r.get("email") != "Not found" else (r.get("phone", "No contact"))
        title = r.get("title") or r.get("url") or "(no title)"
        label = f"{i} — {title} ({label_contact})"
        options.append(label)
    if not options:
        options = ["0 — No results (try a different country/query)"]

    # anonymize safely
    try:
        anon = anonymize_story(story) or "I am seeking confidential support regarding gender-based violence."
    except Exception as e:
        print("[anonymize error]", e)
        anon = "I am seeking confidential support regarding gender-based violence."

    summary = out.get("summary", "No results found.")
    return summary, records, options, anon


def _on_search(story, country):
    """
    Function wired to the search button. Returns exactly 5 outputs to match:
    [summary_out, results_table, dropdown_sel, anon_out, message_in]
    """
    summary, records, options, anon = run_search(story, country)
    # pre-fill message body with anonymized text (user email left empty for now)
    prefill = make_body(anon, story, True, "")
    # Return plain serializable values (not gr.update)
    #   summary -> str
    #   records -> list[dict] (or [])
    #   options -> list[str] for dropdown (Gradio will accept it)
    #   anon    -> str
    #   prefill -> str (message body)
    return summary, records, options, anon, prefill


def make_body(anon_text, full_story, use_anon, user_email):
    core = (anon_text or "").strip() if use_anon else (full_story or "").strip()
    # polite template with user email included in body
    lines = [
        core,
        "",
        f"Reply contact: {user_email}",
        "",
        "Thank you."
    ]
    return "\n".join([l for l in lines if l is not None])
def preview_contact(dropdown_value, df_json, subject, message_text):
    if not dropdown_value:
        return "No contact selected.", ""
    try:
        idx = int(str(dropdown_value).split(" — ")[0])
        rows = pd.DataFrame(df_json)
        contact = rows.iloc[idx].to_dict()
        recipient = contact.get("email") if contact.get("email") and contact.get("email") != "Not found" else "[no email]"
        html = f"""
        <div style="font-family: Arial, sans-serif;">
            <h4>Preview</h4>
            <p><b>To:</b> {recipient}<br>
               <b>Organization:</b> {contact.get('title')}<br>
               <b>Subject:</b> {subject}</p>
            <p>{message_text}</p>
        </div>
        """
        text = f"To: {recipient}\nSubject: {subject}\n\n{message_text[:600]}{'...' if len(message_text) > 600 else ''}"
        return text, html
    except Exception as e:
        return f"Preview error: {e}", ""


def confirm_action(mode, dropdown_value, df_json, subject, message_text,
                   user_email, sender_email, sender_password, logo_url):
    """
    mode: "Draft only" or "Send via SMTP (Gmail)"
    """
    if not dropdown_value:
        return "❌ No contact selected.", "", None

    # locate contact
    try:
        idx = int(str(dropdown_value).split(" — ")[0])
        rows = pd.DataFrame(df_json)
        contact = rows.iloc[idx].to_dict()
    except Exception as e:
        return f"❌ Selection error: {e}", "", None

    recipient = contact.get("email")

    if mode.startswith("Send"):
        # Validate required fields
        if not recipient or recipient == "Not found":
            return "❌ This contact has no email address. Choose another contact.", "", None
        if not user_email or "@" not in user_email:
            return "❌ Please enter your email (so the organisation can contact you).", "", None
        if not sender_email or not sender_password:
            return "❌ Sender email and app password are required for SMTP sending.", "", None

        status = send_ally_ai_email(
            to_email=recipient,
            subject=subject,
            body=message_text,
            user_email=user_email,
            sender_email=sender_email,
            sender_password=sender_password,
            ai_name=ALLY_AI_NAME,
            logo_url=logo_url or ALLY_AI_LOGO_URL_DEFAULT
        )
        # also provide an .eml draft copy (optional)
        _, eml_path = build_mailto_and_eml(recipient, subject, message_text, default_from=sender_email)
        file_out = eml_path if eml_path and os.path.exists(eml_path) else None
        return status, "", file_out
    else:
        # Draft-only path
        recip_for_draft = recipient if (recipient and recipient != "Not found") else ""
        mailto, eml_path = build_mailto_and_eml(recip_for_draft, subject, message_text, default_from="noreply@ally.ai")
        html_link = f'<a href="{mailto}" target="_blank">Open draft in email client</a>'
        file_out = eml_path if eml_path and os.path.exists(eml_path) else None
        return "✅ Draft created (no email sent).", html_link, file_out
with gr.Blocks() as demo:
    gr.Markdown("## Ally AI — GBV Help Finder & Email Assistant\n"
                "This tool searches local organizations, lets you select a contact, and creates an email draft or sends a branded email via SMTP.\n"
                "**Privacy tip:** Prefer anonymized summaries unless you’re comfortable sharing details.")

    with gr.Row():
        story_in = gr.Textbox(label="Your story (free text)", lines=6,
                              placeholder="Describe your situation and the help you want...")
        country_in = gr.Textbox(value=DEFAULT_COUNTRY, label="Country (to bias search)")

    search_btn = gr.Button("Search for professionals")
    summary_out = gr.Textbox(label="Search summary (AI)", interactive=False)
    results_table = gr.Dataframe(headers=["title", "url", "email", "phone", "source_query"], label="Search results")
    dropdown_sel = gr.Dropdown(label="Select organization (from results)", choices=[])

    with gr.Row():
        use_anon = gr.Checkbox(value=True, label="Use anonymized summary (recommended)")
        anon_out = gr.Textbox(label="Anonymized summary", lines=3)
        user_email_in = gr.Textbox(label="Your email (for the organisation to reply to you)")

    gr.Markdown("### Compose message")
    subject_in = gr.Textbox(value="Request for GBV support", label="Email subject")
    message_in = gr.Textbox(label="Message body", lines=10)

    with gr.Accordion("Sending options (for automatic sending via Ally AI SMTP)", open=False):
        mode = gr.Radio(choices=["Draft only (mailto + .eml)", "Send via SMTP (Gmail)"],
                        value="Draft only (mailto + .eml)", label="Delivery mode")
        sender_email_in = gr.Textbox(label="Ally AI sender email (SMTP account)")
        sender_pass_in = gr.Textbox(label="Ally AI sender app password", type="password")
        logo_url_in = gr.Textbox(value=ALLY_AI_LOGO_URL_DEFAULT, label="Ally AI logo URL")

    with gr.Row():
        preview_btn = gr.Button("Preview")
        confirm_btn = gr.Button("Confirm (Create Draft or Send)")

    preview_text_out = gr.Textbox(label="Preview (text)", interactive=False)
    preview_html_out = gr.HTML()
    status_out = gr.Textbox(label="Status", interactive=False)
    mailto_html_out = gr.HTML()
    eml_file_out = gr.File(label="Download .eml")

    # Wire: Search
    # Note: this local definition shadows the module-level _on_search above; it is the one
    # actually wired to the button and updates the dropdown choices via gr.update.
    def _on_search(story, country):
        s, records, options, anon = run_search(story, country)
        # set dropdown + anonymized text and prefill message
        prefill = make_body(anon, story, True, "")  # user email unknown yet
        return s, records, gr.update(choices=options, value=(options[0] if options else None)), anon, prefill

    search_btn.click(_on_search,
                     inputs=[story_in, country_in],
                     outputs=[summary_out, results_table, dropdown_sel, anon_out, message_in])

    # When user toggles anonymized vs full story, refresh the message body
    def _refresh_body(use_anon_flag, anon_text, story, user_email):
        return make_body(anon_text, story, use_anon_flag, user_email)

    use_anon.change(_refresh_body, inputs=[use_anon, anon_out, story_in, user_email_in], outputs=message_in)
    user_email_in.change(_refresh_body, inputs=[use_anon, anon_out, story_in, user_email_in], outputs=message_in)
    anon_out.change(_refresh_body, inputs=[use_anon, anon_out, story_in, user_email_in], outputs=message_in)
    story_in.change(_refresh_body, inputs=[use_anon, anon_out, story_in, user_email_in], outputs=message_in)

    # Preview
    preview_btn.click(preview_contact,
                      inputs=[dropdown_sel, results_table, subject_in, message_in],
                      outputs=[preview_text_out, preview_html_out])

    # Confirm (create draft or send)
    confirm_btn.click(confirm_action,
                      inputs=[mode, dropdown_sel, results_table, subject_in, message_in,
                              user_email_in, sender_email_in, sender_pass_in, logo_url_in],
                      outputs=[status_out, mailto_html_out, eml_file_out])

demo.launch(share=False)
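# Local run sketch (placeholder values only; on Hugging Face Spaces set these as
# Secrets instead, as noted in the CONFIG section):
#
#   export GOOGLE_API_KEY="<your-google-api-key>"
#   export GOOGLE_CSE_ID="<your-cse-id>"
#   python app.py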