QuentinL52 committed
Commit 84c7ce0 · verified · Parent: 57af3d0

Update main.py

Files changed (1):
  1. main.py +50 -78
main.py CHANGED
@@ -1,24 +1,35 @@
-import tempfile
+import logging
+import sys
 import os
 import json
+import tempfile
+from datetime import datetime
+
+# FIX: make sure 'Request' is present in this import line
 from fastapi import FastAPI, Request, HTTPException, UploadFile, File, BackgroundTasks
+from fastapi.responses import JSONResponse
 from fastapi.concurrency import run_in_threadpool
 from fastapi.middleware.cors import CORSMiddleware
 from pydantic import BaseModel, Field
 from typing import List, Dict, Any, Optional
+from bson import ObjectId
 
+# --- Service and class imports ---
 from src.models import load_all_models
 from src.services.cv_service import CVParsingService
-from src.services.interview_service import InterviewProcessor
 from src.services.analysis_service import AnalysisService
-from services.graph_service import GraphInterviewProcessor
-from fastapi.responses import JSONResponse
-from bson import ObjectId
+from services.graph_service import GraphInterviewProcessor
+
+# --- Logging configuration ---
+logging.basicConfig(level=logging.INFO)
+# The global logger is used for application startup messages.
+logger = logging.getLogger(__name__)
 
-os.environ['HOME'] = '/tmp'
+# --- Environment setup for temporary files ---
 os.makedirs('/tmp/feedbacks', exist_ok=True)
 
 
+# --- FastAPI application setup ---
 app = FastAPI(
     title="AIrh Interview Assistant",
     description="API pour l'analyse de CV et la simulation d'entretiens d'embauche avec analyse asynchrone.",
@@ -35,67 +46,46 @@ app.add_middleware(
     allow_headers=["*"],
 )
 
+# --- Service initialization ---
+logger.info("Chargement des modèles et initialisation des services...")
 models = load_all_models()
 cv_service = CVParsingService(models)
-analysis_service = AnalysisService(models)
-class InterviewRequest(BaseModel):
-    user_id: str = Field(..., example="user_12345")
-    job_offer_id: str = Field(..., example="job_offer_abcde")
-    cv_document: Dict[str, Any]
-    job_offer: Dict[str, Any]
-    messages: List[Dict[str, Any]]
-    conversation_history: List[Dict[str, Any]]
+logger.info("Services initialisés.")
 
+
+# --- Pydantic model definitions ---
 class Feedback(BaseModel):
     status: str
     feedback_data: Optional[Dict[str, Any]] = None
 
 class HealthCheck(BaseModel):
     status: str = "ok"
-    services: Dict[str, bool] = Field(default_factory=dict)
-    message: str = "API AIrh fonctionnelle"
-
-def background_analysis_task(user_id: str, conversation_history: list, job_description: str):
-    feedback_path = f"/tmp/feedbacks/{user_id}.json"
-
-    with open(feedback_path, "w", encoding="utf-8") as f:
-        json.dump({"status": "processing"}, f, ensure_ascii=False, indent=4)
-
-    result = analysis_service.run_analysis(conversation_history, job_description)
-
-    with open(feedback_path, "w", encoding="utf-8") as f:
-        json.dump({"status": "completed", "feedback_data": result}, f, ensure_ascii=False, indent=4)
 
+# --- Health-check endpoint ---
 @app.get("/", response_model=HealthCheck, tags=["Status"])
 async def health_check():
-    services = {
-        "models_loaded": models.get("status", False),
-        "cv_parsing": True,
-        "interview_simulation": True,
-        "scoring_engine": True
-    }
-    return HealthCheck(services=services)
-
-class MongoJSONEncoder(json.JSONEncoder):
-    def default(self, o):
-        if isinstance(o, ObjectId):
-            return str(o)
-        return super().default(o)
-
-
-@app.post("/simulate-interview/")
+    return HealthCheck()
+
+# --- Main interview simulation endpoint ---
+@app.post("/api/v1/simulate-interview/")
 async def simulate_interview(request: Request):
     """
    This endpoint receives the interview data, instantiates the graph processor,
    and runs the conversation.
     """
+    # FIX: fetch the logger instance so it is guaranteed to be available in the function scope.
+    logger = logging.getLogger(__name__)
     try:
         payload = await request.json()
+
         if not all(k in payload for k in ["user_id", "job_offer_id", "cv_document", "job_offer"]):
             raise HTTPException(status_code=400, detail="Données manquantes dans le payload (user_id, job_offer_id, cv_document, job_offer).")
+
         logger.info(f"Début de la simulation pour l'utilisateur : {payload['user_id']}")
+
         processor = GraphInterviewProcessor(payload)
         result = processor.invoke(payload.get("messages", []))
+
         return JSONResponse(content=result)
 
     except ValueError as ve:
@@ -108,61 +98,43 @@ async def simulate_interview(request: Request):
             status_code=500
         )
 
+# --- CV parsing endpoint ---
 @app.post("/parse-cv/", tags=["CV Parsing"])
 async def parse_cv(file: UploadFile = File(...)):
+    """
+    Parses a CV file (PDF) and returns the extracted data.
+    """
     if file.content_type != "application/pdf":
         raise HTTPException(status_code=400, detail="Fichier PDF requis")
-
+
     contents = await file.read()
 
     with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as tmp:
         tmp.write(contents)
         tmp_path = tmp.name
 
-    result = await run_in_threadpool(cv_service.parse_cv, tmp_path)
-
-    if os.path.exists(tmp_path):
-        os.remove(tmp_path)
-
+    try:
+        result = await run_in_threadpool(cv_service.parse_cv, tmp_path)
+    finally:
+        if os.path.exists(tmp_path):
+            os.remove(tmp_path)
+
+    if not result:
+        raise HTTPException(status_code=500, detail="Échec de l'extraction des données du CV.")
+
     return result
-'''
-@app.post("/simulate-interview/", tags=["Interview"])
-async def simulate_interview(request: InterviewRequest, background_tasks: BackgroundTasks):
-    processor = InterviewProcessor(
-        request.cv_document,
-        request.job_offer,
-        request.conversation_history
-    )
 
-    result = await run_in_threadpool(
-        processor.run,
-        request.messages
-    )
-    response_content = result["messages"][-1].content
-
-    if "nous allons maintenant passer a l'analyse" in response_content.lower():
-        job_description = request.job_offer.get('description', '')
-        background_tasks.add_task(
-            background_analysis_task,
-            request.user_id,
-            request.conversation_history + request.messages,
-            job_description
-        )
-
-    return {"response": response_content}
-'''
+# --- Feedback retrieval endpoint ---
 @app.get("/get-feedback/{user_id}", response_model=Feedback, tags=["Analysis"])
 async def get_feedback(user_id: str):
     feedback_path = f"/tmp/feedbacks/{user_id}.json"
-
     if not os.path.exists(feedback_path):
         raise HTTPException(status_code=404, detail="Feedback non trouvé ou non encore traité.")
-
     with open(feedback_path, "r", encoding="utf-8") as f:
         data = json.load(f)
-
     return Feedback(**data)
 
+# --- Application startup (local test) ---
 if __name__ == "__main__":
     import uvicorn
-    uvicorn.run(app, host="0.0.0.0", port=7860)
+    uvicorn.run(app, host="0.0.0.0", port=8000)
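
For reference, a minimal client sketch for the reworked routes (not part of the commit). It assumes the API is running locally on port 8000 as in the `__main__` block above and that the `requests` package is installed; the payload values are placeholders borrowed from the removed `InterviewRequest` examples, and the exact response shape depends on `GraphInterviewProcessor.invoke()`.

```python
# Hypothetical client sketch -- not part of the commit.
import requests

BASE_URL = "http://localhost:8000"  # assumption: local run via `python main.py`

payload = {
    "user_id": "user_12345",                 # placeholder id
    "job_offer_id": "job_offer_abcde",       # placeholder id
    "cv_document": {"skills": ["python"]},   # hypothetical parsed CV document
    "job_offer": {"description": "..."},     # hypothetical job offer
    "messages": [],                          # conversation so far
}

# All four ids/documents above are required; a missing key yields a 400.
resp = requests.post(f"{BASE_URL}/api/v1/simulate-interview/", json=payload)
resp.raise_for_status()
print(resp.json())

# Feedback is served from /tmp/feedbacks/<user_id>.json once it exists;
# a 404 simply means it has not been produced yet.
feedback = requests.get(f"{BASE_URL}/get-feedback/{payload['user_id']}")
if feedback.status_code == 200:
    print(feedback.json())  # {"status": ..., "feedback_data": ...}
```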
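And a similar sketch for the PDF upload route; the local file path is hypothetical, and the JSON shape of the parsed result depends on `CVParsingService.parse_cv`.

```python
# Hypothetical upload sketch for /parse-cv/ -- not part of the commit.
import requests

BASE_URL = "http://localhost:8000"  # assumption: local run

# The endpoint only accepts "application/pdf"; other content types get a 400.
with open("cv.pdf", "rb") as f:  # hypothetical local file
    resp = requests.post(
        f"{BASE_URL}/parse-cv/",
        files={"file": ("cv.pdf", f, "application/pdf")},
    )

# A 500 ("Échec de l'extraction des données du CV.") means parse_cv returned
# nothing; otherwise the extracted CV data comes back as JSON.
print(resp.status_code, resp.json())
```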