aubynsamuel05 committed
Commit f86b3df · Parent(s): b20b87f

added cache to prevent re-analysis of the same headline
Files changed (2)
  1. deploy/index.py +10 -1
  2. deploy/main/claim_verifier.py +2 -3
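
The change is the standard in-memory memoization pattern: key a dict on the
exact headline string and return the stored result on a repeat lookup. A
minimal sketch of the idea, assuming a simplified entry point (the class and
method names below are illustrative, not the repo's exact signatures):

    from typing import Dict

    class HeadlineAnalyzer:
        def __init__(self) -> None:
            # Cache keyed on the raw headline string; lives for the lifetime
            # of the instance, with no size limit or eviction policy.
            self.analysis_cache: Dict[str, Dict] = {}

        def analyze(self, raw_headline: str) -> Dict:
            # Cache hit: skip the expensive analysis pipeline entirely.
            if raw_headline in self.analysis_cache:
                return self.analysis_cache[raw_headline]
            result = {"headline": raw_headline}  # placeholder for the real pipeline
            self.analysis_cache[raw_headline] = result
            return result

Note that the key is the unnormalized string, so headlines differing only in
case or whitespace are analyzed (and cached) separately.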
deploy/index.py CHANGED
@@ -35,6 +35,7 @@ class FakeNewsDetector:
             self.claim_verifier = ClaimVerifier()
             self.network_analyzer = NetworkAnalyzer()
             self.clickbait_predictor = ClickbaitPredictor()
+            self.analysis_cache: Dict[str, Dict] = {}
         except Exception as e:
             print(f"❌ Error initializing components: {e}")
             raise
@@ -235,6 +236,13 @@ class FakeNewsDetector:
         Comprehensive fact-checking with ML integration.
         This method orchestrates the analysis by calling various specialized components.
         """
+        if raw_headline in self.analysis_cache:
+            print(f'\n✅ Using Cached Analysis: "{raw_headline}"')
+            print("=" * 80)
+            cached_result = self.analysis_cache[raw_headline]
+            self._print_summary(cached_result)
+            return cached_result
+
         print(f'\n🔎 Comprehensive Analysis: "{raw_headline}"')
         print("=" * 80)
 
@@ -356,6 +364,7 @@ class FakeNewsDetector:
             },
         }
 
-        # self._print_summary(analysis_results)
+        self._print_summary(analysis_results)
+        self.analysis_cache[raw_headline] = analysis_results
         gc.collect()
         return analysis_results
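
For context, the new behavior on a repeat headline looks roughly like this
(a sketch; `comprehensive_analysis` is a guessed method name, since the diff
shows only the method body, not its signature):

    detector = FakeNewsDetector()

    first = detector.comprehensive_analysis("Scientists discover water on Mars")   # full pipeline
    second = detector.comprehensive_analysis("Scientists discover water on Mars")  # cache hit
    assert first is second  # the cached dict is returned by reference

Because the cached dict is returned by reference rather than copied, a caller
that mutates the result also mutates the cache entry; returning
`copy.deepcopy(cached_result)` would guard against that at some memory cost.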
deploy/main/claim_verifier.py CHANGED
@@ -273,14 +273,13 @@ class ClaimVerifier:
                         }
                     )
 
-                    for source_detail in source_details:
-                        logging.info(f"Source Details:\n{source_detail}\n")
-
                 except Exception as e:
                     logging.error(f"Error processing {url}: {e}")
         except TimeoutError:
             logging.warning("⏰ Timeout: Some URLs were skipped.")
 
+        for source_detail in source_details:
+            logging.info(f"Source Details:\n{source_detail}\n")
         support_sum = sum(support_scores)
 
         if total_weight > 0:
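
The effect of this move: previously the inner loop re-logged the entire
accumulated source_details list after every successfully processed URL; now
each entry is logged exactly once, after the whole batch (including the
timeout path) has finished. A minimal sketch of the reordered flow, with the
per-URL work stubbed out (the surrounding loop structure is reconstructed
from the diff context, not copied from the repo):

    import logging

    def summarize_sources(urls):
        source_details = []
        support_scores = []
        for url in urls:
            try:
                detail = {"url": url, "support": 0.5}  # stand-in for the real per-URL analysis
                source_details.append(detail)
                support_scores.append(detail["support"])
            except Exception as e:
                logging.error(f"Error processing {url}: {e}")
        # Logging here runs once per entry, even if some URLs failed above.
        for source_detail in source_details:
            logging.info(f"Source Details:\n{source_detail}\n")
        return sum(support_scores)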