Really-amin committed on
Commit
6b51475
·
verified ·
1 Parent(s): dc25dc3

Upload 17 files

Browse files
collectors/explorers.py CHANGED
@@ -472,6 +472,71 @@ async def collect_explorer_data() -> List[Dict[str, Any]]:
472
  return processed_results
473
 
474
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
475
  # Example usage
476
  if __name__ == "__main__":
477
  async def main():
 
472
  return processed_results
473
 
474
 
475
class ExplorerDataCollector:
    """
    Explorer Data Collector class for WebSocket streaming interface.

    Wraps the standalone explorer data collection functions and merges
    the per-provider results into a single aggregate dict suitable for
    streaming to WebSocket clients.
    """

    def __init__(self, config: Any = None):
        """
        Initialize the explorer data collector.

        Args:
            config: Configuration object (optional, for compatibility)
        """
        self.config = config
        self.logger = logger  # module-level logger shared by the collectors

    async def collect(self) -> Dict[str, Any]:
        """
        Collect blockchain explorer data from all sources.

        Returns:
            Dict with aggregated explorer data: gas prices per chain,
            basic network stats, the list of contributing sources, and
            a UTC ISO-8601 timestamp.
        """
        results = await collect_explorer_data()

        # Aggregate data for WebSocket streaming
        aggregated: Dict[str, Any] = {
            "latest_block": None,
            "network_hashrate": None,
            "difficulty": None,
            "mempool_size": None,
            "transactions_count": None,
            "gas_prices": {},
            "sources": [],
            "timestamp": datetime.now(timezone.utc).isoformat()
        }

        for result in results:
            if result.get("success") and result.get("data"):
                provider = result.get("provider", "unknown")
                aggregated["sources"].append(provider)

                data = result["data"]

                # Parse gas price data (Etherscan-style gas-oracle payload:
                # {"status": ..., "result": {"SafeGasPrice": ..., ...}})
                if "result" in data and isinstance(data["result"], dict):
                    gas_data = data["result"]
                    if provider == "Etherscan":
                        aggregated["gas_prices"]["ethereum"] = {
                            "safe": gas_data.get("SafeGasPrice"),
                            "propose": gas_data.get("ProposeGasPrice"),
                            "fast": gas_data.get("FastGasPrice")
                        }
                    elif provider == "BscScan":
                        # BUGFIX: gas_data already *is* the "result" payload,
                        # so the previous gas_data.get("result") was always
                        # None.  BscScan's gas oracle mirrors Etherscan's
                        # schema, so parse it the same way.
                        aggregated["gas_prices"]["bsc"] = {
                            "safe": gas_data.get("SafeGasPrice"),
                            "propose": gas_data.get("ProposeGasPrice"),
                            "fast": gas_data.get("FastGasPrice")
                        }

                # Parse network stats (TronScan system-status payload)
                if provider == "TronScan" and "data" in data:
                    stats = data["data"]
                    aggregated["latest_block"] = stats.get("latestBlock")
                    aggregated["transactions_count"] = stats.get("totalTransaction")

        return aggregated
538
+
539
+
540
  # Example usage
541
  if __name__ == "__main__":
542
  async def main():
collectors/market_data.py CHANGED
@@ -436,6 +436,92 @@ async def collect_market_data() -> List[Dict[str, Any]]:
436
  return processed_results
437
 
438
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
439
  # Example usage
440
  if __name__ == "__main__":
441
  async def main():
 
436
  return processed_results
437
 
438
 
439
class MarketDataCollector:
    """
    Market Data Collector class for WebSocket streaming interface.

    Wraps the standalone market data collection functions and folds the
    per-provider payloads into one aggregate snapshot.
    """

    def __init__(self, config: Any = None):
        """
        Initialize the market data collector.

        Args:
            config: Configuration object (optional, for compatibility)
        """
        self.config = config
        self.logger = logger

    async def collect(self) -> Dict[str, Any]:
        """
        Collect market data from all sources.

        Returns:
            Dict with aggregated market data: prices, volumes, market
            caps, 24h price changes, contributing sources, and a UTC
            ISO-8601 timestamp.
        """
        provider_results = await collect_market_data()

        # Aggregate data for WebSocket streaming
        aggregated: Dict[str, Any] = {
            "prices": {},
            "volumes": {},
            "market_caps": {},
            "price_changes": {},
            "sources": [],
            "timestamp": datetime.now(timezone.utc).isoformat(),
        }

        for entry in provider_results:
            if not (entry.get("success") and entry.get("data")):
                continue

            source_name = entry.get("provider", "unknown")
            aggregated["sources"].append(source_name)
            payload = entry["data"]

            if source_name == "CoinGecko" and isinstance(payload, dict):
                self._merge_coingecko(payload, aggregated)
            elif source_name == "CoinMarketCap" and isinstance(payload, dict):
                self._merge_coinmarketcap(payload, aggregated)
            elif source_name == "Binance" and isinstance(payload, list):
                self._merge_binance(payload, aggregated)

        return aggregated

    @staticmethod
    def _merge_coingecko(payload: Dict[str, Any], aggregated: Dict[str, Any]) -> None:
        """Fold CoinGecko simple-price entries into the aggregate dict."""
        for coin_id, quote in payload.items():
            if not isinstance(quote, dict):
                continue
            key = coin_id.upper()
            if "usd" in quote:
                aggregated["prices"][key] = quote["usd"]
            if "usd_market_cap" in quote:
                aggregated["market_caps"][key] = quote["usd_market_cap"]
            if "usd_24h_vol" in quote:
                aggregated["volumes"][key] = quote["usd_24h_vol"]
            if "usd_24h_change" in quote:
                aggregated["price_changes"][key] = quote["usd_24h_change"]

    @staticmethod
    def _merge_coinmarketcap(payload: Dict[str, Any], aggregated: Dict[str, Any]) -> None:
        """Fold CoinMarketCap USD quotes into the aggregate dict."""
        if "data" not in payload:
            return
        for symbol, coin in payload["data"].items():
            if not (isinstance(coin, dict) and "quote" in coin):
                continue
            usd = coin.get("quote", {}).get("USD", {})
            if "price" in usd:
                aggregated["prices"][symbol] = usd["price"]
            if "market_cap" in usd:
                aggregated["market_caps"][symbol] = usd["market_cap"]
            if "volume_24h" in usd:
                aggregated["volumes"][symbol] = usd["volume_24h"]
            if "percent_change_24h" in usd:
                aggregated["price_changes"][symbol] = usd["percent_change_24h"]

    @staticmethod
    def _merge_binance(payload: Any, aggregated: Dict[str, Any]) -> None:
        """Fold Binance 24h ticker entries into the aggregate dict."""
        for ticker in payload:
            if not isinstance(ticker, dict):
                continue
            # Strip the quote asset so "BTCUSDT" keys as "BTC".
            key = ticker.get("symbol", "").replace("USDT", "")
            if "lastPrice" in ticker:
                aggregated["prices"][key] = float(ticker["lastPrice"])
            if "volume" in ticker:
                aggregated["volumes"][key] = float(ticker["volume"])
            if "priceChangePercent" in ticker:
                aggregated["price_changes"][key] = float(ticker["priceChangePercent"])
523
+
524
+
525
  # Example usage
526
  if __name__ == "__main__":
527
  async def main():
collectors/news.py CHANGED
@@ -358,6 +358,77 @@ async def collect_news_data() -> List[Dict[str, Any]]:
358
  return processed_results
359
 
360
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
361
  # Example usage
362
  if __name__ == "__main__":
363
  async def main():
 
358
  return processed_results
359
 
360
 
361
# Alias for backward compatibility: older import sites use ``collect_news``;
# it is the same coroutine function object as ``collect_news_data``.
collect_news = collect_news_data
363
+
364
+
365
class NewsCollector:
    """
    News Collector class for WebSocket streaming interface.

    Wraps the standalone news collection functions and flattens the
    per-provider article feeds into a single payload.
    """

    def __init__(self, config: Any = None):
        """
        Initialize the news collector.

        Args:
            config: Configuration object (optional, for compatibility)
        """
        self.config = config
        self.logger = logger

    async def collect(self) -> Dict[str, Any]:
        """
        Collect news data from all sources.

        Returns:
            Dict with aggregated news data: flattened articles, the
            contributing sources, category/breaking placeholders, and a
            UTC ISO-8601 timestamp.
        """
        provider_results = await collect_news_data()

        # Aggregate data for WebSocket streaming
        aggregated: Dict[str, Any] = {
            "articles": [],
            "sources": [],
            "categories": [],
            "breaking": [],
            "timestamp": datetime.now(timezone.utc).isoformat(),
        }

        for entry in provider_results:
            if not (entry.get("success") and entry.get("data")):
                continue

            source_name = entry.get("provider", "unknown")
            aggregated["sources"].append(source_name)
            payload = entry["data"]

            if source_name == "CryptoPanic" and "results" in payload:
                # Keep only the top ten posts from the feed.
                aggregated["articles"].extend(
                    {
                        "title": post.get("title"),
                        "url": post.get("url"),
                        "source": post.get("source", {}).get("title"),
                        "published_at": post.get("published_at"),
                        "kind": post.get("kind"),
                        "votes": post.get("votes", {}),
                    }
                    for post in payload["results"][:10]
                )
            elif source_name == "NewsAPI" and "articles" in payload:
                # Keep only the top ten articles from the feed.
                aggregated["articles"].extend(
                    {
                        "title": article.get("title"),
                        "url": article.get("url"),
                        "source": article.get("source", {}).get("name"),
                        "published_at": article.get("publishedAt"),
                        "description": article.get("description"),
                    }
                    for article in payload["articles"][:10]
                )

        return aggregated
430
+
431
+
432
  # Example usage
433
  if __name__ == "__main__":
434
  async def main():
collectors/onchain.py CHANGED
@@ -423,6 +423,58 @@ async def collect_onchain_data() -> List[Dict[str, Any]]:
423
  return processed_results
424
 
425
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
426
  # Example usage
427
  if __name__ == "__main__":
428
  async def main():
 
423
  return processed_results
424
 
425
 
426
class OnChainCollector:
    """
    On-Chain Analytics Collector class for WebSocket streaming interface.

    Wraps the standalone on-chain data collection functions.  Every
    current provider returns a placeholder payload, so the aggregate is
    the empty scaffold until real parsers are implemented.
    """

    def __init__(self, config: Any = None):
        """
        Initialize the on-chain collector.

        Args:
            config: Configuration object (optional, for compatibility)
        """
        self.config = config
        self.logger = logger

    async def collect(self) -> Dict[str, Any]:
        """
        Collect on-chain analytics data from all sources.

        Returns:
            Dict with aggregated on-chain data (currently the empty
            scaffold plus a UTC ISO-8601 timestamp).
        """
        provider_results = await collect_onchain_data()

        # Aggregate data for WebSocket streaming
        aggregated: Dict[str, Any] = {
            "active_addresses": None,
            "transaction_count": None,
            "total_fees": None,
            "gas_price": None,
            "network_utilization": None,
            "contract_events": [],
            "timestamp": datetime.now(timezone.utc).isoformat(),
        }

        for entry in provider_results:
            if not (entry.get("success") and entry.get("data")):
                continue
            payload = entry["data"]

            # Placeholder payloads carry no metrics; skip but keep scaffold.
            if isinstance(payload, dict) and payload.get("status") == "placeholder":
                continue

            # TODO: parse real provider payloads once implemented; every
            # current source is a placeholder, so nothing is merged yet.

        return aggregated
476
+
477
+
478
  # Example usage
479
  if __name__ == "__main__":
480
  async def main():
collectors/rpc_nodes.py CHANGED
@@ -527,6 +527,89 @@ async def collect_rpc_data(
527
  return all_results
528
 
529
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
530
  # Example usage
531
  if __name__ == "__main__":
532
  async def main():
 
527
  return all_results
528
 
529
 
530
class RPCNodeCollector:
    """
    RPC Node Collector class for WebSocket streaming interface.

    Wraps the standalone RPC node collection functions and summarizes
    node health (liveness, latency, best-known block) for streaming.
    """

    def __init__(self, config: Any = None):
        """
        Initialize the RPC node collector.

        Args:
            config: Configuration object (optional, for compatibility)
        """
        self.config = config
        self.logger = logger

    async def collect(self) -> Dict[str, Any]:
        """
        Collect RPC node data from all sources.

        Returns:
            Dict with aggregated RPC node data: per-node info, active /
            total node counts, average latency in ms, the highest block
            number observed across nodes, and a UTC ISO-8601 timestamp.
        """
        import os  # local import: only needed here to read the API keys
        infura_key = os.getenv("INFURA_API_KEY")
        alchemy_key = os.getenv("ALCHEMY_API_KEY")
        results = await collect_rpc_data(infura_key, alchemy_key)

        # Aggregate data for WebSocket streaming
        aggregated: Dict[str, Any] = {
            "nodes": [],
            "active_nodes": 0,
            "total_nodes": 0,
            "average_latency": 0,
            "events": [],
            "block_number": None,
            "timestamp": datetime.now(timezone.utc).isoformat()
        }

        total_latency = 0
        latency_count = 0

        for result in results:
            aggregated["total_nodes"] += 1

            if result.get("success"):
                aggregated["active_nodes"] += 1
                provider = result.get("provider", "unknown")
                response_time = result.get("response_time_ms", 0)
                data = result.get("data", {})

                # Track latency (only for nodes that reported a time)
                if response_time:
                    total_latency += response_time
                    latency_count += 1

                # Add node info
                node_info = {
                    "provider": provider,
                    "response_time_ms": response_time,
                    "status": "active",
                    "data": data
                }

                # Extract block number: JSON-RPC returns it as a hex string
                # (e.g. "0x10d4f" from eth_blockNumber).
                if "result" in data and isinstance(data["result"], str):
                    try:
                        block_number = int(data["result"], 16)
                    except ValueError:
                        # BUGFIX: was a bare ``except:`` that also swallowed
                        # SystemExit/KeyboardInterrupt; only a malformed hex
                        # string is expected here, so catch ValueError alone.
                        pass
                    else:
                        node_info["block_number"] = block_number
                        # Track the highest block seen across all nodes.
                        if aggregated["block_number"] is None or block_number > aggregated["block_number"]:
                            aggregated["block_number"] = block_number

                aggregated["nodes"].append(node_info)

        # Calculate average latency
        if latency_count > 0:
            aggregated["average_latency"] = total_latency / latency_count

        return aggregated
611
+
612
+
613
  # Example usage
614
  if __name__ == "__main__":
615
  async def main():
collectors/sentiment.py CHANGED
@@ -213,6 +213,63 @@ async def collect_sentiment_data() -> List[Dict[str, Any]]:
213
  return processed_results
214
 
215
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
216
  # Example usage
217
  if __name__ == "__main__":
218
  async def main():
 
213
  return processed_results
214
 
215
 
216
# Alias for backward compatibility: older import sites use
# ``collect_sentiment``; it is the same coroutine function object as
# ``collect_sentiment_data``.
collect_sentiment = collect_sentiment_data
218
+
219
+
220
class SentimentCollector:
    """
    Sentiment Collector class for WebSocket streaming interface.

    Wraps the standalone sentiment collection functions and merges the
    per-provider results into one aggregate snapshot.
    """

    def __init__(self, config: Any = None):
        """
        Initialize the sentiment collector.

        Args:
            config: Configuration object (optional, for compatibility)
        """
        self.config = config
        self.logger = logger

    async def collect(self) -> Dict[str, Any]:
        """
        Collect sentiment data from all sources.

        Returns:
            Dict with aggregated sentiment data: overall label, numeric
            score, per-source breakdown, and a UTC ISO-8601 timestamp.
        """
        provider_results = await collect_sentiment_data()

        # Aggregate data for WebSocket streaming
        aggregated: Dict[str, Any] = {
            "overall_sentiment": None,
            "sentiment_score": None,
            "social_volume": None,
            "trending_topics": [],
            "by_source": {},
            "social_trends": [],
            "timestamp": datetime.now(timezone.utc).isoformat(),
        }

        for entry in provider_results:
            if not (entry.get("success") and entry.get("data")):
                continue

            source_name = entry.get("provider", "unknown")
            payload = entry["data"]

            # Parse the Fear & Greed Index (Alternative.me): the newest
            # reading is the first element of the "data" series.
            if source_name == "Alternative.me" and "data" in payload:
                series = payload["data"]
                latest = series[0] if series else {}
                score = int(latest.get("value", 0))
                label = latest.get("value_classification", "neutral")
                aggregated["sentiment_score"] = score
                aggregated["overall_sentiment"] = label
                aggregated["by_source"][source_name] = {
                    "value": score,
                    "classification": label,
                }

        return aggregated
271
+
272
+
273
  # Example usage
274
  if __name__ == "__main__":
275
  async def main():
collectors/whale_tracking.py CHANGED
@@ -465,6 +465,76 @@ async def collect_whale_tracking_data(whalealert_key: Optional[str] = None) -> L
465
  return processed_results
466
 
467
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
468
  # Example usage
469
  if __name__ == "__main__":
470
  async def main():
 
465
  return processed_results
466
 
467
 
468
class WhaleTrackingCollector:
    """
    Whale Tracking Collector class for WebSocket streaming interface.

    Wraps the standalone whale tracking collection functions and
    aggregates large transactions plus total moved USD volume.
    """

    def __init__(self, config: Any = None):
        """
        Initialize the whale tracking collector.

        Args:
            config: Configuration object (optional, for compatibility)
        """
        self.config = config
        self.logger = logger

    async def collect(self) -> Dict[str, Any]:
        """
        Collect whale tracking data from all sources.

        Returns:
            Dict with aggregated whale tracking data: large transactions,
            total USD volume, the alert threshold, and a UTC ISO-8601
            timestamp.
        """
        import os  # local import: only needed here to read the API key
        whalealert_key = os.getenv("WHALEALERT_API_KEY")
        results = await collect_whale_tracking_data(whalealert_key)

        # Aggregate data for WebSocket streaming
        aggregated: Dict[str, Any] = {
            "large_transactions": [],
            "whale_wallets": [],
            "total_volume": 0,
            "alert_threshold": 1000000,  # $1M default threshold
            "alerts": [],
            "timestamp": datetime.now(timezone.utc).isoformat()
        }

        for result in results:
            if result.get("success") and result.get("data"):
                provider = result.get("provider", "unknown")
                data = result["data"]

                # Skip placeholder payloads (no real metrics yet)
                if isinstance(data, dict) and data.get("status") == "placeholder":
                    continue

                # Parse WhaleAlert transactions
                if provider == "WhaleAlert" and isinstance(data, dict):
                    for tx in data.get("transactions", []):
                        aggregated["large_transactions"].append({
                            "amount": tx.get("amount", 0),
                            "amount_usd": tx.get("amount_usd", 0),
                            "symbol": tx.get("symbol", "unknown"),
                            "from": tx.get("from", {}).get("owner", "unknown"),
                            "to": tx.get("to", {}).get("owner", "unknown"),
                            "timestamp": tx.get("timestamp"),
                            "source": provider
                        })
                    aggregated["total_volume"] += data.get("total_value_usd", 0)

                # Other sources: only a summary total is available.
                # FIX: removed the unused local ``tx_count`` (dead code).
                elif isinstance(data, dict):
                    total_value = data.get("total_value_usd", data.get("total_value", 0))
                    aggregated["total_volume"] += total_value

        return aggregated
536
+
537
+
538
  # Example usage
539
  if __name__ == "__main__":
540
  async def main():