Really-amin committed on
Commit
b9613eb
·
verified ·
1 Parent(s): f83e6ae

Upload 10 files

Browse files
Files changed (10) hide show
  1. .dockerignore +6 -96
  2. Dockerfile +24 -58
  3. README.md +26 -262
  4. app/__init__.py +2 -0
  5. app/indicators.py +99 -0
  6. app/models.py +55 -0
  7. app/storage.py +112 -0
  8. app/utils.py +75 -0
  9. main.py +248 -0
  10. requirements.txt +10 -31
.dockerignore CHANGED
@@ -1,9 +1,9 @@
1
- # Python
2
  __pycache__/
3
  *.py[cod]
4
  *$py.class
5
  *.so
6
  .Python
 
7
  build/
8
  develop-eggs/
9
  dist/
@@ -15,107 +15,17 @@ lib64/
15
  parts/
16
  sdist/
17
  var/
18
- wheels/
19
  *.egg-info/
20
  .installed.cfg
21
  *.egg
22
- MANIFEST
23
- pip-log.txt
24
- pip-delete-this-directory.txt
25
-
26
- # Virtual environments
27
  venv/
28
  ENV/
29
- env/
30
- .venv
31
-
32
- # IDE
33
- .vscode/
34
  .idea/
 
35
  *.swp
36
- *.swo
37
- *~
38
- .DS_Store
39
-
40
- # Git
41
- .git/
42
- .gitignore
43
- .gitattributes
44
-
45
- # Documentation
46
- *.md
47
- docs/
48
- README*.md
49
- CHANGELOG.md
50
- LICENSE
51
-
52
- # Testing
53
- .pytest_cache/
54
- .coverage
55
- htmlcov/
56
- .tox/
57
- .hypothesis/
58
- tests/
59
- test_*.py
60
-
61
- # Logs and databases (will be created in container)
62
- *.log
63
- logs/
64
- data/*.db
65
- data/*.sqlite
66
- data/*.db-journal
67
-
68
- # Environment files (should be set via docker-compose or HF Secrets)
69
- .env
70
- .env.*
71
- !.env.example
72
-
73
- # Docker
74
- docker-compose*.yml
75
- !docker-compose.yml
76
- Dockerfile
77
- .dockerignore
78
-
79
- # CI/CD
80
- .github/
81
- .gitlab-ci.yml
82
- .travis.yml
83
- azure-pipelines.yml
84
-
85
- # Temporary files
86
- *.tmp
87
  *.bak
88
- *.swp
89
- temp/
90
- tmp/
91
-
92
- # Node modules (if any)
93
- node_modules/
94
- package-lock.json
95
- yarn.lock
96
-
97
- # OS files
98
- Thumbs.db
99
  .DS_Store
100
- desktop.ini
101
-
102
- # Jupyter notebooks
103
- .ipynb_checkpoints/
104
- *.ipynb
105
-
106
- # Model cache (models will be downloaded in container)
107
- models/
108
- .cache/
109
- .huggingface/
110
-
111
- # Large files that shouldn't be in image
112
- *.tar
113
- *.tar.gz
114
- *.zip
115
- *.rar
116
- *.7z
117
-
118
- # Screenshots and assets not needed
119
- screenshots/
120
- assets/*.png
121
- assets/*.jpg
 
 
1
  __pycache__/
2
  *.py[cod]
3
  *$py.class
4
  *.so
5
  .Python
6
+ env/
7
  build/
8
  develop-eggs/
9
  dist/
 
15
  parts/
16
  sdist/
17
  var/
 
18
  *.egg-info/
19
  .installed.cfg
20
  *.egg
21
+ .env
22
+ .venv
 
 
 
23
  venv/
24
  ENV/
 
 
 
 
 
25
  .idea/
26
+ .vscode/
27
  *.swp
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
28
  *.bak
 
 
 
 
 
 
 
 
 
 
 
29
  .DS_Store
30
+ .git
31
+ .gitignore
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
Dockerfile CHANGED
@@ -1,71 +1,37 @@
1
- # Dockerfile for Crypto API Monitoring System
2
- # Optimized for HuggingFace Spaces deployment
3
- FROM python:3.10-slim
4
 
5
- # Set working directory
6
- WORKDIR /app
7
 
8
- # Set environment variables for better Python behavior
9
- ENV PYTHONUNBUFFERED=1 \
10
- PYTHONDONTWRITEBYTECODE=1 \
11
- PIP_NO_CACHE_DIR=1 \
12
- PIP_DISABLE_PIP_VERSION_CHECK=1
13
-
14
- # Install system dependencies required for building Python packages
15
- RUN apt-get update && apt-get install -y --no-install-recommends \
16
- gcc \
17
- g++ \
18
- git \
19
  curl \
 
 
20
  && rm -rf /var/lib/apt/lists/*
21
 
22
- # Copy requirements first for better layer caching
23
  COPY requirements.txt .
24
 
25
- # Install Python dependencies with optimizations
26
- # Split into two steps: core dependencies first, then ML libraries
27
- RUN pip install --no-cache-dir \
28
- fastapi==0.104.1 \
29
- uvicorn[standard]==0.24.0 \
30
- pydantic==2.5.0 \
31
- python-multipart==0.0.6 \
32
- websockets==12.0 \
33
- SQLAlchemy==2.0.23 \
34
- APScheduler==3.10.4 \
35
- aiohttp==3.9.1 \
36
- requests==2.31.0 \
37
- httpx \
38
- python-dotenv==1.0.0 \
39
- feedparser==6.0.11 \
40
- gradio==4.14.0 \
41
- pandas==2.1.4 \
42
- plotly==5.18.0
43
 
44
- # Install HuggingFace ML dependencies separately
45
- RUN pip install --no-cache-dir \
46
- transformers>=4.44.0 \
47
- datasets>=3.0.0 \
48
- huggingface_hub>=0.24.0 \
49
- torch>=2.0.0 --index-url https://download.pytorch.org/whl/cpu \
50
- sentencepiece>=0.1.99 \
51
- protobuf>=3.20.0
52
 
53
- # Copy all application code
54
- COPY . .
 
55
 
56
- # Create necessary directories
57
- RUN mkdir -p data logs
 
58
 
59
- # Set proper permissions for data directories
60
- RUN chmod -R 755 data logs
61
-
62
- # Expose port 7860 (HuggingFace Spaces standard port)
63
  EXPOSE 7860
64
 
65
- # Health check endpoint for HuggingFace Spaces
66
- HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
67
- CMD curl -f http://localhost:7860/health || exit 1
68
-
69
- # Run the FastAPI application with uvicorn
70
- # Using multiple workers for better performance (adjust based on available resources)
71
- CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860", "--log-level", "info", "--workers", "1"]
 
1
+ FROM python:3.9-slim
 
 
2
 
3
+ WORKDIR /code
 
4
 
5
+ # Install system dependencies
6
+ RUN apt-get update && \
7
+ apt-get install -y --no-install-recommends \
8
+ build-essential \
 
 
 
 
 
 
 
9
  curl \
10
+ git \
11
+ && apt-get clean \
12
  && rm -rf /var/lib/apt/lists/*
13
 
14
+ # Copy requirements first to leverage Docker cache
15
  COPY requirements.txt .
16
 
17
+ # Install Python dependencies
18
+ RUN pip install --no-cache-dir --upgrade pip && \
19
+ pip install --no-cache-dir -r requirements.txt
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
20
 
21
+ # Create necessary directories with correct permissions
22
+ RUN mkdir -p /code/data /code/logs && \
23
+ chmod -R 755 /code/data /code/logs
 
 
 
 
 
24
 
25
+ # Copy application code
26
+ COPY app /code/app
27
+ COPY main.py /code/
28
 
29
+ # Set environment variables
30
+ ENV PYTHONPATH=/code
31
+ ENV PORT=7860
32
 
33
+ # Expose the port
 
 
 
34
  EXPOSE 7860
35
 
36
+ # Run the application
37
+ CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
 
 
 
 
 
README.md CHANGED
@@ -1,281 +1,45 @@
1
  ---
2
- title: Crypto API Monitor - Vidya UI
3
- emoji: 📊
4
  colorFrom: blue
5
- colorTo: purple
6
  sdk: docker
7
  app_port: 7860
8
  pinned: false
9
- license: mit
10
  ---
11
 
12
- # 📊 Cryptocurrency API Monitor - Vidya Dashboard
13
 
14
- > **Production-ready real-time cryptocurrency API monitoring with beautiful Vidya HTML UI**
15
 
16
- A comprehensive monitoring system that tracks the health, performance, and availability of 162+ cryptocurrency APIs including market data, blockchain explorers, RPC nodes, news sources, sentiment analyzers, and more. Features a stunning real-time dashboard with WebSocket support.
17
 
18
- ## 🌟 Features
19
-
20
- ### Beautiful Vidya HTML UI
21
- - **Modern Design**: Gradient-based UI with smooth animations and transitions
22
- - **Real-time Updates**: WebSocket-powered live data streaming
23
- - **Interactive Dashboard**: KPI cards, charts, and data visualizations
24
- - **Multiple Views**: Dashboard, Inventory, Logs, Analytics, and HuggingFace Integration
25
- - **Responsive**: Works perfectly on desktop, tablet, and mobile devices
26
-
27
- ### Core Capabilities
28
- - **Real-Time Monitoring**: Track 162+ API endpoints with live status updates
29
- - **WebSocket Streaming**: Real-time data feeds for all services
30
- - **Multi-Category Support**: Market Data, Blockchain Explorers, RPC Nodes, News, Sentiment, On-chain Analytics
31
- - **Health Tracking**: Response times, uptime percentages, failure detection
32
- - **Rate Limit Management**: Automatic rate limiting with configurable rules
33
- - **Database Persistence**: SQLite-based historical data storage
34
- - **Automated Scheduling**: Background tasks for continuous monitoring
35
- - **Alert System**: Real-time alerts for critical failures
36
- - **HuggingFace Integration**: AI/ML capabilities with sentiment analysis
37
-
38
- ### WebSocket Services
39
- The application provides comprehensive WebSocket APIs for real-time streaming:
40
-
41
- #### Data Collection Streams
42
- - `/ws/market_data` - Live market data updates
43
- - `/ws/news` - Real-time crypto news feed
44
- - `/ws/sentiment` - Sentiment analysis stream
45
- - `/ws/whale_tracking` - Whale transaction alerts
46
- - `/ws/data` - Unified data collection stream
47
-
48
- #### Monitoring Streams
49
- - `/ws/health` - System health status
50
- - `/ws/pool_status` - Pool management updates
51
- - `/ws/scheduler_status` - Scheduler activity
52
- - `/ws/monitoring` - Unified monitoring stream
53
-
54
- #### Integration Streams
55
- - `/ws/huggingface` - HuggingFace AI/ML integration
56
- - `/ws/ai` - AI services stream
57
- - `/ws/persistence` - Persistence service updates
58
- - `/ws/integration` - Unified integration stream
59
-
60
- #### Master Streams
61
- - `/ws` or `/ws/master` - All services combined
62
- - `/ws/live` - Legacy live updates (compatible with older clients)
63
 
64
  ## API Endpoints
65
 
66
- ### System Status
67
- - `GET /api/status` - Overall system health
68
- - `GET /api/categories` - Statistics by category
69
- - `GET /api/providers` - List all providers with filtering
70
-
71
- ### Logs & Analytics
72
- - `GET /api/logs` - Connection logs with pagination
73
- - `GET /api/failures` - Failure analysis with remediation suggestions
74
- - `GET /api/freshness` - Data freshness metrics
75
-
76
- ### Scheduler
77
- - `GET /api/schedule` - Schedule status and compliance
78
- - `POST /api/schedule/trigger` - Trigger immediate health check
79
-
80
- ### Rate Limits
81
- - `GET /api/rate-limits` - Current rate limit status for all providers
82
-
83
- ### Configuration
84
- - `GET /api/config/keys` - API key status (masked)
85
- - `POST /api/config/keys/test` - Test API key validity
86
-
87
- ### Charts Data
88
- - `GET /api/charts/health-history` - Time series for charts
89
- - `GET /api/charts/compliance` - Compliance trends
90
-
91
- ## WebSocket Support
92
-
93
- Real-time updates at `ws://localhost:7860/ws/live`
94
-
95
- ### Message Types
96
- - `status_update` - System status every 10 seconds
97
- - `new_log_entry` - Real-time log notifications
98
- - `rate_limit_alert` - Warnings when usage ≥80%
99
- - `provider_status_change` - Provider status changes
100
- - `ping` - Heartbeat every 30 seconds
101
-
102
- ## Architecture
103
-
104
- ```
105
- crypto-dt-source/
106
- ├── app.py # Main FastAPI application
107
- ├── config.py # Configuration loader
108
- ├── monitoring/
109
- │ ├── health_checker.py # API health monitoring
110
- │ ├── rate_limiter.py # Rate limit tracking
111
- │ └── scheduler.py # Task scheduling
112
- ├── database/
113
- │ ├── models.py # SQLAlchemy models
114
- │ └── db_manager.py # Database operations
115
- ├── collectors/
116
- │ ├── market_data.py # Market data collection
117
- │ ├── explorers.py # Blockchain explorer data
118
- │ ├── news.py # News aggregation
119
- │ ├── sentiment.py # Sentiment data
120
- │ └── onchain.py # On-chain analytics
121
- ├── api/
122
- │ ├── endpoints.py # REST API endpoints
123
- │ └── websocket.py # WebSocket support
124
- └── utils/
125
- ├── logger.py # Structured JSON logging
126
- └── api_client.py # HTTP client with retry logic
127
- ```
128
-
129
- ## Database Schema
130
-
131
- SQLite database at `data/api_monitor.db` with tables:
132
- - **providers** - API provider configurations
133
- - **connection_attempts** - All health check attempts
134
- - **data_collections** - Data collection logs
135
- - **rate_limit_usage** - Rate limit snapshots
136
- - **schedule_config** - Schedule configurations
137
- - **schedule_compliance** - Schedule execution tracking
138
- - **failure_logs** - Detailed failure logs
139
- - **alerts** - System alerts
140
- - **system_metrics** - System-wide metrics
141
-
142
- ## Environment Variables
143
-
144
- Configure API keys via environment variables:
145
-
146
- ```bash
147
- # Blockchain Explorers
148
- export ETHERSCAN_KEY_1="your_key"
149
- export ETHERSCAN_KEY_2="your_backup_key"
150
- export BSCSCAN_KEY="your_key"
151
- export TRONSCAN_KEY="your_key"
152
-
153
- # Market Data
154
- export COINMARKETCAP_KEY_1="your_key"
155
- export COINMARKETCAP_KEY_2="your_backup_key"
156
- export CRYPTOCOMPARE_KEY="your_key"
157
-
158
- # News
159
- export NEWSAPI_KEY="your_key"
160
-
161
- # Optional
162
- export HUGGINGFACE_KEY="your_key"
163
- ```
164
-
165
- ## 🚀 Quick Start
166
-
167
- ### Hugging Face Spaces Deployment (Recommended)
168
-
169
- This application is configured for **Docker SDK** deployment on Hugging Face Spaces:
170
-
171
- 1. **Create a new Space** on [Hugging Face](https://huggingface.co/spaces)
172
- 2. **Select SDK**: Choose "Docker" as the SDK
173
- 3. **Link Repository**: Connect this GitHub repository
174
- 4. **Configure Secrets** (Optional - for API keys):
175
- ```
176
- ETHERSCAN_KEY
177
- BSCSCAN_KEY
178
- TRONSCAN_KEY
179
- CMC_KEY
180
- CRYPTOCOMPARE_KEY
181
- NEWSAPI_KEY
182
- INFURA_KEY
183
- ALCHEMY_KEY
184
- ```
185
- 5. **Deploy**: Push to your repository - auto-deploy triggers!
186
-
187
- The application will be available at `https://YOUR_USERNAME-SPACE_NAME.hf.space`
188
-
189
- ### Local Development
190
-
191
- ```bash
192
- # Clone repository
193
- git clone https://github.com/nimazasinich/crypto-dt-source.git
194
- cd crypto-dt-source
195
-
196
- # Install dependencies
197
- pip install -r requirements.txt
198
-
199
- # Run the application
200
- python app.py
201
- ```
202
-
203
- Visit `http://localhost:7860` to access the Vidya dashboard.
204
-
205
- ### Docker Deployment
206
-
207
- ```bash
208
- # Build image
209
- docker build -t crypto-api-monitor .
210
-
211
- # Run container
212
- docker run -p 7860:7860 crypto-api-monitor
213
- ```
214
-
215
- ## API Documentation
216
-
217
- Once running, visit:
218
- - **Swagger UI**: http://localhost:7860/docs
219
- - **ReDoc**: http://localhost:7860/redoc
220
-
221
- ## Tech Stack
222
-
223
- - **FastAPI** - Modern async web framework
224
- - **SQLAlchemy** - Database ORM
225
- - **APScheduler** - Task scheduling
226
- - **aiohttp** - Async HTTP client
227
- - **WebSockets** - Real-time communication
228
- - **SQLite** - Embedded database
229
-
230
- ## Features Breakdown
231
-
232
- ### Health Checking (every 5 minutes)
233
- - CoinGecko: `GET /api/v3/ping`
234
- - CoinMarketCap: `GET /v1/cryptocurrency/map?limit=1`
235
- - Etherscan: `GET /api?module=stats&action=ethsupply`
236
- - BscScan: `GET /api?module=stats&action=bnbsupply`
237
- - TronScan: `GET /api/system/status`
238
- - CryptoPanic: `GET /v1/posts/?auth_token=free`
239
- - Alternative.me: `GET /fng/`
240
- - NewsAPI: `GET /news?category=business`
241
- - CryptoCompare: `GET /data/price?fsym=BTC&tsyms=USD`
242
-
243
- ### Data Collection Schedules
244
- - **Market Data** (1 min): Price updates for BTC, ETH, BNB
245
- - **Explorers** (5 min): Gas prices, network stats
246
- - **News** (10 min): Latest crypto news posts
247
- - **Sentiment** (15 min): Fear & Greed Index
248
-
249
- ### Failure Analysis
250
- - Error type distribution (timeout, rate_limit, server_error, auth_error)
251
- - Top 10 failing providers
252
- - Recent failure log with retry results
253
- - Intelligent remediation suggestions
254
-
255
- ### Retry Logic
256
- - **Timeout**: Retry with timeout +50%, max 3 attempts
257
- - **Rate Limit**: Wait until reset_time + 10s buffer
258
- - **5xx Errors**: Exponential backoff (1min, 2min, 4min), max 5 attempts
259
- - **401 Auth**: No retry, log key_expired alert
260
-
261
- ## License
262
-
263
- MIT License - see LICENSE file for details
264
 
265
- ## Contributing
266
 
267
- Contributions welcome! Please open an issue or pull request.
268
 
269
- ## Monitoring
270
 
271
- The system provides comprehensive monitoring:
272
- - Response times in milliseconds
273
- - Success/failure rates
274
- - Rate limit usage percentages
275
- - Data staleness metrics
276
- - Schedule compliance percentages
277
- - System health scores
278
 
279
- ## Support
280
 
281
- For issues, please open a GitHub issue or contact the maintainers.
 
1
  ---
2
+ title: Cryptocurrency Data API
3
+ emoji: 📈
4
  colorFrom: blue
5
+ colorTo: green
6
  sdk: docker
7
  app_port: 7860
8
  pinned: false
 
9
  ---
10
 
11
+ # Cryptocurrency Data Source API
12
 
13
+ This API provides cryptocurrency market data and technical indicators from various exchanges. It's built with FastAPI and designed to be deployed on Hugging Face Spaces.
14
 
15
+ ## Features
16
 
17
+ - Access to multiple cryptocurrency exchanges through a unified API
18
+ - OHLCV data retrieval with caching for better performance
19
+ - Technical indicators calculation (RSI, MACD, Bollinger Bands, EMA, etc.)
20
+ - Designed for easy integration with trading systems and analysis tools
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21
 
22
  ## API Endpoints
23
 
24
+ - `GET /exchanges` - List available exchanges
25
+ - `GET /exchanges/{exchange_id}/markets` - List markets for a specific exchange
26
+ - `GET /data/{exchange_id}/{symbol}/{timeframe}` - Get OHLCV data for a market
27
+ - `GET /indicators/{exchange_id}/{symbol}/{timeframe}` - Get technical indicators for a market
28
+ - `GET /data/available` - Get list of available cached data files
29
+ - `GET /health` - Health check endpoint
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
30
 
31
+ ## Usage
32
 
33
+ The API documentation is available at `/docs` once the service is running.
34
 
35
+ ## Development
36
 
37
+ The API is built with:
38
+ - FastAPI - Modern web framework for building APIs
39
+ - CCXT - Library for cryptocurrency exchange trading
40
+ ta - Technical analysis library for Python
41
+ - Pandas - Data manipulation and analysis
 
 
42
 
43
+ ## Deployment
44
 
45
+ This application is designed for deployment on Hugging Face Spaces using Docker.
app/__init__.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ # Cryptocurrency Data Source API
2
+ # Version 1.0.0
app/indicators.py ADDED
@@ -0,0 +1,99 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pandas as pd
2
+ import numpy as np
3
+ import ta
4
+ from typing import List, Dict, Any
5
+
6
def calculate_indicators(df: pd.DataFrame, indicators: List[str]) -> pd.DataFrame:
    """Compute the requested technical indicators on an OHLCV frame.

    Args:
        df: DataFrame with OHLCV data (timestamp, datetime, open, high,
            low, close, volume columns).
        indicators: Names of the indicator groups to compute; recognised
            values are 'rsi', 'macd', 'bollinger', 'ema', 'atr', 'stoch'.

    Returns:
        A copy of *df* with one extra column per computed indicator value.

    Raises:
        ValueError: If a required OHLCV column is missing.
    """
    # Work on a copy so the caller's frame is never mutated.
    out = df.copy()

    # Validate the expected columns; 'datetime' is optional as long as
    # 'timestamp' is present (it can be derived from it).
    for col in ['timestamp', 'datetime', 'open', 'high', 'low', 'close', 'volume']:
        if col in out.columns:
            continue
        if col == 'datetime' and 'timestamp' in out.columns:
            continue
        raise ValueError(f"Required column {col} not found in DataFrame")

    close = out['close']

    if 'rsi' in indicators:
        # 14-period Relative Strength Index.
        out['rsi'] = ta.momentum.RSIIndicator(close=close, window=14).rsi()

    if 'macd' in indicators:
        # Standard 12/26/9 MACD line, signal line and histogram.
        macd_ind = ta.trend.MACD(
            close=close, window_fast=12, window_slow=26, window_sign=9
        )
        out['macd'] = macd_ind.macd()
        out['macd_signal'] = macd_ind.macd_signal()
        out['macd_histogram'] = macd_ind.macd_diff()

    if 'bollinger' in indicators:
        # 20-period Bollinger Bands at 2 standard deviations.
        bands = ta.volatility.BollingerBands(close=close, window=20, window_dev=2)
        out['bb_upper'] = bands.bollinger_hband()
        out['bb_middle'] = bands.bollinger_mavg()
        out['bb_lower'] = bands.bollinger_lband()

    if 'ema' in indicators:
        # Exponential moving averages at the common trend-analysis lengths.
        for window in (9, 21, 50, 200):
            out[f'ema_{window}'] = ta.trend.EMAIndicator(
                close=close, window=window
            ).ema_indicator()

    if 'atr' in indicators:
        # 14-period Average True Range (volatility).
        out['atr'] = ta.volatility.AverageTrueRange(
            high=out['high'], low=out['low'], close=close, window=14
        ).average_true_range()

    if 'stoch' in indicators:
        # 14/3 Stochastic Oscillator: raw %K and smoothed %D.
        osc = ta.momentum.StochasticOscillator(
            high=out['high'], low=out['low'], close=close,
            window=14, smooth_window=3
        )
        out['stoch_k'] = osc.stoch()
        out['stoch_d'] = osc.stoch_signal()

    return out
app/models.py ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pydantic import BaseModel, Field
2
+ from typing import List, Dict, Any, Optional
3
+ from enum import Enum
4
+
5
class TimeFrame(str, Enum):
    """Candle timeframes accepted by the API, using CCXT-style notation.

    Note the case distinction between '1m' (one minute) and '1M' (one month).
    """
    MINUTE_1 = '1m'
    MINUTE_5 = '5m'
    MINUTE_15 = '15m'
    MINUTE_30 = '30m'
    HOUR_1 = '1h'
    HOUR_4 = '4h'
    DAY_1 = '1d'
    WEEK_1 = '1w'
    MONTH_1 = '1M'
15
+
16
class CryptoPair(BaseModel):
    """A tradable market pair as reported by an exchange."""
    symbol: str  # full market symbol, e.g. "BTC/USDT"
    base: str  # base currency code
    quote: str  # quote currency code
    type: str = "spot"  # market type as reported by the exchange; defaults to spot
21
+
22
class ExchangeInfo(BaseModel):
    """Summary of one exchange and its OHLCV capabilities."""
    id: str  # exchange identifier, e.g. "binance"
    name: str  # human-readable exchange name
    has_fetchOHLCV: bool = False  # True if the exchange can serve OHLCV candles
    timeframes: List[str] = []  # timeframe codes the exchange supports
27
+
28
class OHLCVData(BaseModel):
    """A single OHLCV candle."""
    timestamp: int  # epoch timestamp; presumably milliseconds (CCXT convention) — TODO confirm
    datetime: str  # string form of the timestamp
    open: float
    high: float
    low: float
    close: float
    volume: float
36
+
37
class IndicatorValues(BaseModel):
    """Technical indicator values for one candle.

    Every indicator field is optional: a value is None when that
    indicator was not requested, or when it could not be computed
    (e.g. presumably during an indicator's warm-up window — confirm
    against the indicator calculation code).
    """
    timestamp: int
    datetime: str
    # Momentum / trend indicators
    rsi: Optional[float] = None
    macd: Optional[float] = None
    macd_signal: Optional[float] = None
    macd_histogram: Optional[float] = None
    # Exponential moving averages at common window lengths
    ema_9: Optional[float] = None
    ema_21: Optional[float] = None
    ema_50: Optional[float] = None
    ema_200: Optional[float] = None
    # Bollinger Bands
    bb_upper: Optional[float] = None
    bb_middle: Optional[float] = None
    bb_lower: Optional[float] = None
    # Raw OHLCV values echoed alongside the indicators
    open: Optional[float] = None
    high: Optional[float] = None
    low: Optional[float] = None
    close: Optional[float] = None
    volume: Optional[float] = None
app/storage.py ADDED
@@ -0,0 +1,112 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import json
3
+ from typing import List, Dict, Any, Union
4
+ import pandas as pd
5
+
6
def save_market_data(data: List[List[Union[int, float]]], file_path: str) -> bool:
    """
    Save market data to a JSON file.

    Args:
        data: List of OHLCV candles
        file_path: Path to save the data

    Returns:
        bool: True if saved successfully, False on any error
    """
    try:
        # Create the parent directory if needed. os.path.dirname() returns
        # '' for a bare filename, and os.makedirs('') raises — which made
        # every save to the current directory fail — so guard for that.
        parent = os.path.dirname(file_path)
        if parent:
            os.makedirs(parent, exist_ok=True)

        with open(file_path, 'w') as f:
            json.dump(data, f)

        return True
    except Exception as e:
        print(f"Error saving market data: {str(e)}")
        return False
28
+
29
def load_market_data(file_path: str) -> List[List[Union[int, float]]]:
    """
    Read previously cached OHLCV candles from a JSON file.

    Args:
        file_path: Location of the cached JSON data.

    Returns:
        The parsed list of candles, or an empty list if the file
        cannot be read or parsed.
    """
    try:
        with open(file_path, 'r') as handle:
            return json.load(handle)
    except Exception as e:
        # Best-effort read: report the problem and fall back to "no data".
        print(f"Error loading market data: {str(e)}")
        return []
47
+
48
def get_available_data(directory: str) -> List[Dict[str, Any]]:
    """
    Get list of available cached data files.

    File names are expected to follow the pattern
    ``{exchange}_{symbol parts}_{timeframe}.json`` (e.g.
    ``binance_BTC_USDT_1h.json``); symbols stored with '_' separators
    are reconstructed with '/' here.

    Args:
        directory: Directory to scan (created if it does not exist)

    Returns:
        List of dicts with exchange, symbol, timeframe, filename,
        size_kb, modified (mtime as float) and candle_count keys.
    """
    result = []

    try:
        # Create directory if it doesn't exist
        os.makedirs(directory, exist_ok=True)

        for file in os.listdir(directory):
            if not file.endswith('.json'):
                continue

            # Parse file name to extract information
            parts = file.replace('.json', '').split('_')
            if len(parts) < 3:
                continue

            exchange_id = parts[0]

            # Locate the timeframe token; everything between the exchange
            # id and the timeframe belongs to the (possibly multi-part)
            # symbol.
            timeframe_index = None
            for i, part in enumerate(parts[2:], start=2):
                if part in ['1m', '5m', '15m', '30m', '1h', '4h', '1d', '1w', '1M']:
                    timeframe_index = i
                    break

            if timeframe_index is None:
                continue

            symbol_parts = parts[1:timeframe_index]
            symbol = '/'.join(symbol_parts) if len(symbol_parts) > 1 else symbol_parts[0]
            timeframe = parts[timeframe_index]

            # Get file stats
            file_path = os.path.join(directory, file)
            file_size = os.path.getsize(file_path)
            modified_time = os.path.getmtime(file_path)

            # Count candles; tolerate unreadable/corrupt files.
            # (Was a bare 'except:', which also swallowed SystemExit and
            # KeyboardInterrupt.)
            try:
                candle_count = len(load_market_data(file_path))
            except Exception:
                candle_count = 0

            result.append({
                'exchange': exchange_id,
                'symbol': symbol,
                'timeframe': timeframe,
                'filename': file,
                'size_kb': round(file_size / 1024, 2),
                'modified': modified_time,
                'candle_count': candle_count
            })

        return result
    except Exception as e:
        print(f"Error getting available data: {str(e)}")
        return []
app/utils.py ADDED
@@ -0,0 +1,75 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import logging
3
+ import ccxt
4
+ from logging.handlers import RotatingFileHandler
5
+ from datetime import datetime
6
+ import json
7
+ from typing import List, Dict, Any
8
+
9
def setup_logger():
    """Configure and return the application's shared logger.

    Logs go to the console and to a size-rotated file under ``logs/``
    (10 MB per file, 5 backups). The logger is a process-wide singleton
    (named "crypto_data_source"); repeated calls return the already
    configured instance instead of attaching duplicate handlers, which
    previously caused every log line to be emitted multiple times.
    """
    logger = logging.getLogger("crypto_data_source")

    # logging.getLogger returns the same object on every call, so bail out
    # if this logger was already configured by an earlier call.
    if logger.handlers:
        return logger

    logger.setLevel(logging.INFO)

    # Create logs directory if it doesn't exist
    os.makedirs("logs", exist_ok=True)

    # Console handler
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)

    # Rotating file handler, one file per calendar day of first use
    log_file = f"logs/crypto_data_source_{datetime.now().strftime('%Y%m%d')}.log"
    file_handler = RotatingFileHandler(
        log_file, maxBytes=10*1024*1024, backupCount=5
    )
    file_handler.setLevel(logging.INFO)

    # Shared formatter for both handlers
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    )
    console_handler.setFormatter(formatter)
    file_handler.setFormatter(formatter)

    logger.addHandler(console_handler)
    logger.addHandler(file_handler)

    return logger
43
+
44
def get_available_exchanges() -> List[str]:
    """Return ids of the preferred CCXT exchanges that support OHLCV data.

    Only a curated list of major exchanges is probed; any exchange whose
    class fails to instantiate is skipped.
    """
    exchanges = []

    # Focus on major exchanges that are reliable and have good API support
    preferred_exchanges = [
        'binance', 'kucoin', 'coinbase', 'kraken', 'okx', 'bybit'
    ]

    for exchange_id in preferred_exchanges:
        try:
            exchange_class = getattr(ccxt, exchange_id)
            exchange = exchange_class()

            # Only include exchanges that support OHLCV data
            if exchange.has['fetchOHLCV']:
                exchanges.append(exchange_id)
        except Exception:
            # Skip exchanges that don't initialize properly. (Was a bare
            # 'except:', which also swallowed KeyboardInterrupt/SystemExit.)
            continue

    return exchanges
66
+
67
def format_timestamp(timestamp_ms: int) -> str:
    """Convert a millisecond epoch timestamp to an ISO-8601 string.

    NOTE(review): uses the naive local timezone (same as the original
    implementation) — confirm callers do not expect UTC.
    """
    seconds = timestamp_ms / 1000
    return datetime.fromtimestamp(seconds).isoformat()
70
+
71
def safe_json_serialize(obj):
    """``json.dumps`` ``default=`` hook: serialize datetimes as ISO strings.

    Raises TypeError for every other non-serializable type, as the json
    module's protocol requires.
    """
    if not isinstance(obj, datetime):
        raise TypeError(f"Type {type(obj)} not serializable")
    return obj.isoformat()
main.py ADDED
@@ -0,0 +1,248 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from fastapi import FastAPI, HTTPException, Query, Depends, Path
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
import os
import logging
from datetime import datetime, timedelta
from typing import List, Dict, Any, Optional
import json
import ccxt
import pandas as pd
import numpy as np

from app.models import CryptoPair, TimeFrame, ExchangeInfo
from app.utils import setup_logger, get_available_exchanges
from app.indicators import calculate_indicators
from app.storage import save_market_data, load_market_data, get_available_data

# Setup logging
logger = setup_logger()
logger.info("Starting Cryptocurrency Data Source API")

# Initialize FastAPI app
app = FastAPI(
    title="Cryptocurrency Data Source API",
    description="API for fetching cryptocurrency market data and technical indicators",
    version="1.0.0",
)

# Configure CORS.
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# very permissive — confirm this is intended for a public, credential-free API.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Initialize exchanges at import time: one rate-limited CCXT client per
# exchange id reported by get_available_exchanges().
available_exchanges = get_available_exchanges()
exchange_instances = {}  # exchange id -> instantiated ccxt exchange client

for exchange_id in available_exchanges:
    try:
        exchange_instances[exchange_id] = getattr(ccxt, exchange_id)({
            'enableRateLimit': True,
        })
        logger.info(f"Initialized exchange: {exchange_id}")
    except Exception as e:
        # An exchange that fails to construct is logged and left out of the
        # instance map; endpoints will 404 for it.
        logger.error(f"Error initializing {exchange_id}: {str(e)}")
50
+
51
@app.get("/", response_class=JSONResponse)
async def root():
    """Root endpoint returning API information"""
    endpoint_docs = {
        "GET /exchanges": "List available exchanges",
        "GET /exchanges/{exchange_id}/markets": "List markets for a specific exchange",
        "GET /data/{exchange_id}/{symbol}/{timeframe}": "Get OHLCV data for a market",
        "GET /indicators/{exchange_id}/{symbol}/{timeframe}": "Get technical indicators for a market",
    }
    return {
        "message": "Cryptocurrency Data Source API is running",
        "version": "1.0.0",
        "endpoints": endpoint_docs,
    }
64
+
65
@app.get("/exchanges", response_model=List[ExchangeInfo])
async def get_exchanges():
    """List every initialized exchange with its OHLCV capabilities."""
    infos = []
    for exchange_id, exchange in exchange_instances.items():
        try:
            # Not every CCXT client exposes a timeframes mapping.
            supported = (
                list(exchange.timeframes.keys())
                if hasattr(exchange, 'timeframes') else []
            )
            infos.append(ExchangeInfo(
                id=exchange_id,
                name=exchange.name,
                has_fetchOHLCV=exchange.has.get('fetchOHLCV', False),
                timeframes=supported
            ))
        except Exception as e:
            # A misbehaving exchange is logged and omitted from the list.
            logger.error(f"Error getting info for {exchange_id}: {str(e)}")

    return infos
82
+
83
@app.get("/exchanges/{exchange_id}/markets", response_model=List[CryptoPair])
async def get_markets(exchange_id: str = Path(..., description="Exchange ID")):
    """List the active trading pairs offered by one exchange.

    Returns 404 for an unknown exchange id and 500 if the market list
    cannot be loaded from the exchange.
    """
    if exchange_id not in exchange_instances:
        raise HTTPException(status_code=404, detail=f"Exchange {exchange_id} not found")

    try:
        markets = exchange_instances[exchange_id].load_markets()

        # Only expose markets the exchange currently marks as active.
        return [
            CryptoPair(
                symbol=symbol,
                base=market['base'],
                quote=market['quote'],
                type=market['type'],
            )
            for symbol, market in markets.items()
            if market['active']
        ]
    except Exception as e:
        logger.error(f"Error fetching markets for {exchange_id}: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/data/{exchange_id}/{symbol}/{timeframe}")
async def get_market_data(
    exchange_id: str = Path(..., description="Exchange ID"),
    symbol: str = Path(..., description="Market symbol (e.g. BTC/USDT)"),
    timeframe: str = Path(..., description="Timeframe (e.g. 1h, 1d)"),
    limit: int = Query(100, description="Number of candles to return"),
    from_timestamp: Optional[int] = Query(None, description="Start timestamp in milliseconds"),
    use_cache: bool = Query(True, description="Whether to use cached data")
):
    """Get OHLCV market data for a specific market.

    Serves candles from the on-disk JSON cache when ``use_cache`` is true
    and a cache file exists; otherwise fetches them from the exchange via
    ccxt and refreshes the cache.  Returns a list of dicts with the raw
    millisecond timestamp, an ISO-8601 UTC datetime, OHLC prices, and volume.

    Raises 404 (unknown exchange), 400 (OHLCV/timeframe unsupported),
    503 (exchange network error), or 500 (other failures).
    """
    # Local import: the module top imports only the `datetime` class.
    from datetime import timezone

    if exchange_id not in exchange_instances:
        raise HTTPException(status_code=404, detail=f"Exchange {exchange_id} not found")

    exchange = exchange_instances[exchange_id]

    # Check if exchange supports OHLCV
    if not exchange.has.get('fetchOHLCV', False):
        raise HTTPException(status_code=400, detail=f"Exchange {exchange_id} does not support OHLCV data")

    # Check if timeframe is supported
    if hasattr(exchange, 'timeframes') and timeframe not in exchange.timeframes:
        raise HTTPException(status_code=400, detail=f"Timeframe {timeframe} not supported by {exchange_id}")

    try:
        # One cache file per (exchange, symbol, timeframe) triple.
        cache_file = f"data/{exchange_id}_{symbol.replace('/', '_')}_{timeframe}.json"
        cache_file = cache_file.replace(':', '_')  # Replace colons for Windows compatibility

        if use_cache and os.path.exists(cache_file):
            data = load_market_data(cache_file)

            # BUGFIX: compare against None rather than truthiness so an
            # explicit from_timestamp of 0 (the epoch) is still honoured.
            if from_timestamp is not None:
                data = [candle for candle in data if candle[0] >= from_timestamp]

            # Keep only the most recent `limit` candles.
            data = data[-limit:]
        else:
            # Fetch fresh data from the exchange.
            data = exchange.fetch_ohlcv(
                symbol=symbol,
                timeframe=timeframe,
                limit=limit,
                since=from_timestamp
            )

            # BUGFIX: ensure the cache directory exists before writing,
            # otherwise the first save on a fresh container fails.
            os.makedirs(os.path.dirname(cache_file) or ".", exist_ok=True)
            save_market_data(data, cache_file)

        # Convert raw ccxt candles into JSON-friendly records.
        result = []
        for candle in data:
            timestamp, open_price, high, low, close, volume = candle
            result.append({
                "timestamp": timestamp,
                # Exchange timestamps are UTC epoch milliseconds; render them
                # as UTC instead of the server's local timezone so output is
                # machine-independent.
                "datetime": datetime.fromtimestamp(timestamp / 1000, tz=timezone.utc).isoformat(),
                "open": open_price,
                "high": high,
                "low": low,
                "close": close,
                "volume": volume
            })

        return result

    except ccxt.NetworkError as e:
        logger.error(f"Network error: {str(e)}")
        raise HTTPException(status_code=503, detail="Network error when connecting to exchange")
    except ccxt.ExchangeError as e:
        logger.error(f"Exchange error: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
    except Exception as e:
        logger.error(f"Error fetching data: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/indicators/{exchange_id}/{symbol}/{timeframe}")
async def get_indicators(
    exchange_id: str = Path(..., description="Exchange ID"),
    symbol: str = Path(..., description="Market symbol (e.g. BTC/USDT)"),
    timeframe: str = Path(..., description="Timeframe (e.g. 1h, 1d)"),
    limit: int = Query(100, description="Number of candles to include"),
    include_price_data: bool = Query(False, description="Whether to include price data"),
    indicators: List[str] = Query(
        ["rsi", "macd", "bollinger", "ema"],
        description="Indicators to calculate"
    )
):
    """Get technical indicators for a specific market.

    Fetches ``limit + 100`` candles (the extra history lets indicators with
    long warm-up windows stabilise), computes the requested indicators via
    ``calculate_indicators``, trims back to ``limit`` rows, and returns one
    record per candle.  Raises 500 on any failure.
    """
    try:
        # Get market data first (extra candles for accurate indicator warm-up).
        market_data = await get_market_data(
            exchange_id=exchange_id,
            symbol=symbol,
            timeframe=timeframe,
            limit=limit + 100,
            use_cache=True
        )

        # Convert to pandas DataFrame for the indicator routines.
        df = pd.DataFrame(market_data)

        result = calculate_indicators(df, indicators)

        # Drop the warm-up rows that were fetched only for accuracy.
        result = result.iloc[-limit:].reset_index(drop=True)

        # Strip raw price columns unless the caller asked for them.
        if not include_price_data:
            price_cols = [c for c in ('open', 'high', 'low', 'close', 'volume') if c in result.columns]
            if price_cols:
                result = result.drop(columns=price_cols)

        # BUGFIX: indicators are NaN during their warm-up period, and NaN is
        # not valid JSON — map it to None (serialised as null) before
        # handing the records to the response encoder.
        result = result.astype(object).where(pd.notnull(result), None)

        # Convert DataFrame to a list of per-candle records.
        return result.to_dict(orient='records')

    except Exception as e:
        logger.error(f"Error calculating indicators: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/data/available")
async def get_available_data_files():
    """Get list of available cached data files"""
    try:
        # Delegate directory scanning to the storage helper.
        return {"available_data": get_available_data("data")}
    except Exception as e:
        logger.error(f"Error listing available data: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
# Add a health check endpoint
@app.get("/health")
async def health_check():
    """Health check endpoint"""
    # Report liveness plus the current server time for quick diagnostics.
    now = datetime.now().isoformat()
    return {"status": "healthy", "timestamp": now}
if __name__ == "__main__":
    import uvicorn

    # Bind to all interfaces; the port comes from the PORT env var
    # (Hugging Face Spaces convention) and defaults to 7860.
    # FIX: os.getenv's default should be a string — int("7860") is then
    # applied uniformly whether the env var is set or not.
    uvicorn.run("main:app", host="0.0.0.0", port=int(os.getenv("PORT", "7860")), log_level="info")
requirements.txt CHANGED
@@ -1,36 +1,15 @@
1
- # Gradio Application Dependencies
2
- gradio==4.14.0
3
- pandas==2.1.4
4
- plotly==5.18.0
5
-
6
- # Task Scheduling
7
- APScheduler==3.10.4
8
-
9
- # API & HTTP
10
- aiohttp==3.9.1
11
- requests==2.31.0
12
- httpx>=0.24
13
-
14
- # FastAPI (for API mode)
15
  fastapi==0.104.1
16
  uvicorn[standard]==0.24.0
17
  pydantic==2.5.0
18
  python-multipart==0.0.6
19
- websockets==12.0
20
-
21
- # Database
22
- SQLAlchemy==2.0.23
23
-
24
- # Configuration
25
  python-dotenv==1.0.0
26
-
27
- # HuggingFace Integration (required for HF Spaces deployment)
28
- transformers>=4.44.0
29
- datasets>=3.0.0
30
- huggingface_hub>=0.24.0
31
- torch>=2.0.0
32
- sentencepiece>=0.1.99
33
- protobuf>=3.20.0
34
-
35
- # RSS Feed Parsing
36
- feedparser==6.0.11
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  fastapi==0.104.1
2
  uvicorn[standard]==0.24.0
3
  pydantic==2.5.0
4
  python-multipart==0.0.6
5
+ httpx==0.25.0
 
 
 
 
 
6
  python-dotenv==1.0.0
7
+ requests==2.31.0
8
+ ccxt==4.0.0
9
+ pandas==2.1.1
10
+ numpy==1.26.0
11
+ matplotlib==3.8.0
12
+ plotly==5.18.0
13
+ ta==0.10.2
14
+ scikit-learn==1.3.1
15
+ joblib==1.3.2