feat: trading bot MVP — ICT Order Block + Liquidity Sweep strategy

Full-stack trading bot with:
- FastAPI backend with ICT strategy (Order Block + Liquidity Sweep detection)
- Backtester engine with rolling window, spread simulation, and performance metrics
- Hybrid market data service (yfinance + TwelveData with rate limiting + SQLite cache)
- Simulated exchange for paper trading
- React/TypeScript frontend with TradingView lightweight-charts v5
- Live dashboard with candlestick chart, OHLC legend, trade markers
- Backtest page with configurable parameters, equity curve, and trade table
- WebSocket support for real-time updates
- Bot runner with asyncio loop for automated trading

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-24 23:25:51 +01:00
commit 4df8d53b1a
58 changed files with 7484 additions and 0 deletions

View File

@@ -0,0 +1,82 @@
"""
Mapping constants between the project's canonical names
and the symbols/intervals specific to each data source.
"""
# ── yfinance limits (days of history available per granularity) ──────────────
YF_MAX_DAYS: dict[str, int] = {
    "M1": 7,
    "M5": 60,
    "M15": 60,
    "M30": 60,
    "H1": 730,
    "H4": 730,
    "D": 9999,
}
# ── Duration of one candle, in minutes ────────────────────────────────────────
GRANULARITY_MINUTES: dict[str, int] = {
    "M1": 1,
    "M5": 5,
    "M15": 15,
    "M30": 30,
    "H1": 60,
    "H4": 240,
    "D": 1440,
}
# ── Mapping to yfinance interval strings ──────────────────────────────────────
GRANULARITY_TO_YF: dict[str, str] = {
    "M1": "1m",
    "M5": "5m",
    "M15": "15m",
    "M30": "30m",
    "H1": "1h",
    "H4": "4h",
    "D": "1d",
}
# ── Mapping to TwelveData interval strings ────────────────────────────────────
GRANULARITY_TO_TD: dict[str, str] = {
    "M1": "1min",
    "M5": "5min",
    "M15": "15min",
    "M30": "30min",
    "H1": "1h",
    "H4": "4h",
    "D": "1day",
}
# ── Mapping instrument → yfinance symbol ──────────────────────────────────────
INSTRUMENT_TO_YF: dict[str, str] = {
    "EUR_USD": "EURUSD=X",
    "GBP_USD": "GBPUSD=X",
    "USD_JPY": "USDJPY=X",
    "USD_CHF": "USDCHF=X",
    "AUD_USD": "AUDUSD=X",
    "USD_CAD": "USDCAD=X",
    "GBP_JPY": "GBPJPY=X",
    "EUR_JPY": "EURJPY=X",
    "EUR_GBP": "EURGBP=X",
    "SPX500_USD": "^GSPC",
    "NAS100_USD": "^NDX",
    "XAU_USD": "GC=F",
    "US30_USD": "YM=F",
}
# ── Mapping instrument → TwelveData symbol ────────────────────────────────────
# NOTE(review): the index CFDs map to proxies (SPY/QQQ ETFs, DJI index),
# not exact equivalents of the OANDA-style instruments — confirm intended.
INSTRUMENT_TO_TD: dict[str, str] = {
    "EUR_USD": "EUR/USD",
    "GBP_USD": "GBP/USD",
    "USD_JPY": "USD/JPY",
    "USD_CHF": "USD/CHF",
    "AUD_USD": "AUD/USD",
    "USD_CAD": "USD/CAD",
    "GBP_JPY": "GBP/JPY",
    "EUR_JPY": "EUR/JPY",
    "EUR_GBP": "EUR/GBP",
    "SPX500_USD": "SPY",
    "NAS100_USD": "QQQ",
    "XAU_USD": "XAU/USD",
    "US30_USD": "DJI",
}

View File

@@ -0,0 +1,159 @@
"""
TwelveData provider — unlimited historical OHLCV data.
Free plan: 800 requests/day, 8 req/min.
Docs: https://twelvedata.com/docs
"""
import asyncio
import logging
import time
from datetime import datetime, timedelta
from typing import Optional

import httpx
import pandas as pd

from app.core.config import settings
from app.services.data_providers.constants import GRANULARITY_TO_TD, INSTRUMENT_TO_TD

logger = logging.getLogger(__name__)

TWELVEDATA_BASE_URL = "https://api.twelvedata.com"
# Maximum number of points per TwelveData request (free plan)
MAX_OUTPUTSIZE = 5000
# Free-plan limit: 8 requests per minute
_RATE_LIMIT = 8
_RATE_WINDOW = 61  # seconds (slightly above 60 for safety margin)
# Module-wide sliding window of request timestamps, guarded by _rate_lock
_rate_lock = asyncio.Lock()
_request_times: list[float] = []
async def _rate_limited_get(client: httpx.AsyncClient, url: str, params: dict) -> httpx.Response:
    """GET wrapper that enforces TwelveData's 8-requests-per-minute limit.

    Maintains a module-wide sliding window of monotonic timestamps; when the
    window is full, sleeps until the oldest entry falls out of it. The whole
    call — including the HTTP request itself — runs under ``_rate_lock``, so
    concurrent tasks are serialised through this limiter.
    """
    global _request_times
    async with _rate_lock:
        now = time.monotonic()
        # Drop timestamps that have fallen out of the sliding window.
        _request_times = [t for t in _request_times if now - t < _RATE_WINDOW]
        if len(_request_times) >= _RATE_LIMIT:
            # Window is full: wait until the oldest request expires from it.
            wait = _RATE_WINDOW - (now - _request_times[0])
            if wait > 0:
                logger.info("TwelveData rate limit : attente %.1f s", wait)
                await asyncio.sleep(wait)
                # Re-purge after sleeping — time has moved on while we slept.
                _request_times = [t for t in _request_times if time.monotonic() - t < _RATE_WINDOW]
        _request_times.append(time.monotonic())
        return await client.get(url, params=params)
class TwelveDataProvider:
    """Fetches OHLCV candles from the TwelveData REST API.

    Free-plan limits: 800 requests/day, 8 requests/min, and at most
    ``MAX_OUTPUTSIZE`` points per request (hence the backward pagination
    in :meth:`fetch`).
    """

    def __init__(self) -> None:
        # API key comes from application settings; may be empty/unset.
        self._api_key = settings.twelvedata_api_key

    def is_configured(self) -> bool:
        """Return True when a TwelveData API key is configured."""
        return bool(self._api_key)

    async def fetch(
        self,
        instrument: str,
        granularity: str,
        start: datetime,
        end: Optional[datetime] = None,
    ) -> pd.DataFrame:
        """Fetch candles for the period [start, end].

        Paginates backwards from *end*, one ``MAX_OUTPUTSIZE``-point chunk at
        a time, until *start* is covered, then merges, deduplicates, sorts and
        trims the result to the requested window.

        Returns an empty DataFrame when the provider is not configured, the
        instrument/granularity is unsupported, or no data could be fetched.
        """
        if not self.is_configured():
            logger.warning("TwelveData : TWELVEDATA_API_KEY non configurée")
            return pd.DataFrame()
        td_symbol = INSTRUMENT_TO_TD.get(instrument)
        td_interval = GRANULARITY_TO_TD.get(granularity)
        if not td_symbol or not td_interval:
            logger.warning("TwelveData : instrument/granularité non supporté — %s %s", instrument, granularity)
            return pd.DataFrame()
        if end is None:
            # NOTE: naive UTC timestamps are used throughout this module.
            end = datetime.utcnow()
        logger.info(
            # Fixed format string: was "%s%s", which glued the two dates
            # together into one unreadable token in the logs.
            "TwelveData fetch : %s (%s) %s → %s",
            instrument, granularity, start.strftime("%Y-%m-%d"), end.strftime("%Y-%m-%d"),
        )
        # TwelveData caps each request at MAX_OUTPUTSIZE points, so for long
        # periods we walk backwards chunk by chunk until start is reached.
        all_frames: list[pd.DataFrame] = []
        current_end = end
        while current_end > start:
            df_chunk = await self._fetch_chunk(td_symbol, td_interval, start, current_end)
            if df_chunk.empty:
                break
            all_frames.append(df_chunk)
            oldest = df_chunk["time"].min()
            if oldest <= start:
                break
            # Step just past the oldest candle received so the next request
            # does not refetch it; current_end strictly decreases each pass.
            current_end = oldest - timedelta(seconds=1)
        if not all_frames:
            return pd.DataFrame()
        df = pd.concat(all_frames, ignore_index=True)
        df = df.drop_duplicates(subset=["time"])
        df = df.sort_values("time").reset_index(drop=True)
        # Chunks may overlap the requested window at both ends — trim exactly.
        df = df[(df["time"] >= start) & (df["time"] <= end)]
        logger.info("TwelveData : %d bougies récupérées pour %s %s", len(df), instrument, granularity)
        return df

    async def _fetch_chunk(
        self,
        td_symbol: str,
        td_interval: str,
        start: datetime,
        end: datetime,
    ) -> pd.DataFrame:
        """Issue one rate-limited ``/time_series`` request.

        Returns the chunk sorted ascending by time, or an empty DataFrame on
        any HTTP/API error (errors are logged, never raised — callers treat
        an empty frame as "no data").
        """
        params = {
            "symbol": td_symbol,
            "interval": td_interval,
            "start_date": start.strftime("%Y-%m-%d %H:%M:%S"),
            "end_date": end.strftime("%Y-%m-%d %H:%M:%S"),
            "outputsize": MAX_OUTPUTSIZE,
            "format": "JSON",
            "apikey": self._api_key,
        }
        try:
            async with httpx.AsyncClient(timeout=30) as client:
                resp = await _rate_limited_get(client, f"{TWELVEDATA_BASE_URL}/time_series", params=params)
                resp.raise_for_status()
                data = resp.json()
        except Exception as e:
            # Network, HTTP-status and JSON-decoding failures all land here.
            logger.error("TwelveData erreur HTTP : %s", e)
            return pd.DataFrame()
        # TwelveData reports API-level errors with HTTP 200 + status field.
        if data.get("status") == "error":
            logger.error("TwelveData API erreur : %s", data.get("message"))
            return pd.DataFrame()
        values = data.get("values", [])
        if not values:
            return pd.DataFrame()
        rows = [
            {
                "time": pd.to_datetime(v["datetime"]),
                "open": float(v["open"]),
                "high": float(v["high"]),
                "low": float(v["low"]),
                "close": float(v["close"]),
                # Forex series often carry no volume — default to 0.
                "volume": int(v.get("volume", 0)),
            }
            for v in values
        ]
        df = pd.DataFrame(rows)
        return df.sort_values("time").reset_index(drop=True)

View File

@@ -0,0 +1,134 @@
"""
yfinance provider — free OHLCV data.
Limits:
- M1: last 7 days
- M5/M15/M30: last 60 days
- H1/H4: last 730 days
- D: unlimited
"""
import asyncio
import logging
from datetime import datetime, timedelta, timezone
from typing import Optional

import pandas as pd

from app.services.data_providers.constants import (
    GRANULARITY_TO_YF,
    INSTRUMENT_TO_YF,
    YF_MAX_DAYS,
)

logger = logging.getLogger(__name__)
def _normalize(df: pd.DataFrame) -> pd.DataFrame:
"""Normalise un DataFrame yfinance vers le format interne."""
df = df.copy()
df.index = pd.to_datetime(df.index, utc=True)
df.index = df.index.tz_localize(None) if df.index.tz is not None else df.index
df.columns = [c.lower() for c in df.columns]
# yfinance peut retourner des colonnes multi-index
if isinstance(df.columns, pd.MultiIndex):
df.columns = df.columns.get_level_values(0)
df = df.rename(columns={"adj close": "close"})[["open", "high", "low", "close", "volume"]]
df = df.dropna(subset=["open", "high", "low", "close"])
df.index.name = "time"
df = df.reset_index()
df["time"] = pd.to_datetime(df["time"]).dt.tz_localize(None)
return df
def _fetch_sync(
    yf_symbol: str,
    yf_interval: str,
    start: datetime,
    end: datetime,
) -> pd.DataFrame:
    """Blocking yfinance download; intended to run inside an executor thread."""
    import yfinance as yf

    # The end date is pushed one day forward — presumably because yfinance
    # treats `end` as exclusive, so this keeps the final requested day in.
    inclusive_end = end + timedelta(days=1)
    return yf.Ticker(yf_symbol).history(
        interval=yf_interval,
        start=start.strftime("%Y-%m-%d"),
        end=inclusive_end.strftime("%Y-%m-%d"),
        auto_adjust=True,
        prepost=False,
    )
class YFinanceProvider:
    """Fetches OHLCV candles from Yahoo Finance (via yfinance).

    Yahoo limits how far back intraday data goes per granularity (see
    ``YF_MAX_DAYS``); requests beyond the cutoff are silently truncated.
    """

    def yf_cutoff(self, granularity: str) -> Optional[datetime]:
        """Return the oldest datetime yfinance can serve for *granularity*.

        Returns None when the granularity is unknown.
        """
        max_days = YF_MAX_DAYS.get(granularity)
        if max_days is None:
            return None
        # max_days - 1 keeps us safely inside Yahoo's window — presumably
        # to account for the current partial day; confirm against yfinance.
        return datetime.utcnow() - timedelta(days=max_days - 1)

    def can_provide(self, granularity: str, start: datetime) -> bool:
        """Return True when yfinance can cover data back to *start*."""
        cutoff = self.yf_cutoff(granularity)
        if cutoff is None:
            return False
        return start >= cutoff

    async def fetch(
        self,
        instrument: str,
        granularity: str,
        start: datetime,
        end: Optional[datetime] = None,
    ) -> pd.DataFrame:
        """Fetch candles for the period [start, end].

        *start* is clamped to the yfinance cutoff for the granularity.
        Returns an empty DataFrame for unsupported instrument/granularity,
        an empty window, or any download error (errors are logged).
        """
        yf_symbol = INSTRUMENT_TO_YF.get(instrument)
        yf_interval = GRANULARITY_TO_YF.get(granularity)
        if not yf_symbol or not yf_interval:
            logger.warning("yfinance : instrument ou granularité non supporté — %s %s", instrument, granularity)
            return pd.DataFrame()
        # Clamp start to the oldest date Yahoo will serve for this granularity.
        cutoff = self.yf_cutoff(granularity)
        if cutoff and start < cutoff:
            logger.debug("yfinance : start tronqué de %s à %s", start, cutoff)
            start = cutoff
        if end is None:
            end = datetime.utcnow()
        if start >= end:
            return pd.DataFrame()
        logger.info(
            # Fixed format string: was "%s%s", which glued the two dates
            # together into one unreadable token in the logs.
            "yfinance fetch : %s (%s) %s → %s",
            instrument, granularity, start.strftime("%Y-%m-%d"), end.strftime("%Y-%m-%d"),
        )
        try:
            # get_running_loop() instead of the deprecated get_event_loop():
            # inside a coroutine a running loop is guaranteed to exist.
            loop = asyncio.get_running_loop()
            # yfinance is blocking — run it in the default thread executor.
            raw = await loop.run_in_executor(
                None, _fetch_sync, yf_symbol, yf_interval, start, end
            )
        except Exception as e:
            logger.error("yfinance erreur : %s", e)
            return pd.DataFrame()
        if raw.empty:
            logger.warning("yfinance : aucune donnée pour %s %s", instrument, granularity)
            return pd.DataFrame()
        df = _normalize(raw)
        # _fetch_sync widens the request by one day, so trim to the exact window.
        df = df[(df["time"] >= start) & (df["time"] <= end)]
        logger.info("yfinance : %d bougies récupérées pour %s %s", len(df), instrument, granularity)
        return df