feat: trading bot MVP — ICT Order Block + Liquidity Sweep strategy
Full-stack trading bot with: - FastAPI backend with ICT strategy (Order Block + Liquidity Sweep detection) - Backtester engine with rolling window, spread simulation, and performance metrics - Hybrid market data service (yfinance + TwelveData with rate limiting + SQLite cache) - Simulated exchange for paper trading - React/TypeScript frontend with TradingView lightweight-charts v5 - Live dashboard with candlestick chart, OHLC legend, trade markers - Backtest page with configurable parameters, equity curve, and trade table - WebSocket support for real-time updates - Bot runner with asyncio loop for automated trading Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
0
backend/app/services/__init__.py
Normal file
0
backend/app/services/__init__.py
Normal file
0
backend/app/services/data_providers/__init__.py
Normal file
0
backend/app/services/data_providers/__init__.py
Normal file
82
backend/app/services/data_providers/constants.py
Normal file
82
backend/app/services/data_providers/constants.py
Normal file
@@ -0,0 +1,82 @@
|
||||
"""
|
||||
Constantes de mapping entre les noms canoniques du projet
|
||||
et les symboles/intervalles propres à chaque source de données.
|
||||
"""
|
||||
|
||||
# ── Limites yfinance (jours de données disponibles par granularité) ──────────
|
||||
YF_MAX_DAYS: dict[str, int] = {
|
||||
"M1": 7,
|
||||
"M5": 60,
|
||||
"M15": 60,
|
||||
"M30": 60,
|
||||
"H1": 730,
|
||||
"H4": 730,
|
||||
"D": 9999,
|
||||
}
|
||||
|
||||
# ── Durée d'une bougie en minutes ─────────────────────────────────────────────
|
||||
GRANULARITY_MINUTES: dict[str, int] = {
|
||||
"M1": 1,
|
||||
"M5": 5,
|
||||
"M15": 15,
|
||||
"M30": 30,
|
||||
"H1": 60,
|
||||
"H4": 240,
|
||||
"D": 1440,
|
||||
}
|
||||
|
||||
# ── Mapping vers les intervalles yfinance ─────────────────────────────────────
|
||||
GRANULARITY_TO_YF: dict[str, str] = {
|
||||
"M1": "1m",
|
||||
"M5": "5m",
|
||||
"M15": "15m",
|
||||
"M30": "30m",
|
||||
"H1": "1h",
|
||||
"H4": "4h",
|
||||
"D": "1d",
|
||||
}
|
||||
|
||||
# ── Mapping vers les intervalles TwelveData ───────────────────────────────────
|
||||
GRANULARITY_TO_TD: dict[str, str] = {
|
||||
"M1": "1min",
|
||||
"M5": "5min",
|
||||
"M15": "15min",
|
||||
"M30": "30min",
|
||||
"H1": "1h",
|
||||
"H4": "4h",
|
||||
"D": "1day",
|
||||
}
|
||||
|
||||
# ── Mapping instrument → symbole yfinance ─────────────────────────────────────
|
||||
INSTRUMENT_TO_YF: dict[str, str] = {
|
||||
"EUR_USD": "EURUSD=X",
|
||||
"GBP_USD": "GBPUSD=X",
|
||||
"USD_JPY": "USDJPY=X",
|
||||
"USD_CHF": "USDCHF=X",
|
||||
"AUD_USD": "AUDUSD=X",
|
||||
"USD_CAD": "USDCAD=X",
|
||||
"GBP_JPY": "GBPJPY=X",
|
||||
"EUR_JPY": "EURJPY=X",
|
||||
"EUR_GBP": "EURGBP=X",
|
||||
"SPX500_USD": "^GSPC",
|
||||
"NAS100_USD": "^NDX",
|
||||
"XAU_USD": "GC=F",
|
||||
"US30_USD": "YM=F",
|
||||
}
|
||||
|
||||
# ── Mapping instrument → symbole TwelveData ───────────────────────────────────
|
||||
INSTRUMENT_TO_TD: dict[str, str] = {
|
||||
"EUR_USD": "EUR/USD",
|
||||
"GBP_USD": "GBP/USD",
|
||||
"USD_JPY": "USD/JPY",
|
||||
"USD_CHF": "USD/CHF",
|
||||
"AUD_USD": "AUD/USD",
|
||||
"USD_CAD": "USD/CAD",
|
||||
"GBP_JPY": "GBP/JPY",
|
||||
"EUR_JPY": "EUR/JPY",
|
||||
"EUR_GBP": "EUR/GBP",
|
||||
"SPX500_USD": "SPY",
|
||||
"NAS100_USD": "QQQ",
|
||||
"XAU_USD": "XAU/USD",
|
||||
"US30_USD": "DJI",
|
||||
}
|
||||
159
backend/app/services/data_providers/twelvedata_provider.py
Normal file
159
backend/app/services/data_providers/twelvedata_provider.py
Normal file
@@ -0,0 +1,159 @@
|
||||
"""
|
||||
Provider TwelveData — données OHLCV historiques illimitées.
|
||||
|
||||
Plan gratuit : 800 requêtes/jour, 8 req/min.
|
||||
Docs : https://twelvedata.com/docs
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional
|
||||
|
||||
import httpx
|
||||
import pandas as pd
|
||||
|
||||
from app.core.config import settings
|
||||
from app.services.data_providers.constants import GRANULARITY_TO_TD, INSTRUMENT_TO_TD
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
TWELVEDATA_BASE_URL = "https://api.twelvedata.com"
|
||||
# Nombre max de points par requête TwelveData (plan gratuit)
|
||||
MAX_OUTPUTSIZE = 5000
|
||||
# Limite du plan gratuit : 8 req/min
|
||||
_RATE_LIMIT = 8
|
||||
_RATE_WINDOW = 61 # secondes (légèrement au-dessus de 60 pour la marge)
|
||||
_rate_lock = asyncio.Lock()
|
||||
_request_times: list[float] = []
|
||||
|
||||
|
||||
async def _rate_limited_get(client: httpx.AsyncClient, url: str, params: dict) -> httpx.Response:
    """Perform a GET while honouring TwelveData's 8 requests/minute cap.

    Bookkeeping (a sliding window of request timestamps) is done under a
    module-level lock; callers that would exceed the cap sleep until the
    oldest timestamp leaves the window.
    """
    global _request_times
    async with _rate_lock:
        now = time.monotonic()
        # Drop timestamps that have aged out of the sliding window.
        _request_times = [t for t in _request_times if now - t < _RATE_WINDOW]
        if len(_request_times) >= _RATE_LIMIT:
            wait = _RATE_WINDOW - (now - _request_times[0])
            if wait > 0:
                logger.info("TwelveData rate limit : attente %.1f s", wait)
                await asyncio.sleep(wait)
                # Re-purge after sleeping: the oldest entry has now expired.
                _request_times = [t for t in _request_times if time.monotonic() - t < _RATE_WINDOW]
        _request_times.append(time.monotonic())
    return await client.get(url, params=params)
|
||||
|
||||
|
||||
class TwelveDataProvider:
    """Fetches OHLCV candles from the TwelveData HTTP API.

    Returned frames have columns: time, open, high, low, close, volume.
    All methods return an empty DataFrame on error rather than raising.
    """

    def __init__(self) -> None:
        self._api_key = settings.twelvedata_api_key

    def is_configured(self) -> bool:
        """True when a TwelveData API key is available."""
        return bool(self._api_key)

    async def fetch(
        self,
        instrument: str,
        granularity: str,
        start: datetime,
        end: Optional[datetime] = None,
    ) -> pd.DataFrame:
        """Fetch candles for the period [start, end] (end defaults to now).

        TwelveData caps each request at MAX_OUTPUTSIZE points, so long
        windows are covered by walking backwards one chunk at a time.
        """
        if not self.is_configured():
            logger.warning("TwelveData : TWELVEDATA_API_KEY non configurée")
            return pd.DataFrame()

        td_symbol = INSTRUMENT_TO_TD.get(instrument)
        td_interval = GRANULARITY_TO_TD.get(granularity)

        if not td_symbol or not td_interval:
            logger.warning("TwelveData : instrument/granularité non supporté — %s %s", instrument, granularity)
            return pd.DataFrame()

        if end is None:
            end = datetime.utcnow()

        logger.info(
            "TwelveData fetch : %s (%s) %s → %s",
            instrument, granularity, start.strftime("%Y-%m-%d"), end.strftime("%Y-%m-%d"),
        )

        # Walk backwards: each chunk returns the most recent candles of the
        # remaining window; the next request ends just before the oldest one.
        all_frames: list[pd.DataFrame] = []
        current_end = end

        while current_end > start:
            df_chunk = await self._fetch_chunk(td_symbol, td_interval, start, current_end)
            if df_chunk.empty:
                break
            all_frames.append(df_chunk)
            oldest = df_chunk["time"].min()
            if oldest <= start:
                break
            # Step back for the next request (avoid re-fetching `oldest`).
            current_end = oldest - timedelta(seconds=1)

        if not all_frames:
            return pd.DataFrame()

        df = pd.concat(all_frames, ignore_index=True)
        df = df.drop_duplicates(subset=["time"])
        df = df.sort_values("time").reset_index(drop=True)
        # Chunks may slightly overshoot the window; trim to the exact request.
        df = df[(df["time"] >= start) & (df["time"] <= end)]

        logger.info("TwelveData : %d bougies récupérées pour %s %s", len(df), instrument, granularity)
        return df

    async def _fetch_chunk(
        self,
        td_symbol: str,
        td_interval: str,
        start: datetime,
        end: datetime,
    ) -> pd.DataFrame:
        """Fetch one /time_series page (up to MAX_OUTPUTSIZE candles)."""
        params = {
            "symbol": td_symbol,
            "interval": td_interval,
            "start_date": start.strftime("%Y-%m-%d %H:%M:%S"),
            "end_date": end.strftime("%Y-%m-%d %H:%M:%S"),
            "outputsize": MAX_OUTPUTSIZE,
            "format": "JSON",
            "apikey": self._api_key,
        }

        try:
            async with httpx.AsyncClient(timeout=30) as client:
                resp = await _rate_limited_get(client, f"{TWELVEDATA_BASE_URL}/time_series", params=params)
                resp.raise_for_status()
                data = resp.json()
        except Exception as e:
            logger.error("TwelveData erreur HTTP : %s", e)
            return pd.DataFrame()

        # TwelveData reports API-level failures in-band with status == "error".
        if data.get("status") == "error":
            logger.error("TwelveData API erreur : %s", data.get("message"))
            return pd.DataFrame()

        values = data.get("values", [])
        if not values:
            return pd.DataFrame()

        rows = [
            {
                "time": pd.to_datetime(v["datetime"]),
                "open": float(v["open"]),
                "high": float(v["high"]),
                "low": float(v["low"]),
                "close": float(v["close"]),
                # `or 0` guards against an explicit null volume in the payload.
                "volume": int(v.get("volume") or 0),
            }
            for v in values
        ]

        df = pd.DataFrame(rows)
        return df.sort_values("time").reset_index(drop=True)
|
||||
134
backend/app/services/data_providers/yfinance_provider.py
Normal file
134
backend/app/services/data_providers/yfinance_provider.py
Normal file
@@ -0,0 +1,134 @@
|
||||
"""
|
||||
Provider yfinance — données OHLCV gratuites.
|
||||
|
||||
Limites :
|
||||
- M1 : 7 derniers jours
|
||||
- M5/M15/M30 : 60 derniers jours
|
||||
- H1/H4 : 730 derniers jours
|
||||
- D : illimité
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Optional
|
||||
|
||||
import pandas as pd
|
||||
|
||||
from app.services.data_providers.constants import (
|
||||
GRANULARITY_TO_YF,
|
||||
INSTRUMENT_TO_YF,
|
||||
YF_MAX_DAYS,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _normalize(df: pd.DataFrame) -> pd.DataFrame:
|
||||
"""Normalise un DataFrame yfinance vers le format interne."""
|
||||
df = df.copy()
|
||||
df.index = pd.to_datetime(df.index, utc=True)
|
||||
df.index = df.index.tz_localize(None) if df.index.tz is not None else df.index
|
||||
|
||||
df.columns = [c.lower() for c in df.columns]
|
||||
# yfinance peut retourner des colonnes multi-index
|
||||
if isinstance(df.columns, pd.MultiIndex):
|
||||
df.columns = df.columns.get_level_values(0)
|
||||
|
||||
df = df.rename(columns={"adj close": "close"})[["open", "high", "low", "close", "volume"]]
|
||||
df = df.dropna(subset=["open", "high", "low", "close"])
|
||||
df.index.name = "time"
|
||||
df = df.reset_index()
|
||||
df["time"] = pd.to_datetime(df["time"]).dt.tz_localize(None)
|
||||
return df
|
||||
|
||||
|
||||
def _fetch_sync(
    yf_symbol: str,
    yf_interval: str,
    start: datetime,
    end: datetime,
) -> pd.DataFrame:
    """Blocking yfinance download; meant to run inside an executor thread."""
    # Imported lazily so importing this module never pays yfinance's cost.
    import yfinance as yf

    # `end` is pushed one day forward so the final requested day is covered;
    # callers trim the result back to the exact window afterwards.
    return yf.Ticker(yf_symbol).history(
        interval=yf_interval,
        start=start.strftime("%Y-%m-%d"),
        end=(end + timedelta(days=1)).strftime("%Y-%m-%d"),
        auto_adjust=True,
        prepost=False,
    )
|
||||
|
||||
|
||||
class YFinanceProvider:
    """Fetches candles from Yahoo Finance (via the blocking yfinance library)."""

    def yf_cutoff(self, granularity: str) -> Optional[datetime]:
        """Return the oldest datetime yfinance can serve for `granularity`.

        Returns None when the granularity is unknown. `max_days - 1` keeps
        one day of margin inside yfinance's advertised window.
        """
        max_days = YF_MAX_DAYS.get(granularity)
        if max_days is None:
            return None
        # Naive-UTC datetimes, consistent with the rest of the module.
        return datetime.utcnow() - timedelta(days=max_days - 1)

    def can_provide(self, granularity: str, start: datetime) -> bool:
        """Whether yfinance can cover data starting at `start`."""
        cutoff = self.yf_cutoff(granularity)
        if cutoff is None:
            return False
        return start >= cutoff

    async def fetch(
        self,
        instrument: str,
        granularity: str,
        start: datetime,
        end: Optional[datetime] = None,
    ) -> pd.DataFrame:
        """Fetch candles for [start, end] (end defaults to now).

        `start` is silently truncated to the yfinance history limit when
        it lies beyond it. Returns an empty DataFrame on any failure.
        """
        yf_symbol = INSTRUMENT_TO_YF.get(instrument)
        yf_interval = GRANULARITY_TO_YF.get(granularity)

        if not yf_symbol or not yf_interval:
            logger.warning("yfinance : instrument ou granularité non supporté — %s %s", instrument, granularity)
            return pd.DataFrame()

        # Clamp start to what yfinance can actually serve.
        cutoff = self.yf_cutoff(granularity)
        if cutoff and start < cutoff:
            logger.debug("yfinance : start tronqué de %s à %s", start, cutoff)
            start = cutoff

        if end is None:
            end = datetime.utcnow()

        if start >= end:
            return pd.DataFrame()

        logger.info(
            "yfinance fetch : %s (%s) %s → %s",
            instrument, granularity, start.strftime("%Y-%m-%d"), end.strftime("%Y-%m-%d"),
        )

        try:
            # asyncio.to_thread replaces the deprecated
            # get_event_loop() + run_in_executor pattern inside coroutines;
            # the blocking yfinance call still runs off the event loop.
            raw = await asyncio.to_thread(_fetch_sync, yf_symbol, yf_interval, start, end)
        except Exception as e:
            logger.error("yfinance erreur : %s", e)
            return pd.DataFrame()

        if raw.empty:
            logger.warning("yfinance : aucune donnée pour %s %s", instrument, granularity)
            return pd.DataFrame()

        df = _normalize(raw)
        # _fetch_sync over-fetches one extra day; trim back to the request.
        df = df[(df["time"] >= start) & (df["time"] <= end)]
        logger.info("yfinance : %d bougies récupérées pour %s %s", len(df), instrument, granularity)
        return df
|
||||
264
backend/app/services/market_data.py
Normal file
264
backend/app/services/market_data.py
Normal file
@@ -0,0 +1,264 @@
|
||||
"""
|
||||
MarketDataService — source de données hybride avec cache DB.
|
||||
|
||||
Stratégie de fetch pour une période [start, end] demandée :
|
||||
|
||||
1. DB d'abord → on récupère ce qu'on a déjà, on ne refetch jamais ce qui existe
|
||||
2. Gaps récents → yfinance (dans ses limites temporelles)
|
||||
3. Gaps historiques → TwelveData (pour tout ce que yfinance ne peut pas couvrir)
|
||||
4. Tout est stocké → les prochaines requêtes seront servies depuis la DB
|
||||
|
||||
Exemple (M1, 10 derniers jours demandés) :
|
||||
- DB : déjà ce qu'on a en cache
|
||||
- yfinance : J-7 → maintenant (limite M1 = 7 jours)
|
||||
- TwelveData : J-10 → J-7 (historique au-delà de yfinance)
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional
|
||||
|
||||
import pandas as pd
|
||||
from sqlalchemy import and_, select, text
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.candle import Candle
|
||||
from app.services.data_providers.constants import GRANULARITY_MINUTES
|
||||
from app.services.data_providers.twelvedata_provider import TwelveDataProvider
|
||||
from app.services.data_providers.yfinance_provider import YFinanceProvider
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Facteur pour compenser weekends + jours fériés dans le calcul de la fenêtre
|
||||
TRADING_DAYS_FACTOR = 1.5
|
||||
|
||||
|
||||
class MarketDataService:
    """Hybrid candle source: SQLite cache first, then yfinance / TwelveData."""

    def __init__(self, db: AsyncSession) -> None:
        self._db = db
        self._yf = YFinanceProvider()
        self._td = TwelveDataProvider()

    # ── Public API ────────────────────────────────────────────────────────────

    async def get_candles(
        self,
        instrument: str,
        granularity: str,
        count: int = 200,
        start: Optional[datetime] = None,
        end: Optional[datetime] = None,
    ) -> pd.DataFrame:
        """Return up to `count` candles for instrument/granularity.

        If start/end are given they define the exact range; otherwise a
        window big enough for `count` candles is derived from `end` (now).

        Process:
        1. Compute the required time window
        2. Detect and fill missing ranges (yfinance + TwelveData)
        3. Read from the DB and return
        """
        if end is None:
            end = datetime.utcnow()
        if start is None:
            minutes = GRANULARITY_MINUTES.get(granularity, 60)
            # Oversize the window so weekends/holidays still leave `count` candles.
            start = end - timedelta(minutes=int(minutes * count * TRADING_DAYS_FACTOR))

        await self._fill_gaps(instrument, granularity, start, end)
        return await self._db_fetch(instrument, granularity, start, end, limit=count)

    async def get_latest_price(self, instrument: str) -> Optional[float]:
        """Return the most recent known close (DB first, else a fresh M1 fetch)."""
        stmt = (
            select(Candle.close)
            .where(Candle.instrument == instrument)
            .order_by(Candle.time.desc())
            .limit(1)
        )
        result = await self._db.execute(stmt)
        price = result.scalar_one_or_none()
        # Explicit None check (not truthiness): a legitimate 0.0 close must
        # not be mistaken for "no data" and trigger a pointless refetch.
        if price is not None:
            return float(price)

        df = await self.get_candles(instrument, "M1", count=2)
        return float(df.iloc[-1]["close"]) if not df.empty else None

    # ── Gap detection and filling ─────────────────────────────────────────────

    async def _fill_gaps(
        self,
        instrument: str,
        granularity: str,
        start: datetime,
        end: datetime,
    ) -> None:
        """Fetch and store every missing sub-range of [start, end]."""
        gaps = await self._find_gaps(instrument, granularity, start, end)
        for gap_start, gap_end in gaps:
            await self._fetch_and_store_gap(instrument, granularity, gap_start, gap_end)

    async def _find_gaps(
        self,
        instrument: str,
        granularity: str,
        start: datetime,
        end: datetime,
    ) -> list[tuple[datetime, datetime]]:
        """Return the list of (gap_start, gap_end) ranges missing from the DB.

        Logic:
        - Nothing cached in the range → one gap = (start, end)
        - Otherwise → fill before the oldest and/or after the newest candle
          (interior holes between cached candles are NOT detected here).
        """
        stmt = (
            select(Candle.time)
            .where(
                and_(
                    Candle.instrument == instrument,
                    Candle.granularity == granularity,
                    Candle.time >= start,
                    Candle.time <= end,
                )
            )
            .order_by(Candle.time)
        )
        result = await self._db.execute(stmt)
        times = [r[0] for r in result.fetchall()]

        if not times:
            return [(start, end)]

        gaps: list[tuple[datetime, datetime]] = []
        interval = timedelta(minutes=GRANULARITY_MINUTES.get(granularity, 60))
        oldest, newest = times[0], times[-1]

        # Leading gap: data older than anything cached was requested.
        if start < oldest - interval:
            gaps.append((start, oldest))

        # Trailing gap: data fresher than anything cached was requested.
        # 2 intervals of slack avoids refetching the still-forming candle.
        freshness_threshold = interval * 2
        if end > newest + freshness_threshold:
            gaps.append((newest, end))

        return gaps

    async def _fetch_and_store_gap(
        self,
        instrument: str,
        granularity: str,
        gap_start: datetime,
        gap_end: datetime,
    ) -> None:
        """Fill one gap:

        1. yfinance for the recent part (within its history limits)
        2. TwelveData for the historical part, and as fallback when
           yfinance returned nothing
        """
        yf_cutoff = self._yf.yf_cutoff(granularity)
        yf_covered = False

        # ── yfinance: recent part of the gap ─────────────────────────────────
        if yf_cutoff is not None:
            yf_start = max(gap_start, yf_cutoff)
            if yf_start < gap_end:
                df_yf = await self._yf.fetch(instrument, granularity, yf_start, gap_end)
                if not df_yf.empty:
                    await self._store(df_yf, instrument, granularity)
                    yf_covered = True

        # ── TwelveData: history + fallback when yfinance is unavailable ───────
        if self._td.is_configured():
            # Historical part (before the yfinance limit)
            td_end = yf_cutoff if (yf_cutoff and gap_start < yf_cutoff) else None
            if td_end and gap_start < td_end:
                df_td = await self._td.fetch(instrument, granularity, gap_start, td_end)
                if not df_td.empty:
                    await self._store(df_td, instrument, granularity)

            # Fallback for the recent part when yfinance returned nothing
            if not yf_covered:
                yf_start = max(gap_start, yf_cutoff) if yf_cutoff else gap_start
                if yf_start < gap_end:
                    logger.info(
                        "yfinance indisponible — fallback TwelveData pour %s %s [%s → %s]",
                        instrument, granularity,
                        yf_start.strftime("%Y-%m-%d"), gap_end.strftime("%Y-%m-%d"),
                    )
                    df_td2 = await self._td.fetch(instrument, granularity, yf_start, gap_end)
                    if not df_td2.empty:
                        await self._store(df_td2, instrument, granularity)
        elif not yf_covered:
            logger.warning(
                "Gap [%s → %s] pour %s %s — "
                "TWELVEDATA_API_KEY manquante et yfinance indisponible.",
                gap_start.strftime("%Y-%m-%d"),
                gap_end.strftime("%Y-%m-%d"),
                instrument,
                granularity,
            )

    # ── DB helpers ────────────────────────────────────────────────────────────

    async def _db_fetch(
        self,
        instrument: str,
        granularity: str,
        start: datetime,
        end: datetime,
        limit: int = 5000,
    ) -> pd.DataFrame:
        """Read up to `limit` of the most recent candles in [start, end],
        returned in ascending time order."""
        stmt = (
            select(Candle)
            .where(
                and_(
                    Candle.instrument == instrument,
                    Candle.granularity == granularity,
                    Candle.time >= start,
                    Candle.time <= end,
                )
            )
            # Descending + limit keeps the NEWEST `limit` rows...
            .order_by(Candle.time.desc())
            .limit(limit)
        )
        result = await self._db.execute(stmt)
        rows = result.scalars().all()

        if not rows:
            return pd.DataFrame(columns=["time", "open", "high", "low", "close", "volume"])

        df = pd.DataFrame(
            [{"time": r.time, "open": r.open, "high": r.high,
              "low": r.low, "close": r.close, "volume": r.volume}
             for r in rows]
        )
        # ...and this re-sort hands them back oldest-first.
        return df.sort_values("time").reset_index(drop=True)

    async def _store(self, df: pd.DataFrame, instrument: str, granularity: str) -> None:
        """Insert candles with INSERT OR IGNORE (SQLite upsert-dedup).

        Rows already present for (instrument, granularity, time) are never
        modified, so cached data is effectively immutable.
        """
        if df.empty:
            return

        # Hoisted out of the loop: the statement text never changes per row.
        insert_stmt = text(
            "INSERT OR IGNORE INTO candles "
            "(instrument, granularity, time, open, high, low, close, volume, complete) "
            "VALUES (:instrument, :granularity, :time, :open, :high, :low, :close, :volume, 1)"
        )

        for _, row in df.iterrows():
            await self._db.execute(
                insert_stmt,
                {
                    "instrument": instrument,
                    "granularity": granularity,
                    # Strip tz so SQLite comparisons stay consistent with
                    # the naive datetimes used everywhere else.
                    "time": pd.Timestamp(row["time"]).to_pydatetime().replace(tzinfo=None),
                    "open": float(row["open"]),
                    "high": float(row["high"]),
                    "low": float(row["low"]),
                    "close": float(row["close"]),
                    "volume": int(row.get("volume", 0)),
                },
            )

        await self._db.commit()
|
||||
67
backend/app/services/trade_manager.py
Normal file
67
backend/app/services/trade_manager.py
Normal file
@@ -0,0 +1,67 @@
|
||||
"""
|
||||
Gestion des positions : sizing, validation, suivi.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.exchange.base import AbstractExchange, AccountInfo, OrderResult
|
||||
from app.core.strategy.base import TradeSignal
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TradeManager:
    """Turns strategy signals into sized orders on the configured exchange."""

    def __init__(self, exchange: AbstractExchange) -> None:
        self._exchange = exchange
        self._risk_percent = settings.bot_risk_percent
        # NOTE(review): a fixed 0.0001 pip assumes 4-decimal FX pairs; JPY
        # pairs (pip = 0.01) would be mis-sized — confirm intended scope.
        self._pip_value = 0.0001

    async def execute_signal(self, signal: TradeSignal) -> Optional[OrderResult]:
        """Size the position for `signal` and place the order.

        Returns None when the stop distance is under one pip or the
        resulting position is below the 1000-unit minimum.
        """
        account = await self._exchange.get_account_info()

        risk_pips = abs(signal.entry_price - signal.stop_loss) / self._pip_value
        if risk_pips < 1:
            logger.warning("Signal ignoré : risque en pips trop faible (%s)", risk_pips)
            return None

        units = self._calculate_units(account, risk_pips, signal)
        if abs(units) < 1000:
            logger.warning("Signal ignoré : taille de position trop petite (%s)", units)
            return None

        logger.info(
            "Exécution signal %s %s — entry=%.5f SL=%.5f TP=%.5f units=%d",
            signal.direction, signal.signal_type,
            signal.entry_price, signal.stop_loss, signal.take_profit, units,
        )
        # NOTE(review): recovering the instrument from signal_type is fragile —
        # any signal_type without "+" silently falls back to EUR_USD; confirm
        # the upstream signal format guarantees the "INSTRUMENT+..." shape.
        return await self._exchange.place_order(
            instrument=signal.signal_type.split("+")[0] if "+" in signal.signal_type else "EUR_USD",
            units=units,
            stop_loss=signal.stop_loss,
            take_profit=signal.take_profit,
        )

    def _calculate_units(
        self,
        account: AccountInfo,
        risk_pips: float,
        signal: TradeSignal,
    ) -> float:
        """Risk-based position sizing.

        units = (balance * risk%) / (risk_pips * pip_value_per_unit),
        rounded down to the nearest hundred; the sign follows
        signal.direction ("buy" → positive, otherwise negative).
        """
        risk_amount = account.balance * (self._risk_percent / 100)
        # Simplification: 1 pip = 0.0001, pip value on a mini-lot = $1.
        pip_value_per_unit = self._pip_value
        sized = risk_amount / (risk_pips * pip_value_per_unit)
        # Round down to the nearest hundred units.
        rounded = (int(sized) // 100) * 100
        return float(rounded) if signal.direction == "buy" else float(-rounded)
|
||||
Reference in New Issue
Block a user