feat(v0.1.0): project foundation with logging and config
This commit is contained in:
89
.gitignore
vendored
Normal file
89
.gitignore
vendored
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
# Python
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
*.so
|
||||||
|
.Python
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
pip-wheel-metadata/
|
||||||
|
share/python-wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
MANIFEST
|
||||||
|
|
||||||
|
# Virtual environments
|
||||||
|
venv/
|
||||||
|
env/
|
||||||
|
ENV/
|
||||||
|
env.bak/
|
||||||
|
venv.bak/
|
||||||
|
|
||||||
|
# IDE
|
||||||
|
.vscode/
|
||||||
|
.idea/
|
||||||
|
*.swp
|
||||||
|
*.swo
|
||||||
|
*~
|
||||||
|
.DS_Store
|
||||||
|
|
||||||
|
# Environment variables
|
||||||
|
.env
|
||||||
|
.env.local
|
||||||
|
|
||||||
|
# Data files (large, don't commit)
|
||||||
|
data/raw/**/*.csv
|
||||||
|
data/raw/**/*.parquet
|
||||||
|
data/processed/**/*
|
||||||
|
!data/processed/**/.gitkeep
|
||||||
|
data/labels/**/*
|
||||||
|
!data/labels/**/.gitkeep
|
||||||
|
data/screenshots/**/*
|
||||||
|
!data/screenshots/**/.gitkeep
|
||||||
|
|
||||||
|
# Models (large binary files)
|
||||||
|
models/**/*.pkl
|
||||||
|
models/**/*.joblib
|
||||||
|
models/**/*.h5
|
||||||
|
models/**/*.pb
|
||||||
|
models/**/latest
|
||||||
|
|
||||||
|
# Logs
|
||||||
|
logs/**/*.log
|
||||||
|
logs/**/*.log.*
|
||||||
|
logs/**/archive/
|
||||||
|
|
||||||
|
# Jupyter Notebooks
|
||||||
|
.ipynb_checkpoints
|
||||||
|
*.ipynb
|
||||||
|
|
||||||
|
# Testing
|
||||||
|
.pytest_cache/
|
||||||
|
.coverage
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.hypothesis/
|
||||||
|
|
||||||
|
# MyPy
|
||||||
|
.mypy_cache/
|
||||||
|
.dmypy.json
|
||||||
|
dmypy.json
|
||||||
|
|
||||||
|
# Backup files
|
||||||
|
backups/**/*
|
||||||
|
!backups/**/.gitkeep
|
||||||
|
|
||||||
|
# OS
|
||||||
|
Thumbs.db
|
||||||
|
|
||||||
37
.pre-commit-config.yaml
Normal file
37
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
repos:
|
||||||
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
|
rev: v4.5.0
|
||||||
|
hooks:
|
||||||
|
- id: trailing-whitespace
|
||||||
|
- id: end-of-file-fixer
|
||||||
|
- id: check-yaml
|
||||||
|
- id: check-added-large-files
|
||||||
|
- id: check-json
|
||||||
|
- id: check-toml
|
||||||
|
- id: check-merge-conflict
|
||||||
|
- id: debug-statements
|
||||||
|
|
||||||
|
- repo: https://github.com/psf/black
|
||||||
|
rev: 23.11.0
|
||||||
|
hooks:
|
||||||
|
- id: black
|
||||||
|
language_version: python3.10
|
||||||
|
|
||||||
|
- repo: https://github.com/pycqa/isort
|
||||||
|
rev: 5.12.0
|
||||||
|
hooks:
|
||||||
|
- id: isort
|
||||||
|
|
||||||
|
- repo: https://github.com/pycqa/flake8
|
||||||
|
rev: 6.1.0
|
||||||
|
hooks:
|
||||||
|
- id: flake8
|
||||||
|
args: [--max-line-length=100, --extend-ignore=E203]
|
||||||
|
|
||||||
|
- repo: https://github.com/pre-commit/mirrors-mypy
|
||||||
|
rev: v1.7.0
|
||||||
|
hooks:
|
||||||
|
- id: mypy
|
||||||
|
args: [--ignore-missing-imports]
|
||||||
|
additional_dependencies: [types-pyyaml, types-python-dotenv]
|
||||||
|
|
||||||
28
CHANGELOG.md
Normal file
28
CHANGELOG.md
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
All notable changes to this project will be documented in this file.
|
||||||
|
|
||||||
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||||
|
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
## [0.1.0] - 2026-01-XX
|
||||||
|
|
||||||
|
### Added
|
||||||
|
- Project foundation with complete directory structure
|
||||||
|
- Comprehensive logging system with JSON and console formatters
|
||||||
|
- Configuration management with YAML and environment variable support
|
||||||
|
- Custom exception hierarchy for error handling
|
||||||
|
- Core constants and enums for pattern types and trading concepts
|
||||||
|
- Base classes for detectors and models
|
||||||
|
- Initial test suite with pytest
|
||||||
|
- Development tooling (black, flake8, mypy, pre-commit hooks)
|
||||||
|
- Documentation structure
|
||||||
|
|
||||||
|
### Infrastructure
|
||||||
|
- Git repository initialization
|
||||||
|
- Requirements files for production and development
|
||||||
|
- Setup.py and pyproject.toml for package management
|
||||||
|
- Makefile for common commands
|
||||||
|
- .gitignore with comprehensive patterns
|
||||||
|
- Environment variable template (.env.example)
|
||||||
|
|
||||||
1371
ICT_ML_Project_Structure.html
Normal file
1371
ICT_ML_Project_Structure.html
Normal file
File diff suppressed because it is too large
Load Diff
22
LICENSE
Normal file
22
LICENSE
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2026 ICT ML Trading Team
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
|
||||||
49
Makefile
Normal file
49
Makefile
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
.PHONY: help install install-dev test lint format type-check clean run setup-db
|
||||||
|
|
||||||
|
help:
|
||||||
|
@echo "ICT ML Trading System - Makefile Commands"
|
||||||
|
@echo ""
|
||||||
|
@echo " make install - Install production dependencies"
|
||||||
|
@echo " make install-dev - Install development dependencies"
|
||||||
|
@echo " make test - Run test suite"
|
||||||
|
@echo " make lint - Run linters (flake8)"
|
||||||
|
@echo " make format - Format code (black, isort)"
|
||||||
|
@echo " make type-check - Run type checker (mypy)"
|
||||||
|
@echo " make clean - Clean build artifacts"
|
||||||
|
@echo " make setup-db - Initialize database"
|
||||||
|
|
||||||
|
install:
|
||||||
|
pip install -r requirements.txt
|
||||||
|
|
||||||
|
install-dev:
|
||||||
|
pip install -r requirements-dev.txt
|
||||||
|
pre-commit install
|
||||||
|
|
||||||
|
test:
|
||||||
|
pytest tests/ -v
|
||||||
|
|
||||||
|
lint:
|
||||||
|
flake8 src/ tests/
|
||||||
|
bandit -r src/
|
||||||
|
|
||||||
|
format:
|
||||||
|
black src/ tests/
|
||||||
|
isort src/ tests/
|
||||||
|
|
||||||
|
type-check:
|
||||||
|
mypy src/
|
||||||
|
|
||||||
|
clean:
|
||||||
|
rm -rf build/
|
||||||
|
rm -rf dist/
|
||||||
|
rm -rf *.egg-info
|
||||||
|
rm -rf .pytest_cache/
|
||||||
|
rm -rf .mypy_cache/
|
||||||
|
rm -rf htmlcov/
|
||||||
|
rm -rf .coverage
|
||||||
|
find . -type d -name __pycache__ -exec rm -r {} +
|
||||||
|
find . -type f -name "*.pyc" -delete
|
||||||
|
|
||||||
|
setup-db:
|
||||||
|
python scripts/setup_database.py
|
||||||
|
|
||||||
117
README.md
Normal file
117
README.md
Normal file
@@ -0,0 +1,117 @@
|
|||||||
|
# ICT ML Trading System
|
||||||
|
|
||||||
|
A production-grade machine learning trading system for DAX Futures based on ICT (Inner Circle Trader) concepts.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
This system detects ICT patterns (Fair Value Gaps, Order Blocks, Liquidity Sweeps) during the London session (3:00-4:00 AM EST) and uses machine learning to grade pattern quality and predict trade outcomes.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- **Pattern Detection**: Automated detection of FVG, Order Blocks, and Liquidity patterns
|
||||||
|
- **Machine Learning**: ML models for pattern grading and setup classification
|
||||||
|
- **Labeling System**: Integrated labeling workflow for training data
|
||||||
|
- **Backtesting**: Comprehensive backtesting framework
|
||||||
|
- **Alert System**: Real-time alerts via Telegram/Slack
|
||||||
|
- **Production Ready**: Comprehensive logging, error handling, and monitoring
|
||||||
|
|
||||||
|
## Project Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
ict-ml-trading/
|
||||||
|
├── src/ # Source code
|
||||||
|
├── config/ # Configuration files
|
||||||
|
├── data/ # Data storage
|
||||||
|
├── models/ # Trained ML models
|
||||||
|
├── logs/ # Application logs
|
||||||
|
├── tests/ # Test suite
|
||||||
|
├── scripts/ # Utility scripts
|
||||||
|
└── docs/ # Documentation
|
||||||
|
```
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
|
||||||
|
### Prerequisites
|
||||||
|
|
||||||
|
- Python 3.10+
|
||||||
|
- PostgreSQL (optional, for production)
|
||||||
|
|
||||||
|
### Installation
|
||||||
|
|
||||||
|
1. Clone the repository:
|
||||||
|
```bash
|
||||||
|
git clone <repository-url>
|
||||||
|
cd ict-ml-trading
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Create virtual environment:
|
||||||
|
```bash
|
||||||
|
python -m venv venv
|
||||||
|
source venv/bin/activate # On Windows: venv\Scripts\activate
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Install dependencies:
|
||||||
|
```bash
|
||||||
|
make install-dev
|
||||||
|
```
|
||||||
|
|
||||||
|
4. Set up environment variables:
|
||||||
|
```bash
|
||||||
|
cp .env.example .env
|
||||||
|
# Edit .env with your configuration
|
||||||
|
```
|
||||||
|
|
||||||
|
5. Initialize database (optional):
|
||||||
|
```bash
|
||||||
|
make setup-db
|
||||||
|
```
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
### Running Tests
|
||||||
|
|
||||||
|
```bash
|
||||||
|
make test
|
||||||
|
```
|
||||||
|
|
||||||
|
### Code Formatting
|
||||||
|
|
||||||
|
```bash
|
||||||
|
make format
|
||||||
|
```
|
||||||
|
|
||||||
|
### Linting
|
||||||
|
|
||||||
|
```bash
|
||||||
|
make lint
|
||||||
|
```
|
||||||
|
|
||||||
|
### Type Checking
|
||||||
|
|
||||||
|
```bash
|
||||||
|
make type-check
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
Configuration files are located in `config/`:
|
||||||
|
- `config.yaml` - Main application configuration
|
||||||
|
- `logging.yaml` - Logging setup
|
||||||
|
- `detectors.yaml` - Pattern detector parameters
|
||||||
|
- `models.yaml` - ML model hyperparameters
|
||||||
|
- `trading.yaml` - Trading strategy parameters
|
||||||
|
- `alerts.yaml` - Alert system configuration
|
||||||
|
- `database.yaml` - Database connection settings
|
||||||
|
|
||||||
|
## Version History
|
||||||
|
|
||||||
|
See [CHANGELOG.md](CHANGELOG.md) for detailed version history.
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
See [docs/contributing.md](docs/contributing.md) for contribution guidelines.
|
||||||
|
|
||||||
127
V0.1.0_SETUP_COMPLETE.md
Normal file
127
V0.1.0_SETUP_COMPLETE.md
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
# Version 0.1.0 - Project Foundation Complete ✅
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
|
||||||
|
The project foundation for ICT ML Trading System v0.1.0 has been successfully created according to the project structure guide.
|
||||||
|
|
||||||
|
## What Was Created
|
||||||
|
|
||||||
|
### ✅ Directory Structure
|
||||||
|
- Complete directory tree matching the project structure
|
||||||
|
- All required subdirectories for data, models, logs, tests, etc.
|
||||||
|
- `.gitkeep` files in empty directories
|
||||||
|
|
||||||
|
### ✅ Project Files
|
||||||
|
- `.gitignore` - Comprehensive ignore patterns
|
||||||
|
- `requirements.txt` - Production dependencies
|
||||||
|
- `requirements-dev.txt` - Development dependencies
|
||||||
|
- `setup.py` - Package installation configuration
|
||||||
|
- `pyproject.toml` - Modern Python project configuration
|
||||||
|
- `Makefile` - Common commands automation
|
||||||
|
- `README.md` - Project documentation
|
||||||
|
- `.pre-commit-config.yaml` - Pre-commit hooks
|
||||||
|
- `CHANGELOG.md` - Version history
|
||||||
|
- `LICENSE` - MIT License
|
||||||
|
|
||||||
|
### ✅ Configuration Files
|
||||||
|
- `config/config.yaml` - Main application configuration
|
||||||
|
- `config/logging.yaml` - Logging setup with JSON and console formatters
|
||||||
|
- `config/detectors.yaml` - Pattern detector parameters
|
||||||
|
- `config/models.yaml` - ML model hyperparameters
|
||||||
|
- `config/trading.yaml` - Trading strategy parameters
|
||||||
|
- `config/alerts.yaml` - Alert system configuration
|
||||||
|
- `config/database.yaml` - Database connection settings
|
||||||
|
|
||||||
|
### ✅ Core Infrastructure
|
||||||
|
- `src/core/constants.py` - Application-wide constants
|
||||||
|
- `src/core/enums.py` - Enumerations (PatternType, Grade, SetupType, etc.)
|
||||||
|
- `src/core/exceptions.py` - Custom exception hierarchy (7 exception classes)
|
||||||
|
- `src/core/base_classes.py` - Abstract base classes (BaseDetector, BaseModel, BaseFeatureEngineering)
|
||||||
|
|
||||||
|
### ✅ Logging System
|
||||||
|
- `src/logging/logger.py` - Logger setup and configuration
|
||||||
|
- `src/logging/formatters.py` - JSON, Detailed, and Colored formatters
|
||||||
|
- `src/logging/handlers.py` - Rotating file handlers and error handlers
|
||||||
|
- `src/logging/filters.py` - Sensitive data filter and rate limit filter
|
||||||
|
- `src/logging/decorators.py` - @log_execution, @log_exceptions, @log_performance
|
||||||
|
|
||||||
|
### ✅ Configuration Management
|
||||||
|
- `src/config/config_loader.py` - Load and merge YAML configs with env vars
|
||||||
|
- `src/config/settings.py` - Pydantic dataclasses for type-safe config
|
||||||
|
- `src/config/validators.py` - Configuration validation logic
|
||||||
|
|
||||||
|
### ✅ Test Suite
|
||||||
|
- `tests/conftest.py` - Pytest fixtures and configuration
|
||||||
|
- `tests/unit/test_core/test_exceptions.py` - Exception tests
|
||||||
|
- `tests/unit/test_logging/test_logger.py` - Logger tests
|
||||||
|
- `tests/unit/test_config/test_config_loader.py` - Config loader tests
|
||||||
|
|
||||||
|
### ✅ Utility Scripts
|
||||||
|
- `scripts/validate_setup.py` - Setup validation script
|
||||||
|
|
||||||
|
## Next Steps
|
||||||
|
|
||||||
|
### 1. Initialize Git Repository (if not done)
|
||||||
|
```bash
|
||||||
|
git init
|
||||||
|
git add .
|
||||||
|
git commit -m "feat(v0.1.0): project foundation with logging and config"
|
||||||
|
git tag v0.1.0
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Set Up Virtual Environment
|
||||||
|
```bash
|
||||||
|
python -m venv venv
|
||||||
|
source venv/bin/activate # On Windows: venv\Scripts\activate
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Install Dependencies
|
||||||
|
```bash
|
||||||
|
make install-dev
|
||||||
|
# Or manually:
|
||||||
|
pip install -r requirements-dev.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Create .env File
|
||||||
|
```bash
|
||||||
|
cp .env.example .env
|
||||||
|
# Edit .env with your configuration
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5. Validate Setup
|
||||||
|
```bash
|
||||||
|
python scripts/validate_setup.py
|
||||||
|
```
|
||||||
|
|
||||||
|
### 6. Run Tests
|
||||||
|
```bash
|
||||||
|
make test
|
||||||
|
# Or:
|
||||||
|
pytest tests/ -v
|
||||||
|
```
|
||||||
|
|
||||||
|
## Validation Checklist
|
||||||
|
|
||||||
|
- [x] All directories created
|
||||||
|
- [x] All configuration files created
|
||||||
|
- [x] Core infrastructure implemented
|
||||||
|
- [x] Logging system implemented
|
||||||
|
- [x] Configuration management implemented
|
||||||
|
- [x] Initial test suite created
|
||||||
|
- [x] No linting errors
|
||||||
|
- [ ] Git repository initialized (user needs to do this)
|
||||||
|
- [ ] Dependencies installed (user needs to do this)
|
||||||
|
- [ ] Tests passing (user needs to verify)
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- The logging system uses a singleton pattern to avoid reconfiguration
|
||||||
|
- Configuration supports environment variable substitution (${VAR} or ${VAR:-default})
|
||||||
|
- All exceptions include error codes and context for better debugging
|
||||||
|
- Logging automatically redacts sensitive data (API keys, passwords, etc.)
|
||||||
|
- Tests use pytest fixtures for clean test isolation
|
||||||
|
|
||||||
|
## Ready for v0.2.0
|
||||||
|
|
||||||
|
The foundation is complete and ready for the next version: **v0.2.0 - Data Pipeline**
|
||||||
|
|
||||||
101
config/alerts.yaml
Normal file
101
config/alerts.yaml
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
# Alert System Configuration
|
||||||
|
|
||||||
|
enabled: true
|
||||||
|
default_channel: "telegram" # telegram, slack, email, all
|
||||||
|
|
||||||
|
telegram:
|
||||||
|
enabled: true
|
||||||
|
bot_token: "${TELEGRAM_BOT_TOKEN}"
|
||||||
|
chat_id: "${TELEGRAM_CHAT_ID}"
|
||||||
|
|
||||||
|
alerts:
|
||||||
|
pattern_detected: true
|
||||||
|
high_grade_pattern: true # Grade 4-5 patterns only
|
||||||
|
setup_complete: true
|
||||||
|
trade_executed: true
|
||||||
|
trade_closed: true
|
||||||
|
daily_summary: true
|
||||||
|
|
||||||
|
rate_limit:
|
||||||
|
max_alerts_per_minute: 10
|
||||||
|
max_alerts_per_hour: 50
|
||||||
|
|
||||||
|
slack:
|
||||||
|
enabled: false
|
||||||
|
webhook_url: "${SLACK_WEBHOOK_URL}"
|
||||||
|
|
||||||
|
alerts:
|
||||||
|
pattern_detected: false
|
||||||
|
high_grade_pattern: true
|
||||||
|
setup_complete: true
|
||||||
|
trade_executed: true
|
||||||
|
trade_closed: true
|
||||||
|
daily_summary: true
|
||||||
|
|
||||||
|
rate_limit:
|
||||||
|
max_alerts_per_minute: 5
|
||||||
|
max_alerts_per_hour: 30
|
||||||
|
|
||||||
|
email:
|
||||||
|
enabled: false
|
||||||
|
smtp_host: "${SMTP_HOST}"
|
||||||
|
smtp_port: "${SMTP_PORT}"
|
||||||
|
smtp_user: "${SMTP_USER}"
|
||||||
|
smtp_password: "${SMTP_PASSWORD}"
|
||||||
|
from_address: "${EMAIL_FROM}"
|
||||||
|
to_addresses:
|
||||||
|
- "${EMAIL_TO}"
|
||||||
|
|
||||||
|
alerts:
|
||||||
|
pattern_detected: false
|
||||||
|
high_grade_pattern: false
|
||||||
|
setup_complete: true
|
||||||
|
trade_executed: true
|
||||||
|
trade_closed: true
|
||||||
|
daily_summary: true
|
||||||
|
|
||||||
|
rate_limit:
|
||||||
|
max_emails_per_hour: 5
|
||||||
|
|
||||||
|
# Alert message templates
|
||||||
|
templates:
|
||||||
|
pattern_detected: |
|
||||||
|
🎯 Pattern Detected: {pattern_type}
|
||||||
|
Grade: {grade}/5
|
||||||
|
Symbol: {symbol}
|
||||||
|
Time: {timestamp}
|
||||||
|
Price: {price}
|
||||||
|
|
||||||
|
high_grade_pattern: |
|
||||||
|
⭐ High Grade Pattern: {pattern_type}
|
||||||
|
Grade: {grade}/5
|
||||||
|
Confidence: {confidence}%
|
||||||
|
Symbol: {symbol}
|
||||||
|
Time: {timestamp}
|
||||||
|
|
||||||
|
setup_complete: |
|
||||||
|
✅ Setup Complete: {setup_type}
|
||||||
|
Patterns: {patterns}
|
||||||
|
Confidence: {confidence}%
|
||||||
|
Entry Signal: {signal}
|
||||||
|
|
||||||
|
trade_executed: |
|
||||||
|
📈 Trade Executed
|
||||||
|
Type: {trade_type}
|
||||||
|
Entry: {entry_price}
|
||||||
|
Stop Loss: {stop_loss}
|
||||||
|
Take Profit: {take_profit}
|
||||||
|
Size: {size} contracts
|
||||||
|
|
||||||
|
trade_closed: |
|
||||||
|
📊 Trade Closed
|
||||||
|
P&L: {pnl} EUR
|
||||||
|
Return: {return_pct}%
|
||||||
|
Duration: {duration}
|
||||||
|
|
||||||
|
# Alert filtering
|
||||||
|
filters:
|
||||||
|
min_pattern_grade: 3 # Only alert on grade 3+ patterns
|
||||||
|
min_setup_confidence: 0.70
|
||||||
|
only_live_trading: false # If true, only alert during live trading hours
|
||||||
|
|
||||||
53
config/config.yaml
Normal file
53
config/config.yaml
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
# Main Application Configuration
|
||||||
|
|
||||||
|
app:
|
||||||
|
name: "ICT ML Trading System"
|
||||||
|
version: "0.1.0"
|
||||||
|
environment: "${ENVIRONMENT:-development}"
|
||||||
|
debug: "${DEBUG:-false}"
|
||||||
|
|
||||||
|
# Trading Session Configuration
|
||||||
|
trading:
|
||||||
|
session:
|
||||||
|
start_time: "03:00" # EST
|
||||||
|
end_time: "04:00" # EST
|
||||||
|
timezone: "America/New_York"
|
||||||
|
|
||||||
|
instrument:
|
||||||
|
symbol: "DEUIDXEUR"
|
||||||
|
exchange: "EUREX"
|
||||||
|
contract_size: 25 # EUR per point
|
||||||
|
|
||||||
|
# Data Configuration
|
||||||
|
data:
|
||||||
|
raw_data_path: "data/raw"
|
||||||
|
processed_data_path: "data/processed"
|
||||||
|
labels_path: "data/labels"
|
||||||
|
screenshots_path: "data/screenshots"
|
||||||
|
|
||||||
|
timeframes:
|
||||||
|
- "1min"
|
||||||
|
- "5min"
|
||||||
|
- "15min"
|
||||||
|
|
||||||
|
retention:
|
||||||
|
raw_data_days: 730 # 24 months
|
||||||
|
processed_data_days: 365 # 12 months
|
||||||
|
screenshots_days: 180 # 6 months
|
||||||
|
|
||||||
|
# Model Configuration
|
||||||
|
models:
|
||||||
|
base_path: "models"
|
||||||
|
pattern_graders_path: "models/pattern_graders"
|
||||||
|
strategy_models_path: "models/strategy_models"
|
||||||
|
|
||||||
|
min_labels_per_pattern: 200
|
||||||
|
train_test_split: 0.8
|
||||||
|
validation_split: 0.1
|
||||||
|
|
||||||
|
# Logging Configuration (see config/logging.yaml for detailed settings)
|
||||||
|
logging:
|
||||||
|
level: "${LOG_LEVEL:-INFO}"
|
||||||
|
format: "${LOG_FORMAT:-json}"
|
||||||
|
log_dir: "logs"
|
||||||
|
|
||||||
40
config/database.yaml
Normal file
40
config/database.yaml
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
# Database Configuration
|
||||||
|
|
||||||
|
# Database URL (can be overridden by DATABASE_URL environment variable)
|
||||||
|
database_url: "${DATABASE_URL:-sqlite:///data/ict_trading.db}"
|
||||||
|
|
||||||
|
# Connection pool settings
|
||||||
|
pool_size: 10
|
||||||
|
max_overflow: 20
|
||||||
|
pool_timeout: 30
|
||||||
|
pool_recycle: 3600 # Recycle connections after 1 hour
|
||||||
|
|
||||||
|
# SQLAlchemy settings
|
||||||
|
echo: false # Set to true for SQL query logging
|
||||||
|
echo_pool: false
|
||||||
|
|
||||||
|
# Database-specific settings
|
||||||
|
sqlite:
|
||||||
|
# SQLite-specific settings
|
||||||
|
check_same_thread: false
|
||||||
|
timeout: 20
|
||||||
|
|
||||||
|
postgresql:
|
||||||
|
# PostgreSQL-specific settings
|
||||||
|
connect_args:
|
||||||
|
connect_timeout: 10
|
||||||
|
application_name: "ict_ml_trading"
|
||||||
|
|
||||||
|
# Migration settings
|
||||||
|
alembic:
|
||||||
|
script_location: "alembic"
|
||||||
|
version_path_separator: "os"
|
||||||
|
sqlalchemy.url: "${DATABASE_URL:-sqlite:///data/ict_trading.db}"
|
||||||
|
|
||||||
|
# Backup settings
|
||||||
|
backup:
|
||||||
|
enabled: true
|
||||||
|
frequency: "daily" # daily, weekly
|
||||||
|
retention_days: 30
|
||||||
|
backup_path: "backups/database"
|
||||||
|
|
||||||
74
config/detectors.yaml
Normal file
74
config/detectors.yaml
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
# Pattern Detector Configuration
|
||||||
|
|
||||||
|
fvg_detector:
|
||||||
|
enabled: true
|
||||||
|
min_gap_size_pips: 5 # Minimum gap size in pips
|
||||||
|
max_gap_age_bars: 50 # Maximum bars before gap is considered invalid
|
||||||
|
require_confirmation: true # Require price to touch gap zone
|
||||||
|
|
||||||
|
bullish:
|
||||||
|
min_body_size_ratio: 0.6 # Minimum body size relative to candle
|
||||||
|
min_gap_size_ratio: 0.3 # Minimum gap relative to ATR
|
||||||
|
|
||||||
|
bearish:
|
||||||
|
min_body_size_ratio: 0.6
|
||||||
|
min_gap_size_ratio: 0.3
|
||||||
|
|
||||||
|
order_block_detector:
|
||||||
|
enabled: true
|
||||||
|
lookback_bars: 20 # Bars to look back for BOS
|
||||||
|
min_candle_size_ratio: 0.5 # Minimum candle size relative to ATR
|
||||||
|
require_structure_break: true # Require BOS before OB
|
||||||
|
|
||||||
|
bullish:
|
||||||
|
min_body_size_ratio: 0.7
|
||||||
|
max_wick_ratio: 0.3 # Maximum wick size relative to body
|
||||||
|
|
||||||
|
bearish:
|
||||||
|
min_body_size_ratio: 0.7
|
||||||
|
max_wick_ratio: 0.3
|
||||||
|
|
||||||
|
liquidity_detector:
|
||||||
|
enabled: true
|
||||||
|
swing_lookback: 10 # Bars to look back for swing points
|
||||||
|
min_swing_size_pips: 10
|
||||||
|
sweep_tolerance_pips: 2 # Tolerance for sweep detection
|
||||||
|
|
||||||
|
bullish_sweep:
|
||||||
|
require_reversal: true
|
||||||
|
min_reversal_size_ratio: 0.5
|
||||||
|
|
||||||
|
bearish_sweep:
|
||||||
|
require_reversal: true
|
||||||
|
min_reversal_size_ratio: 0.5
|
||||||
|
|
||||||
|
premium_discount:
|
||||||
|
enabled: true
|
||||||
|
calculation_method: "session_range" # session_range or daily_range
|
||||||
|
session_start_time: "03:00"
|
||||||
|
session_end_time: "04:00"
|
||||||
|
|
||||||
|
levels:
|
||||||
|
premium_threshold: 0.618 # Fibonacci level
|
||||||
|
discount_threshold: 0.382
|
||||||
|
equilibrium_level: 0.500
|
||||||
|
|
||||||
|
structure_detector:
|
||||||
|
enabled: true
|
||||||
|
swing_period: 10 # Period for swing detection
|
||||||
|
min_structure_size_pips: 15
|
||||||
|
|
||||||
|
bos:
|
||||||
|
require_confirmation: true
|
||||||
|
confirmation_bars: 2
|
||||||
|
|
||||||
|
choch:
|
||||||
|
require_confirmation: true
|
||||||
|
confirmation_bars: 2
|
||||||
|
|
||||||
|
scanner:
|
||||||
|
run_parallel: false # Run detectors in parallel (experimental)
|
||||||
|
save_detections: true
|
||||||
|
generate_screenshots: false # Set to true for labeling workflow
|
||||||
|
screenshot_path: "data/screenshots/patterns"
|
||||||
|
|
||||||
98
config/logging.yaml
Normal file
98
config/logging.yaml
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
version: 1
|
||||||
|
disable_existing_loggers: false
|
||||||
|
|
||||||
|
formatters:
|
||||||
|
json:
|
||||||
|
class: pythonjsonlogger.jsonlogger.JsonFormatter
|
||||||
|
format: '%(asctime)s %(name)s %(levelname)s %(message)s %(pathname)s %(lineno)d'
|
||||||
|
datefmt: '%Y-%m-%d %H:%M:%S'
|
||||||
|
|
||||||
|
detailed:
|
||||||
|
format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s - [%(pathname)s:%(lineno)d]'
|
||||||
|
datefmt: '%Y-%m-%d %H:%M:%S'
|
||||||
|
|
||||||
|
colored:
|
||||||
|
format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
|
||||||
|
datefmt: '%Y-%m-%d %H:%M:%S'
|
||||||
|
# Note: ColoredFormatter requires colorlog package
|
||||||
|
# Falls back to standard formatter if colorlog not available
|
||||||
|
|
||||||
|
handlers:
|
||||||
|
console:
|
||||||
|
class: logging.StreamHandler
|
||||||
|
level: INFO
|
||||||
|
formatter: colored
|
||||||
|
stream: ext://sys.stdout
|
||||||
|
filters:
|
||||||
|
- sensitive_data_filter
|
||||||
|
|
||||||
|
application_file:
|
||||||
|
class: logging.handlers.RotatingFileHandler
|
||||||
|
level: DEBUG
|
||||||
|
formatter: json
|
||||||
|
filename: logs/application/app.log
|
||||||
|
maxBytes: 10485760 # 10MB
|
||||||
|
backupCount: 5
|
||||||
|
encoding: utf-8
|
||||||
|
filters:
|
||||||
|
- sensitive_data_filter
|
||||||
|
|
||||||
|
error_file:
|
||||||
|
class: logging.handlers.RotatingFileHandler
|
||||||
|
level: ERROR
|
||||||
|
formatter: json
|
||||||
|
filename: logs/errors/exceptions.log
|
||||||
|
maxBytes: 10485760 # 10MB
|
||||||
|
backupCount: 5
|
||||||
|
encoding: utf-8
|
||||||
|
filters:
|
||||||
|
- sensitive_data_filter
|
||||||
|
|
||||||
|
critical_file:
|
||||||
|
class: logging.handlers.RotatingFileHandler
|
||||||
|
level: CRITICAL
|
||||||
|
formatter: json
|
||||||
|
filename: logs/errors/critical.log
|
||||||
|
maxBytes: 10485760 # 10MB
|
||||||
|
backupCount: 10
|
||||||
|
encoding: utf-8
|
||||||
|
filters:
|
||||||
|
- sensitive_data_filter
|
||||||
|
|
||||||
|
loggers:
|
||||||
|
src:
|
||||||
|
level: DEBUG
|
||||||
|
handlers:
|
||||||
|
- console
|
||||||
|
- application_file
|
||||||
|
- error_file
|
||||||
|
- critical_file
|
||||||
|
propagate: false
|
||||||
|
|
||||||
|
src.detectors:
|
||||||
|
level: DEBUG
|
||||||
|
handlers:
|
||||||
|
- console
|
||||||
|
- application_file
|
||||||
|
propagate: false
|
||||||
|
|
||||||
|
src.models:
|
||||||
|
level: DEBUG
|
||||||
|
handlers:
|
||||||
|
- console
|
||||||
|
- application_file
|
||||||
|
propagate: false
|
||||||
|
|
||||||
|
src.trading:
|
||||||
|
level: INFO
|
||||||
|
handlers:
|
||||||
|
- console
|
||||||
|
- application_file
|
||||||
|
propagate: false
|
||||||
|
|
||||||
|
root:
|
||||||
|
level: INFO
|
||||||
|
handlers:
|
||||||
|
- console
|
||||||
|
- application_file
|
||||||
|
|
||||||
87
config/models.yaml
Normal file
87
config/models.yaml
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
# Machine Learning Model Configuration
|
||||||
|
|
||||||
|
pattern_graders:
|
||||||
|
# Individual pattern grading models
|
||||||
|
base_model_type: "RandomForestClassifier" # RandomForestClassifier, XGBoostClassifier
|
||||||
|
|
||||||
|
random_forest:
|
||||||
|
n_estimators: 100
|
||||||
|
max_depth: 10
|
||||||
|
min_samples_split: 5
|
||||||
|
min_samples_leaf: 2
|
||||||
|
max_features: "sqrt"
|
||||||
|
random_state: 42
|
||||||
|
n_jobs: -1
|
||||||
|
|
||||||
|
xgboost:
|
||||||
|
n_estimators: 100
|
||||||
|
max_depth: 6
|
||||||
|
learning_rate: 0.1
|
||||||
|
subsample: 0.8
|
||||||
|
colsample_bytree: 0.8
|
||||||
|
random_state: 42
|
||||||
|
|
||||||
|
# Feature selection
|
||||||
|
feature_selection:
|
||||||
|
enabled: true
|
||||||
|
method: "mutual_info" # mutual_info, f_test, chi2
|
||||||
|
top_k_features: 50
|
||||||
|
|
||||||
|
# Training configuration
|
||||||
|
training:
|
||||||
|
cv_folds: 5
|
||||||
|
scoring_metric: "f1_weighted"
|
||||||
|
early_stopping: true
|
||||||
|
patience: 10
|
||||||
|
|
||||||
|
setup_classifier:
|
||||||
|
# Meta-model for complete setup classification
|
||||||
|
model_type: "RandomForestClassifier"
|
||||||
|
|
||||||
|
# Strategy types
|
||||||
|
strategies:
|
||||||
|
continuation:
|
||||||
|
time_window_start: "03:00"
|
||||||
|
time_window_end: "03:15"
|
||||||
|
min_pattern_count: 2
|
||||||
|
required_patterns: ["fvg", "order_block"]
|
||||||
|
|
||||||
|
reversal:
|
||||||
|
time_window_start: "03:30"
|
||||||
|
time_window_end: "03:50"
|
||||||
|
min_pattern_count: 2
|
||||||
|
required_patterns: ["fvg", "liquidity"]
|
||||||
|
|
||||||
|
# Model evaluation
|
||||||
|
evaluation:
|
||||||
|
metrics:
|
||||||
|
- "accuracy"
|
||||||
|
- "precision"
|
||||||
|
- "recall"
|
||||||
|
- "f1_score"
|
||||||
|
- "roc_auc"
|
||||||
|
|
||||||
|
min_accuracy_threshold: 0.75
|
||||||
|
min_precision_threshold: 0.70
|
||||||
|
min_recall_threshold: 0.65
|
||||||
|
|
||||||
|
# Hyperparameter tuning
|
||||||
|
tuning:
|
||||||
|
enabled: false
|
||||||
|
method: "grid_search" # grid_search, random_search, optuna
|
||||||
|
n_iter: 50
|
||||||
|
cv_folds: 5
|
||||||
|
|
||||||
|
grid_search:
|
||||||
|
param_grids:
|
||||||
|
random_forest:
|
||||||
|
n_estimators: [50, 100, 200]
|
||||||
|
max_depth: [5, 10, 15]
|
||||||
|
min_samples_split: [2, 5, 10]
|
||||||
|
|
||||||
|
# Model registry
|
||||||
|
registry:
|
||||||
|
track_versions: true
|
||||||
|
auto_promote: false # Auto-promote best model to "latest"
|
||||||
|
min_improvement: 0.02 # Minimum improvement to promote (2%)
|
||||||
|
|
||||||
86
config/trading.yaml
Normal file
86
config/trading.yaml
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
# Trading Strategy Configuration
|
||||||
|
|
||||||
|
risk_management:
|
||||||
|
max_position_size: 1 # Maximum number of contracts
|
||||||
|
max_daily_loss: 500 # Maximum daily loss in EUR
|
||||||
|
max_drawdown: 0.10 # Maximum drawdown (10%)
|
||||||
|
|
||||||
|
position_sizing:
|
||||||
|
method: "fixed" # fixed, kelly, risk_percentage
|
||||||
|
fixed_size: 1
|
||||||
|
risk_percentage: 0.02 # 2% of account per trade
|
||||||
|
kelly_fraction: 0.25 # Fraction of Kelly criterion
|
||||||
|
|
||||||
|
stop_loss:
|
||||||
|
method: "atr_multiple" # atr_multiple, fixed_pips, pattern_based
|
||||||
|
atr_multiple: 2.0
|
||||||
|
fixed_pips: 20
|
||||||
|
min_stop_pips: 10
|
||||||
|
max_stop_pips: 50
|
||||||
|
|
||||||
|
take_profit:
|
||||||
|
method: "risk_reward" # risk_reward, fixed_pips, pattern_based
|
||||||
|
risk_reward_ratio: 2.0
|
||||||
|
fixed_pips: 40
|
||||||
|
min_tp_pips: 20
|
||||||
|
|
||||||
|
entry_rules:
|
||||||
|
# Minimum model confidence for entry
|
||||||
|
min_pattern_grade: 4 # Grade 4 or 5 required
|
||||||
|
min_setup_confidence: 0.75
|
||||||
|
|
||||||
|
# Pattern requirements
|
||||||
|
required_patterns:
|
||||||
|
continuation: ["fvg", "order_block"]
|
||||||
|
reversal: ["fvg", "liquidity"]
|
||||||
|
|
||||||
|
# Market structure requirements
|
||||||
|
require_bos: true
|
||||||
|
require_htf_alignment: false # Higher timeframe alignment
|
||||||
|
|
||||||
|
# Premium/Discount filter
|
||||||
|
premium_discount_filter: true
|
||||||
|
only_trade_premium_discount: false # If true, only trade in premium/discount zones
|
||||||
|
|
||||||
|
exit_rules:
|
||||||
|
# Exit on pattern invalidation
|
||||||
|
exit_on_fvg_fill: true
|
||||||
|
exit_on_ob_break: true
|
||||||
|
|
||||||
|
# Time-based exit
|
||||||
|
max_hold_time_minutes: 60
|
||||||
|
exit_at_session_end: true
|
||||||
|
|
||||||
|
# Trailing stop
|
||||||
|
trailing_stop_enabled: false
|
||||||
|
trailing_stop_atr_multiple: 1.5
|
||||||
|
|
||||||
|
execution:
|
||||||
|
# Order types
|
||||||
|
entry_order_type: "market" # market, limit
|
||||||
|
limit_order_offset_pips: 2
|
||||||
|
|
||||||
|
# Slippage and fees
|
||||||
|
assumed_slippage_pips: 1
|
||||||
|
commission_per_contract: 2.5 # EUR per contract
|
||||||
|
|
||||||
|
# Execution delays (for backtesting)
|
||||||
|
execution_delay_seconds: 1
|
||||||
|
|
||||||
|
session:
|
||||||
|
# Trading session times (EST)
|
||||||
|
start_time: "03:00"
|
||||||
|
end_time: "04:00"
|
||||||
|
timezone: "America/New_York"
|
||||||
|
|
||||||
|
# Day of week filters
|
||||||
|
trade_monday: true
|
||||||
|
trade_tuesday: true
|
||||||
|
trade_wednesday: true
|
||||||
|
trade_thursday: true
|
||||||
|
trade_friday: true
|
||||||
|
|
||||||
|
# Economic calendar filters (optional)
|
||||||
|
avoid_high_impact_news: false
|
||||||
|
news_buffer_minutes: 15
|
||||||
|
|
||||||
667733
data/download/deuidxeur-m1-bid-2024-01-01-2025-12-31.csv
Normal file
667733
data/download/deuidxeur-m1-bid-2024-01-01-2025-12-31.csv
Normal file
File diff suppressed because it is too large
Load Diff
45802
data/download/deuidxeur-m15-bid-2024-01-01-2025-12-31.csv
Normal file
45802
data/download/deuidxeur-m15-bid-2024-01-01-2025-12-31.csv
Normal file
File diff suppressed because it is too large
Load Diff
0
data/external/economic_calendar/.gitkeep
vendored
Normal file
0
data/external/economic_calendar/.gitkeep
vendored
Normal file
0
data/external/reference/.gitkeep
vendored
Normal file
0
data/external/reference/.gitkeep
vendored
Normal file
0
data/raw/ohlcv/15min/.gitkeep
Normal file
0
data/raw/ohlcv/15min/.gitkeep
Normal file
0
data/raw/ohlcv/1min/.gitkeep
Normal file
0
data/raw/ohlcv/1min/.gitkeep
Normal file
0
data/raw/ohlcv/5min/.gitkeep
Normal file
0
data/raw/ohlcv/5min/.gitkeep
Normal file
0
data/raw/orderflow/.gitkeep
Normal file
0
data/raw/orderflow/.gitkeep
Normal file
0
logs/alerts/.gitkeep
Normal file
0
logs/alerts/.gitkeep
Normal file
0
logs/audit/.gitkeep
Normal file
0
logs/audit/.gitkeep
Normal file
0
logs/detectors/.gitkeep
Normal file
0
logs/detectors/.gitkeep
Normal file
0
logs/errors/.gitkeep
Normal file
0
logs/errors/.gitkeep
Normal file
0
logs/models/.gitkeep
Normal file
0
logs/models/.gitkeep
Normal file
0
logs/performance/.gitkeep
Normal file
0
logs/performance/.gitkeep
Normal file
0
logs/trading/.gitkeep
Normal file
0
logs/trading/.gitkeep
Normal file
0
models/artifacts/feature_importances/.gitkeep
Normal file
0
models/artifacts/feature_importances/.gitkeep
Normal file
0
models/artifacts/training_curves/.gitkeep
Normal file
0
models/artifacts/training_curves/.gitkeep
Normal file
0
models/artifacts/validation_reports/.gitkeep
Normal file
0
models/artifacts/validation_reports/.gitkeep
Normal file
0
models/metadata/.gitkeep
Normal file
0
models/metadata/.gitkeep
Normal file
0
models/pattern_graders/fvg/.gitkeep
Normal file
0
models/pattern_graders/fvg/.gitkeep
Normal file
0
models/pattern_graders/liquidity/.gitkeep
Normal file
0
models/pattern_graders/liquidity/.gitkeep
Normal file
0
models/pattern_graders/order_block/.gitkeep
Normal file
0
models/pattern_graders/order_block/.gitkeep
Normal file
0
models/strategy_models/continuation/.gitkeep
Normal file
0
models/strategy_models/continuation/.gitkeep
Normal file
0
models/strategy_models/reversal/.gitkeep
Normal file
0
models/strategy_models/reversal/.gitkeep
Normal file
82
pyproject.toml
Normal file
82
pyproject.toml
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
[build-system]
|
||||||
|
requires = ["setuptools>=61.0", "wheel"]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[project]
|
||||||
|
name = "ict-ml-trading"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "ICT ML Trading System for DAX Futures"
|
||||||
|
readme = "README.md"
|
||||||
|
requires-python = ">=3.10"
|
||||||
|
license = {text = "MIT"}
|
||||||
|
authors = [
|
||||||
|
{name = "ICT ML Trading Team"}
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.black]
|
||||||
|
line-length = 100
|
||||||
|
target-version = ['py310']
|
||||||
|
include = '\.pyi?$'
|
||||||
|
extend-exclude = '''
|
||||||
|
/(
|
||||||
|
# directories
|
||||||
|
\.eggs
|
||||||
|
| \.git
|
||||||
|
| \.hg
|
||||||
|
| \.mypy_cache
|
||||||
|
| \.tox
|
||||||
|
| \.venv
|
||||||
|
| build
|
||||||
|
| dist
|
||||||
|
)/
|
||||||
|
'''
|
||||||
|
|
||||||
|
[tool.isort]
|
||||||
|
profile = "black"
|
||||||
|
line_length = 100
|
||||||
|
skip_gitignore = true
|
||||||
|
|
||||||
|
[tool.mypy]
|
||||||
|
python_version = "3.10"
|
||||||
|
warn_return_any = true
|
||||||
|
warn_unused_configs = true
|
||||||
|
disallow_untyped_defs = false
|
||||||
|
disallow_incomplete_defs = false
|
||||||
|
check_untyped_defs = true
|
||||||
|
no_implicit_optional = true
|
||||||
|
warn_redundant_casts = true
|
||||||
|
warn_unused_ignores = true
|
||||||
|
warn_no_return = true
|
||||||
|
ignore_missing_imports = true
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
testpaths = ["tests"]
|
||||||
|
python_files = ["test_*.py"]
|
||||||
|
python_classes = ["Test*"]
|
||||||
|
python_functions = ["test_*"]
|
||||||
|
addopts = [
|
||||||
|
"--strict-markers",
|
||||||
|
"--strict-config",
|
||||||
|
"--verbose",
|
||||||
|
"--cov=src",
|
||||||
|
"--cov-report=html",
|
||||||
|
"--cov-report=term-missing",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.coverage.run]
|
||||||
|
source = ["src"]
|
||||||
|
omit = [
|
||||||
|
"*/tests/*",
|
||||||
|
"*/test_*",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.coverage.report]
|
||||||
|
exclude_lines = [
|
||||||
|
"pragma: no cover",
|
||||||
|
"def __repr__",
|
||||||
|
"raise AssertionError",
|
||||||
|
"raise NotImplementedError",
|
||||||
|
"if __name__ == .__main__.:",
|
||||||
|
"if TYPE_CHECKING:",
|
||||||
|
]
|
||||||
|
|
||||||
29
requirements-dev.txt
Normal file
29
requirements-dev.txt
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
# Include production requirements
|
||||||
|
-r requirements.txt
|
||||||
|
|
||||||
|
# Testing
|
||||||
|
pytest>=7.4.0
|
||||||
|
pytest-cov>=4.1.0
|
||||||
|
pytest-mock>=3.11.0
|
||||||
|
pytest-asyncio>=0.21.0
|
||||||
|
|
||||||
|
# Linting and formatting
|
||||||
|
black>=23.7.0
|
||||||
|
flake8>=6.1.0
|
||||||
|
isort>=5.12.0
|
||||||
|
mypy>=1.5.0
|
||||||
|
|
||||||
|
# Type stubs
|
||||||
|
types-pyyaml>=6.0.12
|
||||||
|
types-python-dotenv>=1.0.0
|
||||||
|
|
||||||
|
# Pre-commit hooks
|
||||||
|
pre-commit>=3.4.0
|
||||||
|
|
||||||
|
# Documentation
|
||||||
|
sphinx>=7.1.0
|
||||||
|
sphinx-rtd-theme>=1.3.0
|
||||||
|
|
||||||
|
# Security
|
||||||
|
bandit>=1.7.5
|
||||||
|
|
||||||
23
requirements.txt
Normal file
23
requirements.txt
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
# Core dependencies
|
||||||
|
numpy>=1.24.0
|
||||||
|
pandas>=2.0.0
|
||||||
|
scikit-learn>=1.3.0
|
||||||
|
pyyaml>=6.0
|
||||||
|
python-dotenv>=1.0.0
|
||||||
|
pydantic>=2.0.0
|
||||||
|
pydantic-settings>=2.0.0
|
||||||
|
|
||||||
|
# Database
|
||||||
|
sqlalchemy>=2.0.0
|
||||||
|
alembic>=1.11.0
|
||||||
|
|
||||||
|
# Logging
|
||||||
|
python-json-logger>=2.0.7
|
||||||
|
colorlog>=6.7.0 # Optional, for colored console output
|
||||||
|
|
||||||
|
# Data processing
|
||||||
|
pyarrow>=12.0.0 # For Parquet support
|
||||||
|
|
||||||
|
# Utilities
|
||||||
|
click>=8.1.0 # CLI framework
|
||||||
|
|
||||||
82
scripts/validate_setup.py
Executable file
82
scripts/validate_setup.py
Executable file
@@ -0,0 +1,82 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""Validate project setup for v0.1.0."""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
# Add src to path
|
||||||
|
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||||
|
|
||||||
|
from src.core.constants import PATHS
|
||||||
|
from src.core.enums import PatternType, Grade, SetupType
|
||||||
|
from src.core.exceptions import ICTTradingException, DataError
|
||||||
|
from src.logging import get_logger
|
||||||
|
from src.config import load_config
|
||||||
|
|
||||||
|
|
||||||
|
def validate_imports():
    """Confirm that the core modules imported at the top of this script loaded."""
    # Reaching this function at all means the module-level imports succeeded,
    # so there is nothing left to do but report success.
    print("✓ Core imports successful")
|
||||||
|
|
||||||
|
|
||||||
|
def validate_logging():
    """Validate logging system."""
    # Emit one test record through the project logger to prove the logging
    # stack is wired up end to end.
    log = get_logger(__name__)
    log.info("Test log message")
    print("✓ Logging system working")
|
||||||
|
|
||||||
|
|
||||||
|
def validate_config():
    """Validate configuration loading.

    Raises:
        AssertionError: If a required top-level section is missing.
    """
    config = load_config()
    # Explicit check instead of `assert`: assertions are stripped when Python
    # runs with -O, which would silently skip this validation. AssertionError
    # is raised deliberately to stay backward-compatible with the original.
    for section in ("app", "trading"):
        if section not in config:
            raise AssertionError(f"Missing required config section: {section}")
    print("✓ Configuration loading working")
|
||||||
|
|
||||||
|
|
||||||
|
def validate_directories():
    """Validate directory structure.

    Returns:
        True when every required directory exists; False (after printing the
        first missing one) otherwise.
    """
    expected = [
        "src/core",
        "src/config",
        "src/logging",
        "config",
        "tests",
        "logs",
    ]

    # Stop at the first missing directory, mirroring a short-circuiting scan.
    missing = next((name for name in expected if not Path(name).exists()), None)
    if missing is not None:
        print(f"✗ Missing directory: {missing}")
        return False

    print("✓ Directory structure valid")
    return True
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Run all validation checks.

    Returns:
        int: 0 when every check passes, 1 on any failure.
    """
    print("Validating ICT ML Trading System v0.1.0 setup...")
    print("-" * 50)

    try:
        validate_imports()
        validate_logging()
        validate_config()
        # Bug fix: validate_directories() signals problems through its return
        # value rather than by raising, so ignoring the result (as the
        # original did) printed "All validations passed!" even when
        # directories were missing.
        if not validate_directories():
            print("-" * 50)
            print("✗ Validation failed: missing directories")
            return 1

        print("-" * 50)
        print("✓ All validations passed!")
        return 0

    except Exception as e:
        print(f"✗ Validation failed: {e}")
        import traceback

        traceback.print_exc()
        return 1
|
||||||
|
|
||||||
|
|
||||||
|
# Script entry point: propagate main()'s status code (0 = success, 1 = failure)
# to the shell so CI can gate on it.
if __name__ == "__main__":
    sys.exit(main())
|
||||||
|
|
||||||
42
setup.py
Normal file
42
setup.py
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
"""Setup configuration for ICT ML Trading System."""
|
||||||
|
from setuptools import find_packages, setup
|
||||||
|
|
||||||
|
with open("README.md", "r", encoding="utf-8") as fh:
|
||||||
|
long_description = fh.read()
|
||||||
|
|
||||||
|
setup(
|
||||||
|
name="ict-ml-trading",
|
||||||
|
version="0.1.0",
|
||||||
|
author="ICT ML Trading Team",
|
||||||
|
description="ICT ML Trading System for DAX Futures",
|
||||||
|
long_description=long_description,
|
||||||
|
long_description_content_type="text/markdown",
|
||||||
|
url="https://github.com/yourusername/ict-ml-trading",
|
||||||
|
packages=find_packages(where="src"),
|
||||||
|
package_dir={"": "src"},
|
||||||
|
classifiers=[
|
||||||
|
"Development Status :: 3 - Alpha",
|
||||||
|
"Intended Audience :: Financial and Insurance Industry",
|
||||||
|
"Topic :: Office/Business :: Financial :: Investment",
|
||||||
|
"Programming Language :: Python :: 3",
|
||||||
|
"Programming Language :: Python :: 3.10",
|
||||||
|
"Programming Language :: Python :: 3.11",
|
||||||
|
"License :: OSI Approved :: MIT License",
|
||||||
|
],
|
||||||
|
python_requires=">=3.10",
|
||||||
|
install_requires=[
|
||||||
|
"numpy>=1.24.0",
|
||||||
|
"pandas>=2.0.0",
|
||||||
|
"scikit-learn>=1.3.0",
|
||||||
|
"pyyaml>=6.0",
|
||||||
|
"python-dotenv>=1.0.0",
|
||||||
|
"pydantic>=2.0.0",
|
||||||
|
"pydantic-settings>=2.0.0",
|
||||||
|
"sqlalchemy>=2.0.0",
|
||||||
|
"alembic>=1.11.0",
|
||||||
|
"python-json-logger>=2.0.7",
|
||||||
|
"pyarrow>=12.0.0",
|
||||||
|
"click>=8.1.0",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
4
src/__init__.py
Normal file
4
src/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
"""ICT ML Trading System - Main Package."""
|
||||||
|
|
||||||
|
__version__ = "0.1.0"
|
||||||
|
|
||||||
6
src/config/__init__.py
Normal file
6
src/config/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
"""Configuration management for ICT ML Trading System."""
|
||||||
|
|
||||||
|
from src.config.config_loader import load_config, get_config
|
||||||
|
|
||||||
|
__all__ = ["load_config", "get_config"]
|
||||||
|
|
||||||
153
src/config/config_loader.py
Normal file
153
src/config/config_loader.py
Normal file
@@ -0,0 +1,153 @@
|
|||||||
|
"""Configuration loader with YAML and environment variable support."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
|
||||||
|
from src.core.constants import PATHS
|
||||||
|
from src.core.exceptions import ConfigurationError
|
||||||
|
from src.logging import get_logger
|
||||||
|
|
||||||
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
# Global config cache
|
||||||
|
_config: Optional[Dict[str, Any]] = None
|
||||||
|
|
||||||
|
|
||||||
|
def load_config(config_path: Optional[Path] = None) -> Dict[str, Any]:
    """
    Load configuration from YAML files and environment variables.

    The merged result is cached in the module-level ``_config``; subsequent
    calls return the cache unchanged. NOTE(review): when a cached config
    exists, an explicit ``config_path`` argument is silently ignored —
    confirm this is intended or clear the cache before reloading.

    Args:
        config_path: Path to main config file (defaults to config/config.yaml)

    Returns:
        Merged configuration dictionary

    Raises:
        ConfigurationError: If configuration cannot be loaded
    """
    global _config

    if _config is not None:
        return _config

    # Load .env before reading YAML so ${VAR} substitution sees its values.
    env_path = PATHS["config"].parent / ".env"
    if env_path.exists():
        load_dotenv(env_path)
        logger.debug("Loaded environment variables from %s", env_path)

    if config_path is None:
        config_path = PATHS["config"] / "config.yaml"

    if not config_path.exists():
        raise ConfigurationError(
            f"Configuration file not found: {config_path}",
            context={"config_path": str(config_path)},
        )

    try:
        # Explicit encoding: YAML may contain non-ASCII text and the platform
        # default encoding is not guaranteed to be UTF-8 (bug fix).
        with open(config_path, "r", encoding="utf-8") as f:
            config = yaml.safe_load(f) or {}

        config = _substitute_env_vars(config)

        # Merge optional per-domain files as top-level sections named after
        # the file stem (e.g. trading.yaml -> config["trading"]).
        config_dir = config_path.parent
        additional_configs = [
            "logging.yaml",
            "detectors.yaml",
            "models.yaml",
            "trading.yaml",
            "alerts.yaml",
            "database.yaml",
        ]

        for config_file in additional_configs:
            config_file_path = config_dir / config_file
            if config_file_path.exists():
                with open(config_file_path, "r", encoding="utf-8") as f:
                    section_config = yaml.safe_load(f) or {}
                section_config = _substitute_env_vars(section_config)
                config[config_file_path.stem] = section_config

        _config = config
        logger.info("Configuration loaded successfully")
        return config

    except Exception as e:
        raise ConfigurationError(
            f"Failed to load configuration: {e}",
            context={"config_path": str(config_path)},
        ) from e
|
||||||
|
|
||||||
|
|
||||||
|
def get_config(key: Optional[str] = None, default: Any = None) -> Any:
    """
    Get configuration value by key (dot-separated path).

    Args:
        key: Configuration key (e.g., "trading.session.start_time")
        default: Default value if key not found

    Returns:
        Configuration value, the whole config when key is None, or default
        when any path segment is missing.
    """
    # Lazily populate the module-level cache on first access.
    if _config is None:
        load_config()

    if key is None:
        return _config

    # Walk the dotted path, bailing out with the default on the first miss.
    node: Any = _config
    for part in key.split("."):
        if not isinstance(node, dict) or part not in node:
            return default
        node = node[part]
    return node
|
||||||
|
|
||||||
|
|
||||||
|
def _substitute_env_vars(config: Any) -> Any:
    """
    Recursively substitute environment variables in config.

    Only strings that consist entirely of a ``${VAR}`` or ``${VAR:-default}``
    expression are substituted; embedded references are left untouched.

    Args:
        config: Configuration object (dict, list, or primitive)

    Returns:
        Configuration with environment variables substituted
    """
    if isinstance(config, dict):
        return {key: _substitute_env_vars(val) for key, val in config.items()}
    if isinstance(config, list):
        return [_substitute_env_vars(entry) for entry in config]
    if not isinstance(config, str):
        return config

    # Guard clause: anything that is not a whole-string ${...} passes through.
    if not (config.startswith("${") and config.endswith("}")):
        return config

    expr = config[2:-1]
    if ":-" in expr:
        # Shell-style fallback syntax: ${VAR:-default}.
        name, fallback = expr.split(":-", 1)
        return os.getenv(name.strip(), fallback.strip())

    name = expr.strip()
    value = os.getenv(name)
    if value is None:
        logger.warning(f"Environment variable {name} not set")
        # Leave the original placeholder in place so the miss is visible.
        return config
    return value
|
||||||
|
|
||||||
91
src/config/settings.py
Normal file
91
src/config/settings.py
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
"""Type-safe configuration settings using Pydantic."""
|
||||||
|
|
||||||
|
from typing import Dict, List, Optional
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field, field_validator
|
||||||
|
|
||||||
|
from src.core.exceptions import ConfigurationError
|
||||||
|
|
||||||
|
|
||||||
|
class TradingSessionConfig(BaseModel):
    """Trading session configuration (daily time window for taking trades)."""

    # Session boundaries as "HH:MM" strings, interpreted in `timezone`.
    start_time: str = Field(..., description="Session start time (HH:MM)")
    end_time: str = Field(..., description="Session end time (HH:MM)")
    # IANA zone name; defaults to US Eastern.
    timezone: str = Field(default="America/New_York", description="Timezone")
|
||||||
|
|
||||||
|
|
||||||
|
class InstrumentConfig(BaseModel):
    """Instrument configuration for the traded contract."""

    # All three are required; there are no sensible defaults per instrument.
    symbol: str = Field(..., description="Instrument symbol")
    exchange: str = Field(..., description="Exchange name")
    contract_size: float = Field(..., description="Contract size")
|
||||||
|
|
||||||
|
|
||||||
|
class TradingConfig(BaseModel):
    """Trading configuration: session window plus instrument details."""

    session: TradingSessionConfig
    instrument: InstrumentConfig
|
||||||
|
|
||||||
|
|
||||||
|
class DataConfig(BaseModel):
    """Data configuration: filesystem layout and bar timeframes."""

    # Paths are plain strings here; conversion to Path happens at use sites.
    raw_data_path: str = Field(..., description="Path to raw data")
    processed_data_path: str = Field(..., description="Path to processed data")
    labels_path: str = Field(..., description="Path to labels")
    screenshots_path: str = Field(..., description="Path to screenshots")
    # Pydantic deep-copies defaults per instance, so a list default is safe.
    timeframes: List[str] = Field(default=["1min", "5min", "15min"])
|
||||||
|
|
||||||
|
|
||||||
|
class ModelConfig(BaseModel):
    """Model configuration: storage paths and training data constraints."""

    base_path: str = Field(..., description="Base path for models")
    pattern_graders_path: str = Field(..., description="Path to pattern graders")
    strategy_models_path: str = Field(..., description="Path to strategy models")
    # ge=50 rejects configs with too little labeled data to train on.
    min_labels_per_pattern: int = Field(default=200, ge=50)
    # Train fraction; bounded to keep a meaningful test split (10–50%).
    train_test_split: float = Field(default=0.8, ge=0.5, le=0.9)
|
||||||
|
|
||||||
|
|
||||||
|
class AppConfig(BaseModel):
    """Main application configuration (identity and runtime mode)."""

    name: str = Field(default="ICT ML Trading System")
    version: str = Field(default="0.1.0")
    # NOTE(review): environment is a free-form string ("development", ...);
    # consider a Literal/enum if only a fixed set of values is valid.
    environment: str = Field(default="development")
    debug: bool = Field(default=False)
|
||||||
|
|
||||||
|
|
||||||
|
class Config(BaseModel):
    """Root configuration model aggregating all typed sections."""

    # Each attribute corresponds to a top-level key of the merged config dict.
    app: AppConfig
    trading: TradingConfig
    data: DataConfig
    models: ModelConfig

    @classmethod
    def from_dict(cls, config_dict: Dict) -> "Config":
        """
        Create Config from dictionary.

        Args:
            config_dict: Configuration dictionary with keys matching the
                declared sections (app/trading/data/models)

        Returns:
            Config instance

        Raises:
            ConfigurationError: If configuration is invalid
        """
        try:
            return cls(**config_dict)
        except Exception as e:
            # Wrap pydantic's validation error (and anything else) in the
            # project exception so callers only need to catch one type.
            raise ConfigurationError(
                f"Invalid configuration: {e}",
                context={"config_dict": config_dict},
            ) from e
|
||||||
|
|
||||||
163
src/config/validators.py
Normal file
163
src/config/validators.py
Normal file
@@ -0,0 +1,163 @@
|
|||||||
|
"""Configuration validation logic."""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List
|
||||||
|
|
||||||
|
from src.core.constants import TIMEFRAMES
|
||||||
|
from src.core.exceptions import ConfigurationError, ValidationError
|
||||||
|
|
||||||
|
|
||||||
|
def validate_config(config: Dict[str, Any]) -> None:
    """
    Validate configuration dictionary.

    Checks required sections, session time formats, timeframe names and the
    minimum-labels setting.

    Args:
        config: Configuration dictionary

    Raises:
        ConfigurationError: If configuration is invalid
    """
    # Required top-level sections, checked in a fixed order.
    for section in ("app", "trading"):
        if section not in config:
            raise ConfigurationError(f"Missing '{section}' configuration section")

    trading_config = config["trading"]
    if "session" not in trading_config:
        raise ConfigurationError("Missing 'trading.session' configuration")

    session_config = trading_config["session"]
    for field in ("start_time", "end_time"):
        validate_time_format(session_config.get(field), f"trading.session.{field}")

    # Optional sections are only validated when present.
    if "data" in config and "timeframes" in config["data"]:
        validate_timeframes(config["data"]["timeframes"])

    if "models" in config and "min_labels_per_pattern" in config["models"]:
        min_labels = config["models"]["min_labels_per_pattern"]
        if not isinstance(min_labels, int) or min_labels < 50:
            raise ConfigurationError(
                "models.min_labels_per_pattern must be an integer >= 50",
                context={"value": min_labels},
            )
|
||||||
|
|
||||||
|
|
||||||
|
def validate_time_format(time_str: Any, field_name: str) -> None:
    """
    Validate time format (HH:MM).

    Args:
        time_str: Time string to validate
        field_name: Field name for error messages

    Raises:
        ConfigurationError: If time format is invalid
    """
    if not isinstance(time_str, str):
        raise ConfigurationError(
            f"{field_name} must be a string in HH:MM format",
            context={"value": time_str, "field": field_name},
        )

    try:
        # Tuple unpacking raises ValueError unless there is exactly one ":".
        hour_text, minute_text = time_str.split(":")
        hour = int(hour_text)
        minute = int(minute_text)
        if hour < 0 or hour > 23 or minute < 0 or minute > 59:
            raise ValueError("Invalid time range")
    except (ValueError, IndexError) as e:
        raise ConfigurationError(
            f"{field_name} must be in HH:MM format (e.g., '03:00')",
            context={"value": time_str, "field": field_name},
        ) from e
|
||||||
|
|
||||||
|
|
||||||
|
def validate_timeframes(timeframes: List[str]) -> None:
    """
    Validate timeframe list.

    Args:
        timeframes: List of timeframe strings

    Raises:
        ConfigurationError: If timeframes are invalid
    """
    if not isinstance(timeframes, list):
        raise ConfigurationError(
            "data.timeframes must be a list",
            context={"value": timeframes},
        )

    # Guard-continue style: fail fast on the first unknown timeframe.
    for tf in timeframes:
        if tf in TIMEFRAMES:
            continue
        raise ConfigurationError(
            f"Invalid timeframe: {tf}. Must be one of {TIMEFRAMES}",
            context={"timeframe": tf, "valid_timeframes": TIMEFRAMES},
        )
|
||||||
|
|
||||||
|
|
||||||
|
def validate_file_path(path: str, must_exist: bool = False) -> Path:
    """
    Validate file path.

    Args:
        path: File path string
        must_exist: Whether file must exist

    Returns:
        Path object

    Raises:
        ValidationError: If the path cannot be constructed, or if
            must_exist is True and the path does not exist
    """
    try:
        path_obj = Path(path)
    except Exception as e:  # e.g. TypeError for non-path-like input
        raise ValidationError(
            f"Invalid path: {path}",
            context={"path": path},
        ) from e

    # Bug fix: in the original, this check lived inside the try block, so its
    # "Path does not exist" ValidationError was caught by `except Exception`
    # and re-raised as a misleading "Invalid path" error.
    if must_exist and not path_obj.exists():
        raise ValidationError(
            f"Path does not exist: {path}",
            context={"path": str(path)},
        )

    return path_obj
|
||||||
|
|
||||||
|
|
||||||
|
def validate_range(value: float, min_val: float, max_val: float, field_name: str) -> None:
    """
    Validate numeric range.

    Args:
        value: Value to validate
        min_val: Minimum value (inclusive)
        max_val: Maximum value (inclusive)
        field_name: Field name for error messages

    Raises:
        ValidationError: If value is not numeric or out of range
    """
    # NOTE(review): bool is a subclass of int, so True/False pass this type
    # check — confirm whether booleans should be rejected here.
    if not isinstance(value, (int, float)):
        raise ValidationError(
            f"{field_name} must be a number",
            context={"value": value, "field": field_name},
        )

    if value < min_val or value > max_val:
        raise ValidationError(
            f"{field_name} must be between {min_val} and {max_val}",
            context={"value": value, "min": min_val, "max": max_val, "field": field_name},
        )
|
||||||
|
|
||||||
25
src/core/__init__.py
Normal file
25
src/core/__init__.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
"""Core business logic and base classes."""
|
||||||
|
|
||||||
|
from src.core.constants import *
|
||||||
|
from src.core.enums import *
|
||||||
|
from src.core.exceptions import *
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
# Constants
|
||||||
|
"TIMEFRAMES",
|
||||||
|
"SESSION_TIMES",
|
||||||
|
"PATHS",
|
||||||
|
# Enums
|
||||||
|
"PatternType",
|
||||||
|
"Grade",
|
||||||
|
"SetupType",
|
||||||
|
"TimeWindow",
|
||||||
|
# Exceptions
|
||||||
|
"ICTTradingException",
|
||||||
|
"DataError",
|
||||||
|
"DetectorError",
|
||||||
|
"ModelError",
|
||||||
|
"ConfigurationError",
|
||||||
|
"TradingError",
|
||||||
|
]
|
||||||
|
|
||||||
189
src/core/base_classes.py
Normal file
189
src/core/base_classes.py
Normal file
@@ -0,0 +1,189 @@
|
|||||||
|
"""Abstract base classes for detectors, models, and other components."""
|
||||||
|
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
from src.core.exceptions import DetectorError, ModelError
|
||||||
|
|
||||||
|
|
||||||
|
class BaseDetector(ABC):
    """Abstract base class for pattern detectors."""

    def __init__(self, config: Optional[Dict[str, Any]] = None):
        """
        Initialize detector.

        Args:
            config: Detector configuration dictionary
        """
        self.config = config or {}
        # Detectors are active unless the config explicitly disables them.
        self.enabled = self.config.get("enabled", True)

    @abstractmethod
    def detect(self, data: pd.DataFrame) -> List[Dict[str, Any]]:
        """
        Detect patterns in OHLCV data.

        Args:
            data: DataFrame with OHLCV data (columns: timestamp, open, high,
                low, close, volume)

        Returns:
            List of detected patterns, each as a dictionary with pattern
            metadata

        Raises:
            DetectorError: If detection fails
        """

    def validate_data(self, data: pd.DataFrame) -> None:
        """
        Validate input data format.

        Args:
            data: DataFrame to validate

        Raises:
            DetectorError: If required columns are absent, the frame is
                empty, or it holds fewer bars than the configured minimum
        """
        required = ["timestamp", "open", "high", "low", "close"]
        missing = [name for name in required if name not in data.columns]
        if missing:
            raise DetectorError(
                f"Missing required columns: {missing}",
                context={"required_columns": required, "data_columns": list(data.columns)},
            )

        if len(data) == 0:
            raise DetectorError("DataFrame is empty")

        # Detectors need a minimum lookback window; default is 20 bars.
        min_bars = self.config.get("min_bars", 20)
        if len(data) < min_bars:
            raise DetectorError(
                f"Insufficient data: {len(data)} bars, minimum {min_bars} required",
                context={"data_length": len(data), "min_bars": min_bars},
            )
|
||||||
|
|
||||||
|
|
||||||
|
class BaseModel(ABC):
    """Abstract base class for ML models."""

    def __init__(self, config: Optional[Dict[str, Any]] = None):
        """
        Initialize model.

        Args:
            config: Model configuration dictionary; falls back to an empty
                dict when omitted or falsy.
        """
        self.config = config if config else {}
        self.model = None        # concrete estimator, provided by subclasses
        self.is_trained = False  # no training has happened yet

    @abstractmethod
    def train(self, X: pd.DataFrame, y: pd.Series) -> Dict[str, Any]:
        """
        Train the model.

        Args:
            X: Feature matrix
            y: Target labels

        Returns:
            Dictionary with training metrics

        Raises:
            ModelError: If training fails
        """

    @abstractmethod
    def predict(self, X: pd.DataFrame) -> Any:
        """
        Make predictions.

        Args:
            X: Feature matrix

        Returns:
            Predictions (class labels or probabilities)

        Raises:
            ModelError: If prediction fails
        """

    @abstractmethod
    def evaluate(self, X: pd.DataFrame, y: pd.Series) -> Dict[str, float]:
        """
        Evaluate model performance.

        Args:
            X: Feature matrix
            y: True labels

        Returns:
            Dictionary with evaluation metrics

        Raises:
            ModelError: If evaluation fails
        """

    def save(self, path: str) -> None:
        """
        Save model to disk.

        Args:
            path: Path to save model

        Raises:
            ModelError: If save fails or the model has not been trained
        """
        if not self.is_trained:
            raise ModelError("Cannot save untrained model")
        # Serialization itself is implemented by subclasses.

    def load(self, path: str) -> None:
        """
        Load model from disk.

        Args:
            path: Path to load model from

        Raises:
            ModelError: If load fails
        """
        # Deserialization itself is implemented by subclasses.
|
||||||
|
|
||||||
|
|
||||||
|
class BaseFeatureEngineering(ABC):
    """Abstract base class for feature engineering."""

    def __init__(self, config: Optional[Dict[str, Any]] = None):
        """
        Initialize feature engineering.

        Args:
            config: Configuration dictionary; falls back to an empty dict
                when omitted or falsy.
        """
        self.config = config if config else {}

    @abstractmethod
    def extract_features(self, data: pd.DataFrame, pattern: Dict[str, Any]) -> Dict[str, float]:
        """
        Extract features for a pattern.

        Args:
            data: OHLCV data
            pattern: Pattern metadata dictionary

        Returns:
            Dictionary of feature names and values

        Raises:
            DataError: If feature extraction fails
        """
|
||||||
|
|
||||||
78
src/core/constants.py
Normal file
78
src/core/constants.py
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
"""Application-wide constants."""

from pathlib import Path
from typing import Any, Dict, List

# Project root directory (three levels up from src/core/constants.py)
PROJECT_ROOT = Path(__file__).parent.parent.parent

# Supported timeframes
TIMEFRAMES: List[str] = ["1min", "5min", "15min"]

# Trading session times (EST)
SESSION_TIMES: Dict[str, str] = {
    "start": "03:00",
    "end": "04:00",
    "timezone": "America/New_York",
}

# Continuation window (3:00-3:15 EST)
CONTINUATION_WINDOW: Dict[str, str] = {
    "start": "03:00",
    "end": "03:15",
}

# Reversal window (3:30-3:50 EST)
REVERSAL_WINDOW: Dict[str, str] = {
    "start": "03:30",
    "end": "03:50",
}

# Directory paths
PATHS: Dict[str, Path] = {
    "config": PROJECT_ROOT / "config",
    "data_raw": PROJECT_ROOT / "data" / "raw",
    "data_processed": PROJECT_ROOT / "data" / "processed",
    "data_labels": PROJECT_ROOT / "data" / "labels",
    "data_screenshots": PROJECT_ROOT / "data" / "screenshots",
    "models": PROJECT_ROOT / "models",
    "logs": PROJECT_ROOT / "logs",
    "scripts": PROJECT_ROOT / "scripts",
    "tests": PROJECT_ROOT / "tests",
}

# Pattern detection thresholds
PATTERN_THRESHOLDS: Dict[str, float] = {
    "min_fvg_size_pips": 5.0,
    "min_ob_size_pips": 10.0,
    "min_liquidity_sweep_pips": 5.0,
    "atr_period": 14,
}

# Model configuration
# NOTE: annotated Dict[str, Any] — the previous Dict[str, any] referenced
# the builtin any() function, which is not a type.
MODEL_CONFIG: Dict[str, Any] = {
    "min_labels_per_pattern": 200,
    "train_test_split": 0.8,
    "validation_split": 0.1,
    "min_accuracy_threshold": 0.75,
}

# Risk management constants
RISK_LIMITS: Dict[str, float] = {
    "max_position_size": 1,
    "max_daily_loss": 500.0,  # EUR
    "max_drawdown": 0.10,  # 10%
    "risk_per_trade": 0.02,  # 2% of account
}

# Logging constants
LOG_LEVELS: List[str] = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
LOG_FORMATS: List[str] = ["json", "text"]

# Database constants
DB_CONSTANTS: Dict[str, Any] = {
    "pool_size": 10,
    "max_overflow": 20,
    "pool_timeout": 30,
}
|
||||||
|
|
||||||
89
src/core/enums.py
Normal file
89
src/core/enums.py
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
"""Enumerations for pattern types, grades, and trading concepts."""
|
||||||
|
|
||||||
|
from enum import Enum, IntEnum
|
||||||
|
|
||||||
|
|
||||||
|
class PatternType(str, Enum):
    """Types of ICT patterns.

    Inherits from ``str`` so members compare equal to, and serialize as,
    their plain string values.
    """

    FVG = "fvg"  # Fair Value Gap
    ORDER_BLOCK = "order_block"
    LIQUIDITY = "liquidity"
    PREMIUM_DISCOUNT = "premium_discount"
    STRUCTURE = "structure"
|
||||||
|
|
||||||
|
|
||||||
|
class PatternDirection(str, Enum):
    """Pattern direction (bullish/bearish); str-valued for easy serialization."""

    BULLISH = "bullish"
    BEARISH = "bearish"
|
||||||
|
|
||||||
|
|
||||||
|
class Grade(IntEnum):
    """Pattern quality grade (1-5 scale).

    IntEnum: members behave as ints, so grades can be compared and
    aggregated numerically (e.g. Grade.FIVE > Grade.ONE).
    """

    ONE = 1
    TWO = 2
    THREE = 3
    FOUR = 4
    FIVE = 5
|
||||||
|
|
||||||
|
|
||||||
|
class SetupType(str, Enum):
    """Complete setup types; str-valued for easy serialization."""

    CONTINUATION = "continuation"  # 3:00-3:15 continuation setup
    REVERSAL = "reversal"  # 3:30-3:50 reversal setup
|
||||||
|
|
||||||
|
|
||||||
|
class TimeWindow(str, Enum):
    """Trading time windows; str-valued for easy serialization."""

    CONTINUATION = "continuation"  # 3:00-3:15 EST
    REVERSAL = "reversal"  # 3:30-3:50 EST
    FULL_SESSION = "full_session"  # 3:00-4:00 EST
|
||||||
|
|
||||||
|
|
||||||
|
class TradeDirection(str, Enum):
    """Trade direction; str-valued for easy serialization."""

    LONG = "long"
    SHORT = "short"
|
||||||
|
|
||||||
|
|
||||||
|
class TradeStatus(str, Enum):
    """Trade lifecycle status; str-valued for easy serialization."""

    PENDING = "pending"
    OPEN = "open"
    CLOSED = "closed"
    CANCELLED = "cancelled"
|
||||||
|
|
||||||
|
|
||||||
|
class OrderType(str, Enum):
    """Order types; str-valued for easy serialization."""

    MARKET = "market"
    LIMIT = "limit"
    STOP = "stop"
    STOP_LIMIT = "stop_limit"
|
||||||
|
|
||||||
|
|
||||||
|
class MarketStructure(str, Enum):
    """Market structure states; str-valued for easy serialization."""

    BULLISH = "bullish"
    BEARISH = "bearish"
    NEUTRAL = "neutral"
    BOS = "bos"  # Break of Structure
    CHOCH = "choch"  # Change of Character
|
||||||
|
|
||||||
|
|
||||||
|
class Timeframe(str, Enum):
    """Supported timeframes; values match the TIMEFRAMES constant strings."""

    M1 = "1min"
    M5 = "5min"
    M15 = "15min"
|
||||||
|
|
||||||
114
src/core/exceptions.py
Normal file
114
src/core/exceptions.py
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
"""Custom exception hierarchy for ICT ML Trading System."""
|
||||||
|
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
|
||||||
|
class ICTTradingException(Exception):
    """Base exception for all ICT Trading System errors."""

    def __init__(
        self,
        message: str,
        error_code: Optional[str] = None,
        context: Optional[Dict[str, Any]] = None,
    ):
        """
        Initialize exception.

        Args:
            message: Error message
            error_code: Optional error code for programmatic handling
            context: Optional context dictionary with additional information
        """
        super().__init__(message)
        self.message = message
        self.error_code = error_code
        self.context = {} if context is None else context

    def __str__(self) -> str:
        """Return the message, prefixed with "[CODE] " when a code is set."""
        return f"[{self.error_code}] {self.message}" if self.error_code else self.message

    def to_dict(self) -> Dict[str, Any]:
        """Convert exception to dictionary for logging."""
        return {
            "error_type": type(self).__name__,
            "error_code": self.error_code,
            "message": self.message,
            "context": self.context,
        }
|
||||||
|
|
||||||
|
|
||||||
|
class DataError(ICTTradingException):
    """Raised when data loading, validation, or processing fails."""

    def __init__(
        self,
        message: str,
        error_code: Optional[str] = None,
        context: Optional[Dict[str, Any]] = None,
    ):
        """Initialize; the error code defaults to "DATA_ERROR"."""
        super().__init__(message, error_code if error_code else "DATA_ERROR", context)
|
||||||
|
|
||||||
|
|
||||||
|
class DetectorError(ICTTradingException):
    """Raised when pattern detection fails."""

    def __init__(
        self,
        message: str,
        error_code: Optional[str] = None,
        context: Optional[Dict[str, Any]] = None,
    ):
        """Initialize; the error code defaults to "DETECTOR_ERROR"."""
        super().__init__(message, error_code if error_code else "DETECTOR_ERROR", context)
|
||||||
|
|
||||||
|
|
||||||
|
class ModelError(ICTTradingException):
    """Raised when ML model training, inference, or evaluation fails."""

    def __init__(
        self,
        message: str,
        error_code: Optional[str] = None,
        context: Optional[Dict[str, Any]] = None,
    ):
        """Initialize; the error code defaults to "MODEL_ERROR"."""
        super().__init__(message, error_code if error_code else "MODEL_ERROR", context)
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigurationError(ICTTradingException):
    """Raised when configuration is invalid or missing."""

    def __init__(
        self,
        message: str,
        error_code: Optional[str] = None,
        context: Optional[Dict[str, Any]] = None,
    ):
        """Initialize; the error code defaults to "CONFIG_ERROR"."""
        super().__init__(message, error_code if error_code else "CONFIG_ERROR", context)
|
||||||
|
|
||||||
|
|
||||||
|
class TradingError(ICTTradingException):
    """Raised when trading execution fails."""

    def __init__(
        self,
        message: str,
        error_code: Optional[str] = None,
        context: Optional[Dict[str, Any]] = None,
    ):
        """Initialize; the error code defaults to "TRADING_ERROR"."""
        super().__init__(message, error_code if error_code else "TRADING_ERROR", context)
|
||||||
|
|
||||||
|
|
||||||
|
class ValidationError(ICTTradingException):
    """Raised when validation fails."""

    def __init__(
        self,
        message: str,
        error_code: Optional[str] = None,
        context: Optional[Dict[str, Any]] = None,
    ):
        """Initialize; the error code defaults to "VALIDATION_ERROR"."""
        super().__init__(message, error_code if error_code else "VALIDATION_ERROR", context)
|
||||||
|
|
||||||
6
src/logging/__init__.py
Normal file
6
src/logging/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
"""Logging system for ICT ML Trading System."""
|
||||||
|
|
||||||
|
from src.logging.logger import get_logger
|
||||||
|
|
||||||
|
__all__ = ["get_logger"]
|
||||||
|
|
||||||
152
src/logging/decorators.py
Normal file
152
src/logging/decorators.py
Normal file
@@ -0,0 +1,152 @@
|
|||||||
|
"""Logging decorators for automatic logging."""
|
||||||
|
|
||||||
|
import functools
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
from typing import Any, Callable, TypeVar
|
||||||
|
|
||||||
|
from src.logging.logger import get_logger
|
||||||
|
|
||||||
|
F = TypeVar("F", bound=Callable[..., Any])
|
||||||
|
|
||||||
|
|
||||||
|
def log_execution(logger: logging.Logger = None) -> Callable[[F], F]:
    """
    Decorator to log function entry, exit, and execution time.

    Args:
        logger: Logger instance; when None, a logger named after the wrapped
            function's module is resolved on each call.

    Returns:
        Decorated function
    """

    def decorator(func: F) -> F:
        @functools.wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            # Resolve the logger per call when none was supplied.
            active_logger = logger if logger is not None else get_logger(func.__module__)

            active_logger.debug(
                f"Entering {func.__name__}",
                extra={"function": func.__name__, "args": str(args), "kwargs": str(kwargs)},
            )

            started = time.time()
            try:
                result = func(*args, **kwargs)
                elapsed = time.time() - started
                active_logger.debug(
                    f"Exiting {func.__name__}",
                    extra={
                        "function": func.__name__,
                        "execution_time_seconds": elapsed,
                    },
                )
                return result
            except Exception as e:
                elapsed = time.time() - started
                active_logger.error(
                    f"Error in {func.__name__}: {e}",
                    exc_info=True,
                    extra={
                        "function": func.__name__,
                        "execution_time_seconds": elapsed,
                        "error": str(e),
                    },
                )
                raise

        return wrapper  # type: ignore

    return decorator
|
||||||
|
|
||||||
|
|
||||||
|
def log_exceptions(logger: logging.Logger = None) -> Callable[[F], F]:
    """
    Decorator to catch and log exceptions (the exception is always re-raised).

    Args:
        logger: Logger instance; when None, a logger named after the wrapped
            function's module is resolved on each call.

    Returns:
        Decorated function
    """

    def decorator(func: F) -> F:
        @functools.wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            active_logger = logger if logger is not None else get_logger(func.__module__)

            try:
                return func(*args, **kwargs)
            except Exception as e:
                active_logger.error(
                    f"Exception in {func.__name__}: {e}",
                    exc_info=True,
                    extra={
                        "function": func.__name__,
                        "error": str(e),
                        "error_type": type(e).__name__,
                    },
                )
                raise

        return wrapper  # type: ignore

    return decorator
|
||||||
|
|
||||||
|
|
||||||
|
def log_performance(logger: logging.Logger = None, min_time_seconds: float = 1.0) -> Callable[[F], F]:
    """
    Decorator to log performance metrics for slow functions.

    Args:
        logger: Logger instance; when None, a logger named after the wrapped
            function's module is resolved on each call.
        min_time_seconds: Minimum execution time to log (default: 1 second)

    Returns:
        Decorated function
    """

    def decorator(func: F) -> F:
        @functools.wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            active_logger = logger if logger is not None else get_logger(func.__module__)

            started = time.time()
            result = func(*args, **kwargs)
            elapsed = time.time() - started

            # Only functions at or above the threshold are reported.
            if elapsed >= min_time_seconds:
                active_logger.warning(
                    f"Slow execution: {func.__name__} took {elapsed:.2f}s",
                    extra={
                        "function": func.__name__,
                        "execution_time_seconds": elapsed,
                    },
                )

            return result

        return wrapper  # type: ignore

    return decorator
|
||||||
|
|
||||||
119
src/logging/filters.py
Normal file
119
src/logging/filters.py
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
"""Custom log filters."""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import re
|
||||||
|
from typing import List, Pattern
|
||||||
|
|
||||||
|
|
||||||
|
class SensitiveDataFilter(logging.Filter):
    """Filter to redact sensitive data (keys, tokens, passwords, DB URLs) from logs."""

    # (pattern, replacement) pairs.  Each pattern carries its own replacement
    # because the templates differ: the key/token/password patterns keep the
    # field name (group 1) and drop the value, while the URL patterns keep
    # the non-sensitive prefix.  The previous shared "\1=[REDACTED]" template
    # LEAKED the secret for the URL patterns, whose group 1 was the
    # password / database path itself.
    SENSITIVE_PATTERNS: List[tuple] = [
        (
            re.compile(r"(?i)(api[_-]?key|apikey)\s*[:=]\s*['\"]?([a-zA-Z0-9_-]{10,})['\"]?"),
            r"\1=[REDACTED]",
        ),
        (
            re.compile(r"(?i)(token)\s*[:=]\s*['\"]?([a-zA-Z0-9_-]{20,})['\"]?"),
            r"\1=[REDACTED]",
        ),
        (
            re.compile(r"(?i)(password|passwd|pwd)\s*[:=]\s*['\"]?([^\s'\"\n]{3,})['\"]?"),
            r"\1=[REDACTED]",
        ),
        (
            re.compile(r"(?i)(secret)\s*[:=]\s*['\"]?([a-zA-Z0-9_-]{10,})['\"]?"),
            r"\1=[REDACTED]",
        ),
        # DSN password: keep scheme and user, hide only the credential.
        (re.compile(r"(postgresql://[^:]+):[^@]+@", re.IGNORECASE), r"\1:[REDACTED]@"),
        # SQLite path: hide the database file location.
        (re.compile(r"sqlite:///[^\s]+", re.IGNORECASE), "sqlite:///[REDACTED]"),
    ]

    # Fields that should be redacted outright in structured extra data.
    SENSITIVE_FIELDS: List[str] = [
        "api_key",
        "api_token",
        "bot_token",
        "password",
        "secret",
        "database_url",
        "telegram_bot_token",
        "slack_webhook_url",
    ]

    def filter(self, record: logging.LogRecord) -> bool:
        """
        Filter log record and redact sensitive data.

        Args:
            record: Log record to filter

        Returns:
            True (always passes, but modifies record)
        """
        # Redact sensitive patterns in the raw message text.
        if hasattr(record, "msg") and isinstance(record.msg, str):
            record.msg = self._redact_string(record.msg)

        # Redact well-known sensitive fields in structured extra data.
        if hasattr(record, "extra_fields"):
            for field in self.SENSITIVE_FIELDS:
                if field in record.extra_fields:
                    record.extra_fields[field] = "[REDACTED]"

        return True

    def _redact_string(self, text: str) -> str:
        """
        Redact sensitive patterns in string.

        Args:
            text: Text to redact

        Returns:
            Redacted text
        """
        result = text
        for pattern, replacement in self.SENSITIVE_PATTERNS:
            result = pattern.sub(replacement, result)
        return result
|
||||||
|
|
||||||
|
|
||||||
|
class RateLimitFilter(logging.Filter):
    """Filter to rate limit repeated log messages."""

    def __init__(self, max_repeats: int = 5, window_seconds: int = 60):
        """
        Initialize rate limit filter.

        Args:
            max_repeats: Maximum number of identical messages in window
            window_seconds: Time window in seconds
        """
        super().__init__()
        self.max_repeats = max_repeats
        self.window_seconds = window_seconds
        # Maps "LEVEL:message" -> {"count": int, "first_seen": float}.
        self.message_counts: dict = {}

    def filter(self, record: logging.LogRecord) -> bool:
        """
        Filter log record based on rate limiting.

        Args:
            record: Log record to filter

        Returns:
            True if message should be logged, False otherwise
        """
        import time

        key = f"{record.levelname}:{record.getMessage()}"
        now = time.time()

        # Drop bookkeeping entries whose window has expired.
        self.message_counts = {
            k: entry
            for k, entry in self.message_counts.items()
            if now - entry["first_seen"] < self.window_seconds
        }

        entry = self.message_counts.get(key)
        if entry is None:
            # First occurrence within the window.
            self.message_counts[key] = {"count": 1, "first_seen": now}
            return True
        if entry["count"] >= self.max_repeats:
            # Already at the cap: suppress.
            return False
        entry["count"] += 1
        return True
|
||||||
|
|
||||||
104
src/logging/formatters.py
Normal file
104
src/logging/formatters.py
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
"""Custom log formatters."""
|
||||||
|
|
||||||
|
import json
import logging
from datetime import datetime, timezone
from typing import Any, Dict
|
||||||
|
|
||||||
|
try:
|
||||||
|
import colorlog
|
||||||
|
except ImportError:
|
||||||
|
colorlog = None
|
||||||
|
|
||||||
|
|
||||||
|
class JSONFormatter(logging.Formatter):
    """JSON formatter for structured logging."""

    def format(self, record: logging.LogRecord) -> str:
        """
        Format log record as JSON.

        Uses a timezone-aware UTC timestamp (``datetime.now(timezone.utc)``);
        ``datetime.utcnow()`` is deprecated and produced a naive timestamp
        with no offset, which is ambiguous for downstream consumers.

        Args:
            record: Log record to format

        Returns:
            JSON string
        """
        log_data: Dict[str, Any] = {
            "timestamp": datetime.now(timezone.utc).isoformat(),
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
            "module": record.module,
            "function": record.funcName,
            "line": record.lineno,
        }

        # Add exception info if present
        if record.exc_info:
            log_data["exception"] = self.formatException(record.exc_info)

        # Add extra fields
        if hasattr(record, "extra_fields"):
            log_data.update(record.extra_fields)

        return json.dumps(log_data)
|
||||||
|
|
||||||
|
|
||||||
|
class DetailedFormatter(logging.Formatter):
    """Detailed text formatter with full context (includes pathname:lineno)."""

    _FMT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s - [%(pathname)s:%(lineno)d]"
    _DATEFMT = "%Y-%m-%d %H:%M:%S"

    def __init__(self, *args: Any, **kwargs: Any):
        """Initialize formatter with the fixed detailed layout."""
        super().__init__(fmt=self._FMT, datefmt=self._DATEFMT, *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class ColoredFormatter(logging.Formatter):
    """Colored console formatter (plain text fallback when colorlog is absent)."""

    def __init__(self, *args: Any, **kwargs: Any):
        """
        Initialize formatter.

        Builds the inner colorlog formatter ONCE here; the previous
        implementation allocated a new ``colorlog.ColoredFormatter`` on
        every ``format()`` call.
        """
        if colorlog is None:
            # Fallback to standard formatter if colorlog not available.
            super().__init__(
                fmt="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
                datefmt="%Y-%m-%d %H:%M:%S",
                *args,
                **kwargs,
            )
            self._color_formatter = None
        else:
            super().__init__(*args, **kwargs)
            self._color_formatter = colorlog.ColoredFormatter(
                "%(log_color)s%(asctime)s - %(name)s - %(levelname)s - %(message)s%(reset)s",
                datefmt="%Y-%m-%d %H:%M:%S",
                log_colors={
                    "DEBUG": "cyan",
                    "INFO": "green",
                    "WARNING": "yellow",
                    "ERROR": "red",
                    "CRITICAL": "red,bg_white",
                },
            )

    def format(self, record: logging.LogRecord) -> str:
        """
        Format log record with colors.

        Args:
            record: Log record to format

        Returns:
            Formatted string with colors (plain text without colorlog)
        """
        if self._color_formatter is None:
            return super().format(record)
        return self._color_formatter.format(record)
|
||||||
|
|
||||||
81
src/logging/handlers.py
Normal file
81
src/logging/handlers.py
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
"""Custom log handlers."""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import logging.handlers
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from src.core.constants import PATHS
|
||||||
|
|
||||||
|
|
||||||
|
class RotatingFileHandler(logging.handlers.RotatingFileHandler):
    """Rotating file handler that creates missing log directories on demand."""

    def __init__(
        self,
        filename: str,
        max_bytes: int = 10485760,  # 10MB
        backup_count: int = 5,
        encoding: Optional[str] = "utf-8",
    ):
        """
        Initialize rotating file handler.

        Args:
            filename: Log file path
            max_bytes: Maximum file size before rotation
            backup_count: Number of backup files to keep
            encoding: File encoding
        """
        target = Path(filename)
        # The parent directory may not exist yet (e.g. fresh checkout).
        target.parent.mkdir(parents=True, exist_ok=True)

        super().__init__(
            filename=str(target),
            maxBytes=max_bytes,
            backupCount=backup_count,
            encoding=encoding,
        )
|
||||||
|
|
||||||
|
|
||||||
|
class ErrorFileHandler(RotatingFileHandler):
    """Handler specifically for error-level logs."""

    def __init__(
        self,
        # NOTE(review): this default is computed once at import time from
        # PATHS; changing PATHS afterwards will not move the log file.
        filename: str = str(PATHS["logs"] / "errors" / "exceptions.log"),
        max_bytes: int = 10485760,
        backup_count: int = 5,
    ):
        """
        Initialize error file handler.

        Args:
            filename: Log file path
            max_bytes: Maximum file size before rotation
            backup_count: Number of backup files to keep
        """
        super().__init__(filename, max_bytes, backup_count)
        # Only ERROR and above reach this file.
        self.setLevel(logging.ERROR)
|
||||||
|
|
||||||
|
|
||||||
|
class DatabaseHandler(logging.Handler):
    """Handler for storing critical logs in database (placeholder)."""

    def __init__(self):
        """Create the handler, restricted to CRITICAL records."""
        super().__init__(level=logging.CRITICAL)

    def emit(self, record: logging.LogRecord) -> None:
        """
        Emit log record to database.

        Args:
            record: Log record to store
        """
        # TODO: Implement database storage for critical log records.
        # Currently a no-op placeholder.
|
||||||
|
|
||||||
74
src/logging/logger.py
Normal file
74
src/logging/logger.py
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
"""Logger setup and configuration."""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import logging.config
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
from src.core.constants import PATHS
|
||||||
|
from src.core.exceptions import ConfigurationError
|
||||||
|
|
||||||
|
# Track if logging has been configured
|
||||||
|
_logging_configured = False
|
||||||
|
|
||||||
|
|
||||||
|
def get_logger(name: Optional[str] = None) -> logging.Logger:
    """
    Get a logger instance for the given name.

    The logging system is configured lazily: the first caller triggers
    _configure_logging(), subsequent calls reuse that configuration.

    Args:
        name: Logger name (typically __name__). If None, returns root logger.

    Returns:
        Configured logger instance

    Raises:
        ConfigurationError: If logging configuration cannot be loaded
    """
    global _logging_configured

    if not _logging_configured:
        _configure_logging()
        _logging_configured = True

    return logging.getLogger(name) if name else logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
def _configure_logging() -> None:
    """
    Configure logging system from YAML file.

    Loads ``config/logging.yaml``, ensures the log directory tree exists,
    then applies the configuration via ``logging.config.dictConfig``.

    Raises:
        ConfigurationError: If the config file is missing or cannot be applied.
    """
    logging_config_path = PATHS["config"] / "logging.yaml"

    if not logging_config_path.exists():
        raise ConfigurationError(
            f"Logging configuration not found: {logging_config_path}",
            context={"config_path": str(logging_config_path)},
        )

    try:
        with open(logging_config_path, "r") as f:
            config = yaml.safe_load(f)

        # Ensure the log root and every category subdirectory exist before
        # any file handler tries to open its target.
        log_dir = PATHS["logs"]
        subdirs = ["application", "detectors", "models", "trading", "alerts", "errors", "performance", "audit"]
        for directory in [log_dir] + [log_dir / s for s in subdirs]:
            directory.mkdir(parents=True, exist_ok=True)

        logging.config.dictConfig(config)

    except Exception as e:
        raise ConfigurationError(
            f"Failed to load logging configuration: {e}",
            context={"config_path": str(logging_config_path)},
        ) from e
|
||||||
|
|
||||||
2
tests/__init__.py
Normal file
2
tests/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
"""Test suite for ICT ML Trading System."""
|
||||||
|
|
||||||
135
tests/conftest.py
Normal file
135
tests/conftest.py
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
"""Pytest configuration and fixtures."""
|
||||||
|
|
||||||
|
import tempfile
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Generator
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from src.core.constants import PATHS
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
def temp_dir() -> Generator[Path, None, None]:
    """
    Create a temporary directory for tests.

    The directory (and everything created inside it) is removed
    automatically when the fixture's context exits.

    Yields:
        Path to temporary directory
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        yield Path(tmpdir)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
def temp_config_dir(temp_dir: Path) -> Generator[Path, None, None]:
    """
    Build a temporary config directory holding minimal config files.

    Writes a minimal ``config.yaml`` and ``logging.yaml`` so loaders that
    expect both files can run against this directory.

    Yields:
        Path to temporary config directory
    """
    cfg_dir = temp_dir / "config"
    cfg_dir.mkdir()

    # Minimal application configuration.
    (cfg_dir / "config.yaml").write_text(
        """
app:
  name: "Test App"
  version: "0.1.0"
  environment: "test"
  debug: true

trading:
  session:
    start_time: "03:00"
    end_time: "04:00"
    timezone: "America/New_York"
  instrument:
    symbol: "TEST"
    exchange: "TEST"
    contract_size: 25

data:
  raw_data_path: "data/raw"
  processed_data_path: "data/processed"
  labels_path: "data/labels"
  screenshots_path: "data/screenshots"
  timeframes:
    - "1min"
    - "5min"
    - "15min"

models:
  base_path: "models"
  pattern_graders_path: "models/pattern_graders"
  strategy_models_path: "models/strategy_models"
  min_labels_per_pattern: 200
  train_test_split: 0.8
"""
    )

    # Minimal dictConfig-style logging configuration.
    (cfg_dir / "logging.yaml").write_text(
        """
version: 1
disable_existing_loggers: false

formatters:
  detailed:
    format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    datefmt: '%Y-%m-%d %H:%M:%S'

handlers:
  console:
    class: logging.StreamHandler
    level: INFO
    formatter: detailed
    stream: ext://sys.stdout

loggers:
  src:
    level: DEBUG
    handlers:
      - console
    propagate: false

root:
  level: INFO
  handlers:
    - console
"""
    )

    yield cfg_dir
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
def sample_ohlcv_data():
    """Sample OHLCV data for testing: 60 one-minute bars starting 03:00."""
    import pandas as pd
    from datetime import datetime, timedelta

    start = datetime(2024, 1, 1, 3, 0)
    idx = range(60)
    return pd.DataFrame(
        {
            "timestamp": [start + timedelta(minutes=i) for i in idx],
            "open": [100.0 + 0.1 * i for i in idx],
            "high": [100.5 + 0.1 * i for i in idx],
            "low": [99.5 + 0.1 * i for i in idx],
            "close": [100.2 + 0.1 * i for i in idx],
            "volume": [1000] * 60,
        }
    )
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
def reset_config():
    """Reset the global config cache before AND after each test.

    The original version cleared the cache only before the test, so a
    config loaded during one test stayed cached after it finished; the
    post-test reset keeps the module state clean for anything running
    outside these fixtures as well.
    """
    import src.config.config_loader as config_module

    config_module._config = None
    yield
    config_module._config = None
|
||||||
|
|
||||||
104
tests/unit/test_config/test_config_loader.py
Normal file
104
tests/unit/test_config/test_config_loader.py
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
"""Tests for configuration loader."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from src.config.config_loader import get_config, load_config
|
||||||
|
from src.core.exceptions import ConfigurationError
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_config_success(temp_config_dir, monkeypatch):
    """Test successful config loading."""
    from src.core import constants

    # monkeypatch undoes this even if an assertion below fails; the manual
    # save/restore it replaces leaked the patched path into later tests.
    monkeypatch.setitem(constants.PATHS, "config", temp_config_dir)

    config = load_config()
    assert config is not None
    assert "app" in config
    assert config["app"]["name"] == "Test App"
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_config_missing_file(temp_dir, monkeypatch):
    """Test config loading with missing file."""
    from src.core import constants

    # monkeypatch restores PATHS automatically; the previous manual restore
    # was skipped whenever the assertion failed.
    monkeypatch.setitem(constants.PATHS, "config", temp_dir / "nonexistent")

    with pytest.raises(ConfigurationError):
        load_config()
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_config_with_key(temp_config_dir, monkeypatch):
    """Test getting config value by dotted key."""
    from src.core import constants

    # monkeypatch restores PATHS automatically; the previous manual restore
    # was skipped whenever the assertion failed.
    monkeypatch.setitem(constants.PATHS, "config", temp_config_dir)

    assert get_config("app.name") == "Test App"
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_config_with_default(temp_config_dir, monkeypatch):
    """Test getting config with default value for a missing key."""
    from src.core import constants

    # monkeypatch restores PATHS automatically; the previous manual restore
    # was skipped whenever the assertion failed.
    monkeypatch.setitem(constants.PATHS, "config", temp_config_dir)

    assert get_config("nonexistent.key", default="default_value") == "default_value"
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_config_none(temp_config_dir, monkeypatch):
    """Test getting the entire config when no key is given."""
    from src.core import constants

    # monkeypatch restores PATHS automatically; the previous manual restore
    # was skipped whenever an assertion failed.
    monkeypatch.setitem(constants.PATHS, "config", temp_config_dir)

    config = get_config()
    assert isinstance(config, dict)
    assert "app" in config
|
||||||
|
|
||||||
|
|
||||||
|
def test_env_var_substitution(temp_config_dir, monkeypatch):
    """Test environment variable substitution in config."""
    from src.core import constants

    # monkeypatch restores both PATHS and the env var even on failure; the
    # original's manual `del os.environ[...]` / path restore never ran when
    # an assertion failed, leaking TEST_VAR into subsequent tests.
    monkeypatch.setitem(constants.PATHS, "config", temp_config_dir)
    monkeypatch.setenv("TEST_VAR", "test_value")

    # Overwrite the fixture's config with one that references the env var.
    (temp_config_dir / "config.yaml").write_text(
        """
app:
  name: "${TEST_VAR}"
  version: "0.1.0"
"""
    )

    # Drop the cached config so load_config re-reads the rewritten file.
    import src.config.config_loader as config_module

    config_module._config = None

    config = load_config()
    assert config["app"]["name"] == "test_value"
|
||||||
|
|
||||||
90
tests/unit/test_core/test_exceptions.py
Normal file
90
tests/unit/test_core/test_exceptions.py
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
"""Tests for custom exception classes."""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from src.core.exceptions import (
|
||||||
|
ConfigurationError,
|
||||||
|
DataError,
|
||||||
|
DetectorError,
|
||||||
|
ICTTradingException,
|
||||||
|
ModelError,
|
||||||
|
TradingError,
|
||||||
|
ValidationError,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_base_exception():
    """Test base exception class defaults."""
    err = ICTTradingException("Test message")
    assert err.message == "Test message"
    assert err.context == {}
    assert err.error_code is None
    assert str(err) == "Test message"
|
||||||
|
|
||||||
|
|
||||||
|
def test_exception_with_error_code():
    """Test that an error code is stored and prefixed in str()."""
    err = ICTTradingException("Test message", error_code="TEST_ERROR")
    assert err.error_code == "TEST_ERROR"
    assert str(err) == "[TEST_ERROR] Test message"
|
||||||
|
|
||||||
|
|
||||||
|
def test_exception_with_context():
    """Test that context is stored and surfaced via to_dict()."""
    ctx = {"key": "value", "number": 42}
    err = ICTTradingException("Test message", context=ctx)
    assert err.to_dict()["context"] == ctx
    assert err.context == ctx
|
||||||
|
|
||||||
|
|
||||||
|
def test_exception_to_dict():
    """Test the full shape of the to_dict() payload."""
    err = ICTTradingException("Test message", error_code="TEST", context={"key": "value"})
    payload = err.to_dict()
    expected = {
        "error_type": "ICTTradingException",
        "error_code": "TEST",
        "message": "Test message",
        "context": {"key": "value"},
    }
    for field, value in expected.items():
        assert payload[field] == value
|
||||||
|
|
||||||
|
|
||||||
|
def test_data_error():
    """Test DataError exception."""
    err = DataError("Data loading failed")
    assert err.error_code == "DATA_ERROR"
    assert isinstance(err, ICTTradingException)
|
||||||
|
|
||||||
|
|
||||||
|
def test_detector_error():
    """Test DetectorError exception."""
    err = DetectorError("Detection failed")
    assert err.error_code == "DETECTOR_ERROR"
    assert isinstance(err, ICTTradingException)
|
||||||
|
|
||||||
|
|
||||||
|
def test_model_error():
    """Test ModelError exception."""
    err = ModelError("Model training failed")
    assert err.error_code == "MODEL_ERROR"
    assert isinstance(err, ICTTradingException)
|
||||||
|
|
||||||
|
|
||||||
|
def test_configuration_error():
    """Test ConfigurationError exception."""
    err = ConfigurationError("Invalid config")
    assert err.error_code == "CONFIG_ERROR"
    assert isinstance(err, ICTTradingException)
|
||||||
|
|
||||||
|
|
||||||
|
def test_trading_error():
    """Test TradingError exception."""
    err = TradingError("Trade execution failed")
    assert err.error_code == "TRADING_ERROR"
    assert isinstance(err, ICTTradingException)
|
||||||
|
|
||||||
|
|
||||||
|
def test_validation_error():
    """Test ValidationError exception."""
    err = ValidationError("Validation failed")
    assert err.error_code == "VALIDATION_ERROR"
    assert isinstance(err, ICTTradingException)
|
||||||
|
|
||||||
83
tests/unit/test_logging/test_logger.py
Normal file
83
tests/unit/test_logging/test_logger.py
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
"""Tests for logging system."""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from src.core.exceptions import ConfigurationError
|
||||||
|
from src.logging import get_logger
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_logger_with_name():
    """Test getting logger with an explicit name."""
    result = get_logger("test_module")
    assert result.name == "test_module"
    assert isinstance(result, logging.Logger)
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_logger_root():
    """Test getting a logger with no name argument."""
    assert isinstance(get_logger(), logging.Logger)
|
||||||
|
|
||||||
|
|
||||||
|
def test_logger_logs_message(caplog):
    """Test that an emitted record is captured by caplog."""
    get_logger("test").info("Test message")
    assert "Test message" in caplog.text
|
||||||
|
|
||||||
|
|
||||||
|
def test_logger_with_missing_config(temp_dir, monkeypatch):
    """Test logger setup fails loudly when the config file is absent."""
    from src.core import constants

    # monkeypatch restores PATHS automatically; the original manual restore
    # was skipped if the pytest.raises block failed, leaking the bad path.
    monkeypatch.setitem(constants.PATHS, "config", temp_dir / "nonexistent")

    with pytest.raises(ConfigurationError):
        get_logger("test")
|
||||||
|
|
||||||
|
|
||||||
|
def test_logger_creates_directories(temp_dir, monkeypatch):
    """Test that logger setup creates the log directory tree."""
    from src.core import constants

    # Redirect BOTH paths via monkeypatch (auto-restored on failure).
    # The original test redirected only "logs", so get_logger still read
    # the real config file and the temp logging.yaml written below was
    # never actually used.
    config_dir = temp_dir / "config"
    config_dir.mkdir()
    monkeypatch.setitem(constants.PATHS, "logs", temp_dir / "logs")
    monkeypatch.setitem(constants.PATHS, "config", config_dir)

    # Minimal dictConfig-style logging configuration.
    (config_dir / "logging.yaml").write_text(
        """
version: 1
disable_existing_loggers: false

formatters:
  detailed:
    format: '%(message)s'

handlers:
  console:
    class: logging.StreamHandler
    level: INFO
    formatter: detailed

root:
  level: INFO
  handlers:
    - console
"""
    )

    logger = get_logger("test")
    assert isinstance(logger, logging.Logger)
    # The point of the test: the redirected log directory must now exist.
    assert (temp_dir / "logs").is_dir()
|
||||||
|
|
||||||
Reference in New Issue
Block a user