# SQLUtils-Python Requirements
# Install with: pip install -r requirements.txt

# ==========================================
# Core Dependencies (Required)
# ==========================================
CoreUtilities==0.0.1  # Custom logging and utility functions
sqlglot==27.29.0  # SQL parser and transpiler for query analysis
Unidecode==1.4.0  # Unicode text transliteration for identifier validation
pandas>=2.0.0  # Core dataframe library for data loading (required for read_sql)

# ==========================================
# Optional: MySQL Drivers (Choose one or more)
# ==========================================
# PyMySQL==1.1.2  # Pure Python MySQL client (recommended for most use cases)
# mysql-connector-python==9.3.0  # Official Oracle MySQL connector
# mysqlclient>=2.2.0  # High-performance C-based MySQL client

# ==========================================
# Optional: PostgreSQL Drivers (Choose one or more)
# ==========================================
# psycopg2-binary==2.9.11  # Most popular PostgreSQL adapter (recommended)
# psycopg>=3.1.0  # Modern PostgreSQL adapter (psycopg3)
# pg8000>=1.30.0  # Pure Python PostgreSQL driver

# ==========================================
# Optional: Oracle Drivers (Choose one)
# ==========================================
# oracledb==3.4.0  # Official Oracle Database driver (recommended for Oracle 12c+)
# cx_Oracle==8.3.0  # Legacy Oracle Database driver

# ==========================================
# Optional: SQL Server Drivers (Choose one)
# ==========================================
# pyodbc==5.3.0  # ODBC database access (recommended, requires ODBC driver installation)
# pymssql==2.3.9  # SQL Server client built on FreeTDS (C extension; no ODBC driver needed)

# ==========================================
# Optional: Amazon Redshift Drivers
# ==========================================
# redshift_connector==2.1.5  # Amazon Redshift native connector
# Note: Redshift can also use psycopg2 (PostgreSQL driver)

# ==========================================
# Optional: Google BigQuery Drivers
# ==========================================
# google-cloud-bigquery==3.38.0  # Official Google BigQuery client (recommended)
# pandas-gbq>=0.19.0  # Pandas interface to BigQuery

# ==========================================
# Optional: High-Performance Universal Drivers
# ==========================================
# connectorx>=0.3.0  # Rust-based high-performance data loader (supports multiple databases)
# sqlalchemy>=2.0.0  # SQL toolkit and ORM (universal database support)

# ==========================================
# Optional: Dataframe Backends for read_sql
# ==========================================
# polars==1.35.1  # High-performance dataframe library (default for read_sql, recommended)
# dask==2025.11.0  # Distributed dataframe library for large datasets
# pyarrow==20.0.0  # Columnar data format (required for Polars/Dask, optional for Pandas)

# ==========================================
# Optional: Secure Credential Management
# ==========================================
# boto3>=1.26.0  # AWS Secrets Manager and RDS IAM authentication
# google-cloud-secret-manager>=2.16.0  # Google Cloud Secret Manager
# azure-identity>=1.12.0  # Azure authentication
# azure-keyvault-secrets>=4.6.0  # Azure Key Vault
# hvac>=1.1.0  # HashiCorp Vault (static and dynamic credentials)
# keyring>=24.0.0  # OS credential stores (Keychain/Windows Credential Locker)

# ==========================================
# Notes:
# ==========================================
# - SQLite support is built-in (uses sqlite3 standard library)
# - For production use, install only the drivers you need
# - Some drivers require additional system libraries:
#   - pyodbc: Requires ODBC driver installation (e.g., Microsoft ODBC Driver for SQL Server)
#   - mysqlclient: Requires MySQL client libraries
#   - psycopg2: Building from source requires PostgreSQL client libraries;
#     psycopg2-binary ships pre-compiled wheels (convenient for development,
#     but upstream recommends building psycopg2 from source for production)
# - For BigQuery, you'll also need to set up authentication (service account key)
# - For read_sql():
#   - pandas is required (core dependency)
#   - polars is recommended for best performance (default output format)
#   - dask is optional for distributed computing on large datasets
#   - pyarrow is optional for columnar data format
#   - connectorx provides Rust-based acceleration (automatic when available)
#
# ==========================================
# Installing with Poetry (Recommended):
# ==========================================
# Install specific credential manager:
#   poetry install --extras "aws-credentials"
#   poetry install --extras "gcp-credentials"
#   poetry install --extras "azure-credentials"
#   poetry install --extras "vault-credentials"
#   poetry install --extras "keyring-credentials"
#
# Install all credential managers:
#   poetry install --extras "credentials"
#
# Install specific database drivers:
#   poetry install --extras "mysql postgres"
#
# Install everything:
#   poetry install --extras "full"
#
# Combine multiple extras:
#   poetry install --extras "mysql postgres aws-credentials"
