# --- Makefile (generated by PyScaffold Clickstart) ----------------------------
# Provides:
#   - Auto versioned builds (setuptools_scm)
#   - Linting/formatting via Ruff
#   - Incremental pytest testing via STAMPs
#   - Release helpers (Git tag based)
# -----------------------------------------------------------------------------

# Run recipes silently, one shell per recipe, with strict bash flags.
.SILENT:
.ONESHELL:
# Let make resolve bash via PATH itself; $(shell which bash) would silently
# yield an empty SHELL (and a confusing failure) when bash is absent.
SHELL := bash
.SHELLFLAGS := -eu -o pipefail -c
# Remove half-written targets when a recipe fails so they are not
# mistaken for up-to-date files on the next run.
.DELETE_ON_ERROR:

# --- Python Detection and Miniconda Installation ---
# Local Miniconda installation directory (no admin rights needed)
MINICONDA_DIR := .miniconda

# Select the Miniconda interpreter path and installer URL for the current
# OS + CPU architecture (evaluated once at parse time via $(OS)/uname).
ifeq ($(OS),Windows_NT)
    # Windows paths
    MINICONDA_PYTHON := $(MINICONDA_DIR)/python.exe
    MINICONDA_URL := https://repo.anaconda.com/miniconda/Miniconda3-latest-Windows-x86_64.exe
    MINICONDA_INSTALLER := miniconda-installer.exe
else
    # Unix paths (Linux/macOS)
    MINICONDA_PYTHON := $(MINICONDA_DIR)/bin/python
    UNAME_S := $(shell uname -s)
    UNAME_M := $(shell uname -m)
    # macOS: Apple Silicon (arm64) vs Intel (x86_64)
    ifeq ($(UNAME_S),Darwin)
        ifeq ($(UNAME_M),arm64)
            MINICONDA_URL := https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-arm64.sh
        else
            MINICONDA_URL := https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
        endif
    else
        # Linux: aarch64 vs x86_64
        ifeq ($(UNAME_M),aarch64)
            MINICONDA_URL := https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-aarch64.sh
        else
            MINICONDA_URL := https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh
        endif
    endif
    MINICONDA_INSTALLER := miniconda-installer.sh
endif

# Detect system Python: check python3, python, py -3 in order
# Returns empty string if none found
DETECTED_PYTHON := $(shell \
    command -v python3 >/dev/null 2>&1 && python3 --version >/dev/null 2>&1 && echo python3 || \
    (command -v python >/dev/null 2>&1 && python --version >/dev/null 2>&1 && echo python || \
    (command -v py >/dev/null 2>&1 && py -3 --version >/dev/null 2>&1 && echo "py -3" || echo "")))

# Check if detected Python can actually create venvs (Debian/Ubuntu often lack ensurepip)
# We test by creating a temporary venv - --help passes even when ensurepip is missing
# NOTE(review): this $(shell ...) builds and deletes a throwaway venv on EVERY
# make invocation (even `make help`), which can add noticeable startup latency —
# consider caching the result in a file.
PYTHON_HAS_VENV := $(shell \
    if [ -n "$(DETECTED_PYTHON)" ]; then \
      tmpdir=$$(mktemp -d 2>/dev/null || mktemp -d -t 'venvtest'); \
      if $(DETECTED_PYTHON) -m venv "$$tmpdir/test" >/dev/null 2>&1; then \
        rm -rf "$$tmpdir"; echo 1; \
      else \
        rm -rf "$$tmpdir"; echo 0; \
      fi; \
    else \
      echo 0; \
    fi)

# SYS_PYTHON: use detected Python only if it has venv, otherwise fall back to Miniconda
# NEED_MINICONDA=1 makes the venv rule depend on the Miniconda install rule.
ifeq ($(DETECTED_PYTHON),)
    SYS_PYTHON := $(MINICONDA_PYTHON)
    NEED_MINICONDA := 1
else ifeq ($(PYTHON_HAS_VENV),0)
    # Python exists but lacks venv module - use Miniconda
    SYS_PYTHON := $(MINICONDA_PYTHON)
    NEED_MINICONDA := 1
else
    SYS_PYTHON := $(DETECTED_PYTHON)
    NEED_MINICONDA := 0
endif

# Virtual environment settings - ALWAYS use .venv
VENV_DIR := .venv
# Windows venvs place the interpreter under Scripts/, POSIX venvs under bin/.
ifeq ($(OS),Windows_NT)
    VENV_PYTHON := $(VENV_DIR)/Scripts/python.exe
    VENV_ACTIVATE := $(VENV_DIR)/Scripts/activate
else
    VENV_PYTHON := $(VENV_DIR)/bin/python
    VENV_ACTIVATE := $(VENV_DIR)/bin/activate
endif

# PYTHON: In CI environments (where CI=true), use system Python directly.
# Otherwise, use the venv Python.
# NOTE(review): `ifdef CI` is true for ANY non-empty value — CI=false or CI=0
# still selects the system interpreter; confirm that is intended.
ifdef CI
    PYTHON := $(DETECTED_PYTHON)
else
    PYTHON := $(VENV_PYTHON)
endif

# Main code package
CODE_DIRS   := src/sfdump
CONF_FILES  := pyproject.toml pytest.ini
STAMPS_DIR  := .stamps
# Set NO_CACHE=1 to force the stamped test suites to re-run.
NO_CACHE   ?= 0

# Default release kind for `make release` (patch|minor|major)
KIND ?= patch

# Pytest flags
PYTEST         := $(PYTHON) -m pytest
PYTEST_Q       := -q
PYTEST_WARN    := --disable-warnings
# Do NOT enforce fail-under per-stamp; we'll enforce once at the end
PYTEST_COV_BASE := --cov=src/sfdump
PYTEST_COV_UNIT := $(PYTEST_COV_BASE) --cov-report= --cov-append
PYTEST_COV_INTEG := $(PYTEST_COV_BASE) --cov-report= --cov-append
PYTEST_XDIST   ?= -n auto
PYTEST_TIMEOUT ?= --timeout=60

# Test directories (align with our layout)
UNIT_DIR    := tests/unit
INTEG_DIR   := tests/integration
# live/system tests (opt-in, uncached)
# BUGFIX: the comment must be on its own line — the original inline comment
# left trailing spaces inside the value ("tests/system  "), which breaks any
# path or shell command built from SYSTEM_DIR.
SYSTEM_DIR  := tests/system

# Declare all command-style targets phony so same-named files never mask them.
# BUGFIX: the original list fused "sf-export" and "check-clean" into the bogus
# name "sf-exportcheck-clean", leaving both real targets undeclared; it also
# omitted test-e2e and release-unstash entirely.
.PHONY: help bootstrap check-venv check-python install-miniconda precommit docs lint format \
        test test-all test-live test-e2e clean-tests \
        build upload version fetch-tags changelog changelog-md \
        release-show release release-patch release-minor release-major \
        release-zip release-wheel gh-release release-unstash \
        clean clean-venv clean-miniconda run-cli sf-export check-clean

# Self-documenting entry point: `make help` lists the common targets.
# NOTE(review): test-e2e exists below but is not listed here — confirm whether
# that omission is deliberate (it needs live credentials).
help:
	@echo "Common targets:"
	@echo "  make bootstrap           - create .venv and install .[dev] (installs Python if needed)"
	@echo "  make precommit           - install pre-commit hook"
	@echo "  make docs                - build Sphinx/MyST docs (HTML + PDF) to docs/"
	@echo "  make lint                - run Ruff checks"
	@echo "  make format              - auto-fix via Ruff"
	@echo "  make test                - run cached unit+integration tests (not live)"
	@echo "  make test-all            - run all non-live tests (no stamps)"
	@echo "  make test-live           - run @live tests only (no cache)"
	@echo "  make build               - build wheel+sdist"
	@echo "  make upload              - upload to PyPI (via Twine)"
	@echo "  make version             - print setuptools_scm inferred version"
	@echo "  make changelog           - show changes since last Git tag"
	@echo "  make changelog-md        - write docs/CHANGELOG.md from Git history"
	@echo "  make release-show        - show scm ver, installed ver, last Git tag"
	@echo "  make release             - run tests, tag, GitHub Release, build + upload to PyPI (KIND=patch|minor|major)"
	@echo "  make release-patch       - tag vX.Y.(Z+1) + GitHub Release"
	@echo "  make release-minor       - tag vX.(Y+1).0 + GitHub Release"
	@echo "  make release-major       - tag v(X+1).0.0 + GitHub Release"
	@echo "  make clean               - remove build artifacts"
	@echo "  make run-cli             - run CLI entry point (pass CLI_ARGS=...)"
	@echo "  make sf-export           - run make -f Makefile.export export-all"

# --- Miniconda Installation (if no Python found) ---
# Install Miniconda locally if no system Python is available
# Note: On Windows, Make runs under Git Bash, so we use bash-compatible commands
# The target is the interpreter file itself, so the rule is skipped once installed.
$(MINICONDA_PYTHON):
	@echo "=== No Python found. Installing Miniconda to $(MINICONDA_DIR) ==="
	@echo "Downloading Miniconda installer..."
# Windows: curl first, PowerShell Invoke-WebRequest as fallback; silent install.
ifeq ($(OS),Windows_NT)
	@curl -fsSL -o $(MINICONDA_INSTALLER) $(MINICONDA_URL) || \
	  powershell -Command "Invoke-WebRequest -Uri '$(MINICONDA_URL)' -OutFile '$(MINICONDA_INSTALLER)'"
	@echo "Running installer (this may take a few minutes)..."
# NOTE(review): /D= is handed a POSIX-style path from $(shell pwd) under Git
# Bash, while the installer normally expects a Windows-style path — verify on
# an actual Windows host.
	@cmd.exe /c 'start /wait "" $(MINICONDA_INSTALLER) /InstallationType=JustMe /RegisterPython=0 /S /D=$(shell pwd)/$(MINICONDA_DIR)'
	@rm -f $(MINICONDA_INSTALLER)
else
# Unix: download with curl or wget, then run the installer in batch mode (-b).
	@if command -v curl >/dev/null 2>&1; then \
	  curl -fsSL -o $(MINICONDA_INSTALLER) $(MINICONDA_URL); \
	elif command -v wget >/dev/null 2>&1; then \
	  wget -q -O $(MINICONDA_INSTALLER) $(MINICONDA_URL); \
	else \
	  echo "❌ Neither curl nor wget found. Please install one of them."; \
	  exit 1; \
	fi
	@echo "Running installer (this may take a few minutes)..."
	@bash $(MINICONDA_INSTALLER) -b -p $(MINICONDA_DIR)
	@rm -f $(MINICONDA_INSTALLER)
endif
	@echo "✅ Miniconda installed to $(MINICONDA_DIR)"

# Convenience alias: `make install-miniconda` triggers the rule above.
install-miniconda: $(MINICONDA_PYTHON)

# Check if Python is available (either system or local Miniconda)
# Exits 1 with guidance when Miniconda is required but not installed yet;
# the branch is chosen at parse time via NEED_MINICONDA.
check-python:
ifeq ($(NEED_MINICONDA),1)
	@if [ ! -f "$(MINICONDA_PYTHON)" ]; then \
	  echo "❌ No usable Python found. Miniconda will be installed automatically."; \
	  if [ -n "$(DETECTED_PYTHON)" ]; then \
	    echo "   (System Python '$(DETECTED_PYTHON)' exists but lacks venv module)"; \
	  fi; \
	  echo "   Run 'make bootstrap' to install Miniconda and set up the environment."; \
	  exit 1; \
	fi
	@echo "Using local Miniconda Python: $(MINICONDA_PYTHON)"
else
	@echo "Using system Python: $(SYS_PYTHON)"
endif

# Create virtual environment if it doesn't exist
# Depends on Miniconda if no system Python is available
# (the prerequisite list is chosen at parse time; both branches share the
# single recipe below)
ifeq ($(NEED_MINICONDA),1)
$(VENV_PYTHON): $(MINICONDA_PYTHON)
else
$(VENV_PYTHON):
endif
	@echo "=== Creating virtual environment in $(VENV_DIR) ==="
	$(SYS_PYTHON) -m venv $(VENV_DIR)
	@echo "✅ Virtual environment created"

# Guard: refuse to run if venv Python doesn't exist (forces venv creation)
check-venv:
	@if [ ! -f "$(VENV_PYTHON)" ]; then \
	  echo "❌ Virtual environment not found at $(VENV_DIR)"; \
	  echo "   Run 'make bootstrap' to create it."; \
	  exit 1; \
	fi

# Create .venv (if missing) and install the project in editable mode with dev
# extras.  Outside CI, $(PYTHON) points into the venv, so the system
# interpreter is never modified.
bootstrap: $(VENV_PYTHON)
	@echo "=== Installing into $(VENV_DIR) (not system Python) ==="
	$(PYTHON) -m pip install -U pip setuptools wheel
	$(PYTHON) -m pip install -e ".[dev]"
	@echo "✅ Installed. Activate with: source $(VENV_ACTIVATE)"

# Install the git pre-commit hook (requires `pre-commit` on PATH).
precommit:
	pre-commit install

# -----------------------------------------------------------------------------#
# Docs
# Build HTML and LaTeX docs; the LaTeX output is compiled to PDF by the
# Sphinx-generated Makefile, then copied to docs/sf.pdf.
docs:
	$(PYTHON) -m sphinx -b html docs docs/_build/html
	$(PYTHON) -m sphinx -b latex docs docs/_build/latex && $(MAKE) -C docs/_build/latex all-pdf
	cp docs/_build/latex/sfdump.pdf docs/sf.pdf
	@echo "HTML: docs/_build/html/  PDF: docs/sf.pdf"


# -----------------------------------------------------------------------------#
# Linting / Formatting
# Check-only mode: fails if Ruff would report issues or reformat anything.
lint:
	ruff check .
	ruff format --check .

# Apply Ruff autofixes and formatting in place.
format:
	ruff check --fix .
	ruff format .

# -----------------------------------------------------------------------------#
# Incremental Testing (cache via stamps)
#
# Each suite gets a stamp file plus a signature file recording a sha1 over the
# test dir, code dirs and config files.  The suite re-runs only when the
# signature changes, NO_CACHE=1, or the stamp is missing.

$(STAMPS_DIR):
	mkdir -p $(STAMPS_DIR)

UNIT_STAMP  := $(STAMPS_DIR)/unit.ok
UNIT_SIG    := $(STAMPS_DIR)/unit.sig
INTEG_STAMP := $(STAMPS_DIR)/integration.ok
INTEG_SIG   := $(STAMPS_DIR)/integration.sig

# Hash every file under directory $(1) (skipping __pycache__) into one sha1.
# BUGFIX: the awk field must be written "$$1" — a bare "$1" inside a define is
# expanded by make to the $(call ...) argument, turning the awk program into
# garbage so the computed signature was always empty.
# NOTE(review): sha1sum and `xargs -r` are GNU-isms; stock macOS ships shasum
# and BSD xargs — confirm macOS contributors have coreutils installed.
define compute_dir_sig
{ [ -d "$(1)" ] && find $(1) -type f -not -path "*/__pycache__/*" -print0 || true; } \
| LC_ALL=C sort -z | xargs -0r sha1sum | sha1sum | awk '{print $$1}'
endef

# Generic pytest invocation for test path $(1).
# NOTE(review): currently unused, and PYTEST_COV is never defined (expands
# empty) — confirm before wiring this helper in.
define run_pytest
$(PYTHON) -m pytest $(1) $(PYTEST_WARN) $(PYTEST_XDIST) $(PYTEST_TIMEOUT) $(PYTEST_COV)
endef

# BUGFIX (both stamp rules): the command substitutions must be written
# "$$( ... )" so the shell performs them.  The original "$( ... )" was consumed
# by make as an (undefined) variable reference, so every signature variable was
# empty and the cache never invalidated after the first run — changed code or
# tests were silently skipped.
$(UNIT_STAMP): | $(STAMPS_DIR)
	@tests_sig=$$( $(call compute_dir_sig,$(UNIT_DIR)) ); \
	code_sig=$$( $(call compute_dir_sig,$(CODE_DIRS)) ); \
	conf_sig=$$( sha1sum $(CONF_FILES) 2>/dev/null | awk '{print $$1}' | sha1sum | awk '{print $$1}' ); \
	new_sig=$$( printf "%s\n%s\n%s\n" "$$tests_sig" "$$code_sig" "$$conf_sig" | sha1sum | awk '{print $$1}' ); \
	old_sig=$$(cat $(UNIT_SIG) 2>/dev/null || echo -n); \
	if [ "$(NO_CACHE)" = "1" ] || [ "$$new_sig" != "$$old_sig" ] || [ ! -f $@ ]; then \
	  echo "=== Running unit tests ==="; \
	  rm -f .coverage; \
	  $(PYTHON) -m pytest -q $(UNIT_DIR) -m "not live" $(PYTEST_WARN) $(PYTEST_XDIST) $(PYTEST_TIMEOUT) $(PYTEST_COV_UNIT); \
	  echo "$$new_sig" > $(UNIT_SIG); \
	  touch $@; \
	else echo "No changes detected; skipping unit tests."; fi

# Same caching scheme as the unit stamp; pytest exit code 5 ("no tests
# collected") is treated as success so an empty integration dir doesn't fail.
$(INTEG_STAMP): | $(STAMPS_DIR)
	@tests_sig=$$( $(call compute_dir_sig,$(INTEG_DIR)) ); \
	code_sig=$$( $(call compute_dir_sig,$(CODE_DIRS)) ); \
	conf_sig=$$( sha1sum $(CONF_FILES) 2>/dev/null | awk '{print $$1}' | sha1sum | awk '{print $$1}' ); \
	new_sig=$$( printf "%s\n%s\n%s\n" "$$tests_sig" "$$code_sig" "$$conf_sig" | sha1sum | awk '{print $$1}' ); \
	old_sig=$$(cat $(INTEG_SIG) 2>/dev/null || echo -n); \
	if [ "$(NO_CACHE)" = "1" ] || [ "$$new_sig" != "$$old_sig" ] || [ ! -f $@ ]; then \
	  echo "=== Running integration tests ==="; \
	  set +e; \
	  $(PYTHON) -m pytest -q $(INTEG_DIR) -m "not live" $(PYTEST_WARN) $(PYTEST_XDIST) $(PYTEST_TIMEOUT) $(PYTEST_COV_INTEG); \
	  status=$$?; \
	  set -e; \
	  if [ "$$status" -eq 5 ]; then \
	    echo "No integration tests collected; treating as success."; \
	  elif [ "$$status" -ne 0 ]; then \
	    exit $$status; \
	  fi; \
	  echo "$$new_sig" > $(INTEG_SIG); \
	  touch $@; \
	else echo "No changes detected; skipping integration tests."; fi



# Cached test entry point: bring both stamps up to date, then run ONE
# aggregated coverage gate (the per-suite runs append into .coverage).
test: $(UNIT_STAMP) $(INTEG_STAMP)
	@echo "=== Aggregated coverage check (adaptive gate) ==="
	$(PYTHON) -m coverage report
	$(PYTHON) tools/coverage_gate.py
	$(PYTHON) -m coverage xml
	@echo "✅ Unit + Integration tests up-to-date (not live)"

# Full non-live run, no stamps (useful before releases)
test-all:
	$(PYTHON) -m pytest -v -m "not live" $(PYTEST_WARN) $(PYTEST_XDIST) $(PYTEST_TIMEOUT) --cov=src/sfdump --cov-report=term-missing --cov-report=xml --cov-fail-under=40


# Live tests are explicit & uncached (gentle on API; clearer intent)
test-live:
	SF_LIVE_TESTS=true $(PYTHON) -m pytest -v -m live $(PYTEST_WARN) --timeout=180 --cov=src/sfdump --cov-report=xml

# E2E tests - full export pipeline with real Salesforce (NOT for CI)
# Requires valid .env credentials and network access
test-e2e:
	@echo "=== Running E2E tests (requires live Salesforce credentials) ==="
	SF_E2E_TESTS=true $(PYTHON) -m pytest tests/e2e/ -v $(PYTEST_WARN) --timeout=1800

# Drop all stamps/signatures so the next `make test` re-runs everything.
clean-tests:
	rm -rf $(STAMPS_DIR)

# -----------------------------------------------------------------------------#
# Build & Publish
# (DIST_DIR is assigned further down in this file; recipes expand variables at
# run time, after the whole makefile is parsed, so the reference is safe.)
build:
	rm -rf $(DIST_DIR)
	$(PYTHON) -m pip install -U build
	$(PYTHON) -m build

# Build fresh artifacts, validate metadata with twine, then upload to PyPI.
upload: build
	$(PYTHON) -m pip install -U twine
	$(PYTHON) -m twine check dist/*
	$(PYTHON) -m twine upload dist/*

# -----------------------------------------------------------------------------#
# Version & Release helpers (setuptools_scm + Git tags)
# Refresh local tags from the remote; never fails (offline-friendly).
fetch-tags:
	git fetch --tags --force --prune 2>/dev/null || true

# Single, non-duplicated tag derivation.
# BUGFIX: `head -n 1` exits 0 even when it prints nothing, so the old
# `... || echo v0.0.0` fallback never fired and LAST_TAG was empty in a
# tagless repo (breaking MAJOR/MINOR/PATCH and the changelog range).
# Apply the fallback on the make side instead.
LAST_TAG := $(shell git tag --list "v[0-9]*.[0-9]*.[0-9]*" --sort=-version:refname | head -n 1)
ifeq ($(strip $(LAST_TAG)),)
LAST_TAG := v0.0.0
endif
MAJOR    := $(shell echo "$(LAST_TAG)" | sed -E 's/^v([0-9]+)\..*/\1/')
MINOR    := $(shell echo "$(LAST_TAG)" | sed -E 's/^v[0-9]+\.([0-9]+)\..*/\1/')
PATCH    := $(shell echo "$(LAST_TAG)" | sed -E 's/^v[0-9]+\.[0-9]+\.([0-9]+)/\1/')

# Print the setuptools_scm-inferred version; never fails the build.
version:
	@$(PYTHON) -m setuptools_scm || true

# Generate release notes since last tag.
# (Recursive define: git log re-runs on each expansion, always using the
# current LAST_TAG..HEAD range.)
define CHANGELOG
$(shell git log $(LAST_TAG)..HEAD --pretty=format:"- %s (%h)" --no-merges)
endef

# Print the commit log since the last tag to stdout.
changelog:
	@echo "Changes since $(LAST_TAG):"
	@echo "$(CHANGELOG)"

# Regenerate docs/CHANGELOG.md from Git history since the last tag.
changelog-md:
	@mkdir -p docs
	@echo "Writing docs/CHANGELOG.md ..."
	@printf "# Changelog\n\n## Since %s\n\n%s\n" "$(LAST_TAG)" "$(CHANGELOG)" > docs/CHANGELOG.md
	@echo "✅ docs/CHANGELOG.md updated"

# Diagnostic: show interpreter path, scm version, installed version, last tag.
release-show: fetch-tags
	@echo "python exe:"; $(PYTHON) -c "import sys; print(sys.executable)"
	@echo "setuptools_scm version:"; $(PYTHON) -m setuptools_scm || echo "(unavailable)"
	@echo "installed dist version:"; $(PYTHON) -c "import importlib.metadata as m; print(m.version('sfdump'))" || echo "(package not installed)"
	@echo "Last Git tag: $(LAST_TAG)"

# Safety check: ensure clean working tree and synced branch before tagging
# Uncommitted changes are auto-stashed and flagged for release-unstash.
# NOTE(review): if the upstream-sync check fails AFTER stashing, the stash is
# left in place until `make release-unstash` (or a manual `git stash pop`) —
# confirm this is the intended flow.
check-clean:
	@if ! git diff --quiet || ! git diff --cached --quiet; then \
		echo "⏳ Stashing uncommitted changes for release..."; \
		git stash push -m "release-auto-stash" || true; \
		echo "RELEASE_STASHED=1" > .release-stash-flag; \
	fi
	@if [ "$$(git rev-parse @ 2>/dev/null)" != "$$(git rev-parse @{u} 2>/dev/null)" ]; then \
		echo "❌ Local branch not in sync with upstream (push/pull first)."; \
		exit 1; \
	fi

# Restore auto-stashed changes after release
release-unstash:
	@if [ -f .release-stash-flag ]; then \
		rm -f .release-stash-flag; \
		echo "⏳ Restoring stashed changes..."; \
		git stash pop || echo "⚠️  Could not auto-pop stash. Run 'git stash pop' manually."; \
	fi

# A literal newline character (for multi-line tag messages etc.).
# BUGFIX: $(shell ...) strips trailing newlines, so the old
# `NL := $(shell printf "\n")` was always EMPTY.  The canonical idiom is a
# define whose body is a single blank line.
# NOTE(review): NL is not referenced anywhere in this file — confirm it is
# still wanted at all.
define NL


endef

# Distribution ZIP directory
DIST_DIR := dist
# NOTE(review): ZIP_NAME is never used via $(call ...); the recipes below build
# the archive name inline.  Kept for backward compatibility.
ZIP_NAME = sfdump-$(1).zip

# Create a clean distribution ZIP for releases
# Usage: make release-zip VERSION=v2.1.1
# Uses `git archive`, so only committed files are included (no build/dev junk).
release-zip:
	@if [ -z "$(VERSION)" ]; then \
	  echo "ERROR: VERSION required. Usage: make release-zip VERSION=v2.1.1"; \
	  exit 1; \
	fi
	@echo "=== Creating distribution ZIP for $(VERSION) ==="
	mkdir -p $(DIST_DIR)
	@# Create a clean ZIP excluding dev/build artifacts
	git archive --format=zip --prefix=sfdump/ -o $(DIST_DIR)/sfdump-$(VERSION).zip HEAD
	@echo "✅ Created $(DIST_DIR)/sfdump-$(VERSION).zip"

# Build a wheel with the version baked in (no setuptools_scm/git needed at install time)
# Usage: make release-wheel VERSION=v2.1.1
release-wheel:
	@if [ -z "$(VERSION)" ]; then \
	  echo "ERROR: VERSION required. Usage: make release-wheel VERSION=v2.1.1"; \
	  exit 1; \
	fi
	@echo "=== Building wheel for $(VERSION) ==="
	rm -f $(DIST_DIR)/sfdump-*.whl
	$(PYTHON) -m pip install -U build
	$(PYTHON) -m build --wheel
	@echo "✅ Wheel built in $(DIST_DIR)/"

# Create GitHub Release with wheel + ZIP attached (requires gh CLI)
# Usage: make gh-release VERSION=v2.1.1
# Builds the ZIP and wheel via sub-makes, writes release notes from Git
# history to a temp file, then publishes with `gh release create`.
gh-release:
	@if [ -z "$(VERSION)" ]; then \
	  echo "ERROR: VERSION required. Usage: make gh-release VERSION=v2.1.1"; \
	  exit 1; \
	fi
	@if ! command -v gh >/dev/null 2>&1; then \
	  echo "ERROR: GitHub CLI (gh) not installed. Install from: https://cli.github.com/"; \
	  exit 1; \
	fi
	@echo "=== Creating GitHub Release $(VERSION) ==="
	$(MAKE) release-zip VERSION=$(VERSION)
	$(MAKE) release-wheel VERSION=$(VERSION)
	@echo "=== Uploading to GitHub ==="
	@# Find the wheel file (standard naming: sfdump-{ver}-py3-none-any.whl)
	_whl=$$(ls $(DIST_DIR)/sfdump-*.whl 2>/dev/null | head -n1) && \
	  if [ -z "$$_whl" ]; then echo "ERROR: No wheel found in $(DIST_DIR)/"; exit 1; fi && \
	  _notes=$$(mktemp) && \
	  git log $(LAST_TAG)..HEAD --pretty=format:"- %s (%h)" --no-merges > "$$_notes" && \
	  gh release create $(VERSION) \
	    "$$_whl" \
	    $(DIST_DIR)/sfdump-$(VERSION).zip \
	    --title "sfdump $(VERSION)" \
	    --notes-file "$$_notes" \
	    --latest && \
	  rm -f "$$_notes"
	@echo "✅ GitHub Release $(VERSION) created with wheel + ZIP attached"

# Tag the next patch version (vX.Y.Z+1) and publish a GitHub Release.
# NOTE: the three release-* recipes below rely on .ONESHELL — the shell
# variable NEW set on the first line is reused by the later lines of the
# same recipe (without .ONESHELL each line would run in its own shell).
release-patch: fetch-tags check-clean
	NEW=v$(MAJOR).$(MINOR).$$(($$(printf '%d' $(PATCH)) + 1))
	_msg=$$(mktemp) && \
	  { printf 'release: %s\n\n' "$$NEW"; git log $(LAST_TAG)..HEAD --pretty=format:"- %s (%h)" --no-merges; } > "$$_msg" && \
	  git tag -a "$$NEW" -F "$$_msg" && rm -f "$$_msg"
	git push origin "$$NEW"
	@echo "Tagged $$NEW"
	$(MAKE) gh-release VERSION=$$NEW
	$(MAKE) release-unstash

# Tag the next minor version (vX.Y+1.0) and publish a GitHub Release.
release-minor: fetch-tags check-clean
	NEW=v$(MAJOR).$$(($$(printf '%d' $(MINOR)) + 1)).0
	_msg=$$(mktemp) && \
	  { printf 'release: %s\n\n' "$$NEW"; git log $(LAST_TAG)..HEAD --pretty=format:"- %s (%h)" --no-merges; } > "$$_msg" && \
	  git tag -a "$$NEW" -F "$$_msg" && rm -f "$$_msg"
	git push origin "$$NEW"
	@echo "Tagged $$NEW"
	$(MAKE) gh-release VERSION=$$NEW
	$(MAKE) release-unstash

# Tag the next major version (vX+1.0.0) and publish a GitHub Release.
release-major: fetch-tags check-clean
	NEW=v$$(($$(printf '%d' $(MAJOR)) + 1)).0.0
	_msg=$$(mktemp) && \
	  { printf 'release: %s\n\n' "$$NEW"; git log $(LAST_TAG)..HEAD --pretty=format:"- %s (%h)" --no-merges; } > "$$_msg" && \
	  git tag -a "$$NEW" -F "$$_msg" && rm -f "$$_msg"
	git push origin "$$NEW"
	@echo "Tagged $$NEW"
	$(MAKE) gh-release VERSION=$$NEW
	$(MAKE) release-unstash

# Meta-release: run tests, show changelog, tag, GitHub Release, build wheel, upload to PyPI
# Creates git tag AND GitHub Release with downloadable ZIP + wheel, then uploads to PyPI
# KIND selects the bump (patch|minor|major); unknown values abort before tagging.
release:
	@echo "=== Running full test suite before release ==="
	$(MAKE) test-all
	@echo "=== Changelog (from $(LAST_TAG) to HEAD) ==="
	$(MAKE) changelog
	@echo "=== Performing $(KIND) release ==="
	@if [ "$(KIND)" = "patch" ]; then \
	  $(MAKE) release-patch; \
	elif [ "$(KIND)" = "minor" ]; then \
	  $(MAKE) release-minor; \
	elif [ "$(KIND)" = "major" ]; then \
	  $(MAKE) release-major; \
	else \
	  echo "Unknown KIND=$(KIND). Use: patch | minor | major"; \
	  exit 1; \
	fi
	@echo "=== Building and uploading to PyPI ==="
	$(MAKE) upload
	rm -rf $(DIST_DIR)

# -----------------------------------------------------------------------------#
# CLI convenience
# Directory handed to the CLI (default: repo root)
ROOT ?= .
# Extra CLI arguments, e.g. make run-cli CLI_ARGS="--verbose"
CLI_ARGS ?=
# Run the installed `sfdump` console entry point against $(ROOT).
run-cli:
	sfdump $(ROOT) $(CLI_ARGS)

# -----------------------------------------------------------------------------#
# Salesforce Export
# Delegate the full export pipeline to the dedicated export makefile.
sf-export:
	$(MAKE) -f Makefile.export export-all


# -----------------------------------------------------------------------------#
# Remove every generated artifact in one pass: packaging output, coverage
# data, pytest caches, test stamps, and compiled-bytecode directories.
clean:
	rm -rf build dist .eggs *.egg-info .coverage htmlcov .pytest_cache coverage.xml $(STAMPS_DIR) $(DIST_DIR)
	find . -type d -name "__pycache__" -prune -exec rm -rf {} +

# Delete the project virtualenv; recreate it with `make bootstrap`.
clean-venv:
	rm -rf $(VENV_DIR)
	@echo "✅ Virtual environment removed. Run 'make bootstrap' to recreate."

# Delete the locally-installed Miniconda tree.
clean-miniconda:
	rm -rf $(MINICONDA_DIR)
	@echo "✅ Local Miniconda removed."
