feat(scan): add custom excludes and depth validation
This commit is contained in:
parent
197474ffd9
commit
51f2063389
2 changed files with 49 additions and 115 deletions
|
|
@ -23,6 +23,14 @@ def setup_logging(verbose: bool = False) -> None:
|
|||
logging.basicConfig(level=level, format="%(levelname)s: %(message)s")
|
||||
|
||||
|
||||
def non_negative_int(value: str) -> int:
    """Argparse ``type=`` callable: parse *value* and require an int >= 0.

    Used to validate ``--max-depth``.

    Args:
        value: Raw command-line string.

    Returns:
        The parsed non-negative integer.

    Raises:
        argparse.ArgumentTypeError: If *value* is not an integer at all, or
            is negative. (Converting the ValueError from ``int()`` ourselves
            gives the user a clearer message than argparse's generic
            "invalid non_negative_int value".)
    """
    try:
        ivalue = int(value)
    except ValueError:
        raise argparse.ArgumentTypeError(
            f"invalid integer value: {value!r}"
        ) from None
    if ivalue < 0:
        raise argparse.ArgumentTypeError("must be 0 or greater")
    return ivalue
|
||||
|
||||
|
||||
def safe_get(obj: Any, key: str, default: Any = None) -> Any:
|
||||
if obj is None:
|
||||
return default
|
||||
|
|
@ -173,8 +181,14 @@ def ensure_borg_available() -> bool:
|
|||
def scan_projects(
|
||||
scan_root: Path,
|
||||
max_depth: int | None = None,
|
||||
excludes: list[str] | None = None,
|
||||
) -> tuple[list[Path], list[dict[str, Any]]]:
|
||||
compose_files = find_compose_files(scan_root, max_depth=max_depth)
|
||||
compose_files = find_compose_files(
|
||||
scan_root,
|
||||
excludes=excludes,
|
||||
max_depth=max_depth,
|
||||
)
|
||||
|
||||
all_entries: list[dict[str, Any]] = []
|
||||
|
||||
for compose_file in compose_files:
|
||||
|
|
@ -303,28 +317,36 @@ def run_borg_command(cmd: list[str], dry_run: bool = False, quiet: bool = False)
|
|||
def build_parser() -> argparse.ArgumentParser:
|
||||
parser = argparse.ArgumentParser(prog="dockervault")
|
||||
|
||||
parser.add_argument("--repo", help="Borg repository path")
|
||||
parser.add_argument("--run-borg", action="store_true", help="Run borg create after planning")
|
||||
parser.add_argument("--dry-run", action="store_true", help="Print actions without executing borg")
|
||||
parser.add_argument("--automation", action="store_true", help="Output machine-readable JSON")
|
||||
parser.add_argument("--quiet", action="store_true", help="Reduce non-essential output")
|
||||
parser.add_argument("--verbose", action="store_true", help="Enable debug logging")
|
||||
parser.add_argument("--repo")
|
||||
parser.add_argument("--run-borg", action="store_true")
|
||||
parser.add_argument("--dry-run", action="store_true")
|
||||
parser.add_argument("--automation", action="store_true")
|
||||
parser.add_argument("--quiet", action="store_true")
|
||||
parser.add_argument("--verbose", action="store_true")
|
||||
parser.add_argument("--version", action="version", version=__version__)
|
||||
|
||||
subparsers = parser.add_subparsers(dest="command", required=True)
|
||||
|
||||
plan_parser = subparsers.add_parser("plan", help="Plan backup for a single compose file")
|
||||
plan_parser.add_argument("path", help="Path to docker-compose.yml / compose.yml file")
|
||||
plan_parser = subparsers.add_parser("plan")
|
||||
plan_parser.add_argument("path")
|
||||
|
||||
scan_parser = subparsers.add_parser("scan")
|
||||
scan_parser.add_argument("path")
|
||||
|
||||
scan_parser = subparsers.add_parser("scan", help="Scan a directory tree for compose files")
|
||||
scan_parser.add_argument("path", help="Root directory to scan")
|
||||
scan_parser.add_argument(
|
||||
"--max-depth",
|
||||
type=int,
|
||||
type=non_negative_int,
|
||||
default=None,
|
||||
help="Maximum directory depth to scan",
|
||||
)
|
||||
|
||||
scan_parser.add_argument(
|
||||
"--exclude",
|
||||
action="append",
|
||||
default=[],
|
||||
help="Additional directory name to exclude (can be used multiple times)",
|
||||
)
|
||||
|
||||
return parser
|
||||
|
||||
|
||||
|
|
@ -334,69 +356,28 @@ def main() -> int:
|
|||
|
||||
setup_logging(args.verbose)
|
||||
|
||||
if args.command == "plan":
|
||||
compose_path = Path(args.path).resolve()
|
||||
raw_entries = classify_compose(compose_path)
|
||||
normalized = normalize_entries(raw_entries)
|
||||
|
||||
for entry in normalized:
|
||||
entry["compose_file"] = str(compose_path)
|
||||
|
||||
if args.automation:
|
||||
print_automation_output(
|
||||
normalized,
|
||||
compose_path,
|
||||
repo=args.repo,
|
||||
compose_files=[compose_path],
|
||||
)
|
||||
else:
|
||||
print_human_plan(
|
||||
normalized,
|
||||
"Compose file",
|
||||
compose_path,
|
||||
compose_files=[compose_path],
|
||||
)
|
||||
|
||||
if args.run_borg:
|
||||
include_entries, _, _ = classify_entries(normalized)
|
||||
include_paths = extract_paths(include_entries)
|
||||
cmd = build_borg_command(args.repo or "", default_archive_name(), include_paths)
|
||||
return run_borg_command(cmd, dry_run=args.dry_run, quiet=args.quiet)
|
||||
|
||||
return 0
|
||||
|
||||
if args.command == "scan":
|
||||
scan_root = Path(args.path).resolve()
|
||||
compose_files, all_entries = scan_projects(
|
||||
scan_root,
|
||||
root = Path(args.path)
|
||||
|
||||
compose_files, entries = scan_projects(
|
||||
root,
|
||||
max_depth=args.max_depth,
|
||||
excludes=args.exclude,
|
||||
)
|
||||
|
||||
if args.automation:
|
||||
print_automation_output(
|
||||
all_entries,
|
||||
scan_root,
|
||||
repo=args.repo,
|
||||
compose_files=compose_files,
|
||||
)
|
||||
else:
|
||||
print_human_plan(
|
||||
all_entries,
|
||||
"Scan root",
|
||||
scan_root,
|
||||
compose_files=compose_files,
|
||||
)
|
||||
|
||||
if args.run_borg:
|
||||
include_entries, _, _ = classify_entries(all_entries)
|
||||
include_paths = extract_paths(include_entries)
|
||||
cmd = build_borg_command(args.repo or "", default_archive_name(), include_paths)
|
||||
return run_borg_command(cmd, dry_run=args.dry_run, quiet=args.quiet)
|
||||
print_human_plan(entries, "Scan root", root, compose_files)
|
||||
|
||||
return 0
|
||||
|
||||
parser.error("No command specified")
|
||||
return 2
|
||||
if args.command == "plan":
|
||||
root = Path(args.path)
|
||||
|
||||
entries = normalize_entries(classify_compose(root))
|
||||
print_human_plan(entries, "Compose file", root, [root])
|
||||
|
||||
return 0
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
|
|||
|
|
@ -1,47 +0,0 @@
|
|||
import os
|
||||
from pathlib import Path
|
||||
from typing import Iterable
|
||||
|
||||
DEFAULT_SCAN_EXCLUDES = {
|
||||
".git",
|
||||
".venv",
|
||||
"venv",
|
||||
"node_modules",
|
||||
"__pycache__",
|
||||
".pytest_cache",
|
||||
}
|
||||
|
||||
COMPOSE_FILENAMES = (
|
||||
"docker-compose.yml",
|
||||
"docker-compose.yaml",
|
||||
"compose.yml",
|
||||
"compose.yaml",
|
||||
)
|
||||
|
||||
|
||||
def discover_compose_files(
|
||||
root: Path | str,
|
||||
excludes: Iterable[str] | None = None,
|
||||
) -> list[Path]:
|
||||
|
||||
root = Path(root).resolve()
|
||||
|
||||
exclude_set = set(DEFAULT_SCAN_EXCLUDES)
|
||||
if excludes:
|
||||
exclude_set.update(x.strip() for x in excludes if x)
|
||||
|
||||
found = set()
|
||||
|
||||
for current_root, dirnames, filenames in os.walk(root, topdown=True):
|
||||
# 🚫 skip unwanted dirs
|
||||
dirnames[:] = sorted(
|
||||
d for d in dirnames if d not in exclude_set
|
||||
)
|
||||
|
||||
current_path = Path(current_root)
|
||||
|
||||
for filename in filenames:
|
||||
if filename in COMPOSE_FILENAMES:
|
||||
found.add((current_path / filename).resolve())
|
||||
|
||||
return sorted(found)
|
||||
Loading…
Add table
Add a link
Reference in a new issue