Fix logo: use a transparent background
This commit is contained in:
parent
1d2dc54964
commit
b9e6c582e9
3 changed files with 1 additions and 323 deletions
|
|
@ -1,5 +1,5 @@
|
|||
<p align="center">
|
||||
<img src="images/dockervault_logo.png" width="600">
|
||||
<img src="./images/dockervault-logo.png" width="600">
|
||||
</p>
|
||||
|
||||
# DockerVault
|
||||
|
|
|
|||
|
|
@ -1,322 +0,0 @@
|
|||
import argparse
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from . import __version__
|
||||
|
||||
|
||||
def entry_to_dict(entry):
    """Normalize a mount entry (dict or attribute object) into a plain dict.

    Accepts either a mapping or any object exposing the known entry
    attributes, and returns a dict with a fixed key set; path-like and
    compose-file values are stringified, absent values become None.
    """
    _FIELDS = (
        "path",
        "source",
        "source_path",
        "host_path",
        "mount_source",
        "classification",
        "service",
        "target",
        "exists",
        "reason",
        "compose_file",
    )

    if isinstance(entry, dict):
        data = dict(entry)
    else:
        # Object form: copy only the attributes the entry actually has.
        data = {name: getattr(entry, name) for name in _FIELDS if hasattr(entry, name)}

    # First truthy path-like field wins; if none is truthy, fall back to the
    # last candidate's value (mirrors a plain `a or b or ... or e` chain,
    # which yields the final operand even when it is falsy, e.g. "").
    _PATH_KEYS = ("path", "source", "source_path", "host_path", "mount_source")
    path_value = next(
        (data[key] for key in _PATH_KEYS if data.get(key)),
        data.get("mount_source"),
    )
    if path_value is not None:
        path_value = str(path_value)

    compose_value = data.get("compose_file")
    if compose_value is not None:
        compose_value = str(compose_value)

    raw_source = data.get("source")

    return {
        "path": path_value,
        "classification": data.get("classification"),
        "service": data.get("service"),
        "target": data.get("target"),
        "exists": data.get("exists"),
        "reason": data.get("reason"),
        "compose_file": compose_value,
        "source": str(raw_source) if raw_source is not None else None,
    }
|
||||
|
||||
|
||||
def normalize_entries(entries):
    """Convert raw classifier entries into uniform plan rows.

    Each row carries a ``bucket`` derived from its classification:
    ``critical`` -> include, ``skip`` -> skip, anything else -> review.
    """
    # Dispatch table replaces the if/elif chain; unknown classes -> review.
    _BUCKET_FOR = {"critical": "include", "skip": "skip"}

    rows = []
    for raw in entries:
        item = entry_to_dict(raw)
        cls = item.get("classification", "review")
        rows.append(
            {
                "path": item.get("path") or "?",
                "class": cls,
                "bucket": _BUCKET_FOR.get(cls, "review"),
                "service": item.get("service", "?"),
                "target": item.get("target", "?"),
                "exists": item.get("exists", True),
                "reason": item.get("reason"),
                "compose_file": item.get("compose_file"),
                "source": item.get("source"),
            }
        )
    return rows
|
||||
|
||||
|
||||
def build_plan(scan_root, entries, compose_count):
    """Build the backup plan dict from classified mount entries.

    Args:
        scan_root: root path that was scanned (stringified into the plan).
        entries: raw entries from the classifier.
        compose_count: number of compose files discovered.

    Returns:
        Plan dict with ``include``/``review``/``skip`` row lists and a
        ``summary`` of counts, including how many critical paths are missing.
    """
    normalized = normalize_entries(entries)

    # Partition once instead of filtering/summing the full list six times.
    buckets = {"include": [], "review": [], "skip": []}
    for row in normalized:
        buckets[row["bucket"]].append(row)

    include = buckets["include"]
    review = buckets["review"]
    skip = buckets["skip"]

    return {
        "root": str(scan_root),
        "compose_files_found": compose_count,
        "include": include,
        "review": review,
        "skip": skip,
        "summary": {
            "include_count": len(include),
            "review_count": len(review),
            "skip_count": len(skip),
            "missing_critical_count": sum(
                1 for row in include if not row.get("exists", True)
            ),
        },
    }
|
||||
|
||||
|
||||
def print_plan(plan):
    """Pretty-print the backup plan to stdout.

    Prints a header with the scan root and compose-file count, then the
    INCLUDE / REVIEW / SKIP buckets in that order.
    """
    print("\nDockerVault Backup Plan")
    print("=======================")
    print(f"Scan root: {plan['root']}")
    print(f"Compose files found: {plan.get('compose_files_found', 0)}")

    # The three sections were byte-identical copies; share one helper.
    _print_section("INCLUDE PATHS", plan["include"])
    _print_section("REVIEW PATHS", plan["review"])
    _print_section("SKIP PATHS", plan["skip"])


def _print_section(title, items):
    """Print one plan bucket; shows '(none)' when the bucket is empty."""
    print(f"\n{title}:")
    if not items:
        print(" - (none)")
        return
    for item in items:
        # reason is optional; only render it when present and truthy.
        extra = f" reason={item['reason']}" if item.get("reason") else ""
        print(
            f" - {item['path']} [{item['class']}] "
            f"service={item['service']} target={item['target']}{extra}"
        )
|
||||
|
||||
|
||||
def print_warnings(plan):
    """Warn on stdout about critical include paths that do not exist."""
    missing = [row for row in plan["include"] if not row.get("exists", True)]
    if not missing:
        # Nothing to report; stay silent.
        return
    print("\nWARNING: Missing critical paths detected")
    for row in missing:
        print(f" - {row['path']} (service={row['service']})")
|
||||
|
||||
|
||||
def build_json_output(plan, borg_command=None):
    """Assemble the machine-readable payload for --json mode.

    Args:
        plan: plan dict from build_plan().
        borg_command: optional shell string; the ``borg_command`` key is
            only present when this is not None.

    Returns:
        Dict ready for json.dumps.
    """
    has_missing = any(not row.get("exists", True) for row in plan["include"])

    payload = {
        "tool": "DockerVault",
        "version": __version__,
        "plan": plan,
        "status": {
            "ok": True,
            "has_missing_critical": has_missing,
        },
    }

    if borg_command is not None:
        payload["borg_command"] = borg_command

    return payload
|
||||
|
||||
|
||||
def build_parser():
    """Construct the command-line parser for the DockerVault CLI."""
    parser = argparse.ArgumentParser(
        description="DockerVault - Intelligent Docker backup discovery"
    )

    parser.add_argument(
        "--version",
        action="version",
        version=f"DockerVault {__version__}",
    )

    # Optional here so `--version` works without a path; main() enforces it.
    parser.add_argument(
        "path",
        nargs="?",
        help="Path to scan (folder or docker-compose.yml)",
    )

    parser.add_argument("--repo", help="Borg repository path")

    # Plain boolean switches all share the same shape; add them from a table.
    for flag, help_text in (
        ("--borg", "Generate borg command"),
        ("--json", "Output machine-readable JSON"),
        ("--dry-run", "Show what would be done without executing"),
        ("--quiet", "Minimal output"),
        ("--automation", "Automation-friendly mode"),
    ):
        parser.add_argument(flag, action="store_true", help=help_text)

    parser.add_argument(
        "--exclude",
        action="append",
        default=[],
        help="Exclude directory name or path fragment during discovery (repeatable)",
    )

    return parser
|
||||
|
||||
|
||||
def main():
    """CLI entry point: scan, build the plan, and report or emit JSON.

    Exit codes:
        0 - success
        1 - --automation mode with at least one missing critical path
        2 - bad path, import failure, or discovery/borg error
    """
    parser = build_parser()
    args = parser.parse_args()

    # `path` is declared optional so `--version` works alone, but a real
    # run requires it.
    if not args.path:
        parser.error("the following arguments are required: path")

    scan_root = Path(args.path)

    if not scan_root.exists():
        # Errors go to stderr so they can never corrupt --json output
        # (previously they were printed to stdout).
        print(f"ERROR: Path does not exist: {scan_root}", file=sys.stderr)
        sys.exit(2)

    # Imported lazily so an import failure yields a clean CLI error.
    try:
        from .discovery import discover_compose_files
        from .classifier import classify_compose
    except ImportError as e:
        print(f"ERROR: Discovery/classifier import problem: {e}", file=sys.stderr)
        sys.exit(2)

    entries = []
    compose_files = []

    try:
        if scan_root.is_file():
            # A single compose file was passed directly.
            compose_files = [scan_root]
        else:
            compose_files = discover_compose_files(
                scan_root,
                excludes=args.exclude,
            )

        for compose_file in compose_files:
            entries.extend(classify_compose(compose_file))
    except Exception as e:
        print(f"ERROR: Failed during discovery/classification: {e}", file=sys.stderr)
        sys.exit(2)

    plan = build_plan(scan_root, entries, len(compose_files))

    borg_shell_command = None
    if args.repo:
        try:
            from .borg import build_borg_create_command, command_to_shell
        except ImportError as e:
            print(f"ERROR: Borg import problem: {e}", file=sys.stderr)
            sys.exit(2)

        try:
            # Rows with an unknown path ("?") cannot be backed up.
            include_paths = [
                item["path"] for item in plan["include"] if item["path"] != "?"
            ]
            borg_command = build_borg_create_command(args.repo, include_paths)
            borg_shell_command = command_to_shell(borg_command)
        except Exception as e:
            print(f"ERROR: Failed to build borg command: {e}", file=sys.stderr)
            sys.exit(2)

    if args.json:
        print(
            json.dumps(
                build_json_output(plan, borg_shell_command),
                indent=2,
                sort_keys=False,
            )
        )
    else:
        if not args.quiet:
            print_plan(plan)
            print_warnings(plan)

        if borg_shell_command:
            print("\nSuggested borg create command")
            print("============================")
            print(borg_shell_command)

    if args.automation:
        # Automation mode signals missing critical paths via exit code 1.
        has_missing = any(not item.get("exists", True) for item in plan["include"])
        if has_missing:
            sys.exit(1)

    sys.exit(0)
|
||||
|
||||
|
||||
# Allow running the module directly as a script; main() handles sys.exit.
if __name__ == "__main__":
    main()
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 1.1 MiB After Width: | Height: | Size: 906 KiB |
Loading…
Add table
Add a link
Reference in a new issue