Coverage for opt/mealie/lib/python3.12/site-packages/mealie/services/backups_v2/backup_v2.py: 21%
65 statements
coverage.py v7.10.6, created at 2025-11-25 15:48 +0000
import datetime
import json
import shutil
from pathlib import Path
from zipfile import ZipFile

from mealie.services._base_service import BaseService
from mealie.services.backups_v2.alchemy_exporter import AlchemyExporter
from mealie.services.backups_v2.backup_file import BackupFile


class BackupSchemaMismatch(Exception): ...


class BackupV2(BaseService):
    def __init__(self, db_url: str | None = None) -> None:
        super().__init__()

        # type - one of these has to be a string
        self.db_url: str = db_url or self.settings.DB_URL  # type: ignore

        self.db_exporter = AlchemyExporter(self.db_url)

    def _sqlite(self) -> None:
        db_file = self.settings.DB_URL.removeprefix("sqlite:///")  # type: ignore

        # Create a backup of the SQLite database
        timestamp = datetime.datetime.now(datetime.UTC).strftime("%Y.%m.%d")
        shutil.copy(db_file, self.directories.DATA_DIR.joinpath(f"mealie_{timestamp}.bak.db"))

    def _postgres(self) -> None:
        pass

    def backup(self) -> Path:
        # sourcery skip: merge-nested-ifs, reintroduce-else, remove-redundant-continue
        exclude = {"mealie.db", "mealie.log", ".secret"}
        exclude_ext = {".zip"}
        exclude_dirs = {"backups", ".temp"}

        timestamp = datetime.datetime.now(datetime.UTC).strftime("%Y.%m.%d.%H.%M.%S")

        backup_name = f"mealie_{timestamp}.zip"
        backup_file = self.directories.BACKUP_DIR / backup_name

        database_json = self.db_exporter.dump()

        with ZipFile(backup_file, "w") as zip_file:
            zip_file.writestr("database.json", json.dumps(database_json))

            for data_file in self.directories.DATA_DIR.glob("**/*"):
                if data_file.name in exclude:
                    continue

                if data_file.is_file() and data_file.suffix not in exclude_ext:
                    if data_file.parent.name in exclude_dirs:
                        continue

                    zip_file.write(data_file, f"data/{data_file.relative_to(self.directories.DATA_DIR)}")

        return backup_file

    def _copy_data(self, data_path: Path) -> None:
        for f in data_path.iterdir():
            if f.is_file():
                continue

            shutil.rmtree(self.directories.DATA_DIR / f.name)
            shutil.copytree(f, self.directories.DATA_DIR / f.name)

    def restore(self, backup_path: Path) -> None:
        self.logger.info("initializing backup restore")

        backup = BackupFile(backup_path)

        if self.settings.DB_ENGINE == "sqlite":
            self._sqlite()
        elif self.settings.DB_ENGINE == "postgres":
            self._postgres()

        with backup as contents:
            # ================================
            # Validation
            if not contents.validate():
                self.logger.error(
                    "Invalid backup file. file does not contain required elements (data directory and database.json)"
                )
                raise ValueError("Invalid backup file")

            database_json = contents.read_tables()

            # ================================
            # Purge Database

            self.logger.info("dropping all database tables")
            self.db_exporter.drop_all()

            # ================================
            # Restore Database

            self.logger.info("importing database tables")
            self.db_exporter.restore(database_json)

            self.logger.info("database tables imported successfully")

            self.logger.info("restoring data directory")
            self._copy_data(contents.data_directory)
            self.logger.info("data directory restored successfully")

        self.logger.info("backup restore complete")