mirror of
https://github.com/freedombox/FreedomBox.git
synced 2026-03-04 08:53:42 +00:00
backups: Use privileged decorator for backup actions
Tests:
- DONE: Functional tests work
- DONE: Initial setup works
- DONE: Borg repository is created at /var/lib/freedombox/borgbackup
- DONE: With regular and with encrypted repository
- DONE: Creating a repository works
- DONE: Getting information works. When adding an existing location, incorrect
  password leads to error in the add form.
- DONE: Listing archives works
- DONE: Creating/restoring an archive works
- DONE: Backup manifest is created in /var/lib/plinth/backups-manifests/
- DONE: Including an app that dumps/restores its settings works
- DONE: Exporting an archive as tar works
- DONE: Exporting a large archive yields reasonable download speeds. 31
MB/s. 1GB file in about 30 seconds.
- DONE: Restoring from an uploaded archive works
- DONE: Listing the apps inside an archive works before restore
- DONE: Errors during operations are re-raised as simpler errors
- DONE: Get info
- DONE: List archives
- DONE: Delete archive (not handled)
- FAIL: Export tar
- DONE: Init repo
- DONE: Get archive apps (not handled)
Signed-off-by: Sunil Mohan Adapa <sunil@medhas.org>
Reviewed-by: James Valleroy <jvalleroy@mailbox.org>
This commit is contained in:
parent
222563a482
commit
9a4905e832
349
actions/backups
349
actions/backups
@ -1,349 +0,0 @@
|
||||
#!/usr/bin/python3
|
||||
# -*- mode: python -*-
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""
|
||||
Wrapper to handle backups using borg-backups.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import tarfile
|
||||
|
||||
from plinth.modules.backups import MANIFESTS_FOLDER
|
||||
from plinth.utils import Version
|
||||
|
||||
TIMEOUT = 30
|
||||
BACKUPS_DATA_PATH = pathlib.Path('/var/lib/plinth/backups-data/')
|
||||
|
||||
|
||||
def parse_arguments():
    """Parse and return the command line arguments."""
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest='subcommand', help='Sub command')

    def add_parser(name, help_text):
        """Shorthand for registering a subcommand parser."""
        return subparsers.add_parser(name, help=help_text)

    setup_parser = add_parser(
        'setup', 'Create repository if it does not already exist')

    init_parser = add_parser('init', 'Initialize a repository')
    init_parser.add_argument('--encryption',
                             help='Encryption of the repository',
                             required=True)

    info_parser = add_parser('info', 'Show repository information')

    list_repo_parser = add_parser('list-repo', 'List repository contents')

    create_parser = add_parser('create-archive', 'Create archive')
    create_parser.add_argument('--paths', help='Paths to include in archive',
                               nargs='+')
    create_parser.add_argument('--comment',
                               help='Comment text to add to archive',
                               default='')

    delete_parser = add_parser('delete-archive', 'Delete archive')

    export_parser = add_parser('export-tar',
                               'Export archive contents as tar on stdout')

    archive_apps_parser = add_parser(
        'get-archive-apps', 'Get list of apps included in archive')

    restore_parser = add_parser('restore-archive',
                                'Restore files from an archive')
    restore_parser.add_argument('--destination', help='Destination',
                                required=True)

    # Arguments common to every subcommand that talks to borg.
    for sub_parser in (info_parser, init_parser, list_repo_parser,
                       create_parser, delete_parser, export_parser,
                       archive_apps_parser, restore_parser, setup_parser):
        sub_parser.add_argument('--path', help='Repository or Archive path',
                                required=False)
        sub_parser.add_argument('--ssh-keyfile',
                                help='Path of private ssh key', default=None)

    exported_apps_parser = add_parser(
        'get-exported-archive-apps',
        'Get list of apps included in exported archive file')
    exported_apps_parser.add_argument('--path', help='Tarball file path',
                                      required=True)

    restore_exported_parser = add_parser(
        'restore-exported-archive', 'Restore files from an exported archive')
    restore_exported_parser.add_argument('--path', help='Tarball file path',
                                         required=True)

    dump_parser = add_parser('dump-settings', 'Dump JSON settings to a file')
    dump_parser.add_argument('--app-id',
                             help='ID of the app to dump settings for')

    load_parser = add_parser('load-settings',
                             'Load JSON settings from a file')
    load_parser.add_argument('--app-id',
                             help='ID of the app to load settings for')

    subparsers.required = True
    return parser.parse_args()
||||
|
||||
|
||||
def subcommand_setup(arguments):
    """Ensure that the borg repository exists.

    When 'borg info' fails, assume the repository is missing and create an
    unencrypted one, along with any missing parent directories.
    """
    try:
        run(['borg', 'info', arguments.path], arguments, check=True)
    except subprocess.CalledProcessError:
        parent_directory = os.path.dirname(arguments.path)
        if not os.path.exists(parent_directory):
            os.makedirs(parent_directory)

        init_repository(arguments, encryption='none')
||||
|
||||
|
||||
def init_repository(arguments, encryption):
    """Create a new borg repository, local or remote.

    An encrypted repository requires a passphrase supplied over stdin.
    """
    if encryption != 'none' and not _read_encryption_passphrase(arguments):
        raise ValueError('No encryption passphrase provided')

    run(['borg', 'init', '--encryption', encryption, arguments.path],
        arguments)
||||
|
||||
|
||||
def subcommand_init(arguments):
    """Handle the 'init' subcommand: create the borg repository."""
    init_repository(arguments, encryption=arguments.encryption)
||||
|
||||
|
||||
def subcommand_info(arguments):
    """Print borg repository information as JSON on stdout."""
    command = ['borg', 'info', '--json', arguments.path]
    run(command, arguments)
||||
|
||||
|
||||
def subcommand_list_repo(arguments):
    """Print the repository's archive list as JSON on stdout."""
    command = [
        'borg', 'list', '--json', '--format="{comment}"', arguments.path
    ]
    run(command, arguments)
||||
|
||||
|
||||
def _get_borg_version(arguments):
    """Return the version string of the installed borgbackup.

    Parses the output of 'borg --version', e.g. "borg 1.1.9" -> "1.1.9".
    The parameter was previously misspelled 'arugments'; it is only ever
    passed positionally, so the rename is safe for callers.
    """
    process = run(['borg', '--version'], arguments, stdout=subprocess.PIPE)
    return process.stdout.decode().split()[1]  # Example: "borg 1.1.9"
||||
|
||||
|
||||
def subcommand_create_archive(arguments):
    """Create a new archive from the given paths.

    Paths that do not exist are silently skipped. A comment, when given,
    is attached to the archive.
    """
    command = ['borg', 'create', '--json']
    if arguments.comment:
        comment = arguments.comment
        if Version(_get_borg_version(arguments)) < Version('1.1.10'):
            # Undo any placeholder escape sequences in comments as this
            # version of borg does not support placeholders. XXX: Drop this
            # code when support for borg < 1.1.10 is dropped.
            comment = comment.replace('{{', '{').replace('}}', '}')

        command += ['--comment', comment]

    existing_paths = [path for path in arguments.paths
                      if os.path.exists(path)]
    command += [arguments.path] + existing_paths
    run(command, arguments)
||||
|
||||
|
||||
def subcommand_delete_archive(arguments):
    """Remove an archive from its repository."""
    command = ['borg', 'delete', arguments.path]
    run(command, arguments)
||||
|
||||
|
||||
def _extract(archive_path, destination, arguments, locations=None):
    """Extract the given locations from an archive into destination.

    borg extracts relative to the current working directory, so change
    into the destination for the duration of the call. An empty locations
    list extracts nothing; locations=None extracts everything.
    """
    borg_call = ['borg', 'extract', archive_path]
    if locations is not None:
        borg_call += list(locations)

    previous_directory = os.getcwd()
    try:
        os.chdir(os.path.expanduser(destination))
        # TODO: with python 3.7 use subprocess.run with the
        # 'capture_output' argument
        process = run(borg_call, arguments, check=False,
                      stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        failed = process.returncode != 0
        # Don't fail on the borg error when no files were matched
        if failed and "never matched" not in process.stderr.decode():
            raise subprocess.CalledProcessError(process.returncode,
                                                process.args)
    finally:
        os.chdir(previous_directory)
||||
|
||||
|
||||
def subcommand_export_tar(arguments):
    """Stream the archive contents to stdout as a gzipped tar."""
    command = ['borg', 'export-tar', arguments.path, '-', '--tar-filter=gzip']
    run(command, arguments)
||||
|
||||
|
||||
def _read_archive_file(archive, filepath, arguments):
    """Return the contents of a single file stored inside an archive."""
    process = run(['borg', 'extract', archive, filepath, '--stdout'],
                  arguments, stdout=subprocess.PIPE)
    return process.stdout.decode()
||||
|
||||
|
||||
def subcommand_get_archive_apps(arguments):
    """Print the names of apps included in an archive, one per line.

    The app list is read from the backup manifest file stored inside the
    archive. Exits with status 1 when listing the archive fails.
    """
    manifest_folder = os.path.relpath(MANIFESTS_FOLDER, '/')
    borg_call = [
        'borg', 'list', arguments.path, manifest_folder, '--format',
        '{path}{NEWLINE}'
    ]
    try:
        borg_process = run(borg_call, arguments, stdout=subprocess.PIPE)
    except subprocess.CalledProcessError:
        sys.exit(1)

    manifest_path = borg_process.stdout.decode().strip()
    if not manifest_path:
        return

    manifest_data = _read_archive_file(arguments.path, manifest_path,
                                       arguments)
    manifest = json.loads(manifest_data)
    if manifest:
        for app in _get_apps_of_manifest(manifest):
            print(app['name'])
||||
|
||||
|
||||
def _get_apps_of_manifest(manifest):
|
||||
"""Get apps of a manifest.
|
||||
|
||||
Supports both dict format as well as list format of plinth <=0.42
|
||||
|
||||
"""
|
||||
if isinstance(manifest, list):
|
||||
apps = manifest
|
||||
elif isinstance(manifest, dict) and 'apps' in manifest:
|
||||
apps = manifest['apps']
|
||||
else:
|
||||
raise RuntimeError('Unknown manifest format')
|
||||
|
||||
return apps
|
||||
|
||||
|
||||
def subcommand_get_exported_archive_apps(arguments):
    """Print names of apps recorded inside an exported tarball."""
    manifest = None
    with tarfile.open(arguments.path) as tar_handle:
        for name in tar_handle.getnames():
            is_manifest = ('var/lib/plinth/backups-manifests/' in name
                           and name.endswith('.json'))
            if is_manifest:
                manifest = json.loads(tar_handle.extractfile(name).read())
                break

    if not manifest:
        return

    for app in _get_apps_of_manifest(manifest):
        print(app['name'])
||||
|
||||
|
||||
def subcommand_restore_archive(arguments):
    """Restore files and directories from an archive.

    The locations to restore are read as JSON from stdin.
    """
    locations_info = json.loads(arguments.stdin)
    all_locations = locations_info['directories'] + locations_info['files']
    relative_locations = [
        os.path.relpath(location, '/') for location in all_locations
    ]
    _extract(arguments.path, arguments.destination, arguments,
             locations=relative_locations)
||||
|
||||
|
||||
def subcommand_restore_exported_archive(arguments):
    """Restore selected files from an exported tar archive into '/'.

    The locations to restore are read as JSON from stdin. Members that
    match a listed file exactly, or fall under a listed directory, are
    extracted.
    """
    locations = json.loads(arguments.stdin)
    files = locations['files']
    directories = locations['directories']

    with tarfile.open(arguments.path) as tar_handle:
        for member in tar_handle.getmembers():
            member_path = '/' + member.name
            if member_path in files:
                tar_handle.extract(member, '/')
                continue

            if any(member_path.startswith(directory)
                   for directory in directories):
                tar_handle.extract(member, '/')
||||
|
||||
|
||||
def _assert_app_id(app_id):
|
||||
"""Check that app ID is correct."""
|
||||
if not re.fullmatch(r'[a-z0-9_]+', app_id):
|
||||
raise Exception('Invalid App ID')
|
||||
|
||||
|
||||
def subcommand_dump_settings(arguments):
    """Write the JSON settings read from stdin to the app's settings file."""
    _assert_app_id(arguments.app_id)
    BACKUPS_DATA_PATH.mkdir(exist_ok=True)
    settings_file = BACKUPS_DATA_PATH / f'{arguments.app_id}-settings.json'
    settings_file.write_text(arguments.stdin)
|
||||
|
||||
|
||||
def subcommand_load_settings(arguments):
    """Print the app's stored JSON settings, or '{}' when none exist."""
    _assert_app_id(arguments.app_id)
    settings_file = BACKUPS_DATA_PATH / f'{arguments.app_id}-settings.json'
    try:
        contents = settings_file.read_text()
    except FileNotFoundError:
        contents = '{}'

    print(contents)
|
||||
|
||||
|
||||
def _read_encryption_passphrase(arguments):
|
||||
"""Read encryption passphrase from stdin."""
|
||||
if arguments.stdin:
|
||||
try:
|
||||
return json.loads(arguments.stdin)['encryption_passphrase']
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def get_env(arguments):
    """Return the environment for running borg commands.

    Always provide BORG_PASSPHRASE (also if empty) so borg does not get
    stuck while asking for a passphrase.
    """
    env = os.environ.copy()
    env['BORG_RELOCATED_REPO_ACCESS_IS_OK'] = 'yes'
    env['LANG'] = 'C.UTF-8'
    env['BORG_PASSPHRASE'] = _read_encryption_passphrase(arguments) or ''
    return env
|
||||
|
||||
|
||||
def run(cmd, arguments, check=True, **kwargs):
    """Run a command with the borg passphrase environment set up."""
    return subprocess.run(cmd, env=get_env(arguments), check=check, **kwargs)
|
||||
|
||||
|
||||
def main():
    """Parse arguments and dispatch to the requested subcommand."""
    arguments = parse_arguments()
    arguments.stdin = sys.stdin.read()

    # Map e.g. 'list-repo' to the subcommand_list_repo() function.
    method_name = 'subcommand_' + arguments.subcommand.replace('-', '_')
    globals()[method_name](arguments)


if __name__ == '__main__':
    main()
|
||||
@ -1,7 +1,5 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""
|
||||
FreedomBox app to manage backup archives.
|
||||
"""
|
||||
"""FreedomBox app to manage backup archives."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
@ -14,12 +12,11 @@ from django.utils.text import get_valid_filename
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.utils.translation import gettext_noop
|
||||
|
||||
from plinth import actions
|
||||
from plinth import app as app_module
|
||||
from plinth import cfg, glib, menu
|
||||
from plinth.package import Packages
|
||||
|
||||
from . import api
|
||||
from . import api, privileged
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -27,7 +24,6 @@ _description = [
|
||||
_('Backups allows creating and managing backup archives.'),
|
||||
]
|
||||
|
||||
MANIFESTS_FOLDER = '/var/lib/plinth/backups-manifests/'
|
||||
# session variable name that stores when a backup file should be deleted
|
||||
SESSION_PATH_VARIABLE = 'fbx-backups-upload-path'
|
||||
|
||||
@ -69,8 +65,7 @@ class BackupsApp(app_module.App):
|
||||
"""Install and configure the app."""
|
||||
super().setup(old_version)
|
||||
from . import repository
|
||||
actions.superuser_run(
|
||||
'backups', ['setup', '--path', repository.RootBorgRepository.PATH])
|
||||
privileged.setup(repository.RootBorgRepository.PATH)
|
||||
self.enable()
|
||||
|
||||
# First time setup or upgrading from older versions.
|
||||
@ -79,11 +74,11 @@ class BackupsApp(app_module.App):
|
||||
|
||||
|
||||
def _backup_handler(packet, encryption_passphrase=None):
|
||||
"""Performs backup operation on packet."""
|
||||
if not os.path.exists(MANIFESTS_FOLDER):
|
||||
os.makedirs(MANIFESTS_FOLDER)
|
||||
"""Perform backup operation on packet."""
|
||||
if not os.path.exists(privileged.MANIFESTS_FOLDER):
|
||||
os.makedirs(privileged.MANIFESTS_FOLDER)
|
||||
|
||||
manifest_path = os.path.join(MANIFESTS_FOLDER,
|
||||
manifest_path = os.path.join(privileged.MANIFESTS_FOLDER,
|
||||
get_valid_filename(packet.path) + '.json')
|
||||
manifests = {
|
||||
'apps': [{
|
||||
@ -97,17 +92,10 @@ def _backup_handler(packet, encryption_passphrase=None):
|
||||
|
||||
paths = packet.directories + packet.files
|
||||
paths.append(manifest_path)
|
||||
arguments = ['create-archive', '--path', packet.path]
|
||||
if packet.archive_comment:
|
||||
arguments += ['--comment', packet.archive_comment]
|
||||
|
||||
arguments += ['--paths'] + paths
|
||||
input_data = ''
|
||||
if encryption_passphrase:
|
||||
input_data = json.dumps(
|
||||
{'encryption_passphrase': encryption_passphrase})
|
||||
|
||||
actions.superuser_run('backups', arguments, input=input_data.encode())
|
||||
privileged.create_archive(packet.path, paths,
|
||||
comment=packet.archive_comment,
|
||||
encryption_passphrase=encryption_passphrase)
|
||||
|
||||
|
||||
def backup_by_schedule(data):
|
||||
@ -123,34 +111,16 @@ def backup_by_schedule(data):
|
||||
exception=exception)
|
||||
|
||||
|
||||
def get_exported_archive_apps(path):
|
||||
"""Get list of apps included in exported archive file."""
|
||||
arguments = ['get-exported-archive-apps', '--path', path]
|
||||
output = actions.superuser_run('backups', arguments)
|
||||
return output.splitlines()
|
||||
|
||||
|
||||
def _restore_exported_archive_handler(packet, encryption_passphrase=None):
|
||||
"""Perform restore operation on packet."""
|
||||
locations = {'directories': packet.directories, 'files': packet.files}
|
||||
locations_data = json.dumps(locations)
|
||||
actions.superuser_run('backups',
|
||||
['restore-exported-archive', '--path', packet.path],
|
||||
input=locations_data.encode())
|
||||
privileged.restore_exported_archive(packet.path, packet.directories,
|
||||
packet.files)
|
||||
|
||||
|
||||
def restore_archive_handler(packet, encryption_passphrase=None):
|
||||
"""Perform restore operation on packet."""
|
||||
locations = {
|
||||
'directories': packet.directories,
|
||||
'files': packet.files,
|
||||
'encryption_passphrase': encryption_passphrase
|
||||
}
|
||||
locations_data = json.dumps(locations)
|
||||
arguments = [
|
||||
'restore-archive', '--path', packet.path, '--destination', '/'
|
||||
]
|
||||
actions.superuser_run('backups', arguments, input=locations_data.encode())
|
||||
privileged.restore_archive(packet.path, '/', packet.directories,
|
||||
packet.files, encryption_passphrase)
|
||||
|
||||
|
||||
def restore_from_upload(path, app_ids=None):
|
||||
|
||||
@ -1,12 +1,11 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""
|
||||
App component for other apps to use backup/restore functionality.
|
||||
"""
|
||||
"""App component for other apps to use backup/restore functionality."""
|
||||
|
||||
import copy
|
||||
import json
|
||||
|
||||
from plinth import actions, app
|
||||
from plinth import app
|
||||
|
||||
from . import privileged
|
||||
|
||||
|
||||
def _validate_directories_and_files(section):
|
||||
@ -150,19 +149,14 @@ class BackupRestore(app.FollowerComponent):
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
input_ = json.dumps(data).encode()
|
||||
actions.superuser_run('backups',
|
||||
['dump-settings', '--app-id', self.app_id],
|
||||
input=input_)
|
||||
privileged.dump_settings(self.app_id, data)
|
||||
|
||||
def _settings_restore_post(self):
|
||||
"""Read from a file and restore keys to kvstore."""
|
||||
if not self.settings:
|
||||
return
|
||||
|
||||
output = actions.superuser_run(
|
||||
'backups', ['load-settings', '--app-id', self.app_id])
|
||||
data = json.loads(output)
|
||||
data = privileged.load_settings(self.app_id)
|
||||
|
||||
from plinth import kvstore
|
||||
for key, value in data.items():
|
||||
|
||||
@ -1,12 +1,20 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""Configure backups and sshfs."""
|
||||
"""Configure backups (with borg) and sshfs."""
|
||||
|
||||
import json
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
import subprocess
|
||||
import tarfile
|
||||
from typing import Optional, Union
|
||||
|
||||
from plinth.actions import privileged
|
||||
from plinth.utils import Version
|
||||
|
||||
TIMEOUT = 30
|
||||
BACKUPS_DATA_PATH = pathlib.Path('/var/lib/plinth/backups-data/')
|
||||
MANIFESTS_FOLDER = '/var/lib/plinth/backups-manifests/'
|
||||
|
||||
|
||||
class AlreadyMountedError(Exception):
|
||||
@ -14,8 +22,9 @@ class AlreadyMountedError(Exception):
|
||||
|
||||
|
||||
@privileged
|
||||
def mount(mountpoint: str, remote_path: str, ssh_keyfile: str = None,
|
||||
password: str = None, user_known_hosts_file: str = '/dev/null'):
|
||||
def mount(mountpoint: str, remote_path: str, ssh_keyfile: Optional[str] = None,
|
||||
password: Optional[str] = None,
|
||||
user_known_hosts_file: str = '/dev/null'):
|
||||
"""Mount a remote ssh path via sshfs."""
|
||||
try:
|
||||
_validate_mountpoint(mountpoint)
|
||||
@ -28,12 +37,12 @@ def mount(mountpoint: str, remote_path: str, ssh_keyfile: str = None,
|
||||
# 'reconnect', 'ServerAliveInternal' and 'ServerAliveCountMax' allow the
|
||||
# client (FreedomBox) to keep control of the SSH connection even when the
|
||||
# SSH server misbehaves. Without these options, other commands such as
|
||||
# '/usr/share/plinth/actions/storage usage-info', 'df',
|
||||
# '/usr/share/plinth/actions/sshfs is-mounted', or 'mountpoint' might block
|
||||
# indefinitely (even when manually invoked from the command line). This
|
||||
# situation has some lateral effects, causing major system instability in
|
||||
# the course of ~11 days, and leaving the system in such state that the
|
||||
# only solution is a reboot.
|
||||
# '/usr/share/plinth/actions/actions storage usage_info --no-args', 'df',
|
||||
# '/usr/share/plinth/actions/actions sshfs is_mounted --no-args', or
|
||||
# 'mountpoint' might block indefinitely (even when manually invoked from
|
||||
# the command line). This situation has some lateral effects, causing major
|
||||
# system instability in the course of ~11 days, and leaving the system in
|
||||
# such state that the only solution is a reboot.
|
||||
cmd = [
|
||||
'sshfs', remote_path, mountpoint, '-o',
|
||||
f'UserKnownHostsFile={user_known_hosts_file}', '-o',
|
||||
@ -52,7 +61,7 @@ def mount(mountpoint: str, remote_path: str, ssh_keyfile: str = None,
|
||||
|
||||
|
||||
@privileged
|
||||
def subcommand_umount(mountpoint: str):
|
||||
def umount(mountpoint: str):
|
||||
"""Unmount a mountpoint."""
|
||||
subprocess.run(['umount', mountpoint], check=True)
|
||||
|
||||
@ -82,6 +91,260 @@ def _is_mounted(mountpoint):
|
||||
|
||||
|
||||
@privileged
|
||||
def is_mounted(arguments) -> bool:
|
||||
"""Print whether a path is already mounted."""
|
||||
return _is_mounted(arguments.mountpoint)
|
||||
def is_mounted(mount_point: str) -> bool:
|
||||
"""Return whether a path is already mounted."""
|
||||
return _is_mounted(mount_point)
|
||||
|
||||
|
||||
@privileged
def setup(path: str):
    """Ensure that the borg repository at path exists.

    When 'borg info' fails, assume the repository is missing and create an
    unencrypted one, along with any missing parent directories.
    """
    try:
        _run(['borg', 'info', path], check=True)
    except subprocess.CalledProcessError:
        repository_parent = os.path.dirname(path)
        if not os.path.exists(repository_parent):
            os.makedirs(repository_parent)

        _init_repository(path, encryption='none')
||||
|
||||
|
||||
def _init_repository(path: str, encryption: str,
                     encryption_passphrase: Optional[str] = None):
    """Create a new local or remote borg repository.

    A passphrase is mandatory for any encrypted repository.
    """
    if encryption != 'none' and not encryption_passphrase:
        raise ValueError('No encryption passphrase provided')

    _run(['borg', 'init', '--encryption', encryption, path],
         encryption_passphrase)
||||
|
||||
|
||||
@privileged
def init(path: str, encryption: str,
         encryption_passphrase: Optional[str] = None):
    """Create the borg repository at the given path."""
    _init_repository(path, encryption, encryption_passphrase)
||||
|
||||
|
||||
@privileged
def info(path: str, encryption_passphrase: Optional[str] = None) -> dict:
    """Return borg's JSON information about a repository."""
    command = ['borg', 'info', '--json', path]
    process = _run(command, encryption_passphrase, stdout=subprocess.PIPE)
    return json.loads(process.stdout.decode())
||||
|
||||
|
||||
@privileged
def list_repo(path: str, encryption_passphrase: Optional[str] = None) -> dict:
    """Return borg's JSON listing of the repository contents."""
    command = ['borg', 'list', '--json', '--format="{comment}"', path]
    process = _run(command, encryption_passphrase, stdout=subprocess.PIPE)
    return json.loads(process.stdout.decode())
||||
|
||||
|
||||
def _get_borg_version():
    """Return the installed borgbackup version, e.g. '1.1.9'."""
    output = _run(['borg', '--version'], stdout=subprocess.PIPE).stdout
    return output.decode().split()[1]  # Output example: "borg 1.1.9"
||||
|
||||
|
||||
@privileged
def create_archive(path: str, paths: list[str], comment: Optional[str] = None,
                   encryption_passphrase: Optional[str] = None):
    """Create a new archive containing the given paths.

    Paths that do not exist are silently skipped.
    """
    command = ['borg', 'create', '--json']
    if comment:
        if Version(_get_borg_version()) < Version('1.1.10'):
            # Undo any placeholder escape sequences in comments as this
            # version of borg does not support placeholders. XXX: Drop this
            # code when support for borg < 1.1.10 is dropped.
            comment = comment.replace('{{', '{').replace('}}', '}')

        command += ['--comment', comment]

    command.append(path)
    command += [each for each in paths if os.path.exists(each)]
    _run(command, encryption_passphrase)
||||
|
||||
|
||||
@privileged
def delete_archive(path: str, encryption_passphrase: Optional[str] = None):
    """Remove an archive from its repository."""
    command = ['borg', 'delete', path]
    _run(command, encryption_passphrase)
||||
|
||||
|
||||
def _extract(archive_path, destination, encryption_passphrase,
             locations=None):
    """Extract the given locations from an archive into destination.

    borg extracts relative to the current working directory, so change
    into the destination for the duration of the call. An empty locations
    list extracts nothing; locations=None extracts everything.
    """
    borg_call = ['borg', 'extract', archive_path]
    if locations is not None:
        borg_call += list(locations)

    previous_directory = os.getcwd()
    try:
        os.chdir(os.path.expanduser(destination))
        # TODO: with python 3.7 use subprocess.run with the
        # 'capture_output' argument
        process = _run(borg_call, encryption_passphrase, check=False,
                       stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        failed = process.returncode != 0
        # Don't fail on the borg error when no files were matched
        if failed and "never matched" not in process.stderr.decode():
            raise subprocess.CalledProcessError(process.returncode,
                                                process.args)
    finally:
        os.chdir(previous_directory)
||||
|
||||
|
||||
@privileged
def export_tar(path: str, encryption_passphrase: Optional[str] = None):
    """Stream the archive contents to stdout as a gzipped tar."""
    command = ['borg', 'export-tar', path, '-', '--tar-filter=gzip']
    _run(command, encryption_passphrase)
||||
|
||||
|
||||
def _read_archive_file(archive, filepath, encryption_passphrase):
    """Return the contents of a single file stored inside an archive."""
    process = _run(['borg', 'extract', archive, filepath, '--stdout'],
                   encryption_passphrase, stdout=subprocess.PIPE)
    return process.stdout.decode()
||||
|
||||
|
||||
@privileged
def get_archive_apps(path: str,
                     encryption_passphrase: Optional[str] = None) -> list[str]:
    """Return the names of apps included in an archive.

    The app list is read from the backup manifest file stored inside the
    archive.

    Raises RuntimeError when listing the archive contents fails; the
    original subprocess error is chained for diagnostics.
    """
    manifest_folder = os.path.relpath(MANIFESTS_FOLDER, '/')
    borg_call = [
        'borg', 'list', path, manifest_folder, '--format', '{path}{NEWLINE}'
    ]
    try:
        borg_process = _run(borg_call, encryption_passphrase,
                            stdout=subprocess.PIPE)
        manifest_path = borg_process.stdout.decode().strip()
    except subprocess.CalledProcessError as exception:
        # Chain explicitly instead of losing the cause in the traceback.
        raise RuntimeError('Borg exited unsuccessfully') from exception

    manifest = None
    if manifest_path:
        manifest_data = _read_archive_file(path, manifest_path,
                                           encryption_passphrase)
        manifest = json.loads(manifest_data)

    if not manifest:
        return []

    return [app['name'] for app in _get_apps_of_manifest(manifest)]
||||
|
||||
|
||||
def _get_apps_of_manifest(manifest):
|
||||
"""Get apps of a manifest.
|
||||
|
||||
Supports both dict format as well as list format of plinth <=0.42
|
||||
|
||||
"""
|
||||
if isinstance(manifest, list):
|
||||
apps = manifest
|
||||
elif isinstance(manifest, dict) and 'apps' in manifest:
|
||||
apps = manifest['apps']
|
||||
else:
|
||||
raise RuntimeError('Unknown manifest format')
|
||||
|
||||
return apps
|
||||
|
||||
|
||||
@privileged
def get_exported_archive_apps(path: str) -> list[str]:
    """Return names of apps recorded inside an exported tarball."""
    manifest = None
    with tarfile.open(path) as tar_handle:
        for name in tar_handle.getnames():
            is_manifest = ('var/lib/plinth/backups-manifests/' in name
                           and name.endswith('.json'))
            if is_manifest:
                manifest = json.loads(tar_handle.extractfile(name).read())
                break

    if not manifest:
        return []

    return [app['name'] for app in _get_apps_of_manifest(manifest)]
||||
|
||||
|
||||
@privileged
def restore_archive(archive_path: str, destination: str,
                    directories: list[str], files: list[str],
                    encryption_passphrase: Optional[str] = None):
    """Restore the given files and directories from an archive."""
    relative_locations = [
        os.path.relpath(location, '/')
        for location in directories + files
    ]
    _extract(archive_path, destination, encryption_passphrase,
             locations=relative_locations)
||||
|
||||
|
||||
@privileged
def restore_exported_archive(path: str, directories: list[str],
                             files: list[str]):
    """Restore selected files from an exported tar archive into '/'.

    Members that match a listed file exactly, or fall under a listed
    directory, are extracted. The member path is held in its own variable:
    the previous code reused 'path' as loop variable, clobbering the
    tarball path parameter.
    """
    with tarfile.open(path) as tar_handle:
        for member in tar_handle.getmembers():
            member_path = '/' + member.name
            if member_path in files:
                tar_handle.extract(member, '/')
            else:
                for directory in directories:
                    if member_path.startswith(directory):
                        tar_handle.extract(member, '/')
                        break
||||
|
||||
|
||||
def _assert_app_id(app_id):
|
||||
"""Check that app ID is correct."""
|
||||
if not re.fullmatch(r'[a-z0-9_]+', app_id):
|
||||
raise Exception('Invalid App ID')
|
||||
|
||||
|
||||
@privileged
def dump_settings(app_id: str, settings: dict[str, Union[int, float, bool,
                                                         str]]):
    """Serialize an app's settings to its JSON settings file."""
    _assert_app_id(app_id)
    BACKUPS_DATA_PATH.mkdir(exist_ok=True)
    settings_file = BACKUPS_DATA_PATH / f'{app_id}-settings.json'
    settings_file.write_text(json.dumps(settings))
||||
|
||||
|
||||
@privileged
def load_settings(app_id: str) -> dict[str, Union[int, float, bool, str]]:
    """Deserialize an app's settings from its JSON settings file.

    Return an empty dict when no settings file exists.
    """
    _assert_app_id(app_id)
    settings_file = BACKUPS_DATA_PATH / f'{app_id}-settings.json'
    try:
        contents = settings_file.read_text()
    except FileNotFoundError:
        return {}

    return json.loads(contents)
||||
|
||||
|
||||
def _get_env(encryption_passphrase: Optional[str] = None):
|
||||
"""Create encryption and ssh kwargs out of given arguments."""
|
||||
env = dict(os.environ, BORG_RELOCATED_REPO_ACCESS_IS_OK='yes',
|
||||
LANG='C.UTF-8')
|
||||
# Always provide BORG_PASSPHRASE (also if empty) so borg does not get stuck
|
||||
# while asking for a passphrase.
|
||||
env['BORG_PASSPHRASE'] = encryption_passphrase or ''
|
||||
return env
|
||||
|
||||
|
||||
def _run(cmd, encryption_passphrase=None, check=True, **kwargs):
    """Run a command with the borg passphrase environment set up."""
    return subprocess.run(cmd, env=_get_env(encryption_passphrase),
                          check=check, **kwargs)
||||
|
||||
@ -4,7 +4,6 @@
|
||||
import abc
|
||||
import contextlib
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
@ -13,8 +12,7 @@ from uuid import uuid1
|
||||
import paramiko
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from plinth import actions, cfg
|
||||
from plinth.errors import ActionError
|
||||
from plinth import cfg
|
||||
from plinth.utils import format_lazy
|
||||
|
||||
from . import (_backup_handler, api, errors, get_known_hosts_path, privileged,
|
||||
@ -137,8 +135,10 @@ class BaseBorgRepository(abc.ABC):
|
||||
|
||||
def get_info(self):
|
||||
"""Return Borg information about a repository."""
|
||||
output = self.run(['info', '--path', self.borg_path])
|
||||
output = json.loads(output)
|
||||
with self._handle_errors():
|
||||
output = privileged.info(self.borg_path,
|
||||
self._get_encryption_passpharse())
|
||||
|
||||
if output['encryption']['mode'] == 'none' and \
|
||||
self._get_encryption_data():
|
||||
raise errors.BorgUnencryptedRepository(
|
||||
@ -147,7 +147,7 @@ class BaseBorgRepository(abc.ABC):
|
||||
return output
|
||||
|
||||
def get_view_content(self):
|
||||
"""Get archives with additional information as needed by the view"""
|
||||
"""Get archives with additional information as needed by the view."""
|
||||
repository = {
|
||||
'uuid': self.uuid,
|
||||
'name': self.name,
|
||||
@ -160,7 +160,7 @@ class BaseBorgRepository(abc.ABC):
|
||||
repository['mounted'] = self.is_mounted
|
||||
if repository['mounted']:
|
||||
repository['archives'] = self.list_archives()
|
||||
except (errors.BorgError, ActionError) as err:
|
||||
except (errors.BorgError, Exception) as err:
|
||||
repository['error'] = str(err)
|
||||
|
||||
return repository
|
||||
@ -170,8 +170,9 @@ class BaseBorgRepository(abc.ABC):
|
||||
|
||||
def list_archives(self):
|
||||
"""Return list of archives in this repository."""
|
||||
output = self.run(['list-repo', '--path', self.borg_path])
|
||||
archives = json.loads(output)['archives']
|
||||
with self._handle_errors():
|
||||
archives = privileged.list_repo(
|
||||
self.borg_path, self._get_encryption_passpharse())['archives']
|
||||
return sorted(archives, key=lambda archive: archive['start'],
|
||||
reverse=True)
|
||||
|
||||
@ -186,7 +187,9 @@ class BaseBorgRepository(abc.ABC):
|
||||
def delete_archive(self, archive_name):
|
||||
"""Delete an archive with given name from this repository."""
|
||||
archive_path = self._get_archive_path(archive_name)
|
||||
self.run(['delete-archive', '--path', archive_path])
|
||||
with self._handle_errors():
|
||||
privileged.delete_archive(archive_path,
|
||||
self._get_encryption_passpharse())
|
||||
|
||||
def initialize(self):
|
||||
"""Initialize / create a borg repository."""
|
||||
@ -196,8 +199,9 @@ class BaseBorgRepository(abc.ABC):
|
||||
encryption = 'repokey'
|
||||
|
||||
try:
|
||||
self.run(
|
||||
['init', '--path', self.borg_path, '--encryption', encryption])
|
||||
with self._handle_errors():
|
||||
privileged.init(self.borg_path, encryption,
|
||||
self._get_encryption_passpharse())
|
||||
except errors.BorgRepositoryExists:
|
||||
pass
|
||||
|
||||
@ -219,25 +223,13 @@ class BaseBorgRepository(abc.ABC):
|
||||
except Exception as exception:
|
||||
self.reraise_known_error(exception)
|
||||
|
||||
def _run(self, cmd, arguments, superuser=True, **kwargs):
|
||||
"""Run a backups or sshfs action script command."""
|
||||
try:
|
||||
if superuser:
|
||||
return actions.superuser_run(cmd, arguments, **kwargs)
|
||||
|
||||
return actions.run(cmd, arguments, **kwargs)
|
||||
except ActionError as err:
|
||||
self.reraise_known_error(err)
|
||||
|
||||
def run(self, arguments, superuser=True):
|
||||
"""Add credentials and run a backups action script command."""
|
||||
def _get_encryption_passpharse(self):
|
||||
"""Return encryption passphrase or raise an exception."""
|
||||
for key in self.credentials.keys():
|
||||
if key not in self.known_credentials:
|
||||
raise ValueError('Unknown credentials entry: %s' % key)
|
||||
|
||||
input_data = json.dumps(self._get_encryption_data())
|
||||
return self._run('backups', arguments, superuser=superuser,
|
||||
input=input_data.encode())
|
||||
return self.credentials.get('encryption_passphrase', None)
|
||||
|
||||
def get_download_stream(self, archive_name):
|
||||
"""Return an stream of .tar.gz binary data for a backup archive."""
|
||||
@ -264,11 +256,16 @@ class BaseBorgRepository(abc.ABC):
|
||||
|
||||
return chunk
|
||||
|
||||
args = ['export-tar', '--path', self._get_archive_path(archive_name)]
|
||||
input_data = json.dumps(self._get_encryption_data())
|
||||
proc = self._run('backups', args, run_in_background=True)
|
||||
proc.stdin.write(input_data.encode())
|
||||
with self._handle_errors():
|
||||
proc, read_fd, input_ = privileged.export_tar(
|
||||
self._get_archive_path(archive_name),
|
||||
self._get_encryption_passpharse(), _raw_output=True)
|
||||
|
||||
os.close(read_fd) # Don't use the pipe for communication, just stdout
|
||||
proc.stdin.write(input_)
|
||||
proc.stdin.close()
|
||||
proc.stderr.close() # writing to stderr in child will cause SIGPIPE
|
||||
|
||||
return BufferedReader(proc.stdout)
|
||||
|
||||
def _get_archive_path(self, archive_name):
|
||||
@ -278,7 +275,7 @@ class BaseBorgRepository(abc.ABC):
|
||||
@staticmethod
|
||||
def reraise_known_error(err):
|
||||
"""Look whether the caught error is known and reraise it accordingly"""
|
||||
caught_error = str(err)
|
||||
caught_error = str((err, err.args))
|
||||
for known_error in KNOWN_ERRORS:
|
||||
for error in known_error['errors']:
|
||||
if re.search(error, caught_error):
|
||||
@ -297,8 +294,9 @@ class BaseBorgRepository(abc.ABC):
|
||||
def get_archive_apps(self, archive_name):
|
||||
"""Get list of apps included in an archive."""
|
||||
archive_path = self._get_archive_path(archive_name)
|
||||
output = self.run(['get-archive-apps', '--path', archive_path])
|
||||
return output.splitlines()
|
||||
with self._handle_errors():
|
||||
return privileged.get_archive_apps(
|
||||
archive_path, self._get_encryption_passpharse())
|
||||
|
||||
def restore_archive(self, archive_name, app_ids=None):
|
||||
"""Restore an archive from this repository to the system."""
|
||||
@ -474,7 +472,7 @@ class SshBorgRepository(BaseBorgRepository):
|
||||
if os.path.exists(self._mountpoint):
|
||||
try:
|
||||
self.umount()
|
||||
except ActionError:
|
||||
except Exception:
|
||||
pass
|
||||
if not os.listdir(self._mountpoint):
|
||||
os.rmdir(self._mountpoint)
|
||||
|
||||
@ -157,8 +157,10 @@ class TestBackupProcesses:
|
||||
@staticmethod
|
||||
@patch('plinth.action_utils.webserver_is_enabled')
|
||||
@patch('plinth.action_utils.service_is_running')
|
||||
@patch('plinth.actions.superuser_run')
|
||||
def test__shutdown_services(run, service_is_running, webserver_is_enabled):
|
||||
@patch('plinth.privileged.service.stop')
|
||||
@patch('plinth.modules.apache.privileged.disable')
|
||||
def test__shutdown_services(apache_disable, service_stop,
|
||||
service_is_running, webserver_is_enabled):
|
||||
"""Test that services are stopped in correct order."""
|
||||
components = [_get_backup_component('a'), _get_backup_component('b')]
|
||||
service_is_running.return_value = True
|
||||
@ -182,17 +184,13 @@ class TestBackupProcesses:
|
||||
[call('b', kind='site'),
|
||||
call('a', kind='site')])
|
||||
|
||||
calls = [
|
||||
call('apache', ['disable', '--name', 'b', '--kind', 'site']),
|
||||
call('service', ['stop', 'b']),
|
||||
call('apache', ['disable', '--name', 'a', '--kind', 'site']),
|
||||
call('service', ['stop', 'a'])
|
||||
]
|
||||
run.assert_has_calls(calls)
|
||||
apache_disable.assert_has_calls([call('b', 'site'), call('a', 'site')])
|
||||
service_stop.assert_has_calls([call('b'), call('a')])
|
||||
|
||||
@staticmethod
|
||||
@patch('plinth.actions.superuser_run')
|
||||
def test__restore_services(run):
|
||||
@patch('plinth.privileged.service.start')
|
||||
@patch('plinth.modules.apache.privileged.enable')
|
||||
def test__restore_services(apache_enable, service_start):
|
||||
"""Test that services are restored in correct order."""
|
||||
original_state = [
|
||||
api.SystemServiceHandler(None, 'a-service'),
|
||||
@ -211,11 +209,8 @@ class TestBackupProcesses:
|
||||
original_state[2].was_enabled = True
|
||||
original_state[3].was_enabled = False
|
||||
api._restore_services(original_state)
|
||||
calls = [
|
||||
call('service', ['start', 'a-service']),
|
||||
call('apache', ['enable', '--name', 'c-service', '--kind', 'site'])
|
||||
]
|
||||
run.assert_has_calls(calls)
|
||||
service_start.assert_has_calls([call('a-service')])
|
||||
apache_enable.assert_has_calls([call('c-service', 'site')])
|
||||
|
||||
@staticmethod
|
||||
def test__run_operation():
|
||||
|
||||
@ -3,7 +3,6 @@
|
||||
Test the backups action script.
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import pathlib
|
||||
import subprocess
|
||||
@ -11,8 +10,8 @@ import uuid
|
||||
|
||||
import pytest
|
||||
|
||||
from plinth import actions
|
||||
from plinth.modules import backups
|
||||
from plinth.modules.backups import privileged
|
||||
from plinth.modules.backups.repository import BorgRepository, SshBorgRepository
|
||||
from plinth.tests import config as test_config
|
||||
|
||||
@ -94,11 +93,8 @@ def test_create_export_delete_archive(data_directory, backup_directory):
|
||||
repository = BorgRepository(str(path))
|
||||
repository.initialize()
|
||||
archive_path = "::".join([str(path), archive_name])
|
||||
actions.superuser_run('backups', [
|
||||
'create-archive', '--path', archive_path, '--comment', archive_comment,
|
||||
'--paths',
|
||||
str(data_directory)
|
||||
])
|
||||
privileged.create_archive(archive_path, [str(data_directory)],
|
||||
archive_comment)
|
||||
|
||||
archive = repository.list_archives()[0]
|
||||
assert archive['name'] == archive_name
|
||||
@ -118,28 +114,18 @@ def test_remote_backup_actions():
|
||||
"""
|
||||
credentials = _get_credentials(add_encryption_passphrase=True)
|
||||
path = os.path.join(test_config.backups_ssh_path, str(uuid.uuid1()))
|
||||
arguments = ['init', '--path', path, '--encryption', 'repokey']
|
||||
arguments, kwargs = _append_borg_arguments(arguments, credentials)
|
||||
actions.superuser_run('backups', arguments, **kwargs)
|
||||
privileged.init(path, 'repokey', **_get_borg_arguments(credentials))
|
||||
|
||||
arguments = ['info', '--path', path]
|
||||
arguments, kwargs = _append_borg_arguments(arguments, credentials)
|
||||
info = actions.superuser_run('backups', arguments, **kwargs)
|
||||
info = json.loads(info)
|
||||
info = privileged.info(path, **_get_borg_arguments(credentials))
|
||||
assert info['encryption']['mode'] == 'repokey'
|
||||
|
||||
|
||||
def _append_borg_arguments(arguments, credentials):
|
||||
"""Append run arguments for running borg directly"""
|
||||
kwargs = {}
|
||||
passphrase = credentials.get('encryption_passphrase', None)
|
||||
if passphrase:
|
||||
kwargs['input'] = json.dumps({'encryption_passphrase': passphrase})
|
||||
|
||||
if 'ssh_keyfile' in credentials and credentials['ssh_keyfile']:
|
||||
arguments += ['--ssh-keyfile', credentials['ssh_keyfile']]
|
||||
|
||||
return (arguments, kwargs)
|
||||
def _get_borg_arguments(credentials):
|
||||
"""Get credential arguments for running borg privileged actions."""
|
||||
return {
|
||||
'passphrase': credentials.get('encryption_passphrase', None),
|
||||
'ssh_keyfile': credentials.get('ssh_keyfile')
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('needs_ssh_config')
|
||||
|
||||
@ -3,7 +3,6 @@
|
||||
Test the App components provides by backups app.
|
||||
"""
|
||||
|
||||
import json
|
||||
from unittest.mock import call, patch
|
||||
|
||||
import pytest
|
||||
@ -235,8 +234,8 @@ def test_backup_restore_hooks(backup_restore):
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@patch('plinth.actions.superuser_run')
|
||||
def test_backup_restore_backup_pre(run, backup_restore):
|
||||
@patch('plinth.modules.backups.privileged.dump_settings')
|
||||
def test_backup_restore_backup_pre(dump_settings, backup_restore):
|
||||
"""Test running backup-pre hook."""
|
||||
packet = None
|
||||
kvstore.set('setting-1', 'value-1')
|
||||
@ -244,32 +243,27 @@ def test_backup_restore_backup_pre(run, backup_restore):
|
||||
|
||||
component = BackupRestore('test-backup-restore')
|
||||
component.backup_pre(packet)
|
||||
run.assert_has_calls([])
|
||||
dump_settings.assert_has_calls([])
|
||||
|
||||
backup_restore.backup_pre(packet)
|
||||
input_ = {'setting-1': 'value-1'}
|
||||
run.assert_has_calls([
|
||||
call('backups', ['dump-settings', '--app-id', 'testapp'],
|
||||
input=json.dumps(input_).encode())
|
||||
])
|
||||
dump_settings.assert_has_calls([call('testapp', {'setting-1': 'value-1'})])
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@patch('plinth.actions.superuser_run')
|
||||
def test_backup_restore_restore_post(run, backup_restore):
|
||||
@patch('plinth.modules.backups.privileged.load_settings')
|
||||
def test_backup_restore_restore_post(load_settings, backup_restore):
|
||||
"""Test running restore-post hook."""
|
||||
packet = None
|
||||
backup_restore.app_id = 'testapp'
|
||||
|
||||
component = BackupRestore('test-backup-restore')
|
||||
component.restore_post(packet)
|
||||
run.assert_has_calls([])
|
||||
load_settings.assert_has_calls([])
|
||||
|
||||
output = {'setting-1': 'value-1'}
|
||||
run.return_value = json.dumps(output)
|
||||
load_settings.return_value = output
|
||||
backup_restore.restore_post(packet)
|
||||
run.assert_has_calls(
|
||||
[call('backups', ['load-settings', '--app-id', 'testapp'])])
|
||||
load_settings.assert_has_calls([call('testapp')])
|
||||
|
||||
assert kvstore.get('setting-1') == 'value-1'
|
||||
with pytest.raises(Exception):
|
||||
|
||||
@ -25,7 +25,7 @@ from plinth.modules import backups, storage
|
||||
from plinth.views import AppView
|
||||
|
||||
from . import (SESSION_PATH_VARIABLE, api, forms, get_known_hosts_path,
|
||||
is_ssh_hostkey_verified)
|
||||
is_ssh_hostkey_verified, privileged)
|
||||
from .decorators import delete_tmp_backup_file
|
||||
from .repository import (BorgRepository, SshBorgRepository, get_instance,
|
||||
get_repositories)
|
||||
@ -238,7 +238,7 @@ class RestoreFromUploadView(BaseRestoreView):
|
||||
def _get_included_apps(self):
|
||||
"""Save some data used to instantiate the form."""
|
||||
path = self.request.session.get(SESSION_PATH_VARIABLE)
|
||||
return backups.get_exported_archive_apps(path)
|
||||
return privileged.get_exported_archive_apps(path)
|
||||
|
||||
def form_valid(self, form):
|
||||
"""Restore files from the archive on valid form submission."""
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user