#!/opt/imh-python/bin/python3
"""Shared server mass MySQL recovery tool"""
from collections.abc import Iterator
import sys
import os
import argparse
from pathlib import PosixPath
from concurrent.futures import ThreadPoolExecutor, as_completed
import subprocess
from cproc import ProcLimit
import rads
from bakauth import BakAuth
from restic import Restic, ResticError, SQLBackupGroup, SQLBackupItem


def parse_args() -> argparse.Namespace:
    """Parse CLI args"""
    parser = argparse.ArgumentParser(description=__doc__)
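    # Default ceiling for --max-load: one unit of load average per CPU core.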
    default_load = float(os.cpu_count() or 1)
    parser.add_argument(
        '--workers',
        type=int,
        default=12,
        help='How many databases to download or import in parallel (default: 12)',
    )
    subparsers = parser.add_subparsers(
        title='command', dest='command', required=True
    )
    def add_cmd(cmd: str, msg: str) -> argparse.ArgumentParser:
        return subparsers.add_parser(cmd, description=msg, help=msg)

    down = add_cmd('download', 'Download MySQL data from backups')
    down.add_argument(
        '--max-load',
        metavar='FLOAT',
        dest='lim',
        type=lambda x: ProcLimit(x, grace=(5, 1)),
        default=ProcLimit(default_load, grace=(5, 1)),
        help='Max server load before pausing restic subprocesses '
        f'(default: {default_load})',
    )
    down.add_argument(
        '--userlist',
        metavar='FILE',
        type=lambda x: PosixPath(x).resolve(),
        help='Only search in restic repos owned by users found in this '
        'line-delimited file',
    )
    down.add_argument(
        '--dblist',
        metavar='FILE',
        type=lambda x: PosixPath(x).resolve(),
        help='Only download databases whose names match entries in this '
        'line-delimited file',
    )
    dest = down.add_mutually_exclusive_group(required=True)
    dest.add_argument(
        '--path',
        type=lambda x: PosixPath(x).resolve(),
        help='Path to download sql dumps to',
    )
    dest.add_argument(
        '--import-now',
        dest='import_now',
        action='store_true',
        help='Instead of downloading to the disk, immediately import',
    )
    imp = add_cmd('import', 'Import downloaded database dumps')
    imp.add_argument(
        'path',
        type=lambda x: PosixPath(x).resolve(),
        help='Path to import sql dumps from',
    )
    args = parser.parse_args()
    return args


def main():
    """Main program logic"""
    args = parse_args()
    if args.command == 'import':
        import_main(args)
    elif args.command == 'download':
        download_main(args)
    else:
        raise RuntimeError(f'unrecognized command: {args.command}')
    print('done.')


class Pool(ThreadPoolExecutor):
    """ThreadPoolExecutor subclass that automatically runs .result() on
    each future it creates on __exit__"""

    def __init__(self, max_workers: int):
        super().__init__(max_workers=max_workers)
        self.tasks = []

    def submit(self, func, /, *args, **kwargs):
        future = super().submit(func, *args, **kwargs)
        self.tasks.append(future)
        return future

    def __exit__(self, exc_type, exc_val, exc_tb):
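        # Wait on every submitted future and report failures to stderr
        # instead of re-raising, so one failed task cannot abort the rest.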
        for future in as_completed(self.tasks):
            try:
                future.result()
            except Exception as exc:  # pylint: disable=broad-except
                print('ERROR:', exc, file=sys.stderr)
        return super().__exit__(exc_type, exc_val, exc_tb)


def download_main(args: argparse.Namespace):
    """Mass-downloads mysql data from restic"""
    if args.import_now:
        print('This will immediately import each MySQL database found')
        print('without dumping the original first.')
        if not rads.prompt_y_n('Continue?'):
            sys.exit(2)
    else:
        print('Downloading to', args.path)
    if args.dblist is not None:
        with args.dblist.open() as handle:
            args.dblist = handle.read().splitlines()
    if args.userlist is not None:
        with args.userlist.open() as handle:
            args.userlist = handle.read().splitlines()
    api = BakAuth()
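    # On reseller-class servers, child cPanel accounts' backups live in the
    # owning reseller's repo, so find_backups() also searches the children.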
    is_reseller = api.get_reg_details()['svr_class'] == 'imh_reseller'
    with Pool(max_workers=args.workers) as pool:
        for bucket_user, restic in api.all_restics(
            users=args.userlist, lim=args.lim
        ).items():
            for backup in find_backups(restic, bucket_user, is_reseller):
                # this iterates once per user, finding the latest backup
                # which may contain multiple databases
                for dbname, snap in backup.dbs.items():
                    if args.dblist is not None and dbname not in args.dblist:
                        continue
                    if args.import_now:
                        pool.submit(download_import, snap)
                        continue
                    pool.submit(download_to_disk, snap, args.path)


def import_main(args: argparse.Namespace):
    """Mass imports MySQL data from a directory"""
    print('MySQL data will be imported as root. This should only be used for')
    print('data downloaded by this tool, never customer-provided .sql files.')
    if not rads.prompt_y_n('Continue?'):
        sys.exit(2)
    with Pool(max_workers=args.workers) as pool:
        for sql in args.path.glob('*/*.sql'):
            pool.submit(import_db, sql)


def find_backups(
    restic: Restic, bucket_user: str, is_reseller: bool
) -> Iterator[SQLBackupGroup]:
    """Find the latest MySQL backups in a restic repo

    Args:
        restic (Restic): restic instance to look through
        bucket_user (str): username for the main cPanel user this restic repo
            belongs to
        is_reseller (bool): whether this is a reseller-class server, in which
            case the reseller's child accounts are also searched

    Yields:
        SQLBackupGroup: latest SQLBackupGroup for each user
    """
    if bucket_user == 'root':
        return
    if is_reseller:
        users = rads.get_children(bucket_user) + [bucket_user]
    else:
        users = [bucket_user]
    try:
        backups = restic.get_backups(serialize=False)
    except ResticError as exc:
        print('ERROR:', restic, exc, sep=': ', file=sys.stderr)
        return
    for user in users:
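        # Pick the newest MySQL snapshot for this user: backups are sorted
        # by timestamp, so the last element is the most recent.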
        try:
            backup = sorted(backups[user]['mysql'], key=lambda x: x.time)[-1]
        except (IndexError, KeyError):
            print('SKIPPED:', user, 'had no MySQL backups', file=sys.stderr)
            continue
        yield backup


def download_import(snap: SQLBackupItem):
    """Downloads and immediately imports a database from restic"""
    print('DOWNLOAD+IMPORT:', snap.dbname)
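    # Stream the restic dump's stdout directly into `mysql` so the dump is
    # imported without ever being written to disk.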
    with snap.dump().proc(stdout=subprocess.PIPE) as proc:
        subprocess.run(['mysql', snap.dbname], stdin=proc.stdout, check=True)
        down_stderr = proc.stderr.read()
    if proc.returncode:
        raise ResticError(
            subprocess.CompletedProcess(
                args=proc.args,
                returncode=proc.returncode,
                stdout=None,
                stderr=down_stderr,
            )
        )


def download_to_disk(snap: SQLBackupItem, root_path: PosixPath):
    """Downloads a MySQL database from a restic repo, saves to the disk

    Args:
        snap (SQLBackupItem): SQL snapshot to download from
        root_path (PosixPath): root directory to download the dump under

    Raises:
        ResticError: any error raised by restic
    """
    # dump_dest = ${args.path}/${username}/${dbname}.sql
    dump_dest = root_path.joinpath(snap.user).joinpath(f'{snap.dbname}.sql')
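    # Skip dumps that already exist with data so an interrupted run can be
    # resumed without re-downloading completed databases.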
    try:
        if dump_dest.stat().st_size > 0:
            print('EXISTS:', dump_dest)
            return
    except FileNotFoundError:
        pass
    print('DOWNLOADING:', dump_dest)
    os.makedirs(dump_dest.parent, exist_ok=True)
    with dump_dest.open('w') as handle:
        try:
            snap.dump().run(stdout=handle, check=True)
        except subprocess.CalledProcessError as exc:
            raise ResticError(exc) from exc


def import_db(path: PosixPath):
    """Imports a database dump"""
    print('IMPORTING:', path)
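    # The target database name comes from the dump filename, which
    # download_to_disk() wrote as ${username}/${dbname}.sql.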
    with path.open() as handle:
        dbname = path.name.removesuffix('.sql')
        subprocess.run(['mysql', dbname], stdin=handle, check=True)


if __name__ == '__main__':
    main()
