"""Interface to Acronis file restoration API"""

import functools
import json
import logging
import operator
import os
import platform
import re
import resource
import socket
import time
import urllib.parse as urlparse  # path differs from python 2.7!
from contextlib import closing
from itertools import chain
from multiprocessing import Pool
from pathlib import Path
from subprocess import PIPE, Popen, run
from typing import Dict, Iterable, List, Optional
from xml.etree import ElementTree

import requests

from restore_infected import __version__
from restore_infected.backup_backends_lib import (
    BackendNotAuthorizedError,
    backend_auth_required,
    backup_client_required,
)
from .. import helpers, package_manager
from ..backup_backends_lib import asyncable, extra
from ..helpers import from_env

REPEAT_NUM = 30
REPEAT_WAIT = 5  # seconds
BACKUP_ATTEMPTS = 2

TOKEN_FILE = '/var/restore_infected/acronis_api_token.json'

auth_required = backend_auth_required(
    TOKEN_FILE, "Initialize Acronis first!")
client_required = backup_client_required(
    '/usr/lib/Acronis/BackupAndRecovery/mms',
    'Acronis client should be installed first!')
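

# Rough usage sketch of this backend (all names are this module's own;
# the file path below is purely illustrative):
#
#   init(username, password, provision=True)  # save token, install agent
#   make_initial_backup_strict()              # schedule a backup, trace it
#   for backup in backups():                  # AcronisBackup list, newest first
#       item = backup.file_data('/home/user/public_html/index.php')
#       backup.restore([item], '/tmp/restore')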


def is_suitable():
    return True


class FileDeliverError(Exception):
    pass


class BackupFailed(Exception):
    pass


class TokenExistsError(Exception):
    pass


class ClientAlreadyInstalledError(Exception):
    pass


class RequestFailed(Exception):
    pass


def check_response(response):
    """
    Raise an error unless the response status indicates success
    :param response: obj -> response object
    """
    try:
        response.raise_for_status()
    except requests.HTTPError as e:
        if e.response.status_code == 401:
            raise BackendNotAuthorizedError("Token is invalid") from e
        raise


class AcronisConnector:
    api_path = 'api/1'
    entrypoint = 'https://cloud.acronis.com'
    refresh_interval = 86400  # one day

    # URL templates
    accounts_tpl = '{}/{}/accounts?login={}'
    token_api_tpl = '{}/api/idp/v1/token'
    login_tpl = '{}/{}/login'
    logout_tpl = '{}/{}/logout'
    bredirect_url_tpl = '{}/{}/groups/{}/backupconsole'
    backup_url_api_tpl = '{}api/ams'

    token_path = TOKEN_FILE  # for backward compatibility

    def __init__(self, hostname=None):
        self.hostname = hostname if hostname else socket.gethostname()
        self._session = requests.Session()
        self._session.headers.update(
            {"User-Agent": "cloudlinux-backup-utils v{}".format(__version__)}
        )
        self._logged_in = False
        self.server_url = None

    @classmethod
    def connect(cls, hostname=None):
        _class = cls(hostname)
        _class.login()
        return _class

    def get(self, url, **kw):
        return self._session.get('{}/{}'.format(self.base_url, url), **kw)

    def post(self, url, **kw):
        return self._session.post('{}/{}'.format(self.base_url, url), **kw)

    def put(self, url, **kw):
        return self._session.put('{}/{}'.format(self.base_url, url), **kw)

    def login(self):
        """
        Creates a session to work with restoration API
        """
        self._login_to_api()
        self._get_url()
        self._logged_in = True

    def logout(self):
        """
        Destroys the session
        """
        url = self.logout_tpl.format(self.server_url, self.api_path)
        check_response(self._session.post(url))
        self._logged_in = False

    @classmethod
    def create_token(cls, username, password, force=False):
        """
        Gets and saves authentication token using provided credentials
        :param username: str -> username
        :param password: str -> password
        :param force: bool -> overwrite token file
        """
        if os.path.exists(TOKEN_FILE) and not force:
            raise TokenExistsError("Token file already exists. If you want to "
                                   "override it, delete '{}' manually and run "
                                   "init again.".format(TOKEN_FILE))
        server_url = cls.get_server_url(username)
        url = cls.token_api_tpl.format(server_url)
        params = {
            'grant_type': 'password',
            'username': username,
            'password': password,
            'client_id': 'backupAgent'}

        r = requests.post(url, data=params)
        check_response(r)

        data = r.json()
        data['timestamp'] = int(time.time())
        data['username'] = username
        for key in 'token_type', 'access_token':
            data.pop(key, None)
        cls._save_token(data)

    def refresh_token(self):
        """
        Refreshes the saved token using its refresh key
        """
        self._refresh_token()

    @staticmethod
    def get_saved_token():
        """Reads token from file"""
        try:
            with open(TOKEN_FILE) as f:
                return json.load(f)
        except (OSError, ValueError, TypeError) as e:
            raise Exception("Could not read token: {}".format(e)) from e

    def _login_to_api(self):
        """Log into common API"""
        if self._logged_in:
            return

        token_data = self.get_saved_token()
        if token_data is None or 'username' not in token_data:
            raise Exception('Missing or incomplete credentials')

        if not self.server_url:
            self.server_url = self.get_server_url(token_data['username'])

        if time.time() - token_data['timestamp'] > self.refresh_interval:
            self._refresh_token(token_data)

        url = self.login_tpl.format(self.server_url, self.api_path)
        params = {'jwt': token_data.get('id_token')}
        r = self._session.get(url, params=params)
        check_response(r)
        self._user_data = r.json()

    def _get_url(self):
        """After common API login get redirected to backup API"""
        if hasattr(self, 'url'):
            return
        if not hasattr(self, '_user_data'):
            raise Exception("No user data. Call basic_login first")

        url = self.bredirect_url_tpl.format(
            self.server_url, self.api_path,
            self._user_data.get('group', {}).get('id'))

        r = self._session.get(url)
        check_response(r)
        redirect_url = r.json().get('redirect_url')
        r = self._session.get(redirect_url)
        self.base_url = self.backup_url_api_tpl.format(r.url)
        self._subscription_url = self.base_url.replace('ams', 'subscriptions')

    @classmethod
    def get_server_url(cls, username):
        """
        Resolve the working server URL for this account, starting from
        the main entrypoint
        :param username: str -> username to get working server
        :return: str -> working server URL
        """
        server_url = prev_server_url = cls.entrypoint
        for _ in range(10):
            url = cls.accounts_tpl.format(server_url, cls.api_path, username)
            r = requests.get(url)
            check_response(r)
            server_url = r.json().get("server_url")
            if server_url == prev_server_url:
                break
            prev_server_url = server_url
        return server_url

    def _refresh_token(self, token=None):
        """
        Gets new token and passes it for saving
        :param token: dict -> old token data as dict
        :return: dict -> refreshed token
        """
        if token is None:  # no token passed
            token = self.get_saved_token()
        if token is None:  # failed to read token from file
            raise Exception('Could not get saved token to refresh')
        if not self.server_url:
            self.server_url = self.get_server_url(token['username'])
        url = self.token_api_tpl.format(self.server_url)
        params = {
            'grant_type': 'refresh_token',
            'refresh_token': token['refresh_token'],
            'client_id': 'backupAgent'}
        r = self._session.post(url, data=params)
        check_response(r)
        data = r.json()

        for key in 'refresh_token', 'id_token':
            token[key] = data.get(key)
        token['timestamp'] = int(time.time())
        self._save_token(token)
        return token

    @classmethod
    def _save_token(cls, token):
        """
        Saves token to file
        :param token: dict -> token data to be saved
        :return: bool -> True on success (raises otherwise)
        """
        helpers.mkdir(os.path.dirname(TOKEN_FILE))
        try:
            with open(TOKEN_FILE, 'w') as f:
                json.dump(token, f)
        except (OSError, ValueError, TypeError) as e:
            raise Exception(
                "Could not save token to file: {}".format(e)) from e
        return True

    def get_login_url(self):
        """
        Get a login URL to sign in without a username/password prompt
        :return: str -> login url
        """
        token_data = self._refresh_token()
        id_token = token_data.get('id_token')
        if not id_token:
            raise Exception("Error obtaining login token")
        username = token_data['username']
        if not self.server_url:
            self.server_url = self.get_server_url(username)
        return "{}?jwt={}".format(self.server_url, id_token)

    def subscription(self, sid=None, params=None):
        if params is None:
            params = {}
        url = '/'.join(filter(None, (self._subscription_url, sid)))
        headers = {'Content-Type': 'application/json'}
        r = self._session.post(url, headers=headers, json=params)
        check_response(r)
        return r.json()


@functools.total_ordering
class FileData:
    def __init__(self, item_id, filename, size, mtime):
        self.id = item_id
        self.filename = filename
        self.size = size
        self.mtime = mtime

    def __str__(self):
        return "{}; {}; {}; {}".format(
            self.id, self.filename, self.size, self.mtime.isoformat(),
        )

    def __repr__(self):
        return (
            "<{__class__.__name__}"
            " (filename={filename!s}, mtime={mtime!r})>".format(
                __class__=self.__class__, **self.__dict__
            )
        )

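    # NOTE: ordering and equality compare mtime only (total_ordering
    # derives the remaining comparisons); __hash__ covers all fields.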
    def __lt__(self, other):
        return self.mtime < other.mtime

    def __eq__(self, other):
        return self.mtime == other.mtime

    def __hash__(self):
        return hash((self.id, self.filename, self.size, self.mtime))


class AcronisRestoreItem(FileData):
    def __init__(self, item, restore_prefix, destination_folder):
        super().__init__(
            item_id=item.id,
            filename=item.filename,
            size=item.size,
            mtime=item.mtime)
        self.restore_prefix = restore_prefix
        self.destination_folder = destination_folder

    @property
    def restore_path(self):
        # /tmp/tmpdir234234/home/user/public_html/blog/index.php
        return os.path.join(
            self.destination_folder,
            os.path.relpath(self.filename, self.restore_prefix)
        )

    @classmethod
    def get_chunks(
        cls, items, n: int, temp_dir: str
    ) -> Iterable[List["AcronisRestoreItem"]]:
        common_prefix = common_path(items)
        offset = 0
        chunk = items[offset: offset + n]
        while chunk:
            restore_prefix = common_path(chunk)
            destination_folder = os.path.join(
                temp_dir, os.path.relpath(restore_prefix, common_prefix)
            )
            yield [
                cls(item, restore_prefix, destination_folder)
                for item in chunk
            ]
            offset += n
            chunk = items[offset: offset + n]


def common_path(items):
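    # os.path.commonpath() of a single file would be the file path itself,
    # so use its directory instead as the prefix.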
    if len(items) == 1:
        return os.path.dirname(items[0].filename)

    return os.path.commonpath([item.filename for item in items])


class AcronisBackup:
    _contents_tpl = (
        "archives/<patharchiveId>/backups/<pathbackupId>/items?"
        "type={}&machineId={}&backupId={}&id={}"
    )
    _restore_session_tpl = "archives/downloads?machineId={}"
    _download_item_tpl = (
        "archives/downloads/{}/<pathfileName>?"
        "fileName={}&format={}&machineId={}&start_download=1"
    )
    _draft_request_tpl = "recovery/plan_drafts?machineId={}"
    _target_loc_tpl = "recovery/plan_drafts/{}/target_location?machineId={}"
    _create_recovery_plan_tpl = "recovery/plan_drafts/{}"
    _run_recovery_plan_tpl = "recovery/plan_operations/run?machineId={}"
    default_download_dir = '/tmp'
    jheaders = {"Content-Type": "application/json"}
    restore_fmt = "PLAIN"
    chunk_size = 50

    def __init__(self, conn, host_id, backup_id, created):
        self._conn = conn
        self.host_id = host_id
        self.backup_id = backup_id
        self.created = created
        logging.getLogger(__name__).addHandler(logging.NullHandler())

    def __str__(self):
        return "{}".format(self.created.isoformat())

    def __repr__(self):
        return "<{__class__.__name__} (created={created!r})>".format(
            __class__=self.__class__, **self.__dict__
        )

    def __eq__(self, other):
        return self.created == other.created

    def _get_draft_request(self, items):
        payload = {
            'backupId': self.backup_id,
            'operationId': 'rec-draft-2-4',
            'machineId': self.host_id,
            'resourcesIds': [item.id for item in items]}
        url = self._draft_request_tpl.format(self.host_id)
        response = self._conn.post(url, headers=self.jheaders, json=payload)
        check_response(response)
        return response.json()

    def _set_target_location(self, draft, destination_folder: str):
        payload = {
            'machineId': self.host_id,
            'type': 'local_folder',
            'path': destination_folder}
        url = self._target_loc_tpl.format(draft.get('DraftID'), self.host_id)
        response = self._conn.put(url, headers=self.jheaders, json=payload)
        check_response(response)

    def _create_recovery_plan(self, draft):
        payload = {'operationId': 'rec-createplan-1-1'}
        url = self._create_recovery_plan_tpl.format(draft.get('DraftID'))
        r = self._conn.post(url, headers=self.jheaders, json=payload)
        check_response(r)
        return r.json()

    def _run_recovery_plan(self, plan):
        payload = {'machineId': self.host_id,
                   'operationId': 'noWait',
                   'planId': plan.get('PlanID')}
        url = self._run_recovery_plan_tpl.format(self.host_id)
        r = self._conn.post(url, headers=self.jheaders, json=payload)
        check_response(r)

    def _get_restore_session(self, file_path):
        """
        Gets session for downloading a selected item
        :return: str -> session ID
        """
        url = self._restore_session_tpl.format(self.host_id)
        json_params = {
            'format': self.restore_fmt,
            'machineId': self.host_id,
            'backupId': self.backup_id,
            'backupUri': self.backup_id,
            'items': [file_path]}
        r = self._conn.post(url, json=json_params)
        check_response(r)
        return r.json().get('SessionID')

    def _download_backup_item(self, session_id, item: AcronisRestoreItem):
        """
        Downloads the chosen item from the Acronis backup to the current host
        :param session_id: str -> download session token
        """
        file_name = os.path.basename(item.filename)
        url = self._download_item_tpl.format(
            session_id, file_name, self.restore_fmt, self.host_id
        )
        r = self._conn.get(url)
        check_response(r)
        os.makedirs(
            os.path.dirname(item.restore_path), mode=0o700, exist_ok=True
        )
        with open(item.restore_path, "wb") as f:
            f.write(r.content)

    def _get_path_item(self, host_id, backup_id, wanted_path, path_id=None):
        """
        Recursively gets path items
        :param wanted_path: str -> file path to restore
        :param path_id: str -> acronis path ID or None
        :return: dict -> found backup item or None
        """
        wanted_path_parents = Path(wanted_path).parents

        try:
            result = self._get_path_item_inner(host_id, backup_id, path_id)
            if not result:
                return  # no data, so there is nothing to search
        except RequestFailed:
            return

        for item in result:
            item_path = _strip_device_name(item.get('fullPath'))
            if item_path == wanted_path:  # We've found item we search for
                return item

            if Path(item_path) == Path('/'):  # If current path is a root
                # path, it means we selected some volume. Try to find the
                # wanted file in that volume; if that fails, we have to
                # look in another volume
                return_value = self._get_path_item(
                    host_id, backup_id, wanted_path, item.get('id')
                )
                if return_value:
                    # We found file in the current volume
                    return return_value
                else:
                    # Look in another volume
                    continue

            elif Path(item_path) in wanted_path_parents:
                # we're on the right track: descend further
                return self._get_path_item(
                    host_id, backup_id, wanted_path, item.get('id')
                )

    def __hash__(self):  # quick hack for lru_cache
        return hash((type(self), self.host_id, self.backup_id))

    @functools.lru_cache(maxsize=128)
    def _get_path_item_inner(self, host_id, backup_id, path_id=None):
        """
        Gets path items
        :param host_id: str -> acronis host ID
        :param backup_id: str -> acronis backup ID
        :param path_id: str -> acronis path ID or None
        :return: list -> found backup items
        :raise: RequestFailed
        """
        if path_id is None:
            path_id = ''
        backup_type = 'files'
        url = self._contents_tpl.format(
            backup_type, host_id, urlparse.quote_plus(backup_id),
            urlparse.quote_plus(path_id))

        params_for_json = {
            'id': path_id,
            'backupId': backup_id,
            'machineId': host_id,
            'type': backup_type}
        try:
            r = self._conn.post(url, json=params_for_json)
        except requests.exceptions.ConnectionError as e:
            logging.error('Fetching backup data failure: %s', e)
            # we don't want it to get cached
            raise RequestFailed(
                'Fetching backup data failure: {}'.format(e)) from e
        if r.status_code != 200:
            raise RequestFailed(
                'Fetching backup data failure: status code {}'.format(
                    r.status_code,
                ),
            )

        result = r.json()
        if result.get('total') == 0 or len(result.get('data', [])) == 0:
            return []

        return [{
            'id': item.get('id'),
            'fullPath': item.get('fullPath'),
            'size': item.get('size'),
            'lastChange': item.get('lastChange'),
        } for item in result.get('data', [])]

    def _get_backup_contents(self, paths):
        """
        Frontend for recursive backup contents getter
        :param paths: list -> list of file paths to restore
        """
        runner = functools.partial(
            self._get_path_item, self.host_id, self.backup_id)
        total = len(paths)
        with Pool(min(total, 5)) as pool:
            results = pool.map(runner, paths)
        self.contents = []
        for idx, value in enumerate(results):
            if value is None:
                raise FileNotFoundError(paths[idx])
            self.contents.append(
                FileData(
                    item_id=value.get('id'),
                    filename=value.get('fullPath'),
                    size=value.get('size'),
                    mtime=helpers.DateTime(value.get('lastChange', '')),
                )
            )

    def file_data(self, path):
        result = self._get_path_item(self.host_id, self.backup_id, path)
        if result is None:
            raise FileNotFoundError(path)
        return FileData(
            item_id=result.get('id'),
            filename=_strip_device_name(result.get('fullPath')),
            size=result.get('size'),
            mtime=helpers.DateTime(result.get('lastChange', '')),
        )

    @staticmethod
    def wait_for_arrival(items, timeout):
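        """
        Polls until every item exists at its restore_path with the
        expected size, or the timeout expires
        :return: set -> items that have not arrived in time
        """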
        yet_to_arrive = set(items)
        start = time.monotonic()
        while time.monotonic() - start < timeout:
            yet_to_arrive = {
                item
                for item in yet_to_arrive
                if not (
                    os.path.exists(item.restore_path)
                    and item.size == os.stat(item.restore_path).st_size
                )
            }
            if not yet_to_arrive:
                break
            time.sleep(1)
        return yet_to_arrive

    def restore(
        self,
        items: List[FileData],
        destination_folder: Optional[str],
        timeout=600,
        use_download=False,
    ) -> Dict[str, str]:
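        """
        Restores items into destination_folder, either via per-file
        download sessions (use_download=True) or via bulk recovery plans
        :return: dict -> mapping of local restore paths to backup paths
        """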
        if not items:
            return {}
        destination_folder = destination_folder or self.default_download_dir
        if use_download:
            # FIXME: figure out how to download the items in bulk as well
            restore_prefix = common_path(items)
            restore_items = [
                AcronisRestoreItem(item, restore_prefix, destination_folder)
                for item in items
            ]
            for item in restore_items:
                session_id = self._get_restore_session(item.filename)
                self._download_backup_item(session_id, item)
        else:
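            # Bulk path: group the items into recovery-plan chunks so each
            # plan restores at most chunk_size files into a folder that
            # mirrors the chunk's common path.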
            chunk_list = list(
                AcronisRestoreItem.get_chunks(
                    items, self.chunk_size, destination_folder
                )
            )
            for chunk in chunk_list:
                recovery_draft = self._get_draft_request(chunk)
                self._set_target_location(
                    recovery_draft, chunk[0].destination_folder
                )
                recovery_plan = self._create_recovery_plan(recovery_draft)
                self._run_recovery_plan(recovery_plan)

            restore_items = list(chain.from_iterable(chunk_list))

        did_not_arrive = self.wait_for_arrival(restore_items, timeout)
        if did_not_arrive:
            raise FileDeliverError(
                "Some files ({}) have not been "
                "delivered within the given timeout ({}s)"
                .format(did_not_arrive, timeout)
            )
        for item in restore_items:
            atime = helpers.DateTime.now().timestamp()
            mtime = item.mtime.timestamp()
            os.utime(item.restore_path, (atime, mtime))

        return {item.restore_path: item.filename for item in restore_items}

    def close(self):
        pass


class AcronisAPI:
    _archives_tpl = ('locations/<pathlocationId>/archives?'
                     'locationId={}&machineId={}')
    _backups_tpl = ('archives/<patharchiveId>/backups?'
                    'archiveId={}&machineId={}')
    _region_pattern = re.compile(
        r'(?P<region>\w+)(?:-cloud\.acronis|\.cloudlinuxbackup)\.com'
    )
    _backup_and_recovery_config_path = '/etc/Acronis/BackupAndRecovery.config'

    backup_log_path = '/var/restore_infected/acronis_backup.log'

    default_start_time = {'hour': 5, 'minute': 0, 'second': 0}

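    # Backup options that wire CloudLinux's pre-freeze/post-thaw scripts
    # around the data-capture phase, keeping MySQL consistent during the
    # snapshot; the plain pre/post command hooks stay disabled.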
    mysql_freeze_options = {
        'prePostCommands': {
            'preCommands': {
                'command': '',
                'commandArguments': '',
                'continueOnCommandError': False,
                'waitCommandComplete': True,
                'workingDirectory': '',
            },
            'postCommands': {
                'command': '',
                'commandArguments': '',
                'continueOnCommandError': False,
                'waitCommandComplete': True,
                'workingDirectory': '',
            },
            'useDefaultCommands': True,
            'usePreCommands': False,
            'usePostCommands': False,
        },
        'prePostDataCommands': {
            'preCommands': {
                'command':
                    '/usr/libexec/cloudlinux-backup/pre-mysql-freeze-script',
                'commandArguments': '',
                'continueOnCommandError': False,
                'waitCommandComplete': True,
                'workingDirectory': '',
            },
            'postCommands': {
                'command':
                    '/usr/libexec/cloudlinux-backup/post-mysql-thaw-script',
                'commandArguments': '',
                'continueOnCommandError': False,
                'waitCommandComplete': True,
                'workingDirectory': '',
            },
            'useDefaultCommands': False,
            'usePreCommands': True,
            'usePostCommands': True,
        }
    }

    scheme = {
        'schedule': {
            'daysOfWeek': [
                'sunday',
                'monday',
                'tuesday',
                'wednesday',
                'thursday',
                'friday',
                'saturday',
            ],
            'startAt': default_start_time,
            'type': 'weekly'
        },
        'type': 'alwaysIncremental'
    }

    def __init__(self, hostname=None, subscribe=False):
        self.hostname = hostname if hostname else socket.gethostname()
        self._conn = AcronisConnector.connect(self.hostname)
        if subscribe:
            self._sid = self._conn.subscription().get('id')
            if self._sid is None:
                raise Exception("Could not get subscription ID")
        logging.getLogger(__name__).addHandler(logging.NullHandler())

    @classmethod
    def make_initial_backup(cls, plan_name='CloudLinux Backup'):
        _class = cls(subscribe=True)
        _class._get_hostinfo()
        plan = _class._select_plan()  # Pick the first plan for this host
        if plan is not None:
            return _class._run_backup_plan(plan['id'])
        draft_id = _class._set_backup_plan_draft()
        params = {'planName': plan_name}
        _class._set_backup_plan_params(draft_id, params)
        _class._set_backup_plan_options(draft_id, cls.mysql_freeze_options)
        _class._set_backup_plan_scheme(draft_id, cls.scheme)
        plan_id = _class._create_backup_plan(draft_id)
        _class._run_backup_plan(plan_id)

    @classmethod
    def pull_backups(cls, until=None):
        _class = cls()
        _class._get_hostinfo()
        _class._get_host_archives()
        _class._get_archive_backups(until)
        return _class.backup_list

    @classmethod
    def migration_fix_backup_plan(cls):
        a = cls(subscribe=True)
        a._get_hostinfo()

        obsolete_time = {'hour': 18, 'minute': 0, 'second': 0}

        for plan in a._get_cloudlinux_plans():
            draft_id = a._create_draft_from_plan(plan)
            a._set_backup_plan_options(draft_id, cls.mysql_freeze_options)

            if cls._get_plan_time(plan) == obsolete_time:
                a._set_backup_plan_scheme(draft_id, cls.scheme)

            a._save_draft(draft_id)

    def _get_hostid(self, config_path=None):
        if hasattr(self, 'hostid'):
            return self.hostid

        if not config_path:
            config_path = self._backup_and_recovery_config_path

        et = ElementTree.parse(config_path)
        root = et.getroot()

        for settings in root:
            if settings.tag == 'key' and settings.get('name') == 'Settings':
                break
        else:
            raise Exception("Can't find 'Settings' key entry in config file "
                            "'%s'" % config_path)

        for machine_manager in settings:
            if machine_manager.tag == 'key' and \
                    machine_manager.get('name') == 'MachineManager':
                break
        else:
            raise Exception("Can't find 'MachineManager' key entry in "
                            "'Settings'")

        for value in machine_manager:
            if value.tag == 'value' and \
                    value.get('name') == 'MMSCurrentMachineID':
                break
        else:
            raise Exception("Can't find 'MMSCurrentMachineID' value entry in "
                            "'MachineManager'")

        self.hostid = value.text.strip().strip('"')
        return self.hostid

    def _refresh_hostinfo(self):
        """
        Refreshes the existing hostinfo or saves a new one if not present.
        Returns True if successful, False otherwise.
        """
        hostid = self._get_hostid()
        r = self._conn.get('resources')
        check_response(r)
        resources = r.json()
        for res in resources.get('data', []):
            if res.get('type') == 'machine' and res.get('hostId') == hostid:
                self.hostinfo = res
                return True

        return False

    def _get_hostinfo(self):
        """
        Gets backup host info from Acronis.
        Raises an exception unless the host is found.
        """
        if hasattr(self, 'hostinfo'):
            return self.hostinfo
        if not _repeat(self._refresh_hostinfo):
            raise Exception("No backup for host {}".format(self.hostname))

    def get_backup_progress(self):
        """
        Returns the progress of a running backup as a percentage.
        """
        if self.is_backup_running():
            progress = self.hostinfo.get('progress')
            if progress:
                return int(progress)

    def get_backup_status(self):
        """
        Returns last backup status.
            * ok - backup succeeded
            * error - backup failed
            * unknown - there were no backups
            * warning - backup succeeded, but there are alerts
        """
        self._refresh_hostinfo()

        return self.hostinfo['status']

    def is_backup_running(self):
        """
        Returns True if there is a backup currently running or False otherwise.
        """
        if self._refresh_hostinfo():
            if self.hostinfo['state']:
                return self.hostinfo['state'][0] == 'backup'

            return False

        raise Exception("No backup for host {}".format(self.hostname))

    def get_alerts(self, as_json=True):
        """
        Get alerts for the current machine
        :param as_json: set to False to get alerts in human-readable form
        :return list: list of alerts for the machine this backup belongs to
        """
        r = self._conn.get('alerts')
        check_response(r)
        alerts_json = r.json()
        alerts = [alert
                  for alert in alerts_json.get('data', [])
                  if alert.get('meta', {}).get('machineName') == self.hostname]
        if as_json:
            return alerts

        alerts_as_str = ''
        for alert in alerts:
            data = ': '.join(
                filter(None, [alert.get('title'), alert.get('description')]))
            if data:
                if alerts_as_str:
                    alerts_as_str += '\n'
                alerts_as_str += data

        return alerts_as_str

    def _get_host_archives(self, location='cloud'):
        """
        Gets host archives. Relevant archive names are expected to start
        with the hostname
        :param location: str -> location where archive is saved
        """
        self.archives = []
        if not hasattr(self, 'hostinfo'):
            raise Exception('No host data. Has "_get_hostinfo" been run?')
        host_id = self.hostinfo.get('hostId')
        url = self._archives_tpl.format(location, host_id)
        r = self._conn.get(url)
        check_response(r)
        archives = r.json()
        for archive in archives.get('data', []):
            if any([archive.get('archiveName', '').startswith(self.hostname),
                    archive.get('archiveMachineId') == host_id]):
                self.archives.append(archive)

    def _get_archive_backups(self, until=None):
        """
        Gets list of backups (not contents!) from the archive
        :param until: str -> timestamp
        """
        host_id = self.hostinfo.get('hostId')
        archive_ids = [i['id'] for i in self.archives]
        if until is not None and isinstance(until, str):
            until = helpers.DateTime(until)
        self.backup_list = []
        for archive_id in archive_ids:
            url = (self._backups_tpl.format(
                urlparse.quote_plus(archive_id), host_id))
            r = self._conn.get(url)
            if not r.ok:
                continue
            for backup in r.json().get('data', []):
                created = helpers.DateTime(backup.get('creationTime'))
                if until is not None and created < until:
                    continue
                self.backup_list.append(
                    AcronisBackup(
                        conn=self._conn,
                        host_id=host_id,
                        backup_id=backup.get('id'),
                        created=created))
        self.backup_list.sort(
            key=operator.attrgetter('created'), reverse=True)

    def _apply_default_plan(self):
        payload = {
            'resourcesIds': [self.hostinfo.get('id')]
        }
        r = self._conn.post('resource_operations/apply_default_backup_plan',
                            headers={'Content-Type': 'application/json'},
                            json=payload)
        return r.status_code

    def _get_applicable_plans(self):
        payload = {
            'resourcesIds': [self.hostinfo.get('id')]
        }
        r = self._conn.post(
            'backup/plan_draft_operations/get_applicable_plans',
            headers={'Content-Type': 'application/json'},
            json=payload,
        )
        check_response(r)
        return r.json().get('Plans', [])

    def _get_plans(self):
        if hasattr(self, '_backup_plans'):
            return self._backup_plans
        r = self._conn.get('backup/plans',
                           headers={'Content-Type': 'application/json'})
        check_response(r)
        self._backup_plans = r.json()['data']
        return self._backup_plans

    def _select_plan(self):
        if not hasattr(self, '_backup_plans'):
            self._get_plans()
        for plan in self._backup_plans:
            for source in plan['sources']['data']:
                hostid = self.hostinfo.get('hostId')
                source_id = self.hostinfo.get('id')
                if source.get('hostID') == hostid or \
                        hostid in source.get('machineId', '') or \
                        source_id == source.get('id', ''):
                    return plan

    def _set_backup_plan_draft(self):
        url = 'backup/plan_drafts'
        payload = {
            'subscriptionId': self._sid,
            'operationId': 'bc-draft-{}-0'.format(self._sid),
            'language': 'en',
            'resourcesIds': [self.hostinfo.get('id')],
            'defaultStartTime': self.default_start_time}
        return self._post_pop_request(url, payload)['DraftID']

    def _set_backup_plan_params(self, draft_id, payload):
        url = 'backup/plan_drafts/{}/parameters'.format(draft_id)
        headers = {'Content-Type': 'application/json'}
        r = self._conn.put(url, headers=headers, json=payload)
        check_response(r)

    def _set_backup_plan_options(self, draft_id, payload):
        url = 'backup/plan_drafts/{}/options'.format(draft_id)
        headers = {'Content-Type': 'application/json'}
        r = self._conn.put(url, headers=headers, json=payload)
        check_response(r)

    def _set_backup_plan_scheme(self, draft_id, payload):
        url = 'backup/plan_drafts/{}/scheme'.format(draft_id)
        headers = {'Content-Type': 'application/json'}
        r = self._conn.put(url, headers=headers, json=payload)
        check_response(r)

    def _post_pop_request(self, url, payload):
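        """
        POSTs a request and pops the asynchronous result from the
        subscription queue, retrying until data arrives
        """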
        headers = {'Content-Type': 'application/json'}
        r = self._conn.post(url, headers=headers, json=payload)
        check_response(r)

        params = {'action': 'pop', 'timeout': 10}
        data = _repeat(
            self._conn.subscription, **{'sid': self._sid, 'params': params})
        return data[0]['data']['result']

    def _create_backup_plan(self, draft_id):
        url = 'backup/plan_drafts_operations/create_plan'
        payload = {
            'subscriptionId': self._sid,
            'operationId': 'bc-createplan-{}-1'.format(self._sid),
            'draftId': draft_id}
        return self._post_pop_request(url, payload)['PlanID']

    @staticmethod
    def _get_plan_time(plan):
        return plan["scheme"].get("schedule", {}).get("startAt")

    @staticmethod
    def _get_plan_options(plan):
        return plan.get('options', {}).get('backupOptions')

    @classmethod
    def _is_cloudlinux_plan(cls, plan):
        return 'cloudlinux' in str(cls._get_plan_options(plan))

    def _get_cloudlinux_plans(self):
        cl_plans = []
        for plan in self._get_plans():
            if self._is_cloudlinux_plan(plan):
                cl_plans.append(plan)
        return cl_plans

    def _create_draft_from_plan(self, plan):
        url = 'backup/plan_drafts'
        payload = {
            'language': 'en',
            'subscriptionId': self._sid,
            'operationId': 'bc-draft-{}-1'.format(self._sid),
            'planId': plan.get('id')}
        return self._post_pop_request(url, payload)['DraftID']

    def _save_draft(self, draft_id):
        url = 'backup/plan_drafts_operations/save_plan'
        payload = {
            'subscriptionId': self._sid,
            'operationId': 'bc-saveplan-{}-1'.format(self._sid),
            'draftId': draft_id}
        return self._post_pop_request(url, payload)

    def _run_backup_plan(self, plan_id):
        payload = {
            'planId': plan_id,
            'resourcesIds': [self.hostinfo.get('id')]}
        r = self._conn.post('backup/plan_operations/run',
                            headers={'Content-Type': 'application/json'},
                            json=payload)
        check_response(r)

    def get_info(self):
        """
        Get info from Acronis
        :return: dict
        """
        storage_info = self._conn.get('online_storage_information')
        check_response(storage_info)

        storage_info_data = storage_info.json()
        usage = storage_info_data.get("usage", {}).get("storageSize")

        if usage is None:
            for offering_item in storage_info_data.get("offering_items", []):
                if offering_item.get("usage_name") == "total_storage":
                    usage = offering_item.get("value")
                    break

        self._get_hostinfo()
        plan = self._select_plan()
        if plan and plan["enabled"]:
            scheme = plan["scheme"]
            if scheme["type"] == "custom":
                schedule = scheme.get("scheduleItems", [None])[0]
            else:
                schedule = scheme.get("schedule")
        else:
            schedule = None

        region_match = self._region_pattern.search(self._conn.base_url)
        region = region_match.groupdict()['region'] if region_match else None

        info_data = {
            'usage': usage,  # space used by backups
            'schedule': schedule,
            'region': region,
        }
        token_data = self._conn.get_saved_token()
        for key in 'username', 'timestamp':
            info_data[key] = token_data.get(key)
        return info_data


class AcronisClientInstaller:
    ACRONIS_BIN_LINK = '{endpoint}/bc/api/ams/links/agents/' \
                       'redirect?language=multi&system=linux&' \
                       'architecture={arch}&productType=enterprise'

    ACRONIS_BIN = 'acronis.bin'

    REG_BIN_OBSOLETE = '/usr/lib/Acronis/BackupAndRecovery/AmsRegisterHelper'
    REG_CMD_OBSOLETE = '{bin} register {endpoint} {login} {password}'

    REG_BIN = '/usr/lib/Acronis/RegisterAgentTool/RegisterAgent'
    REG_CMD = '{bin} -o register -t cloud -a {endpoint} ' \
              '-u {login} -p {password}'

    ACRONIS_DEPS = ('gcc', 'make', 'rpm')
    ACRONIS_INSTALL_BIN_CMD = './{bin} ' \
                              '--auto ' \
                              '--rain={endpoint} ' \
                              '--login={login} ' \
                              '--password={password}' \
                              '{optional}'
    MAKE_EXECUTABLE_CMD = 'chmod +x {}'
    INSTALL_LOG = '/var/restore_infected/acronis_installation_{time}.log'

    DTS_REPO_URL = 'https://people.centos.org/tru/devtools-2/devtools-2.repo'
    DTS_DEPS = ('devtoolset-2-gcc', 'devtoolset-2-binutils')
    DTS_BIN_PATH = '/opt/rh/devtoolset-2/root/usr/bin'

    YUM_REPO_DIR = '/etc/yum.repos.d'

    DKMS_CONFIG = '/etc/dkms/framework.conf'
    DKMS_PATH_VAR = 'export PATH={path}:$PATH' + os.linesep

    UNINSTALL_BIN = '/usr/lib/Acronis/BackupAndRecovery/uninstall/uninstall'
    UNINSTALL_CMD = (UNINSTALL_BIN, '--auto')

    def __init__(self, server_url, log_to_file=True):
        self.server_url = server_url
        self.logger = logging.getLogger(self.__class__.__name__)
        if log_to_file:
            log_file = self.INSTALL_LOG.format(time=time.time())
            self.logger.setLevel(logging.INFO)
            self.logger.addHandler(logging.FileHandler(log_file))

        self.arch = '64'
        if not platform.machine().endswith('64'):
            self.arch = '32'

        self.pm = package_manager.detect()

    def _exec(self, command):
        self.logger.debug('exec: `%s`', command)
        process = Popen(command.split(' '), stdout=PIPE, stderr=PIPE)
        out, err = process.communicate()
        exit_code = process.returncode
        self.logger.debug('Return code: %d\n\tSTDOUT:\n%s\n\tSTDERR:\n%s',
                          exit_code,
                          out.decode('utf-8').strip(),
                          err.decode('utf-8').strip())

        return exit_code, out.decode('utf-8')

    def _download_binary(self):
        self.logger.info('\tSaving from %s for x%s to acronis.bin',
                         self.server_url, self.arch)
        response = requests.get(
            self.ACRONIS_BIN_LINK.format(endpoint=self.server_url,
                                         arch=self.arch), allow_redirects=True,
            stream=True)

        response.raise_for_status()

        with open(self.ACRONIS_BIN, 'wb') as handle:
            for block in response.iter_content(1024):
                handle.write(block)

    @staticmethod
    def _check_rlimit_stack():
        """
        Acronis agent has wrappers for its binaries that change the stack
        size to 2048. Some kernels have a bug preventing us from setting
        this size lower than 4096
        (https://bugzilla.redhat.com/show_bug.cgi?id=1463241).
        This is a simple check to detect such kernels (normally /bin/true
        always succeeds and returns nothing)
        """
        try:
            p = Popen(['/bin/true'], preexec_fn=_pre_exec_check)
            p.wait()
        except OSError as e:
            if getattr(e, 'strerror', '') == 'Argument list too long':
                return False
            raise  # reraise if we face another error message
        return True

    def _install_binary(self, username, password, tmp_dir=None):
        self.logger.info('\tMaking %s executable', self.ACRONIS_BIN)
        self._exec(self.MAKE_EXECUTABLE_CMD.format(self.ACRONIS_BIN))

        self.logger.info('\tInstalling binary')
        res = self._exec(
            self.ACRONIS_INSTALL_BIN_CMD.format(
                bin=self.ACRONIS_BIN,
                endpoint=self.server_url,
                login=username,
                password=password,
                optional=' --tmp-dir=' + tmp_dir if tmp_dir else ''
            ))

        return res

    @classmethod
    def is_agent_installed(cls):
        return os.path.exists(cls.UNINSTALL_BIN)

    def install_client(self, username, password, force=False, tmp_dir=None):
        self.logger.info('Checking if agent is installable')
        if not self._check_rlimit_stack():
            raise Exception(
                "The backup agent cannot be installed on this system "
                "because of a bug in the current running kernel "
                "(see https://bugzilla.redhat.com/show_bug.cgi?id=1463241). "
                "Update kernel and try again. "
                "Use KernelCare to apply kernel updates without a reboot -- "
                "https://kernelcare.com/"
            )
        self.logger.info('Searching for installed agents')
        installed = self.is_agent_installed()

        if installed and not force:
            raise ClientAlreadyInstalledError(
                'Looks like Acronis backup is already installed. '
                'Use --force to override')

        deps = self.ACRONIS_DEPS
        kernel_dev_args = ()

        if self._is_hybrid_kernel():
            self.logger.info('Hybrid Kernel detected: installing devtoolset')
            deps, kernel_dev_args = self._enable_devtoolset_repo()
            self._enable_devtoolset_dkms()

        self.logger.info('Installing dependencies')
        try:
            self.pm.install(*deps)
        except package_manager.PackageManagerError as e:
            raise Exception(
                'Error installing dependencies: {}'.format(e.stderr.strip())
            )

        self.logger.info('Kernel package installed: %s',
                         self.pm.kernel_name_full)
        self.logger.info('Checking if %s is installed.',
                         self.pm.kernel_dev_name_full)
        if not self.pm.is_kernel_dev_installed():
            self.logger.info('Installing %s', self.pm.kernel_dev_name_full)
            try:
                self.pm.install_kernel_dev(*kernel_dev_args)
            except package_manager.PackageManagerError:
                raise Exception('{} package could not be found. '
                                'Please install it manually or try with '
                                'a different kernel.'
                                .format(self.pm.kernel_dev_name_full))

        self.logger.info('%s is installed.', self.pm.kernel_dev_name_full)

        self.logger.info('Downloading Acronis binary')
        self._download_binary()

        self.logger.info('Installing binary')
        ret, out = self._install_binary(username, password, tmp_dir)

        self.logger.info(out)

        if ret != 0:
            raise Exception('Error while installing binary: {}'.format(out))

    def register(self, username, password):
        self.logger.info(
            'Registering on {endpoint}'.format(endpoint=self.server_url)
        )

        if os.path.exists(self.REG_BIN):
            reg_bin, reg_cmd = self.REG_BIN, self.REG_CMD
        elif os.path.exists(self.REG_BIN_OBSOLETE):
            reg_bin, reg_cmd = self.REG_BIN_OBSOLETE, self.REG_CMD_OBSOLETE
        else:
            raise Exception('No registration binary found')

        rc, out = self._exec(reg_cmd.format(
            bin=reg_bin,
            endpoint=self.server_url,
            login=username,
            password=password,
        ))
        if rc != 0:
            raise Exception('Error while registering: {}'.format(out))
        self.logger.info('Registered successfully!')

    def _is_hybrid_kernel(self):
        return '.el6h.' in self.pm.kernel_ver

    def _enable_devtoolset_repo(self):
        yum_repo_file = Path(self.YUM_REPO_DIR) / Path(self.DTS_REPO_URL).name

        with yum_repo_file.open('wb+') as f:
            with closing(requests.get(self.DTS_REPO_URL, stream=True)) as r:
                r.raise_for_status()
                for content in r.iter_content(chunk_size=1024):
                    f.write(content)

        return (
            self.ACRONIS_DEPS + self.DTS_DEPS,
            ('--enablerepo', 'cloudlinux-hybrid-testing'),
        )

    def _enable_devtoolset_dkms(self):
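        # Point DKMS kernel-module builds at the devtoolset-2 toolchain by
        # exporting its bin directory on PATH in the DKMS framework config,
        # dropping any stale devtoolset PATH line first.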
        dkms_config = Path(self.DKMS_CONFIG)
        config = []

        if dkms_config.exists():
            with dkms_config.open() as r:
                config = [
                    line
                    for line in r.readlines()
                    if not (
                        line.startswith("export PATH") and "devtoolset" in line
                    )
                ]

        config.append(self.DKMS_PATH_VAR.format(path=self.DTS_BIN_PATH))

        dkms_config.parent.mkdir(parents=True, exist_ok=True)
        with dkms_config.open('w+') as w:
            w.writelines(config)

    @classmethod
    def uninstall(cls):
        if cls.is_agent_installed():
            run(cls.UNINSTALL_CMD)


def _strip_device_name(path, patt=re.compile(r'^[A-Za-z][A-Za-z0-9]+?:')):
    """
    Removes the disk prefix (like 'vda5:') and prepends a forward slash
    if missing, e.g. 'vda5:/home/user' -> '/home/user'
    :param path: str -> path to handle
    :param patt: regex obj -> pattern to match against
    :return: str -> modified path
    """
    path = patt.sub('', path)
    if '/' not in path:
        path = "/{}".format(path)
    return path


def _pre_exec_check():
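    # Shrink the stack rlimit before exec(); on kernels affected by the
    # bug referenced in _check_rlimit_stack, the exec then fails with
    # "Argument list too long".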
    resource.setrlimit(resource.RLIMIT_STACK, (4096, 4096))


def _repeat(fn, *callee_args, **callee_kw):
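    """
    Calls fn until it returns a truthy value, sleeping REPEAT_WAIT
    seconds between up to REPEAT_NUM attempts
    :return: the last value returned by fn
    """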
    rv = None
    for i in range(REPEAT_NUM):
        rv = fn(*callee_args, **callee_kw)
        if rv:
            return rv
        logging.warning(
            "Callee %s returned falsy result. Attempt %d", fn.__name__, i)
        time.sleep(REPEAT_WAIT)
    return rv


@from_env(username="ACCOUNT_NAME", password="PASSWORD")
def init(
    username, password, provision=False, force=False, tmp_dir=None,
):
    AcronisConnector.create_token(username, password, force)

    server_url = AcronisConnector.get_server_url(username)
    installer = AcronisClientInstaller(server_url)

    if provision:
        installer.install_client(username, password, force, tmp_dir)
    elif installer.is_agent_installed():
        installer.register(username, password)


@asyncable
def is_agent_installed():
    return AcronisClientInstaller.is_agent_installed()


def _wait_backup_running(acr):
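    """
    Schedules a backup and waits for it to enter the running state
    :raise BackupFailed: with collected alerts if it never starts
    """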
    alerts = []

    for a in range(1, BACKUP_ATTEMPTS + 1):
        logging.warning("Starting backup, attempt %s", a)

        AcronisAPI.make_initial_backup()

        # wait until start
        for i in range(1, REPEAT_NUM + 1):
            if acr.is_backup_running():
                return
            logging.warning("Backup is not started after scheduling, "
                            "attempt %s, waiting %s seconds", i, REPEAT_WAIT)
            time.sleep(REPEAT_WAIT)

        alerts = AcronisAPI().get_alerts(as_json=False)
        if alerts:
            break

    raise BackupFailed(alerts)


@asyncable
@auth_required
@client_required
@extra
def make_initial_backup_strict():
    """
    Starts the initial backup, waits until it has started and completed,
    and logs the progress to the file specified in
    AcronisAPI.backup_log_path.
    :raise BackupFailed: if the backup did not start or failed
    """
    acr = AcronisAPI(subscribe=True)
    _wait_backup_running(acr)

    # log backup progress
    with open(acr.backup_log_path, 'w') as f:
        while acr.is_backup_running():
            progress = acr.get_backup_progress()
            if progress:
                f.write('{}\n'.format(progress))
                f.flush()
            time.sleep(5)

    if acr.get_backup_status() == 'error':
        alerts = AcronisAPI().get_alerts(as_json=False)
        raise BackupFailed(alerts)


@asyncable
@auth_required
def make_initial_backup(trace=False):
    """
    DEPRECATED
    Starts initial backup.
    If trace is True, the function waits until the backup
      is completed and logs the progress to the file specified in
      AcronisAPI.backup_log_path. Returns True if no errors occurred
      and False otherwise.
    If trace is False, always returns None.
    """
    AcronisAPI.make_initial_backup()

    if not trace:
        return None

    acr = AcronisAPI(subscribe=True)
    with open(acr.backup_log_path, 'w') as f:
        while acr.is_backup_running():
            f.write('{}\n'.format(acr.get_backup_progress()))
            f.flush()
            time.sleep(5)

        return acr.get_backup_status() != 'error'


@asyncable
@auth_required
@client_required
def is_backup_running():
    return AcronisAPI(subscribe=True).is_backup_running()


@asyncable
@auth_required
@client_required
def get_backup_progress():
    return AcronisAPI(subscribe=True).get_backup_progress()


@auth_required
@client_required
def backups(until=None, **__):
    return AcronisAPI.pull_backups(until=until)


@asyncable
@auth_required
@extra
def login_url():
    return AcronisConnector().get_login_url()


@auth_required
@client_required
def info():
    return AcronisAPI().get_info()


@auth_required
@extra
def refresh_token():
    AcronisConnector().refresh_token()


@auth_required
@extra
def migration_fix_backup_plan():
    AcronisAPI.migration_fix_backup_plan()


@asyncable
@auth_required
def get_alerts(as_json=True):
    return AcronisAPI().get_alerts(as_json)


@extra
def is_installed():
    if AcronisClientInstaller.is_agent_installed():
        return 'Acronis agent is installed'
    else:
        raise helpers.ActionError('Acronis agent is NOT installed!')


@extra
def uninstall():
    AcronisClientInstaller.uninstall()
