Example #1
    def run_user_script(self, user_script):
        if len(user_script) == 0:
            return

        script_path = "$env:TMP\\coriolis_user_script.ps1"
        try:
            utils.write_winrm_file(
                self._conn,
                script_path,
                user_script)
        except Exception as err:
            raise exception.CoriolisException(
                "Failed to copy user script to target system.") from err

        cmd = ('$ErrorActionPreference = "Stop"; powershell.exe '
               '-NonInteractive -ExecutionPolicy RemoteSigned '
               '-File "%(script)s" "%(os_root_dir)s"') % {
            "script": script_path,
            "os_root_dir": self._os_root_dir,
        }
        try:
            out = self._conn.exec_ps_command(cmd)
            LOG.debug("User script output: %s" % out)
        except Exception as err:
            raise exception.CoriolisException(
                "Failed to run user script.") from err
Example #2
def _check_ensure_volumes_info_ordering(export_info, volumes_info):
    """ Returns a new list of volumes_info, ensuring that the order of
    the disks in 'volumes_info' is consistent with the order that the
    disks appear in 'export_info[devices][disks]'
    """
    instance = export_info.get('instance_name',
                               export_info.get('name', export_info['id']))
    ordered_volumes_info = []
    for disk in export_info['devices']['disks']:
        disk_id = disk['id']
        matching_volumes = [
            vol for vol in volumes_info if vol['disk_id'] == disk_id
        ]
        if not matching_volumes:
            raise exception.CoriolisException(
                "Could not find source disk '%s' (ID '%s') in Replica "
                "volumes info: %s" % (disk, disk_id, volumes_info))
        elif len(matching_volumes) > 1:
            raise exception.CoriolisException(
                "Multiple disks with ID '%s' foind in Replica "
                "volumes info: %s" % (disk_id, volumes_info))

        ordered_volumes_info.append(matching_volumes[0])

    LOG.debug("volumes_info returned by provider for instance "
              "'%s': %s", instance, volumes_info)
    LOG.debug("volumes_info for instance '%s' after "
              "reordering: %s", instance, ordered_volumes_info)

    return ordered_volumes_info
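
A minimal, self-contained sketch of the reordering behaviour above, using made-up disk IDs and device paths (no Coriolis imports needed):

# Hypothetical export/volumes data; IDs and device paths are illustrative.
export_info = {
    "id": "instance-1",
    "devices": {"disks": [{"id": "disk-a"}, {"id": "disk-b"}]},
}
volumes_info = [
    {"disk_id": "disk-b", "volume_dev": "/dev/sdc"},
    {"disk_id": "disk-a", "volume_dev": "/dev/sdb"},
]

ordered = [
    next(vol for vol in volumes_info if vol["disk_id"] == disk["id"])
    for disk in export_info["devices"]["disks"]
]
# 'ordered' now starts with the 'disk-a' entry, matching the disk order
# in export_info, which is what the function above guarantees.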
Example #3
    def _parse_source_ssh_conn_info(self, conn_info):
        # if we get valid SSH connection info we can
        # use it to copy the binary, and potentially
        # create a SSH tunnel through which we will
        # connect to the coriolis replicator
        required = ('ip', 'username')
        port = conn_info.get('port', 22)
        password = conn_info.get('password', None)
        pkey = conn_info.get('pkey', None)
        missing = [field for field in required if field not in conn_info]
        if missing:
            raise exception.CoriolisException(
                "Missing some required fields from source replication "
                "worker VM connection info: %s" % missing)
        if not any([password, pkey]):
            raise exception.CoriolisException(
                "Either 'password' or 'pkey' for source worker VM is required "
                "to initialize the Coriolis replicator.")

        if pkey and isinstance(pkey, str):
            pkey = utils.deserialize_key(
                pkey, CONF.serialization.temp_keypair_password)

        args = {
            "hostname": conn_info["ip"],
            "username": conn_info["username"],
            "password": password,
            "pkey": pkey,
            "port": port,
        }
        return args
Example #4
    def _get_impl(self, path, disk_id):
        ssh = self._connect_ssh()
        _disable_lvm2_lvmetad(ssh)

        matching_devs = [
            v for v in self._volumes_info if v["disk_id"] == disk_id
        ]

        if not matching_devs:
            base_msg = ("Could not locate disk with ID '%s' in volumes_info" %
                        disk_id)
            LOG.error("%s: %s", base_msg, self._volumes_info)
            raise exception.CoriolisException(base_msg)
        elif len(matching_devs) > 1:
            base_msg = ("Multiple disks with ID '%s' in volumes_info" %
                        disk_id)
            LOG.error("%s: %s", base_msg, self._volumes_info)
            raise exception.CoriolisException(base_msg)

        path = matching_devs[0]["volume_dev"]
        impl = SSHBackupWriterImpl(path, disk_id)

        self._copy_helper_cmd(ssh)
        impl._set_ssh_client(ssh)
        return impl
Example #5
    def _parse_source_ssh_conn_info(self, conn_info):
        # if we get valid SSH connection info we can
        # use it to copy the binary, and potentially
        # create a SSH tunnel through which we will
        # connect to the coriolis replicator
        required = ('ip', 'username')
        port = conn_info.get('port', 22)
        password = conn_info.get('password', None)
        pkey = conn_info.get('pkey', None)
        for i in required:
            if conn_info.get(i) is None:
                raise exception.CoriolisException(
                    "missing required field: %s" % i)
        if not any([password, pkey]):
            raise exception.CoriolisException(
                "Either password or pkey is required")

        if pkey and isinstance(pkey, str):
            pkey = utils.deserialize_key(
                pkey, CONF.serialization.temp_keypair_password)

        args = {
            "hostname": conn_info["ip"],
            "username": conn_info["username"],
            "password": password,
            "pkey": pkey,
            "port": port,
        }
        return args
Example #6
    def __init__(self, name, description, schedule, enabled, expires,
                 on_success, on_error, job_callable, *args, **kw):
        """
        :param name: string: unique ID that describes this job
        :param description: string: a short description of the job
        :param schedule: dict: cron job schedule, of the form:
            {
                "minute": 1,
                "hour": 0,
                "dom": 20,
                "month": 11,
                "dow": 1
            }
        :param enabled: bool: whether or not this cron job is enabled
        :param expires: datetime: expiration date for this cron job
        :param on_success: callable: a function that is called if the job
            succeeds. It must accept the result returned by the scheduled
            function as its only parameter.
        :param on_error: callable: a function that is called if the scheduled
            function raises an exception. It must accept the exception info
            raised by the scheduled function as its only parameter. Any
            exception raised by this callback is logged and ignored.
        :param job_callable: callable: the function being scheduled. Any
            additional *args or **kw following this parameter are passed
            directly to this function.
        """

        self.name = name
        if not callable(job_callable):
            raise exception.CoriolisException("Invalid job function")

        schema = schemas.SCHEDULE_API_BODY_SCHEMA["properties"]["schedule"]
        schemas.validate_value(schedule, schema)

        if on_success and not callable(on_success):
            raise ValueError("on_success must be callable")
        if on_error and not callable(on_error):
            raise ValueError("on_error must be callable")

        self._on_success = on_success
        self._on_error = on_error
        self.schedule = schedule
        self._func = job_callable
        self._description = description
        self._args = args
        self._kw = kw
        self._enabled = enabled
        if expires:
            if not isinstance(expires, datetime.datetime):
                raise exception.CoriolisException("Invalid expires")
        self._expires = expires
        self._last_run = None
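
A hedged usage sketch of the constructor above. The class name `CronJob` is an assumption (the real name is not shown in this snippet), and the schedule dict follows the documented form:

import datetime

def _on_success(result):
    # Receives the value returned by the scheduled function.
    print("job finished: %s" % result)

def _on_error(exc_info):
    # Receives the exception info raised by the scheduled function.
    print("job failed: %s" % (exc_info,))

# All arguments are passed positionally so the trailing *args reach
# job_callable; 'CronJob' is a hypothetical name for the class above.
job = CronJob(
    "nightly-report",                      # name
    "Generate the nightly report",         # description
    {"minute": 0, "hour": 2},              # schedule: daily at 02:00
    True,                                  # enabled
    datetime.datetime(2030, 1, 1),         # expires
    _on_success,
    _on_error,
    lambda label: "done: %s" % label,      # job_callable
    "nightly")                             # extra *args for job_callable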
Example #7
    def write(self, data):
        if self._closing:
            raise exception.CoriolisException(
                "Attempted to write to a closed writer.")
        if self._exception:
            raise exception.CoriolisException(self._exception)

        payload = {
            "offset": self._offset,
            "data": data,
        }
        self._comp_q.put(payload)
        self._offset += len(data)
Example #8
def get_os_mount_tools(os_type, connection_info, event_manager,
                       ignore_devices):
    os_mount_tools = {constants.OS_TYPE_LINUX: [ubuntu.UbuntuOSMountTools],
                      constants.OS_TYPE_WINDOWS: [windows.WindowsMountTools]}

    if os_type and os_type not in os_mount_tools:
        raise exception.CoriolisException("Unsupported OS type: %s" % os_type)

    for cls in os_mount_tools.get(os_type,
                                  itertools.chain(*os_mount_tools.values())):
        tools = cls(connection_info, event_manager, ignore_devices)
        LOG.debug("Testing OS mount tools: %s", cls.__name__)
        if tools.check_os():
            return tools
    raise exception.CoriolisException("OS mount tools not found")
Example #9
    def _validate_info(self, info):
        if not isinstance(info, dict):
            raise exception.CoriolisException(
                "Invalid backup writer connection info.")
        wrt_type = info.get("backend", None)
        if wrt_type is None:
            raise exception.CoriolisException(
                "Missing backend name in connection info")
        if wrt_type not in BACKUP_WRITERS:
            raise exception.CoriolisException(
                "Invalid backup writer type: %s" % wrt_type)
        wrt_conn_info = info.get("connection_details")
        if wrt_conn_info is None:
            raise exception.CoriolisException(
                "Missing credentials in connection info")
Example #10
def exec_ssh_cmd(ssh, cmd, environment=None, get_pty=False):
    remote_str = "<undeterminable>"
    try:
        remote_str = "%s:%s" % ssh.get_transport().sock.getpeername()
    except (ValueError, AttributeError, TypeError):
        LOG.warning(
            "Failed to determine connection string for SSH connection: %s",
            get_exception_details())
    LOG.debug(
        "Executing the following SSH command on '%s' with "
        "environment %s: '%s'", remote_str, environment, cmd)

    _, stdout, stderr = ssh.exec_command(
        cmd, environment=environment, get_pty=get_pty)
    exit_code = stdout.channel.recv_exit_status()
    std_out = stdout.read()
    std_err = stderr.read()
    if exit_code:
        raise exception.CoriolisException(
            "Command \"%s\" failed on host '%s' with exit code: %s\n"
            "stdout: %s\nstd_err: %s" %
            (cmd, remote_str, exit_code, std_out, std_err))
    # Most of the commands use a pseudo-terminal, which unfortunately
    # appends a '\r' to every newline. This affects all plugins too, so the
    # best we can do for now is strip the carriage returns here.
    return std_out.replace(b'\r\n', b'\n').replace(b'\n\r', b'\n')
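
A hedged usage sketch; the `ssh` argument is a connected `paramiko.SSHClient` (which matches the `exec_command`/`get_transport` calls above), and the host and credentials are placeholders:

import paramiko

# Placeholder host and credentials.
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect("192.0.2.10", port=22, username="ubuntu", password="secret")

# Output is returned as bytes with pseudo-terminal '\r' characters stripped.
output = exec_ssh_cmd(ssh, "uname -a", environment={"LC_ALL": "C"})
print(output.decode())
ssh.close()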
Example #11
    def from_env(cls):
        """ Retuns a `LicensingClient` object instatiated using the
        following env vars:
        LICENSING_SERVER_BASE_URL="https://10.7.2.3:37667/v1"
        LICENSING_SERVER_ALLOW_UNTRUSTED="<set to anything>"
        LICENSING_SERVER_APP_ID_FILE="<path to licensing server ID file>"
        Returns None if 'LICENSING_SERVER_BASE_URL' is not defined.
        """
        base_url = os.environ.get("LICENSING_SERVER_BASE_URL")
        if base_url in ["", None, "None", "null"]:
            LOG.warn("No 'LICENSING_SERVER_BASE_URL' env var present. Cannot "
                     "instantiate licensing client.")
            return None
        allow_untrusted = os.environ.get("LICENSING_SERVER_ALLOW_UNTRUSTED",
                                         False)
        client = cls(base_url,
                     appliance_id=None,
                     allow_untrusted=allow_untrusted)
        appliance_ids = client.get_appliances()
        if not appliance_ids:
            client._appliance_id = client.create_appliance().get("id")
        elif len(appliance_ids) == 1:
            client._appliance_id = appliance_ids[0].get('id')
        else:
            raise exception.CoriolisException(
                'More than one appliance ID found.')
        client.get_licence_status()

        return client
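
A hedged usage sketch, assuming the classmethod above belongs to the `LicensingClient` class mentioned in its docstring; the env var values are placeholders:

import os

# Placeholder values; both settings are environment-specific.
os.environ["LICENSING_SERVER_BASE_URL"] = "https://licensing.example.com:37667/v1"
os.environ["LICENSING_SERVER_ALLOW_UNTRUSTED"] = "1"

client = LicensingClient.from_env()
if client is None:
    print("Licensing server not configured; skipping license checks.")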
Example #12
    def _exec_task_process(self, ctxt, task_id, task_type, origin, destination,
                           instance, task_info):
        mp_ctx = multiprocessing.get_context('spawn')
        mp_q = mp_ctx.Queue()
        mp_log_q = mp_ctx.Queue()
        p = mp_ctx.Process(target=_task_process,
                           args=(ctxt, task_id, task_type, origin, destination,
                                 instance, task_info, mp_q, mp_log_q))

        extra_library_paths = self._get_extra_library_paths_for_providers(
            ctxt, task_id, task_type, origin, destination)

        self._start_process_with_custom_library_paths(p, extra_library_paths)
        LOG.info("Task process started: %s", task_id)
        self._rpc_conductor_client.set_task_host(ctxt, task_id, self._server,
                                                 p.pid)

        self._handle_mp_log_events(p, mp_log_q)
        p.join()

        if mp_q.empty():
            raise exception.CoriolisException("Task canceled")
        result = mp_q.get(False)

        if isinstance(result, str):
            raise exception.TaskProcessException(result)
        return result
Example #13
    def wait_for_chunks(self):
        if self._cli is None:
            raise exception.CoriolisException(
                "replicator not initialized. Run init_replicator()")
        perc_steps = {}

        while True:
            status = self._cli.get_status()
            done = []
            for vol in status:
                devName = vol["device-path"]
                perc_step = perc_steps.get(devName)
                if perc_step is None:
                    perc_step = self._event_manager.add_percentage_step(
                        100,
                        message_format=("Chunking progress for disk %s: "
                                        "{:.0f}%%") % devName)
                    perc_steps[devName] = perc_step
                perc_done = vol["checksum-status"]["percentage"]
                self._event_manager.set_percentage_step(
                    perc_step, perc_done)
                done.append(int(perc_done) == 100)
            if all(done):
                break
            else:
                time.sleep(5)
Example #14
    def _add_repo(self, uri, alias):
        repos = self._get_repos()
        if repos.get(alias):
            if repos[alias] == uri:
                LOG.debug(
                    'Repo with alias %s already exists and has the same '
                    'URI. Enabling', alias)
                self._event_manager.progress_update("Enabling repository: %s" %
                                                    alias)
                self._exec_cmd_chroot(
                    'zypper --non-interactive modifyrepo -e %s' % alias)
                self._exec_cmd_chroot(
                    "zypper --non-interactive --no-gpg-checks refresh")
                return
            else:
                LOG.debug(
                    'Repo with alias %s already exists, but has a '
                    'different URI. Renaming alias', alias)
                alias = "%s%s" % (alias, str(uuid.uuid4()))

        self._event_manager.progress_update("Adding repository: %s" % alias)
        try:
            self._exec_cmd_chroot("zypper --non-interactive addrepo -f %s %s" %
                                  (uri, alias))
            self._exec_cmd_chroot(
                "zypper --non-interactive --no-gpg-checks refresh")
        except Exception as err:
            raise exception.CoriolisException(
                "Failed to add %s repo: %s. Please review logs"
                " for more details." % (alias, uri)) from err
Example #15
    def _configure_cloud_init_user_retention(self):
        cloud_cfg_paths = ["/etc/cloud/cloud.cfg"]
        cloud_cfgs_dir = "/etc/cloud/cloud.cfg.d"
        if self._test_path(cloud_cfgs_dir):
            for path in self._exec_cmd_chroot(
                    'ls -1 %s' % cloud_cfgs_dir).decode().splitlines():
                if path.endswith('.cfg'):
                    cloud_cfg_paths.append("%s/%s" % (cloud_cfgs_dir, path))

        self._event_manager.progress_update(
            "Reconfiguring cloud-init to retain original user credentials")
        for cloud_cfg_path in cloud_cfg_paths:
            if not self._test_path(cloud_cfg_path):
                LOG.warn("Could not find %s. Skipping reconfiguration." %
                         (cloud_cfg_path))
                continue

            try:
                self._exec_cmd_chroot("cp %s %s.bak" %
                                      (cloud_cfg_path, cloud_cfg_path))

                cloud_cfg = yaml.load(self._read_file(cloud_cfg_path),
                                      Loader=yaml.SafeLoader)

                cloud_cfg['disable_root'] = False
                cloud_cfg['ssh_pwauth'] = True
                cloud_cfg['users'] = None
                new_cloud_cfg = yaml.dump(cloud_cfg)
                self._write_file_sudo(cloud_cfg_path, new_cloud_cfg)
            except Exception as err:
                raise exception.CoriolisException(
                    "Failed to reconfigure cloud-init to retain user "
                    "credentials. Error was: %s" % str(err)) from err
Example #16
    def run(self, ctxt, instance, origin, destination, task_info,
            event_handler):
        event_manager = events.EventManager(event_handler)
        new_source_env = task_info.get('source_environment', {})
        if not new_source_env:
            event_manager.progress_update(
                "No new source environment options provided")
            return task_info

        source_provider = providers_factory.get_provider(
            origin["type"],
            constants.PROVIDER_TYPE_SOURCE_REPLICA_UPDATE,
            event_handler,
            raise_if_not_found=False)
        if not source_provider:
            raise exception.CoriolisException(
                "Replica source provider plugin for '%s' does not support"
                " updating Replicas" % origin["type"])

        origin_connection_info = base.get_connection_info(ctxt, origin)
        volumes_info = task_info.get("volumes_info", {})

        LOG.info("Checking source provider environment params")
        # NOTE: the `source_environment` in the `origin` is the one set
        # in the dedicated DB column of the Replica and thus stores
        # the previous value of it:
        old_source_env = origin.get('source_environment', {})
        volumes_info = (source_provider.check_update_source_environment_params(
            ctxt, origin_connection_info, instance, volumes_info,
            old_source_env, new_source_env))

        task_info['volumes_info'] = volumes_info

        return task_info
Example #17
    def _write_cloudbase_init_conf(self,
                                   cloudbaseinit_base_dir,
                                   local_base_dir,
                                   com_port="COM1",
                                   metadata_services=None,
                                   plugins=None):
        if metadata_services is None:
            metadata_services = CLOUDBASE_INIT_DEFAULT_METADATA_SVCS

        if plugins is None:
            plugins = CLOUDBASE_INIT_DEFAULT_PLUGINS
        elif not isinstance(plugins, list):
            raise exception.CoriolisException(
                "Invalid plugins parameter. Must be list.")

        LOG.info("Writing Cloudbase-Init configuration files")
        conf_dir = "%s\\conf" % cloudbaseinit_base_dir
        scripts_dir = self._get_cbslinit_scripts_dir(cloudbaseinit_base_dir)
        self._conn.exec_ps_command("mkdir '%s' -Force" % conf_dir,
                                   ignore_stdout=True)
        self._conn.exec_ps_command("mkdir '%s' -Force" % scripts_dir,
                                   ignore_stdout=True)

        conf_file_path = "%s\\cloudbase-init.conf" % conf_dir

        conf_content = (
            "[DEFAULT]\n"
            "username = Admin\n"
            "groups = Administrators\n"
            "verbose = true\n"
            "bsdtar_path = %(bin_path)s\\bsdtar.exe\n"
            "mtools_path = %(bin_path)s\n"
            "logdir = %(log_path)s\n"
            "local_scripts_path = %(scripts_path)s\n"
            "stop_service_on_exit = false\n"
            "logfile = cloudbase-init.log\n"
            "default_log_levels = "
            "comtypes=INFO,suds=INFO,iso8601=WARN,requests=WARN\n"
            "allow_reboot = false\n"
            "plugins = %(plugins)s\n"
            "debug = true\n"
            "san_policy = OnlineAll\n"
            "metadata_services = %(metadata_services)s\n"
            "logging_serial_port_settings = %(com_port)s,9600,N,8\n" % {
                "bin_path": "%s\\Bin" % local_base_dir,
                "log_path": "%s\\Log" % local_base_dir,
                "scripts_path": "%s\\LocalScripts" % local_base_dir,
                "com_port": com_port,
                "metadata_services": ",".join(metadata_services),
                "plugins": ",".join(plugins)
            })

        utils.write_winrm_file(self._conn, conf_file_path, conf_content)

        disks_script = os.path.join(utils.get_resources_bin_dir(),
                                    "bring-disks-online.ps1")

        self._write_local_script(cloudbaseinit_base_dir,
                                 disks_script,
                                 priority=99)
Example #18
    def __init__(self,
                 ip,
                 port,
                 username,
                 pkey,
                 password,
                 writer_port,
                 volumes_info,
                 cert_dir,
                 compressor_count=3):
        self._ip = ip
        self._port = port
        self._username = username
        self._pkey = pkey
        self._password = password
        self._volumes_info = volumes_info
        self._writer_port = writer_port
        self._lock = threading.Lock()
        self._id = str(uuid.uuid4())
        self._compressor_count = compressor_count
        self._writer_cmd = os.path.join("/usr/bin", _CORIOLIS_HTTP_WRITER_CMD)
        self._crt = None
        self._key = None
        self._ca = None
        if not os.path.isdir(cert_dir):
            raise exception.CoriolisException(
                "Certificates dir %s does not exist" % cert_dir)
        self._crt_dir = cert_dir
Example #19
def db_sync(engine, version=None):
    """Migrate the database to `version` or the most recent version."""
    if version is not None and int(version) < db_version(engine):
        raise exception.CoriolisException(
            _("Cannot migrate to lower schema version."))

    return migration.db_sync(engine, version=version)
Example #20
    def _wait_for_task(self, task):
        while task.info.state not in [
                vim.TaskInfo.State.success, vim.TaskInfo.State.error
        ]:
            time.sleep(.1)
        if task.info.state == vim.TaskInfo.State.error:
            raise exception.CoriolisException(task.info.error.msg)
Example #21
def write_winrm_file(conn, remote_path, content, overwrite=True):
    """This is a poor man's scp command that transfers small
    files, in chunks, over WinRM.
    """
    if conn.test_path(remote_path):
        if overwrite:
            conn.exec_ps_command('Remove-Item -Force "%s"' % remote_path)
        else:
            raise exception.CoriolisException("File %s already exists" %
                                              remote_path)
    idx = 0
    while True:
        data = content[idx:idx + 2048]
        if not data:
            break

        if isinstance(data, str):
            data = data.encode()
        asb64 = base64.b64encode(data).decode()
        cmd = ("$ErrorActionPreference = 'Stop';"
               "$x = [System.IO.FileStream]::new(\"%s\", "
               "[System.IO.FileMode]::Append); $bytes = "
               "[Convert]::FromBase64String('%s'); $x.Write($bytes, "
               "0, $bytes.Length); $x.Close()") % (remote_path, asb64)
        conn.exec_ps_command(cmd)
        idx += 2048
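
A hedged usage sketch, assuming `conn` is an already-established WinRM session wrapper exposing the `test_path` and `exec_ps_command` helpers used above; the path and script body are illustrative:

# 'conn' is assumed to be an existing WinRM session wrapper, as used above.
script_body = (
    '$ErrorActionPreference = "Stop"\n'
    'Write-Output "hello from the migrated VM"\n'
)
# The content is appended to the remote file in 2048-byte base64 chunks.
write_winrm_file(conn, "C:\\coriolis\\hello.ps1", script_body, overwrite=True)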
Example #22
    def wait_for_chunks(self):
        if self._cli is None:
            raise exception.CoriolisException(
                "replicator not initialized. Run init_replicator()")
        perc_steps = {}

        while True:
            status = self._cli.get_status()
            done = []
            for vol in status:
                devName = vol["device-path"]
                dev_size = vol['size'] / units.Mi
                perc_step = perc_steps.get(devName)
                if perc_step is None:
                    perc_step = self._event_manager.add_percentage_step(
                        "Performing chunking for disk %s (total size %.2f MB)"
                        % (devName, dev_size), 100)
                    perc_steps[devName] = perc_step
                perc_done = vol["checksum-status"]["percentage"]
                self._event_manager.set_percentage_step(perc_step, perc_done)
                done.append(int(perc_done) == 100)
            if all(done):
                break
            else:
                time.sleep(5)
Example #23
    def from_connection_info(cls, conn_info, volumes_info):
        """Instantiate a HTTP backup writer from connection info.

        Connection info has the following schema:

        {
            # IP address or hostname where we can reach the backup writer
            "ip": "192.168.0.1",
            # Backup writer port
            "port": 4433,
            "certificates": {
                # PEM encoded client certificate
                "client_crt": "",
                # PEM encoded client private key
                "client_key": "",
                # PEM encoded CA certificate we use to validate the server
                "ca_crt": ""
            }
        }
        """
        ip = conn_info.get("ip")
        port = conn_info.get("port")
        certs = conn_info.get("certificates")

        required = ["ip", "port", "certificates"]
        if not all([ip, port, certs]):
            raise exception.CoriolisException(
                "Missing required connection info: %s" % ", ".join(required))
        return cls(ip, port, volumes_info, certs)
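
For reference, a `conn_info` dict matching the documented schema might look like the following; all values are placeholders, and `HTTPBackupWriter` is an assumed name for the class defining the classmethod above:

conn_info = {
    "ip": "192.168.0.1",
    "port": 4433,
    "certificates": {
        "client_crt": "-----BEGIN CERTIFICATE-----\n...",
        "client_key": "-----BEGIN PRIVATE KEY-----\n...",
        "ca_crt": "-----BEGIN CERTIFICATE-----\n...",
    },
}
volumes_info = []  # normally produced by the disk export/replication step
writer = HTTPBackupWriter.from_connection_info(conn_info, volumes_info)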
Example #24
    def _get_ssh_tunnel(self):
        """
        gets a SSH tunnel object. Note, this does not start the tunnel,
        it simply creates the object, without actually connecting.
        """
        remote_host = self._ssh_conn_info["hostname"]
        remote_port = self._ssh_conn_info["port"]
        remote_user = self._ssh_conn_info["username"]
        local_host = "127.0.0.1"
        remote_port = self._ssh_conn_info.get("port", 22)

        pkey = self._ssh_conn_info.get("pkey")
        password = self._ssh_conn_info.get("password")
        if any([pkey, password]) is False:
            raise exception.CoriolisException(
                "Either password or pkey is required")

        server = SSHTunnelForwarder(
            (remote_host, remote_port),
            ssh_username=remote_user,
            ssh_pkey=pkey,
            ssh_password=password,
            # bind to remote replicator port
            remote_bind_address=(local_host, self._port),
            # select random port on this end.
            local_bind_address=(local_host, 0),
        )
        return server
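
A hedged sketch of how such a tunnel is typically driven, shown as if it were another method of the same class; `start()`, `stop()` and `local_bind_port` are part of `SSHTunnelForwarder`'s public API, while the surrounding usage is hypothetical:

# Hypothetical caller inside the same class.
tunnel = self._get_ssh_tunnel()
tunnel.start()
try:
    # The remote replicator port is now reachable on this random local port.
    local_port = tunnel.local_bind_port
    print("Replicator forwarded to 127.0.0.1:%s" % local_port)
finally:
    tunnel.stop()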
Example #25
    def run(self, ctxt, instance, origin, destination, task_info,
            event_handler):
        event_manager = events.EventManager(event_handler)
        destination_type = destination["type"]

        destination_connection_info = base.get_connection_info(
            ctxt, destination)
        destination_provider = providers_factory.get_provider(
            destination_type,
            constants.PROVIDER_TYPE_VALIDATE_REPLICA_IMPORT,
            event_handler,
            raise_if_not_found=False)
        if not destination_provider:
            event_manager.progress_update(
                "Replica Import Provider for platform '%s' does not support "
                "Replica input validation" % destination_type)
            return task_info

        export_info = task_info.get("export_info")
        if not export_info:
            raise exception.CoriolisException(
                "Instance export info is not set. Cannot perform "
                "Replica Import validation for destination platform "
                "'%s'" % destination_type)

        # NOTE: the target environment JSON schema should have been validated
        # upon accepting the Replica API creation request.
        target_environment = destination.get("target_environment", {})
        destination_provider.validate_replica_import_input(
            ctxt, destination_connection_info, target_environment, export_info)

        return task_info
Example #26
    def run(self, ctxt, instance, origin, destination, task_info,
            event_handler):
        """ Runs the task with the given params and returns
        a dict with the results.
        NOTE: This should NOT modify the existing task_info in any way.
        """
        missing_info_props = [
            prop for prop in self.get_required_task_info_properties()
            if prop not in task_info
        ]
        if missing_info_props:
            raise exception.CoriolisException(
                "Task type '%s' asked to run on task info with "
                "missing properties: %s" %
                (self.__class__, missing_info_props))

        result = self._run(ctxt, instance, origin, destination, task_info,
                           event_handler)

        if not isinstance(result, dict):
            raise exception.CoriolisException(
                "Task type '%s' returned result of type %s "
                "instead of a dict: %s" %
                (self.__class__, type(result), result))

        missing_returns = [
            prop for prop in self.get_returned_task_info_properties()
            if prop not in result.keys()
        ]
        if missing_returns:
            raise exception.CoriolisException(
                "Task type '%s' failed to return the following "
                "declared return values in its result: %s. "
                "Result was: %s" % (self.__class__, missing_returns,
                                    utils.sanitize_task_info(result)))

        undeclared_returns = [
            prop for prop in result.keys()
            if prop not in self.get_returned_task_info_properties()
        ]
        if undeclared_returns:
            raise exception.CoriolisException(
                "Task type '%s' returned the following undeclared "
                "keys in its result: %s" %
                (self.__class__, undeclared_returns))

        return result
Example #27
def create_keystone_session(ctxt, connection_info=None):
    # Avoid a mutable default argument; treat None as an empty dict.
    connection_info = connection_info or {}
    allow_untrusted = connection_info.get("allow_untrusted",
                                          CONF.keystone.allow_untrusted)
    # TODO(alexpilotti): add "ca_cert" to connection_info
    verify = not allow_untrusted

    username = connection_info.get("username")
    auth = None

    if not username:
        # Directly using the caller's token is not feasible for long-running
        # tasks, as once it expires it cannot be automatically renewed. This
        # is solved by using a Keystone trust, which must have been set up
        # beforehand.
        if ctxt.trust_id:
            auth = _get_trusts_auth_plugin(ctxt.trust_id)
        else:
            plugin_name = "token"
            plugin_args = {"token": ctxt.auth_token}
    else:
        plugin_name = "password"
        password = connection_info.get("password")
        plugin_args = {
            "username": username,
            "password": password,
        }

    if not auth:
        project_name = connection_info.get("project_name", ctxt.project_name)

        auth_url = connection_info.get("auth_url", CONF.keystone.auth_url)
        if not auth_url:
            raise exception.CoriolisException(
                '"auth_url" not provided in "connection_info" and option '
                '"auth_url" in group "[openstack_migration_provider]" '
                'not set')

        plugin_args.update({
            "auth_url": auth_url,
            "project_name": project_name,
        })

        keystone_version = connection_info.get(
            "identity_api_version", CONF.keystone.identity_api_version)

        if keystone_version == 3:
            plugin_name = "v3" + plugin_name

            project_domain_name = connection_info.get("project_domain_name",
                                                      ctxt.project_domain)
            plugin_args["project_domain_name"] = project_domain_name

            user_domain_name = connection_info.get("user_domain_name",
                                                   ctxt.user_domain)
            plugin_args["user_domain_name"] = user_domain_name

        loader = loading.get_plugin_loader(plugin_name)
        auth = loader.load_from_options(**plugin_args)

    return ks_session.Session(auth=auth, verify=verify)
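
A hedged usage sketch with password-based credentials; the keys mirror the ones the function reads above, all values are placeholders, and `ctxt` stands for the request context supplied by the caller:

# Placeholder credentials and endpoints.
connection_info = {
    "identity_api_version": 3,
    "auth_url": "http://keystone.example.com:5000/v3",
    "username": "coriolis",
    "password": "secret",
    "project_name": "admin",
    "project_domain_name": "Default",
    "user_domain_name": "Default",
    "allow_untrusted": False,
}
session = create_keystone_session(ctxt, connection_info)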
Example #28
def exec_process(args):
    p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    std_out, std_err = p.communicate()
    if p.returncode:
        raise exception.CoriolisException(
            "Command \"%s\" failed with exit code: %s\nstdout: %s\nstd_err: %s"
            % (args, p.returncode, std_out, std_err))
    return std_out
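
A short usage sketch; the command is illustrative:

# Raises CoriolisException with stdout/stderr attached on non-zero exit.
listing = exec_process(["ls", "-la", "/tmp"])
print(listing.decode())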
Example #29
def wait_for_port_connectivity(address, port, max_wait=300):
    i = 0
    while not _check_port_open(address, port):
        if i >= max_wait:
            raise exception.CoriolisException("Connection failed on port %s" %
                                              port)
        time.sleep(1)
        i += 1
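
`_check_port_open` is not shown in this snippet; a minimal sketch of what such a helper could look like using only the standard library (an assumption, not the project's actual implementation):

import socket

def _check_port_open(address, port):
    # Hypothetical helper: True if a TCP connection succeeds within 1 second.
    try:
        with socket.create_connection((address, port), timeout=1):
            return True
    except OSError:
        return False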
Example #30
    def _wait_for_instance(self, nova, instance, expected_status='ACTIVE'):
        instance = nova.servers.get(instance.id)
        while instance.status not in [expected_status, 'ERROR']:
            time.sleep(2)
            instance = nova.servers.get(instance.id)
        if instance.status != expected_status:
            raise exception.CoriolisException(
                "VM is in status: %s" % instance.status)