Example #1
    def clean_dst(self, host_dst, path_dst):
        """
        Remove the file on destination.

        :param host_dst: The destination host
        :param path_dst: The path of file on destination host
        """
        files.remote_rm(self.runner(host_dst, 'dst'), path_dst,
                        ignoring_errors=True)
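
files.remote_rm itself is not shown on this page. A minimal sketch of what such
a helper could look like, assuming the runner object exposes run and
run_ignoring_errors methods (both names are assumptions for illustration, not
necessarily CloudFerry's actual API):

# Hypothetical sketch only; the real implementation lives in CloudFerry's
# files module and may differ.
def remote_rm(runner, path, recursive=False, ignoring_errors=False):
    """Delete a path on the remote host the runner is bound to."""
    options = '-f'
    if recursive:
        options += 'r'
    cmd = 'rm {options} {path}'.format(options=options, path=path)
    # Assumed runner interface: run() raises on failure,
    # run_ignoring_errors() swallows the error.
    run = runner.run_ignoring_errors if ignoring_errors else runner.run
    run(cmd)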
Example #2
    def transfer(self, data):
        """Copy a file to the destination host in gzipped chunks over scp."""
        host_src = data['host_src']
        path_src = data['path_src']
        host_dst = data['host_dst']
        path_dst = data['path_dst']
        gateway = data.get('gateway')

        src_runner = self.runner(host_src, 'src', gateway)
        dst_runner = self.runner(host_dst, 'dst', gateway)

        block_size = CONF.migrate.ssh_chunk_size
        file_size = files.remote_file_size_mb(src_runner, path_src)
        num_blocks = int(math.ceil(float(file_size) / block_size))

        src_temp_dir = os.path.join(os.path.basename(path_src), '.cf.copy')
        dst_temp_dir = os.path.join(os.path.basename(path_dst), '.cf.copy')
        part_filename = os.path.basename(path_src) + '.part'

        with files.FullAccessRemoteDir(src_runner, src_temp_dir) as src_tmp, \
                files.FullAccessRemoteDir(dst_runner, dst_temp_dir) as dst_tmp:
            src_part_path = os.path.join(src_tmp.dirname, part_filename)
            dst_part_path = os.path.join(dst_tmp.dirname, part_filename)
            for i in xrange(num_blocks):
                files.remote_split_file(src_runner, path_src, src_part_path, i,
                                        block_size)
                gzipped_path = files.remote_gzip(src_runner, src_part_path)
                gzipped_filename = os.path.basename(gzipped_path)
                dst_gzipped_path = os.path.join(dst_tmp.dirname,
                                                gzipped_filename)

                self.run_scp(host_src, gzipped_path, host_dst,
                             dst_gzipped_path, gateway)
                files.remote_unzip(dst_runner, dst_gzipped_path)
                files.remote_join_file(dst_runner, path_dst, dst_part_path, i,
                                       block_size)
                files.remote_rm(src_runner, src_part_path)
                files.remote_rm(dst_runner, dst_part_path)
        if not self.verify(host_src, path_src, host_dst, path_dst, gateway):
            self.clean_dst(host_dst, path_dst)
            raise base.FileCopyError(**data)
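
Each loop iteration cuts block i out of the source file, gzips it, copies it
over scp, and writes it back into the destination file at the same offset.
The remote_split_file and remote_join_file helpers are not shown on this page;
they presumably issue dd commands along these lines (the command strings below
are an illustrative assumption, not CloudFerry's actual implementation):

# Illustrative only: block_size is in MB, matching remote_file_size_mb above.
def split_block_cmd(path_src, part_path, block_num, block_size):
    # Read one block_size-MB chunk starting at block number block_num.
    return ('dd if={src} of={part} bs=1M count={count} skip={skip}'
            .format(src=path_src, part=part_path, count=block_size,
                    skip=block_num * block_size))


def join_block_cmd(part_path, path_dst, block_num, block_size):
    # Write the chunk back at the same offset without truncating the file.
    return ('dd if={part} of={dst} bs=1M seek={seek} conv=notrunc'
            .format(part=part_path, dst=path_dst,
                    seek=block_num * block_size))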
Example #3
    def transfer(self, data):
        """Copy a file to the destination host in gzipped chunks over scp."""
        host_src = data['host_src']
        path_src = data['path_src']
        host_dst = data['host_dst']
        path_dst = data['path_dst']
        gateway = data.get('gateway')

        src_runner = self.runner(host_src, 'src', gateway)
        dst_runner = self.runner(host_dst, 'dst', gateway)

        block_size = CONF.migrate.ssh_chunk_size
        file_size = files.remote_file_size_mb(src_runner, path_src)
        num_blocks = int(math.ceil(float(file_size) / block_size))

        src_temp_dir = os.path.join(os.path.basename(path_src), '.cf.copy')
        dst_temp_dir = os.path.join(os.path.basename(path_dst), '.cf.copy')
        part_filename = os.path.basename(path_src) + '.part'

        with files.FullAccessRemoteDir(src_runner, src_temp_dir) as src_tmp, \
                files.FullAccessRemoteDir(dst_runner, dst_temp_dir) as dst_tmp:
            src_part_path = os.path.join(src_tmp.dirname, part_filename)
            dst_part_path = os.path.join(dst_tmp.dirname, part_filename)
            for i in range(num_blocks):
                files.remote_split_file(src_runner, path_src, src_part_path, i,
                                        block_size)
                gzipped_path = files.remote_gzip(src_runner, src_part_path)
                gzipped_filename = os.path.basename(gzipped_path)
                dst_gzipped_path = os.path.join(dst_tmp.dirname,
                                                gzipped_filename)

                self.run_scp(host_src, gzipped_path, host_dst,
                             dst_gzipped_path, gateway)
                files.remote_unzip(dst_runner, dst_gzipped_path)
                files.remote_join_file(dst_runner, path_dst, dst_part_path, i,
                                       block_size)
                files.remote_rm(src_runner, src_part_path)
                files.remote_rm(dst_runner, dst_part_path)
        if not self.verify(host_src, path_src, host_dst, path_dst, gateway):
            self.clean_dst(host_dst, path_dst)
            raise base.FileCopyError(**data)
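
This example differs from example #2 only in iterating with range instead of
xrange, i.e. it is the Python 3 variant of the same transfer routine. The
verify call at the end is defined elsewhere; a plausible checksum-based sketch
(hypothetical, not CloudFerry's code) compares digests of both files, and a
mismatch triggers clean_dst plus the FileCopyError above:

# Hypothetical sketch of a checksum-based verify step; runner.run is assumed
# here to return the command's stdout.
def verify(self, host_src, path_src, host_dst, path_dst, gateway=None):
    src_runner = self.runner(host_src, 'src', gateway)
    dst_runner = self.runner(host_dst, 'dst', gateway)
    src_md5 = src_runner.run('md5sum {p}'.format(p=path_src)).split()[0]
    dst_md5 = dst_runner.run('md5sum {p}'.format(p=path_dst)).split()[0]
    return src_md5 == dst_md5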
Example #4
    def reuse_source_volume(self, src_volume):
        """Create a destination volume with the same ID as the source."""
        volume_id = src_volume['id']
        original_size = src_volume['size']
        src_volume_object = self.dst_cinder_backend.get_volume_object(
            self.dst_cloud, volume_id)
        LOG.debug("Backing file for source volume on destination cloud: %s",
                  src_volume_object)
        # Create the destination volume with a minimal size of 1 GB; the real
        # size is restored directly in the Cinder database below.
        fake_volume = copy.deepcopy(src_volume)
        fake_volume['size'] = 1
        dst_volume, dst_volume_object = self._create_volume(fake_volume)
        user = self.dst_cloud.cloud_config.cloud.ssh_user
        password = self.dst_cloud.cloud_config.cloud.ssh_sudo_password
        rr = remote_runner.RemoteRunner(dst_volume_object.host, user,
                                        password=password, sudo=True,
                                        ignore_errors=True)
        # Drop the backing file of the freshly created volume; the volume
        # record is re-pointed at the source backing file further down.
        files.remote_rm(rr, dst_volume_object.path)
        dst_cinder = self.dst_cloud.resources[utils.STORAGE_RESOURCE]
        dst_db = cinder_db.CinderDBBroker(dst_cinder.mysql_connector)
        dst_db.update_volume_id(dst_volume.id, volume_id)
        if original_size > 1:
            # The volume was created as 1 GB, so bump quota usage by the
            # remaining gigabytes to account for the real size.
            inc_size = original_size - 1
            project_id = dst_db.get_cinder_volume(volume_id).project_id
            dst_db.inc_quota_usages(project_id, 'gigabytes', inc_size)
            volume_type = (None
                           if dst_volume.volume_type == 'None'
                           else dst_volume.volume_type)
            if volume_type:
                dst_db.inc_quota_usages(project_id,
                                        'gigabytes_%s' % volume_type, inc_size)
        provider_location = self.dst_cinder_backend.get_provider_location(
            self.dst_cloud,
            dst_volume_object.host,
            src_volume_object.path
        )
        dst_db.update_volume(volume_id, provider_location=provider_location,
                             size=original_size)
        return dst_cinder.get_volume_by_id(volume_id)
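
Because the destination volume is created with a fake size of 1 GB and only
later resized directly in the Cinder database, quota usage has to be bumped by
the remaining gigabytes by hand. A small illustrative restatement of that
arithmetic (the helper below is hypothetical, not part of CloudFerry):

# Restates the quota adjustment from reuse_source_volume for clarity.
def quota_deltas(original_size, volume_type):
    deltas = {}
    if original_size > 1:
        inc_size = original_size - 1  # the volume was created as 1 GB
        deltas['gigabytes'] = inc_size
        if volume_type and volume_type != 'None':
            deltas['gigabytes_%s' % volume_type] = inc_size
    return deltas

# Example: a 10 GB volume of type 'ceph' needs +9 on both counters:
# quota_deltas(10, 'ceph') == {'gigabytes': 9, 'gigabytes_ceph': 9}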