Example #1
0
    def upload_imagestore(self, cmd, req):
        self._check_zstore_cli()

        imf = self._get_image_json_file(cmd.srcPath)
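        # commit the image into the image store first if its json descriptor is not found in ceph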
        if not self._ceph_file_existed(imf):
            self.commit_to_imagestore(cmd, req)

        extpara = ""
        taskid = req[http.REQUEST_HEADER].get(http.TASK_UUID)
        if cmd.threadContext:
            if cmd.threadContext['task-stage']:
                extpara += " -stage %s" % cmd.threadContext['task-stage']
            if cmd.threadContext.api:
                taskid = cmd.threadContext.api

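        # build the zstore CLI push command; taskid/extpara may be overridden by the thread context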
        cmdstr = '%s -url %s:%s -callbackurl %s -taskid %s -imageUuid %s %s push %s' % (
            self.ZSTORE_CLI_PATH, cmd.hostname, self.ZSTORE_DEF_PORT, req[http.REQUEST_HEADER].get(http.CALLBACK_URI),
            taskid, cmd.imageUuid, extpara, cmd.srcPath)
        logger.debug('pushing %s to image store' % cmd.srcPath)
        shell = traceable_shell.get_shell(cmd)
        shell.call(cmdstr.encode("utf-8"))
        logger.debug('%s pushed to image store' % cmd.srcPath)

        name, imageid = self._get_image_reference(cmd.srcPath)
        rsp = CpRsp()
        rsp.installPath = self._build_install_path(name, imageid)
        return jsonobject.dumps(rsp)
Example #2
0
    def cp(self, req):
        cmd = jsonobject.loads(req[http.REQUEST_BODY])
        src_path = self._normalize_install_path(cmd.srcPath)
        dst_path = self._normalize_install_path(cmd.dstPath)

        if cmd.sendCommandUrl:
            Report.url = cmd.sendCommandUrl

        report = Report(cmd.threadContext, cmd.threadContextStack)
        report.processType = "CephCpVolume"
        _, PFILE = tempfile.mkstemp()
        stage = (cmd.threadContext['task-stage'],
                 "10-90")[cmd.threadContext['task-stage'] is None]

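        # rbd writes its copy progress to PFILE (its stderr); tail it and report the percentage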
        def _get_progress(synced):
            if not Report.url:
                return synced

            logger.debug("getProgress in ceph-agent")
            percent = shell.call(
                "tail -1 %s | grep -o '1\?[0-9]\{1,2\}%%' | tail -1" %
                PFILE).strip(' \t\n\r%')
            if percent and Report.url:
                report.progress_report(get_exact_percent(percent, stage),
                                       "report")
            return synced

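        # prefer 'rbd deep cp' when the installed rbd supports it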
        def _get_cp_cmd():
            return "deep cp" if shell.run(
                "rbd help deep cp > /dev/null") == 0 else "cp"

        t_shell = traceable_shell.get_shell(cmd)
        _, _, err = t_shell.bash_progress_1(
            self._wrap_shareable_cmd(
                cmd, 'rbd %s %s %s 2> %s' %
                (_get_cp_cmd(), src_path, dst_path, PFILE)), _get_progress)

        if os.path.exists(PFILE):
            os.remove(PFILE)

        if err:
            shell.run('rbd rm %s' % dst_path)
            raise err

        rsp = CpRsp()
        rsp.size = self._get_file_size(dst_path)
        self._set_capacity_to_response(rsp)
        return jsonobject.dumps(rsp)
Example #3
0
    def create_template_from_volume(self, req):
        cmd = jsonobject.loads(req[http.REQUEST_BODY])
        rsp = AgentRsp()
        dirname = os.path.dirname(cmd.installPath)
        if not os.path.exists(dirname):
            os.makedirs(dirname, 0755)

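        # register a rollback that deletes the template file if a later step fails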
        @rollback.rollbackable
        def _0():
            linux.rm_file_force(cmd.installPath)
        _0()

        t_shell = traceable_shell.get_shell(cmd)
        linux.create_template(cmd.volumePath, cmd.installPath, shell=t_shell)

        logger.debug('successfully created template[%s] from volume[%s]' % (cmd.installPath, cmd.volumePath))
        rsp.totalCapacity, rsp.availableCapacity = self._get_disk_capacity(cmd.mountPoint)
        return jsonobject.dumps(rsp)
Example #4
0
    def _migrate_volume_segment(self, parent_uuid, resource_uuid,
                                src_install_path, dst_install_path,
                                dst_mon_addr, dst_mon_user, dst_mon_passwd,
                                dst_mon_port, cmd):
        src_install_path = self._normalize_install_path(src_install_path)
        dst_install_path = self._normalize_install_path(dst_install_path)

        traceable_bash = traceable_shell.get_shell(cmd)
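        # pipe 'rbd export-diff' through ssh into 'rbd import-diff' on the destination,
        # tee-ing an md5sum on each end so the segments can be verified afterwards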
        ssh_cmd, tmp_file = linux.build_sshpass_cmd(
            dst_mon_addr, dst_mon_passwd,
            "tee >(md5sum >/tmp/%s_dst_md5) | rbd import-diff - %s" %
            (resource_uuid, dst_install_path), dst_mon_user, dst_mon_port)
        r, _, e = traceable_bash.bash_roe(
            'set -o pipefail; rbd export-diff {FROM_SNAP} {SRC_INSTALL_PATH} - | tee >(md5sum >/tmp/{RESOURCE_UUID}_src_md5) | {SSH_CMD}'
            .format(RESOURCE_UUID=resource_uuid,
                    SSH_CMD=ssh_cmd,
                    SRC_INSTALL_PATH=src_install_path,
                    FROM_SNAP='--from-snap ' +
                    parent_uuid if parent_uuid != '' else ''))
        linux.rm_file_force(tmp_file)
        if r != 0:
            logger.error('failed to migrate volume %s: %s' %
                         (src_install_path, e))
            return r

        # compare md5sum of src/dst segments
        src_segment_md5 = self._read_file_content('/tmp/%s_src_md5' %
                                                  resource_uuid)
        dst_segment_md5 = linux.sshpass_call(
            dst_mon_addr, dst_mon_passwd,
            'cat /tmp/%s_dst_md5' % resource_uuid, dst_mon_user, dst_mon_port)
        if src_segment_md5 != dst_segment_md5:
            logger.error('check sum mismatch after migration: %s' %
                         src_install_path)
            return -1
        return 0
Example #5
0
    def create_template_from_root_volume(self, req):
        cmd = jsonobject.loads(req[http.REQUEST_BODY])
        rsp = CreateTemplateFromRootVolumeRsp()
        try:
            dirname = os.path.dirname(cmd.installPath)
            if not os.path.exists(dirname):
                os.makedirs(dirname, 0755)

            t_shell = traceable_shell.get_shell(cmd)
            linux.create_template(cmd.rootVolumePath,
                                  cmd.installPath,
                                  shell=t_shell)
        except linux.LinuxError as e:
            linux.rm_file_force(cmd.installPath)
            logger.warn(linux.get_exception_stacktrace())
            rsp.error = 'unable to create image to root@%s:%s from root volume[%s], %s' % (
                cmd.sftpBackupStorageHostName, cmd.installPath,
                cmd.rootVolumePath, str(e))
            rsp.success = False

        self._set_capacity_to_response(cmd.uuid, rsp)
        if rsp.success:
            logger.debug('successfully created template[%s] from root volume[%s]' %
                         (cmd.installPath, cmd.rootVolumePath))
        return jsonobject.dumps(rsp)
Example #6
0
    def download(self, req):
        rsp = DownloadRsp()

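        # read the first bytes of the image to detect its format; the sftp branch
        # relies on pipe_path and scp_to_pipe_cmd defined later in the sftp download path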
        def _get_origin_format(path):
            qcow2_length = 0x9007
            if path.startswith('http://') or path.startswith(
                    'https://') or path.startswith('ftp://'):
                resp = urllib2.urlopen(path)
                qhdr = resp.read(qcow2_length)
                resp.close()
            elif path.startswith('sftp://'):
                fd, tmp_file = tempfile.mkstemp()
                get_header_from_pipe_cmd = "timeout 60 head --bytes=%d %s > %s" % (
                    qcow2_length, pipe_path, tmp_file)
                clean_cmd = "pkill -f %s" % pipe_path
                shell.run(
                    '%s & %s && %s' %
                    (scp_to_pipe_cmd, get_header_from_pipe_cmd, clean_cmd))
                qhdr = os.read(fd, qcow2_length)
                if os.path.exists(tmp_file):
                    os.remove(tmp_file)
            else:
                resp = open(path)
                qhdr = resp.read(qcow2_length)
                resp.close()
            if len(qhdr) < qcow2_length:
                return "raw"

            return get_image_format_from_buf(qhdr)

        def get_origin_format(fpath, fail_if_has_backing_file=True):
            image_format = _get_origin_format(fpath)
            if image_format == "derivedQcow2" and fail_if_has_backing_file:
                raise Exception('image has backing file or %s does not exist!' %
                                fpath)
            return image_format

        cmd = jsonobject.loads(req[http.REQUEST_BODY])
        shell = traceable_shell.get_shell(cmd)
        pool, image_name = self._parse_install_path(cmd.installPath)
        tmp_image_name = 'tmp-%s' % image_name

        @rollbackable
        def _1():
            shell.check_run('rbd rm %s/%s' % (pool, tmp_image_name))

        def _getRealSize(length):
            '''length looks like: 10245K'''
            logger.debug(length)
            if not length[-1].isalpha():
                return length
            units = {
                "g": lambda x: x * 1024 * 1024 * 1024,
                "m": lambda x: x * 1024 * 1024,
                "k": lambda x: x * 1024,
            }
            try:
                return units[length[-1].lower()](int(length[:-1]))
            except:
                logger.warn(linux.get_exception_stacktrace())
                return length

        # whether we have an upload request
        if cmd.url.startswith(self.UPLOAD_PROTO):
            self._prepare_upload(cmd)
            rsp.size = 0
            rsp.uploadPath = self._get_upload_path(req)
            self._set_capacity_to_response(rsp)
            return jsonobject.dumps(rsp)

        if cmd.sendCommandUrl:
            Report.url = cmd.sendCommandUrl

        report = Report(cmd.threadContext, cmd.threadContextStack)
        report.processType = "AddImage"
        report.resourceUuid = cmd.imageUuid
        report.progress_report("0", "start")

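        # dispatch on the url scheme: http/https/ftp stream through wget into rbd,
        # sftp streams through a named pipe, and file imports a local path directly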
        url = urlparse.urlparse(cmd.url)
        if url.scheme in ('http', 'https', 'ftp'):
            image_format = get_origin_format(cmd.url, True)
            cmd.url = linux.shellquote(cmd.url)
            # register a rollback that removes the tmp ceph image if a later step fails
            _1()

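            # use the HTTP Content-Length header as the total size for progress reporting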
            _, PFILE = tempfile.mkstemp()
            content_length = shell.call(
                """curl -sLI %s|awk '/[cC]ontent-[lL]ength/{print $NF}'""" %
                cmd.url).splitlines()[-1]
            total = _getRealSize(content_length)

            def _getProgress(synced):
                last = linux.tail_1(PFILE).strip()
                if not last or len(last.split(
                )) < 1 or 'HTTP request sent, awaiting response' in last:
                    return synced
                logger.debug("last synced: %s" % last)
                written = _getRealSize(last.split()[0])
                if total > 0 and synced < written:
                    synced = written
                    if synced < total:
                        percent = int(round(float(synced) / float(total) * 90))
                        report.progress_report(percent, "report")
                return synced

            logger.debug("content-length is: %s" % total)

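            # stream the download straight into 'rbd import', logging wget progress to PFILE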
            _, _, err = shell.bash_progress_1(
                'set -o pipefail;wget --no-check-certificate -O - %s 2>%s| rbd import --image-format 2 - %s/%s'
                % (cmd.url, PFILE, pool, tmp_image_name), _getProgress)
            if err:
                raise err
            actual_size = linux.get_file_size_by_http_head(cmd.url)

            if os.path.exists(PFILE):
                os.remove(PFILE)

        elif url.scheme == 'sftp':
            port = (url.port, 22)[url.port is None]
            _, PFILE = tempfile.mkstemp()
            ssh_pswd_file = None
            pipe_path = PFILE + "fifo"
            scp_to_pipe_cmd = "scp -P %d -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null %s@%s:%s %s" % (
                port, url.username, url.hostname, url.path, pipe_path)
            sftp_command = "sftp -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=no -P %s -b /dev/stdin %s@%s" % (
                port, url.username, url.hostname) + " <<EOF\n%s\nEOF\n"
            if url.password is not None:
                ssh_pswd_file = linux.write_to_temp_file(url.password)
                scp_to_pipe_cmd = 'sshpass -f %s %s' % (ssh_pswd_file,
                                                        scp_to_pipe_cmd)
                sftp_command = 'sshpass -f %s %s' % (ssh_pswd_file,
                                                     sftp_command)

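            # 'ls -l' over sftp yields the remote file size, later fed to pv for progress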
            actual_size = shell.call(
                sftp_command %
                ("ls -l " + url.path)).splitlines()[1].strip().split()[4]
            os.mkfifo(pipe_path)
            image_format = get_origin_format(cmd.url, True)
            cmd.url = linux.shellquote(cmd.url)
            # register a rollback that removes the tmp ceph image if a later step fails
            _1()

            def _get_progress(synced):
                if not os.path.exists(PFILE):
                    return synced
                last = linux.tail_1(PFILE).strip()
                if not last or not last.isdigit():
                    return synced
                report.progress_report(int(last) * 90 / 100, "report")
                return synced

            get_content_from_pipe_cmd = "pv -s %s -n %s 2>%s" % (
                actual_size, pipe_path, PFILE)
            import_from_pipe_cmd = "rbd import --image-format 2 - %s/%s" % (
                pool, tmp_image_name)
            _, _, err = shell.bash_progress_1(
                'set -o pipefail; %s & %s | %s' %
                (scp_to_pipe_cmd, get_content_from_pipe_cmd,
                 import_from_pipe_cmd), _get_progress)

            if ssh_pswd_file:
                linux.rm_file_force(ssh_pswd_file)

            linux.rm_file_force(PFILE)
            linux.rm_file_force(pipe_path)

            if err:
                raise err

        elif url.scheme == 'file':
            src_path = cmd.url[len('file:'):]  # lstrip would strip a character set, not the prefix
            src_path = os.path.normpath(src_path)
            if not os.path.isfile(src_path):
                raise Exception('cannot find the file[%s]' % src_path)
            image_format = get_origin_format(src_path, True)
            # register a rollback that removes the tmp ceph image if a later step fails
            _1()

            shell.check_run("rbd import --image-format 2 %s %s/%s" %
                            (src_path, pool, tmp_image_name))
            actual_size = os.path.getsize(src_path)
        else:
            raise Exception('unknown url[%s]' % cmd.url)

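        # inspect what landed in the temporary rbd image: qcow2 is converted into the
        # final image, raw is simply renamed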
        file_format = shell.call(
            "set -o pipefail; %s rbd:%s/%s | grep 'file format' | cut -d ':' -f 2"
            % (qemu_img.subcmd('info'), pool, tmp_image_name))
        file_format = file_format.strip()
        if file_format not in ['qcow2', 'raw']:
            raise Exception('unknown image format: %s' % file_format)

        if file_format == 'qcow2':
            conf_path = None
            try:
                with open('/etc/ceph/ceph.conf', 'r') as fd:
                    conf = fd.read()
                    conf = '%s\n%s\n' % (conf, 'rbd default format = 2')
                    conf_path = linux.write_to_temp_file(conf)

                shell.check_run(
                    '%s -f qcow2 -O rbd rbd:%s/%s rbd:%s/%s:conf=%s' %
                    (qemu_img.subcmd('convert'), pool, tmp_image_name, pool,
                     image_name, conf_path))
                shell.check_run('rbd rm %s/%s' % (pool, tmp_image_name))
            finally:
                if conf_path:
                    os.remove(conf_path)
        else:
            shell.check_run('rbd mv %s/%s %s/%s' %
                            (pool, tmp_image_name, pool, image_name))
        report.progress_report("100", "finish")

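        # register a rollback that removes the final image if the remaining steps fail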
        @rollbackable
        def _2():
            shell.check_run('rbd rm %s/%s' % (pool, image_name))

        _2()

        o = shell.call('rbd --format json info %s/%s' % (pool, image_name))
        image_stats = jsonobject.loads(o)

        rsp.size = long(image_stats.size_)
        rsp.actualSize = actual_size
        if image_format == "qcow2":
            rsp.format = "raw"
        else:
            rsp.format = image_format

        self._set_capacity_to_response(rsp)
        return jsonobject.dumps(rsp)
Example #7
0
    def download_image(self, req):
        #TODO: report percentage to mgmt server
        def percentage_callback(percent, url):
            reporter.progress_report(int(percent))

        def use_wget(url, name, workdir, timeout):
            return linux.wget(url, workdir=workdir, rename=name, timeout=timeout, interval=2, callback=percentage_callback, callback_data=url)

        cmd = jsonobject.loads(req[http.REQUEST_BODY])
        reporter = report.Report.from_spec(cmd, "DownloadImage")

        t_shell = traceable_shell.get_shell(cmd)
        rsp = DownloadResponse()
        # for download failure
        (total, avail) = self.get_capacity()
        rsp.totalCapacity = total
        rsp.availableCapacity = avail

        supported_schemes = [self.URL_HTTP, self.URL_HTTPS, self.URL_FTP, self.URL_SFTP, self.URL_FILE]
        if cmd.urlScheme not in supported_schemes:
            rsp.success = False
            rsp.error = 'unsupported url scheme[%s], SimpleSftpBackupStorage only supports %s' % (cmd.urlScheme, supported_schemes)
            return jsonobject.dumps(rsp)

        path = os.path.dirname(cmd.installPath)
        if not os.path.exists(path):
            os.makedirs(path, 0777)
        image_name = os.path.basename(cmd.installPath)
        install_path = cmd.installPath

        timeout = cmd.timeout if cmd.timeout else 7200
        url = urlparse.urlparse(cmd.url)
        if cmd.urlScheme in [self.URL_HTTP, self.URL_HTTPS, self.URL_FTP]:
            try:
                cmd.url = linux.shellquote(cmd.url)
                ret = use_wget(cmd.url, image_name, path, timeout)
                if ret != 0:
                    linux.rm_file_force(install_path)
                    rsp.success = False
                    rsp.error = 'http/https/ftp download failed, [wget -O %s %s] returns value %s' % (image_name, cmd.url, ret)
                    return jsonobject.dumps(rsp)
            except linux.LinuxError as e:
                linux.rm_file_force(install_path)
                logger.warn(traceback.format_exc())
                rsp.success = False
                rsp.error = str(e)
                return jsonobject.dumps(rsp)
        elif cmd.urlScheme == self.URL_SFTP:
            ssh_pass_file = None
            port = (url.port, 22)[url.port is None]

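            # the task daemon reports progress from the growing file size and, on exit,
            # removes the password file plus the partial download if an error occurred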
            class SftpDownloadDaemon(plugin.TaskDaemon):
                def _cancel(self):
                    pass

                def _get_percent(self):
                    return os.stat(install_path).st_size / (total_size / 100) if os.path.exists(install_path) else 0

                def __exit__(self, exc_type, exc_val, exc_tb):
                    super(SftpDownloadDaemon, self).__exit__(exc_type, exc_val, exc_tb)
                    if ssh_pass_file:
                        linux.rm_file_force(ssh_pass_file)
                    if exc_val is not None:
                        linux.rm_file_force(install_path)
                        logger.warn(traceback.format_exc())

            with SftpDownloadDaemon(cmd, "DownloadImage"):
                sftp_cmd = "sftp -P %d -o BatchMode=no -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -b /dev/stdin %s@%s " \
                           "<<EOF\n%%s\nEOF\n" % (port, url.username, url.hostname)
                if url.password is not None:
                    ssh_pass_file = linux.write_to_temp_file(url.password)
                    sftp_cmd = 'sshpass -f %s %s' % (ssh_pass_file, sftp_cmd)

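                # 'ls -l' gives the remote size; 'reget' resumes a partial transfer if present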
                total_size = int(shell.call(sftp_cmd % ("ls -l " + url.path)).splitlines()[1].split()[4])
                t_shell.call(sftp_cmd % ("reget %s %s" % (url.path, install_path)))

        elif cmd.urlScheme == self.URL_FILE:
            src_path = cmd.url[len('file:'):]  # lstrip would strip a character set, not the prefix
            src_path = os.path.normpath(src_path)
            if not os.path.isfile(src_path):
                raise Exception('cannot find the file[%s]' % src_path)
            logger.debug("src_path is: %s" % src_path)
            try:
                t_shell.call('yes | cp %s %s' % (src_path, linux.shellquote(install_path)))
            except shell.ShellError as e:
                linux.rm_file_force(install_path)
                raise e

        os.chmod(cmd.installPath, stat.S_IRUSR + stat.S_IRGRP + stat.S_IROTH)

        try:
            image_format = linux.get_img_file_fmt(linux.shellquote(install_path))
        except Exception as e:
            image_format = "raw"
        size = os.path.getsize(install_path)
        md5sum = 'not calculated'
        logger.debug('successfully downloaded %s to %s' % (cmd.url, install_path))
        (total, avail) = self.get_capacity()
        rsp.md5Sum = md5sum
        rsp.actualSize = size
        rsp.size = linux.qcow2_virtualsize(install_path)
        rsp.totalCapacity = total
        rsp.availableCapacity = avail
        rsp.format = image_format
        return jsonobject.dumps(rsp)
Example #8
0
    def migrate_bits(self, req):
        cmd = jsonobject.loads(req[http.REQUEST_BODY])
        rsp = NfsToNfsMigrateBitsRsp()

        mount_path = cmd.mountPath
        dst_folder_path = cmd.dstFolderPath
        temp_dir = None
        fd, PFILE = tempfile.mkstemp()
        os.close(fd)
        f = open(PFILE, 'r')

        try:
            if not cmd.isMounted:
                linux.is_valid_nfs_url(cmd.url)

                temp_dir = tempfile.mkdtemp()

                # dst folder is absolute path
                mount_path = temp_dir + mount_path
                dst_folder_path = temp_dir + dst_folder_path

                if not linux.is_mounted(mount_path, cmd.url):
                    linux.mount(cmd.url, mount_path, cmd.options, "nfs4")

            # begin migration, then check md5 sums
            linux.mkdir(dst_folder_path)

            t_shell = traceable_shell.get_shell(cmd)
            rsync_excludes = ""
            md5_excludes = ""
            if cmd.filtPaths:
                for filtPath in cmd.filtPaths:
                    # filtPath cannot start with '/', because it must be a relative path
                    if filtPath.startswith('/'):
                        filtPath = filtPath[1:]
                    if filtPath != '':
                        rsync_excludes = rsync_excludes + " --exclude=%s" % filtPath
                        md5_excludes = md5_excludes + " ! -path %s/%s" % (
                            cmd.srcFolderPath, filtPath)

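            # dry-run rsync (-n) to obtain the total byte count for progress reporting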
            total_size = int(
                shell.call(
                    "rsync -aznv %s/ %s %s | grep -o -P 'total size is \K\d*'"
                    % (cmd.srcFolderPath, dst_folder_path, rsync_excludes)))

            stage = get_task_stage(cmd)
            reporter = Report.from_spec(cmd, "MigrateVolume")

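            # parse rsync --progress output from PFILE: completed files accumulate into
            # 'synced', the file currently transferring contributes 'writing'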
            def _get_progress(synced):
                def get_written(regex):
                    matcher = re.match(regex, line)
                    return int(matcher.group(1)) if matcher else 0

                lines = f.readlines()
                writing = 0
                for line in lines:
                    if line[1] == ' ' and line[-1] == '\n':
                        synced += get_written(r'\s.*?(\d+)\s+100%')
                    elif line[-1] == '\r' and line[1] == ' ':
                        writing = get_written(r'.*?(\d+)\s+\d+%[^\r]*\r$')
                reporter.progress_report(
                    get_exact_percent(
                        float(synced + writing) / total_size * 100, stage))
                return synced

            t_shell.bash_progress_1(
                "rsync -az --progress %s/ %s %s > %s" %
                (cmd.srcFolderPath, dst_folder_path, rsync_excludes, PFILE),
                _get_progress)

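            # verify the copy by comparing aggregate md5sums of source and destination trees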
            src_md5 = t_shell.call(
                "find %s -type f %s -exec md5sum {} \; | awk '{ print $1 }' | sort | md5sum"
                % (cmd.srcFolderPath, md5_excludes))
            dst_md5 = t_shell.call(
                "find %s -type f -exec md5sum {} \; | awk '{ print $1 }' | sort | md5sum"
                % dst_folder_path)
            if src_md5 != dst_md5:
                rsp.error = "failed to copy files from %s to %s, md5sum not match" % (
                    cmd.srcFolderPath, dst_folder_path)
                rsp.success = False

            if not cmd.isMounted:
                linux.umount(mount_path)
        finally:
            if temp_dir is not None:
                return_code = shell.run("mount | grep '%s'" % temp_dir)

                if return_code != 0:
                    # in case dir is not empty
                    try:
                        os.rmdir(temp_dir)
                    except OSError as e:
                        logger.warn("delete temp_dir %s failed: %s",
                                    (temp_dir, str(e)))
                else:
                    logger.warn(
                        "temp_dir %s still had mounted destination primary storage, skip cleanup operation"
                        % temp_dir)

            f.close()
            linux.rm_file_force(PFILE)
        return jsonobject.dumps(rsp)