def _download_kickstarts(self, project, configurations, ignore):
        """Download RPMs and extract the .ks (kickstart) files they contain.

        :param project: OBS project name the binaries are fetched from
        :param configurations: nested mapping package -> target -> [binary]
        :param ignore: compiled regex patterns; matching .ks basenames
            are skipped
        :returns: tuple (kickstarts, errors); kickstarts is a list of dicts
            with "basedir", "basename" and "contents" keys, errors is a
            list of human readable messages
        """
        kickstarts = []
        errors = []
        rpms = set()
        with Lab(prefix="get_kickstarts") as lab:
            # Download binaries into the lab scratch directory
            for package in configurations:
                for target in configurations[package]:
                    for binary in configurations[package][target]:
                        rpms.add(
                            self.download_binary(project, package, target,
                                                 binary, lab.path))

            for rpm in rpms:
                # Extract kickstart files
                found = False
                for fname in extract_rpm(rpm, lab.path, patterns=["*.ks"]):
                    basedir, basename = os.path.split(fname)
                    # any() short-circuits; the old list comprehension built
                    # the whole match list just to test its truthiness
                    if any(pattern.match(basename) for pattern in ignore):
                        continue
                    kickstarts.append({
                        "basedir": basedir,
                        "basename": basename,
                        # lab is a context manager; presumably it cleans up
                        # the handle from lab.open on exit -- TODO confirm
                        "contents": lab.open(fname).read()
                    })
                    found = True
                if not found:
                    errors.append("%s did not contain .ks files" %
                                  os.path.basename(rpm))

        return kickstarts, errors
    def _download_kickstarts(self, project, configurations, ignore):
        """Download RPMs and extract the .ks (kickstart) files they contain.

        :param project: OBS project name the binaries are fetched from
        :param configurations: nested mapping package -> target -> [binary]
        :param ignore: compiled regex patterns; matching .ks basenames
            are skipped
        :returns: tuple (kickstarts, errors); kickstarts is a list of dicts
            with "basedir", "basename" and "contents" keys, errors is a
            list of human readable messages
        """
        kickstarts = []
        errors = []
        rpms = set()
        with Lab(prefix="get_kickstarts") as lab:
            # Download binaries into the lab scratch directory
            for package in configurations:
                for target in configurations[package]:
                    for binary in configurations[package][target]:
                        rpms.add(self.download_binary(project, package, target,
                                                      binary, lab.path))

            for rpm in rpms:
                # Extract kickstart files
                found = False
                for fname in extract_rpm(rpm, lab.path, patterns=["*.ks"]):
                    basedir, basename = os.path.split(fname)
                    # any() short-circuits; the old list comprehension built
                    # the whole match list just to test its truthiness
                    if any(pattern.match(basename) for pattern in ignore):
                        continue
                    kickstarts.append({"basedir": basedir,
                                       "basename": basename,
                                       "contents": lab.open(fname).read()})
                    found = True
                if not found:
                    errors.append("%s did not contain .ks files" %
                                  os.path.basename(rpm))

        return kickstarts, errors
    def _download_kickstarts(self, project, configurations):
        """Download kickstart RPMs and deploy the contained .ks files.

        Each RPM gets a freshly recreated, dot-prefixed directory under
        self.deploy_prefix; every extracted .ks file is copied there and a
        top level symlink pointing at the copy is (re)created.
        """
        rpms = set()
        with Lab(prefix="get_kickstarts") as lab:
            # Fetch every configured binary into the lab directory
            for package, targets in configurations.items():
                for target, binaries in targets.items():
                    for binary in binaries:
                        rpms.add(self.download_binary(
                            project, package, target, binary, lab.path))

            for rpm_path in rpms:
                # Deploy dir name is the rpm basename with version and
                # release stripped, hidden with a leading dot
                rpm_stem = os.path.basename(rpm_path).rsplit("-", 2)[0]
                deploy_dir = os.path.join(self.deploy_prefix,
                                          ".%s" % rpm_stem)
                # Start from a clean directory each run
                shutil.rmtree(deploy_dir, ignore_errors=True)
                os.mkdir(deploy_dir)
                # Copy each kickstart and refresh its top level symlink
                for ks_name in extract_rpm(rpm_path, lab.path,
                                           patterns=["*.ks"]):
                    shutil.copy(os.path.join(lab.path, ks_name), deploy_dir)
                    ks_base = os.path.basename(ks_name)
                    link_target = os.path.join(deploy_dir, ks_base)
                    link_path = os.path.join(self.deploy_prefix, ks_base)
                    if os.path.lexists(link_path):
                        os.unlink(link_path)
                    os.symlink(link_target, link_path)
    def update_ts(self, tsrpms, packagename, version, tmpdir):
        """Extract .ts files from RPMs and commit them to the package's git.

        Also registers the package with the Pootle translation service via
        its REST API.

        :param tsrpms: rpm file paths containing .ts translation templates
        :param packagename: package name; names the workdir and git repo
        :param version: version string used in the commit message
        :param tmpdir: scratch directory; a <packagename> subdir is created
        """

        workdir = os.path.join(tmpdir, packagename)
        os.mkdir(workdir)

        # Collect the .ts files from every rpm into the shared workdir
        tsfiles = []
        for tsbin in tsrpms:
            tsfiles.extend(extract_rpm(tsbin, workdir, "*.ts"))

        if len(tsfiles) == 0:
            print "No ts files in '%s'. Continue..." % packagename
            return

        try:
            projectdir = self.init_gitdir(packagename)
        except CalledProcessError:
            # invalidate cache and try once again
            self.log.warning("Caught a git error. Removing local git repo and trying again...")
            shutil.rmtree(os.path.join(self.gitconf["basedir"], packagename),
                          ignore_errors=True)
            projectdir = self.init_gitdir(packagename)

        # Translation templates live in a "templates" subdir of the repo
        tpldir = os.path.join(projectdir, "templates")
        if not os.path.isdir(tpldir):
            os.mkdir(tpldir)

        for tsfile in tsfiles:
            shutil.copy(os.path.join(workdir, tsfile), tpldir)

        # git expands the "*" pathspec itself, relative to cwd=tpldir
        check_call(["git", "add", "*"], cwd=tpldir)

        # An empty staged diff means the templates did not change
        if len(check_output(["git", "diff", "--staged"],
                            cwd=projectdir)) == 0:
            print "No updates. Exiting"
            return

        check_call(["git", "commit", "-m",
                    "%s translation templates update for %s" % ( self.gitconf['vcs_msg_prefix'], version)],
                   cwd=projectdir)
        check_call(["git", "push", "origin", "master"], cwd=projectdir)

        # auto-create/update Pootle translation projects
        l10n_auth = (self.l10n_conf["username"], self.l10n_conf["password"])
        data = json.dumps({"name": packagename})
        resp = requests.post("%s/packages" % self.l10n_conf["apiurl"],
                             auth=l10n_auth,
                             headers={'content-type': 'application/json'},
                             data=data,
                             verify=False)
        # NOTE(review): assert is stripped under python -O; an explicit
        # raise would be safer -- left as-is to preserve behavior
        assert resp.status_code == 201
        # This is a hack to make Pootle recalculate statistics
        resp = requests.post("%s/packages" % self.l10n_conf["apiurl"],
                             auth=l10n_auth,
                             headers={'content-type': 'application/json'},
                             data=data,
                             verify=False)
        assert resp.status_code == 201
    def deploy_doc(self, docrpms, packagename, version, tmpdir, symlink=None):
        """Extract doc files from RPM and put them under docroot."""

        deploydir = os.path.join(self.autodoc_conf['docroot'], packagename, version)
        print deploydir
        workdir = os.path.join(tmpdir, packagename)
        os.mkdir(workdir)

        deployed = False
        for docbin in docrpms:
            if len(docrpms) > 1:
                deploydir = os.path.join(deploydir, os.path.basename(docbin))
            print "extracting %s" % docbin
            docfiles = extract_rpm(docbin, workdir)

            toplevels = set()
            print "walking %s" % workdir
            for dirpath, dirnames, filenames in os.walk(workdir):
                for fil in filenames:
                    if fil.endswith(".html"):
                        toplevels.add(dirpath)
                        # don't look further down
                        del dirnames[:]
                        # no need to look at other files
                        break

            if len(toplevels) > 1:
                deployed = True
                for level in toplevels:
                    target = os.path.join(deploydir, os.path.basename(level))
                    shutil.rmtree(target, True)
                    shutil.copytree(level, target)

            elif len(toplevels) == 1:
                deployed = True
                print deploydir
                shutil.rmtree(deploydir, True)
                shutil.copytree(toplevels.pop(), deploydir)


        if deployed:
            # fix permissions due to cpio not honoring umask
            for root, dirs, files in os.walk(os.path.join(self.autodoc_conf['docroot'], packagename)):
                for d in dirs:  
                    os.chmod(os.path.join(root, d), 0755)
                for f in files:
                    os.chmod(os.path.join(root, f), 0644)

            if symlink:
                symlink_name = os.path.join(self.autodoc_conf['docroot'], packagename, symlink)
                print "creating symlink %s" % symlink_name
                if os.path.lexists(symlink_name):
                    os.unlink(symlink_name)
                os.symlink(version, symlink_name)
    def __update_meta(self, project, providers, metatype):
        """Extracts a meta xml from rpm and uploads them to project.

        :returns: uploaded pattern names and error messages
        :rtype: tuple(list, list)
        """
        uploaded = []
        errors = []
        for package, targets in providers.items():
            for target, binaries in targets.items():
                for binary in binaries:
                    with Lab(prefix=metatype) as lab:
                        # Download the rpm
                        try:
                            self.obs.getBinary(project, target, package,
                                               binary, lab.real_path(binary))
                        except HTTPError as exc:
                            errors.append("Failed to download %s: HTTP %s %s" %
                                         (binary, exc.code, exc.filename))
                        except Exception as exc:
                            errors.append("Failed to download %s: %s" % (binary, exc))
                        if errors:
                            return uploaded, errors
                        # Extract pattern (xml) files from the rpm
                        for xml in extract_rpm(lab.real_path(binary), lab.path,
                                               ["*.xml"]):
                            meta = os.path.basename(xml)
                            submetatype = os.path.basename(os.path.dirname(xml))
                            print(meta, metatype, submetatype)
                            try:
                                with open(lab.real_path(xml), 'r') as fd:
                                    metadata = [line.replace("@PROJECT@", project) for line in fd.readlines()]
                                # Update meta
                                if submetatype == "aggregates":
                                    pkgname = os.path.splitext(meta)[0]
                                    core.edit_meta(metatype='pkg', path_args=(project, pkgname), template_args=({'name': pkgname, 'user': '******'}), apiurl=self.obs.apiurl)
                                    u = core.makeurl(self.obs.apiurl, ['source', project, pkgname, '_aggregate'])
                                    print u
                                    print metadata
                                    core.http_PUT(u, data="\n".join(metadata))
                                else:
                                    core.edit_meta(metatype, project, data=metadata)
                                uploaded.append(metatype + '/' + meta)
                            except HTTPError as exc:
                                errors.append("Failed to upload %s:\nHTTP %s %s\n%s" %
                                        (meta, exc.code, exc.filename,
                                            exc.fp.read()))
                            except Exception as exc:
                                errors.append("Failed to upload %s: %s" %
                                        (meta, exc))
                    return uploaded, errors
# --- Beispiel #7 (example-separator artifact left by the code scraper; the stray "0" was its vote count) ---
    def __update_meta(self, project, providers, metatype):
        """Extracts meta xml files from rpms and uploads them to project.

        :param project: OBS project the meta is uploaded to
        :param providers: nested mapping package -> target -> [binary]
        :param metatype: OBS meta type; also used as the Lab prefix
        :returns: uploaded meta names and error messages
        :rtype: tuple(list, list)
        """
        uploaded = []
        errors = []
        for package, targets in providers.items():
            for target, binaries in targets.items():
                for binary in binaries:
                    with Lab(prefix=metatype) as lab:
                        # Download the rpm
                        try:
                            self.obs.getBinary(project, target, package,
                                               binary, lab.real_path(binary))
                        except HTTPError as exc:
                            errors.append("Failed to download %s: HTTP %s %s" %
                                          (binary, exc.code, exc.filename))
                        except Exception as exc:
                            errors.append("Failed to download %s: %s" %
                                          (binary, exc))
                        if errors:
                            # Abort on the first download failure
                            return uploaded, errors
                        # Extract meta (xml) files from the rpm
                        for xml in extract_rpm(lab.real_path(binary), lab.path,
                                               ["*.xml"]):
                            meta = os.path.basename(xml)
                            try:
                                with open(lab.real_path(xml), 'r') as fd:
                                    metadata = [
                                        line.replace("@PROJECT@", project)
                                        for line in fd.readlines()
                                    ]
                                # Update meta
                                core.edit_meta(metatype,
                                               project,
                                               data=metadata)
                                uploaded.append(meta)
                            except HTTPError as exc:
                                errors.append(
                                    "Failed to upload %s:\nHTTP %s %s\n%s" %
                                    (meta, exc.code, exc.filename,
                                     exc.fp.read()))
                            except Exception as exc:
                                errors.append("Failed to upload %s: %s" %
                                              (meta, exc))
        # BUG FIX: this return used to sit inside the binary loop, so only
        # the first binary of the first target was ever processed.
        return uploaded, errors
    def __update_patterns(self, project, package, target, binary):
        """Extracts patterns from rpm and uploads them to project.

        :param project: OBS project to upload the patterns to
        :param package: package that provides the pattern rpm
        :param target: repository/target the binary was built for
        :param binary: pattern rpm file name
        :returns: uploaded pattern names and error messages
        :rtype: tuple(list, list)
        """
        uploaded = []
        errors = []
        with Lab(prefix="update_patterns") as lab:
            # Download the rpm
            try:
                self.obs.getBinary(project, target, package, binary,
                                   lab.real_path(binary))
            except HTTPError as exc:
                errors.append("Failed to download %s: HTTP %s %s" %
                              (binary, exc.code, exc.filename))
            except Exception as exc:
                errors.append("Failed to download %s: %s" % (binary, exc))
            if errors:
                # Without the rpm there is nothing to upload
                return uploaded, errors
            # Extract pattern (xml) files from the rpm
            # (leftover debug print of the rpm path removed)
            for xml in extract_rpm(lab.real_path(binary), lab.path, ["*.xml"]):
                pattern = os.path.basename(xml)
                try:
                    # chop .xml from name
                    if pattern.endswith(".xml"):
                        pattern = pattern[:-4]

                    # Update pattern to project
                    self.obs.setProjectPattern(project,
                                               lab.real_path(xml),
                                               name=pattern)
                    uploaded.append(pattern)
                except HTTPError as exc:
                    errors.append(
                        "Failed to upload %s:\nHTTP %s %s\n%s" %
                        (pattern, exc.code, exc.filename, exc.fp.read()))
                except Exception as exc:
                    errors.append("Failed to upload %s: %s" % (pattern, exc))
        return uploaded, errors
    def __update_patterns(self, project, package, target, binary):
        """Extract pattern xml files from a rpm and upload them to project.

        :returns: uploaded pattern names and error messages
        :rtype: tuple(list, list)
        """
        uploaded, errors = [], []
        with Lab(prefix="update_patterns") as lab:
            # Fetch the pattern rpm into the lab scratch area
            try:
                self.obs.getBinary(project, target, package, binary,
                                   lab.real_path(binary))
            except HTTPError as exc:
                errors.append("Failed to download %s: HTTP %s %s" %
                              (binary, exc.code, exc.filename))
            except Exception as exc:
                errors.append("Failed to download %s: %s" % (binary, exc))
            if errors:
                return uploaded, errors
            # Upload every xml file found inside the rpm as a pattern
            for xml_file in extract_rpm(lab.real_path(binary), lab.path,
                                        ["*.xml"]):
                name = os.path.basename(xml_file)
                try:
                    # the pattern name is the file name minus its extension
                    if name.endswith(".xml"):
                        name = name[:-4]
                    self.obs.setProjectPattern(project,
                                               lab.real_path(xml_file),
                                               name=name)
                    uploaded.append(name)
                except HTTPError as exc:
                    errors.append("Failed to upload %s:\nHTTP %s %s\n%s" %
                                  (name, exc.code, exc.filename,
                                   exc.fp.read()))
                except Exception as exc:
                    errors.append("Failed to upload %s: %s" % (name, exc))
        return uploaded, errors
    def __update_meta(self, project, providers, metatype):
        """Extracts meta xml files from rpms and uploads them to project.

        :param project: OBS project the meta is uploaded to
        :param providers: nested mapping package -> target -> [binary]
        :param metatype: OBS meta type; also used as the Lab prefix
        :returns: uploaded meta names and error messages
        :rtype: tuple(list, list)
        """
        uploaded = []
        errors = []
        for package, targets in providers.items():
            for target, binaries in targets.items():
                for binary in binaries:
                    with Lab(prefix=metatype) as lab:
                        # Download the rpm
                        try:
                            self.obs.getBinary(project, target, package, binary, lab.real_path(binary))
                        except HTTPError as exc:
                            errors.append("Failed to download %s: HTTP %s %s" % (binary, exc.code, exc.filename))
                        except Exception as exc:
                            errors.append("Failed to download %s: %s" % (binary, exc))
                        if errors:
                            # Abort on the first download failure
                            return uploaded, errors
                        # Extract meta (xml) files from the rpm
                        for xml in extract_rpm(lab.real_path(binary), lab.path, ["*.xml"]):
                            meta = os.path.basename(xml)
                            try:
                                with open(lab.real_path(xml), "r") as fd:
                                    metadata = [line.replace("@PROJECT@", project) for line in fd.readlines()]
                                # Update meta
                                core.edit_meta(metatype, project, data=metadata)
                                uploaded.append(meta)
                            except HTTPError as exc:
                                errors.append(
                                    "Failed to upload %s:\nHTTP %s %s\n%s"
                                    % (meta, exc.code, exc.filename, exc.fp.read())
                                )
                            except Exception as exc:
                                errors.append("Failed to upload %s: %s" % (meta, exc))
        # BUG FIX: this return used to sit inside the binary loop, so only
        # the first binary of the first target was ever processed.
        return uploaded, errors
    def _download_kickstarts(self, project, configurations):
        """Download kickstart RPMs and deploy the .ks files they contain.

        :param project: OBS project name to download binaries from
        :param configurations: nested mapping package -> target -> [binary]
        :returns: None; files are deployed under self.deploy_prefix
        """
        rpms = set()
        with Lab(prefix="get_kickstarts") as lab:
            # Download binaries
            # Python 2: coerce unicode to utf-8 byte strings -- presumably
            # the download layer requires str, not unicode; verify
            if isinstance(project, unicode):
                project = project.encode('utf8')
            for package in configurations:
                if isinstance(package, unicode):
                    package = package.encode('utf8')
                for target in configurations[package]:
                    if isinstance(target, unicode):
                        target = target.encode('utf8')
                    for binary in configurations[package][target]:
                        if isinstance(binary, unicode):
                            binary = binary.encode('utf8')
                        rpms.add(self.download_binary(project, package,
                                target, binary, lab.path))

            for rpm in rpms:
                # deploy dir is the name of the rpm without the versions
                # (dot-prefixed, so it is hidden in the deploy root)
                deploy_dir = os.path.join(self.deploy_prefix,
                             ".%s" % os.path.basename(rpm).rsplit("-", 2)[0])
                # Both release and devel ks share the same directory
                if not os.path.exists(deploy_dir):
                    os.mkdir(deploy_dir)
                # Extract kickstart files and copy to the deploy dir
                for fname in extract_rpm(rpm, lab.path, patterns=["*.ks"]):
                    shutil.copy(os.path.join(lab.path, fname), deploy_dir)
                    symlink_src = os.path.join(deploy_dir, os.path.basename(fname))
                    symlink_dst = os.path.join(self.deploy_prefix, os.path.basename(fname))
                    # Refresh the top-level symlink: drop any existing
                    # link/file first, then relink to the new copy
                    if os.path.lexists(symlink_dst):
                        os.unlink(symlink_dst)
                    os.symlink(symlink_src, symlink_dst)

        return
    def deploy_doc(self,
                   docrpms,
                   packagename,
                   version,
                   tmpdir,
                   symlink=None,
                   prefix=None):
        """Extract -doc- files from RPM and put them under docroot."""

        deploydir = version
        if prefix:
            deploydir = os.path.join(prefix, version)
        deploydir = os.path.join(self.autodoc_conf['docroot'], packagename,
                                 deploydir)
        print deploydir
        workdir = os.path.join(tmpdir, packagename)
        os.mkdir(workdir)

        deployed = False
        for docbin in docrpms:
            if len(docrpms) > 1:
                deploydir = os.path.join(deploydir, os.path.basename(docbin))
            print "extracting %s" % docbin
            docfiles = extract_rpm(docbin, workdir)

            toplevels = set()
            print "walking %s" % workdir
            for dirpath, dirnames, filenames in os.walk(workdir):
                for fil in filenames:
                    if fil.endswith(".html") or fil:
                        toplevels.add(dirpath)
                        # don't look further down
                        del dirnames[:]
                        # no need to look at other files
                        break

            if len(toplevels) > 1:
                deployed = True
                for level in toplevels:
                    target = os.path.join(deploydir, os.path.basename(level))
                    shutil.rmtree(target, True)
                    shutil.copytree(level, target)
            elif len(toplevels) == 1:
                deployed = True
                print deploydir
                shutil.rmtree(deploydir, True)
                shutil.copytree(toplevels.pop(), deploydir)

        if deployed:
            print "stuff was deployed"
            # fix permissions due to cpio no honoring umask
            for root, dirs, files in os.walk(
                    os.path.join(self.autodoc_conf['docroot'], packagename)):
                for d in dirs:
                    print "fixing permission for %s" % d
                    os.chmod(os.path.join(root, d), 0755)
                for f in files:
                    os.chmod(os.path.join(root, f), 0644)

            if symlink:
                symlink_name = symlink
                if prefix:
                    symlink_name = os.path.join(prefix, symlink)
                symlink_name = os.path.join(self.autodoc_conf['docroot'],
                                            packagename, symlink_name)
                print "creating symlink %s" % symlink_name
                if os.path.lexists(symlink_name):
                    os.unlink(symlink_name)
                os.symlink(version, symlink_name)
                with open("%s.id" % symlink_name, 'w') as symid:
                    symid.write(version)
    def __update_meta(self, project, providers, metatype):
        """Extracts a meta xml from rpm and uploads them to project.

        :returns: uploaded pattern names and error messages
        :rtype: tuple(list, list)
        """
        uploaded = []
        errors = []
        for package, targets in providers.items():
            for target, binaries in targets.items():
                for binary in binaries:
                    with Lab(prefix=metatype) as lab:
                        # Download the rpm
                        try:
                            self.obs.getBinary(project, target, package,
                                               binary, lab.real_path(binary))
                        except HTTPError as exc:
                            errors.append("Failed to download %s: HTTP %s %s" %
                                          (binary, exc.code, exc.filename))
                        except Exception as exc:
                            errors.append("Failed to download %s: %s" %
                                          (binary, exc))
                        if errors:
                            return uploaded, errors
                        # Extract pattern (xml) files from the rpm
                        for xml in extract_rpm(lab.real_path(binary), lab.path,
                                               ["*.xml"]):
                            meta = os.path.basename(xml)
                            submetatype = os.path.basename(
                                os.path.dirname(xml))
                            print(meta, metatype, submetatype)
                            try:
                                with open(lab.real_path(xml), 'r') as fd:
                                    metadata = [
                                        line.replace("@PROJECT@", project)
                                        for line in fd.readlines()
                                    ]
                                # Update meta
                                if submetatype == "aggregates":
                                    pkgname = os.path.splitext(meta)[0]
                                    core.edit_meta(metatype='pkg',
                                                   path_args=(project,
                                                              pkgname),
                                                   template_args=({
                                                       'name':
                                                       pkgname,
                                                       'user':
                                                       '******'
                                                   }),
                                                   apiurl=self.obs.apiurl)
                                    u = core.makeurl(self.obs.apiurl, [
                                        'source', project, pkgname,
                                        '_aggregate'
                                    ])
                                    print u
                                    print metadata
                                    core.http_PUT(u, data="\n".join(metadata))
                                else:
                                    core.edit_meta(metatype,
                                                   project,
                                                   data=metadata)
                                uploaded.append(metatype + '/' + meta)
                            except HTTPError as exc:
                                errors.append(
                                    "Failed to upload %s:\nHTTP %s %s\n%s" %
                                    (meta, exc.code, exc.filename,
                                     exc.fp.read()))
                            except Exception as exc:
                                errors.append("Failed to upload %s: %s" %
                                              (meta, exc))
                    return uploaded, errors
    def get_l10n_stats(self, source_project, target_project, package):
        """Compute translation (.ts) statistics between two projects.

        Downloads the src.rpm of *package* from both projects, extracts
        the .ts files from the contained tarballs and diffs them per
        language.

        :param source_project: project with the new translations
        :param target_project: project with the currently accepted ones
        :param package: l10n package name (a "-l10n" suffixed package)
        :returns: dict of language -> unit diff plus the keys
            "removed_langs", "added_langs" and "removed_strings"
        """
        # NOTE(review): the temp dirs leak if an exception is raised before
        # the rmtree calls at the end -- consider try/finally
        tmp_dir_old = mkdtemp()
        tmp_dir_new = mkdtemp()

        old_ts_dir = tmp_dir_old + "/ts"
        new_ts_dir = tmp_dir_new + "/ts"
        # assumes the first target of source_project is valid for both
        # projects -- TODO confirm
        target = self.obs.getTargets(str(source_project))[0]

        # get src.rpm as it contains all .ts files
        src_rpm = [
            rpm
            for rpm in self.obs.getBinaryList(source_project, target, package)
            if "src.rpm" in rpm
        ]
        target_rpm = [
            rpm
            for rpm in self.obs.getBinaryList(target_project, target, package)
            if "src.rpm" in rpm
        ]

        # download source and target rpms
        self.obs.getBinary(target_project, target, package, target_rpm[0],
                           tmp_dir_old + "/old.rpm")
        self.obs.getBinary(source_project, target, package, src_rpm[0],
                           tmp_dir_new + "/new.rpm")

        # extract rpms
        old_file = extract_rpm(tmp_dir_old + "/old.rpm", tmp_dir_old)
        new_file = extract_rpm(tmp_dir_new + "/new.rpm", tmp_dir_new)

        # rpm contains tar.bz2 and .spec file. Open and extract tar.bz2
        old_tar = tarfile.open(tmp_dir_old + '/' + old_file[0])
        old_tar.extractall(old_ts_dir)
        new_tar = tarfile.open(tmp_dir_new + '/' + new_file[0])
        new_tar.extractall(new_ts_dir)

        # Map language -> path of its .ts file inside the old tarball;
        # assumes a <root>/<lang>/... archive layout -- TODO confirm
        old_ts_files = {}
        for member in old_tar.members:
            # the rpm directory only holds the .spec file, skip it
            if member.name.split('/')[1] == 'rpm':
                continue
            # "lang : path_to_ts_file" pair
            old_ts_files.update({member.name.split('/')[1]: member.name})

        # Same mapping for the new tarball
        new_ts_files = {}
        for member in new_tar.members:
            # the rpm directory only holds the .spec file, skip it
            if member.name.split('/')[1] == 'rpm':
                continue
            # "lang : path_to_ts_file" pair
            new_ts_files.update({member.name.split('/')[1]: member.name})

        # Diff every language present in both old and new
        l10n_stats = {}
        for key in set(new_ts_files.keys()) & set(old_ts_files.keys()):
            _old_path = tmp_dir_old + "/ts/" + old_ts_files[key]
            _new_path = tmp_dir_new + "/ts/" + new_ts_files[key]
            unit_diff = _make_ts_diff(_old_path, _new_path)
            l10n_stats.update({key: unit_diff})
        l10n_stats.update({
            "removed_langs":
            list(set(old_ts_files.keys()) - set(new_ts_files.keys()))
        })
        l10n_stats.update({
            "added_langs":
            list(set(new_ts_files.keys()) - set(old_ts_files.keys()))
        })
        # possible removed strings
        l10n_stats.update({"removed_strings": []})

        # check that -ts-devel package is not going out of sync
        src_pkg = package.replace("-l10n", "")

        # is there a package that is using the -l10n package already?
        src_pkg = [
            rpm for rpm in self.obs.getPackageList(target_project)
            if src_pkg == rpm
        ]

        if len(src_pkg) > 0:
            # get the -ts-devel rpm of that package
            src_ts_devel_rpm = [
                rpm for rpm in self.obs.getBinaryList(
                    target_project, target, src_pkg[0]) if "-ts-devel" in rpm
            ]
            if len(src_ts_devel_rpm) > 0:
                tmp_dir_ts = mkdtemp()
                self.obs.getBinary(target_project, target, src_pkg[0],
                                   src_ts_devel_rpm[0],
                                   tmp_dir_ts + "/orig.rpm")
                orig_ts_file = extract_rpm(tmp_dir_ts + "/orig.rpm",
                                           tmp_dir_ts,
                                           patterns="*.ts")
                original_units = factory.getobject(tmp_dir_ts + "/" +
                                                   orig_ts_file[0])
                new_units = factory.getobject(tmp_dir_new + "/ts/" +
                                              new_ts_files['templates'])
                # ids in the deployed -ts-devel but missing from the new
                # templates are strings that would be removed
                removed_units = set(original_units.getids()) - set(
                    new_units.getids())
                l10n_stats.update({"removed_strings": list(removed_units)})
                shutil.rmtree(tmp_dir_ts)

        # get rid of tmp dirs
        shutil.rmtree(tmp_dir_old)
        shutil.rmtree(tmp_dir_new)

        return l10n_stats
# Beispiel #15
# 0
                project = project.encode('utf8')
            for package in configurations:
                if isinstance(package, unicode):
                    package = package.encode('utf8')
                for target in configurations[package]:
                    if isinstance(target, unicode):
                        target = target.encode('utf8')
                    for binary in configurations[package][target]:
                        if isinstance(binary, unicode):
                            binary = binary.encode('utf8')
                        rpms.add(self.download_binary(project, package,
                                target, binary, lab.path))

            for rpm in rpms:
                # deploy dir is the name of the rpm without the versions
                deploy_dir = os.path.join(self.deploy_prefix,
                             ".%s" % os.path.basename(rpm).rsplit("-", 2)[0])
                # Both release and devel ks share the same directory
                if not os.path.exists(deploy_dir):
                    os.mkdir(deploy_dir)
                # Extract kickstart files and copy to the deploy dir
                for fname in extract_rpm(rpm, lab.path, patterns=["*.ks"]):
                    shutil.copy(os.path.join(lab.path, fname), deploy_dir)
                    symlink_src = os.path.join(deploy_dir, os.path.basename(fname))
                    symlink_dst = os.path.join(self.deploy_prefix, os.path.basename(fname))
                    if os.path.lexists(symlink_dst):
                        os.unlink(symlink_dst)
                    os.symlink(symlink_src, symlink_dst)

        return
# Beispiel #16
# 0
    def get_l10n_stats(self, source_project, target_project, package):
        """Collect localization (.ts) diff statistics for a package.

        Downloads the src.rpm of *package* from both projects, extracts the
        source tarballs and compares the contained .ts translation files
        per language.

        :param source_project: OBS project providing the new sources
        :param target_project: OBS project providing the current sources
        :param package: package name (a "-l10n" translations package)
        :returns: dict with keys "removed_langs", "added_langs",
            "removed_strings" and "languages" (per-language unit diff as
            produced by ``_make_ts_diff``)
        """
        tmp_dir_old = mkdtemp()
        tmp_dir_new = mkdtemp()
        try:
            old_ts_dir = os.path.join(tmp_dir_old, "ts")
            new_ts_dir = os.path.join(tmp_dir_new, "ts")
            target = self.obs.getTargets(str(source_project))[0]

            # Get src.rpm as it contains all .ts files
            src_rpm = [rpm for rpm in self.obs.getBinaryList(
                    source_project, target, package) if "src.rpm" in rpm]
            target_rpm = [rpm for rpm in self.obs.getBinaryList(
                    target_project, target, package) if "src.rpm" in rpm]

            # Download source and target rpms
            old_src_rpm = os.path.join(tmp_dir_old, target_rpm[0])
            self.obs.getBinary(target_project, target, package, target_rpm[0],
                               old_src_rpm)
            new_src_rpm = os.path.join(tmp_dir_new, src_rpm[0])
            self.obs.getBinary(source_project, target, package, src_rpm[0],
                               new_src_rpm)

            # Extract rpms and get the source tarball names
            old_tar = next(
                f for f in extract_rpm(old_src_rpm, tmp_dir_old)
                if '.tar' in f
            )
            new_tar = next(
                f for f in extract_rpm(new_src_rpm, tmp_dir_new)
                if '.tar' in f
            )

            # Extract tarballs and get ts files per language
            old_tar = os.path.join(tmp_dir_old, old_tar)
            new_tar = os.path.join(tmp_dir_new, new_tar)
            old_ts_files = _get_ts_files(_extract_tar(old_tar, old_ts_dir))
            new_ts_files = _get_ts_files(_extract_tar(new_tar, new_ts_dir))

            old_langs = set(old_ts_files.keys())
            new_langs = set(new_ts_files.keys())

            l10n_stats = {
                "removed_langs": list(old_langs - new_langs),
                "added_langs": list(new_langs - old_langs),
                "removed_strings": [],
                "languages": {},
            }
            for key in new_langs & old_langs:
                _old_path = os.path.join(old_ts_dir, old_ts_files[key])
                _new_path = os.path.join(new_ts_dir, new_ts_files[key])
                l10n_stats['languages'][key] = _make_ts_diff(
                    _old_path, _new_path)

            # Check that -ts-devel package is not going out of sync
            src_pkg = package.replace("-l10n", "")

            # Is there a package that is using the -l10n package already
            if src_pkg in self.obs.getPackageList(target_project):
                # get -ts-devel rpm
                src_ts_devel_rpm = next((
                    rpm for rpm in
                    self.obs.getBinaryList(target_project, target, src_pkg)
                    if "-ts-devel" in rpm),
                    None
                )
                if src_ts_devel_rpm:
                    tmp_dir_ts = mkdtemp()
                    try:
                        tmp_src_ts_devel_rpm = os.path.join(
                            tmp_dir_ts, src_ts_devel_rpm)
                        self.obs.getBinary(
                            target_project, target, src_pkg, src_ts_devel_rpm,
                            tmp_src_ts_devel_rpm)
                        # NOTE(review): other call sites pass patterns as a
                        # list (["*.ks"]); presumably extract_rpm also
                        # accepts a bare string -- confirm.
                        orig_ts_file = extract_rpm(
                            tmp_src_ts_devel_rpm, tmp_dir_ts, patterns="*.ts")
                        original_units = factory.getobject(
                            os.path.join(tmp_dir_ts, orig_ts_file[0]))
                        new_units = factory.getobject(
                            os.path.join(tmp_dir_new, "ts",
                                         new_ts_files['templates']))
                        l10n_stats["removed_strings"] = list(
                            set(original_units.getids()) -
                            set(new_units.getids())
                        )
                    finally:
                        # Remove the -ts-devel scratch dir even on error
                        shutil.rmtree(tmp_dir_ts)
            return l10n_stats
        finally:
            # Always remove the scratch dirs, also when an OBS call or an
            # extraction above raised (the original leaked them on error).
            shutil.rmtree(tmp_dir_old)
            shutil.rmtree(tmp_dir_new)
    def get_l10n_stats(self, source_project, target_project, package):
        """Collect localization (.ts) diff statistics for a package.

        Downloads the src.rpm of *package* from both projects, extracts the
        source tarballs and compares the contained .ts translation files
        per language.

        :param source_project: OBS project providing the new sources
        :param target_project: OBS project providing the current sources
        :param package: package name (a "-l10n" translations package)
        :returns: flat dict mapping each common language to its
            ``_make_ts_diff`` result, plus the keys "removed_langs",
            "added_langs" and "removed_strings"
        """
        tmp_dir_old = mkdtemp()
        tmp_dir_new = mkdtemp()
        try:
            old_ts_dir = tmp_dir_old + "/ts"
            new_ts_dir = tmp_dir_new + "/ts"
            target = self.obs.getTargets(str(source_project))[0]

            # get src.rpm as it contains all .ts files
            src_rpm = [rpm for rpm in self.obs.getBinaryList(
                    source_project, target, package) if "src.rpm" in rpm]
            target_rpm = [rpm for rpm in self.obs.getBinaryList(
                    target_project, target, package) if "src.rpm" in rpm]

            # download source and target rpms
            self.obs.getBinary(target_project, target, package, target_rpm[0],
                               tmp_dir_old + "/old.rpm")
            self.obs.getBinary(source_project, target, package, src_rpm[0],
                               tmp_dir_new + "/new.rpm")

            # extract rpms
            old_file = extract_rpm(tmp_dir_old + "/old.rpm", tmp_dir_old)
            new_file = extract_rpm(tmp_dir_new + "/new.rpm", tmp_dir_new)

            # rpm contains tar.bz2 and .spec file; open and extract tar.bz2.
            # Use context managers so the archive handles are closed
            # (the original leaked them); capture the member list first.
            with tarfile.open(tmp_dir_old + '/' + old_file[0]) as old_tar:
                old_tar.extractall(old_ts_dir)
                old_members = old_tar.getmembers()
            with tarfile.open(tmp_dir_new + '/' + new_file[0]) as new_tar:
                new_tar.extractall(new_ts_dir)
                new_members = new_tar.getmembers()

            # "lang : path_to_ts_file" pairs; the 'rpm' directory only
            # holds the .spec file, so skip it
            old_ts_files = {
                member.name.split('/')[1]: member.name
                for member in old_members
                if member.name.split('/')[1] != 'rpm'
            }
            new_ts_files = {
                member.name.split('/')[1]: member.name
                for member in new_members
                if member.name.split('/')[1] != 'rpm'
            }

            # flat result dict: per-language diffs plus summary keys
            l10n_stats = {}
            for key in set(new_ts_files.keys()) & set(old_ts_files.keys()):
                _old_path = tmp_dir_old + "/ts/" + old_ts_files[key]
                _new_path = tmp_dir_new + "/ts/" + new_ts_files[key]
                l10n_stats[key] = _make_ts_diff(_old_path, _new_path)
            l10n_stats["removed_langs"] = list(
                set(old_ts_files.keys()) - set(new_ts_files.keys()))
            l10n_stats["added_langs"] = list(
                set(new_ts_files.keys()) - set(old_ts_files.keys()))
            # possible removed strings
            l10n_stats["removed_strings"] = []

            # check that -ts-devel package is not going out of sync
            src_pkg = package.replace("-l10n", "")

            # is there a package that is using the -l10n package already
            src_pkg = [rpm for rpm in self.obs.getPackageList(target_project)
                       if src_pkg == rpm]

            if len(src_pkg) > 0:
                # get -ts-devel rpm
                src_ts_devel_rpm = [
                    rpm for rpm in self.obs.getBinaryList(
                        target_project, target, src_pkg[0])
                    if "-ts-devel" in rpm]
                if len(src_ts_devel_rpm) > 0:
                    tmp_dir_ts = mkdtemp()
                    try:
                        self.obs.getBinary(
                            target_project, target, src_pkg[0],
                            src_ts_devel_rpm[0], tmp_dir_ts + "/orig.rpm")
                        orig_ts_file = extract_rpm(
                            tmp_dir_ts + "/orig.rpm", tmp_dir_ts,
                            patterns="*.ts")
                        original_units = factory.getobject(
                            tmp_dir_ts + "/" + orig_ts_file[0])
                        new_units = factory.getobject(
                            tmp_dir_new + "/ts/" + new_ts_files['templates'])
                        l10n_stats["removed_strings"] = list(
                            set(original_units.getids()) -
                            set(new_units.getids()))
                    finally:
                        # remove the -ts-devel scratch dir even on error
                        shutil.rmtree(tmp_dir_ts)
            return l10n_stats
        finally:
            # always remove the scratch dirs, also when an OBS call or an
            # extraction above raised (the original leaked them on error)
            shutil.rmtree(tmp_dir_old)
            shutil.rmtree(tmp_dir_new)