示例#1
0
    def validate_native(self):
        """Ensure the package declares the '3.0 (native)' source format.

        Raises:
            BuildError: If debian/source/format cannot be read or does not
                declare a native package.
        """
        try:
            declared_format = self.get_debian_file('source/format')
        except subprocess.CalledProcessError:
            raise BuildError('Package does not specify the source format')
        if declared_format != '3.0 (native)':
            raise BuildError('Package source format is not native')
示例#2
0
    def get_build_revisions(self, upstream_version, version):
        """Given the upstream and the Debian version, find the appropriate
        Git revision for upstream and for Debian, check their relationship
        and return them as (upstream, debian) revision tuple.

        Walks parent revisions starting from 'debian' (or 'master' for
        native packages) until it passes the newest revision whose
        changelog matches the requested version.  Returns None when no
        matching revision exists.

        Raises:
            BuildError: If the merge structure is ambiguous, or the tagged
                upstream revision does not match the merge base.
        """

        if self.native:
            cur = master = self.get_rev('master')
        else:
            cur = self.get_rev('debian')
            master = self.get_rev('master')
        prev = None
        found = False
        while True:
            # Read the changelog of current revisions
            try:
                log = debian.changelog.Changelog(cur.read_file('debian/changelog'))
            except subprocess.CalledProcessError:
                # No changelog at this revision: if a match was already
                # seen, we walked past the start of the matching range;
                # otherwise the version does not exist in the history.
                if found:
                    break
                else:
                    return None

            # Check if the current revision is matching
            if log.distributions == 'unstable' and str(log.full_version) == version and log.upstream_version == upstream_version:
                    # Found the matching revision
                    found = True

            elif found:
                # We found the point at which the release was made
                break

            # Move to next parent revision
            prev = cur
            parents = cur.parents
            if len(parents) == 0:
                # Not found
                return None
            elif len(parents) == 1:
                cur = self.get_rev(parents[0])
            else:
                # At a merge, follow the single parent that is not an
                # ancestor of master (i.e. the Debian-side parent).
                debian_parents = [rev for rev in map(self.get_rev, parents) if not rev < master]
                if len(debian_parents) != 1:
                    raise BuildError("Debian revision search breakdown at revision %s" % str(cur))
                # NOTE(review): debian_parents already contains revision
                # objects from get_rev(); passing one back through
                # get_rev() looks redundant — confirm get_rev() accepts
                # revision objects as well as names.
                cur = self.get_rev(debian_parents[0])

        # If we are here, it means that we are past the part of the history where
        # the version matched our search conditions
        deb_rev = prev
        if self.native:
            return deb_rev, deb_rev

        orig_rev = deb_rev & master
        tagged_rev = self.read_tag(upstream_version)
        if orig_rev != tagged_rev:
            # Bug fix: the implicit string concatenation was missing a
            # space, producing "...is not mergebase of...".
            raise BuildError("Version tagged as release %s is not merge "
                    "base of corresponding Debian package release" % upstream_version)

        return orig_rev, deb_rev
示例#3
0
    def validate_quilt(self):
        """Ensure the package declares the '3.0 (quilt)' source format.

        Checks out a local 'debian' branch from the remote if it does not
        exist yet, then reads debian/source/format.

        Raises:
            BuildError: If debian/source/format cannot be read or does not
                declare a quilt package.
        """
        if not self.has_branch('debian', local_only=True):
            self.remote_checkout('debian')

        try:
            source_format = self.get_debian_file('source/format')
        except subprocess.CalledProcessError:
            # Narrowed from a bare `except:`, which would also mask
            # KeyboardInterrupt/SystemExit and unrelated bugs.  The
            # sibling validate_native() catches CalledProcessError only.
            raise BuildError('Package does not specify the source format')

        if source_format != '3.0 (quilt)':
            raise BuildError('Package source format is not quilt')
示例#4
0
 def query_change(self, query, expect_unique=True):
     """Run a Gerrit SSH query and return the matching change.

     Returns None on Windows, where the SSH query tooling is not used.

     Raises:
         BuildError: If nothing matches, or (with expect_unique) the
             query matches more than one change.
     """
     if self._is_windows:
         return None
     command = self._get_ssh_query_cmd()
     command += ['--current-patch-set', '--', query]
     result_lines = self._cmd_runner.check_output(command).splitlines()
     # NOTE(review): output appears to be one JSON object per change plus
     # a trailing summary line, hence the off-by-one thresholds below.
     if len(result_lines) < 2:
         raise BuildError(query + ' does not match any change')
     if expect_unique and len(result_lines) > 2:
         raise BuildError(query + ' does not identify a unique change')
     return GerritChange(json.loads(result_lines[0]))
示例#5
0
    def parse_changelog(text):
        """Parse a Debian changelog and summarize its release state.

        Returns a tuple (package, is_released, head_version,
        released_version), where is_released tells whether the topmost
        entry targets 'unstable'.

        Raises:
            BuildError: If no entry was ever released to 'unstable', or
                the topmost entry targets an unknown suite.
        """
        log = debian.changelog.Changelog(text)

        suite = log.distributions
        if suite == 'unstable':
            return log.package, True, log.version, log.version
        if suite == 'UNRELEASED':
            # Find the most recent entry that was actually released.
            for entry in log:
                if entry.distributions == 'unstable':
                    return log.package, False, log.version, entry.version
            raise BuildError("The package has no released versions")
        raise BuildError("Invalid suite name: " + suite)
示例#6
0
def expand_srcname_spec(spec, full_clean=False):
    """Parse a list of source packages on which the operation is to be performed.

    If some variant of 'all' is specified, comparison against packages
    currently in the APT repository is made and packages which have older
    version in APT than in Git are returned.

    Args:
        spec (list of str): Package names, '*', 'all', or 'all:<release>'.
        full_clean (bool): Whether to fully clean/update checkouts.

    Returns:
        tuple: (list of package checkouts, dict mapping release -> APT repo).

    Raises:
        BuildError: If an 'all'-style qualifier is malformed.
    """

    if len(spec) == 1 and spec[0] == '*':
        checkouts = []
        for pkg in config.package_map:
            # Best effort: '*' means "everything that checks out cleanly",
            # so packages that fail to check out are deliberately skipped.
            try:
                checkouts.append(checkout.PackageCheckout(pkg, full_clean=full_clean))
            except Exception:
                pass
        return checkouts, {}
    elif len(spec) > 1 or not spec[0].startswith('all'):
        return [checkout.PackageCheckout(pkg, full_clean=full_clean) for pkg in spec], {}
    else:
        if spec[0] == 'all':
            releases = config.releases
        elif spec[0].startswith('all:'):
            releases = [spec[0].split(':')[1]]
        else:
            raise BuildError("Invalid all-package qualifier specified")

        cache = {}
        packages = set()
        repos = {}
        for release in releases:
            _, _, apt_repo = apt.get_release(release)
            repos[release] = apt_repo
            comparison = apt.compare_against_git(apt_repo, update_all=full_clean, checkout_cache=cache)
            # Only packages that actually exist in Git (truthy gitver) count.
            packages |= {checkout.lookup_by_package_name(pkg) for pkg, gitver, aptver in comparison if gitver}

        return [cache[pkg] for pkg in packages], repos
示例#7
0
    def upload_revision(self, project, file_glob="*"):
        """Upload a new version of the patch that triggered this build, but
        only if files in the glob changed and it came from the
        specified project.

        Args:
            project (Project) : Enum value to choose which project might be updated
            file_glob (str) : glob describing the files to add to the patch
        """

        triggering_project = self._gerrit.get_triggering_project()
        triggering_branch = self._gerrit.get_triggering_branch()
        # Not triggered by a Gerrit change: nothing to upload.
        if triggering_project is None or triggering_branch is None:
            return

        project_dir = self.get_project_dir(project)

        # Stage any files updated by this job that match the glob.
        try:
            self._cmd_runner.check_call(['git', 'add', '--', file_glob],
                                        cwd=project_dir)
        except CommandError as e:
            raise BuildError('Failed to add updated files running ' + e.cmd + ' in cwd ' + project_dir)

        # The exit code of git diff tells whether anything is staged;
        # --no-patch suppresses the diff content itself.
        staged_check = ['git', 'diff', '--cached', '--exit-code', '--no-patch']
        if self._cmd_runner.call(staged_check, cwd=project_dir) == 0:
            # Nothing was staged, so there is nothing to upload.
            return

        # Fold the staged files into HEAD, keeping its commit message.
        try:
            self._cmd_runner.check_call(
                ['git', 'commit', '--amend', '--reuse-message', 'HEAD'],
                cwd=project_dir)
        except CommandError as e:
            raise BuildError('Failed to amend the commit when adding updated files running ' + e.cmd + ' in cwd ' + project_dir)

        # Push back to Gerrit for testing/review, but only when the
        # triggering project is the one the caller expected to change.
        if triggering_project == project:
            push_cmd = ['git', 'push', self._gerrit.get_git_url(project),
                        'HEAD:refs/for/{0}'.format(triggering_branch)]
            try:
                self._cmd_runner.check_call(push_cmd, cwd=project_dir)
            except CommandError as e:
                raise BuildError('Failed to upload the commit with updated files running ' + e.cmd + ' in cwd ' + project_dir)
示例#8
0
 def get_remote_hash(self, project, refspec):
     """Fetch hash of a refspec on the Gerrit server.

     Args:
         project: Project whose repository URL to query.
         refspec: Refspec object; its ``fetch`` attribute names the ref.

     Returns:
         str: Commit hash the refspec points to on the server.

     Raises:
         BuildError: If the refspec does not exist on the server.
     """
     cmd = ['git', 'ls-remote', self.get_git_url(project), refspec.fetch]
     output = self._cmd_runner.check_output(cmd).split(None, 1)
     if len(output) < 2:
         # Bug fix: this used to `return` the BuildError instance instead
         # of raising it, so callers received an exception object where
         # they expected a hash string.
         raise BuildError('failed to find refspec {0} for {1}'.format(
             refspec, project))
     return output[0].strip()
示例#9
0
 def _parse_cross_verify(self, tokens):
     """Handle a 'cross-verify' request.

     Consumes the change identifier from ``tokens``, overrides the
     refspec of the referenced project to that change, and installs the
     default cross-verify builds.

     Raises:
         BuildError: If the referenced change is from the triggering
             repository, or from releng.
     """
     triggering_project = self._gerrit.get_triggering_project()
     token = tokens.pop(0)
     # The consumed token identifies the Gerrit change to verify against.
     change = self._gerrit.query_change(token)
     project = change.project
     refspec = change.refspec
     if triggering_project and project == triggering_project:
         raise BuildError(
             'Cross-verify is not possible with another change from the same repository'
         )
     if project == Project.RELENG:
         raise BuildError(
             'Cross-verify with releng changes should be initiated from the releng change'
         )
     # Inherit the branch from the referenced change unless already set.
     if self._branch is None:
         self._branch = change.branch
     # The project now has an explicit refspec, so remove it from the
     # set of projects resolved by branch name.
     if project in self._branch_projects:
         self._branch_projects.remove(project)
     self._projects.override_refspec(project, refspec)
     self._default_builds = [{
         'type': 'matrix',
         'desc': 'cross-verify',
         'matrix-file': 'pre-submit-matrix'
     }]
     # Without a triggering project, or when triggered from releng, also
     # schedule the analyzer/documentation/style builds.
     if not triggering_project or triggering_project == Project.RELENG:
         self._default_builds.extend([{
             'type': 'clang-analyzer',
             'desc': 'cross-verify'
         }, {
             'type': 'documentation',
             'desc': 'cross-verify'
         }, {
             'type': 'uncrustify',
             'desc': 'cross-verify'
         }])
     # Record the change/patchset pair, but only for open changes that
     # came from a triggering project.
     if triggering_project and change.is_open:
         self._cross_verify_info = {
             'change': change.number,
             'patchset': change.patchnumber
         }
示例#10
0
    def __init__(self, package, full_clean=False):
        """Check out the named source package.

        Args:
            package (str): Package name; must be in config.package_map.
            full_clean (bool): If True, fetch all remotes and fully clean
                the working tree before use.

        Raises:
            BuildError: If the package is unknown.
        """
        if package not in config.package_map:
            raise BuildError("Cannot find package %s" % package)

        super(PackageCheckout, self).__init__(config.package_map[package])
        self.dirname = package

        if full_clean:
            self.git('fetch', '--all')
            self.full_clean()

        self.determine_type()
        self.load_changelog()
示例#11
0
File: context.py  Project: ptmerz/releng
    def build_target(self,
                     target=None,
                     parallel=True,
                     keep_going=False,
                     target_descr=None,
                     failure_string=None,
                     continue_on_failure=False):
        """Builds a given target.

        run_cmake() must have been called to generate the build system.

        Args:
            target (Optional[str]): Name of the target to build.
                If ``None``, the default (all) target is built.
            parallel (Optional[bool]): Whether parallel building is supported.
            keep_going (Optional[bool]): Whether to continue building after
                first error.
            target_descr (str or None): If given, customizes the error message
                when the target fails to build.  Should fit the initial part of
                the sentence "... failed to build".
                Ignored if ``failure_string`` is specified.
            failure_string (str or None): If given, this message is used as the
                failure message if the target fails to build.
            continue_on_failure (Optional[bool]): If ``True`` and the target
                fails to build, the failure is only reported and
                ``self.failed`` is set to ``True``.

        Raises:
            BuildError: If the target fails to build, and
                ``continue_on_failure`` is not specified.
        """
        build_cmd = self.env._get_build_cmd(target=target,
                                            parallel=parallel,
                                            keep_going=keep_going)
        try:
            self.run_cmd(build_cmd)
        except BuildError:
            # Compose a failure message unless the caller supplied one.
            if failure_string is None:
                if target_descr is not None:
                    subject = target_descr
                else:
                    subject = 'Default (all) target' if target is None else target + ' target'
                failure_string = '{0} failed to build'.format(subject)
            if not continue_on_failure:
                raise BuildError(failure_string)
            self._status_reporter.mark_failed(failure_string)
示例#12
0
def list_package_versions(package):
    """Return the versions of *package* present in the repository.

    Parses ``reprepro ls`` output into a mapping of distribution ->
    architecture -> Version.  Entries for 'bleeding' distributions are
    skipped.

    Raises:
        BuildError: If non-empty ``reprepro ls`` output cannot be parsed.
    """
    output = call('ls', package)
    row_re = re.compile(
        r"^%s\s+\|\s+(\S+)\s+\|\s+(\S+)\s+\|\s+([a-z0-9, ]+)$" % re.escape(package),
        re.MULTILINE)
    rows = row_re.findall(output)
    if output and not rows:
        raise BuildError("Failed to parse `reprepro ls` output")

    versions = defaultdict(dict)
    for version, distribution, arch_list in rows:
        if 'bleeding' in distribution:
            continue
        for arch in arch_list.split(', '):
            versions[distribution][arch] = Version(version)

    return versions
示例#13
0
File: context.py  Project: ptmerz/releng
    def run_cmd(self,
                cmd,
                ignore_failure=False,
                use_return_code=False,
                use_output=False,
                failure_message=None,
                **kwargs):
        """Runs a command via subprocess.

        This wraps subprocess.call() and check_call() with error-handling code
        and other generic handling such as ensuring proper output flushing and
        using bash as the shell on Unix.

        Any arguments accepted by subprocess.call() or check_call() can also
        be passed, e.g. cwd or env to make such calls in stateless ways.

        Args:
            cmd (str/list): Command to execute (as for subprocess.call()).
            ignore_failure (Optional[bool]): If ``True``, failure to run the
                command is ignored.
            use_return_code (Optional[bool]): If ``True``, exit code from the
                command is returned.  Otherwise, non-zero return code fails the
                build unless ignore_failure is set.
            use_output (Optional[bool]): If ``True``, the output from the command
                is returned.  Mutually exclusive with use_return_code.
            failure_message (Optional[str]): If set, provides a friendly
                message about what in the build fails if this command fails.
                This will be reported back to Gerrit.

        Returns:
            int: Command return code (if ``use_return_code=True``).

        Raises:
            BuildError: If the command fails and neither ``use_return_code``
                nor ``ignore_failure`` is set.
        """
        try:
            if use_return_code:
                return self._cmd_runner.call(cmd, **kwargs)
            elif use_output:
                # Capture stderr together with stdout unless the caller
                # explicitly routed it elsewhere.
                if 'stderr' not in kwargs:
                    kwargs['stderr'] = subprocess.STDOUT
                return self._cmd_runner.check_output(cmd, **kwargs)
            else:
                self._cmd_runner.check_call(cmd, **kwargs)
        except CommandError as e:
            if not ignore_failure:
                if failure_message is None:
                    failure_message = 'failed to execute: ' + e.cmd
                raise BuildError(failure_message)
示例#14
0
    def out_of_date_binaries(self, arch):
        """Find all packages for which there is a source package in the
        repository, but not a binary one for a given architecture.  Returns
        a list of source packages which need rebuilding.

        Args:
            arch (str): Architecture to check; 'all' checks the
                architecture-independent binaries.

        Returns:
            list: Names of source packages needing a (re)build for `arch`.

        Raises:
            BuildError: If a binary has a higher version than its source
                (except for known debathena-manual-*-config edge cases).
        """

        result = []
        # .items() instead of the Python-2-only .iteritems() so this also
        # runs on Python 3; on Python 2 it merely costs a list copy.
        for name, src_pkg in self.sources.items():
            out_of_date = False
            for binary, arches in src_pkg.get_binary_architectures().items():
                # Handle cases when package is not meant to be built
                # in the given architecture
                if arch == 'all' and 'all' not in arches:
                    continue
                if arch != 'all' and not ('any' in arches or arch in arches):
                    continue

                # Package was never built
                if binary not in self.binaries:
                    out_of_date = True
                    break

                # Package was not built for this architecture
                bin_pkgs = self.binaries[binary]
                if arch not in bin_pkgs:
                    out_of_date = True
                    break

                # Actually compare versions
                bin_pkg = bin_pkgs[arch]
                if bin_pkg.version > src_pkg.version:
                    # Circumvent edge cases of version comparison with manual-config packages
                    if not (name.startswith('debathena-manual-')
                            and name.endswith('-config')):
                        raise BuildError(
                            "Package %s has version higher in binary than in source"
                            % bin_pkg.name)
                if src_pkg.version > bin_pkg.version:
                    out_of_date = True
                    continue

            if out_of_date:
                result.append(name)

        return result
示例#15
0
    def get_binary_architectures(self):
        """Returns the dictionary of binary packages to list of architectures
        for which those packages are built."""

        # See commit 47126733bb in dpkg.
        # Prior to May 15, 2011, dpkg did not output "Architecture: any all"
        # for packages which contained both any and all architectures.
        # Detect those buggy packages through the absence of Package-List,
        # a field finally introduced on May 28 the same year (though it
        # kind of existed some time before).
        has_dpkg_bug = 'Package-List' not in self.manifest

        naive_arches = self.architecture.split(' ')
        # A single binary: the dsc architecture field applies directly.
        if len(self.binaries) == 1:
            return {self.binaries[0]: naive_arches}
        # An 'all'-only field can be trusted for every binary, unless the
        # dpkg bug may be hiding per-binary architectures.
        if naive_arches == ['all'] and not has_dpkg_bug:
            return {binary: naive_arches for binary in self.binaries}
        # "any" is unsafe because some of the binaries may have more
        # restrictive architectures

        # Only very few packages reach this point; cache the result of
        # parsing the control file for them.
        try:
            return self.cached_architectures
        except AttributeError:
            pass

        control = self.get_control_file()
        per_binary = {}
        for paragraph in control:
            # Skip the source paragraph; only binary paragraphs matter.
            if 'Source' in paragraph:
                continue
            per_binary[paragraph['Package']] = paragraph['Architecture'].split(' ')

        if set(per_binary) != set(self.binaries):
            raise BuildError(
                "Package %s has mismatching list of binaries in dsc and control file"
                % self.name)

        self.cached_architectures = per_binary
        return per_binary
示例#16
0
    def check_projects(self):
        """Checks that all checked-out projects are at correct revisions.

        In the past, there have been problems with not all projects getting
        correctly checked out.  It is unknown whether this was a Jenkins bug
        or something else, and whether the issue still exists.

        Raises:
            BuildError: If any checked-out project is at a wrong revision.
        """
        console = self._executor.console
        all_correct = True
        # .values() instead of the Python-2-only .itervalues() so this
        # also runs on Python 3.
        for project_info in self._projects.values():
            if not project_info.is_checked_out:
                continue
            if not project_info.has_correct_hash():
                # Bug fix: this referenced an undefined name ``project``,
                # raising NameError instead of reporting the bad checkout.
                # NOTE(review): assumes formatting project_info identifies
                # the project sensibly -- confirm its __str__/__repr__.
                print(
                    'Checkout of {0} failed: HEAD is {1}, expected {2}'.format(
                        project_info, project_info.head_hash,
                        project_info.remote_hash),
                    file=console)
                all_correct = False
        if not all_correct:
            raise BuildError('Checkout failed (Jenkins issue)')
示例#17
0
 def _process_release_branch(self, branch):
     triggering_project = self._gerrit.get_triggering_project()
     if triggering_project and triggering_project != Project.RELENG:
         raise BuildError(
             'Release branch verification only makes sense for releng changes'
         )
     assert self._branch is None
     self._branch = branch
     self._default_builds = [{
         'type': 'matrix',
         'desc': branch,
         'matrix-file': 'pre-submit-matrix'
     }, {
         'type': 'clang-analyzer',
         'desc': branch
     }, {
         'type': 'documentation',
         'desc': branch
     }, {
         'type': 'uncrustify',
         'desc': branch
     }]
示例#18
0
File: cmake.py  Project: ptmerz/releng
def _read_ctest_tag_name(executor):
    lines = list(executor.read_file('Testing/TAG'))
    if len(lines) < 1:
        raise BuildError('CTest did not produce content in a TAG file')
    return lines[0].strip()
示例#19
0
 def parse(self, request):
     """Parse a free-form build request string into build specifications.

     The first token may select a special mode ('cross-verify' or a
     release branch); each remaining token adds one build to
     ``self._builds``.

     Raises:
         BuildError: If a token is unknown or invalid for the triggering
             project.
     """
     tokens = request.split()
     token = tokens[0].lower()
     # Leading mode selectors consume their token before the main loop.
     if token == 'cross-verify':
         tokens.pop(0)
         self._parse_cross_verify(tokens)
     elif re.match(r'^release-\d{4}$', token):
         tokens.pop(0)
         self._process_release_branch(token)
     while tokens:
         token = tokens.pop(0).lower()
         if token == 'quiet':
             # Suppress reporting back to the cross-verified change.
             self._cross_verify_info = None
         elif token == 'clang-analyzer':
             self._builds.append({'type': 'clang-analyzer'})
         elif token == 'coverage':
             self._builds.append({'type': 'coverage'})
         elif token == 'documentation':
             self._builds.append({'type': 'documentation'})
         elif token == 'package':
             # Which package builds apply depends on the triggering
             # project; manual/releng triggers schedule both.
             project = self._gerrit.get_triggering_project()
             if project is None:
                 project = Project.RELENG
             if project in (Project.GROMACS, Project.RELENG):
                 self._builds.append({'type': 'source-package'})
             if project in (Project.REGRESSIONTESTS, Project.RELENG):
                 self._builds.append({'type': 'regtest-package'})
         elif token == 'weekly':
             self._builds.append({
                 'type': 'matrix',
                 'desc': 'weekly',
                 'matrix-file': 'weekly-matrix'
             })
         elif token == 'nightly':
             self._builds.append({
                 'type': 'matrix',
                 'desc': 'nightly',
                 'matrix-file': 'nightly-matrix'
             })
         elif token == 'post-submit':
             self._builds.append({
                 'type': 'matrix',
                 'desc': 'post-submit',
                 'matrix-file': 'post-submit-matrix'
             })
         elif token == 'pre-submit':
             self._builds.append({
                 'type': 'matrix',
                 'desc': 'pre-submit',
                 'matrix-file': 'pre-submit-matrix'
             })
         elif token == 'regtest-package':
             self._builds.append({'type': 'regtest-package'})
         elif token == 'release':
             # An optional 'no-dev' follow-up token flips release_flag.
             build = {'type': 'release', 'release_flag': False}
             if tokens and tokens[0].lower() == 'no-dev':
                 tokens.pop(0)
                 build['release_flag'] = True
             self._builds.append(build)
         elif token == 'uncrustify':
             self._builds.append({'type': 'uncrustify'})
         elif token == 'update':
             project = self._gerrit.get_triggering_project()
             # It can be useful to trigger these from releng for testing,
             # so we do not check for that.
             if project == Project.GROMACS:
                 raise BuildError(
                     'Update only makes sense for regressiontests changes')
             self._builds.append({'type': 'regressiontests-update'})
         elif token == 'update-regtest-hash':
             self._builds.append({'type': 'update-regtest-hash'})
         else:
             raise BuildError('Unknown request: ' + request)
示例#20
0
 def validate_common(self):
     """Validation shared by all package types.

     Raises:
         BuildError: If the package lacks debian/gbp.conf.
     """
     if self.exists_debian_file('gbp.conf'):
         return
     raise BuildError('Package does not contain gbp.conf')
示例#21
0
    def get_control_file(self):
        """Extract control file from the source package.

        Returns:
            list: Deb822 paragraphs of debian/control.

        Raises:
            BuildError: If the package format is unsupported, expected
                archive members are missing, or the diff does not contain
                a well-formed debian/control addition.
        """

        controlname = "%s-%s/debian/control" % (self.name, str(self.version))
        if self.format.startswith('3.0'):
            if self.format == '3.0 (native)':
                tarname = "%s_%s.tar." % (self.name, str(self.version))
            elif self.format == '3.0 (quilt)':
                # The quilt debian tarball is rooted at debian/ directly.
                tarname = "%s_%s.debian.tar." % (self.name, str(self.version))
                controlname = "debian/control"
            else:
                raise BuildError(
                    "Package %s has unsupported format %s in archive" %
                    (self.name, self.format))

            try:
                # Exactly one file should match the tarball prefix.
                tarpath, = [
                    f.path for f in self.files if f.name.startswith(tarname)
                ]
            except ValueError:
                raise BuildError(
                    "File %s.{gz,bz2,xz} not found for package %s" %
                    (tarname, self.name))

            # xz needs explicit decompression before handing the stream to
            # tarfile; gz/bz2 are handled by tarfile's 'r:*' directly.
            with closing(lzma.LZMAFile(tarpath, 'r')) if tarpath.endswith('.xz') \
                 else open(tarpath, 'r') as uncompressed:
                with tarfile.open(fileobj=uncompressed, mode='r:*') as tar:
                    return list(
                        debian.deb822.Deb822.iter_paragraphs(
                            tar.extractfile(controlname)))

        # FIXME: this code should be gone once 1.0 packages are gone
        elif self.format == '1.0':
            if len(self.files) == 2:
                # Native 1.0 package: control lives inside the tarball.
                try:
                    tarpath, = [
                        f.path for f in self.files
                        if f.name.endswith('.tar.gz')
                    ]
                except ValueError:
                    raise BuildError(
                        "Package %s has format 1.0 and does not seem to have the tarball"
                        % self.name)

                with tarfile.open(tarpath, 'r:*') as tar:
                    return list(
                        debian.deb822.Deb822.iter_paragraphs(
                            tar.extractfile(controlname)))
            else:
                # Non-native 1.0: debian/control exists only as an added
                # file inside the .diff.gz, so scan the diff for the hunk
                # that adds it.
                diffname = "%s_%s.diff.gz" % (self.name, str(self.version))
                try:
                    diffpath, = [
                        f.path for f in self.files if f.name == diffname
                    ]
                except ValueError:
                    raise BuildError("File %s not found for package %s" %
                                     (diffname, self.name))

                # closing() guarantees the gzip handle is released
                # (bug fix: it previously stayed open).
                with closing(gzip.open(diffpath, 'r')) as diff:
                    while True:
                        line = diff.readline()
                        if not line:
                            # Bug fix: hitting EOF used to spin this loop
                            # forever, since readline() keeps returning ''.
                            raise BuildError(
                                "debian/control not found in diff of package "
                                + self.name)
                        line = line.strip()
                        if line.startswith('--- ') and line.endswith(
                                '/debian/control'):
                            line = diff.readline().strip()
                            if not line.startswith('+++ '):
                                raise BuildError(
                                    "Malformed debian diff in package " +
                                    self.name)

                            line = diff.readline()
                            match = re.match(r"@@ -0,0 \+1,(\d+) @@", line)
                            if not match:
                                raise BuildError(
                                    "Malformed debian diff in package " +
                                    self.name)
                            number_of_lines = int(match.group(1))

                            # The hunk adds the whole file; strip the
                            # leading '+' from each added line.
                            lines = []
                            for _ in range(number_of_lines):
                                lines.append(diff.readline()[1:])

                            return list(
                                debian.deb822.Deb822.iter_paragraphs(lines))
        else:
            raise BuildError(
                "Package %s has unsupported format %s in archive" %
                (self.name, self.format))