Example #1
    def manage_current_distribution(self, distrib):
        """manage debian files depending of the current distrib from options

        We copy debian_dir directory into tmp build depending of the target distribution
        in all cases, we copy the debian directory of the default version (unstable)
        If a file should not be included, touch an empty file in the overlay
        directory.

        This is specific to Logilab (debian directory is in project directory)
        """
        try:
            # don't forget the final slash!
            export(osp.join(self.config.pkg_dir, 'debian'),
                   osp.join(self.origpath, 'debian/'),
                   verbose=(self.config.verbose == 2))
        except IOError as err:
            raise LGPException(err)

        debian_dir = self.get_debian_dir(distrib)
        if debian_dir != "debian":
            self.logger.info("overriding files from '%s' directory..." %
                             debian_dir)
            # don't forget the final slash!
            export(osp.join(self.config.pkg_dir, debian_dir),
                   osp.join(self.origpath, 'debian/'),
                   verbose=self.config.verbose)

        from debian.changelog import Changelog
        debchangelog = osp.join(self.origpath, 'debian', 'changelog')
        changelog = Changelog(open(debchangelog))
        # substitute distribution string in changelog
        if distrib:
            # squeeze python-debian doesn't handle unicode well, see Debian bug#561805
            changelog.distributions = str(distrib)
        # append suffix string (or timestamp if suffix is empty) to debian revision
        if self.config.suffix is not None:
            suffix = self.config.suffix or '+%s' % int(time.time())
            self.logger.debug("suffix '%s' added to package version" % suffix)
            changelog.version = str(changelog.version) + suffix
        changelog.write_to_open_file(open(debchangelog, 'w'))

        return self.origpath
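
A minimal standalone sketch of the parse/modify/write cycle that the snippet above and Example #2 below share (assuming python-debian is installed and a debian/changelog file exists; the '+local1' suffix is purely illustrative):

# Minimal sketch: parse debian/changelog, retarget and re-version the
# latest entry, then write it back.  Values are illustrative only.
from debian.changelog import Changelog

with open('debian/changelog', encoding='utf-8') as f:
    cl = Changelog(f)

cl.distributions = 'unstable'             # retarget the most recent entry
cl.version = str(cl.version) + '+local1'  # append an illustrative suffix

with open('debian/changelog', 'w', encoding='utf-8') as f:
    cl.write_to_open_file(f)
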
Example #2
def update_changelog(repo, series, version):
    # Update d/changelog.
    with ExitStack() as resources:
        debian_changelog = os.path.join(repo.working_dir, 'debian',
                                        'changelog')
        infp = resources.enter_context(
            open(debian_changelog, 'r', encoding='utf-8'))
        outfp = resources.enter_context(atomic(debian_changelog))
        changelog = Changelog(infp)
        changelog.distributions = series
        series_version = {
            'groovy': '20.10',
            'focal': '20.04',
            'bionic': '18.04',
            'xenial': '16.04',
        }[series]
        new_version = '{}+{}ubuntu1'.format(version, series_version)
        changelog.version = new_version
        changelog.write_to_open_file(outfp)
    return new_version
Example #3
    def getChangelog(self, since_version=None):
        '''
        Return the changelog, optionally since a particular version
        May return None if the changelog isn't available
        '''
        if self._changelog is None:
            if self.name.startswith('lib'):
                subdir = 'lib%s' % self.name[3]
            else:
                subdir = self.name[0]
            # Strip epoch from version
            pkgversion = self.version.split(':', 1)[-1]
            extension = ''
            if self.distribution == 'debian':
                base = 'http://packages.debian.org/'
                extension = '.txt'
            elif self.distribution == 'ubuntu':
                base = 'http://changelogs.ubuntu.com/'

            url = os.path.join(base, 'changelogs', 'pool', self.component,
                               subdir, self.name, self.name + '_' + pkgversion,
                               'changelog' + extension)
            try:
                self._changelog = urlopen(url).read()
            except HTTPError as error:
                print(('%s: %s' % (url, error)), file=sys.stderr)
                return None

        if since_version is None:
            return self._changelog

        if isinstance(since_version, basestring):
            since_version = Version(since_version)

        new_entries = []
        for block in Changelog(self._changelog):
            if block.version <= since_version:
                break
            new_entries.append(unicode(block))
        return u''.join(new_entries)
Example #4
 def make_unuploaded(self):
     self.wt = self.make_branch_and_tree('.')
     self.build_tree(['debian/'])
     cl = Changelog()
     v = Version("0.1-1")
     cl.new_block(
         package='package',
         version=Version('0.1-1'),
         distributions='unstable',
         urgency='low',
         author='James Westby <*****@*****.**>',
         date='Thu,  3 Aug 2006 19:16:22 +0100',
     )
     cl.add_change('')
     cl.add_change('  * Initial packaging.')
     cl.add_change('')
     f = open('debian/changelog', 'wb')
     try:
         cl.write_to_open_file(f)
     finally:
         f.close()
     self.wt.add(["debian/", "debian/changelog"])
     self.wt.commit("one")
Example #5
    def getChangelog(self, since_version=None):
        '''
        Return the changelog, optionally since a particular version
        May return None if the changelog isn't available
        Only available in the devel API, not 1.0
        '''
        if self._changelog is None:
            url = self._lpobject.changelogUrl()
            if url is None:
                print('E: No changelog available for %s %s' %
                      (self.getPackageName(), self.getVersion()),
                      file=sys.stderr)
                return None

            try:
                response, changelog = Http().request(url)
            except HttpLib2Error as e:
                print(str(e), file=sys.stderr)
                return None
            if response.status != 200:
                print('%s: %s %s' % (url, response.status, response.reason),
                      file=sys.stderr)
                return None
            self._changelog = changelog

        if since_version is None:
            return self._changelog

        if isinstance(since_version, basestring):
            since_version = Version(since_version)

        new_entries = []
        for block in Changelog(self._changelog):
            if block.version <= since_version:
                break
            new_entries.append(unicode(block))
        return u''.join(new_entries)
Example #6
    def aggregate_changelog(self, since_version):
        """See `ISourcePackagePublishingHistory`."""
        if self.changelog is None:
            return None

        apt_pkg.init_system()
        chunks = []
        changelog = self.changelog
        # The python-debian API for parsing changelogs is pretty awful. The
        # only useful way of extracting info is to use the iterator on
        # Changelog and then compare versions.
        try:
            changelog_text = changelog.read().decode("UTF-8", "replace")
            for block in Changelog(changelog_text):
                version = block._raw_version
                if (since_version and
                        apt_pkg.version_compare(version, since_version) <= 0):
                    break
                # Poking in private attributes is not nice but again the
                # API is terrible.  We want to ensure that the name/date
                # line is omitted from these composite changelogs.
                block._no_trailer = True
                try:
                    # python-debian adds an extra blank line to the chunks
                    # so we'll have to sort this out.
                    chunks.append(str(block).rstrip())
                except ChangelogCreateError:
                    continue
                if not since_version:
                    # If a particular version was not requested we just
                    # return the most recent changelog entry.
                    break
        except ChangelogParseError:
            return None

        output = "\n\n".join(chunks)
        return output.decode("utf-8", "replace")
Example #7
def determine_update_changelog(local_tree, debian_path):
    from .detect_gbp_dch import (
        guess_update_changelog,
        ChangelogBehaviour,
        )

    changelog_path = os.path.join(debian_path, 'changelog')

    try:
        with local_tree.get_file(changelog_path) as f:
            cl = Changelog(f)
    except NoSuchFile:
        # If there's no changelog, then there's nothing to update!
        return False

    behaviour = guess_update_changelog(local_tree, debian_path, cl)
    if behaviour:
        _note_changelog_policy(behaviour.update_changelog, behaviour.explanation)
    else:
        # If we can't make an educated guess, assume yes.
        behaviour = ChangelogBehaviour(
            True, "Assuming changelog should be updated")

    return behaviour
Example #8
def guess_update_changelog(
    tree: WorkingTree, debian_path: str, cl: Optional[Changelog] = None
) -> Optional[ChangelogBehaviour]:
    """Guess whether the changelog should be updated.

    Args:
      tree: Tree to edit
      debian_path: Path to packaging in tree
    Returns:
      best guess at whether we should update changelog (bool)
    """
    if debian_path != "debian":
        return ChangelogBehaviour(
            True,
            "assuming changelog needs to be updated since "
            "gbp dch only suppors a debian directory in the root of the "
            "repository")
    changelog_path = osutils.pathjoin(debian_path, "changelog")
    if cl is None:
        try:
            with tree.get_file(changelog_path) as f:
                cl = Changelog(f)
        except NoSuchFile:
            cl = None
    if cl and is_unreleased_inaugural(cl):
        return ChangelogBehaviour(
            False,
            "assuming changelog does not need to be updated "
            "since it is the inaugural unreleased entry")
    ret = _guess_update_changelog_from_tree(tree, debian_path, cl)
    if ret is not None:
        return ret
    ret = _guess_update_changelog_from_branch(tree.branch, debian_path)
    if ret is not None:
        return ret
    return None
Example #9
    def generate(self, opts, changes):
        from debian.changelog import Changelog, Version
        changelog = Changelog()

        for change in changes:
            data = change

            changelog.new_block(
                package=data['package-name'],
                version=data['ref'],
                distributions=data['distributions'],
                urgency=data['urgency'],
                author=data['author-name'] + " <" + data['author-email'] + ">",
                date=data['date'],
            )
            changelog.add_change('')
            changelog.add_change('  * ' + data['message'])
            f = open(opts[1][1], 'w')
            try:
                changelog.write_to_open_file(f)
                print("wrote to file: " + opts[1][1])
            finally:
                f.close()
        sys.exit(0)
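
The new_block/add_change/write pattern used in Examples #4 and #9 can be reduced to a standalone sketch; every value below (package name, author, date) is an illustrative placeholder:

# Build a one-entry changelog in memory and print its serialized form.
# All field values are illustrative placeholders.
from debian.changelog import Changelog, Version

cl = Changelog()
cl.new_block(
    package='hello',
    version=Version('1.0-1'),
    distributions='unstable',
    urgency='medium',
    author='Jane Doe <jane@example.com>',
    date='Mon, 01 Jan 2024 12:00:00 +0000',
)
cl.add_change('')
cl.add_change('  * Initial release.')
cl.add_change('')
print(str(cl))
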
Example #10
 def changelog(self: object):
     with open(os.path.join(self.debian_dir, "changelog"), "r") as f:
         changelog = Changelog(f, max_blocks=1)
     return changelog
Example #11
def assertChangelogHasNoWarning(image_dir):
    with open(os.path.join(image_dir, 'changelog')) as f:
        # strict to raise an exception
        Changelog(f, strict=True)
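
Changelog also accepts a strict flag, which the helper above relies on; a hedged sketch of using it as a validator (as far as I know, python-debian raises ChangelogParseError on malformed input when strict=True, and only warns otherwise):

# Validate a changelog by parsing it strictly (sketch; helper name is
# made up).  strict=True turns parse problems into exceptions.
from debian.changelog import Changelog, ChangelogParseError

def is_well_formed(path):
    try:
        with open(path, encoding='utf-8') as f:
            Changelog(f, strict=True)
    except ChangelogParseError:
        return False
    return True
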
Example #12
def _run_single_changer(  # noqa: C901
    changer: DebianChanger,
    pkg: str,
    main_branch: Branch,
    subpath: str,
    resume_branch: Optional[Branch],
    hoster: Optional[Hoster],
    existing_proposal: Optional[MergeProposal],
    overwrite: Optional[bool],
    mode: str,
    branch_name: str,
    diff: bool = False,
    committer: Optional[str] = None,
    build_verify: bool = False,
    preserve_repositories: bool = False,
    install: bool = False,
    pre_check: Optional[str] = None,
    post_check: Optional[str] = None,
    builder: str = DEFAULT_BUILDER,
    dry_run: bool = False,
    update_changelog: Optional[bool] = None,
    label: Optional[List[str]] = None,
    derived_owner: Optional[str] = None,
    build_target_dir: Optional[str] = None,
) -> Optional[bool]:
    from breezy import errors
    from . import (
        BuildFailedError,
        MissingUpstreamTarball,
        Workspace,
    )

    if hoster is None and mode == "attempt-push":
        logging.warn(
            "Unsupported hoster; will attempt to push to %s",
            full_branch_url(main_branch),
        )
        mode = "push"
    with Workspace(
            main_branch,
            resume_branch=resume_branch) as ws, ws.local_tree.lock_write():
        if ws.refreshed:
            overwrite = True
        run_pre_check(ws.local_tree, pre_check)
        if control_files_in_root(ws.local_tree, subpath):
            debian_path = subpath
        else:
            debian_path = os.path.join(subpath, "debian")
        if update_changelog is None:
            dch_guess = guess_update_changelog(ws.local_tree, debian_path)
            if dch_guess:
                logging.info('%s', dch_guess[1])
                update_changelog = dch_guess[0]
            else:
                # Assume yes.
                update_changelog = True
        try:
            changer_result = changer.make_changes(
                ws.local_tree,
                subpath=subpath,
                update_changelog=update_changelog,
                committer=committer,
                reporter=DummyChangerReporter(),
            )
        except ChangerError as e:
            logging.error('%s: %s', e.category, e.summary)
            return False

        if not ws.changes_since_main():
            if existing_proposal:
                logging.info("%s: nothing left to do. Closing proposal.", pkg)
                existing_proposal.close()
            else:
                logging.info("%s: nothing to do", pkg)
            return None

        try:
            run_post_check(ws.local_tree, post_check, ws.orig_revid)
        except PostCheckFailed as e:
            logging.info("%s: %s", pkg, e)
            return False
        if build_verify or install:
            try:
                ws.build(builder=builder, result_dir=build_target_dir)
            except BuildFailedError:
                logging.info("%s: build failed", pkg)
                return False
            except MissingUpstreamTarball:
                logging.info("%s: unable to find upstream source", pkg)
                return False

        if install:
            import re
            import subprocess
            from debian.changelog import Changelog
            from debian.deb822 import Deb822
            with open(
                    ws.local_tree.abspath(
                        os.path.join(ws.subpath, 'debian/changelog')),
                    'r') as f:
                cl = Changelog(f)
            non_epoch_version = cl[0].version.upstream_version
            if cl[0].version.debian_version is not None:
                non_epoch_version += "-%s" % cl[0].version.debian_version
            c = re.compile('%s_%s_(.*).changes' % (re.escape(
                cl[0].package), re.escape(non_epoch_version)))  # type: ignore
            for entry in os.scandir(build_target_dir):
                if not c.match(entry.name):
                    continue
                with open(entry.path, 'rb') as g:
                    changes = Deb822(g)
                    if changes.get('Binary'):
                        subprocess.check_call(['debi', entry.path])

        enable_tag_pushing(ws.local_tree.branch)

        kwargs: Dict[str, Any] = {}
        if breezy_version_info >= (3, 1):
            kwargs["tags"] = changer_result.tags

        try:
            publish_result = ws.publish_changes(
                mode,
                branch_name,
                get_proposal_description=partial(
                    changer.get_proposal_description, changer_result.mutator),
                get_proposal_commit_message=(
                    lambda oldmp: changer_result.proposed_commit_message),
                dry_run=dry_run,
                hoster=hoster,
                allow_create_proposal=changer_result.sufficient_for_proposal,
                overwrite_existing=overwrite,
                existing_proposal=existing_proposal,
                derived_owner=derived_owner,
                labels=label,
                **kwargs)
        except UnsupportedHoster as e:
            logging.error(
                "%s: No known supported hoster for %s. Run 'svp login'?",
                pkg,
                full_branch_url(e.branch),
            )
            return False
        except NoSuchProject as e:
            logging.info("%s: project %s was not found", pkg, e.project)
            return False
        except errors.PermissionDenied as e:
            logging.info("%s: %s", pkg, e)
            return False
        except errors.DivergedBranches:
            logging.info("%s: a branch exists. Use --overwrite to discard it.",
                         pkg)
            return False
        except InsufficientChangesForNewProposal:
            logging.info('%s: insufficient changes for a new merge proposal',
                         pkg)
            return False
        except HosterLoginRequired as e:
            logging.error(
                "Credentials for hosting site at %r missing. "
                "Run 'svp login'?",
                e.hoster.base_url,
            )
            return False

        if publish_result.proposal:
            changer.describe(changer_result.mutator, publish_result)
        if diff:
            for branch_entry in changer_result.branches:
                role = branch_entry[0]
                if len(changer_result.branches) > 1:
                    sys.stdout.write("%s\n" % role)
                    sys.stdout.write(("-" * len(role)) + "\n")
                sys.stdout.flush()
                changer_result.show_diff(ws.local_tree.branch.repository,
                                         sys.stdout.buffer,
                                         role=role)
                if len(changer_result.branches) > 1:
                    sys.stdout.write("\n")
        if preserve_repositories:
            ws.defer_destroy()
            logging.info('Workspace preserved in %s',
                         ws.local_tree.abspath(ws.subpath))

        return True
Example #13
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License with the Debian GNU/Linux or Univention distribution in file
# /usr/share/common-licenses/AGPL-3; if not, see
# <http://www.gnu.org/licenses/>.

from setuptools import setup, Extension
import pkgconfig
from debian.changelog import Changelog
from debian.deb822 import Deb822
from email.utils import parseaddr

d = pkgconfig.parse('heimdal-krb5')
dch = Changelog(open('debian/changelog', 'r'))
dsc = Deb822(open('debian/control', 'r'))
realname, email_address = parseaddr(dsc['Maintainer'])

setup(
    name=dch.package,
    version=dch.version.full_version,
    description='Heimdal Kerberos Python bindings',
    maintainer=realname,
    maintainer_email=email_address,
    url='https://www.univention.de/',
    ext_modules=[
        Extension(
            'heimdal',
            [
                'module.c', 'error.c', 'context.c', 'principal.c', 'creds.c',
Example #14
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License with the Debian GNU/Linux or Univention distribution in file
# /usr/share/common-licenses/AGPL-3; if not, see
# <https://www.gnu.org/licenses/>.

import io
from distutils.core import setup
from email.utils import parseaddr
from debian.changelog import Changelog
from debian.deb822 import Deb822

dch = Changelog(io.open('debian/changelog', 'r', encoding='utf-8'))
dsc = Deb822(io.open('debian/control', 'r', encoding='utf-8'))
realname, email_address = parseaddr(dsc['Maintainer'])

setup(
    packages=['univention'],
    package_dir={'univention': 'modules'},
    description='Univention Python',
    license='GNU Affero General Public License v3',
    name=dch.package,
    version=dch.version.full_version,
    maintainer=realname,
    maintainer_email=email_address,
    url='https://www.univention.de/',
)
Example #15
			 "In such cases the copyrights will be different for each set of files. "
			 "If they are the case, you may ignore this warning, but it may be clearer to "
			 "split the glob patterns even so, in case this arrangement ceases in the future. "))

	#for k, vv in licenses.iteritems():
	#	print k
	#	for v in vv:
	#		print "\t", v


DebianCopyright = ControlParser.cstrKeys(
  ItemCstr.SimpleWithHead("format", [], ["files", "license"])
).add_check_post(
	copyright_check_post
).use(omaker=lambda chunks:
	{"changelog": Changelog(chunks)} if chunks else {}
).use(pselect={
	"format" : SimpleControlBlock(v_single, {}, {
		"upstream-name": v_single,
		"upstream-contact": v_list,
		"source": v_text,
		"disclaimer": v_text,
		"copyright": v_list,
		"license": lcspec_text_synop,
	}),
	"files" : SimpleControlBlock(v_words, {
		"copyright": v_list,
		"license": lcspec_text_synop,
	}, {
		"comment": v_text,
	}),
Example #16
def _get_version():
    changelog = Changelog(io.open('debian/changelog', 'r', encoding='utf-8'))
    return changelog.full_version
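
The version accessors used here and in Example #25 all delegate to the Version of the most recent changelog block; a small illustration with a made-up changelog:

# Illustration of the Changelog version accessors; the inline changelog
# text is a made-up example whose latest version is 1:2.3-4.
from debian.changelog import Changelog

cl = Changelog("""\
demo (1:2.3-4) unstable; urgency=medium

  * Example entry.

 -- Jane Doe <jane@example.com>  Mon, 01 Jan 2024 12:00:00 +0000
""")
print(cl.full_version)      # '1:2.3-4'
print(cl.epoch)             # '1'
print(cl.upstream_version)  # '2.3'
print(cl.debian_revision)   # '4'
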
Example #17
def get_intrinsic_package_metadata(
        p_info: DebianPackageInfo, dsc_path: str,
        extracted_path: str) -> IntrinsicPackageMetadata:
    """Get the package metadata from the source package at dsc_path,
    extracted in extracted_path.

    Args:
        p_info: the package information
        dsc_path: path to the package's dsc file
        extracted_path: the path where the package got extracted

    Returns:
        dict: a dictionary with the following keys:

        - history: list of (package_name, package_version) tuples parsed from
          the package changelog

    """
    with open(dsc_path, "rb") as dsc:
        parsed_dsc = Dsc(dsc)

    # Parse the changelog to retrieve the rest of the package information
    changelog_path = path.join(extracted_path, "debian/changelog")
    with open(changelog_path, "rb") as changelog_file:
        try:
            parsed_changelog = Changelog(changelog_file)
        except UnicodeDecodeError:
            logger.warning(
                "Unknown encoding for changelog %s,"
                " falling back to iso" % changelog_path,
                extra={
                    "swh_type": "deb_changelog_encoding",
                    "swh_name": p_info.name,
                    "swh_version": str(p_info.version),
                    "swh_changelog": changelog_path,
                },
            )

            # need to reset as Changelog scrolls to the end of the file
            changelog_file.seek(0)
            parsed_changelog = Changelog(changelog_file,
                                         encoding="iso-8859-15")

    history: List[Tuple[str, str]] = []

    for block in parsed_changelog:
        assert block.package is not None
        history.append((block.package, str(block.version)))

    changelog = DebianPackageChangelog(
        person=uid_to_person(parsed_changelog.author),
        date=parse_date(parsed_changelog.date).isoformat(),
        history=history[1:],
    )

    maintainers = [
        uid_to_person(parsed_dsc["Maintainer"]),
    ]
    maintainers.extend(
        uid_to_person(person)
        for person in UPLOADERS_SPLIT.split(parsed_dsc.get("Uploaders", "")))

    return IntrinsicPackageMetadata(
        name=p_info.name,
        version=str(p_info.intrinsic_version),
        changelog=changelog,
        maintainers=maintainers,
    )
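
The encoding fallback above can be isolated into a small reusable helper; a sketch (the helper name is made up), relying on the fact that Changelog consumes the whole file, so it has to be rewound before the second attempt:

# Sketch of the encoding-fallback pattern: try the default UTF-8 first,
# then rewind and retry with a legacy encoding.  Helper name is made up.
from debian.changelog import Changelog

def parse_changelog_with_fallback(path, fallback_encoding='iso-8859-15'):
    with open(path, 'rb') as f:
        try:
            return Changelog(f)
        except UnicodeDecodeError:
            f.seek(0)
            return Changelog(f, encoding=fallback_encoding)
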
Example #18
import sys

import reprepro_bundle
import reprepro_bundle_compose
import reprepro_bundle_appserver
import re

from distutils.core import setup

from debian.changelog import Changelog

with open('debian/changelog', 'rb') as reader:
    chlog = Changelog(reader, max_blocks=1)
version = chlog.get_version().full_version

long_description = re.sub(' +', ' ', reprepro_bundle.__doc__.strip())
long_description = re.sub('\n ', '\n', long_description)

description = '''Python3 API and command line tools to manage delivery bundles and delivery workflows in form of apt-repositories created by reprepro'''

settings = dict(name='reprepro-bundle-tools',
                version=version,
                packages=[
                    'reprepro_bundle', 'reprepro_bundle_compose',
                    'reprepro_bundle_appserver', 'reprepro_management_service'
                ],
                author='Christoph Lutz',
                author_email='*****@*****.**',
                description=description,
                long_description=long_description,
                license='EUPL 1.0+',
Example #19
def run_lintian_fixer(  # noqa: C901
    local_tree: WorkingTree,
    fixer: Fixer,
    committer: Optional[str] = None,
    update_changelog: Union[bool, Callable[[], bool]] = True,
    compat_release: Optional[str] = None,
    minimum_certainty: Optional[str] = None,
    trust_package: bool = False,
    allow_reformatting: bool = False,
    dirty_tracker=None,
    subpath: str = "",
    net_access: bool = True,
    opinionated: Optional[bool] = None,
    diligence: int = 0,
    timestamp: Optional[datetime] = None,
    basis_tree: Optional[Tree] = None,
    changes_by: str = "lintian-brush",
):
    """Run a lintian fixer on a tree.

    Args:
      local_tree: WorkingTree object
      basis_tree: Tree
      fixer: Fixer object to apply
      committer: Optional committer (name and email)
      update_changelog: Whether to add a new entry to the changelog
      compat_release: Minimum release that the package should be usable on
        (e.g. 'stable' or 'unstable')
      minimum_certainty: How certain the fixer should be
        about its changes.
      trust_package: Whether to run code from the package if necessary
      allow_reformatting: Whether to allow reformatting of changed files
      dirty_tracker: Optional object that can be used to tell if the tree
        has been changed.
      subpath: Path in tree to operate on
      net_access: Whether to allow accessing external services
      opinionated: Whether to be opinionated
      diligence: Level of diligence
    Returns:
      tuple with set of FixerResult, summary of the changes
    """
    if basis_tree is None:
        basis_tree = local_tree.basis_tree()

    changelog_path = os.path.join(subpath, "debian/changelog")

    try:
        with local_tree.get_file(changelog_path) as f:
            cl = Changelog(f, max_blocks=1)
    except NoSuchFile:
        raise NotDebianPackage(local_tree, subpath)
    package = cl.package
    if cl.distributions == "UNRELEASED":
        current_version = cl.version
    else:
        current_version = cl.version
        increment_version(current_version)
    if compat_release is None:
        compat_release = "sid"
    if minimum_certainty is None:
        minimum_certainty = DEFAULT_MINIMUM_CERTAINTY
    logger.debug('Running fixer %r', fixer)
    try:
        result = fixer.run(
            local_tree.abspath(subpath),
            package=package,
            current_version=current_version,
            compat_release=compat_release,
            minimum_certainty=minimum_certainty,
            trust_package=trust_package,
            allow_reformatting=allow_reformatting,
            net_access=net_access,
            opinionated=opinionated,
            diligence=diligence,
        )
    except BaseException:
        reset_tree(local_tree, basis_tree, subpath, dirty_tracker=dirty_tracker)
        raise
    if not certainty_sufficient(result.certainty, minimum_certainty):
        reset_tree(local_tree, basis_tree, subpath, dirty_tracker=dirty_tracker)
        raise NotCertainEnough(
            fixer, result.certainty, minimum_certainty,
            overridden_lintian_issues=result.overridden_lintian_issues)
    specific_files: Optional[List[str]]
    if dirty_tracker:
        relpaths = dirty_tracker.relpaths()
        # Sort paths so that directories get added before the files they
        # contain (on VCSes where it matters)
        local_tree.add(
            [
                p
                for p in sorted(relpaths)
                if local_tree.has_filename(p) and not local_tree.is_ignored(p)
            ]
        )
        specific_files = [p for p in relpaths if local_tree.is_versioned(p)]
        if not specific_files:
            raise NoChanges(
                fixer, "Script didn't make any changes",
                result.overridden_lintian_issues)
    else:
        local_tree.smart_add([local_tree.abspath(subpath)])
        specific_files = [subpath] if subpath else None

    if local_tree.supports_setting_file_ids():
        RenameMap.guess_renames(basis_tree, local_tree, dry_run=False)

    changes = list(
        local_tree.iter_changes(
            basis_tree,
            specific_files=specific_files,
            want_unversioned=False,
            require_versioned=True,
        )
    )

    if len(local_tree.get_parent_ids()) <= 1 and not changes:
        raise NoChanges(
            fixer, "Script didn't make any changes",
            result.overridden_lintian_issues)

    if not result.description:
        raise DescriptionMissing(fixer)

    lines = result.description.splitlines()
    summary = lines[0]
    details = list(itertools.takewhile(lambda line: line, lines[1:]))

    # If there are upstream changes in a non-native package, perhaps
    # export them to debian/patches
    if has_non_debian_changes(changes, subpath) and current_version.debian_revision:
        try:
            patch_name, specific_files = _upstream_changes_to_patch(
                local_tree,
                basis_tree,
                dirty_tracker,
                subpath,
                result.patch_name or fixer.name,
                result.description,
                timestamp=timestamp,
            )
        except BaseException:
            reset_tree(local_tree, basis_tree, subpath, dirty_tracker=dirty_tracker)
            raise

        summary = "Add patch %s: %s" % (patch_name, summary)

    if only_changes_last_changelog_block(
        local_tree, basis_tree, changelog_path, changes
    ):
        # If the script only changed the last entry in the changelog,
        # don't update the changelog
        update_changelog = False

    if callable(update_changelog):
        update_changelog = update_changelog()

    if update_changelog:
        from .changelog import add_changelog_entry

        add_changelog_entry(local_tree, changelog_path, [summary] + details)
        if specific_files:
            specific_files.append(changelog_path)

    description = result.description + "\n"
    description += "\n"
    description += "Changes-By: %s\n" % changes_by
    for tag in result.fixed_lintian_tags:
        description += "Fixes: lintian: %s\n" % tag
        description += "See-also: https://lintian.debian.org/tags/%s.html\n" % tag

    if committer is None:
        committer = get_committer(local_tree)

    revid = local_tree.commit(
        description,
        allow_pointless=False,
        reporter=NullCommitReporter(),
        committer=committer,
        specific_files=specific_files,
    )
    result.revision_id = revid
    # TODO(jelmer): Support running sbuild & verify lintian warning is gone?
    return result, summary
Example #20
    def __init__(self,
                 buildable,
                 *,
                 binary_version_suffix='',
                 link_builds=(),
                 orig_dirs=('..', ),
                 output_dir=None,
                 output_parent,
                 vendor):
        self.buildable = buildable

        self._product_prefix = None
        self._source_version = None
        self.arch_wildcards = set()
        self.archs = []
        self.autopkgtest_failures = []
        self.binary_packages = []
        self.binary_version_suffix = binary_version_suffix
        self.changes_produced = {}
        self.dirname = None
        self.dsc = None
        self.dsc_name = None
        self.indep = False
        self.indep_together_with = None
        self.link_builds = link_builds
        self.logs = {}
        self.merged_changes = OrderedDict()
        self.nominal_suite = None
        self.orig_dirs = orig_dirs
        self.output_dir = output_dir
        self.piuparts_failures = []
        self.source_from_archive = False
        self.source_package = None
        self.source_together_with = None
        self.sourceful_changes_name = None
        self.suite = None
        self.vendor = vendor

        if os.path.exists(self.buildable):
            if os.path.isdir(self.buildable):
                changelog = os.path.join(self.buildable, 'debian', 'changelog')
                changelog = Changelog(open(changelog))
                self.source_package = changelog.get_package()
                self.nominal_suite = changelog.distributions
                self._source_version = Version(changelog.version)
                control = os.path.join(self.buildable, 'debian', 'control')

                if len(changelog.distributions.split()) != 1:
                    raise ArgumentError(
                        'Cannot build for multiple distributions at once')

                for paragraph in Deb822.iter_paragraphs(open(control)):
                    self.arch_wildcards |= set(
                        paragraph.get('architecture', '').split())
                    binary = paragraph.get('package')

                    if binary is not None:
                        self.binary_packages.append(binary)

            elif self.buildable.endswith('.changes'):
                self.dirname = os.path.dirname(self.buildable) or os.curdir
                self.sourceful_changes_name = self.buildable
                sourceful_changes = Changes(open(self.buildable))
                if 'source' not in sourceful_changes['architecture'].split():
                    raise ArgumentError(
                        'Changes file {!r} must be sourceful'.format(
                            self.buildable))

                self.nominal_suite = sourceful_changes['distribution']

                for f in sourceful_changes['files']:
                    if f['name'].endswith('.dsc'):
                        if self.dsc_name is not None:
                            raise ArgumentError(
                                'Changes file {!r} contained more than one '
                                '.dsc file'.format(self.buildable))

                        self.dsc_name = os.path.join(self.dirname, f['name'])

                if self.dsc_name is None:
                    raise ArgumentError(
                        'Changes file {!r} did not contain a .dsc file'.format(
                            self.buildable))

                self.dsc = Dsc(open(self.dsc_name))

            elif self.buildable.endswith('.dsc'):
                self.dirname = os.path.dirname(self.buildable) or os.curdir
                self.dsc_name = self.buildable
                self.dsc = Dsc(open(self.dsc_name))

            else:
                raise ArgumentError(
                    'buildable must be .changes, .dsc or directory, not '
                    '{!r}'.format(self.buildable))
        else:
            self.source_from_archive = True

            if '_' in self.buildable:
                source, version = self.buildable.split('_', 1)
            else:
                source = self.buildable
                version = None

            self.source_package = source
            if version is not None:
                self._source_version = Version(version)

        if self.dsc is not None:
            self.source_package = self.dsc['source']
            self._source_version = Version(self.dsc['version'])
            self.arch_wildcards = set(self.dsc['architecture'].split())
            self.binary_packages = [
                p.strip() for p in self.dsc['binary'].split(',')
            ]

        if self._source_version is not None:
            self._binary_version = Version(
                str(self._source_version) + self.binary_version_suffix)

        timestamp = time.strftime('%Y%m%dt%H%M%S', time.gmtime())

        if self.output_dir is None:
            if self._binary_version is None:
                dirname = '{}_{}'.format(self.source_package, timestamp)
            else:
                dirname = '{}_{}_{}'.format(self.source_package,
                                            self._binary_version, timestamp)

            self.output_dir = os.path.join(output_parent, dirname)

            # For convenience, create a symbolic link for the latest build of
            # each source package: hello_latest -> hello_2.10-1_20170319t102623
            unversioned_symlink = os.path.join(output_parent,
                                               self.source_package + '_latest')

            with suppress(FileNotFoundError):
                os.unlink(unversioned_symlink)

            os.symlink(dirname, unversioned_symlink)

            # If we know the version, also create a symbolic link for the
            # latest build of each source/version pair:
            # hello_2.10-1 -> hello_2.10-1_20170319t102623
            if self._binary_version is not None:
                versioned_symlink = os.path.join(
                    output_parent, '{}_{}'.format(self.source_package,
                                                  self._binary_version))

                with suppress(FileNotFoundError):
                    os.unlink(versioned_symlink)

                os.symlink(dirname, versioned_symlink)

        # It's OK if the output directory exists but is empty.
        with suppress(FileNotFoundError):
            os.rmdir(self.output_dir)

        # Otherwise, if someone already created this, we'll just crash out.
        os.mkdir(self.output_dir)

        if self.dsc is not None:
            abs_file = os.path.abspath(self.dsc_name)
            abs_dir, base = os.path.split(abs_file)
            os.symlink(abs_file, os.path.join(self.output_dir, base))

            for l in self.link_builds:
                symlink = os.path.join(l, base)

                with suppress(FileNotFoundError):
                    os.unlink(symlink)

                os.symlink(abs_file, symlink)

            for f in self.dsc['files']:
                abs_file = os.path.join(abs_dir, f['name'])
                os.symlink(abs_file, os.path.join(self.output_dir, f['name']))

                for l in self.link_builds:
                    symlink = os.path.join(l, f['name'])

                    with suppress(FileNotFoundError):
                        os.unlink(symlink)

                    os.symlink(abs_file, symlink)
Example #21
    def runTest(self):
        xfail_path = os.path.join(self._path, "xfail")
        if os.path.exists(xfail_path):
            with open(xfail_path, "r") as f:
                reason = f.read()  # noqa: F841
            unittest.expectedFailure(self)
            return
        env = dict(os.environ.items())
        cl_path = os.path.join(self._testdir, "debian/changelog")
        if os.path.exists(cl_path):
            with open(cl_path, "rb") as f:
                cl = Changelog(f, max_blocks=1)
            if cl.distributions == "UNRELEASED":
                current_version = cl.version
            else:
                current_version = cl.version
                increment_version(current_version)
        else:
            current_version = "1.0-1"
        env["CURRENT_VERSION"] = str(current_version)
        env["NET_ACCESS"] = "disallow"
        env["MINIMUM_CERTAINTY"] = "possible"
        env_path = os.path.join(self._path, "env")
        if os.path.exists(env_path):
            with open(env_path, "r") as f:
                for line in f:
                    key, value = line.rstrip("\n").split("=")
                    env[key] = value
        p = subprocess.Popen(
            self._fixer.script_path, cwd=self._testdir, stdout=subprocess.PIPE, env=env
        )
        (stdout, err) = p.communicate("")
        self.assertEqual(p.returncode, 0)
        out_path = os.path.join(self._path, "out")
        p = subprocess.Popen(
            [
                "diff",
                "--no-dereference",
                "-x",
                "*~",
                "-ur",
                os.path.join(self._path, os.readlink(out_path))
                if os.path.islink(out_path)
                else out_path,
                self._testdir,
            ],
            stdout=subprocess.PIPE,
        )
        (diff, stderr) = p.communicate("")
        self.assertIn(p.returncode, (0, 1), "Unexpected exit code %d" % p.returncode)
        if diff.decode() != "":
            raise AssertionError("unexpected output: %s" % diff.decode())
        self.assertMultiLineEqual(diff.decode(), "")

        if (
            not os.path.islink(out_path)
            or os.readlink(os.path.join(self._path, "out")) != "in"
        ):
            check_message = True
            result = parse_script_fixer_output(stdout.decode())
            self.assertTrue(
                set(result.fixed_lintian_tags).issubset(self._fixer.lintian_tags),
                "fixer %s claims to fix tags (%r) not declared "
                "in index.desc (%r)"
                % (
                    self._fixer_name,
                    result.fixed_lintian_tags,
                    self._fixer.lintian_tags,
                ),
            )
        else:
            check_message = False

        message_path = os.path.join(self._path, "message")
        if os.path.exists(message_path) or check_message:
            with open(message_path, "r") as f:
                # Assert that message on stdout matches
                self.assertEqual(stdout.decode().strip(), f.read().strip())
Example #22
    def _add_changelog(self, package, baseline_date, replacements):
        # TODO: evaluate baseline by package
        package_changelog_path = None
        package_name = self._get_package_value('package', package)
        if os.path.isfile(
                os.path.join(self.raw_input, package_name,
                             'changelog.Debian.gz')):
            package_changelog_path = os.path.join(self.raw_input, package_name,
                                                  'changelog.Debian.gz')
        elif os.path.isfile(
                os.path.join(self.raw_input, package_name, 'changelog.gz')):
            package_changelog_path = os.path.join(self.raw_input, package_name,
                                                  'changelog.gz')
        else:
            logging.warning(
                "No changelog found for package '{}'.".format(package_name))

        if package_changelog_path:
            with gzip.open(package_changelog_path) as fh:
                try:
                    changelog = Changelog(fh)
                except UnicodeDecodeError as e:
                    raise FatalError(
                        "Failed to parse changelog of {}:\n{}".format(
                            package_name, str(e)))

                if not changelog.package or not changelog.date or not changelog.author:
                    logging.warning(
                        "The changelog of package '{}' is incomplete.".format(
                            package_name))
                    return

                package_dict = dict()
                package_dict['author'] = changelog.author
                package_dict['date'] = changelog.date
                package_dict['short_date'] = self._parse_date(
                    changelog.date).strftime("%d. %B %Y")
                package_dict['version'] = str(changelog.get_version())
                package_dict['package'] = changelog.package

                change_blocks = list()
                for change_block in changelog:
                    changeblock_date = self._parse_date(change_block.date)

                    if changeblock_date <= baseline_date:
                        break

                    if change_block.version <= Version(
                            self.baseline_versions.get(package_name, '0.0.0')):
                        break

                    block_dict = dict()
                    block_dict['author'] = change_block.author
                    block_dict['date'] = change_block.date
                    block_dict['short_date'] = changeblock_date.strftime(
                        "%d. %B %Y")
                    block_dict['version'] = str(change_block.version)
                    block_dict['package'] = change_block.package
                    block_dict['distributions'] = change_block.distributions
                    block_dict['urgency'] = change_block.urgency
                    changes = self._apply_replacements(change_block.changes(),
                                                       replacements)
                    block_dict['changes'] = ChangesAnnotator(
                        package_name).annotate(changes)
                    change_blocks.append(block_dict)

                package_dict['change_blocks'] = change_blocks

                package['changelog'] = package_dict
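
Examples #3, #5 and #22 share the same idiom: iterate the parsed Changelog newest-first and stop at a baseline version. A standalone sketch of that idiom (assuming text holds the complete changelog contents as a string):

# "Entries newer than a baseline" idiom shared by several examples above.
from debian.changelog import Changelog, Version

def entries_since(text, since_version):
    baseline = Version(since_version)
    newer = []
    for block in Changelog(text):   # blocks come newest first
        if block.version <= baseline:
            break
        newer.append(str(block))
    return ''.join(newer)
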
Example #23
here = abspath(dirname(__file__))
changelog = join(here, 'debian/changelog')
requirements = open(join(here, 'requires.txt')).readlines()
dev_requirements = open(join(here, 'dev_requires.txt')).readlines()

additional = {}

# debhelper setup FAKEROOTKEY variable
if 'FAKEROOTKEY' not in environ:
    additional['entry_points'] = {
        'console_scripts': ['homebank-web = homebank.cli:manage']
    }

    requirements.extend(dev_requirements)

setup(name='homebank-wui',
      version=str(Changelog(open(changelog)).get_version()),
      description='Web User Interface for Homebank',
      author='Alex Rembish',
      author_email='*****@*****.**',
      url='https://github.com/rembish/homebank-wui',
      packages=find_packages(),
      include_package_data=True,
      data_files=[
          ('/usr/share/homebank/', ['data/sample.xhb']),
      ],
      zip_safe=False,
      install_requires=requirements,
      **additional)
Example #24
def import_uncommitted(tree, subpath):
    from breezy.plugins.debian.import_dsc import (
        DistributionBranch,
        DistributionBranchSet,
    )

    cl_path = os.path.join(subpath, "debian/changelog")
    with tree.get_file(cl_path) as f:
        tree_cl = Changelog(f)
        package_name = tree_cl.package
    with contextlib.ExitStack() as es:
        archive_source = es.enter_context(tempfile.TemporaryDirectory())
        try:
            retrieve_source(package_name, archive_source)
        except AptSourceError as e:
            if isinstance(e.reason, list):
                reason = e.reason[-1]
            else:
                reason = e.reason
            raise ChangerError("apt-source-error", reason)
        except NoAptSources:
            raise ChangerError(
                "no-apt-sources",
                "No sources configured in /etc/apt/sources.list")
        [subdir] = [e.path for e in os.scandir(archive_source) if e.is_dir()]
        with open(os.path.join(subdir, "debian", "changelog"), "r") as f:
            archive_cl = Changelog(f)
        missing_versions = []
        for block in archive_cl:
            if block.version == tree_cl.version:
                break
            missing_versions.append(block.version)
        else:
            raise TreeVersionNotInArchiveChangelog(tree_cl.version)
        if len(missing_versions) == 0:
            raise NoMissingVersions(tree_cl.version, archive_cl.version)
        logging.info("Missing versions: %s",
                     ", ".join(map(str, missing_versions)))
        ret = []
        dbs = DistributionBranchSet()
        db = DistributionBranch(tree.branch, tree.branch, tree=tree)
        dbs.add_branch(db)
        if tree_cl.version.debian_revision:
            logging.info("Extracting upstream version %s.",
                         tree_cl.version.upstream_version)
            upstream_dir = es.enter_context(tempfile.TemporaryDirectory())
            try:
                upstream_tips = db.pristine_upstream_source.version_as_revisions(
                    tree_cl.package, tree_cl.version.upstream_version)
            except PackageVersionNotPresent:
                raise TreeUpstreamVersionMissing(
                    tree_cl.version.upstream_version)
            db.extract_upstream_tree(upstream_tips, upstream_dir)
        no_preparation = not tree.has_filename(".pc/applied-patches")
        version_path = {}
        for version in missing_versions:
            output_dir = es.enter_context(tempfile.TemporaryDirectory())
            download_snapshot(package_name,
                              version,
                              output_dir,
                              no_preparation=no_preparation)
            version_path[version] = output_dir
        for version in reversed(missing_versions):
            logging.info("Importing %s", version)
            dsc_path = os.path.join(version_path[version],
                                    "%s_%s.dsc" % (package_name, version))
            tag_name = db.import_package(dsc_path)
            revision = db.version_as_revisions(version)
            ret.append((tag_name, version, revision))
    return ret
Example #25
    def parse_deb_project(self, project, product_data, project_data, branch):
        if product_data['type'] != 'deb':
            return project

        self.repo.import_project(project, branch=branch)
        project_data['commit'] = self.repo._.head.commit.hexsha

        spec_path = product_data['spec-dirs'][0]
        control_file = os.path.join(self.repo._.working_dir, spec_path,
                                    'control')

        if not os.path.exists(control_file):
            raise FileNotFoundException(
                "Control file '{}' not found".format(control_file))

        changelog_file = os.path.join(self.repo._.working_dir, spec_path,
                                      'changelog')

        if not os.path.exists(changelog_file):
            raise FileNotFoundException(
                "Changelog file '{}' not found".format(changelog_file))

        series_file = os.path.join(self.repo._.working_dir, spec_path,
                                   'patches/series')

        source_packages = []
        binary_packages = []

        for para in deb822.Sources.iter_paragraphs(open(control_file),
                                                   use_apt_pkg=False):
            if 'Source' in para:
                source_packages.append(para)

        for para in deb822.Packages.iter_paragraphs(open(control_file),
                                                    use_apt_pkg=False):
            if 'Package' in para:
                binary_packages.append(para)

        changelog = Changelog(open(changelog_file), strict=False)

        project_data['branch'] = branch

        project_data['spec_project'] = project
        project_data['source_project'] = project.replace('-build', '')

        project_data['source_package'] = source_packages[0]['Source']
        project_data['section_name'] = source_packages[0]['Section']

        for key in ['Build-Depends', 'Build-Depends-Indep']:
            for dep in map(lambda s: s.strip(),
                           source_packages[0].get(key, '').split(',')):
                dep = '|'.join(
                    map(lambda s: s.split()[0] if s else '', dep.split('|')))
                if dep:
                    project_data.setdefault('build_depends', []).append(dep)

        if os.path.exists(series_file):
            for patch_name in open(series_file):
                patch_name = patch_name.rstrip()
                if re.match(r'^\s*#', patch_name):
                    continue
                project_data['patches'].append(patch_name)

        project_data.setdefault('version', {})
        project_data['version'] = {
            'epoch': changelog.epoch or 0,
            'version': changelog.debian_version,
            'revision': changelog.debian_revision,
            'full_version': changelog.full_version
        }
        project_data['upstream_version'] = changelog.upstream_version

        for block in changelog:
            project_data['distribution_version'] = block.version.full_version
            if re.match(r'.*\@(ubuntu|canonical)\.com.*', block.author):
                project_data['distribution']['alias'] = block.distributions
                if 'cloud' in block.version.full_version:
                    project_data['package_origin'] = 'Ubuntu Cloud Archive'
                else:
                    project_data['package_origin'] = 'Ubuntu'
                break
            if re.match(r'.*\@debian\.org.*', block.author):
                project_data['distribution']['alias'] = block.distributions
                project_data['package_origin'] = 'Debian'
                break
        else:
            project_data['package_origin'] = 'Mirantis'

        for package in binary_packages:
            pkg = {}
            pkg['name'] = package['Package']
            for dep in map(lambda s: s.strip(),
                           package.get('Depends', '').split(',')):
                dep = '|'.join(
                    map(lambda s: s.split()[0] if s else '', dep.split('|')))
                if dep:
                    pkg.setdefault('depends', []).append(dep)
            project_data.setdefault('binary_packages', []).append(pkg)
Example #26
def main():  # noqa: C901
    import argparse
    import breezy  # noqa: E402

    breezy.initialize()
    import breezy.git  # noqa: E402
    import breezy.bzr  # noqa: E402

    parser = argparse.ArgumentParser(prog="deb-transition-apply")
    parser.add_argument(
        "--directory",
        metavar="DIRECTORY",
        help="directory to run in",
        type=str,
        default=".",
    )
    parser.add_argument(
        "--no-update-changelog",
        action="store_false",
        default=None,
        dest="update_changelog",
        help="do not update the changelog",
    )
    parser.add_argument(
        "--update-changelog",
        action="store_true",
        dest="update_changelog",
        help="force updating of the changelog",
        default=None,
    )
    parser.add_argument(
        "--allow-reformatting",
        default=None,
        action="store_true",
        help=argparse.SUPPRESS,
    )
    parser.add_argument("--version",
                        action="version",
                        version="%(prog)s " + version_string)
    parser.add_argument(
        "--identity",
        help="Print user identity that would be used when committing",
        action="store_true",
        default=False,
    )
    parser.add_argument("--debug",
                        help="Describe all considered changes.",
                        action="store_true")
    parser.add_argument("benfile",
                        help="Benfile to read transition from.",
                        type=str)

    args = parser.parse_args()

    with open(args.benfile, 'r') as f:
        ben = parse_ben(f)

    wt, subpath = WorkingTree.open_containing(args.directory)
    if args.identity:
        logging.info('%s', get_committer(wt))
        return 0

    try:
        check_clean_tree(wt, wt.basis_tree(), subpath)
    except WorkspaceDirty:
        logging.info("%s: Please commit pending changes first.", wt.basedir)
        return 1

    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    update_changelog = args.update_changelog
    allow_reformatting = args.allow_reformatting

    try:
        cfg = Config.from_workingtree(wt, subpath)
    except FileNotFoundError:
        pass
    else:
        if update_changelog is None:
            update_changelog = cfg.update_changelog()
        if allow_reformatting is None:
            allow_reformatting = cfg.allow_reformatting()

    if allow_reformatting is None:
        allow_reformatting = False

    if control_files_in_root(wt, subpath):
        debian_path = subpath
    else:
        debian_path = os.path.join(subpath, 'debian')

    try:
        result = apply_transition(wt,
                                  debian_path,
                                  ben,
                                  update_changelog=args.update_changelog,
                                  allow_reformatting=allow_reformatting)
    except PackageNotAffected:
        report_okay("nothing-to-do", "Package not affected by transition")
        return 0
    except PackageAlreadyGood:
        report_okay("nothing-to-do", "Transition already applied to package")
        return 0
    except PackageNotBad:
        report_okay("nothing-to-do", "Package not bad")
        return 0
    except FormattingUnpreservable as e:
        report_fatal(
            "formatting-unpreservable",
            "unable to preserve formatting while editing %s" % e.path,
        )
        return 1
    except GeneratedFile as e:
        report_fatal("generated-file", "unable to edit generated file: %r" % e)
        return 1
    except NotDebianPackage:
        report_fatal('not-debian-package', 'Not a Debian package.')
        return 1
    except ChangeConflict as e:
        report_fatal('change-conflict',
                     'Generated file changes conflict: %s' % e)
        return 1

    if not result:
        report_okay("nothing-to-do", "no changes from transition")
        return 0

    changelog_path = os.path.join(debian_path, "changelog")

    if update_changelog is None:
        from .detect_gbp_dch import guess_update_changelog
        from debian.changelog import Changelog

        with wt.get_file(changelog_path) as f:
            cl = Changelog(f, max_blocks=1)

        dch_guess = guess_update_changelog(wt, debian_path, cl)
        if dch_guess:
            update_changelog = dch_guess[0]
            _note_changelog_policy(update_changelog, dch_guess[1])
        else:
            # Assume we should update changelog
            update_changelog = True

    if update_changelog:
        summary = 'Apply transition %s.' % ben['title']
        if result.bugno:
            summary += ' Closes: #%d' % result.bugno
        add_changelog_entry(wt, changelog_path, [summary])

    if os.environ.get("SVP_API") == "1":
        with open(os.environ["SVP_RESULT"], "w") as f:
            json.dump(
                {
                    "description": "Apply transition.",
                    "value": result.value(),
                    "context": ben
                }, f)

    logging.info("Applied transition %s", ben['title'])
    return 0
Exemple #27
0
        warnings.warn("Could not evaluate Changelog entry {!r}.".format(line),
                      SyntaxWarning)

    if change:
        clist.append(change)

    return clist


with changelog_file.open('r', encoding='utf-8',
                         errors='backslashreplace') as fh:

    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        ch = Changelog(fh)

        if len(w):
            print("There were {nr} warnings on reading {f!r}.".format(
                nr=len(w), f=filename),
                  file=sys.stderr)
            sys.exit(5)

    # print("Changelog {f!r} hat {nr} Einträge.".format(f=filename, nr=len(ch)), file=sys.stderr)

    days = {}

    for block in ch:

        lines = []
        day_str = re.sub(r'\s+\d\d:\d\d:\d\d\s+[+-]?\d{4}$', '', block.date)
Exemple #28
0
def script_runner(   # noqa: C901
    local_tree: WorkingTree, script: Union[str, List[str]],
    commit_pending: Optional[bool] = None,
    resume_metadata=None, subpath: str = '',
    update_changelog: Optional[bool] = None,
    extra_env: Optional[Dict[str, str]] = None,
    committer: Optional[str] = None
) -> CommandResult:  # noqa: C901
    """Run a script in a tree and commit the result.

    This ignores newly added files.

    Args:
      local_tree: Local tree to run script in
      script: Script to run
      commit_pending: Whether to commit pending changes
        (True, False or None: only commit if there were no commits by the
         script)
    """
    if control_files_in_root(local_tree, subpath):
        debian_path = subpath
    else:
        debian_path = os.path.join(subpath, "debian")
    if update_changelog is None:
        dch_guess = guess_update_changelog(local_tree, debian_path)
        if dch_guess:
            if isinstance(dch_guess, tuple):  # lintian-brush < 1.22
                update_changelog, explanation = dch_guess
            else:
                update_changelog = dch_guess.update_changelog
                explanation = dch_guess.explanation
            logging.info('%s', explanation)
        else:
            # Assume yes.
            update_changelog = True

    cl_path = os.path.join(debian_path, 'changelog')
    try:
        with open(local_tree.abspath(cl_path), 'r') as f:
            cl = Changelog(f)
            source_name = cl[0].package
    except FileNotFoundError:
        source_name = None

    env = dict(os.environ)
    if extra_env:
        env.update(extra_env)

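    # Environment protocol for the child script: SVP_API/SVP_RESULT (and
    # SVP_RESUME below) carry machine-readable metadata, DEB_SOURCE names the
    # source package, and DEB_UPDATE_CHANGELOG tells the script whether it
    # should add its own debian/changelog entry.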
    env['SVP_API'] = '1'
    if source_name:
        env['DEB_SOURCE'] = source_name

    if update_changelog:
        env['DEB_UPDATE_CHANGELOG'] = 'update'
    else:
        env['DEB_UPDATE_CHANGELOG'] = 'leave'

    last_revision = local_tree.last_revision()
    orig_tags = local_tree.branch.tags.get_tag_dict()
    with tempfile.TemporaryDirectory() as td:
        env['SVP_RESULT'] = os.path.join(td, 'result.json')
        if resume_metadata:
            env['SVP_RESUME'] = os.path.join(td, 'resume-metadata.json')
            with open(env['SVP_RESUME'], 'w') as f:
                json.dump(resume_metadata, f)
        p = subprocess.Popen(
            script, cwd=local_tree.abspath(subpath), stdout=subprocess.PIPE,
            shell=isinstance(script, str), env=env)
        (description_encoded, err) = p.communicate(b"")
        try:
            with open(env['SVP_RESULT'], 'r') as f:
                try:
                    result_json = json.load(f)
                except json.decoder.JSONDecodeError as e:
                    raise ResultFileFormatError(e)
        except FileNotFoundError:
            result_json = None
        if p.returncode != 0:
            if result_json is not None:
                raise DetailedFailure.from_json(source_name, result_json)
            raise ScriptFailed(script, p.returncode)
        # If the changelog didn't exist earlier, then hopefully it was created
        # now.
        if source_name is None:
            try:
                with open(local_tree.abspath(cl_path), 'r') as f:
                    cl = Changelog(f)
                    source_name = cl[0].package
            except FileNotFoundError:
                raise MissingChangelog(cl_path)
        if result_json is not None:
            result = CommandResult.from_json(source_name, result_json)
        else:
            result = CommandResult(source=source_name)
    if not result.description:
        result.description = description_encoded.decode().replace("\r", "")
    new_revision = local_tree.last_revision()
    if result.tags is None:
        result.tags = []
        for name, revid in local_tree.branch.tags.get_tag_dict().items():
            if orig_tags.get(name) != revid:
                result.tags.append((name, revid))
    if last_revision == new_revision and commit_pending is None:
        # Automatically commit pending changes if the script did not
        # touch the branch.
        commit_pending = True
    if commit_pending:
        if update_changelog and result.description and local_tree.has_changes():
            add_changelog_entry(
                local_tree,
                os.path.join(debian_path, 'changelog'),
                [result.description])
        local_tree.smart_add([local_tree.abspath(subpath)])
        try:
            new_revision = local_tree.commit(
                result.description, allow_pointless=False,
                committer=committer)
        except PointlessCommit:
            pass
    if new_revision == last_revision:
        raise ScriptMadeNoChanges()
    result.old_revision = last_revision
    result.new_revision = new_revision
    return result
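

# Illustrative usage sketch: the script name below is hypothetical, and
# commit_pending/update_changelog are left as None so script_runner falls
# back to its guessing behaviour described above.
def _example_script_runner_usage():
    from breezy.workingtree import WorkingTree

    wt, subpath = WorkingTree.open_containing('.')
    result = script_runner(
        wt, script=['./apply-fix.sh'],  # hypothetical fix-up script
        commit_pending=None,
        subpath=subpath,
        update_changelog=None)
    return result.source, result.new_revision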
Exemple #29
0
def _get_version():
    with open('debian/changelog', 'r') as fh:
        changelog = Changelog(fh)
    return changelog.full_version
Exemple #30
0
            print('Python {}.{}.{} or better is required'.format(
                major, minor, micro))
        else:
            print('Python {}.{}.{} ({}) or better is required'.format(
                major, minor, micro,
                hex(release)[2:]))
        sys.exit(1)


require_python(0x30500f0)

if Changelog is None:
    __version__ = 'dev'
else:
    with open('debian/changelog', encoding='utf-8') as infp:
        __version__ = str(Changelog(infp).get_version())
        # Write the version out to the package directory so `ubuntu-image
        # --version` can display it.
        with open('ubuntu_image/version.txt', 'w', encoding='utf-8') as outfp:
            print(__version__, file=outfp)

# LP: #1631156 - We want the flake8 entry point for all testing purposes, but
# we do not want to install an ubuntu_image.egg-info/entry_points.txt file
# with the flake8 entry point, since this will globally break other packages.
# Unfortunately we cannot adopt flufl.testing since that's not available
# before Ubuntu 17.04, and we still need to support 16.04 LTS.
entry_points = dict()
if os.environ.get('UBUNTU_IMAGE_BUILD_FOR_TESTING', False):
    entry_points['flake8.extension'] = [
        'B4 = ubuntu_image.testing.flake8:ImportOrder'
    ]
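
# Presumably this dict is later handed to setuptools, along the lines of the
# following sketch (names other than entry_points/__version__ are assumptions):
#
#     from setuptools import setup, find_packages
#     setup(name='ubuntu-image',
#           version=__version__,
#           packages=find_packages(),
#           entry_points=entry_points)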