Example #1
    def __init__(self, filename=None, string=None):
        """
        Object constructor. The object allows the user to specify **either**:

        #. a path to a *changes* file to parse
        #. a string with the *changes* file contents.

        ::

            a = Changes(filename='/tmp/packagename_version.changes')
            b = Changes(string='Source: packagename\\nMaintainer: ...')

        ``filename``
            Path to *changes* file to parse.

        ``string``
            *changes* file in a string to parse.
        """
        if (filename and string) or (not filename and not string):
            raise TypeError

        if filename:
            self._absfile = os.path.abspath(filename)
            self._directory = os.path.dirname(self._absfile)
            self._data = deb822.Changes(open(filename, encoding='utf-8'))
            self.basename = os.path.basename(filename)
        else:
            self._data = deb822.Changes(string)

        if len(self._data) == 0:
            raise ChangesFileException('Changes file could not be parsed.')
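A minimal sketch of the constructor contract described above (field values are illustrative and assume the Changes class just shown is importable): exactly one of filename or string must be supplied, otherwise TypeError is raised.

# Sketch: exactly one of filename/string must be supplied (values are made up).
try:
    Changes()  # neither argument given -> TypeError
except TypeError:
    pass

c = Changes(string='Source: hello\nMaintainer: Jane Doe <jane@example.org>\n')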
Example #2
def forge_changes_file(fname, dist, **kwargs):
    dsc = deb822.Dsc(open(fname, 'r'))

    changes = deb822.Changes()
    changes['Format'] = '1.8'
    changes['Date'] = email.utils.formatdate(
        time.mktime(dt.datetime.utcnow().timetuple()), usegmt=True)

    for key in [
            'Source', 'Version', 'Maintainer', 'Checksums-Sha1',
            'Checksums-Sha256', 'Files'
    ]:
        if key not in dsc:
            raise MissingChangesFieldException(key)

        changes[key] = dsc[key]

    for algo, key, h, s, f in file_info(fname):
        if algo == 'md5':
            algo = 'md5sum'

        entry = deb822.Deb822Dict()
        entry[algo] = h
        entry['size'] = s
        entry['name'] = f

        changes[key].append(entry)

    for entry in changes['Files']:
        entry['section'] = 'not-implemented'
        entry['priority'] = 'not-implemented'

    changes['Distribution'] = dist
    changes['Urgency'] = 'low'
    changes['Changed-By'] = 'Archive Rebuilder <*****@*****.**>'
    changes['Architecture'] = 'source'
    changes['Binary'] = 'not implemented either'
    changes['Description'] = """This feature is not implemented.
 This is pretty damn hard to deal with right now. I might write this
 later."""
    changes['Changes'] = """
 {source} ({version}) {dist}; urgency={urgency}
 .
   * This is a fake ChangeLog entry used by ricky to force a rebuild
     on debuild.me.""".format(
        source=changes['Source'],
        version=changes['Version'],
        urgency=changes['Urgency'],
        dist=dist,
    )

    for k, v in kwargs.items():
        changes[k] = v

    return changes
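A hypothetical invocation might look like the sketch below; the .dsc path and output filename are made up, and any extra keyword argument simply overrides the corresponding field.

# Hypothetical usage (paths are illustrative, not from the source):
changes = forge_changes_file('hello_1.0-1.dsc', 'unstable', Urgency='medium')
with open('hello_1.0-1_source.changes', 'w', encoding='utf-8') as out:
    out.write(changes.dump())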
Example #3
    def test_unicode_values(self):
        """Deb822 objects should contain only unicode values

        (Technically, they are allowed to contain any type of object, but when
        parsed from files, and when only string-type objects are added, the
        resulting object should have only unicode values.)
        """

        objects = []
        objects.append(deb822.Deb822(UNPARSED_PACKAGE))
        objects.append(deb822.Deb822(CHANGES_FILE))
        with open_utf8('test_Packages') as f:
            objects.extend(deb822.Deb822.iter_paragraphs(f))
        with open_utf8('test_Packages') as f:
            objects.extend(deb822.Packages.iter_paragraphs(f))
        with open_utf8('test_Sources') as f:
            objects.extend(deb822.Deb822.iter_paragraphs(f))
        with open('test_Sources.iso8859-1', 'rb') as f:
            objects.extend(
                deb822.Deb822.iter_paragraphs(f, encoding="iso8859-1"))
        for d in objects:
            for value in d.values():
                self.assertTrue(isinstance(value, six.text_type))

        # The same should be true for Sources and Changes except for their
        # _multivalued fields
        multi = []
        multi.append(deb822.Changes(CHANGES_FILE))
        multi.append(
            deb822.Changes(SIGNED_CHECKSUM_CHANGES_FILE %
                           CHECKSUM_CHANGES_FILE))
        with open_utf8('test_Sources') as f:
            multi.extend(deb822.Sources.iter_paragraphs(f))
        for d in multi:
            for key, value in d.items():
                if key.lower() not in d.__class__._multivalued_fields:
                    self.assertTrue(isinstance(value, six.text_type))
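The distinction the test draws can be reproduced on a tiny paragraph: simple fields come back as text, while a multivalued field such as Files is parsed into a list of per-file dictionaries. A minimal sketch with made-up field content:

# Sketch of simple vs. multivalued fields (content is illustrative).
from debian import deb822

text = ('Source: hello\n'
        'Files:\n'
        ' d41d8cd98f00b204e9800998ecf8427e 0 misc optional hello_1.0-1.dsc\n')
c = deb822.Changes(text)
assert isinstance(c['Source'], str)   # simple field: a text value
assert isinstance(c['Files'], list)   # multivalued field: list of per-file dicts
print(c['Files'][0]['name'])          # -> hello_1.0-1.dsc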
Example #4
def uploadpackage(changes, delete_existing=False):
    """Upload a package to the repository, using the changes file."""
    if delete_existing:
        data = open(changes).read()
        source_package = deb822.Changes(data)
        # Delete .upload file so dupload always refreshes the files.
        upload = upload_filepath(source_package)
        local("rm -f {0}".format(upload))
    local("dupload {0}".format(changes))
    if delete_existing:
        # Remove previous package from repository.
        distribution = "{0}-testing".format(source_package["Distribution"])
        execute(repo.rm_packages, distribution, source_package["Source"])
    # Import new packages into repository.
    run("import-new-debs.sh")
Example #5
def get_upload_data(changesfn):
    achanges = deb822.Changes(open(changesfn))
    changesname = os.path.basename(changesfn)
    delay = os.path.basename(os.path.dirname(changesfn))
    m = re.match(r'([0-9]+)-day', delay)
    if m:
        delaydays = int(m.group(1))
        remainingtime = (delaydays > 0) * max(
            0, 24 * 60 * 60 + os.stat(changesfn).st_mtime - time.time())
        delay = "%d days %02d:%02d" % (max(
            delaydays - 1, 0), int(
                remainingtime / 3600), int(remainingtime / 60) % 60)
    else:
        delaydays = 0
        remainingtime = 0

    uploader = achanges.get('changed-by')
    uploader = re.sub(r'^\s*(\S.*)\s+<.*>', r'\1', uploader)
    with utils.open_file(changesfn) as f:
        fingerprint = SignedFile(f.read(),
                                 keyrings=get_active_keyring_paths(),
                                 require_signature=False).fingerprint
    if "Show-Deferred::LinkPath" in Cnf:
        isnew = 0
        suites = get_suites_source_in(achanges['source'])
        if 'unstable' not in suites and 'experimental' not in suites:
            isnew = 1

        for b in achanges['binary'].split():
            suites = get_suites_binary_in(b)
            if 'unstable' not in suites and 'experimental' not in suites:
                isnew = 1

        if not isnew:
            # we don't link .changes because we don't want other people to
            # upload it with the existing signature.
            for afn in map(lambda x: x['name'], achanges['files']):
                lfn = os.path.join(Cnf["Show-Deferred::LinkPath"], afn)
                qfn = os.path.join(os.path.dirname(changesfn), afn)
                if os.path.islink(lfn):
                    os.unlink(lfn)
                if os.path.exists(qfn):
                    os.symlink(qfn, lfn)
                    os.chmod(qfn, 0o644)
    return (max(delaydays - 1, 0) * 24 * 60 * 60 + remainingtime, changesname,
            delay, uploader, achanges.get('closes', '').split(), fingerprint,
            achanges, delaydays)
Example #6
    def search(self, needle, flags_search):
        #flags_search = ['equals','contains','description','depends',]
        xxx = os.path.join(self.folder, self.search_path)
        list_distributions = os.listdir(xxx)

        global_result = []
        for release in list_distributions:
            if os.path.exists(os.path.join(xxx, release, 'Release')):
                index_file = deb822.Changes(
                    codecs.open(os.path.join(xxx, release, 'Release'),
                                encoding='utf-8'))

                # Extract index files from Release file.
                list_files = [
                    w.strip().split(" ")[2]
                    for w in index_file['md5sum'].split("\n")
                    if w.endswith('Packages')
                ]

                for x in list_files:
                    with codecs.open(os.path.join(xxx, release, x),
                                     encoding='utf-8') as fd:
                        for package in deb822.Deb822.iter_paragraphs(fd):
                            found = False
                            if 'source' in flags_search and (
                                    ('Source' in package and needle == package['Source']) or
                                    ('Source' not in package and needle == package['Package'])):
                                found = True
                            if (('equals' in flags_search and needle == package['Package']) or
                                    ('contains' in flags_search and needle in package['Package']) or
                                    ('description' in flags_search and needle in package['Description'])):
                                found = True
                            if found:
                                global_result.append({
                                    'package': dict(package),
                                    'release': release,
                                    'index': x
                                })
        return global_result
Example #7
    def test_bug487902_multivalued_checksums(self):
        """New multivalued field Checksums was not handled correctly, see #487902."""
        changesobj = deb822.Changes(CHECKSUM_CHANGES_FILE.splitlines())
        self.assertEqual(CHECKSUM_CHANGES_FILE, changesobj.dump())
Example #8
    def test_bug457929_multivalued_dump_works(self):
        """dump() was not working in multivalued classes, see #457929."""
        changesobj = deb822.Changes(CHANGES_FILE.splitlines())
        self.assertEqual(CHANGES_FILE, changesobj.dump())
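Both regression tests rely on dump() returning the paragraph as a string when called with no arguments, so a parse/dump round trip preserves the file. A minimal sketch along the same lines (file names are made up):

# Round-trip sketch (file names are illustrative).
from debian import deb822

with open('hello_1.0-1_amd64.changes', encoding='utf-8') as fh:
    changes = deb822.Changes(fh)
with open('copy.changes', 'w', encoding='utf-8') as out:
    out.write(changes.dump())   # dump() returns the paragraph as a str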
Example #9
    def run_queue(self, debug):

        # check for *.changes in incoming directory
        for changes_file in glob.glob(
                os.path.join(self.incoming_path, "*.changes")):

            # check gpg key
            gpg_res = functions.command_result("gpgv %(changes)s" %
                                               {"changes": changes_file},
                                               output=False)

            if gpg_res == 0:

                # debug
                functions.debug_message(
                    debug, "I: incoming - found %s" % changes_file)

                # load .changes file
                deb_changes = deb822.Changes(open(changes_file))

                # check sha1 checksum
                sha1_checksum = True
                source_files = deb_changes['Checksums-Sha1']
                for source_file in source_files:
                    source_file_path = os.path.join(self.incoming_path,
                                                    source_file['name'])

                    if source_file['size'] != str(
                            os.stat(source_file_path).st_size):
                        print("E: %s does not have a valid size!" %
                              source_file_path)
                        sha1_checksum = False
                    elif source_file['sha1'] != functions.sha1file(
                            source_file_path):
                        print("E: %s does not have a valid SHA1 checksum!" %
                              source_file_path)
                        sha1_checksum = False

                if sha1_checksum:

                    # calculate the next build id
                    build_id = str(
                        len(glob.glob(os.path.join(self.builds_path, "*"))) +
                        1)

                    # build paths
                    build_dir = os.path.join(self.builds_path, build_id)
                    build_dir_source = os.path.join(build_dir, "source")

                    # move files to new source directory
                    os.makedirs(build_dir_source)
                    os.system("chmod 777 %s" % build_dir)
                    os.rename(
                        changes_file,
                        os.path.join(build_dir_source,
                                     os.path.basename(changes_file)))
                    for source_file in source_files:
                        source_file_path = os.path.join(
                            self.incoming_path, source_file['name'])
                        os.rename(
                            source_file_path,
                            os.path.join(build_dir_source,
                                         os.path.basename(
                                             source_file['name'])))

                    # save package info
                    package_info = {}
                    package_info['build_id'] = build_id
                    package_info['timestamp'] = time.strftime(
                        "%Y/%m/%d %H:%M:%S")
                    package_info['package'] = deb_changes['Source']
                    package_info['version'] = deb_changes['Version']
                    package_info['maintainer'] = deb_changes['Maintainer']
                    package_info['changed_by'] = deb_changes['Changed-By']
                    package_info['source_dir'] = build_dir_source
                    package_info['dists'] = self.config.as_list('dists')
                    package_info['archs'] = self.config.as_list('archs')
                    package_info_filename = os.path.join(
                        build_dir, "build.json")
                    functions.json_save(package_info, package_info_filename)

                    # prepare single build requests
                    for dist in self.config.as_list('dists'):
                        for arch in self.config.as_list('archs'):

                            queue = {}
                            queue['build_id'] = build_id
                            queue['package'] = deb_changes['Source']
                            queue['version'] = deb_changes['Version']
                            queue['maintainer'] = deb_changes['Maintainer']
                            queue['changed_by'] = deb_changes['Changed-By']
                            queue['source_dir'] = build_dir_source
                            queue['dist'] = dist
                            queue['arch'] = arch

                            queue_filename = os.path.join(self.queue_path, \
                                "%(id)s_%(package)s_%(version)s_%(dist)s_%(arch)s.json" % \
                                {
                                    "id": build_id,
                                    "package": queue['package'],
                                    "version": queue['version'],
                                    "dist": dist,
                                    "arch": arch,
                                })

                            functions.json_save(queue, queue_filename)

                    sendmail = os.popen("sendmail -t", "w")
                    sendmail.write("From: %s\n" %
                                   "AutoMate <*****@*****.**>")
                    sendmail.write("To: %s\n" % deb_changes['Changed-By'])
                    if deb_changes['Maintainer'] != deb_changes['Changed-By']:
                        sendmail.write("Cc: %s\n" % deb_changes['Maintainer'])
                    sendmail.write("Subject: %(changes)s ACCEPTED into %(instance)s\n" % \
                        {
                            "changes": os.path.basename(changes_file),
                            "instance": self.instance
                        })
                    sendmail.write("\n")
                    sendmail.write("Accepted:\n")
                    for source_file in source_files:
                        sendmail.write("%s\n" % source_file['name'])
                    sendmail.write("\n")
                    sendmail.write("%s\n" % deb_changes['Description'])
                    sendmail.write("%s\n" % deb_changes['Changes'])
                    sendmail.write("\n")
                    sendmail.write("Thank you for your contribution to %s.\n" %
                                   self.instance)
                    sendmail_result = sendmail.close()

            else:

                print "E: %s has not a valid GPG signature!" % os.path.basename(
                    changes_file)
Example #10
    def _get_changes_obj(self):
        val = dict(deb822.Changes(open(self.get_changes_file(), 'r')))
        val = self._obj_strip(val)
        return val
Example #11
    def __init__(self, path):
        self.path = path
        self.filename = osp.basename(path)
        self.changes = deb822.Changes(open(path))
        self.dirname = osp.dirname(path)
Example #12
    async def build(self, build_lease, statesdb):
        package = build_lease.package
        architecture = build_lease.architecture
        version = build_lease.version
        binnmu_version = build_lease.binnmu_version
        binnmu_changelog = build_lease.binnmu_changelog
        with self.mkbuilddir(package, architecture, version) as builddir:
            logging.info(
                f'Starting build of {package}:{architecture} version {build_lease.versionstr()}'
            )
            proc = await asyncio.create_subprocess_exec(
                'sbuild',
                f'--chroot-mode={conf.sbuild_chroot_mode}',
                '-c',
                f'chroot:{conf.sbuild_chroot_name}',
                '-d',
                'unstable',
                '--no-arch-any' if architecture == 'all' else '--no-arch-all',
                *((f'--binNMU={binnmu_version}',
                   f'--make-binNMU={binnmu_changelog}')
                  if binnmu_version is not None else ()),
                '-m',
                conf.maintainer,
                '--keyid',
                conf.sbuild_key_id,
                f'{package}_{version}',
                cwd=builddir,
                stdin=asyncio.subprocess.DEVNULL,
                # sbuild creates the log file itself, so there is no need to keep its stdout
                stdout=asyncio.subprocess.DEVNULL,
                stderr=asyncio.subprocess.DEVNULL)
            await proc.wait()

            # it's not technically necessary to hold the locks past this point - but
            # if we do, that means that if the incoming_loop was waiting for the lock
            # (which should happen on any build taking more than 5 minutes) then the
            # next iteration will pick up the new artifacts we will place there
            try:
                logfile = next(p for p in builddir.glob('*.build')
                               if not p.is_symlink())
            except StopIteration:
                logging.warning('sbuild failed to create log file')
                return
            loginfo = self.scan_log(logfile)

            logging.info(
                f'Build of {package}:{architecture} version {build_lease.versionstr()} completed with status {loginfo["Status"]}'
            )
            logfile.rename(conf.rebuild_logs_dir / logfile.name)
            await statesdb.register_log(loginfo)
            if loginfo['Status'] == 'successful':
                try:
                    changesfile = next(builddir.glob('*.changes'))
                except StopIteration:
                    logging.warning('sbuild failed to create changes file')
                    return
                incomingdir = conf.rebuild_repo_incoming_dir
                with open(changesfile) as fh:
                    changes = deb822.Changes(fh)
                    for fname in (entry['name'] for entry in changes['files']):
                        (builddir / fname).rename(incomingdir / fname)
                changesfile.rename(incomingdir / changesfile.name)
                await build_lease.set_build_result('Uploaded')
            elif loginfo['Status'] == 'attempted':
                await build_lease.set_build_result('Attempted')
            elif loginfo['Status'] == 'given-back':
                await build_lease.set_build_result('Given-Back')
            else:
                warnings.warn(
                    f'Unrecognized status for build of {package} version {version}: {loginfo["Status"]}'
                )
                await build_lease.set_build_result('Internal-Error')
Example #13
    for changed_file in change_file['Checksums-Sha256']:
        if os.path.splitext(changed_file['name'])[1] == '.buildinfo':
            continue

        if changed_file['sha256'] != checksums[changed_file['name']]:
            raise Exception(
                "Bad checksum on file {file}, local is {local}, bintray has {remote}"
                .format(file=changed_file['name'],
                        local=changed_file['sha256'],
                        remote=checksums[changed_file['name']]))
        else:
            print("{file}: OK".format(file=changed_file['name']))


with io.open(changesFile, 'rb') as f:
    d = deb822.Changes(f)
    baseDir = os.path.dirname(os.path.realpath(changesFile))
    parameters = {
        'user': user,
        'repo': repository,
        'package': d['Source'],
        'version': d['Version'],
        'distributions': 'unstable',
        'component': 'main',
        'architectures': 'amd64,i386'
    }

    for file in d['Files']:
        file_type = os.path.splitext(file['name'])[1]
        if file_type == '.buildinfo':
            continue
Example #14
def parse_changes(changes_file):
    """ Parse a changes file into a Changes class. """
    with open(changes_file) as f:
        return deb822.Changes(f)
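Because Deb822 objects parse their input eagerly, returning the Changes object from inside the with block is safe; the file handle is no longer needed afterwards. A minimal usage sketch (the path is illustrative):

changes = parse_changes('hello_1.0-1_amd64.changes')
print(changes['Source'], changes['Version'])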