class TestBugIDsFromChangesFile(TestCaseWithFactory):
    """Test get_bug_ids_from_changes_file."""

    layer = LaunchpadZopelessLayer
    dbuser = config.uploadqueue.dbuser

    def setUp(self):
        super(TestBugIDsFromChangesFile, self).setUp()
        self.changes = Changes({
            'Format': '1.8',
            'Source': 'swat',
            })

    def getBugIDs(self):
        """Serialize self.changes and extract the bug IDs back out of it."""
        serialized = StringIO()
        self.changes.dump(serialized)
        serialized.seek(0)
        return get_bug_ids_from_changes_file(serialized)

    def test_no_bugs(self):
        # With no bug references at all, the result is an empty list.
        self.assertEqual([], self.getBugIDs())

    def test_invalid_bug_id(self):
        # Ids containing non-digit characters are dropped.
        self.changes["Launchpad-Bugs-Fixed"] = "bla"
        self.assertEqual([], self.getBugIDs())

    def test_unknown_bug_id(self):
        # Ids that do not correspond to existing bugs are still returned;
        # close_bug_ids_for_sourcepackagerelease deals with them later.
        self.changes["Launchpad-Bugs-Fixed"] = "45120"
        self.assertEqual([45120], self.getBugIDs())

    def test_valid_bug(self):
        # The id of a real bug is extracted.
        bug = self.factory.makeBug()
        self.changes["Launchpad-Bugs-Fixed"] = str(bug.id)
        self.assertEqual([bug.id], self.getBugIDs())

    def test_case_sensitivity(self):
        # The Launchpad-Bugs-Fixed field name is matched case-insensitively.
        bug = self.factory.makeBug()
        self.changes["LaUnchpad-Bugs-fixed"] = str(bug.id)
        self.assertEqual([bug.id], self.getBugIDs())

    def test_multiple_bugs(self):
        # Several space-separated ids may be listed; invalid tokens in
        # between are ignored.
        bug1 = self.factory.makeBug()
        bug2 = self.factory.makeBug()
        self.changes["Launchpad-Bugs-Fixed"] = "{} invalid {}".format(
            bug1.id, bug2.id)
        self.assertEqual([bug1.id, bug2.id], self.getBugIDs())
class TestBugIDsFromChangesFile(TestCaseWithFactory):
    """Test get_bug_ids_from_changes_file."""

    layer = LaunchpadZopelessLayer
    dbuser = config.uploadqueue.dbuser

    def setUp(self):
        super(TestBugIDsFromChangesFile, self).setUp()
        self.changes = Changes({
            'Format': '1.8',
            'Source': 'swat',
            })

    def getBugIDs(self):
        """Round-trip self.changes through a stream and parse the bug IDs."""
        buffer = StringIO()
        self.changes.dump(buffer)
        buffer.seek(0)
        return get_bug_ids_from_changes_file(buffer)

    def test_no_bugs(self):
        # No bug references yields an empty list.
        self.assertEqual([], self.getBugIDs())

    def test_invalid_bug_id(self):
        # Non-numeric ids are filtered out.
        self.changes["Launchpad-Bugs-Fixed"] = "bla"
        self.assertEqual([], self.getBugIDs())

    def test_unknown_bug_id(self):
        # Ids of nonexistent bugs pass through unfiltered; they are
        # discarded later by close_bug_ids_for_sourcepackagerelease.
        self.changes["Launchpad-Bugs-Fixed"] = "45120"
        self.assertEqual([45120], self.getBugIDs())

    def test_valid_bug(self):
        # A real bug's id is extracted.
        bug = self.factory.makeBug()
        self.changes["Launchpad-Bugs-Fixed"] = "%d" % bug.id
        self.assertEqual([bug.id], self.getBugIDs())

    def test_case_sensitivity(self):
        # The field lookup ignores the case of Launchpad-Bugs-Fixed.
        bug = self.factory.makeBug()
        self.changes["LaUnchpad-Bugs-fixed"] = "%d" % bug.id
        self.assertEqual([bug.id], self.getBugIDs())

    def test_multiple_bugs(self):
        # Multiple space-separated ids are all returned, skipping any
        # invalid tokens between them.
        bug1 = self.factory.makeBug()
        bug2 = self.factory.makeBug()
        self.changes["Launchpad-Bugs-Fixed"] = "%d invalid %d" % (
            bug1.id, bug2.id)
        self.assertEqual([bug1.id, bug2.id], self.getBugIDs())
def parse_changes(fname):
    """Parse a .changes file named fname.

    Return {fname: parsed}, keyed on the basename of fname, or None if
    any of the mandatory fields is missing.
    """
    # Close the file handle instead of leaking it (the original used a
    # bare open() whose handle was never closed).
    with open(fname) as changes_file:
        m = Changes(changes_file)
    wanted_fields = set(['Source', 'Version', 'Architecture', 'Distribution',
                         'Date', 'Maintainer', 'Description', 'Changes'])
    if not set(m.keys()).issuperset(wanted_fields):
        return None
    return {os.path.basename(fname): m}
def getBaseBuildInfo(self):
    """Return a minimal buildinfo stand-in for the tests."""
    # XXX cjwatson 2017-03-20: This will need to be fleshed out if we
    # ever start doing non-trivial buildinfo parsing.
    # A Changes object is close enough.
    info = Changes()
    info["Format"] = "1.0"
    return info
def get_package_list(self: object):
    """Return the paths (under source_parent_dir) of every file listed
    in the changes file."""
    with open(self.changes_file_path, 'r') as changes_fh:
        parsed = Changes(changes_fh)
        return [
            os.path.join(self.source_parent_dir, entry['name'])
            for entry in parsed['Files']
        ]
def get_changes_file(self, dsc):
    """Return the .changes filename corresponding to the given .dsc.

    The name is built from the Source and Version fields of the .dsc
    and self.architecture.
    """
    # Use a context manager so the .dsc file handle is closed (the
    # original leaked it).
    with open(dsc) as dsc_file:
        changes = Changes(dsc_file)
    # Strip any epoch: epochs never appear in filenames.
    version = changes['Version'].split(':')[-1]
    fname = '%s_%s_%s.changes' % (changes['Source'], version,
                                  self.architecture)
    return fname
def changes_filenames(changes_location):
    """Yield the names of the files referenced by a changes file."""
    with open(changes_location) as changes_file:
        parsed = Changes(changes_file.read())
    for entry in parsed["files"]:
        yield entry["name"]
def find_changes(path):
    """Scan path for .changes files and summarize them.

    Returns (names, source, version, distribution, binary_packages)
    where names lists the .changes filenames found, the middle three
    fields must agree across all of them, and binary_packages lists the
    package names of every .deb they reference.

    Raises InconsistentChangesFiles if two changes files disagree on
    Version, Source or Distribution, and NoChangesFile if none exist.
    """
    names = []
    source = None
    version = None
    distribution = None
    binary_packages = []
    for entry in os.scandir(path):
        # Only .changes files are of interest.
        if not entry.name.endswith(".changes"):
            continue
        with open(entry.path, "r") as f:
            changes = Changes(f)
        names.append(entry.name)
        # Each field must match whatever a previously-seen changes file
        # declared (None means "not seen yet").
        if version is not None and changes["Version"] != version:
            raise InconsistentChangesFiles(
                names, 'Version', changes['Version'], version)
        version = changes['Version']
        if source is not None and changes['Source'] != source:
            raise InconsistentChangesFiles(
                names, 'Source', changes['Source'], source)
        source = changes['Source']
        if distribution is not None and changes[
                "Distribution"] != distribution:
            raise InconsistentChangesFiles(
                names, 'Distribution', changes['Distribution'], distribution)
        distribution = changes['Distribution']
        # Package name is the part of the .deb filename before the
        # first underscore.  (The comprehension's `entry` has its own
        # scope and does not clobber the os.scandir loop variable.)
        binary_packages.extend([
            entry['name'].split('_')[0]
            for entry in changes['files']
            if entry['name'].endswith('.deb')
        ])
    if not names:
        raise NoChangesFile(path)
    return (names, source, version, distribution, binary_packages)
def update_official_repository():
    """Update the official repository.

    Finds every .changes file under the binaries build directory and
    includes each one into the reprepro-managed repository, then
    recreates the repository symlinks.
    """
    options = __get_options()

    official_repository_path = options['official_repository_path']
    build_path = options['build_path']
    binaries_build_path = os.path.join(build_path, 'binaries')

    changes_files = []
    for root, dirs, files in os.walk(binaries_build_path):
        changes_files.extend(
            os.path.join(root, _file)
            for _file in fnmatch.filter(files, '*.changes'))

    with lcd(official_repository_path):
        # warn_only: a failed include (e.g. already-present package)
        # should not abort the remaining includes.
        with settings(warn_only=True):
            for changes_file in changes_files:
                # Close each handle instead of leaking one per file.
                with open(changes_file) as changes_fh:
                    changes = Changes(changes_fh)
                distribution = changes['Distribution']
                local(
                    'reprepro -b . include %(distribution)s %(changes_file)s'
                    % {
                        'distribution': distribution,
                        'changes_file': changes_file,
                    })
            local('reprepro -b . createsymlinks')
def __init__(self, filename):
    """Load a .changes file, remembering its path and parent folder.

    Raises Exception with a descriptive message if the file cannot be
    read or parsed.
    """
    try:
        self.filename = filename
        # Close the handle instead of leaking it.
        with open(filename) as changes_fh:
            self.content = Changes(changes_fh)
        self.folder = os.path.dirname(filename)
    # 'except ... as' works on both Python 2.6+ and Python 3; the
    # original comma form is Python-2-only syntax.
    except Exception as ex:
        raise Exception("Failed to load changes file %s. [[%s]]" %
                        (filename, ex))
async def put_changes_file(host: str, user: str, password: str, dist: str,
                           file: str):
    """Upload a changes file plus all files it references to the archive.

    The referenced files are resolved relative to the directory that
    contains the changes file.
    """
    with open(file) as changes_fh:
        changes = Changes(changes_fh)
    parent: str = os.path.dirname(file)
    # Distinct loop-variable name: the original comprehension shadowed
    # the `file` parameter.
    uploads: List[str] = []
    for entry in changes["files"]:
        uploads.append(parent + "/" + entry["name"])
    await post_package_multipart(
        f"{host}/include/{dist}", "changes", file, uploads, user, password)
def cross(envdir, changesfile, target_arch):
    """Cross-convert the .debs listed in changesfile for target_arch.

    Writes a new <name>.cross.changes file next to the original and
    returns its path.  (Python 2 code: note the print statement.)
    """
    with open(changesfile, 'r') as fh:
        changes = Changes(fh)
    new_debs = []
    removed_debs = []
    for fi in changes['Files']:
        print fi
        if _needs_crossing(fi):
            cross_deb_name = _cross_deb(envdir, changesfile, fi, target_arch)
            new_debs.append(cross_deb_name)
            # NOTE(review): the original indentation was ambiguous; this
            # reading treats "cross-only" as a refinement of a crossed
            # deb (remove the native one after crossing) -- confirm
            # against upstream.
            if _cross_only(fi):
                removed_debs.append(fi)
    # Mutate the parsed changes: register crossed debs, drop replaced
    # native ones.
    for cross_deb_name in new_debs:
        _add_deb(changes, cross_deb_name)
    for fileinfo in removed_debs:
        _remove_deb(changes, fileinfo)
    # changesfile is a path.py-style object providing stripext().
    out = changesfile.stripext() + '.cross.changes'
    with open(out, 'w') as ofh:
        changes.dump(ofh)
    return out
def test_run(self):
    # Running a job produces a notification. Detailed tests of which
    # notifications go to whom live in the PackageUpload and
    # PackageUploadMailer tests.
    distroseries = self.factory.makeDistroSeries()
    creator = self.factory.makePerson()
    # The Changed-By address determines who is notified.
    changes = Changes({"Changed-By": format_address_for_person(creator)})
    upload = self.factory.makePackageUpload(
        distroseries=distroseries, archive=distroseries.main_archive,
        changes_file_content=changes.dump().encode("UTF-8"))
    upload.addSource(self.factory.makeSourcePackageRelease())
    self.factory.makeComponentSelection(
        upload.distroseries, upload.sourcepackagerelease.component)
    upload.setAccepted()
    job = PackageUploadNotificationJob.create(
        upload, summary_text='Fake summary')
    # Run under the job's configured database user, as the job runner
    # would.
    with dbuser(job.config.dbuser):
        JobRunner([job]).runAll()
    # Exactly one notification is expected; destructuring asserts that.
    [email] = pop_notifications()
    self.assertEqual(format_address_for_person(creator), email['To'])
    self.assertIn('(Accepted)', email['Subject'])
    self.assertIn('Fake summary', email.get_payload()[0].get_payload())
def make_changes_file_available(self, filename, owner=None):
    """Queue --copy arguments for a changes file and its payload.

    Returns (new_guest_directory, changes_file_basename).
    """
    source_dir = os.path.dirname(filename) or os.curdir
    base = os.path.basename(filename)
    with open(filename) as reader:
        changes = Changes(reader)
    dest = self.new_directory()
    # The changes file itself first, then every file it references.
    copies = [(filename, '{}/{}'.format(dest, base))]
    for entry in changes['files']:
        copies.append((
            os.path.join(source_dir, entry['name']),
            '{}/{}'.format(dest, entry['name'])))
    for host_path, guest_path in copies:
        self.argv.append('--copy={}:{}'.format(host_path, guest_path))
    return dest, base
def get_debs(self, architecture):
    """Return the sorted paths of every .deb for architecture.

    Architecture-independent (_all.deb) packages are always included.
    Paths are resolved relative to each merged changes file's directory.
    """
    ret = set()
    for k, v in self.merged_changes.items():
        # Close each changes file instead of leaking a handle per
        # iteration.
        with open(v) as changes_fh:
            changes = Changes(changes_fh)
        for f in changes['files']:
            if (f['name'].endswith('_{}.deb'.format(architecture)) or
                    f['name'].endswith('_all.deb')):
                # Names in a changes file must be bare filenames.
                assert '/' not in f['name']
                ret.add(
                    os.path.join(
                        os.path.dirname(v) or os.curdir,
                        f['name'],
                    ),
                )
    return sorted(ret)
def make_changes_file_available(self, filename, owner=None):
    """Copy a changes file and its payload into a fresh guest directory.

    Optionally chowns everything to owner.  Returns
    (guest_directory, changes_file_basename).
    """
    host_dir = os.path.dirname(filename) or os.curdir
    base = os.path.basename(filename)
    with open(filename) as reader:
        changes = Changes(reader)
    guest_dir = self.new_directory()
    # Track every guest path (directory included) for the chown below.
    guest_paths = [guest_dir]
    guest_changes = '{}/{}'.format(guest_dir, base)
    guest_paths.append(guest_changes)
    self.copy_to_guest(filename, guest_changes)
    for entry in changes['files']:
        guest_file = '{}/{}'.format(guest_dir, entry['name'])
        guest_paths.append(guest_file)
        self.copy_to_guest(os.path.join(host_dir, entry['name']), guest_file)
    if owner is not None:
        self.check_call(['chown', owner] + guest_paths)
    return guest_dir, base
def _piuparts(
        things, *, architecture, mirrors, storage, suite, tarballs,
        vendor, worker, extra_repositories=()):
    """Run piuparts over .changes files, .deb files and package names.

    Existing .changes files contribute every .deb they reference;
    existing .deb files are used directly; anything that is not a file
    on disk is treated as a package name to fetch from the archive.
    """
    binaries = []

    for thing in things:
        if not os.path.exists(thing):
            # Not a local file: a plain package name.
            binaries.append(Binary(thing))
            continue

        if thing.endswith('.changes'):
            with open(thing) as reader:
                parsed = Changes(reader)
            base_dir = os.path.dirname(thing) or os.curdir
            for entry in parsed['files']:
                if entry['name'].endswith('.deb'):
                    deb_path = os.path.join(base_dir, entry['name'])
                    binaries.append(Binary(deb_path, deb=deb_path))
        elif thing.endswith('.deb'):
            binaries.append(Binary(thing, deb=thing))

    return run_piuparts(
        architecture=architecture,
        binaries=binaries,
        components=(),
        extra_repositories=extra_repositories,
        mirrors=mirrors,
        storage=storage,
        suite=suite,
        tarballs=tarballs,
        vendor=vendor,
        worker=worker,
    )
def get_ubuntu_delta_changelog(srcpkg):
    '''
    Download the Ubuntu changelog and extract the entries since the
    last sync from Debian.
    '''
    archive = Distribution('ubuntu').getArchive()
    spph = archive.getPublishedSources(source_name=srcpkg.getPackageName(),
                                       exact_match=True, pocket='Release')
    debian_info = DebianDistroInfo()
    # Matches a changelog heading line: "package (version) dists; ..."
    topline = re.compile(r'^(\w%(name_chars)s*) \(([^\(\) \t]+)\)'
                         r'((\s+%(name_chars)s+)+)\;'
                         % {'name_chars': '[-+0-9a-z.]'},
                         re.IGNORECASE)
    delta = []
    for record in spph:
        changes_url = record.changesFileUrl()
        if changes_url is None:
            # Native sync
            break
        try:
            response, body = Http().request(changes_url)
        except HttpLib2Error as e:
            Logger.error(str(e))
            break
        if response.status != 200:
            Logger.error("%s: %s %s", changes_url, response.status,
                         response.reason)
            break
        # Reuse the body we already downloaded; the original issued a
        # second identical HTTP request here.
        changes = Changes(body)
        for line in changes['Changes'].splitlines():
            line = line[1:]
            m = topline.match(line)
            if m:
                distribution = m.group(3).split()[0].split('-')[0]
                # Stop once we reach an entry uploaded to Debian: that
                # is the last sync point.
                if debian_info.valid(distribution):
                    break
            if line.startswith(u' '):
                delta.append(line)
        else:
            # Inner loop exhausted without hitting the sync point:
            # keep walking older publications.
            continue
        break
    return '\n'.join(delta)
def getBaseChanges(self):
    """Return a populated Changes skeleton for the upload tests."""
    skeleton = Changes()
    # Field insertion order is preserved by Deb822, so keep the
    # canonical ordering.
    skeleton["Source"] = "mypkg"
    skeleton["Binary"] = "binary"
    skeleton["Date"] = "Fri, 25 Jun 2010 11:20:22 -0600"
    skeleton["Architecture"] = "i386"
    skeleton["Version"] = "0.1"
    skeleton["Distribution"] = "nifty"
    skeleton["Maintainer"] = "Somebody"
    skeleton["Changes"] = "Something changed"
    skeleton["Description"] = "\n An awesome package."
    skeleton["Changed-By"] = "Somebody <*****@*****.**>"
    skeleton["Files"] = [{
        "md5sum": "d2bd347b3fed184fe28e112695be491c",
        "size": "1791",
        "section": "python",
        "priority": "optional",
        "name": "dulwich_0.4.1-1_i386.deb"}]
    return skeleton
def setUp(self):
    """Give every test a minimal parsed changes file to work with."""
    super(TestBugIDsFromChangesFile, self).setUp()
    fields = {
        'Format': '1.8',
        'Source': 'swat',
        }
    self.changes = Changes(fields)
def _autopkgtest(
        things, *, architecture, built_binaries, lxc_24bit_subnet,
        lxc_worker, mirrors, modes, qemu_ram_size, schroot_worker,
        storage, suite, vendor, worker, extra_repositories=()):
    """Run autopkgtest for the given things.

    Each thing may be a .changes file (contributing its .debs and .dsc),
    a .dsc, a .deb, or - if it is not a file on disk - a source package
    name to fetch from the archive.  Returns the set of Source objects
    that accumulated failures.
    """
    binaries = []
    sources = []

    # Sort the inputs into binaries to install and sources to test.
    for thing in things:
        if os.path.exists(thing):
            if thing.endswith('.changes'):
                with open(thing) as reader:
                    c = Changes(reader)

                for f in c['files']:
                    n = os.path.join(
                        os.path.dirname(thing) or os.curdir,
                        f['name'])

                    if f['name'].endswith('.deb'):
                        binaries.append(n)
                    elif f['name'].endswith('.dsc'):
                        sources.append(Source(n, dsc=Dsc(open(n))))
            elif thing.endswith('.dsc'):
                sources.append(Source(thing, dsc=Dsc(open(thing))))
            elif thing.endswith('.deb'):
                binaries.append(thing)
        else:
            # Not a file: a source package name from the archive.
            sources.append(Source(thing))

    failures = set()

    for source in sources:
        # A source is identified either by its .dsc or by its name,
        # never both.
        source_dsc = None
        source_package = None

        if source.dsc is not None:
            source_dsc = source.name
        else:
            source_package = source.name

        # Default: assume prebuilt binaries iff none were provided.
        if built_binaries is None:
            built_binaries = not binaries

        for failure in run_autopkgtest(
                architecture=architecture,
                binaries=binaries,
                built_binaries=built_binaries,
                components=(),
                extra_repositories=extra_repositories,
                lxc_24bit_subnet=lxc_24bit_subnet,
                lxc_worker=lxc_worker,
                mirrors=mirrors,
                modes=modes,
                qemu_ram_size=qemu_ram_size,
                schroot_worker=schroot_worker,
                source_dsc=source_dsc,
                source_package=source_package,
                storage=storage,
                suite=suite,
                vendor=vendor,
                worker=worker,
                ):
            source.failures.append(failure)
            failures.add(source)

    return failures
def __init__(self, buildable, *, vendor):
    """Construct a description of something vectis can build.

    buildable may be a source directory (with debian/changelog), a
    sourceful .changes file, a .dsc file, or - if it does not exist on
    disk - a "source" or "source_version" string naming a package to
    fetch from the archive.

    Raises ArgumentError for unusable inputs.
    """
    self.buildable = buildable
    # Defaults for everything that later stages may fill in.
    self._product_prefix = None
    self.arch_wildcards = set()
    self.archs = []
    self.binary_packages = []
    self.changes_produced = {}
    self.dirname = None
    self.dsc = None
    self.dsc_name = None
    self.indep = False
    self.logs = {}
    self.merged_changes = OrderedDict()
    self.nominal_suite = None
    self.source_from_archive = False
    self.source_package = None
    self.sourceful_changes_name = None
    self.suite = None
    self.together_with = None
    self.vendor = vendor
    self._version = None

    if os.path.exists(self.buildable):
        if os.path.isdir(self.buildable):
            # A source tree: read metadata from debian/changelog and
            # debian/control.
            changelog = os.path.join(self.buildable, 'debian', 'changelog')
            changelog = Changelog(open(changelog))
            self.source_package = changelog.get_package()
            self.nominal_suite = changelog.distributions
            self._version = Version(changelog.version)
            control = os.path.join(self.buildable, 'debian', 'control')

            if len(changelog.distributions.split()) != 1:
                raise ArgumentError('Cannot build for multiple '
                                    'distributions at once')

            for paragraph in Deb822.iter_paragraphs(open(control)):
                self.arch_wildcards |= set(
                    paragraph.get('architecture', '').split())
                binary = paragraph.get('package')

                if binary is not None:
                    self.binary_packages.append(binary)
        elif self.buildable.endswith('.changes'):
            # A .changes file: it must be sourceful and reference
            # exactly one .dsc.
            self.dirname = os.path.dirname(self.buildable)
            self.sourceful_changes_name = self.buildable
            sourceful_changes = Changes(open(self.buildable))

            if 'source' not in sourceful_changes['architecture']:
                raise ArgumentError('Changes file {!r} must be '
                                    'sourceful'.format(self.buildable))

            self.nominal_suite = sourceful_changes['distribution']

            for f in sourceful_changes['files']:
                if f['name'].endswith('.dsc'):
                    if self.dsc_name is not None:
                        raise ArgumentError('Changes file {!r} contained '
                                            'more than one .dsc '
                                            'file'.format(self.buildable))

                    self.dsc_name = os.path.join(self.dirname, f['name'])

            if self.dsc_name is None:
                raise ArgumentError('Changes file {!r} did not contain a '
                                    '.dsc file'.format(self.buildable))

            self.dsc = Dsc(open(self.dsc_name))
        elif self.buildable.endswith('.dsc'):
            self.dirname = os.path.dirname(self.buildable)
            self.dsc_name = self.buildable
            self.dsc = Dsc(open(self.dsc_name))
        else:
            raise ArgumentError('buildable must be .changes, .dsc or '
                                'directory, not {!r}'.format(
                                    self.buildable))
    else:
        # Not on disk: treat as "source" or "source_version" from the
        # archive.
        self.source_from_archive = True

        if '_' in self.buildable:
            source, version = self.buildable.split('_', 1)
        else:
            source = self.buildable
            version = None

        self.source_package = source

        if version is not None:
            self._version = Version(version)

    # The .dsc, when available, is authoritative for package metadata.
    if self.dsc is not None:
        self.source_package = self.dsc['source']
        self._version = Version(self.dsc['version'])
        self.arch_wildcards = set(self.dsc['architecture'].split())
        self.binary_packages = [
            p.strip() for p in self.dsc['binary'].split(',')
        ]
def _sbuild(self, chroot, sbuild_options=()):
    """Run one sbuild invocation for self.arch in the given chroot.

    Builds the argv for sbuild (with many compatibility workarounds for
    sbuild < 0.69.0), runs it on the worker, copies the build log and
    the products back to the host, and updates self.buildable with what
    was produced.
    """
    sbuild_version = self.worker.dpkg_version('sbuild')

    # Backwards compatibility goo for Debian jessie buildd backport:
    # it can't do "sbuild hello", only "sbuild hello_2.10-1".
    if (self.buildable.source_from_archive and
            self.buildable.source_version is None and
            sbuild_version < Version('0.69.0')):
        # Resolve the newest version available in the chroot's archive.
        lines = chroot.check_output(
            [
                'sh', '-c',
                'apt-get update >&2 && '
                '( apt-cache showsrc --only-source "$1" || '
                ' apt-cache showsrc "$1" ) | '
                'sed -ne "s/^Version: *//p"',
                'sh',  # argv[0]
                self.buildable.source_package,
            ],
            universal_newlines=True).strip().splitlines()
        self.buildable.source_version = sorted(map(Version, lines))[-1]
        self.buildable.buildable = '{}_{}'.format(
            self.buildable.source_package,
            self.buildable.source_version,
        )

    # Assemble the sbuild command line, run as the sbuild user with the
    # scratch "out" directory as cwd.
    argv = [
        self.worker.command_wrapper,
        '--chdir',
        '{}/out'.format(self.worker.scratch),
        '--',
        'runuser',
        '-u', 'sbuild',
        '--',
        'env',
    ]

    for k, v in sorted(self.environ.items()):
        argv.append('{}={}'.format(k, v))

    argv.extend((
        'sbuild',
        '-c', chroot.chroot,
        '-d', str(self.buildable.nominal_suite),
        '--no-run-lintian',
    ))

    if self.profiles:
        argv.append('--profiles={}'.format(','.join(self.profiles)))

    for x in self.dpkg_buildpackage_options:
        argv.append('--debbuildopt=' + x)

    for child in chroot.suite.hierarchy[:-1]:
        # The schroot already has the apt sources, we just need the
        # resolver
        if child.sbuild_resolver:
            argv.extend(child.sbuild_resolver)
            break

    # Architecture selection: 'all', arch+all, 'source', or one arch.
    if self.arch == 'all':
        logger.info('Architecture: all')
        argv.append('-A')

        # Backwards compatibility goo for Debian jessie buildd backport
        if sbuild_version < Version('0.69.0'):
            argv.append('--arch-all-only')
        else:
            argv.append('--no-arch-any')
    elif self.arch == self.buildable.indep_together_with:
        logger.info('Architecture: %s + all', self.arch)
        argv.append('-A')
        argv.append('--arch')
        argv.append(self.arch)
    elif self.arch == 'source':
        logger.info('Source-only')
        argv.append('--no-arch-any')

        if sbuild_version < Version('0.69.0'):
            # Backwards compatibility for Debian jessie buildd backport,
            # and for sbuild in Ubuntu xenial.
            # sbuild < 0.69.0 expects to find foo_1_amd64.changes
            # even for a source-only build (because it doesn't really
            # support source-only builds), so we have to cheat.
            perl = ("'" +
                    '$arch = qx(dpkg\\x20--print-architecture);\n' +
                    'chomp($arch);\n' +
                    'chdir(shift);\n' +
                    'foreach(glob("../*_source.changes")) {\n' +
                    ' $orig = $_;\n' +
                    ' s/_source\\.changes$/_${arch}.changes/;\n' +
                    ' print("Renaming\\x20$orig\\x20to\\x20$_\\n");\n' +
                    ' rename($orig,$_) || die("$!");\n' +
                    '}\n' +
                    "'")
            argv.append(
                '--finished-build-commands=perl -e {} %p'.format(perl))
    else:
        logger.info('Architecture: %s only', self.arch)
        argv.append('--arch')
        argv.append(self.arch)

    if self.arch in ('source', self.buildable.source_together_with):
        # Build a clean source package as a side-effect of one
        # build.
        argv.append('--source')

        for x in self.dpkg_source_options:
            argv.append('--debbuildopt=--source-option={}'.format(x))

    if self.buildable.binary_version_suffix:
        argv.append('--append-to-version={}'.format(
            self.buildable.binary_version_suffix))

    for x in sbuild_options:
        argv.append(x)

    # Pick what to hand sbuild: a .dsc (rebuilt or external), a bare
    # package name from the archive, or a source package we make here.
    if self.buildable.dsc_name is not None:
        if 'source' in self.buildable.changes_produced:
            # We rebuilt the source already. Use the rebuilt version
            # for all subsequent builds.
            argv.append('{}/out/{}'.format(
                self.worker.scratch,
                os.path.basename(self.buildable.dsc_name)))
        else:
            # We got a .dsc from outside Vectis and are not
            # rebuilding it.
            argv.append('{}/in/{}'.format(
                self.worker.scratch,
                os.path.basename(self.buildable.dsc_name)))
    elif self.buildable.source_from_archive:
        argv.append(self.buildable.buildable)
    else:
        # jessie sbuild doesn't support --no-clean-source so build
        # the temporary source package ourselves.
        ds_argv = [
            self.worker.command_wrapper,
            '--chdir',
            '{}/in/{}_source'.format(self.worker.scratch,
                                     self.buildable.product_prefix),
            '--',
            'dpkg-source',
        ]

        for x in self.dpkg_source_options:
            ds_argv.append(x)

        ds_argv.extend(('-b', '.'))
        self.worker.check_call(ds_argv)
        argv.append('{}/in/{}.dsc'.format(self.worker.scratch,
                                          self.buildable.product_prefix))

    logger.info('Running %r', argv)
    try:
        self.worker.check_call(argv)
    finally:
        # Copy the build log back to the host even if the build failed.
        # Note that we mix chroot.dpkg_architecture and arch here: an
        # Architecture: all build produces foo_1.2_amd64.build, which we
        # rename.
        # We also check for foo_amd64.build because
        # that's what comes out if we do "vectis sbuild --suite=sid hello".
        for prefix in (self.buildable.source_package,
                       self.buildable.product_prefix):
            product = '{}/out/{}_{}.build'.format(self.worker.scratch,
                                                  prefix,
                                                  chroot.dpkg_architecture)
            product = self.worker.check_output(
                ['readlink', '-f', product],
                universal_newlines=True).rstrip('\n')

            if (self.worker.call(['test', '-e', product]) == 0 and
                    self.output_dir is not None):
                logger.info('Copying %s back to host as %s_%s.build...',
                            product, self.buildable.product_prefix,
                            self.arch)
                copied_back = os.path.join(
                    self.output_dir,
                    '{}_{}_{}.build'.format(
                        self.buildable.product_prefix,
                        self.arch,
                        time.strftime('%Y%m%dt%H%M%S', time.gmtime())))
                self.worker.copy_to_host(product, copied_back)
                self.buildable.logs[self.arch] = copied_back

                # Maintain a stable "latest log" symlink per arch.
                symlink = os.path.join(
                    self.output_dir,
                    '{}_{}.build'.format(self.buildable.product_prefix,
                                         self.arch))

                try:
                    os.remove(symlink)
                except FileNotFoundError:
                    pass

                os.symlink(os.path.abspath(copied_back), symlink)
                break
        else:
            logger.warning('Did not find build log at %s', product)
            logger.warning(
                'Possible build logs:\n%s',
                self.worker.check_call([
                    'sh', '-c',
                    'cd "$1"; ls -l *.build || :',
                    'sh',  # argv[0]
                    self.worker.scratch
                ]))

    if self.arch == 'source' and self.buildable.source_from_archive:
        # Recover the .dsc that sbuild --source produced and use it as
        # the authoritative metadata from now on.
        dscs = self.worker.check_output(
            [
                'sh', '-c',
                'exec ls "$1"/out/*.dsc',
                'sh',  # argv[0]
                self.worker.scratch
            ],
            universal_newlines=True)

        dscs = dscs.splitlines()

        if len(dscs) != 1:
            raise CannotHappen('sbuild --source produced more than one '
                               '.dsc file from {!r}'.format(
                                   self.buildable))

        product = dscs[0]

        with TemporaryDirectory(prefix='vectis-sbuild-') as tmp:
            copied_back = os.path.join(
                tmp, '{}.dsc'.format(self.buildable.buildable))
            self.worker.copy_to_host(product, copied_back)

            self.buildable.dsc = Dsc(open(copied_back))
            self.buildable.source_package = self.buildable.dsc['source']
            self.buildable.source_version = Version(
                self.buildable.dsc['version'])
            self.buildable.arch_wildcards = set(
                self.buildable.dsc['architecture'].split())
            self.buildable.binary_packages = [
                p.strip()
                for p in self.buildable.dsc['binary'].split(',')
            ]

    if self.arch == 'source' and self.output_dir is not None:
        # Make sure the orig.tar.* are in the out directory, because
        # we will be building from the rebuilt source in future
        self.worker.check_call([
            'sh', '-c',
            'ln -nsf "$1"/in/*.orig.tar.* "$1"/out/',
            'sh',  # argv[0]
            self.worker.scratch
        ])

    if self.output_dir is None:
        return

    # Locate the .changes file sbuild produced; its architecture suffix
    # may be either the requested arch or the worker's native arch.
    product_arch = None

    for candidate in (self.arch, self.worker.dpkg_architecture):
        product = '{}/out/{}_{}.changes'.format(
            self.worker.scratch, self.buildable.product_prefix, candidate)

        if self.worker.call(['test', '-e', product]) == 0:
            product_arch = candidate
            break
    else:
        raise CannotHappen(
            'sbuild produced no .changes file from {!r}'.format(
                self.buildable))

    copied_back = self.copy_back_product(
        '{}_{}.changes'.format(self.buildable.product_prefix,
                               product_arch),
        '{}_{}.changes'.format(self.buildable.product_prefix, self.arch))

    if copied_back is not None:
        self.buildable.changes_produced[self.arch] = copied_back

        changes_out = Changes(open(copied_back))

        if 'source' in changes_out['architecture'].split():
            # A sourceful build supersedes any externally-supplied .dsc.
            self.buildable.dsc_name = None
            self.buildable.sourceful_changes_name = copied_back

            for f in changes_out['files']:
                if f['name'].endswith('.dsc'):
                    # expect to find exactly one .dsc file
                    assert self.buildable.dsc_name is None
                    self.buildable.dsc_name = os.path.join(
                        self.output_dir, f['name'])

            assert self.buildable.dsc_name is not None
            # Save some space
            self.worker.check_call([
                'rm', '-fr',
                '{}/in/{}_source/'.format(self.worker.scratch,
                                          self.buildable.product_prefix)
            ])

        # Copy every product back; remember the .dsc if there was one.
        dsc = None

        for f in changes_out['files']:
            copied_back = self.copy_back_product(f['name'])

            if copied_back is not None and f['name'].endswith('.dsc'):
                dsc = Dsc(open(copied_back))

        if dsc is not None:
            if self.buildable.dsc is None:
                self.buildable.dsc = dsc

            for f in dsc['files']:
                # The orig.tar.* might not have come back. Copy that too,
                # if necessary.
                self.copy_back_product(f['name'], skip_if_exists=True)
def run(self, spec_id, package, version, distribution, component,
        task_arch_list, section=None, priority=None):
    """Publish a built package into the reprepro repository.

    Installs the source .dsc, then the per-architecture .deb/.udeb
    files from each task's .changes file, reporting progress through
    manager.update_status.  (Python 2 code: note the comma-form except
    clause and StandardError.)
    """
    # Strip any epoch: epochs never appear in filenames.
    version = version.split(':')[-1]
    repo_log = []
    arch = None
    try:
        # Install source
        dsc = '%s_%s.dsc' % (package, version)
        dsc_file = os.path.join(settings.INCOMING, str(spec_id), 'source',
                                dsc)
        cmd = [
            'reprepro', '-VVV', '-b', settings.REPO_DIR, '-C', component
        ]
        if priority is not None:
            cmd += ['-P', priority]
        if section is not None:
            cmd += ['-S', section]
        cmd += ['includedsc', distribution, dsc_file]
        self.execute_cmd(cmd, repo_log)
        # Add deb for each architecture
        for index, task_arch in enumerate(task_arch_list):
            task_id, arch = task_arch
            changes = '%s_%s_%s.changes' % (package, version, arch)
            changes_file = os.path.join(settings.INCOMING, str(spec_id),
                                        task_id, changes)
            # Install binary packages only
            files = {'.deb': [], '.udeb': []}
            c = Changes(open(changes_file))
            for info in c['Files']:
                fname = info['name']
                name, ext = os.path.splitext(fname)
                # Only accept .deb and .udeb
                if not ext in ['.deb', '.udeb']:
                    continue
                if index == 0 or fname.endswith('_%s.deb' % arch) \
                        or fname.endswith('_%s.udeb' % arch):
                    # Only first listed architecture installs
                    # architecture independent (*_all.deb/*_all.udeb)
                    # packages
                    files[ext].append(fname)
            for ext in files:
                # reprepro uses a different verb for .udeb files.
                action = 'includedeb'
                if ext == '.udeb':
                    action = 'includeudeb'
                debs = [
                    os.path.join(settings.INCOMING, str(spec_id), task_id,
                                 deb)
                    for deb in files[ext]
                ]
                if len(debs) > 0:
                    cmd = 'reprepro -VVV -b %s -C %s %s %s' % \
                        (settings.REPO_DIR, component, action, distribution)
                    cmd = cmd.split() + debs
                    self.execute_cmd(cmd, repo_log)
            manager.update_status(spec_id, manager.SUCCESS, arch)
        # Report all done
        manager.update_status(spec_id, manager.COMPLETE)
    except (RepoBuildError, StandardError), e:
        self.get_logger().error('[%s] %s' % (spec_id, str(e)))
        manager.update_status(spec_id, manager.FAILURE, arch)
def __init__(self, buildable, *, binary_version_suffix='', link_builds=(),
             orig_dirs=('..', ), output_dir=None, output_parent, vendor):
    """Construct a description of something vectis can build.

    buildable may be a source directory (with debian/changelog), a
    sourceful .changes file, a .dsc file, or - if it does not exist on
    disk - a "source" or "source_version" string naming a package to
    fetch from the archive.  Also prepares the output directory and the
    convenience symlinks pointing at it.

    Raises ArgumentError for unusable inputs.
    """
    self.buildable = buildable
    # Defaults for everything that later stages may fill in.
    self._product_prefix = None
    self._source_version = None
    # Fix: initialize _binary_version unconditionally.  Previously it
    # was only assigned when _source_version was known, so the
    # "if self._binary_version is None" check below raised
    # AttributeError when building from the archive without a version.
    self._binary_version = None
    self.arch_wildcards = set()
    self.archs = []
    self.autopkgtest_failures = []
    self.binary_packages = []
    self.binary_version_suffix = binary_version_suffix
    self.changes_produced = {}
    self.dirname = None
    self.dsc = None
    self.dsc_name = None
    self.indep = False
    self.indep_together_with = None
    self.link_builds = link_builds
    self.logs = {}
    self.merged_changes = OrderedDict()
    self.nominal_suite = None
    self.orig_dirs = orig_dirs
    self.output_dir = output_dir
    self.piuparts_failures = []
    self.source_from_archive = False
    self.source_package = None
    self.source_together_with = None
    self.sourceful_changes_name = None
    self.suite = None
    self.vendor = vendor

    if os.path.exists(self.buildable):
        if os.path.isdir(self.buildable):
            # A source tree: read metadata from debian/changelog and
            # debian/control.
            changelog = os.path.join(self.buildable, 'debian', 'changelog')
            changelog = Changelog(open(changelog))
            self.source_package = changelog.get_package()
            self.nominal_suite = changelog.distributions
            self._source_version = Version(changelog.version)
            control = os.path.join(self.buildable, 'debian', 'control')

            if len(changelog.distributions.split()) != 1:
                raise ArgumentError(
                    'Cannot build for multiple distributions at once')

            for paragraph in Deb822.iter_paragraphs(open(control)):
                self.arch_wildcards |= set(
                    paragraph.get('architecture', '').split())
                binary = paragraph.get('package')

                if binary is not None:
                    self.binary_packages.append(binary)
        elif self.buildable.endswith('.changes'):
            # A .changes file: it must be sourceful and reference
            # exactly one .dsc.
            self.dirname = os.path.dirname(self.buildable) or os.curdir
            self.sourceful_changes_name = self.buildable
            sourceful_changes = Changes(open(self.buildable))

            if 'source' not in sourceful_changes['architecture'].split():
                raise ArgumentError(
                    'Changes file {!r} must be sourceful'.format(
                        self.buildable))

            self.nominal_suite = sourceful_changes['distribution']

            for f in sourceful_changes['files']:
                if f['name'].endswith('.dsc'):
                    if self.dsc_name is not None:
                        raise ArgumentError(
                            'Changes file {!r} contained more than one '
                            '.dsc file'.format(self.buildable))

                    self.dsc_name = os.path.join(self.dirname, f['name'])

            if self.dsc_name is None:
                raise ArgumentError(
                    'Changes file {!r} did not contain a .dsc file'.format(
                        self.buildable))

            self.dsc = Dsc(open(self.dsc_name))
        elif self.buildable.endswith('.dsc'):
            self.dirname = os.path.dirname(self.buildable) or os.curdir
            self.dsc_name = self.buildable
            self.dsc = Dsc(open(self.dsc_name))
        else:
            raise ArgumentError(
                'buildable must be .changes, .dsc or directory, not '
                '{!r}'.format(self.buildable))
    else:
        # Not on disk: treat as "source" or "source_version" from the
        # archive.
        self.source_from_archive = True

        if '_' in self.buildable:
            source, version = self.buildable.split('_', 1)
        else:
            source = self.buildable
            version = None

        self.source_package = source

        if version is not None:
            self._source_version = Version(version)

    # The .dsc, when available, is authoritative for package metadata.
    if self.dsc is not None:
        self.source_package = self.dsc['source']
        self._source_version = Version(self.dsc['version'])
        self.arch_wildcards = set(self.dsc['architecture'].split())
        self.binary_packages = [
            p.strip() for p in self.dsc['binary'].split(',')
        ]

    if self._source_version is not None:
        self._binary_version = Version(
            str(self._source_version) + self.binary_version_suffix)

    timestamp = time.strftime('%Y%m%dt%H%M%S', time.gmtime())

    if self.output_dir is None:
        if self._binary_version is None:
            dirname = '{}_{}'.format(self.source_package, timestamp)
        else:
            dirname = '{}_{}_{}'.format(self.source_package,
                                        self._binary_version, timestamp)

        self.output_dir = os.path.join(output_parent, dirname)

        # For convenience, create a symbolic link for the latest build of
        # each source package:
        # hello_latest -> hello_2.10-1_20170319t102623
        unversioned_symlink = os.path.join(output_parent,
                                           self.source_package + '_latest')

        with suppress(FileNotFoundError):
            os.unlink(unversioned_symlink)

        os.symlink(dirname, unversioned_symlink)

        # If we know the version, also create a symbolic link for the
        # latest build of each source/version pair:
        # hello_2.10-1 -> hello_2.10-1_20170319t102623
        if self._binary_version is not None:
            versioned_symlink = os.path.join(
                output_parent,
                '{}_{}'.format(self.source_package, self._binary_version))

            with suppress(FileNotFoundError):
                os.unlink(versioned_symlink)

            os.symlink(dirname, versioned_symlink)

    # It's OK if the output directory exists but is empty.
    with suppress(FileNotFoundError):
        os.rmdir(self.output_dir)

    # Otherwise, if someone already created this, we'll just crash out.
    os.mkdir(self.output_dir)

    if self.dsc is not None:
        # Mirror the .dsc and the files it references into the output
        # directory (and any link_builds directories).
        abs_file = os.path.abspath(self.dsc_name)
        abs_dir, base = os.path.split(abs_file)
        os.symlink(abs_file, os.path.join(self.output_dir, base))

        for l in self.link_builds:
            symlink = os.path.join(l, base)

            with suppress(FileNotFoundError):
                os.unlink(symlink)

            os.symlink(abs_file, symlink)

        for f in self.dsc['files']:
            abs_file = os.path.join(abs_dir, f['name'])
            os.symlink(abs_file,
                       os.path.join(self.output_dir, f['name']))

            for l in self.link_builds:
                symlink = os.path.join(l, f['name'])

                with suppress(FileNotFoundError):
                    os.unlink(symlink)

                os.symlink(abs_file, symlink)
def sbuild(self):
    """Build self.buildable for self.arch using sbuild on the worker.

    Prepares the worker-side output directory and schroot, constructs an
    sbuild command line appropriate to the requested architecture
    ('all', 'source', a real dpkg architecture, or an arch built
    "together with" all), runs it, and copies build logs, the .changes
    file and every artifact it lists back to self.output_builds,
    updating self.buildable's bookkeeping (changes_produced, logs,
    dsc/dsc_name, version, ...) along the way.

    Several branches are compatibility workarounds for the Debian
    jessie buildd backport of sbuild (versions < 0.69.0).
    """
    # Make sure the worker-side output directory exists and is owned by
    # the sbuild user.
    self.worker.check_call([
        'install', '-d', '-m755', '-osbuild', '-gsbuild',
        '{}/out'.format(self.worker.scratch)
    ])

    # The installed sbuild version gates the compatibility paths below.
    sbuild_version = Version(
        self.worker.check_output(
            ['dpkg-query', '-W', '-f${Version}', 'sbuild'],
            universal_newlines=True).rstrip('\n'))

    logger.info('Building architecture: %s', self.arch)

    # 'all' and 'source' are pseudo-architectures: build them on
    # whatever real architecture the worker happens to have.
    if self.arch in ('all', 'source'):
        logger.info('(on %s)', self.worker.dpkg_architecture)
        use_arch = self.worker.dpkg_architecture
    else:
        use_arch = self.arch

    hierarchy = self.suite.hierarchy

    # Upload the pre-built chroot tarball for the base suite, then
    # register it with schroot via a generated chroot.d snippet.
    sbuild_tarball = ('sbuild-{vendor}-{base}-{arch}.tar.gz'.format(
        arch=use_arch,
        vendor=self.buildable.vendor,
        base=hierarchy[-1],
    ))
    self.worker.copy_to_guest(os.path.join(self.storage, sbuild_tarball),
                              '{}/in/{}'.format(self.worker.scratch,
                                                sbuild_tarball),
                              cache=True)

    chroot = '{base}-{arch}-sbuild'.format(base=hierarchy[-1],
                                           arch=use_arch)

    with TemporaryDirectory() as tmp:
        with AtomicWriter(os.path.join(tmp, 'sbuild.conf')) as writer:
            writer.write(
                textwrap.dedent('''
                [{chroot}]
                type=file
                description=An autobuilder
                file={scratch}/in/{sbuild_tarball}
                groups=root,sbuild
                root-groups=root,sbuild
                profile=sbuild
                ''').format(chroot=chroot,
                            sbuild_tarball=sbuild_tarball,
                            scratch=self.worker.scratch))
        self.worker.copy_to_guest(
            os.path.join(tmp, 'sbuild.conf'),
            '/etc/schroot/chroot.d/{}'.format(chroot))

    # Backwards compatibility goo for Debian jessie buildd backport:
    # it can't do "sbuild hello", only "sbuild hello_2.10-1"
    if (self.buildable.source_from_archive and
            self.buildable.version is None and
            sbuild_version < Version('0.69.0')):
        # Resolve the version by asking the chroot's apt archive for
        # all candidate versions, and take the highest.
        lines = self.worker.check_output(
            [
                'schroot', '-c', chroot,
                '--',
                'sh', '-c',
                'apt-get update >&2 && '
                'apt-cache showsrc --only-source "$1" | '
                'sed -ne "s/^Version: *//p"',
                'sh',  # argv[0]
                self.buildable.source_package
            ],
            universal_newlines=True).strip().splitlines()
        self.buildable.version = sorted(map(Version, lines))[-1]
        self.buildable.buildable = '{}_{}'.format(
            self.buildable.source_package,
            self.buildable.version,
        )

    # Base sbuild invocation: run as the sbuild user, chdir'd to the
    # worker's out directory so products land there.
    argv = [
        self.worker.command_wrapper,
        '--chdir',
        '{}/out'.format(self.worker.scratch),
        '--',
        'runuser',
        '-u', 'sbuild',
        '--',
        'sbuild',
        '-c', chroot,
        '-d', str(self.buildable.nominal_suite),
        '--no-run-lintian',
    ]

    for x in self.dpkg_buildpackage_options:
        argv.append('--debbuildopt=' + x)

    for x in self.dpkg_source_options:
        argv.append('--dpkg-source-opt=' + x)

    # Every suite above the base of the hierarchy becomes an extra apt
    # repository, restricted to the components we were asked for.
    for child in hierarchy[:-1]:
        argv.append('--extra-repository')
        argv.append('deb {} {} {}'.format(
            child.mirror,
            child.apt_suite,
            ' '.join(
                set(self.components or child.components) &
                child.all_components)))

        if child.sbuild_resolver:
            argv.extend(child.sbuild_resolver)

    for x in self.extra_repositories:
        argv.append('--extra-repository')
        argv.append(x)

    # Select what to build according to the requested architecture.
    if self.arch == 'all':
        logger.info('Architecture: all')
        argv.append('-A')

        # Backwards compatibility goo for Debian jessie buildd backport
        if sbuild_version < Version('0.69.0'):
            argv.append('--arch-all-only')
        else:
            argv.append('--no-arch-any')
    elif self.arch == self.buildable.together_with:
        logger.info('Architecture: %s + all', self.arch)
        argv.append('-A')
        argv.append('--arch')
        argv.append(self.arch)
    elif self.arch == 'source':
        logger.info('Source-only')

        # Backwards compatibility goo for Debian jessie buildd backport
        if sbuild_version < Version('0.69.0'):
            # If we only build 'all', and we don't build 'all',
            # then logically we build nothing (except source).
            argv.append('--arch-all-only')
            argv.append('--no-arch-all')

            # Urgh. This sbuild expects to find foo_1_amd64.changes
            # even for a source-only build (because it doesn't really
            # support source-only builds), so we have to cheat.
            # sbuild splits the command on spaces so we need to have
            # a one-liner that doesn't contain embedded whitespace.
            # Luckily, Perl can be written as line-noise.
            argv.append('--finished-build-commands=perl -e ' +
                        '$arch=qx(dpkg\\x20--print-architecture);' +
                        'chomp($arch);' +
                        'chdir(shift);' +
                        'foreach(glob("../*_source.changes")){' +
                        '$orig=$_;' +
                        's/_source\\.changes$/_${arch}.changes/;' +
                        'print("Renaming\\x20$orig\\x20to\\x20$_\\n");' +
                        'rename($orig,$_)||die("$!");' +
                        '}' +
                        ' %p')
        else:
            argv.append('--no-arch-any')
            argv.append('--source')
    else:
        logger.info('Architecture: %s only', self.arch)
        argv.append('--arch')
        argv.append(self.arch)

    # Choose what to feed to sbuild: an existing .dsc, a plain package
    # name from the archive, or a source tree we dpkg-source ourselves.
    if self.buildable.dsc_name is not None:
        if 'source' in self.buildable.changes_produced:
            # Prefer the .dsc rebuilt earlier in this run (in out/)
            # over the one we were given (in in/).
            argv.append('{}/out/{}'.format(
                self.worker.scratch,
                os.path.basename(self.buildable.dsc_name)))
        else:
            argv.append('{}/in/{}'.format(
                self.worker.scratch,
                os.path.basename(self.buildable.dsc_name)))
    elif self.buildable.source_from_archive:
        argv.append(self.buildable.buildable)
    else:
        # Build a clean source package as a side-effect of the first
        # build (in practice this will be the 'source' build).
        if '--source' not in argv:
            argv.append('--source')

        # jessie sbuild doesn't support --no-clean-source so build
        # the temporary source package ourselves.
        self.worker.check_call([
            self.worker.command_wrapper,
            '--chdir',
            '{}/in/{}_source'.format(self.worker.scratch,
                                     self.buildable.product_prefix),
            '--',
            'dpkg-source',
            '-b', '.'
        ])
        argv.append('{}/in/{}.dsc'.format(self.worker.scratch,
                                          self.buildable.product_prefix))

    logger.info('Running %r', argv)
    try:
        self.worker.check_call(argv)
    finally:
        # Copy the build log back even if the build failed.
        # Note that we mix use_arch and arch here: an Architecture: all
        # build produces foo_1.2_amd64.build, which we rename.
        # We also check for foo_amd64.build because
        # that's what comes out if we do "vectis sbuild --suite=sid hello".
        for prefix in (self.buildable.source_package,
                       self.buildable.product_prefix):
            product = '{}/out/{}_{}.build'.format(self.worker.scratch,
                                                  prefix, use_arch)
            # sbuild leaves a symlink; resolve it to the real log file.
            product = self.worker.check_output(
                ['readlink', '-f', product],
                universal_newlines=True).rstrip('\n')

            if (self.worker.call(['test', '-e', product]) == 0 and
                    self.output_builds is not None):
                logger.info('Copying %s back to host as %s_%s.build...',
                            product, self.buildable.product_prefix,
                            self.arch)
                copied_back = os.path.join(
                    self.output_builds,
                    '{}_{}_{}.build'.format(
                        self.buildable.product_prefix,
                        self.arch,
                        time.strftime('%Y%m%dt%H%M%S', time.gmtime())))
                self.worker.copy_to_host(product, copied_back)
                self.buildable.logs[self.arch] = copied_back

                # Maintain a timestamp-free symlink pointing at the
                # latest log for this architecture.
                symlink = os.path.join(
                    self.output_builds,
                    '{}_{}.build'.format(self.buildable.product_prefix,
                                         self.arch))
                try:
                    os.remove(symlink)
                except FileNotFoundError:
                    pass

                os.symlink(os.path.abspath(copied_back), symlink)
                break
        else:
            # for/else: neither candidate log name was found.
            logger.warning('Did not find build log at %s', product)
            # NOTE(review): check_call's return value is what gets
            # logged here — presumably None, so this logs "None" rather
            # than the listing; check_output looks intended. Confirm.
            logger.warning(
                'Possible build logs:\n%s',
                self.worker.check_call([
                    'sh', '-c',
                    'cd "$1"; ls -l *.build || :',
                    'sh',  # argv[0]
                    self.worker.scratch
                ]))

    if self.arch == 'source' and self.buildable.source_from_archive:
        # We rebuilt the source of an archive package: adopt the
        # produced .dsc as this buildable's source metadata.
        dscs = self.worker.check_output(
            [
                'sh', '-c',
                'exec ls "$1"/out/*.dsc',
                'sh',  # argv[0]
                self.worker.scratch
            ],
            universal_newlines=True)

        dscs = dscs.splitlines()
        if len(dscs) != 1:
            raise CannotHappen('sbuild --source produced more than one '
                               '.dsc file from {!r}'.format(
                                   self.buildable))

        product = dscs[0]

        with TemporaryDirectory() as tmp:
            copied_back = os.path.join(
                tmp, '{}.dsc'.format(self.buildable.buildable))
            self.worker.copy_to_host(product, copied_back)

            self.buildable.dsc = Dsc(open(copied_back))
            self.buildable.source_package = self.buildable.dsc['source']
            self.buildable.version = Version(
                self.buildable.dsc['version'])
            self.buildable.arch_wildcards = set(
                self.buildable.dsc['architecture'].split())
            self.buildable.binary_packages = [
                p.strip()
                for p in self.buildable.dsc['binary'].split(',')
            ]

    if self.arch == 'source' and self.output_builds is not None:
        # Make sure the orig.tar.* are in the out directory, because
        # we will be building from the rebuilt source in future
        self.worker.check_call([
            'sh', '-c',
            'ln -nsf "$1"/in/*.orig.tar.* "$1"/out/',
            'sh',  # argv[0]
            self.worker.scratch
        ])

    if self.output_builds is None:
        return

    # Locate the .changes file: a build for a pseudo-architecture may
    # have produced foo_<hostarch>.changes instead of foo_<arch>.changes.
    for product_arch in (self.arch, self.worker.dpkg_architecture):
        product = '{}/out/{}_{}.changes'.format(
            self.worker.scratch, self.buildable.product_prefix,
            product_arch)
        if self.worker.call(['test', '-e', product]) == 0:
            break
    else:
        # for/else: no candidate .changes file existed on the worker.
        raise CannotHappen('sbuild produced no .changes file from '
                           '{!r}'.format(self.buildable))

    logger.info('Copying %s back to host...', product)
    copied_back = os.path.join(
        self.output_builds,
        '{}_{}.changes'.format(self.buildable.product_prefix,
                               self.arch))
    self.worker.copy_to_host(product, copied_back)
    self.buildable.changes_produced[self.arch] = copied_back

    changes_out = Changes(open(copied_back))

    if self.arch == 'source':
        # Record the sourceful .changes and the .dsc it references, so
        # later per-architecture builds use the rebuilt source.
        self.buildable.dsc_name = None
        self.buildable.sourceful_changes_name = copied_back

        for f in changes_out['files']:
            if f['name'].endswith('.dsc'):
                # expect to find exactly one .dsc file
                assert self.buildable.dsc_name is None
                self.buildable.dsc_name = os.path.join(
                    self.output_builds, f['name'])

        assert self.buildable.dsc_name is not None
        # Save some space
        self.worker.check_call([
            'rm', '-fr',
            '{}/in/{}_source/'.format(self.worker.scratch,
                                      self.buildable.product_prefix)
        ])

    # Copy every file the .changes lists back to the host, refusing
    # suspicious names (path separators, dotfiles).
    for f in changes_out['files']:
        assert '/' not in f['name']
        assert not f['name'].startswith('.')

        logger.info('Additionally copying %s back to host...',
                    f['name'])
        product = '{}/out/{}'.format(self.worker.scratch, f['name'])
        copied_back = os.path.join(self.output_builds, f['name'])
        self.worker.copy_to_host(product, copied_back)