def install_packages(self, debs, method=None, gain_root='su'):
    """Install the given .deb files using the requested (or guessed) method.

    Unrecognized methods fall back to auto-detection; apt is chosen when
    it is at least version 1.1~0, otherwise dpkg is used.
    """
    recognized = ('apt', 'dpkg', 'gdebi', 'gdebi-gtk', 'gdebi-kde')
    if method and method not in recognized:
        logger.warning('Unknown installation method %r, using apt or '
                       'dpkg instead' % method)
        method = None

    if not method:
        # apt >= 1.1~0 can be used directly; otherwise fall back to dpkg.
        apt_ver = self.current_version('apt')
        if Version(apt_ver.strip()) >= Version('1.1~0'):
            method = 'apt'
        else:
            method = 'dpkg'

    packages = list(debs)
    if method == 'apt':
        run_as_root(
            ['apt-get', 'install', '--install-recommends'] + packages,
            gain_root)
    elif method == 'dpkg':
        run_as_root(['dpkg', '-i'] + packages, gain_root)
    elif method == 'gdebi':
        run_as_root(['gdebi'] + packages, gain_root)
    else:
        # gdebi-gtk / gdebi-kde are invoked directly, not via gain_root.
        subprocess.call([method] + packages)
def __init__(
    self,
    stanza,                     # type: Packages
    binary_version_marker=None  # type: typing.Optional[str]
):
    # type: (...) -> None
    """Capture the interesting fields of one binary package stanza."""
    self.stanza = stanza
    self.name = stanza['package']
    self.arch = stanza['architecture']
    self.multiarch = stanza.get('multi-arch', 'no')
    self.version = Version(stanza['version'])

    # A "Source: name (version)" field means the source version differs
    # from the binary version; otherwise they are identical.
    source = stanza.get('source', self.name)
    if ' (' in source:
        self.source, rest = source.split(' (', 1)
        source_version = rest.rstrip(')')
    else:
        self.source = source
        source_version = str(self.version)

    self.built_source_version = Version(source_version)
    self.source_version = self.built_source_version
    # If a marker (e.g. a binNMU separator) appears in the version,
    # everything before its last occurrence is the real source version.
    if (binary_version_marker is not None
            and binary_version_marker in source_version):
        left, _ = source_version.rsplit(binary_version_marker, 1)
        self.source_version = Version(left)
def add_one_innoextract(self, exe):
    """Unpack a GOG installer *exe* with innoextract and register it.

    Derives a game id (from the GOG database or the setup filename),
    extracts the installer's ``app`` directory into a temporary tree,
    records it, and registers the original archive.
    """
    import shutil

    game = self.gog['game'] = GOG.get_id_from_archive(exe)
    if not game:
        # Derive an id from the filename: strip "setup_"/".exe", then a
        # trailing version number, then a trailing language suffix.
        game = os.path.basename(exe)
        game = game[len('setup_'):len(game) - len('.exe')]
        last_part = game.split('_')[-1]
        if last_part.strip('0123456789.') == '':
            game = game[0:len(game) - len(last_part) - 1]
            last_part = game.split('_')[-1]
        if last_part in ('german', 'spanish', 'french', 'italian',
                         'polish', 'russian'):
            game = game[0:len(game) - len(last_part) - 1]

    tmp = tempfile.mkdtemp(prefix='gdptmp.')
    command = ['innoextract', os.path.realpath(exe)]
    version = check_output(['innoextract', '-v', '-s'],
                           universal_newlines=True)
    if Version(version.split('-')[0]) >= Version('1.5'):
        # Newer innoextract supports filtering and collision renaming.
        command.append('-I')
        command.append('app')
        command.append('--collisions=rename')

    logger.info('running "%s" ...' % ' '.join(command))
    try:
        log = check_output(command, stderr=subprocess.DEVNULL,
                           universal_newlines=True, cwd=tmp)
        self.longname = log.split('\n')[0].split('"')[1]
        self.add_one_dir(os.path.join(tmp, 'app'), game=game,
                         lang=guess_lang(exe))
    finally:
        # shutil.rmtree handles paths containing spaces or shell
        # metacharacters and cleans up even on failure, unlike the
        # previous os.system('rm -r ' + tmp) call.
        shutil.rmtree(tmp)

    self.add_archive(exe, lower=False)
    self.file_data[os.path.basename(exe)] = dict(
        unpack=dict(format='innoextract'),
        provides=['file1', 'file2'])
def product_prefix(self):
    """Return (and lazily cache) '<source>_<version-without-epoch>'."""
    if self._product_prefix is None:
        # The epoch is not part of the product name, so strip it.
        stripped = Version(self.binary_version)
        stripped.epoch = None
        self._product_prefix = '{}_{}'.format(
            self.source_package, stripped)
    return self._product_prefix
def compare_version(self, other):
    """Compare two packages of the same packagetype.

    'R' pairs use rpm label comparison, 'D' pairs use dpkg comparison.
    Mismatched types fall through and return None.
    """
    kinds = (self.packagetype, other.packagetype)
    if kinds == ('R', 'R'):
        return labelCompare(self.get_version_string(),
                            other.get_version_string())
    if kinds == ('D', 'D'):
        return version_compare(Version(self.get_version_string()),
                               Version(other.get_version_string()))
def vmdebootstrap_argv(version, *, architecture, components,
                       debootstrap_version, include=(), kernel_package,
                       merged_usr, qemu_image_size, suite, uri):
    """Build the vmdebootstrap command line and debootstrap arguments.

    Returns a (argv, debootstrap_args, default_name) tuple; default_name
    reflects whether a merged-/usr image is being produced.
    """
    default_name = 'autopkgtest.qcow2'
    argv = [
        'env',
        # We use apt-cacher-ng in non-proxy mode, to make it easier to
        # add extra apt sources later that can't go via this proxy.
        'AUTOPKGTEST_APT_PROXY=DIRECT',
        'MIRROR={}'.format(uri),
        'RELEASE={}'.format(suite),
        'vmdebootstrap',
        '--log=/dev/stderr',
        '--verbose',
        '--serial-console',
        '--distribution={}'.format(suite),
        '--user=user',
        '--hostname=host',
        '--sparse',
        '--size={}'.format(qemu_image_size),
        '--mirror={}'.format(uri),
        '--arch={}'.format(architecture),
        '--grub',
        '--no-extlinux',
    ]

    if kernel_package is not None:
        if version >= Version('1.4'):
            argv.append('--kernel-package={}'.format(kernel_package))
        else:
            # Older vmdebootstrap: disable its kernel logic and install
            # the kernel as an ordinary package instead.
            argv.extend([
                '--no-kernel',
                '--package={}'.format(kernel_package),
            ])

    argv.extend('--package={}'.format(extra) for extra in include)

    debootstrap_args = ['components={}'.format(','.join(components))]
    if debootstrap_version >= Version('1.0.86~'):
        if merged_usr:
            debootstrap_args.append('merged-usr')
            default_name = 'autopkgtest-merged-usr.qcow2'
        else:
            # piuparts really doesn't like merged /usr
            debootstrap_args.append('no-merged-usr')

    return argv, debootstrap_args, default_name
def __init__(self, name, version, architecture, manifest):
    """Record a package's identity and its manifest-derived relations."""
    self.name = name
    self.architecture = architecture
    # We do not need ~ubuntu-version suffix here
    self.full_version = version
    self.version = Version(version.split('~')[0])
    self.manifest = manifest
    self.relations = manifest.relations
def get_pkg_version(base_dir=None):
    """Return the current package version.

    Prefers a ``version.txt`` file in *base_dir* (default: the current
    working directory); otherwise imports the directory's
    ``__pkginfo__`` module and reads its ``version`` attribute.
    """
    if base_dir is None:
        base_dir = os.getcwd()
    version_file = join(base_dir, 'version.txt')
    if exists(version_file):
        # Context manager closes the handle promptly; the original
        # relied on the garbage collector to do so.
        with open(version_file) as stream:
            return Version(stream.read().strip())
    sys.path.insert(0, base_dir)
    try:
        mod = __import__('__pkginfo__')
    finally:
        # Restore sys.path and drop the cached module even if the
        # import fails (the original leaked the sys.path entry then).
        sys.path.pop(0)
        sys.modules.pop('__pkginfo__', None)
    return Version(mod.version)
def resolve(self, github: Github) -> 'Buildjob':
    """Resolve self.tag (exact, '?', or glob pattern) to a tag/tarball.

    '?' selects the newest tag; a glob selects the newest matching tag.

    :raises ValueError: if the project has no tags at all
    :raises KeyError: if no tag could be resolved or validated
    """
    repo = github.get_repo(self.project)
    tags = repo.get_tags()
    if tags.totalCount == 0:
        raise ValueError(f'<{self.project}> has no tags')
    versions = {Version(t.name): t.tarball_url for t in tags}
    sorted_versions = sorted(versions.keys(),
                             key=cmp_to_key(version_compare),
                             reverse=True)
    latest_version = sorted_versions[0]
    if self.tag == '?':
        self.tag = str(latest_version)
        # Fix: Version objects have no .tarball_url attribute; look the
        # URL up in the mapping (the original raised AttributeError).
        self.tarball_url = versions[latest_version]
    else:
        regex = re.compile(fnmatch.translate(self.tag))
        for v in sorted_versions:
            name = str(v)
            if regex.match(name):
                self.tag = name
                self.tarball_url = versions[v]
                # Stop at the first (newest) match; the original kept
                # looping and ended up on the oldest matching tag.
                break
    if self.tarball_url is None:
        raise KeyError(
            f'could not resolve or validate tag for project: '
            f'{self.project}:{self.tag}'
        )
    return self
def set_version(self, version):
    # type: (Union[Version, str]) -> None
    """Replace the version of the most recent changelog block.

    Accepts either a full version string or a Version object.
    """
    newest_block = self._blocks[0]
    newest_block.version = Version(version)
def compare_version(self, other):
    """Compare two packages of the same packagetype.

    'R' pairs use rpm label comparison; 'D' pairs use dpkg comparison;
    'A' pairs short-circuit to 0 when epoch/version/release all match,
    otherwise fall back to dpkg comparison. Mismatched types return None.
    """
    kinds = (self.packagetype, other.packagetype)
    if kinds == ('R', 'R'):
        return labelCompare(self.get_version_string(),
                            other.get_version_string())
    if kinds == ('D', 'D'):
        return version_compare(Version(self.get_version_string()),
                               Version(other.get_version_string()))
    if kinds == ('A', 'A'):
        mine = (self.epoch, self.version, self.release)
        theirs = (other.epoch, other.version, other.release)
        if mine == theirs:
            return 0
        return version_compare(Version(self.get_version_string()),
                               Version(other.get_version_string()))
def vmdebootstrap_argv(version, *, architecture, kernel_package,
                       qemu_image_size, suite, uri):
    """Assemble the vmdebootstrap command line for one image build."""
    argv = [
        'env',
        # We use apt-cacher-ng in non-proxy mode, to make it easier to
        # add extra apt sources later that can't go via this proxy.
        'AUTOPKGTEST_APT_PROXY=DIRECT',
        'MIRROR={}'.format(uri),
        'RELEASE={}'.format(suite),
        'vmdebootstrap',
        '--log=/dev/stderr',
        '--verbose',
        '--serial-console',
        '--distribution={}'.format(suite),
        '--user=user',
        '--hostname=host',
        '--sparse',
        '--size={}'.format(qemu_image_size),
        '--mirror={}'.format(uri),
        '--arch={}'.format(architecture),
        '--grub',
        '--no-extlinux',
    ]

    if kernel_package is None:
        return argv

    if version >= Version('1.4'):
        argv.append('--kernel-package={}'.format(kernel_package))
    else:
        # Older vmdebootstrap: install the kernel as a plain package.
        argv += ['--no-kernel', '--package={}'.format(kernel_package)]
    return argv
def install_binaries(binaries_by_arch, binarylists, manifest):
    """Download and install the requested binary packages per architecture.

    For each architecture, downloads the newest available version of every
    package in that arch's install list, installs it, records it in
    *manifest*, and reports packages that could not be found.
    """
    skipped = 0
    for arch, arch_binaries in sorted(binaries_by_arch.items()):
        installset = binarylists[arch].copy()
        #
        # Create the destination directory if necessary
        # ("dest_dir" — renamed from "dir", which shadowed the builtin)
        #
        dest_dir = os.path.join(
            top, destdir, "binary" if not args.debug else "debug", arch)
        if not os.access(dest_dir, os.W_OK):
            os.makedirs(dest_dir)
        for p, binaries in sorted(arch_binaries.items()):
            if p in installset:
                if args.verbose:
                    print("DOWNLOADING BINARY: %s" % p)
                # Pick the highest available version of this package.
                newest = max(binaries,
                             key=lambda b: Version(b.stanza['Version']))
                manifest[(p, arch)] = newest
                #
                # Download the package and install it
                #
                check_path_traversal(newest.stanza['Filename'])
                file_url = "%s/%s" % (
                    newest.apt_source.url,
                    newest.stanza['Filename'],
                )
                dest_deb = os.path.join(
                    dest_dir,
                    os.path.basename(newest.stanza['Filename']),
                )
                if not download_file(file_url, dest_deb):
                    if args.verbose:
                        print("Skipping download of existing deb: %s"
                              % dest_deb)
                    else:
                        skipped += 1
                install_deb(
                    os.path.splitext(
                        os.path.basename(newest.stanza['Filename']))[0],
                    dest_deb,
                    os.path.join(args.output, arch))
                installset.remove(p)
        for p in installset:
            #
            # There was a binary package in the list to be installed that
            # is not in the repo
            #
            e = "ERROR: Package %s not found in Packages files\n" % p
            sys.stderr.write(e)
        if installset and args.strict:
            raise SystemExit('Not all binary packages were found')
    if skipped > 0:
        print("Skipped downloading %i file(s) that were already present."
              % skipped)
def __init__(self, name, manifest):
    """Record a source package's metadata from its manifest stanza."""
    self.name = name
    self.manifest = manifest
    self.version = Version(manifest['Version'])
    self.architecture = manifest['Architecture']
    # "Binary" is a comma-separated list of produced binary packages.
    self.binaries = manifest['Binary'].split(', ')
    self.relations = manifest.relations
    self.format = manifest['Format']
def __getitem__(self, n):
    # type: (Union[Version, int, str]) -> ChangeBlock
    """Select a changelog entry by index, version string, or Version.

    :param n: an integer index, a version string, or a Version object
    """
    if isinstance(n, int):
        return self._blocks[n]
    if isinstance(n, six.string_types):
        # Normalize strings to Version and recurse once.
        return self[Version(n)]
    return self._blocks[self.versions.index(n)]
def __init__(self, control, hashes, md5sums, scripts=None):
    """Wrap a package's control data, hashes, md5sums and scripts.

    ``scripts`` previously defaulted to a shared mutable ``{}``; None is
    now the sentinel (observable behavior for callers is unchanged).
    """
    if scripts is None:
        scripts = {}
    if isinstance(control, dict):
        control = deb822.Deb822(control)
    self._c = control
    self._deps = DebPkgRequires(**self._c)
    self._version = Version(self._c.get('Version'))
    self._scripts = DebPkgScripts(**scripts)
    if isinstance(hashes, dict):
        hashes = deb822.Deb822(hashes)
    self._h = hashes
    if isinstance(md5sums, DebPkgMD5sums):
        self._md5 = md5sums
    else:
        self._md5 = DebPkgMD5sums(md5sums)
def _recurse_dependency(
    arch_binaries,      # type: typing.Dict[str, typing.List[Binary]]
    library,            # type: str
    binaries_from_apt,  # type: typing.Set[str]
    sources_from_apt    # type: typing.Set[str]
):
    # type: (...) -> bool
    """Walk *library*'s dependency tree, accumulating packages/sources.

    Adds the newest available binary's source to *sources_from_apt* and
    accepted transitive dependencies to *binaries_from_apt*, recursing
    into each newly added dependency. Returns True on success, False if
    any package was missing or an unexpected dependency was found.
    """
    if library not in arch_binaries:
        print('ERROR: Package %s not found in Packages files' % library)
        return False
    # Resolve against the newest available version of the package.
    binary = max(
        arch_binaries[library],
        key=lambda b: Version(b.stanza['Version']))
    sources_from_apt.add(binary.source)
    error = False
    for d in binary.dependency_names:
        if accept_transitive_dependency(d):
            if d not in binaries_from_apt:
                # Mark before recursing so cycles terminate.
                binaries_from_apt.add(d)
                if not _recurse_dependency(
                    arch_binaries,
                    d,
                    binaries_from_apt,
                    sources_from_apt,
                ):
                    error = True
        elif library.endswith(('-dev', '-dbg', '-multidev')):
            # When building a -debug runtime we
            # disregard transitive dependencies of
            # development-only packages
            pass
        elif d in binaries_from_apt:
            pass
        elif ignore_metapackage_dependency(d):
            pass
        elif ignore_transitive_dependency(d):
            pass
        else:
            print('ERROR: %s depends on %s but the metapackages do not'
                  % (library, d))
            # Recurse anyway (return value deliberately ignored) so all
            # problems in the subtree are reported in one pass.
            _recurse_dependency(
                arch_binaries,
                d,
                binaries_from_apt,
                sources_from_apt,
            )
            error = True
    return not error
def close(self, base_dir, create=None):
    """close the opened change log entry"""
    # Current upstream version (from version.txt or __pkginfo__).
    version = get_pkg_version(base_dir)
    entry = self.get_entry(create=create)
    today = self.formatted_date()
    if len(self.entries) > 1:
        # Sanity-check against the previous (already closed) entry.
        centry = self.entries[1]
        upstream_version = centry.version.upstream_version
        assert upstream_version <= version
        assert centry.date < today
        if version == upstream_version:
            # NOTE(review): upstream_version is normally a str, so
            # "+ 1" would raise TypeError — confirm the intended type
            # of debian_version here.
            debian_version = centry.version.upstream_version + 1
        else:
            debian_version = 1
    entry.date = today
    # NOTE(review): debian_version computed above is never used; the
    # entry's version is set from the upstream version alone — verify
    # whether a "<version>-<debian_version>" string was intended.
    entry.version = Version(version)
def list_package_versions(package):
    """Parse `reprepro ls` output into {distribution: {arch: Version}}.

    'bleeding' distributions are skipped entirely.

    :raises BuildError: if non-empty output cannot be parsed at all
    """
    output = call('ls', package)
    pattern = r"^%s\s+\|\s+(\S+)\s+\|\s+(\S+)\s+\|\s+([a-z0-9, ]+)$" % (
        re.escape(package))
    matches = re.findall(pattern, output, re.MULTILINE)
    if not matches and len(output) > 0:
        raise BuildError("Failed to parse `reprepro ls` output")
    versions = defaultdict(dict)
    for version, distribution, arches in matches:
        if 'bleeding' in distribution:
            continue
        for arch in arches.split(', '):
            versions[distribution][arch] = Version(version)
    return versions
def find_source_version(package, version):
    """Return the preferred distribution carrying *package* at *version*.

    Searches the 'source' architecture entries of every distribution and
    returns the first match in reverse-sorted distribution order, or None
    if no distribution has that exact source version.

    Note: the original used Python-2-only idioms (``dict.keys().sort()``,
    ``iteritems()``) which raise AttributeError on Python 3.
    """
    if not isinstance(version, Version):
        version = Version(version)
    versions = list_package_versions(package)
    # Do not just copy packages from random files, use some well-defined
    # (reverse-sorted) order of distributions.
    order = sorted(versions, reverse=True)
    candidates = [
        (order.index(distro), distro)
        for distro, pkg in versions.items()
        for arch, ver in pkg.items()
        if arch == 'source' and ver == version
    ]
    try:
        _, distro = min(candidates)
        return distro
    except ValueError:
        # min() on an empty sequence: no distribution matched.
        return None
def extract_deb_packages(data, url):
    """
    Extract package metadata from debian Packages file
    """
    extracted = extract(data, url)
    package_re = re.compile(b'^Package: ', re.M)
    plen = len(package_re.findall(extracted))
    packages = set()
    if plen > 0:
        ptext = 'Extracting packages: '
        progress_info_s.send(sender=None, ptext=ptext, plen=plen)
        bio = BytesIO(extracted)
        for i, stanza in enumerate(Packages.iter_paragraphs(bio)):
            # https://github.com/furlongm/patchman/issues/55
            if 'version' not in stanza:
                continue
            fullversion = Version(stanza['version'])
            arch = stanza['architecture']
            name = stanza['package']
            # Use the public epoch/upstream_version/debian_revision
            # properties instead of reaching into the name-mangled
            # private attributes (_BaseVersion__*), which are an
            # implementation detail of python-debian.
            epoch = fullversion.epoch
            if epoch is None:
                epoch = ''
            version = fullversion.upstream_version
            release = fullversion.debian_revision
            if release is None:
                release = ''
            progress_update_s.send(sender=None, index=i + 1)
            package = PackageString(name=name,
                                    epoch=epoch,
                                    version=version,
                                    release=release,
                                    arch=arch,
                                    packagetype='D')
            packages.add(package)
    else:
        info_message.send(sender=None, text='No packages found in repo')
    return packages
def expand_metapackages(binaries_by_arch, metapackages):
    """Resolve metapackages to their sources and transitive binaries.

    Returns (sources_from_apt, binaries_from_apt) where the latter maps
    each architecture to the set of required binary package names.
    Exits with status 1 on errors when args.strict is set.
    """
    sources_from_apt = set()
    binaries_from_apt = {}
    failed = False

    # Pass 1: seed each arch's set with the metapackages themselves and
    # their direct (non-ignored) dependencies.
    for arch, arch_binaries in sorted(binaries_by_arch.items()):
        wanted = binaries_from_apt[arch] = set()
        for metapackage in metapackages:
            if metapackage not in arch_binaries:
                print('ERROR: Metapackage %s not found in Packages files'
                      % metapackage)
                failed = True
                continue
            newest = max(arch_binaries[metapackage],
                         key=lambda b: Version(b.stanza['Version']))
            sources_from_apt.add(newest.source)
            wanted.add(metapackage)
            for dep in newest.dependency_names:
                if not ignore_metapackage_dependency(dep):
                    wanted.add(dep)

    # Pass 2: recursively validate/extend the dependency closure.
    for arch, arch_binaries in sorted(binaries_by_arch.items()):
        for library in sorted(binaries_from_apt[arch]):
            if not _recurse_dependency(
                arch_binaries,
                library,
                binaries_from_apt[arch],
                sources_from_apt,
            ):
                failed = True

    if failed and args.strict:
        sys.exit(1)
    return sources_from_apt, binaries_from_apt
def get_packages(self, connection, repository, consumer):
    """Stream the repository's Packages.gz index, feeding *consumer*.

    Malformed stanzas (missing mandatory keys) are logged and re-raised.
    """
    index_url = self._get_url_of_metafile(repository, "Packages.gz")
    stream = GzipDecompress(connection.open_stream(index_url))
    self.logger.info("loading packages from %s ...", repository)
    count = 0
    for dpkg in deb822.Packages.iter_paragraphs(stream):
        try:
            consumer(Package(
                repository=repository,
                name=dpkg["package"],
                version=Version(dpkg['version']),
                filesize=int(dpkg.get('size', -1)),
                filename=dpkg["filename"],
                checksum=FileChecksum(
                    md5=dpkg.get("md5sum"),
                    sha1=dpkg.get("sha1"),
                    sha256=dpkg.get("sha256"),
                ),
                mandatory=self._is_mandatory(dpkg),
                # Recommends are installed by default (since Lucid)
                requires=self._get_relations(
                    dpkg, "depends", "pre-depends", "recommends"),
                # The deb does not have obsoletes section
                obsoletes=[],
                provides=self._get_relations(dpkg, "provides"),
                group=dpkg.get("section"),
            ))
        except KeyError as e:
            self.logger.error("Malformed index %s - %s: %s",
                              repository,
                              six.text_type(dpkg),
                              six.text_type(e))
            raise
        count += 1
    self.logger.info("loaded: %d packages from %s.", count, repository)
def load_package_from_file(self, repository, filename):
    """Build a Package object by inspecting a .deb file on disk."""
    filepath = utils.get_path_from_url(repository.url + filename)
    _, size, checksum = next(iter(
        utils.get_size_and_checksum_for_files(
            [filepath], _checksum_collector)))
    # Read the control stanza, then close the archive before building
    # the Package (all needed data is already extracted).
    with closing(debfile.DebFile(filepath)) as deb:
        control = deb822.Packages(
            deb.control.get_content(debfile.CONTROL_FILE))
    return Package(
        repository=repository,
        name=control["package"],
        version=Version(control['version']),
        filesize=int(control.get('size', size)),
        filename=filename,
        checksum=FileChecksum(*checksum),
        mandatory=self._is_mandatory(control),
        requires=self._get_relations(
            control, "depends", "pre-depends", "recommends"),
        provides=self._get_relations(control, "provides"),
        obsoletes=[],
        group=control.get('section'),
    )
def _get_version(self):
    # type: () -> Optional[Version]
    """Return the parsed version, or None if no raw version is stored."""
    raw = self._raw_version
    return None if raw is None else Version(raw)
def check_pylint_version():
    """Return True if the installed pylint is at least 2.4.4."""
    minimum = Version('2.4.4')
    return Version(pylint.__version__) >= minimum
def audit(self, path, slow=False, check_arch=False):
    """Check that the Click package at *path* is acceptable to install.

    Verifies the signature (when debsig-verify is available), the
    Click-Version field, the absence of dpkg relationship fields and
    maintainer scripts, the manifest's name/version/framework entries,
    optionally the architecture, and that every data path starts with
    "./" as dpkg's path filtering expects.

    :param path: path to the package file on disk
    :param slow: if true, additionally verify the embedded md5sums
    :param check_arch: if true, reject packages for other architectures
    :return: (package_name, package_version) taken from the manifest
    :raises ClickInstallerAuditError: if any audit check fails
    :raises ClickInstallerError: if the file cannot be read at all
    """
    # always do the signature check first
    if DebsigVerify.available():
        try:
            DebsigVerify.verify(path, self.allow_unauthenticated)
        except DebsigVerifyError as e:
            raise ClickInstallerAuditError(str(e))
    else:
        logging.warning(
            "debsig-verify not available; cannot check signatures")
    # fail early if the file cannot be opened
    try:
        with closing(DebFile(filename=path)) as package:
            pass
    except Exception as e:
        raise ClickInstallerError(
            "Failed to read %s: %s" % (path, str(e)))
    # then perform the audit
    with closing(DebFile(filename=path)) as package:
        control_fields = package.control.debcontrol()
        try:
            click_version = Version(control_fields["Click-Version"])
        except KeyError:
            raise ClickInstallerAuditError("No Click-Version field")
        # Refuse anything newer than the spec version we implement.
        if click_version > spec_version:
            raise ClickInstallerAuditError(
                "Click-Version: %s newer than maximum supported version "
                "%s" % (click_version, spec_version))
        # Click packages must not declare dpkg-level relationships.
        for field in (
            "Pre-Depends", "Depends", "Recommends", "Suggests",
            "Enhances", "Conflicts", "Breaks",
            "Provides",
        ):
            if field in control_fields:
                raise ClickInstallerAuditError(
                    "%s field is forbidden in Click packages" % field)
        scripts = package.control.scripts()
        # A preinst matching the known static one is tolerated; any
        # other maintainer script is rejected.
        if ("preinst" in scripts and
                static_preinst_matches(scripts["preinst"])):
            scripts.pop("preinst", None)
        if scripts:
            raise ClickInstallerAuditError(
                "Maintainer scripts are forbidden in Click packages "
                "(found: %s)" % " ".join(sorted(scripts)))
        if not package.control.has_file("manifest"):
            raise ClickInstallerAuditError("Package has no manifest")
        with package.control.get_file("manifest", encoding="UTF-8") as f:
            manifest = json.load(f)
        try:
            package_name = manifest["name"]
        except KeyError:
            raise ClickInstallerAuditError('No "name" entry in manifest')
        # TODO: perhaps just do full name validation?
        if "/" in package_name:
            raise ClickInstallerAuditError(
                'Invalid character "/" in "name" entry: %s'
                % package_name)
        if "_" in package_name:
            raise ClickInstallerAuditError(
                'Invalid character "_" in "name" entry: %s'
                % package_name)
        try:
            package_version = manifest["version"]
        except KeyError:
            raise ClickInstallerAuditError(
                'No "version" entry in manifest')
        # TODO: perhaps just do full version validation?
        if "/" in package_version:
            raise ClickInstallerAuditError(
                'Invalid character "/" in "version" entry: %s'
                % package_version)
        if "_" in package_version:
            raise ClickInstallerAuditError(
                'Invalid character "_" in "version" entry: %s'
                % package_version)
        try:
            framework = manifest["framework"]
        except KeyError:
            raise ClickInstallerAuditError(
                'No "framework" entry in manifest')
        try:
            validate_framework(framework, self.force_missing_framework)
        except ClickFrameworkInvalid as e:
            raise ClickInstallerAuditError(str(e))
        if check_arch:
            architecture = manifest.get("architecture", "all")
            if architecture != "all":
                dpkg_architecture = self._dpkg_architecture()
                # "architecture" may be a single string or a list.
                if isinstance(architecture, list):
                    if dpkg_architecture not in architecture:
                        raise ClickInstallerAuditError(
                            'Package architectures "%s" not compatible '
                            'with system architecture "%s"'
                            % (" ".join(architecture), dpkg_architecture))
                elif architecture != dpkg_architecture:
                    raise ClickInstallerAuditError(
                        'Package architecture "%s" not compatible '
                        'with system architecture "%s"'
                        % (architecture, dpkg_architecture))
        # This isn't ideally quick, since it has to decompress the data
        # part of the package, but dpkg's path filtering code assumes
        # that all paths start with "./" so we must check it before
        # passing the package to dpkg.
        for data_name in package.data:
            if data_name != "." and not data_name.startswith("./"):
                raise ClickInstallerAuditError(
                    'File name "%s" in package does not start with "./"'
                    % data_name)
        if slow:
            temp_dir = tempfile.mkdtemp(prefix="click")
            try:
                self.extract(path, temp_dir)
                command = [
                    "md5sum", "-c", "--quiet",
                    os.path.join("DEBIAN", "md5sums"),
                ]
                subprocess.check_call(command, cwd=temp_dir)
            finally:
                shutil.rmtree(temp_dir)
        return package_name, package_version
def run(args):
    """Bootstrap a qemu test image with vmdebootstrap and validate it.

    Builds a raw image, converts it to qcow2, then boots it once in a
    VirtWorker to install the base test tooling before renaming the new
    image into place. On failure the partial image is removed unless
    the user asked to keep it.
    """
    if args.suite is None:
        if args.worker_suite is not None:
            args.suite = args.worker_suite
        else:
            raise ArgumentError('--suite must be specified')

    architecture = args.architecture
    keep = args._keep
    kernel_package = args.get_kernel_package(architecture)
    mirrors = args.get_mirrors()
    out = args.write_qemu_image
    qemu_image_size = args.qemu_image_size
    storage = args.storage
    suite = args.suite
    uri = args._uri
    vmdebootstrap_options = args.vmdebootstrap_options

    if uri is None:
        uri = mirrors.lookup_suite(suite)

    try:
        version = subprocess.check_output(
            ['dpkg-query', '-W', '-f${Version}', 'vmdebootstrap'],
            universal_newlines=True).rstrip('\n')
    except (OSError, subprocess.CalledProcessError):
        # Narrowed from a bare "except:" which also swallowed
        # KeyboardInterrupt/SystemExit.
        # non-dpkg host, guess a recent version
        version = Version('1.7')
    else:
        version = Version(version)

    with TemporaryDirectory(prefix='vectis-bootstrap-') as scratch:
        argv = [
            'sudo',
            os.path.join(
                os.path.dirname(__file__), os.pardir,
                'vectis-command-wrapper'),
            '--',
        ]
        argv.extend(
            vmdebootstrap_argv(
                version,
                architecture=architecture,
                kernel_package=kernel_package,
                qemu_image_size=qemu_image_size,
                suite=suite,
                uri=uri,
            ),
        )
        argv.extend(vmdebootstrap_options)
        argv.append(
            '--customize={}'.format(os.path.join(
                os.path.dirname(__file__), os.pardir, 'setup-testbed')))
        argv.append('--owner={}'.format(pwd.getpwuid(os.getuid())[0]))
        argv.append('--image={}/output.raw'.format(scratch))
        subprocess.check_call(argv)
        subprocess.check_call([
            'qemu-img', 'convert', '-f', 'raw', '-O', 'qcow2',
            '-c', '-p',
            '{}/output.raw'.format(scratch),
            '{}/output.qcow2'.format(scratch),
        ])
        os.makedirs(os.path.dirname(out) or os.curdir, exist_ok=True)
        shutil.move('{}/output.qcow2'.format(scratch), out + '.new')
        try:
            with VirtWorker(
                    ['qemu', '{}.new'.format(out)],
                    storage=storage,
                    suite=suite,
                    mirrors=mirrors) as worker:
                worker.check_call([
                    'env', 'DEBIAN_FRONTEND=noninteractive',
                    'apt-get', '-y', '--no-install-recommends',
                    '-t', suite.apt_suite, 'install',
                    'python3', 'sbuild', 'schroot',
                ])
        except BaseException:
            # Explicit BaseException (was a bare "except:"); always
            # re-raised, so cleanup keeps identical semantics.
            if not keep:
                os.remove(out + '.new')
            raise
        else:
            os.rename(out + '.new', out)
def dpkg_version(self, package):
    """Return the installed version of *package* via dpkg-query."""
    out = self.check_output(
        ['dpkg-query', '-W', '-f${Version}', package],
        universal_newlines=True)
    return Version(out.rstrip('\n'))
def get_latest_revision(self):
    """return the latest revision found or 0.0.0"""
    # First entry with a truthy version wins; otherwise fall back.
    found = next(
        (entry.version for entry in self.entries if entry.version),
        None)
    return found if found is not None else Version('0.0.0')