def _add_deb(changes, filename):
    """Record a .deb file's metadata and checksums in a changes mapping.

    Appends one entry each to ``changes['Files']``, ``changes['Checksums-Sha1']``
    and ``changes['Checksums-Sha256']`` describing *filename*.

    :param changes: dict-like with list values under the keys
        ``'Files'``, ``'Checksums-Sha1'`` and ``'Checksums-Sha256'``.
    :param filename: path-like object exposing ``basename()``, ``getsize()``
        and ``read_hash(algo)`` (presumably a ``py.path.local`` — TODO confirm).
    """
    # Parse the control file once (the original parsed it twice) and hoist
    # the repeated basename/size lookups.
    control = DebFile(filename).debcontrol()
    name = filename.basename()
    size = filename.getsize()
    # NOTE(review): .encode('hex') is a Python 2 idiom; this code presumably
    # targets Python 2 — it would raise LookupError on Python 3.
    changes['Files'].append({
        'name': name,
        'size': size,
        'md5sum': filename.read_hash('md5').encode('hex'),
        'section': control['Section'],
        'priority': control['Priority'],
    })
    changes['Checksums-Sha1'].append({
        'name': name,
        'size': size,
        'sha1': filename.read_hash('sha1').encode('hex'),
    })
    changes['Checksums-Sha256'].append({
        'name': name,
        'size': size,
        'sha256': filename.read_hash('sha256').encode('hex'),
    })
def scan_packages(filenames):
    """Parse names and versions from .deb files.

    :param filenames: iterable of .deb file paths accepted by ``DebFile``.
    :returns: a list of `Package`, one per input file, in input order.
    """
    packages = []
    for filename in filenames:
        # BUG FIX: the original assigned `control = deb.debcontrol()` and then
        # called `deb.debcontrol()` a second time, re-parsing the control file.
        control = DebFile(filename).debcontrol()
        packages.append(Package.from_control(control))
    return packages
def add_package_to_cache(config, cache, codename, component, package):
    """Register a .deb file in the repository cache and return its pool path.

    Validates that *component* and the package's architecture are supported by
    the distro configured for *codename*, computes the checksum/size/Filename
    fields, and stores the field list under the package's version.

    :param config: repository configuration consumed by ``get_distro``.
    :param cache: cache object consumed by ``get_package_versions``.
    :param codename: distribution codename (e.g. a suite name).
    :param component: repository component the package is added to.
    :param package: filesystem path of the .deb file.
    :returns: the repository-relative ``Filename`` ("pool/<component>/<basename>").
    :raises RuntimeError: unsupported component/architecture, or the same
        package version already exists in the cache.
    """
    distro = get_distro(config, codename)
    if component not in distro["Components"]:
        raise RuntimeError(
            "Can not add package %s, component %s not supported by the repository"
            % (package, component))
    deb = DebFile(filename=package)
    fields = deb.debcontrol()
    arch = fields["Architecture"]
    if arch not in distro["Architectures"]:
        raise RuntimeError(
            "Can not add package %s, architecture not supported by the repository"
            % package)
    # Index fields required by a Packages entry.
    fields["SHA256"] = hash_file(package, hashlib.sha256)
    fields["SHA1"] = hash_file(package, hashlib.sha1)
    fields["MD5sum"] = hash_file(package, hashlib.md5)
    fields["Size"] = str(os.path.getsize(package))
    fields["Filename"] = "pool/%s/%s" % (component, os.path.basename(package))
    packages = get_package_versions(cache, codename, component, arch,
                                    fields["Package"])
    if fields["Version"] in packages:
        # BUG FIX: was fields["version"] (lowercase). Deb822 lookups happen to
        # be case-insensitive, but every other access here uses "Version" and a
        # plain-dict `fields` would raise KeyError while formatting the error.
        raise RuntimeError(
            "Package %s with version %s already exists in the repository"
            % (fields["Package"], fields["Version"]))
    # convert dict like object to tuple to keep original sorting in json file
    packages[fields["Version"]] = [(field, fields[field]) for field in fields]
    print("-> Adding %s=%s to cache for %s/%s"
          % (fields["Package"], fields["Version"], codename, arch))
    return fields["Filename"]
def generate_sut_from_deb(path):
    """Generate a Firehose SUT (software under test) from a .deb file.

    :param path: filesystem path of the .deb file.
    :returns: a ``DebianBinary`` built from the package's control fields.
    """
    control = DebFile(filename=path, mode='r').debcontrol()
    # parse_version splits the raw Version field into (upstream, local) parts
    # — presumably separating any Debian revision; verify against its docs.
    version, local = parse_version(control['Version'])
    name = control['Package']
    arch = control['Architecture']
    return DebianBinary(name, version, local, arch)
def getBinFromDeb(fname):
    """Get a binary package from a .deb file.

    :param fname: filesystem path of the .deb (or .udeb) file.
    :returns: a ``BinPackageDeb`` populated from the package's control data,
        with hash attributes merged in from ``utils.Hasher.hash``.
    """
    dfile = DebFile(fname)
    cdict = dfile.debcontrol()
    result = BinPackageDeb(
        id=-1,
        name=cdict['Package'],
        control=dfile.control.get_content('control', 'utf-8'),
        # Reuse the already-parsed control; the original called
        # dfile.debcontrol() a second time, re-parsing the control file.
        cdict=cdict,
        Version=cdict['Version'],
        Architecture=cdict['Architecture'],
        udeb=fname.endswith('.udeb'),
        # apt computes Description-md5 over the description plus a trailing
        # newline, hence the appended b'\n'.
        Description_md5=hashlib.md5(
            cdict['Description'].encode() + b'\n').hexdigest(),
        debfile=dfile,
        origfile=fname)
    # Merge file-level hashes (namedtuple fields) directly into the instance.
    result.__dict__.update(utils.Hasher.hash(fname)._asdict())
    return result
# List the branch names available on the 'origin' remote
# (refs look like "origin/<branch>"; keep only the last path segment).
git_refs = git_repo.remotes.origin.refs
git_refs_name = list(map(lambda x: str(x).split('/')[-1], git_refs))
logging.debug(git_refs_name)
# Create the target branch if it does not exist on the remote yet,
# otherwise just switch to it.
if gh_branch not in git_refs_name:
    git_repo.git.checkout(b=gh_branch)
else:
    git_repo.git.checkout(gh_branch)
# Generate metadata
logging.debug("cwd : {}".format(os.getcwd()))
logging.debug(os.listdir())
# Read the .deb control fields to describe the package being published.
deb_file_handle = DebFile(filename=deb_file_path)
deb_file_control = deb_file_handle.debcontrol()
current_metadata = {
    'format_version': 1,
    'sw_version': deb_file_control['Version'],
    'sw_architecture': deb_file_control['Architecture'],
    # NOTE(review): deb_file_version is defined outside this view —
    # presumably the kernel/OS version this build targets; confirm.
    'linux_version': deb_file_version
}
current_metadata_str = json.dumps(current_metadata)
logging.debug('Metadata {}'.format(current_metadata_str))
# Get metadata
# Keep only commits previously created by this tool, identified by the
# '[apt-action]' prefix in the commit message.
all_commit = git_repo.iter_commits(gh_branch)
all_apt_action_commit = list(filter(lambda x: (x.message[:12] == '[apt-action]'), all_commit))
apt_action_metadata_str = list(