Example #1
    def from_file(cls, filename, user_metadata=None):
        """
        Creates a DebPackage object (and by extension a mongodb entry) from a
        .deb package file.
        """
        try:
            control_fields = debfile.DebFile(filename).debcontrol()
        except debfile.ArError as invalid_package_error:
            raise InvalidPackageError(str(invalid_package_error))
        except IOError as missing_file_error:
            raise Error(str(missing_file_error))

        initialization_params = user_metadata or {}
        initialization_params.update(cls._to_internal_dict_style(control_fields))
        initialization_params = cls._parse_rel_fields(initialization_params)

        with open(filename, 'rb') as input_file:
            checksum = cls._compute_checksum(input_file)

        initialization_params.update(
            size=getsize(filename),
            checksumtype=util.TYPE_SHA256,
            checksum=checksum,
            control_fields=control_fields,
        )

        cls._check_for_required_fields(initialization_params)
        filename = cls.filename_from_unit_key(initialization_params)
        initialization_params['filename'] = filename

        return cls(**initialization_params)
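
A brief usage sketch for the classmethod above; the call site, the .deb path, and the user_metadata value are hypothetical, and reading .filename/.checksum assumes the constructor keeps the init params as attributes:

# Hypothetical call; 'hello_1.0-1_amd64.deb' is a placeholder path.
try:
    pkg = DebPackage.from_file('hello_1.0-1_amd64.deb',
                               user_metadata={'origin': 'local'})
    print(pkg.filename, pkg.checksum)   # assumes init params are stored as attributes
except InvalidPackageError as err:
    print('Not a valid .deb package:', err)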
Example #2
    def __import_to(self, confman):
        data = debfile.DebFile(self.source).data.tgz()

        prefix = "./usr/lib/poni-config/"
        for item in data.getnames():
            if (item.endswith("/") or (not item.startswith(prefix))
                    or (not data.getmember(item).isfile())):
                continue

            dest_sub = item[len(prefix):]
            dest_path = confman.system_root / dest_sub
            dest_dir = dest_path.dirname()
            if not dest_dir.exists():
                dest_dir.makedirs()

            contents = data.extractfile(item).read()
            write = not dest_path.exists()
            if not write:
                old = dest_path.bytes()
                write = (old != contents)

            logger = self.log.info if self.verbose else self.log.debug
            pretty_path = confman.root_dir.relpathto(dest_path)
            if write:
                with open(dest_path, "wb") as dest_file:
                    dest_file.write(contents)
                logger("imported: %s", pretty_path)
            else:
                logger("unchanged: %s", pretty_path)
Example #3
    def test_tar_bz2(self):
        bz2_deb = debfile.DebFile(self.bz2_debname)
        # random test on the data part (which is bzipped), just to check if we
        # can access its content
        self.assertEqual(os.path.normpath(bz2_deb.data.tgz().getnames()[10]),
                         os.path.normpath('./usr/share/locale/bg/'))
        bz2_deb.close()
Example #4
    def get_manpages_from_package(package):
        logging.debug("Fetching manpages in %s", package)

        container = pjoin(DebianManpageFetcher.output_dir, package)
        if not os.path.isdir(container):
            os.makedirs(container)

        # TODO: We will take only the first one
        # (edge case with duplicated names in sections)
        p = [sp for sp in DebianManpageFetcher.packages if sp.package == package][0]
        cp = DebianManpageFetcher.packages[p]

        if cp['flushed']:
            return True

        deb_path = pjoin(DebianRepo.base_mirror_path, cp['url'])

        try:
            data_file = debfile.DebFile(deb_path).data
        except Exception:
            print("Error processing (A) {}".format(package))
            return

        for member, v in cp['members'].items():
            if v['state']:
                continue

            item = data_file.tgz().getmember('./' + member)

            try:
                file_contents = data_file.get_file(member)
            except KeyError:
                if item.issym():
                    # TODO: Do something with missing links
                    print("Saved {}".format(package))
                    v['state'] = 2
                    continue
                else:
                    return True

            compressed = member.endswith(".gz")
            if compressed:
                file_contents = gzip.GzipFile(fileobj=file_contents)

            if item.issym():
                cp['members'][member]['link'] = manpage_name(
                    item.linkname)

            final_path = pjoin(container, v['name'])
            with open(final_path, "wb") as fp:
                fp.write(file_contents.read())

            v['state'] = 1
        else:
            # for/else: runs only when every member was processed without an early return
            cp['flushed'] = True
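
The data-part access pattern used above, reduced to a standalone sketch; the .deb path and the member path are hypothetical:

from debian import debfile

deb = debfile.DebFile('hello_1.0-1_amd64.deb')   # hypothetical path
member = 'usr/share/man/man1/hello.1.gz'         # hypothetical member
info = deb.data.tgz().getmember('./' + member)   # tarfile.TarInfo for the entry
if info.isfile():
    payload = deb.data.get_file(member).read()   # raw (still gzipped) bytes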
Example #5
    def from_file(cls, path, **kwargs):
        """
        Allows for fields (like Filename and Size) to be added or replaced
        using keyword arguments.
        """
        debpkg = debfile.DebFile(filename=path)
        md5sums = cls.read_md5sums(debpkg, path)
        control = debpkg.control.debcontrol().copy()
        scripts = debpkg.control.scripts()
        hashes = cls.make_hashes(path)
        control.update(kwargs)
        return cls(control, hashes, md5sums, scripts=scripts)
Example #6
    def from_file(cls, path, **kwargs):
        """
        Allows for fields (like Filename and Size) to be added or replaced
        using keyword arguments.
        """
        debpkg = debfile.DebFile(filename=path)
        # existence of md5sums in the control part is optional
        try:
            md5sums = debpkg.md5sums(encoding='utf-8')
        except debfile.DebError as err:
            log.warning('While processing %s: %s', path, err.args[0])
            md5sums = None
        control = debpkg.control.debcontrol().copy()
        scripts = debpkg.control.scripts()
        hashes = cls.make_hashes(path)
        control.update(kwargs)
        return cls(control, hashes, md5sums, scripts=scripts)
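
For reference, the same control access with python-debian on its own; only the .deb path is hypothetical:

from debian import debfile

deb = debfile.DebFile('hello_1.0-1_amd64.deb')   # hypothetical path
control = deb.control.debcontrol()               # Deb822 mapping, case-insensitive keys
print(control.get('Package'), control.get('Version'))
scripts = deb.control.scripts()                  # maintainer scripts, e.g. 'postinst'
deb.close()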
Example #7
    def setUp(self):
        def uudecode(infile, outfile):
            uu_deb = open(infile, 'rb')
            bin_deb = open(outfile, 'wb')
            uu.decode(uu_deb, bin_deb)
            uu_deb.close()
            bin_deb.close()

        self.debname = 'test.deb'
        self.broken_debname = 'test-broken.deb'
        self.bz2_debname = 'test-bz2.deb'
        uudecode('test.deb.uu', self.debname)
        uudecode('test-broken.deb.uu', self.broken_debname)
        uudecode('test-bz2.deb.uu', self.bz2_debname)

        self.d = debfile.DebFile(self.debname)
Example #8
def plist(dist):
    packagelist = []
    unique = set()
    snapshotregex = re.compile(SNAPSHOT_REGEX_BASE % dist)
    try:
        try:  # Try to find a snapshot matching the distribution
            snapshotname = [
                s for s in snapshotlist if snapshotregex.search(s)
            ][-1]
        except IndexError:  # If that fails, just treat it as a repo
            print('Using packages in repo %r...' % dist)
            packages_raw = subprocess.check_output(
                ("aptly", "repo", "show", "-with-packages",
                 dist)).splitlines()
        else:
            print('Using packages in snapshot %r...' % snapshotname)
            packages_raw = subprocess.check_output(
                ("aptly", "snapshot", "show", "-with-packages",
                 snapshotname)).splitlines()
    except subprocess.CalledProcessError:  # It broke, whatever...
        return

    for line in packages_raw[packages_raw.index(b"Packages:"):]:
        # We can't get a raw list of packages, but all package entries are indented... Use that.
        if line.startswith(b" "):
            # Each package is given as a string in the format packagename_version_arch
            fullname = line.decode("utf-8").strip()
            name, version, arch = fullname.split("_")

            # Track a list of unique source packages
            if arch == "source":
                unique.add(name)

            packagelist.append((name, version, arch, fullname))
    # Sort everything by package name
    packagelist.sort(key=lambda k: k[0])

    os.chdir(OUTDIR)
    with open('%s_sources.txt' % dist, 'w') as sources_f:
        sources_f.write('\n'.join(sorted(unique)))

    with open('%s_list.html' % dist, 'w') as f:
        f.write("""<!DOCTYPE HTML>
<html>
<head><title>Package List for the Utopia Repository - {}</title>
<meta charset="UTF-8">
<meta name=viewport content="width=device-width">
{}
</head>
<body>
<a href="/">Back to root</a>
<br><br>
<table class="sortable">
<tr>
<th>Package Name</th>
<th>Version</th>
<th>Architectures</th>""".format(dist, EXTRA_STYLES))
        # Note: preserve this order when formatting the <td>'s later on, or the results
        # will be in the wrong column!
        if SHOW_CHANGELOGS:
            f.write("""<th>Changelog</th>""")
        if SHOW_VCS_LINKS:
            f.write("""<th>Vcs-Browser</th>""")
        if SHOW_DEPENDENCIES:
            f.write("""<th>Package Relations</th>""")
        f.write("""
</tr>
""")
        for p in packagelist:
            short_desc = ''

            # If enabled, try to find a link to the file for the package given.
            # XXX: is all this conditional stuff needed or should we make the 'package show' call implicit?
            if SHOW_POOL_LINKS or SHOW_CHANGELOGS or SHOW_DEPENDENCIES or SHOW_DESCRIPTIONS:
                #print("Finding links for %s" % str(p))
                name, version, arch, fullname = p
                download_link = arch

                poolresults = subprocess.check_output(
                    ("aptly", "package", "show", "-with-files", fullname))

                # First, locate the raw filename corresponding to the package we asked for.
                filename = ''
                changelog_path = ''
                vcs_link = ''
                relations = collections.OrderedDict()

                for line in poolresults.splitlines():
                    line = line.decode('utf-8')
                    fields = line.split()

                    if line.startswith('Vcs-Browser:'):
                        vcs_link = fields[1]

                    if line.startswith('Filename:'):
                        # .deb's get a fancy "Filename: hello_1.0.0-1_all.deb" line in aptly's output.
                        filename = fields[1]
                    if arch == 'source' and '.dsc' in line and len(
                            fields) == 3:
                        # Source packages are listed as raw files in the pool though. Look for .dsc
                        # files in this case, usually in the line format
                        # 72c1479a7564c47cc2643336332c1e1d 711 utopia-defaults_2016.05.21+1.dsc
                        filename = fields[-1]

                    if line.startswith('Description:'):
                        short_desc = line.split(' ', 1)[-1]

                    # Parse dependency lines
                    for deptype in DEPENDENCY_TYPES:
                        if line.startswith(deptype + ':'):
                            relations[deptype] = line.split(' ', 1)[-1]

                if filename and (SHOW_POOL_LINKS or SHOW_CHANGELOGS):
                    # Then, once we've found the filename, look it up in the pool/ tree we made
                    # earlier.
                    #print("Found filename %s for %s" % (filename, fullname))
                    for poolfile in poolobjects[name]:
                        if poolfile.name == filename:
                            # Filename match found; make the "arch" field a relative link to the path given.
                            location = poolfile.relative_to(OUTDIR)
                            download_link = '<a href="%s">%s</a>' % (location,
                                                                     arch)
                            if SHOW_CHANGELOGS and arch != 'source':  # XXX: there's no easy way to generate changelogs from sources
                                changelog_path = os.path.join(
                                    CHANGELOG_TARGET_DIR,
                                    '%s_%s.changelog' % (name, version))

                                if not os.path.exists(changelog_path):
                                    # There's a new changelog file name for every version, so don't repeat generation
                                    # for versions that already have a changelog.

                                    full_path = str(poolfile.resolve())
                                    if os.path.getsize(
                                            full_path
                                    ) > MAX_CHANGELOG_FILE_SIZE:
                                        print(
                                            "    Skipping .deb %s; file size too large"
                                            % poolfile.name)
                                        break
                                    elif name.endswith(('-dbg', '-dbgsym')):
                                        print(
                                            "    Skipping .deb %s; debug packages don't use changelogs"
                                            % poolfile.name)
                                        break

                                    # Cached changelog doesn't exist, so make a new file.
                                    print("    Reading .deb %s" % full_path)
                                    deb = debfile.DebFile(full_path)
                                    changelog = deb.changelog()
                                    if changelog:
                                        with open(changelog_path,
                                                  'w') as changes_f:
                                            print(
                                                "    Writing changelog for %s (%s) to %s"
                                                % (fullname, filename,
                                                   changelog_path))
                                            try:
                                                changelog.write_to_open_file(
                                                    changes_f)
                                            except ValueError:  # Something went wrong, bleh.
                                                traceback.print_exc()
                                                continue
                                    else:
                                        print(
                                            "    Changelog generation FAILED for %s (deb.changelog() is empty?)"
                                            % fullname)
                                        continue

                            #print("Found %s for %s" % (poolfile, fullname))
                            break
                name_extended = name
                if short_desc and SHOW_DESCRIPTIONS:
                    # Format the name in a tooltip span if a description is available.
                    name_extended = """<span title="{0} - {1}" class="tooltip">{0}</span>""".format(
                        name, html.escape(short_desc))

                f.write(("""<tr id="{0}_{3}">
<td>{4}</td>
<td>{1}</td>
<td>{2}</td>
""".format(name, version, download_link, html.escape(arch), name_extended)))
                if SHOW_CHANGELOGS:
                    # Only fill in the changelog column if it is enabled, and the changelog exists.
                    if changelog_path and os.path.exists(changelog_path):
                        f.write(
                            """<td><a href="{}">Changelog</a></td>""".format(
                                os.path.relpath(changelog_path, OUTDIR)))
                    else:
                        f.write("""<td>N/A</td>""")
                if SHOW_VCS_LINKS:
                    if vcs_link:
                        f.write(
                            """<td><a href="{0}">{0}</a></td>""".format(vcs_link))
                    else:
                        f.write("""<td>N/A</td>""")
                if SHOW_DEPENDENCIES:
                    text = ''
                    for depname, data in relations.items():
                        text += """<span class="dependency deptype-{2}">{0}</span>: {1}<br>""".format(
                            depname, data, depname.lower())
                    f.write("""<td>{}</td>""".format(text))
                f.write("""
</tr>
""")
        f.write("""</table>
<p><b>Total items:</b> {} ({} unique source packages)</p>
<p>Last updated {}</p>
</body></html>""".format(
            len(packagelist), len(unique),
            time.strftime("%I:%M:%S %p, %b %d %Y +0000", time.gmtime())))
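
The changelog extraction used in the loop above, as a minimal standalone sketch; the .deb path is hypothetical:

from debian import debfile

deb = debfile.DebFile('hello_1.0-1_amd64.deb')   # hypothetical path
changelog = deb.changelog()                      # None when no changelog can be read
if changelog:
    print(changelog.full_version)                # version of the newest changelog entry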
Example #9
    def test_missing_members(self):
        self.assertRaises(debfile.DebError,
                          debfile.DebFile, self.broken_debname)
Example #10
    def init_service_host(self):
        """Initialize service host."""
        self.log.info('Initializing service host')

        # install ivxv-common package in service host if required
        self.log.info('Detect ivxv-common package status')
        proc = self.ssh('dpkg -l ivxv-common', account='ivxv-admin')
        if proc.returncode:
            self.log.info('ivxv-common package is not installed')
            # detect ivxv-common package dependencies
            pkg_path = os.path.join(CONFIG.get('deb_pkg_path'),
                                    COLLECTOR_PACKAGE_FILENAMES['ivxv-common'])
            deb = debfile.DebFile(pkg_path)
            deps = []
            for field in 'Depends', 'Recommends':
                deps_str = deb.control.debcontrol().get(field)
                if deps_str is not None:
                    deps += [dep.split(' ')[0] for dep in deps_str.split(', ')]

            # copy package file to service host
            remote_path = os.path.join(
                '/root', COLLECTOR_PACKAGE_FILENAMES['ivxv-common'])
            if not self.scp(local_path=pkg_path,
                            remote_path=remote_path,
                            description='ivxv-common package file',
                            account='root'):
                return False

            # install ivxv-common package dependencies
            self.log.info('Updating package list')
            cmd = [
                'env', 'TERM=dumb', 'DEBIAN_FRONTEND=noninteractive',
                'apt-get', 'update'
            ]
            proc = self.ssh(cmd, account='root')
            if proc.returncode:
                self.log.info('Failed to update package list')
                return False

            # install ivxv-common package dependencies
            self.log.info('Installing ivxv-common package dependencies')
            cmd = [
                'env', 'TERM=dumb', 'DEBIAN_FRONTEND=noninteractive',
                'apt-get', '--yes', 'install'
            ] + deps
            proc = self.ssh(cmd, account='root')
            if proc.returncode:
                self.log.info('Failed to install ivxv-common package '
                              'dependencies')
                return False

            # install ivxv-common package
            self.log.info('Installing ivxv-common package')
            proc = self.ssh('dpkg -i {}'.format(remote_path), account='root')
            if proc.returncode:
                self.log.info('Failed to install ivxv-common package')
                return False

            # create management service access to
            # service host ivxv-admin account
            self.log.info('Creating access to the management account '
                          '"ivxv-admin" in service host')
            cmd = 'tee --append /home/ivxv-admin/.ssh/authorized_keys'
            with open(IVXV_ADMIN_SSH_PUBKEY_FILE, 'rb') as key_fp:
                proc = self.ssh(cmd, account='root', stdin=key_fp)
            if proc.returncode:
                self.log.error('Failed to create SSH access '
                               'to management account in service host')
                return False

            # remove management service access to service host root account
            if not self.remove_root_access():
                return False

        # initialize service host
        proc = self.ssh('sudo ivxv-admin-helper init-host',
                        account='ivxv-admin')
        if proc.returncode:
            self.log.error('Failed to initialize service host')
            return False

        self.log.info('Service host initialized successfully')
        return True
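
The Depends/Recommends extraction above works the same outside the deployment flow; a reduced sketch, with a hypothetical package path:

from debian import debfile

deb = debfile.DebFile('ivxv-common_1.0_all.deb')   # hypothetical path
deps = []
for field in ('Depends', 'Recommends'):
    value = deb.control.debcontrol().get(field)
    if value is not None:
        # keep only the package names, dropping version constraints
        deps += [entry.split(' ')[0] for entry in value.split(', ')]
print(deps)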
Example #11
def index_deb_info(name, path, obj, indexname):
    # relative to the repository root, which would be something
    # like /api/v1/{channel}/
    filename = "pool/{}/{}".format(indexname, name)

    # md5, sha1, and sha256 of file
    blocksize = 65536
    md5sum_hasher = hashlib.md5()
    sha1_hasher = hashlib.sha1()
    sha256_hasher = hashlib.sha256()
    sha512_hasher = hashlib.sha512()
    with open(path, 'rb') as fin:
        buf = fin.read(blocksize)
        while len(buf) > 0:
            md5sum_hasher.update(buf)
            sha1_hasher.update(buf)
            sha256_hasher.update(buf)
            sha512_hasher.update(buf)
            buf = fin.read(blocksize)
    md5sum = md5sum_hasher.hexdigest()
    sha1 = sha1_hasher.hexdigest()
    sha256 = sha256_hasher.hexdigest()
    sha512 = sha512_hasher.hexdigest()

    # the rest is ripped out of the .deb file or generated based
    # on the information there.
    deb = debfile.DebFile(path)
    control = deb.control.debcontrol()

    # deb control is dict-like object where key lookups are case-insensitive
    multi_arch = control.get("multi-arch", None)
    package = control.get("package", None)
    source = control.get("source", None)
    version = control.get("version", None)
    section = control.get("section", None)
    priority = control.get("priority", None)
    architecture = control.get("architecture", None)
    essential = control.get("essential", None)
    depends = control.get("depends", None)
    recommends = control.get("recommends", None)
    suggests = control.get("suggests", None)
    enhances = control.get("enhances", None)
    pre_depends = control.get("pre-depends", None)
    installed_size = control.get("installed-size", None)
    maintainer = control.get("maintainer", None)
    description = control.get("description", None)
    description_md5 = control.get("description-md5", None)
    homepage = control.get("homepage", None)
    built_using = control.get("built_using", None)

    # if the description-md5 wasn't specified, compute it!
    # the computed value starts at the second character after the colon in the
    # control file (basically, allowing the 'Header: ' format of the text file)
    # and includes a trailing newline character. The value must be lowercase
    # hex md5.
    if not description_md5 and description:
        if description[-1] == "\n":
            description_md5 = hashlib.md5(description.encode()).hexdigest()
        else:
            description_md5 = hashlib.md5((description+"\n").encode()).hexdigest()

    # missing required fields are a deal breaker for including this package
    # in the index
    msg = name+" skipped for deb info: '{}' not found in control"
    if not package:
        logger.error(msg.format('Package'))
        return
    if not version:
        logger.error(msg.format('Version'))
        return
    if not architecture:
        logger.error(msg.format('Architecture'))
        return
    if not maintainer:
        logger.error(msg.format('Maintainer'))
        return
    if not description:
        logger.error(msg.format('Description'))
        return

    kwargs = dict(
        filename=filename,
        md5sum=md5sum,
        sha1=sha1,
        sha256=sha256,
        sha512=sha512,
        multi_arch=multi_arch,
        package=package,
        source=source,
        version=version,
        section=section,
        priority=priority,
        architecture=architecture,
        essential=essential,
        depends=depends,
        recommends=recommends,
        suggests=suggests,
        enhances=enhances,
        pre_depends=pre_depends,
        installed_size=installed_size,
        maintainer=maintainer,
        description=description,
        description_md5=description_md5,
        homepage=homepage,
        built_using=built_using)

    try:
        debinfo_dbobj = DBSession.query(DebInfo) \
                                 .filter_by(relic_id=obj.uid) \
                                 .one_or_none()
        with transaction.manager:
            if debinfo_dbobj:
                logger.info("Adding deb info for " + name)
                DBSession.query(DebInfo) \
                         .filter_by(uid=debinfo_dbobj.uid) \
                         .update(kwargs)
            else:
                logger.info("Updating deb info for " + name)
                kwargs['relic_id'] = obj.uid
                DBSession.add(DebInfo(**kwargs))
    except MultipleResultsFound:
        logger.error("Apparently there's more than one debinfo object"
                     "associated with '"+obj.name+"'")
Example #12
    def fill_arch_info(self, deb_info):
        self.deb_source_infos.append(deb_info)

        try:
            self.arch = [architectures_map[deb_info["Architecture"]], ]
        except KeyError:
            self.arch = default_arch

        if deb_info["Architecture"] == "i386":
            if self.arch_pkg_name.startswith('lib32-'):
                self.arch = ['x86_64']
            else:
                import sys
                sys.stderr.write("ERROR: There is a bug in this script, package '%s' is i386 (came from %s) and should start with 'lib32'. Check packages_map!\n" % (self.arch_pkg_name, deb_info["Package"]))


        try:
            deb_deps = deb_info["Depends"].split(', ')
        except KeyError:
            deb_deps = None

        domap = True
        #if self.arch_pkg_name == "amdgpu-pro" or self.arch_pkg_name == "lib32-amdgpu-pro":
            #domap = False

        # Removing unneeded dependencies
        if self.arch_pkg_name == "amdgpu-pro-meta" or self.arch_pkg_name == "lib32-amdgpu-pro-meta":
            # Removes dependency on many open components, which are provided by standard arch repos
            deb_deps.remove('amdgpu-hwe (= %s-%s)' % (pkgver_base, pkgver_build))  # for hwe releases
            # deb_deps.remove('amdgpu (= %s-%s)' % (pkgver_base, pkgver_build)) # for non-hwe releases, i.e. LTS release with xx.xx.0 version
        if self.arch_pkg_name == "amdgpu-pro-lib32-meta":
            deb_deps.remove('amdgpu (= %s-%s) | amdgpu-hwe (= %s-%s)' % (pkgver_base, pkgver_build, pkgver_base, pkgver_build))
            deb_deps.remove('amdgpu-lib32 (= %s-%s)' % (pkgver_base, pkgver_build))
        if self.arch_pkg_name == "opencl-amdgpu-pro-dev":
            deb_deps.remove('ocl-icd-libopencl1-amdgpu-pro (= %s-%s)' % (pkgver_base, pkgver_build))
        # if self.arch_pkg_name == "amf-amdgpu-pro":
        #     deb_deps.remove("libgl1-amdgpu-mesa-glx") # I do not know what is amf and if it will work normal without this dep, but I removed that open component
        if self.arch_pkg_name == "opencl-amdgpu-pro-meta":
            deb_deps.remove('amdgpu-dkms (= %s-%s)' % (pkgver_base, pkgver_build))  # I do not know why it wants amdgpu-dkms, but I did not build it, so just rm this dep for now
            deb_deps.remove('clinfo-amdgpu-pro (= %s-%s)' % (pkgver_base, pkgver_build))
            deb_deps.remove('ocl-icd-libopencl1-amdgpu-pro (= %s-%s)' % (pkgver_base, pkgver_build))
        if self.arch_pkg_name == "amf-amdgpu-pro":
            # adding dependencies of omitted opencl-amdgpu-pro package
            deb_deps.remove('opencl-amdgpu-pro')
            deb_deps.append('amdgpu-pro-core')
            # deb_deps.append('amdgpu-dkms') # amdgpu-dkms is not built currently, but there is such a dep. Probably it extends functionality?
            # deb_deps.append('libdrm...-amdgpu...) # opencl icd may depend on libdrm-amdgpu already, so skip this
            deb_deps.append('opencl-orca-amdgpu-pro-icd')
            deb_deps.append('opencl-amdgpu-pro-icd')

        if deb_deps:
            deb_deps = [ depWithAlt_to_singleDep(dep) if dependencyWithAltRE.search(dep) else dep for dep in deb_deps ]
            deb_deps = [ dependencyNameWithVersionRE.match(dep).groups() for dep in deb_deps ]
            deb_deps = [(replace_deps[deb_pkg_name] if deb_pkg_name in replace_deps else deb_pkg_name, version) for deb_pkg_name, version in deb_deps]
            deb_deps = ["\"" + convertName(deb_pkg_name, deb_info, domap) + convertVersionSpecifier(deb_pkg_name, version) + "\"" for deb_pkg_name, version in deb_deps if deb_pkg_name]
            deb_deps = [ dep for dep in deb_deps if dep != "\"\"" ]
            deb_deps = [ dep for dep in deb_deps if not dep.startswith("\"=")]

            # if self.arch_pkg_name == "opencl-amdgpu-pro-orca":
            #     deb_deps.append('\"libdrm-amdgpu=${major}_${minor}-${pkgrel}\"')
            # if self.arch_pkg_name == "lib32-opencl-amdgpu-pro-orca":
            #     deb_deps.append('\"lib32-libdrm-amdgpu=${major}_${minor}-${pkgrel}\"')
            # # I am not sure if it is needed for pal variant, but just to be safe:
            # if self.arch_pkg_name == "opencl-amdgpu-pro-pal":
            #     deb_deps.append('\"libdrm-amdgpu=${major}_${minor}-${pkgrel}\"')

            # remove all dependencies on itself
            deb_deps = [ dep for dep in deb_deps if dep[1:len(self.arch_pkg_name)+1] != self.arch_pkg_name ]

            if hasattr(self, 'depends') and self.depends:
                deb_deps += self.depends

            self.depends = list(sorted(set( deb_deps ))) # remove duplicates and append to already existing dependencies

        try:
            deb_suggs = deb_info["Suggests"].split(', ')
        except KeyError:
            deb_suggs = None

        try:
            deb_recomms = deb_info["Recommends"].split(', ')
        except KeyError:
            deb_recomms = None

        deb_optdeps = []
        if deb_suggs:
            deb_optdeps = deb_suggs
        if deb_recomms:
            deb_optdeps = deb_optdeps + deb_recomms

        if deb_optdeps:
            deb_optdeps = [depWithAlt_to_singleDep(dep) if dependencyWithAltRE.search(dep) else dep for dep in deb_optdeps]
            deb_optdeps = [dependencyNameWithVersionRE.match(dep).groups() for dep in deb_optdeps]
            deb_optdeps = [(replace_deps[deb_pkg_name] if deb_pkg_name in replace_deps else deb_pkg_name, version) for deb_pkg_name, version in deb_optdeps]
            deb_optdeps = ["\"" + convertName(deb_pkg_name, deb_info, domap) + convertVersionSpecifier(deb_pkg_name, version) + ": "
                           + (optdepends_descriptions[deb_pkg_name] if deb_pkg_name in optdepends_descriptions else "Warning unspecified optdep description" )
                           + "\"" for deb_pkg_name, version in deb_optdeps if deb_pkg_name]

        # remove all optional dependencies on itself
        deb_optdeps = [dep for dep in deb_optdeps if dep[1:len(self.arch_pkg_name) + 1] != self.arch_pkg_name]

        self.optdepends = self.optdepends + list(sorted(set(deb_optdeps)))

        if not hasattr(self, 'desc'):
            desc = deb_info["Description"].split("\n")
            if len(desc) > 2:
                desc = desc[0]
            else:
                desc = " ".join(x.strip() for x in desc)

            if deb_info["Architecture"] == "i386":
                desc += ' (32-bit)'

            self.desc = desc

        deb_info["Filename"] = deb_info["Filename"].replace("./","")
        deb_file = debfile.DebFile("src/amdgpu-pro-%s-%s-ubuntu-%s/%s" % (pkgver_base, pkgver_build, ubuntu_ver, deb_info["Filename"]))

        if not hasattr(self, 'license'):
            copyright_md5 = deb_file.md5sums()[b'usr/share/doc/%s/copyright' % (str.encode(deb_info["Package"]))]
            if copyright_md5 in licenses_hashes_map:
                self.license = "('%s')" % licenses_hashes_map[copyright_md5]
            else:
                self.license = "('NOT_IN_MAP')"

        if not hasattr(self,'backup'):
            if deb_file.control.has_file("conffiles"):
                self.backup = [ line.decode('utf-8').replace("\n","") for line in deb_file.control.get_file("conffiles") if line.decode('utf-8') ]
                self.backup = [ re.sub("^/", "", line) for line in self.backup ] # removing leading slash

        if not hasattr(self, 'install'):
            if Path("%s.install" % self.arch_pkg_name).is_file():
                self.install = "%s.install" % self.arch_pkg_name