Example #1
0
    def calculate_hashes(self):
        """Regenerate the "hashes" file for the current package directory.

        Writes one "name sha1 size" line per spec file, per source archive
        referenced by a spec (fetching missing archives into the source
        cache), and per auxiliary file found in subdirectories.
        """
        def write_archive_hash(urls, file_name):
            # Hash every source archive referenced by the spec; download any
            # archive that is not already present in the source cache.
            name, version = utils.parse_pkgname(file_name)
            for url in utils.parse_url_tag(urls, name, version):
                archive_name = os.path.basename(url)
                archive_path = os.path.join(conf.LPMSConfig().src_cache, archive_name)
                if not os.access(archive_path, os.F_OK):
                    fetcher.URLFetcher().run([url])
                sha1 = utils.sha1sum(archive_path)
                shelltools.echo("hashes", "%s %s %s" % (archive_name, sha1, os.path.getsize(archive_path)))

        # FIX: must be a tuple -- ('hashes') is just the string "hashes",
        # which made `f in excepts` a substring test (e.g. "hash" matched).
        excepts = ('hashes',)
        shelltools.remove_file("hashes")
        if len(self.files) == 0:
            self.files = os.listdir(self.current_dir)

        for f in self.files:
            if f in excepts:
                continue
            if f.endswith(cst.spec_suffix):
                out.normal("processing %s" % f)
                shelltools.echo("hashes", "%s %s %s" % (f, utils.sha1sum(f), os.path.getsize(f)))
                content = utils.import_script(f)
                # Parse the metadata once instead of twice.
                metadata = utils.metadata_parser(content["metadata"])
                if "src_url" in metadata:
                    write_archive_hash(metadata["src_url"], f)
                elif "src_url" in content:
                    write_archive_hash(content["src_url"], f)
                else:
                    lpms.terminate("src_url was not defined in spec")
                del content
            elif os.path.isdir(f):
                # Auxiliary files (patches and the like) shipped in subdirectories.
                for l in os.listdir(os.path.join(self.current_dir, f)):
                    path = os.path.join(f, l)
                    out.normal("processing %s" % path)
                    shelltools.echo("hashes", "%s %s %s" %  (path, utils.sha1sum(path), os.path.getsize(path)))
Example #2
0
    def calculate_hashes(self):
        """Regenerate the "hashes" file for the current package directory.

        Writes one "name sha1 size" line per spec file, per source archive
        referenced by a spec (fetching missing archives into the source
        cache), and per auxiliary file found in subdirectories.
        """
        def write_archive_hash(urls, file_name):
            # Hash every source archive referenced by the spec; download any
            # archive that is not already present in the source cache.
            name, version = utils.parse_pkgname(file_name)
            for url in utils.parse_url_tag(urls, name, version):
                archive_name = os.path.basename(url)
                archive_path = os.path.join(conf.LPMSConfig().src_cache,
                                            archive_name)
                if not os.access(archive_path, os.F_OK):
                    fetcher.URLFetcher().run([url])
                sha1 = utils.sha1sum(archive_path)
                shelltools.echo(
                    "hashes", "%s %s %s" %
                    (archive_name, sha1, os.path.getsize(archive_path)))

        # FIX: must be a tuple -- ('hashes') is just the string "hashes",
        # which made `f in excepts` a substring test (e.g. "hash" matched).
        excepts = ('hashes',)
        shelltools.remove_file("hashes")
        if len(self.files) == 0:
            self.files = os.listdir(self.current_dir)

        for f in self.files:
            if f in excepts:
                continue
            if f.endswith(cst.spec_suffix):
                out.normal("processing %s" % f)
                shelltools.echo(
                    "hashes",
                    "%s %s %s" % (f, utils.sha1sum(f), os.path.getsize(f)))
                content = utils.import_script(f)
                # Parse the metadata once instead of twice.
                metadata = utils.metadata_parser(content["metadata"])
                if "src_url" in metadata:
                    write_archive_hash(metadata["src_url"], f)
                elif "src_url" in content:
                    write_archive_hash(content["src_url"], f)
                else:
                    lpms.terminate("src_url was not defined in spec")
                del content
            elif os.path.isdir(f):
                # Auxiliary files (patches and the like) shipped in subdirectories.
                for l in os.listdir(os.path.join(self.current_dir, f)):
                    path = os.path.join(f, l)
                    out.normal("processing %s" % path)
                    shelltools.echo(
                        "hashes", "%s %s %s" %
                        (path, utils.sha1sum(path), os.path.getsize(path)))
Example #3
0
 def import_repo_news(self, repo):
     '''Import news items of the given repository into self.data.

     Each news script must provide "metadata" and a "message"; items whose
     metadata fails to parse are warned about and skipped.
     '''
     # News scripts live under <cst.repos>/<repo>/<cst.news_dir>.
     my_news_dir = os.path.join(cst.repos, repo, cst.news_dir)
     if not os.path.isdir(my_news_dir):
         return

     for news in os.listdir(my_news_dir):
         local = utils.import_script(os.path.join(my_news_dir, news))
         try:
             # metadata_parser raises IndexError on a malformed metadata block.
             metadata = utils.metadata_parser(local["metadata"], keys=metadata_keys)
         except IndexError:
             out.warn("Syntax errors found in %s" % os.path.join(my_news_dir, news))
             continue
         self.data.append((repo, metadata, local["message"]))
Example #4
0
    def import_repo_news(self, repo):
        '''Imports news of given repository'''
        # News scripts for a repository live under its news directory; a
        # repository without one simply has no news.
        news_dir = os.path.join(cst.repos, repo, cst.news_dir)
        if not os.path.isdir(news_dir):
            return

        for entry in os.listdir(news_dir):
            news_path = os.path.join(news_dir, entry)
            script = utils.import_script(news_path)
            try:
                parsed = utils.metadata_parser(script["metadata"],
                                               keys=metadata_keys)
            except IndexError:
                # Broken metadata: warn and skip this news item.
                out.warn("Syntax errors found in %s" % news_path)
                continue
            self.data.append((repo, parsed, script["message"]))
Example #5
0
    def set_environment_variables(self):
        '''Sets the environment variables used by the interpreter and other parts of lpms.

        Populates self.internals.env with package identity, paths (spec file,
        filesdir, work/build/install dirs), revision information and sandbox
        state, then creates the build directories.
        '''
        # TODO: This part seems dirty
        if self.inline_option_targets is not None and \
                self.package.id in self.inline_option_targets:
            self.internals.env.inline_option_targets = self.inline_option_targets[self.package.id]
        if self.conditional_versions is not None and \
                self.package.id in self.conditional_versions:
            self.internals.env.conditional_versions = self.conditional_versions[self.package.id]

        self.internals.env.package = self.package
        if self.dependencies is not None:
            self.internals.env.dependencies = self.dependencies.get(self.package.id, None)
        installed_package = self.instdb.find_package(package_name=self.package.name, \
                package_category=self.package.category, package_slot=self.package.slot)
        self.internals.env.previous_version = installed_package.get(0).version \
                if installed_package else None

        # Handle package conflicts and remove that conflicts if required
        # TODO: This mech. is obsolete
        if self.conflicts is not None and self.package.id in self.conflicts:
            # NOTE(review): this aliases self.instruction (no copy), so the
            # count/index fields below mutate the shared instruction object.
            conflict_instruct = self.instruction
            conflict_instruct.count = len(self.conflicts[self.package.id])
            for index, conflict in enumerate(self.conflicts[self.package.id], 1):
                conflict_instruct['index'] = index
                # FIX: use the loop variable `conflict`; the previous code read
                # `self.conflict`, which is never assigned in this method.
                conflict_category, conflict_name, conflict_slot = conflict.split("/")
                conflict_package = self.instdb.find_package(package_name=conflict_name, \
                        package_category=conflict_category, \
                        package_slot=conflict_slot).get(0)
                if not initpreter.InitializeInterpreter(conflict_package, conflict_instruct,
                        ['remove'], remove=True).initialize():
                    out.error("an error occured during remove operation: %s/%s/%s-%s" % \
                            (conflict_package.repo, conflict_package.category, \
                            conflict_package.name, conflict_package.version))

        # FIXME: This is no good, perhaps, we should only import some variables to internal environment
        self.internals.env.raw.update(self.instruction.raw)

        # Absolute path of the spec file.
        self.internals.env.spec_file = os.path.join(
                cst.repos,
                self.package.repo,
                self.package.category,
                self.package.name,
                self.package.name+"-"+self.package.version+cst.spec_suffix
        )

        # Set metadata fields from the spec file.
        # FIX: 'name' was listed twice in this tuple; once is enough.
        metadata_fields = ('repo', 'name', 'category', 'version', 'slot', 'options')
        for field in metadata_fields:
            setattr(self.internals.env, field, getattr(self.package, field))

        # Fullname of the package that consists of its name and version
        self.internals.env.fullname = self.internals.env.name+"-"+self.internals.env.version

        # applied options is a set that contains options which will be applied to the package
        if self.options is not None and self.package.id in self.options:
            self.internals.env.applied_options = self.options[self.package.id]

        # set local environment variable
        if not self.instruction.unset_env_variables:
            self.set_local_environment_variables()

        # A revision suffix looks like "-rN" or "-rNN"; try two digits first.
        interphase = re.search(r'-r[0-9][0-9]', self.internals.env.version)
        if not interphase:
            interphase = re.search(r'-r[0-9]', self.internals.env.version)
        # Before setting raw_version and revision, set their initial values
        self.internals.env.revision = ""
        self.internals.env.raw_version = self.internals.env.version

        # Now, set real values of these variables if the package is revisioned.
        if interphase is not None and interphase.group():
            self.internals.env.raw_version = self.internals.env.version.replace(interphase.group(), "")
            self.internals.env.revision = interphase.group()

        # Import the spec
        self.mangle_spec()
        metadata = utils.metadata_parser(self.internals.env.metadata)
        # FIX: `in` instead of the Python-2-only dict.has_key().
        if "src_url" in metadata:
            self.internals.env.src_url = metadata["src_url"]
        else:
            if not hasattr(self.internals.env, "src_url"):
                self.internals.env.src_url = None

        if self.internals.env.srcdir is None:
            # Cut revision number from srcdir to prevent unpacking failures
            srcdir = self.internals.env.name+"-"\
                    +self.internals.env.version.replace(self.internals.env.revision, "")
            self.internals.env.srcdir = srcdir

        filesdir = os.path.join(
                cst.repos,
                self.internals.env.repo,
                self.internals.env.category,
                self.internals.env.name,
                cst.files_dir
        )
        setattr(self.internals.env, "filesdir", filesdir)

        # TODO: What is src_cache?
        setattr(self.internals.env, "src_cache", cst.src_cache)

        # Set sandbox variable to switch sandbox
        if not self.config.sandbox and self.instruction.enable_sandbox:
            self.internals.env.sandbox = True
        elif self.config.sandbox and self.instruction.disable_sandbox:
            self.internals.env.sandbox = False
        else:
            self.internals.env.sandbox = self.config.sandbox

        # Set work_dir, build_dir and install_dir variables to lpms' internal build environment.
        self.internals.env.work_dir = os.path.join(
                self.config.build_dir,
                self.internals.env.category,
                self.internals.env.fullname
        )
        self.internals.env.build_dir = os.path.join(
                self.config.build_dir,
                self.internals.env.category,
                self.internals.env.fullname,
                "source",
                self.internals.env.srcdir)
        self.internals.env.install_dir = os.path.join(
                self.config.build_dir,
                self.internals.env.category,
                self.internals.env.fullname,
                "install")

        # Create these directories
        for target in ('build_dir', 'install_dir'):
            if not os.path.isdir(getattr(self.internals.env, target)):
                os.makedirs(getattr(self.internals.env, target))
        # A non-empty install_dir from a previous run is discarded unless the
        # user asked to resume the build.
        if not self.instruction.resume_build and len(os.listdir(self.internals.env.install_dir)):
            shelltools.remove_dir(self.internals.env.install_dir)
Example #6
0
    def set_environment_variables(self):
        '''Sets the environment variables used by the interpreter and other parts of lpms.

        Populates self.internals.env with package identity, paths (spec file,
        filesdir, work/build/install dirs), revision information and sandbox
        state, then creates the build directories.
        '''
        # TODO: This part seems dirty
        if self.inline_option_targets is not None and \
                self.package.id in self.inline_option_targets:
            self.internals.env.inline_option_targets = self.inline_option_targets[
                self.package.id]
        if self.conditional_versions is not None and \
                self.package.id in self.conditional_versions:
            self.internals.env.conditional_versions = self.conditional_versions[
                self.package.id]

        self.internals.env.package = self.package
        if self.dependencies is not None:
            self.internals.env.dependencies = self.dependencies.get(
                self.package.id, None)
        installed_package = self.instdb.find_package(package_name=self.package.name, \
                package_category=self.package.category, package_slot=self.package.slot)
        self.internals.env.previous_version = installed_package.get(0).version \
                if installed_package else None

        # Handle package conflicts and remove that conflicts if required
        # TODO: This mech. is obsolete
        if self.conflicts is not None and self.package.id in self.conflicts:
            # NOTE(review): this aliases self.instruction (no copy), so the
            # count/index fields below mutate the shared instruction object.
            conflict_instruct = self.instruction
            conflict_instruct.count = len(self.conflicts[self.package.id])
            for index, conflict in enumerate(self.conflicts[self.package.id],
                                             1):
                conflict_instruct['index'] = index
                # FIX: use the loop variable `conflict`; the previous code read
                # `self.conflict`, which is never assigned in this method.
                conflict_category, conflict_name, conflict_slot = conflict.split(
                    "/")
                conflict_package = self.instdb.find_package(package_name=conflict_name, \
                        package_category=conflict_category, \
                        package_slot=conflict_slot).get(0)
                if not initpreter.InitializeInterpreter(
                        conflict_package,
                        conflict_instruct, ['remove'],
                        remove=True).initialize():
                    out.error("an error occured during remove operation: %s/%s/%s-%s" % \
                            (conflict_package.repo, conflict_package.category, \
                            conflict_package.name, conflict_package.version))

        # FIXME: This is no good, perhaps, we should only import some variables to internal environment
        self.internals.env.raw.update(self.instruction.raw)

        # Absolute path of the spec file.
        self.internals.env.spec_file = os.path.join(
            cst.repos, self.package.repo, self.package.category,
            self.package.name,
            self.package.name + "-" + self.package.version + cst.spec_suffix)

        # Set metadata fields from the spec file.
        # FIX: 'name' was listed twice in this tuple; once is enough.
        metadata_fields = ('repo', 'name', 'category', 'version', 'slot',
                           'options')
        for field in metadata_fields:
            setattr(self.internals.env, field, getattr(self.package, field))

        # Fullname of the package that consists of its name and version
        self.internals.env.fullname = self.internals.env.name + "-" + self.internals.env.version

        # applied options is a set that contains options which will be applied to the package
        if self.options is not None and self.package.id in self.options:
            self.internals.env.applied_options = self.options[self.package.id]

        # set local environment variable
        if not self.instruction.unset_env_variables:
            self.set_local_environment_variables()

        # A revision suffix looks like "-rN" or "-rNN"; try two digits first.
        interphase = re.search(r'-r[0-9][0-9]', self.internals.env.version)
        if not interphase:
            interphase = re.search(r'-r[0-9]', self.internals.env.version)
        # Before setting raw_version and revision, set their initial values
        self.internals.env.revision = ""
        self.internals.env.raw_version = self.internals.env.version

        # Now, set real values of these variables if the package is revisioned.
        if interphase is not None and interphase.group():
            self.internals.env.raw_version = self.internals.env.version.replace(
                interphase.group(), "")
            self.internals.env.revision = interphase.group()

        # Import the spec
        self.mangle_spec()
        metadata = utils.metadata_parser(self.internals.env.metadata)
        # FIX: `in` instead of the Python-2-only dict.has_key().
        if "src_url" in metadata:
            self.internals.env.src_url = metadata["src_url"]
        else:
            if not hasattr(self.internals.env, "src_url"):
                self.internals.env.src_url = None

        if self.internals.env.srcdir is None:
            # Cut revision number from srcdir to prevent unpacking failures
            srcdir = self.internals.env.name+"-"\
                    +self.internals.env.version.replace(self.internals.env.revision, "")
            self.internals.env.srcdir = srcdir

        filesdir = os.path.join(cst.repos, self.internals.env.repo,
                                self.internals.env.category,
                                self.internals.env.name, cst.files_dir)
        setattr(self.internals.env, "filesdir", filesdir)

        # TODO: What is src_cache?
        setattr(self.internals.env, "src_cache", cst.src_cache)

        # Set sandbox variable to switch sandbox
        if not self.config.sandbox and self.instruction.enable_sandbox:
            self.internals.env.sandbox = True
        elif self.config.sandbox and self.instruction.disable_sandbox:
            self.internals.env.sandbox = False
        else:
            self.internals.env.sandbox = self.config.sandbox

        # Set work_dir, build_dir and install_dir variables to lpms' internal build environment.
        self.internals.env.work_dir = os.path.join(self.config.build_dir,
                                                   self.internals.env.category,
                                                   self.internals.env.fullname)
        self.internals.env.build_dir = os.path.join(
            self.config.build_dir, self.internals.env.category,
            self.internals.env.fullname, "source", self.internals.env.srcdir)
        self.internals.env.install_dir = os.path.join(
            self.config.build_dir, self.internals.env.category,
            self.internals.env.fullname, "install")

        # Create these directories
        for target in ('build_dir', 'install_dir'):
            if not os.path.isdir(getattr(self.internals.env, target)):
                os.makedirs(getattr(self.internals.env, target))
        # A non-empty install_dir from a previous run is discarded unless the
        # user asked to resume the build.
        if not self.instruction.resume_build and len(
                os.listdir(self.internals.env.install_dir)):
            shelltools.remove_dir(self.internals.env.install_dir)
Example #7
0
    def update_package(self, repo_path, category, my_pkg, my_version = None, update = False):
        """Parses every spec in a package directory and inserts the results
        into the repository database.

        repo_path  -- absolute path of the repository
        category   -- category of the package
        my_pkg     -- package directory name
        my_version -- unused; kept for interface compatibility
        update     -- when True, delete existing records before inserting
        """
        dataset = LCollect()
        # Register some variables to use after
        self.env.repo = os.path.basename(repo_path)
        self.env.category = category

        dataset.repo = self.env.repo
        dataset.category = category

        os.chdir(os.path.join(repo_path, category, my_pkg))
        for pkg in glob.glob("*"+cst.spec_suffix):
            script_path = os.path.join(repo_path, category, my_pkg, pkg)

            self.env.name, self.env.version = utils.parse_pkgname(pkg.split(cst.spec_suffix)[0])

            dataset.name = self.env.name
            dataset.version = self.env.version

            # FIXME: We must develop an upper-class or environment to
            # use in such cases to prevent code duplication

            # Begins code duplication: split "-rN"/"-rNN" revision suffix
            interphase = re.search(r'-r[0-9][0-9]', self.env.version)
            if not interphase:
                interphase = re.search(r'-r[0-9]', self.env.version)
            self.env.raw_version = self.env.version
            self.env.revision = ""
            # Now, set real values of these variables if the package is revisioned.
            if interphase is not None and interphase.group():
                self.env.raw_version = self.env.version.replace(interphase.group(), "")
                self.env.revision = interphase.group()
            # End of code duplication

            self.env.__dict__["fullname"] = self.env.name+"-"+self.env.version

            if not self.import_script(script_path):
                out.error("an error occured while processing the spec: %s" \
                        % out.color(script_path, "red"))
                out.error("please report the above error messages to the package maintainer.")
                continue

            metadata = utils.metadata_parser(self.env.metadata)
            metadata.update({"name": self.env.name, "version": self.env.version})
            # This method checks metadata integrity.
            # It warns the user and passes the spec if a spec is broken
            self.check_metadata_integrity(metadata)
            # These values are optional
            if not "options" in metadata:
                metadata.update({"options": None})
            if not "slot" in metadata:
                metadata.update({"slot": "0"})
            if not "src_url" in metadata:
                metadata.update({"src_url": None})

            if lpms.getopt("--verbose"):
                out.write("    %s-%s\n" % (self.env.name, self.env.version))

            try:
                dataset.summary = metadata['summary']
                dataset.homepage = metadata['homepage']
                dataset.license = metadata['license']
                dataset.src_uri = metadata['src_url']
                if metadata['options'] is None:
                    dataset.options = None
                else:
                    dataset.options = metadata['options'].split(" ")
                dataset.slot = metadata['slot']

            except KeyError as err:
                # FIX: the original referenced an undefined name `repo_name`
                # here, raising NameError instead of the intended message;
                # self.env.repo holds the repository name (set above).
                out.error("%s/%s/%s-%s: invalid metadata" % (self.env.repo, category, \
                        self.env.name, self.env.version))
                out.warn("repository update was failed and the repository database was removed.")
                out.warn("you can run 'lpms --reload-previous-repodb' command to reload previous db version.")
                lpms.terminate("good luck!")

            if update:
                self.repodb.delete_package(package_repo=dataset.repo, package_category=dataset.category, \
                        package_name=self.env.name, package_version=self.env.version)

            static_depends_runtime = []
            static_depends_build = []
            static_depends_postmerge = []
            static_depends_conflict = []
            if 'depends' in self.env.__dict__:
                deps = utils.depends_parser(self.env.depends)
                if 'runtime' in deps:
                    static_depends_runtime.extend(deps['runtime'])
                if 'build' in deps:
                    static_depends_build.extend(deps['build'])
                if 'common' in deps:
                    # 'common' dependencies apply at both build and run time.
                    static_depends_runtime.extend(deps['common'])
                    static_depends_build.extend(deps['common'])
                if 'postmerge' in deps:
                    static_depends_postmerge.extend(deps['postmerge'])
                if 'conflict' in deps:
                    static_depends_conflict.extend(deps['conflict'])

            optional_depends_runtime = []
            optional_depends_build = []
            optional_depends_postmerge = []
            optional_depends_conflict = []
            for opt in ('opt_common', 'opt_conflict', 'opt_postmerge', 'opt_runtime', 'opt_build'):
                try:
                    deps = utils.parse_opt_deps(getattr(self.env, opt))
                    if opt.split("_")[1] == "runtime":
                        optional_depends_runtime.append(deps)
                    elif opt.split("_")[1] == "build":
                        optional_depends_build.append(deps)
                    elif opt.split("_")[1] == "common":
                        optional_depends_build.append(deps)
                        optional_depends_runtime.append(deps)
                    elif opt.split("_")[1] == "postmerge":
                        optional_depends_postmerge.append(deps)
                    elif opt.split("_")[1] == "conflict":
                        optional_depends_conflict.append(deps)
                    del deps
                except AttributeError:
                    # The spec simply does not define this optional block.
                    continue

            dataset.optional_depends_runtime = optional_depends_runtime
            dataset.optional_depends_build = optional_depends_build
            dataset.optional_depends_postmerge = optional_depends_postmerge
            dataset.optional_depends_conflict = optional_depends_conflict

            dataset.static_depends_runtime = static_depends_runtime
            dataset.static_depends_build = static_depends_build
            dataset.static_depends_postmerge = static_depends_postmerge
            dataset.static_depends_conflict = static_depends_conflict

            # NOTE(review): 'arch' is not defaulted above like options/slot/
            # src_url; presumably check_metadata_integrity guarantees it -- confirm.
            if metadata['arch'] is not None:
                arches = metadata['arch'].split(" ")
                for arch in arches:
                    # One database row per supported architecture.
                    dataset.arch = arch
                    self.repodb.insert_package(dataset)
            else:
                dataset.arch = None
                self.repodb.insert_package(dataset)

            # remove optional keys so they do not leak into the next spec
            for key in ('depends', 'options', 'opt_runtime', 'opt_build', \
                    'opt_conflict', 'opt_common', 'opt_postmerge'):
                try:
                    del self.env.__dict__[key]
                except KeyError:
                    pass
            self.packages_num += 1
Example #8
0
    def update_package(self,
                       repo_path,
                       category,
                       my_pkg,
                       my_version=None,
                       update=False):
        """Parses every spec in a package directory and inserts the results
        into the repository database.

        repo_path  -- absolute path of the repository
        category   -- category of the package
        my_pkg     -- package directory name
        my_version -- unused; kept for interface compatibility
        update     -- when True, delete existing records before inserting
        """
        dataset = LCollect()
        # Register some variables to use after
        self.env.repo = os.path.basename(repo_path)
        self.env.category = category

        dataset.repo = self.env.repo
        dataset.category = category

        os.chdir(os.path.join(repo_path, category, my_pkg))
        for pkg in glob.glob("*" + cst.spec_suffix):
            script_path = os.path.join(repo_path, category, my_pkg, pkg)

            self.env.name, self.env.version = utils.parse_pkgname(
                pkg.split(cst.spec_suffix)[0])

            dataset.name = self.env.name
            dataset.version = self.env.version

            # FIXME: We must develop an upper-class or environment to
            # use in such cases to prevent code duplication

            # Begins code duplication: split "-rN"/"-rNN" revision suffix
            interphase = re.search(r'-r[0-9][0-9]', self.env.version)
            if not interphase:
                interphase = re.search(r'-r[0-9]', self.env.version)
            self.env.raw_version = self.env.version
            self.env.revision = ""
            # Now, set real values of these variables if the package is revisioned.
            if interphase is not None and interphase.group():
                self.env.raw_version = self.env.version.replace(
                    interphase.group(), "")
                self.env.revision = interphase.group()
            # End of code duplication

            self.env.__dict__[
                "fullname"] = self.env.name + "-" + self.env.version

            if not self.import_script(script_path):
                out.error("an error occured while processing the spec: %s" \
                        % out.color(script_path, "red"))
                out.error(
                    "please report the above error messages to the package maintainer."
                )
                continue

            metadata = utils.metadata_parser(self.env.metadata)
            metadata.update({
                "name": self.env.name,
                "version": self.env.version
            })
            # This method checks metadata integrity.
            # It warns the user and passes the spec if a spec is broken
            self.check_metadata_integrity(metadata)
            # These values are optional
            if not "options" in metadata:
                metadata.update({"options": None})
            if not "slot" in metadata:
                metadata.update({"slot": "0"})
            if not "src_url" in metadata:
                metadata.update({"src_url": None})

            if lpms.getopt("--verbose"):
                out.write("    %s-%s\n" % (self.env.name, self.env.version))

            try:
                dataset.summary = metadata['summary']
                dataset.homepage = metadata['homepage']
                dataset.license = metadata['license']
                dataset.src_uri = metadata['src_url']
                if metadata['options'] is None:
                    dataset.options = None
                else:
                    dataset.options = metadata['options'].split(" ")
                dataset.slot = metadata['slot']

            except KeyError as err:
                # FIX: the original referenced an undefined name `repo_name`
                # here, raising NameError instead of the intended message;
                # self.env.repo holds the repository name (set above).
                out.error("%s/%s/%s-%s: invalid metadata" % (self.env.repo, category, \
                        self.env.name, self.env.version))
                out.warn(
                    "repository update was failed and the repository database was removed."
                )
                out.warn(
                    "you can run 'lpms --reload-previous-repodb' command to reload previous db version."
                )
                lpms.terminate("good luck!")

            if update:
                self.repodb.delete_package(package_repo=dataset.repo, package_category=dataset.category, \
                        package_name=self.env.name, package_version=self.env.version)

            static_depends_runtime = []
            static_depends_build = []
            static_depends_postmerge = []
            static_depends_conflict = []
            if 'depends' in self.env.__dict__:
                deps = utils.depends_parser(self.env.depends)
                if 'runtime' in deps:
                    static_depends_runtime.extend(deps['runtime'])
                if 'build' in deps:
                    static_depends_build.extend(deps['build'])
                if 'common' in deps:
                    # 'common' dependencies apply at both build and run time.
                    static_depends_runtime.extend(deps['common'])
                    static_depends_build.extend(deps['common'])
                if 'postmerge' in deps:
                    static_depends_postmerge.extend(deps['postmerge'])
                if 'conflict' in deps:
                    static_depends_conflict.extend(deps['conflict'])

            optional_depends_runtime = []
            optional_depends_build = []
            optional_depends_postmerge = []
            optional_depends_conflict = []
            for opt in ('opt_common', 'opt_conflict', 'opt_postmerge',
                        'opt_runtime', 'opt_build'):
                try:
                    deps = utils.parse_opt_deps(getattr(self.env, opt))
                    if opt.split("_")[1] == "runtime":
                        optional_depends_runtime.append(deps)
                    elif opt.split("_")[1] == "build":
                        optional_depends_build.append(deps)
                    elif opt.split("_")[1] == "common":
                        optional_depends_build.append(deps)
                        optional_depends_runtime.append(deps)
                    elif opt.split("_")[1] == "postmerge":
                        optional_depends_postmerge.append(deps)
                    elif opt.split("_")[1] == "conflict":
                        optional_depends_conflict.append(deps)
                    del deps
                except AttributeError:
                    # The spec simply does not define this optional block.
                    continue

            dataset.optional_depends_runtime = optional_depends_runtime
            dataset.optional_depends_build = optional_depends_build
            dataset.optional_depends_postmerge = optional_depends_postmerge
            dataset.optional_depends_conflict = optional_depends_conflict

            dataset.static_depends_runtime = static_depends_runtime
            dataset.static_depends_build = static_depends_build
            dataset.static_depends_postmerge = static_depends_postmerge
            dataset.static_depends_conflict = static_depends_conflict

            # NOTE(review): 'arch' is not defaulted above like options/slot/
            # src_url; presumably check_metadata_integrity guarantees it -- confirm.
            if metadata['arch'] is not None:
                arches = metadata['arch'].split(" ")
                for arch in arches:
                    # One database row per supported architecture.
                    dataset.arch = arch
                    self.repodb.insert_package(dataset)
            else:
                dataset.arch = None
                self.repodb.insert_package(dataset)

            # remove optional keys so they do not leak into the next spec
            for key in ('depends', 'options', 'opt_runtime', 'opt_build', \
                    'opt_conflict', 'opt_common', 'opt_postmerge'):
                try:
                    del self.env.__dict__[key]
                except KeyError:
                    pass
            self.packages_num += 1