def find_conditional_versions(self, **kwargs):
    """Return the conditional-version records matching the given filters.

    Keyword arguments:
        package_id -- restrict to a single owner package (optional)
        target     -- restrict to a single "category/name/slot" target (optional)

    Returns a PackageItem of LCollect objects, each carrying
    package_id, target and the unpickled decision_point dict.
    """
    pkg_id = kwargs.get("package_id", None)
    target = kwargs.get("target", None)
    rows = self.database.find_conditional_versions(pkg_id, target)
    collected = PackageItem()
    for row in rows:
        entry = LCollect()
        entry.package_id = row[0]
        entry.target = row[1]
        # NOTE(review): decision points are stored pickled in the database;
        # pickle.loads on untrusted DB content is unsafe — assumes the DB
        # is locally controlled.
        entry.decision_point = pickle.loads(str(row[2]))
        collected.append(entry)
    return collected
def get_package_metadata(self, **kwargs):
    """Fetch a single package's metadata row and wrap it in an LCollect.

    Either package_id alone, or the full (package_repo, package_category,
    package_name, package_version) tuple must be supplied.

    Raises:
        MissingInternalParameter -- when neither form is complete.
    """
    object_items = ('id', 'repo', 'category', 'name', 'version', 'slot',
            'summary', 'homepage', 'license', 'src_uri', 'arch', 'options')
    # Set the keywords
    name = kwargs.get("package_name", None)
    p_id = kwargs.get("package_id", None)
    repo = kwargs.get("package_repo", None)
    category = kwargs.get("package_category", None)
    version = kwargs.get("package_version", None)
    if p_id is not None:
        package_query = self.database.get_package_metadata(package_id=p_id)
    else:
        if None in (repo, category, name, version):
            # BUGFIX: the format string was raised with no arguments, so the
            # user saw literal "%s/%s/%s-%s is meaningless".
            raise MissingInternalParameter("%s/%s/%s-%s is meaningless"
                    % (repo, category, name, version))
        package_query = self.database.get_package_metadata(package_repo=repo, \
                package_category=category, package_name=name, package_version=version)
    # Create a LCollect object
    pkg_obj = LCollect()
    # Add the fields to the object. 'options' (column 11) is stored pickled.
    for index, item in enumerate(object_items):
        if item == 'options':
            setattr(pkg_obj, item, pickle.loads(str(package_query[index])))
        else:
            setattr(pkg_obj, item, package_query[index])
    return pkg_obj
def get_parent_package(self, **kwargs):
    """Return the parent of a package as an LCollect with category/name/slot.

    Accepts either package_id, or the (package_category, package_name,
    package_version) triple.

    Returns None when the package has no parent recorded.

    Raises:
        DatabaseAPIError -- when the triple form is used and a key is missing.
    """
    if "package_id" in kwargs:
        parent = self.database.get_parent_package(
                package_id=kwargs["package_id"])
    else:
        # BUGFIX: previously this validation and lookup ran unconditionally,
        # clobbering the package_id result and raising DatabaseAPIError even
        # when package_id was supplied.
        for key in ("package_category", "package_name", "package_version"):
            if not key in kwargs:
                raise DatabaseAPIError("%s is missing." % key)
        name = kwargs.get("package_name", None)
        category = kwargs.get("package_category", None)
        version = kwargs.get("package_version", None)
        parent = self.database.get_parent_package(category, name, version)
    package = LCollect()
    if parent is None:
        return
    # parent is stored as "category/name/slot"
    package.category, package.name, package.slot = parent.split("/")
    return package
def get_package_dependencies(self, package_id):
    """Load all pickled dependency columns for a package into an LCollect.

    Parameters:
        package_id -- primary key of the package row.

    Returns an LCollect with one attribute per dependency kind
    (optional/static x build/runtime/postmerge/conflict, plus reverses).
    """
    object_items = ('optional_depends_build', 'optional_depends_runtime',
            'optional_depends_postmerge', 'optional_depends_conflict',
            'static_depends_build', 'static_depends_runtime',
            'static_depends_postmerge', 'static_depends_conflict',
            'optional_reverse_build', 'optional_reverse_runtime',
            'optional_reverse_postmerge', 'optional_reverse_conflict',
            'static_reverse_build', 'static_reverse_runtime',
            'static_reverse_postmerge', 'static_reverse_conflict')
    package_query = self.database.get_package_dependencies(package_id)
    # Create a LCollect object
    pkg_obj = LCollect()
    # IDIOM: enumerate instead of tuple.index() in the loop (the latter
    # re-scanned the tuple for every attribute — O(n^2)).
    for index, item in enumerate(object_items):
        # every dependency column is stored pickled
        setattr(pkg_obj, item, pickle.loads(str(package_query[index])))
    return pkg_obj
def create_operation_plan(self):
    '''Resolve dependencies and prepares a convenient operation plan'''
    # NOTE(review): reconstructed indentation from a whitespace-mangled
    # original; nesting follows the apparent token order — verify against VCS.
    single_packages = PackageItem()
    for package in self.packages:
        self.parent_package = package
        self.current_package = None
        self.package_heap[package.id] = package
        dependencies = []
        package_dependencies = self.collect_dependencies(package)
        if not package_dependencies:
            single_packages.add(package)
            continue
        # Create a list that consists of parent and child items
        for dependency in package_dependencies:
            dependency.parent = package.category+"/"+package.name+"/"+package.slot
            dependencies.append((package.id, dependency))
        # Breadth-first walk over the dependency graph of this package
        while True:
            buff = []
            for parent, dependency in dependencies:
                self.current_package = dependency
                self.parent_package = None
                self.package_query.append((dependency.id, parent))
                if dependency.id in self.processed:
                    if self.processed[dependency.id] == self.package_options.get(dependency.id, None):
                        # This package was processed and it has no option changes
                        continue
                # Keep the package options to prevent extra transaction
                self.processed[dependency.id] = self.package_options.get(dependency.id, None)
                # Keep the package information for the next operations.
                # We don't want to create a new transaction for it.
                self.package_heap[dependency.id] = dependency
                # Get its dependencies
                package_collection = self.collect_dependencies(dependency)
                if not package_collection:
                    # The item has no dependency
                    continue
                # Create a list that consists of parent and child items
                for item in package_collection:
                    item.parent = package.category+"/"+package.name+"/"+package.slot
                    buff.append((dependency.id, item))
            if not buff:
                # End of the node
                break
            dependencies = buff
    try:
        # Sort packages for building operation plan
        plan = sorter.topsort(self.package_query)
    except sorter.CycleError as err:
        answer, num_parents, children = err
        out.brightred("Circular dependency detected:\n")
        for items in sorter.find_cycles(parent_children=children):
            for item in items:
                package = self.repodb.find_package(package_id=item).get(0)
                out.write(package.repo+"/"+package.category+"/"+package.name+"-"
                        +package.version+":"+package.slot+" ")
            out.write("\n")
        raise DependencyError
    # This part detects inline option conflicts
    removed = {}
    option_conflict = set()
    for package_id in self.inline_option_targets:
        for target in self.inline_option_targets[package_id]:
            for option in self.inline_option_targets[package_id][target]:
                if option.startswith("-"):
                    if option in removed:
                        removed[option].add((package_id, target))
                    else:
                        removed[option] = set([(package_id, target)])
                else:
                    if "-"+option in removed:
                        for (my_pkg_id, my_target) in removed["-"+option]:
                            if my_target == target:
                                option_conflict.add((my_target,
                                        self.package_heap[package_id],
                                        self.package_heap[my_pkg_id],
                                        option))
    if option_conflict:
        out.error("option conflict detected:\n")
        for (pkg, add, remove, option) in option_conflict:
            out.error(out.color(option, "red")+" option on "+pkg+"\n")
            out.warn("%s/%s/%s/%s adds the option." % (add.repo, add.category,
                    add.name, add.version))
            out.warn("%s/%s/%s/%s removes the option." % (remove.repo, remove.category,
                    remove.name, remove.version))
        lpms.terminate()
    self.conditional_versions = {}
    for (key, values) in self.conditional_packages.items():
        for value in values:
            target_package = self.package_heap[key]
            my_item = {
                    "type": value["type"],
                    "version": value["version"],
                    "target": target_package.category+"/"+target_package.name+
                            "/"+target_package.slot,
            }
            if not value["owner_id"] in self.conditional_versions:
                self.conditional_versions[value["owner_id"]] = [my_item]
            else:
                self.conditional_versions[value["owner_id"]].append(my_item)
    # TODO: I think I must use most professional way for ignore-depends feature.
    if lpms.getopt("--ignore-deps"):
        result = LCollect()
        result.packages = self.packages
        result.dependencies = self.package_dependencies
        result.options = self.package_options
        result.inline_option_targets = self.inline_option_targets
        result.conditional_versions = self.conditional_versions
        result.conflicts = self.conflicts
        return result
    # Workaround for postmerge dependencies
    for (id_dependency, id_package) in self.postmerge_dependencies:
        plan.remove(id_dependency)
        plan.insert(plan.index(id_package)+1, id_dependency)
    final_plan = PackageItem()
    required_package_ids = [package.id for package in self.packages]
    for package_id in plan:
        package = self.package_heap[package_id]
        continue_conditional = False
        # If a package has a conditional decision point,
        # we should consider the condition
        if package.id not in self.conditional_packages:
            for c_package_id in self.conditional_packages:
                c_package = self.package_heap[c_package_id]
                if package.pk == c_package.pk:
                    continue_conditional = True
                    if package_id in required_package_ids:
                        final_plan.add_by_pk(c_package)
                    break
            if package_id in required_package_ids:
                if continue_conditional is False:
                    final_plan.add_by_pk(package)
        if continue_conditional:
            continue
        installed_package = self.instdb.find_package(
                package_category=package.category,
                package_name=package.name,
                package_slot=package.slot
        )
        if installed_package:
            if package.id in self.inline_options:
                if installed_package.get(0).applied_options is None:
                    final_plan.add_by_pk(package)
                    continue
                continue_inline = False
                for inline_option in self.inline_options[package.id]:
                    if not inline_option in installed_package.get(0).applied_options:
                        final_plan.add_by_pk(package)
                        continue_inline = True
                        break
                if continue_inline:
                    continue
            try:
                conditional_versions_query = self.instdb.find_conditional_versions(
                        target=package.category+"/"+package.name+"/"+package.slot)
                if conditional_versions_query:
                    for item in conditional_versions_query:
                        item.decision_point["package_id"] = item.package_id
                        if package.id in self.conditional_packages:
                            if not item.decision_point in self.conditional_packages[package.id]:
                                self.conditional_packages[package.id].append(item.decision_point)
                        else:
                            self.conditional_packages[package.id] = [item.decision_point]
                if package.id in self.conditional_packages:
                    decision_points = self.conditional_packages[package.id]
                    for decision_point in decision_points:
                        comparison = utils.vercmp(installed_package.get(0).version,
                                decision_point["version"])
                        if decision_point["type"] == ">=":
                            if self.handle_condition_conflict(decision_point, final_plan,
                                    package.pk, ("<", ">"), (0, 1)) is False:
                                continue
                            if not comparison in (1, 0) or package.id in required_package_ids:
                                final_plan.add_by_pk(package)
                        elif decision_point["type"] == "<":
                            if self.handle_condition_conflict(decision_point, final_plan,
                                    package.pk, (">", "<"), (0, -1)) is False:
                                continue
                            if comparison != -1:
                                final_plan.add_by_pk(package)
                        elif decision_point["type"] == ">":
                            if self.handle_condition_conflict(decision_point, final_plan,
                                    package.pk, ("<", ">"), (0, 1)) is False:
                                continue
                            if comparison != 1 or package.id in required_package_ids:
                                final_plan.add_by_pk(package)
                        elif decision_point["type"] == "<=":
                            if self.handle_condition_conflict(decision_point, final_plan,
                                    package.pk, (">", "<"), (0, -1)) is False:
                                continue
                            if not comparison in (-1, 0) or package.id in required_package_ids:
                                final_plan.add_by_pk(package)
                        elif decision_point["type"] == "==":
                            if comparison != 0 or package.id in required_package_ids:
                                final_plan.add_by_pk(package)
            except ConditionConflict:
                if not "owner_package" in decision_point:
                    conflict_package = self.instdb.find_package(package_id=\
                            decision_point["package_id"]).get(0)
                    decision_point["owner_package"] = conflict_package.repo+"/"+ \
                            conflict_package.category+"/"+ \
                            conflict_package.name+"/"+ \
                            conflict_package.version
                out.error("while selecting a convenient version of %s, a conflict detected:\n" %
                        out.color(package.pk, "red"))
                out.notify(decision_point["owner_package"]+" wants "+
                        decision_point["type"]+decision_point["version"])
                out.notify(self.conflict_point["owner_package"]+" wants "+
                        self.conflict_point["type"]+self.conflict_point["version"])
                lpms.terminate("\nplease contact the package maintainers.")
            # Use new options if the package is effected
            if self.use_new_options and not package in final_plan:
                if package.id in self.package_options:
                    for option in self.package_options[package.id]:
                        if not option in installed_package.get(0).applied_options:
                            final_plan.add_by_pk(package)
                            break
        else:
            final_plan.add_by_pk(package)
    # Oh my god! Some packages have no dependency.
    if single_packages:
        for single_package in single_packages:
            for item_id in plan:
                if self.package_heap[item_id].pk == single_package.pk:
                    single_packages.remove(single_package)
                    break
        for single_package in single_packages:
            final_plan.insert_into(0, single_package)
    # Create LCollect object to manage package dependency data
    operation_plan = LCollect()
    operation_plan.packages = final_plan
    operation_plan.dependencies = self.package_dependencies
    operation_plan.options = self.package_options
    operation_plan.inline_option_targets = self.inline_option_targets
    operation_plan.conditional_versions = self.conditional_versions
    operation_plan.conflicts = self.conflicts
    return operation_plan
def get_convenient_package(self, package, instdb=False):
    """Resolve a dependency atom (e.g. ">=sys-libs/zlib-1.2:0[option]") to
    the most convenient concrete package object.

    Parameters:
        package -- the raw dependency string from a spec file
        instdb  -- when True, resolve against the installed-packages DB

    Raises:
        DependencyError -- invalid atom, unmet dependency, unavailable arch,
                           or administratively locked package.
    """
    def inline_options_management(inline_options):
        # TODO: inline_options variable must be a set
        # Check inline options, if an option is not available for the package, warn the user
        for inline_option in inline_options:
            if not inline_option in package.options:
                out.warn("%s option is not available for %s/%s/%s-%s. So that the option is removing..." % (
                    inline_option,
                    package.repo,
                    package.category,
                    package.name,
                    package.version
                ))
                inline_options.remove(inline_option)
        if inline_options:
            target = self.current_package.id if self.current_package is not \
                    None else self.parent_package.id
            my_package = package.category+"/"+package.name+"/"+package.slot
            if target in self.inline_option_targets:
                if my_package in self.inline_option_targets[target]:
                    for option in inline_options:
                        self.inline_option_targets[target][my_package].add(option)
                else:
                    self.inline_option_targets[target][my_package] = set(inline_options)
            else:
                self.inline_option_targets[target] = {my_package: set(inline_options)}
            if package.id in self.inline_options:
                if not package.id in self.package_options:
                    self.package_options[package.id] = set()
                for option in inline_options:
                    if not option in self.inline_options[package.id]:
                        self.inline_options[package.id].append(option)
                    if package.id in self.package_options:
                        self.package_options[package.id].add(option)
            else:
                self.inline_options[package.id] = inline_options
                if package.id in self.package_options:
                    for inline_option in inline_options:
                        self.package_options[package.id].add(inline_option)
                else:
                    self.package_options[package.id] = set(inline_options)

    convenient_arches = utils.get_convenient_arches(self.conf.arch)
    current_package = self.parent_package if self.parent_package is not \
            None else self.current_package
    result = LCollect()
    database = self.repodb if instdb is False else self.instdb
    slot = None
    gte, lte, lt, gt, et = False, False, False, False, False
    slot_parsed = package.split(":")
    if len(slot_parsed) == 2:
        data, slot = slot_parsed
    elif len(slot_parsed) > 2:
        # BUGFIX: this branch referenced `data` before assignment (NameError);
        # report the raw atom instead.
        out.error("%s invalid dependency in %s.py" % (package, self.current_package))
        # Use and exception
        raise DependencyError
    else:
        data = package
    # Strip the version-comparison prefix, if any
    if ">=" == data[:2]:
        gte = True
        pkgname = data[2:]
    elif "<=" == data[:2]:
        lte = True
        pkgname = data[2:]
    elif "<" == data[:1]:
        lt = True
        pkgname = data[1:]
    elif ">" == data[:1]:
        gt = True
        pkgname = data[1:]
    elif "==" == data[:2]:
        et = True
        pkgname = data[2:]
    else:
        # No version constraint: plain "category/name[options]" atom
        category, name = data.split("/")
        inline_options = self.parse_inline_options(name)
        if inline_options:
            name = name[:name.index("[")]
        if (category, name) in self.repository_cache:
            results = self.repository_cache[(category, name)]
        else:
            results = database.find_package(package_name=name, package_category=category)
            self.repository_cache[(category, name)] = results
        slot = self.get_convenient_slot(results, slot)
        if not results:
            if instdb:
                return
            current_package = current_package.repo+"/"+current_package.category+\
                    "/"+current_package.name+"-"+current_package.version+":"+current_package.slot
            out.error("unmet dependency: %s depends on %s" % (out.color(current_package, \
                    "red"), out.color(package, "red")))
            raise DependencyError
        try:
            package = utils.get_convenient_package(
                    results,
                    self.locked_packages,
                    self.custom_arch_requests,
                    convenient_arches,
                    self.instdb,
                    slot
            )
        except UnavailablePackage:
            for result in results:
                out.error("%s/%s/%s-%s:%s {%s} is unavailable for your arch(%s)." % (result.repo, result.category, \
                        result.name, result.version, result.slot, result.arch, self.conf.arch))
            out.write("\n")
            out.write("%s %s/%s/%s-%s:%s {%s}\n" % (out.color("->", "brightyellow"), current_package.repo, \
                    current_package.category, current_package.name, current_package.version, \
                    current_package.slot, current_package.arch))
            out.write(" %s %s/%s/%s-%s:%s {%s}\n" % (out.color("->", "brightyellow"), current_package.repo, \
                    current_package.category, current_package.name, current_package.version, \
                    current_package.slot, current_package.arch))
            raise DependencyError
        except LockedPackage:
            out.error("these package(s) is/are locked by the system administrator:")
            for result in results:
                out.error_notify("%s/%s/%s-%s:%s {%s}" % (result.repo, result.category, \
                        result.name, result.version, result.slot, result.arch))
            out.write("\n")
            out.write("%s %s/%s/%s-%s:%s {%s}\n" % (out.color("->", "brightyellow"), current_package.repo, \
                    current_package.category, current_package.name, current_package.version, \
                    current_package.slot, current_package.arch))
            out.write(" %s %s/%s/%s-%s:%s {%s}\n" % (out.color("->", "brightyellow"), current_package.repo, \
                    current_package.category, current_package.name, current_package.version, \
                    current_package.slot, current_package.arch))
            raise DependencyError
        # Set some variables to manage inline options
        inline_options_management(inline_options)
        return package
    # Versioned atom: "category/name-version" with a comparison operator
    category, name = pkgname.split("/")
    inline_options = self.parse_inline_options(name)
    if inline_options:
        name = name[:name.index("[")]
    name, version = utils.parse_pkgname(name)
    if (category, name) in self.repository_cache:
        results = self.repository_cache[(category, name)]
    else:
        results = database.find_package(package_name=name, package_category=category)
        self.repository_cache[(category, name)] = results
    slot = self.get_convenient_slot(results, slot)
    packages = []
    decision_point = {}
    owner_package = current_package.repo+"/"+current_package.category+\
            "/"+current_package.name+"-"+current_package.version
    # Filter candidate versions by the requested comparison and record the
    # decision point so conflicting constraints can be detected later.
    if gte:
        decision_point = {"type": ">=", "version": version, \
                "owner_package": owner_package, "owner_id": current_package.id}
        for result in results:
            comparison = utils.vercmp(result.version, version)
            if comparison == 1 or comparison == 0:
                packages.append(result)
    elif lte:
        decision_point = {"type": "<=", "version": version, \
                "owner_package": owner_package, "owner_id": current_package.id}
        for result in results:
            comparison = utils.vercmp(result.version, version)
            if comparison == -1 or comparison == 0:
                packages.append(result)
    elif lt:
        decision_point = {"type": "<", "version": version, \
                "owner_package": owner_package, "owner_id": current_package.id}
        for result in results:
            comparison = utils.vercmp(result.version, version)
            if comparison == -1:
                packages.append(result)
    elif gt:
        decision_point = {"type": ">", "version": version, \
                "owner_package": owner_package, "owner_id": current_package.id}
        for result in results:
            comparison = utils.vercmp(result.version, version)
            if comparison == 1:
                packages.append(result)
    elif et:
        decision_point = {"type": "==", "version": version, \
                "owner_package": owner_package, "owner_id": current_package.id}
        for result in results:
            comparison = utils.vercmp(result.version, version)
            if comparison == 0:
                packages.append(result)
    if not packages:
        out.error("unmet dependency: %s/%s/%s-%s:%s {%s} depends on %s" % \
                (current_package.repo, \
                current_package.category, \
                current_package.name, \
                current_package.version, \
                current_package.slot, \
                current_package.arch, \
                out.color(package, "red")))
        raise DependencyError
    try:
        package = utils.get_convenient_package(
                results if not packages else packages,
                self.locked_packages,
                self.custom_arch_requests,
                convenient_arches,
                self.instdb,
                slot
        )
    except UnavailablePackage:
        for result in results:
            out.error("%s/%s/%s-%s:%s {%s}is unavailable for your arch(%s)." % (result.repo, result.category, \
                    result.name, result.version, result.slot, result.arch, self.conf.arch))
        out.write("\n")
        out.write("%s %s/%s/%s-%s:%s {%s}\n" % (out.color("->", "brightyellow"), current_package.repo, \
                current_package.category, current_package.name, current_package.version, \
                current_package.slot, current_package.arch))
        out.write(" %s %s/%s/%s-%s:%s {%s}\n" % (out.color("->", "brightyellow"), current_package.repo, \
                current_package.category, current_package.name, current_package.version, \
                current_package.slot, current_package.arch))
        raise DependencyError
    except LockedPackage:
        out.error("these package(s) is/are locked by the system administrator:")
        for result in results:
            out.error_notify("%s/%s/%s-%s:%s {%s}" % (result.repo, result.category, \
                    result.name, result.version, result.slot, result.arch))
        out.write("\n")
        out.write("%s %s/%s/%s-%s:%s {%s}\n" % (out.color("->", "brightyellow"), current_package.repo, \
                current_package.category, current_package.name, current_package.version, \
                current_package.slot, current_package.arch))
        out.write(" %s %s/%s/%s-%s:%s {%s}\n" % (out.color("->", "brightyellow"), current_package.repo, \
                current_package.category, current_package.name, current_package.version, \
                current_package.slot, current_package.arch))
        raise DependencyError
    # Set some variables to manage inline options
    inline_options_management(inline_options)
    if package.id in self.conditional_packages:
        self.conditional_packages[package.id].append(decision_point)
    else:
        self.conditional_packages[package.id] = [decision_point]
    return package
def update_package(self, repo_path, category, my_pkg, my_version = None, update = False):
    """Parse every spec under repo_path/category/my_pkg and insert the
    resulting package records into the repository database.

    Parameters:
        repo_path  -- filesystem path of the repository root
        category   -- package category directory name
        my_pkg     -- package directory name
        my_version -- unused; kept for backward compatibility with callers
        update     -- when True, delete the existing record before inserting
    """
    dataset = LCollect()
    # Register some variables to use after
    self.env.repo = os.path.basename(repo_path)
    self.env.category = category
    dataset.repo = self.env.repo
    dataset.category = category
    os.chdir(os.path.join(repo_path, category, my_pkg))
    for pkg in glob.glob("*"+cst.spec_suffix):
        script_path = os.path.join(repo_path, category, my_pkg, pkg)
        self.env.name, self.env.version = utils.parse_pkgname(pkg.split(cst.spec_suffix)[0])
        dataset.name = self.env.name
        dataset.version = self.env.version
        # FIXME: We must develop a upper-class or environment to
        # use that cases to prevent code duplication
        # Begins code duplication
        interphase = re.search(r'-r[0-9][0-9]', self.env.version)
        if not interphase:
            interphase = re.search(r'-r[0-9]', self.env.version)
        self.env.raw_version = self.env.version
        self.env.revision = ""
        # Now, set real values of these variables if package revisioned.
        if interphase is not None and interphase.group():
            self.env.raw_version = self.env.version.replace(interphase.group(), "")
            self.env.revision = interphase.group()
        # End of code duplication
        self.env.__dict__["fullname"] = self.env.name+"-"+self.env.version
        if not self.import_script(script_path):
            out.error("an error occured while processing the spec: %s" \
                    % out.color(script_path, "red"))
            out.error("please report the above error messages to the package maintainer.")
            continue
        metadata = utils.metadata_parser(self.env.metadata)
        metadata.update({"name": self.env.name, "version": self.env.version})
        # This method checks metadata integrity.
        # It warn the user and pass the spec if a spec is broken
        self.check_metadata_integrity(metadata)
        # These values are optional
        if not "options" in metadata:
            metadata.update({"options": None})
        if not "slot" in metadata:
            metadata.update({"slot": "0"})
        if not "src_url" in metadata:
            metadata.update({"src_url": None})
        if lpms.getopt("--verbose"):
            out.write(" %s-%s\n" % (self.env.name, self.env.version))
        try:
            dataset.summary = metadata['summary']
            dataset.homepage = metadata['homepage']
            dataset.license = metadata['license']
            dataset.src_uri = metadata['src_url']
            if metadata['options'] is None:
                dataset.options = None
            else:
                dataset.options = metadata['options'].split(" ")
            dataset.slot = metadata['slot']
        except KeyError:
            # BUGFIX: the original referenced undefined `repo_name` here,
            # raising NameError instead of printing the intended message.
            out.error("%s/%s/%s-%s: invalid metadata" % (dataset.repo, category, \
                    self.env.name, self.env.version))
            out.warn("repository update was failed and the repository database was removed.")
            out.warn("you can run 'lpms --reload-previous-repodb' command to reload previous db version.")
            lpms.terminate("good luck!")
        if update:
            self.repodb.delete_package(package_repo=dataset.repo, package_category=dataset.category, \
                    package_name=self.env.name, package_version=self.env.version)
        static_depends_runtime = []
        static_depends_build = []
        static_depends_postmerge = []
        static_depends_conflict = []
        if 'depends' in self.env.__dict__.keys():
            deps = utils.depends_parser(self.env.depends)
            if 'runtime' in deps:
                static_depends_runtime.extend(deps['runtime'])
            if 'build' in deps:
                static_depends_build.extend(deps['build'])
            if 'common' in deps:
                # common dependencies apply to both runtime and build
                static_depends_runtime.extend(deps['common'])
                static_depends_build.extend(deps['common'])
            if 'postmerge' in deps:
                static_depends_postmerge.extend(deps['postmerge'])
            if 'conflict' in deps:
                static_depends_conflict.extend(deps['conflict'])
        optional_depends_runtime = []
        optional_depends_build = []
        optional_depends_postmerge = []
        optional_depends_conflict = []
        for opt in ('opt_common', 'opt_conflict', 'opt_postmerge', 'opt_runtime', 'opt_build'):
            try:
                deps = utils.parse_opt_deps(getattr(self.env, opt))
                if opt.split("_")[1] == "runtime":
                    optional_depends_runtime.append(deps)
                elif opt.split("_")[1] == "build":
                    optional_depends_build.append(deps)
                elif opt.split("_")[1] == "common":
                    optional_depends_build.append(deps)
                    optional_depends_runtime.append(deps)
                elif opt.split("_")[1] == "postmerge":
                    optional_depends_postmerge.append(deps)
                elif opt.split("_")[1] == "conflict":
                    optional_depends_conflict.append(deps)
                del deps
            except AttributeError:
                # the spec simply does not define this optional block
                continue
        dataset.optional_depends_runtime = optional_depends_runtime
        dataset.optional_depends_build = optional_depends_build
        dataset.optional_depends_postmerge = optional_depends_postmerge
        dataset.optional_depends_conflict = optional_depends_conflict
        dataset.static_depends_runtime = static_depends_runtime
        dataset.static_depends_build = static_depends_build
        dataset.static_depends_postmerge = static_depends_postmerge
        dataset.static_depends_conflict = static_depends_conflict
        # one row per supported arch; a single NULL-arch row otherwise
        if metadata['arch'] is not None:
            arches = metadata['arch'].split(" ")
            for arch in arches:
                dataset.arch = arch
                self.repodb.insert_package(dataset)
        else:
            dataset.arch = None
            self.repodb.insert_package(dataset)
        # remove optional keys
        for key in ('depends', 'options', 'opt_runtime', 'opt_build', \
                'opt_conflict', 'opt_common', 'opt_postmerge'):
            try:
                del self.env.__dict__[key]
            except KeyError:
                pass
        self.packages_num += 1
def update_package(self, repo_path, category, my_pkg, my_version=None, update=False):
    """Parse every spec under repo_path/category/my_pkg and insert the
    resulting package records into the repository database.

    NOTE(review): this appears to be a near-identical duplicate of another
    update_package definition earlier in this file/chunk — presumably they
    belong to different classes; confirm and deduplicate if not.

    Parameters:
        repo_path  -- filesystem path of the repository root
        category   -- package category directory name
        my_pkg     -- package directory name
        my_version -- unused; kept for backward compatibility with callers
        update     -- when True, delete the existing record before inserting
    """
    dataset = LCollect()
    # Register some variables to use after
    self.env.repo = os.path.basename(repo_path)
    self.env.category = category
    dataset.repo = self.env.repo
    dataset.category = category
    os.chdir(os.path.join(repo_path, category, my_pkg))
    for pkg in glob.glob("*" + cst.spec_suffix):
        script_path = os.path.join(repo_path, category, my_pkg, pkg)
        self.env.name, self.env.version = utils.parse_pkgname(
                pkg.split(cst.spec_suffix)[0])
        dataset.name = self.env.name
        dataset.version = self.env.version
        # FIXME: We must develop a upper-class or environment to
        # use that cases to prevent code duplication
        # Begins code duplication
        interphase = re.search(r'-r[0-9][0-9]', self.env.version)
        if not interphase:
            interphase = re.search(r'-r[0-9]', self.env.version)
        self.env.raw_version = self.env.version
        self.env.revision = ""
        # Now, set real values of these variables if package revisioned.
        if interphase is not None and interphase.group():
            self.env.raw_version = self.env.version.replace(interphase.group(), "")
            self.env.revision = interphase.group()
        # End of code duplication
        self.env.__dict__["fullname"] = self.env.name + "-" + self.env.version
        if not self.import_script(script_path):
            out.error("an error occured while processing the spec: %s" \
                    % out.color(script_path, "red"))
            out.error("please report the above error messages to the package maintainer.")
            continue
        metadata = utils.metadata_parser(self.env.metadata)
        metadata.update({"name": self.env.name, "version": self.env.version})
        # This method checks metadata integrity.
        # It warn the user and pass the spec if a spec is broken
        self.check_metadata_integrity(metadata)
        # These values are optional
        if not "options" in metadata:
            metadata.update({"options": None})
        if not "slot" in metadata:
            metadata.update({"slot": "0"})
        if not "src_url" in metadata:
            metadata.update({"src_url": None})
        if lpms.getopt("--verbose"):
            out.write(" %s-%s\n" % (self.env.name, self.env.version))
        try:
            dataset.summary = metadata['summary']
            dataset.homepage = metadata['homepage']
            dataset.license = metadata['license']
            dataset.src_uri = metadata['src_url']
            if metadata['options'] is None:
                dataset.options = None
            else:
                dataset.options = metadata['options'].split(" ")
            dataset.slot = metadata['slot']
        except KeyError:
            # BUGFIX: the original referenced undefined `repo_name` here,
            # raising NameError instead of printing the intended message.
            out.error("%s/%s/%s-%s: invalid metadata" % (dataset.repo, category, \
                    self.env.name, self.env.version))
            out.warn("repository update was failed and the repository database was removed.")
            out.warn("you can run 'lpms --reload-previous-repodb' command to reload previous db version.")
            lpms.terminate("good luck!")
        if update:
            self.repodb.delete_package(package_repo=dataset.repo, package_category=dataset.category, \
                    package_name=self.env.name, package_version=self.env.version)
        static_depends_runtime = []
        static_depends_build = []
        static_depends_postmerge = []
        static_depends_conflict = []
        if 'depends' in self.env.__dict__.keys():
            deps = utils.depends_parser(self.env.depends)
            if 'runtime' in deps:
                static_depends_runtime.extend(deps['runtime'])
            if 'build' in deps:
                static_depends_build.extend(deps['build'])
            if 'common' in deps:
                # common dependencies apply to both runtime and build
                static_depends_runtime.extend(deps['common'])
                static_depends_build.extend(deps['common'])
            if 'postmerge' in deps:
                static_depends_postmerge.extend(deps['postmerge'])
            if 'conflict' in deps:
                static_depends_conflict.extend(deps['conflict'])
        optional_depends_runtime = []
        optional_depends_build = []
        optional_depends_postmerge = []
        optional_depends_conflict = []
        for opt in ('opt_common', 'opt_conflict', 'opt_postmerge', 'opt_runtime',
                'opt_build'):
            try:
                deps = utils.parse_opt_deps(getattr(self.env, opt))
                if opt.split("_")[1] == "runtime":
                    optional_depends_runtime.append(deps)
                elif opt.split("_")[1] == "build":
                    optional_depends_build.append(deps)
                elif opt.split("_")[1] == "common":
                    optional_depends_build.append(deps)
                    optional_depends_runtime.append(deps)
                elif opt.split("_")[1] == "postmerge":
                    optional_depends_postmerge.append(deps)
                elif opt.split("_")[1] == "conflict":
                    optional_depends_conflict.append(deps)
                del deps
            except AttributeError:
                # the spec simply does not define this optional block
                continue
        dataset.optional_depends_runtime = optional_depends_runtime
        dataset.optional_depends_build = optional_depends_build
        dataset.optional_depends_postmerge = optional_depends_postmerge
        dataset.optional_depends_conflict = optional_depends_conflict
        dataset.static_depends_runtime = static_depends_runtime
        dataset.static_depends_build = static_depends_build
        dataset.static_depends_postmerge = static_depends_postmerge
        dataset.static_depends_conflict = static_depends_conflict
        # one row per supported arch; a single NULL-arch row otherwise
        if metadata['arch'] is not None:
            arches = metadata['arch'].split(" ")
            for arch in arches:
                dataset.arch = arch
                self.repodb.insert_package(dataset)
        else:
            dataset.arch = None
            self.repodb.insert_package(dataset)
        # remove optional keys
        for key in ('depends', 'options', 'opt_runtime', 'opt_build', \
                'opt_conflict', 'opt_common', 'opt_postmerge'):
            try:
                del self.env.__dict__[key]
            except KeyError:
                pass
        self.packages_num += 1
def find_package(self, **kwargs):
    """Query packages and return them as a PackageItem of LCollect objects.

    Keyword arguments (all optional except that package_name or package_id
    must be given): package_name, package_id, package_repo, package_category,
    package_version, package_slot, available_arches.

    Raises:
        DatabaseAPIError -- when neither package_name nor package_id is given.
    """
    object_items = {
        'metadata_keys': {
            0: 'id',
            1: 'repo',
            2: 'category',
            3: 'name',
            4: 'version',
            5: 'slot',
            6: 'arch',
        },
        'dependency_keys': {
            7: 'options',
            8: 'optional_depends_build',
            9: 'optional_depends_runtime',
            10: 'optional_depends_postmerge',
            11: 'optional_depends_conflict',
            12: 'static_depends_build',
            13: 'static_depends_runtime',
            14: 'static_depends_postmerge',
            15: 'static_depends_conflict'
        }
    }
    # Pull the keyword parameters
    name = kwargs.get("package_name", None)
    p_id = kwargs.get("package_id", None)
    if p_id is None and name is None:
        raise DatabaseAPIError("you must give package_name parameter.")
    repo = kwargs.get("package_repo", None)
    category = kwargs.get("package_category", None)
    version = kwargs.get("package_version", None)
    slot = kwargs.get("package_slot", None)
    available_arches = kwargs.get("available_arches", None)
    # Run the query against the underlying database
    package_query = self.database.find_package(
        package_id=p_id,
        package_repo=repo,
        package_category=category,
        package_name=name,
        package_version=version,
        package_slot=slot,
        package_available_arches=available_arches,
    )
    results = PackageItem()
    seen = []
    for package in package_query:
        # Dedup key: [1] => repo, [2] => category, [3] => name,
        # [4] => version, [6] => arch
        identity = (package[1], package[2], package[3], package[4], package[6])
        if identity in seen:
            continue
        seen.append(identity)
        # A fresh LCollect per row so earlier rows are never overridden
        pkg_obj = LCollect()
        for index, item in object_items['metadata_keys'].iteritems():
            setattr(pkg_obj, item, package[index])
        for index, item in object_items['dependency_keys'].iteritems():
            # dependency columns are stored pickled
            setattr(pkg_obj, item, pickle.loads(str(package[index])))
        pkg_obj.pk = pkg_obj.category + "/" + pkg_obj.name + "/" + pkg_obj.slot
        results.add(pkg_obj)
    return results