def main(self):
    """List every file installed by the package(s) named in self.pkgname.

    self.pkgname may be "repo/category/name[-version]", "category/name[-version]"
    or plain "name[-version]"; the lookup against the installed-packages
    database (self.instdb) is narrowed accordingly.  Terminates the process
    via lpms.terminate() on an unparsable name or a package that is not
    installed.
    """
    parsed = self.pkgname.split("/")
    if len(parsed) == 3:
        # Fully qualified: repo/category/name
        repo, category, name = parsed
        name, version = utils.parse_pkgname(name)
        packages = self.instdb.find_package(
            package_repo=repo,
            package_category=category,
            package_name=name,
            package_version=version
        )
    elif len(parsed) == 2:
        # category/name
        category, name = parsed
        name, version = utils.parse_pkgname(name)
        packages = self.instdb.find_package(
            package_category=category,
            package_name=name,
            package_version=version
        )
    elif len(parsed) == 1:
        # bare package name
        name, version = utils.parse_pkgname(self.pkgname)
        packages = self.instdb.find_package(
            package_name=name,
            package_version=version
        )
    else:
        out.error("%s could not be recognized." % self.pkgname)
        lpms.terminate()
    if not packages:
        out.error("%s not installed." % self.pkgname)
        lpms.terminate()
    for package in packages:
        # Maps "<realpath>/" -> "<symlink path>/" for symlinked directories,
        # so files reached through the real path can be shown under the
        # symlinked location as well.
        symdirs = {}
        out.normal("%s/%s/%s-%s" % (package.repo, package.category, \
                package.name, package.version))
        content = self.filesdb.get_paths_by_package(package.name, \
                category=package.category, version=package.version)
        for item in content:
            # DB rows are presumably 1-tuples of unicode paths — encode for
            # the Python 2 filesystem calls below (TODO confirm row shape).
            item = item[0].encode('UTF-8')
            if os.path.islink(item):
                out.write("%s -> %s\n" % (out.color(item, "green"),
                    os.readlink(item)))
                # Remember directory symlinks for the remapping pass below.
                if os.path.isdir(os.path.realpath(item)):
                    symdirs[os.path.realpath(item)+"/"] = item+"/"
            else:
                out.write(item+"\n")
            if symdirs:
                # Show the path a second time, rewritten through any known
                # directory symlink prefix.
                for symdir in symdirs:
                    if item.startswith(symdir):
                        out.write("%s -> %s\n" % (out.color(item.replace(symdir, \
                                symdirs[symdir]), "brightwhite"),
                                out.color(item, "brightwhite")))
def main(self):
    """Print the file list of the installed package(s) matching self.pkgname.

    Accepts "repo/category/name", "category/name" or "name" (each optionally
    carrying a version suffix) and queries the installed-package database
    with whatever qualifiers were supplied.
    """
    parts = self.pkgname.split("/")
    query = {}
    if len(parts) == 3:
        query["package_repo"], query["package_category"], remainder = parts
    elif len(parts) == 2:
        query["package_category"], remainder = parts
    elif len(parts) == 1:
        remainder = self.pkgname
    else:
        out.error("%s could not be recognized." % self.pkgname)
        lpms.terminate()
    query["package_name"], query["package_version"] = utils.parse_pkgname(remainder)
    packages = self.instdb.find_package(**query)
    if not packages:
        out.error("%s not installed." % self.pkgname)
        lpms.terminate()
    for package in packages:
        # "<real dir>/" -> "<symlinked dir>/" remappings seen so far.
        link_dirs = {}
        out.normal("%s/%s/%s-%s" % (package.repo, package.category,
                package.name, package.version))
        rows = self.filesdb.get_paths_by_package(package.name,
                category=package.category, version=package.version)
        for row in rows:
            path = row[0].encode('UTF-8')
            if os.path.islink(path):
                out.write("%s -> %s\n" % (out.color(path, "green"),
                        os.readlink(path)))
                target = os.path.realpath(path)
                if os.path.isdir(target):
                    link_dirs[target + "/"] = path + "/"
            else:
                out.write(path + "\n")
            # Echo the path again through any directory-symlink prefix it
            # falls under (no-op while link_dirs is empty).
            for prefix in link_dirs:
                if path.startswith(prefix):
                    out.write("%s -> %s\n" % (
                            out.color(path.replace(prefix, link_dirs[prefix]),
                                "brightwhite"),
                            out.color(path, "brightwhite")))
def write_archive_hash(urls, file_name):
    """Append "name sha1 size" records to the 'hashes' file for each archive.

    *urls* is the spec's src_url tag; *file_name* is the spec file name, from
    which the package name and version are derived.  Archives missing from
    the source cache are fetched first.
    """
    pkg, ver = utils.parse_pkgname(file_name)
    for source_url in utils.parse_url_tag(urls, pkg, ver):
        tarball = os.path.basename(source_url)
        local_copy = os.path.join(conf.LPMSConfig().src_cache, tarball)
        # Fetch the archive if the cache does not hold it yet.
        if not os.access(local_copy, os.F_OK):
            fetcher.URLFetcher().run([source_url])
        checksum = utils.sha1sum(local_copy)
        shelltools.echo("hashes", "%s %s %s" % (tarball, checksum,
                os.path.getsize(local_copy)))
def write_archive_hash(urls, file_name):
    """Write "archive sha1 size" lines into the 'hashes' file for a spec.

    *file_name* is parsed into (name, version); each URL produced by
    utils.parse_url_tag is resolved to an archive in the source cache,
    downloaded if absent, then hashed and recorded via shelltools.echo.
    """
    name, version = utils.parse_pkgname(file_name)
    for url in utils.parse_url_tag(urls, name, version):
        archive_name = os.path.basename(url)
        archive_path = os.path.join(conf.LPMSConfig().src_cache, archive_name)
        # Download the archive first if it is not cached yet.
        if not os.access(archive_path, os.F_OK):
            fetcher.URLFetcher().run([url])
        sha1 = utils.sha1sum(archive_path)
        shelltools.echo(
            "hashes", "%s %s %s" %
            (archive_name, sha1, os.path.getsize(archive_path)))
def get_versions(self, category, name):
    """Return the version strings installed for *category*/*name*.

    Raises NotInstalled if the files-db directory for the package does not
    exist or holds no entries.
    """
    package_dir = os.path.join(cst.db_path, cst.filesdb, category, name)
    if not os.path.isdir(package_dir):
        raise NotInstalled
    # Entries are "<name>-<version>" plus a fixed 4-character suffix
    # (presumably a file extension — TODO confirm); strip it and keep the
    # version half of parse_pkgname's result.
    versions = [utils.parse_pkgname(entry[:-4])[1]
            for entry in os.listdir(package_dir)]
    if not versions:
        raise NotInstalled
    return versions
def select(self):
    """Resolve self.package to a single concrete package record.

    self.package may be "repo/category/name", "category/name" or "name",
    optionally carrying a ":slot" suffix and a version.  The matching
    candidates are filtered by utils.get_convenient_package using the lock
    list, custom arch requests and the slot.

    Raises PackageNotFound when nothing matches and UnavailablePackage when
    no candidate survives filtering; terminates the process on arch/lock
    failures.
    """
    preform = self.package.split("/")
    if len(preform) == 3:
        self.repo, self.category, fullname = preform
    elif len(preform) == 2:
        self.category, fullname = preform
    elif len(preform) == 1:
        fullname = self.package
    # NOTE(review): when fewer than 3 components are given, self.repo /
    # self.category keep whatever values they already had — presumably
    # defaults set by __init__; confirm.
    if cst.slot_indicator in fullname:
        fullname, self.slot = fullname.split(":")
    self.name, self.version = utils.parse_pkgname(fullname)
    packages = self.database.find_package(package_repo=self.repo, package_name=self.name, \
            package_category=self.category, package_version=self.version)
    if not packages:
        raise PackageNotFound(self.package)
    convenient_arches = utils.get_convenient_arches(self.conf.arch)
    try:
        the_package = utils.get_convenient_package(packages, self.locked_packages, \
                self.custom_arch_request, convenient_arches, self.database, self.slot)
    except UnavailablePackage:
        for package in packages:
            out.error("%s/%s/%s-%s:%s is unavailable for your arch(%s)." % (package.repo, package.category, \
                    package.name, package.version, package.slot, self.conf.arch))
        lpms.terminate()
    except LockedPackage:
        out.error(
            "these package(s) is/are locked by the system administrator:")
        for package in packages:
            out.error_notify("%s/%s/%s-%s:%s" % (package.repo, package.category, \
                    package.name, package.version, package.slot))
        lpms.terminate()
    # NOTE(review): if lpms.terminate() ever returns, the_package would be
    # unbound here — this code assumes terminate() does not return.
    if the_package is None:
        raise UnavailablePackage(self.package)
    return the_package
def select(self):
    """Pick the single best-matching package for self.package.

    Splits self.package into repo/category/name parts (missing parts keep
    their prior attribute values), strips an optional ":slot" suffix, then
    asks utils.get_convenient_package to choose among the database matches.

    Raises PackageNotFound if no rows match, UnavailablePackage if filtering
    yields None; arch and lock violations are reported and terminate lpms.
    """
    preform = self.package.split("/")
    if len(preform) == 3:
        self.repo, self.category, fullname = preform
    elif len(preform) == 2:
        self.category, fullname = preform
    elif len(preform) == 1:
        fullname = self.package
    # Optional ":slot" suffix, e.g. "name:2".
    if cst.slot_indicator in fullname:
        fullname, self.slot = fullname.split(":")
    self.name, self.version = utils.parse_pkgname(fullname)
    packages = self.database.find_package(package_repo=self.repo, package_name=self.name, \
            package_category=self.category, package_version=self.version)
    if not packages:
        raise PackageNotFound(self.package)
    convenient_arches = utils.get_convenient_arches(self.conf.arch)
    try:
        the_package = utils.get_convenient_package(packages, self.locked_packages, \
                self.custom_arch_request, convenient_arches, self.database, self.slot)
    except UnavailablePackage:
        # None of the candidates supports the configured arch.
        for package in packages:
            out.error("%s/%s/%s-%s:%s is unavailable for your arch(%s)." % (package.repo, package.category, \
                    package.name, package.version, package.slot, self.conf.arch))
        lpms.terminate()
    except LockedPackage:
        out.error("these package(s) is/are locked by the system administrator:")
        for package in packages:
            out.error_notify("%s/%s/%s-%s:%s" % (package.repo, package.category, \
                    package.name, package.version, package.slot))
        lpms.terminate()
    if the_package is None:
        raise UnavailablePackage(self.package)
    return the_package
def collect_dependencies(self, package):
    """Compute the effective option set for *package* and return its resolved
    dependency package objects.

    Options are layered in order: global build.conf options, per-package
    user-defined options, command-line options, per-name[-version] custom
    options, inline spec options, and previously-recorded inline options
    from the installed DB.  Static dependency keywords are processed first,
    then optional ones; "*conflict" keywords populate self.conflicts instead
    of the returned list, and "*postmerge" keywords additionally record
    (dependency_id, package_id) pairs in self.postmerge_dependencies.
    """
    dependencies = []
    current_options = set()
    # package.id -> option set active when that dependency was last added;
    # used to avoid re-processing an unchanged dependency.
    already_added = {}

    def process_option(option):
        # "-opt" removes "opt" (if present); anything else is enabled.
        if option.startswith("-"):
            if option[1:] in options:
                options.remove(option[1:])
        else:
            options.add(option)

    # Set the global options(from /etc/lpms/build.conf) for the package
    # NOTE(review): this binds a reference, not a copy — process_option
    # mutates self.global_options in place; confirm that is intended.
    options = self.global_options
    if package.id in self.user_defined_options:
        for option in self.user_defined_options[package.id]:
            process_option(option)

    # Set the options that given via command line
    for command_line_option in self.command_line_options:
        if not command_line_option in options:
            process_option(command_line_option)

    # Set the options that given via command line with package name and version
    for keyword in self.custom_options:
        name, version = utils.parse_pkgname(keyword)
        if package.name == name:
            if version is None:
                for custom_option in self.custom_options[keyword]:
                    process_option(custom_option)
            else:
                if version == package.version:
                    for custom_option in self.custom_options[keyword]:
                        process_option(custom_option)

    # Set inline options. These options are declared in the specs like the
    # following: sys-fs/udev[gudev]
    if package.id in self.inline_options:
        for option in self.inline_options[package.id]:
            process_option(option)

    # Re-apply inline options recorded by previously installed packages that
    # target this category/name/slot, if the owner is still installed.
    previous_targets = self.instdb.find_inline_options(target=package.category+"/"\
            +package.name+"/"+package.slot)
    for previous_target in previous_targets:
        if self.instdb.find_package(package_id=previous_target.package_id):
            for option in previous_target.options:
                process_option(option)

    # Set the options that used by the package
    if package.options:
        for option in options:
            if option in package.options:
                if not package.id in self.package_options:
                    self.package_options[package.id] = set([option])
                    continue
                self.package_options[package.id].add(option)

    def check_conflicts(dependency):
        # Abort if a package already scheduled in this run declared a
        # conflict with *dependency*.
        for item in self.conflicts:
            if dependency.pk in self.conflicts[item]:
                out.error("%s/%s/%s-%s has a conflict with %s" % (
                    self.package_heap[item].repo,
                    self.package_heap[item].category,
                    self.package_heap[item].name,
                    self.package_heap[item].version,
                    dependency.pk)
                )
                out.error("on the other hand, %s/%s/%s-%s wants to install with %s" % (
                    package.repo,
                    package.category,
                    package.name,
                    package.version,
                    dependency.pk
                ))
                raise ConflictError

    #Firstly, check static dependencies
    for keyword in self.dependency_keywords:
        if keyword.startswith("static"):
            for dependency in getattr(package, keyword):
                if keyword.endswith("conflict"):
                    # Conflicts are resolved against the installed DB only.
                    dependency = self.get_convenient_package(dependency, instdb=True)
                    # The package is not installed.
                    if dependency is None:
                        continue
                    self.keep_dependency_information(package.id, keyword, dependency)
                    self.package_heap[dependency.id] = dependency
                    if package.id in self.conflicts:
                        self.conflicts[package.id].add(dependency.pk)
                    else:
                        self.conflicts[package.id] = set([dependency.pk])
                    continue
                proper_dependency = self.get_convenient_package(dependency)
                # TODO: This is a temporary workaround
                # We must implement more proper exception handling mech. and
                # give more informative messages to users.
                if proper_dependency is None:
                    raise UnavailablePackage(dependency)
                # Skip dependencies already handled under the same options.
                if proper_dependency.id in already_added and \
                        already_added[proper_dependency.id] == options:
                    continue
                already_added[proper_dependency.id] = options
                self.keep_dependency_information(package.id, keyword, proper_dependency)
                check_conflicts(proper_dependency)
                dependencies.append(proper_dependency)
                if keyword.endswith("postmerge"):
                    self.postmerge_dependencies.add((proper_dependency.id, package.id))

    # Secondly, Check optional dependencies
    for keyword in self.dependency_keywords:
        if keyword.startswith("optional"):
            for dependency_bundle in getattr(package, keyword):
                instdb = True if keyword.endswith("conflict") else False
                optional_dependencies = self.parse_suboptional_dependencies(dependency_bundle, options, instdb)
                for dependency in optional_dependencies:
                    if keyword.endswith("conflict"):
                        if dependency is None:
                            continue
                        self.keep_dependency_information(package.id, keyword, dependency)
                        self.package_heap[dependency.id] = dependency
                        if package.id in self.conflicts:
                            self.conflicts[package.id].add(dependency.pk)
                        else:
                            self.conflicts[package.id] = set([dependency.pk])
                        continue
                    if dependency.id in already_added and \
                            already_added[dependency.id] == options:
                        continue
                    already_added[dependency.id] = options
                    # NOTE(review): current_options is never populated in
                    # this method, so this branch looks unreachable as
                    # written — confirm against history/callers.
                    if current_options:
                        if package.id in self.package_options:
                            for option in current_options:
                                self.package_options[package.id].add(option)
                        else:
                            self.package_options[package.id] = current_options
                    self.keep_dependency_information(package.id, keyword, dependency)
                    check_conflicts(dependency)
                    dependencies.append(dependency)
                    if keyword.endswith("postmerge"):
                        self.postmerge_dependencies.add((dependency.id, package.id))
    return dependencies
def get_convenient_package(self, package, instdb=False):
    """Resolve a dependency atom string to a concrete package object.

    *package* is a dependency atom such as "category/name", optionally with
    a version comparison prefix (">=", "<=", "<", ">", "=="), an inline
    option list ("cat/name[opt]") and/or a ":slot" suffix.  Lookups go to
    the repository DB, or to the installed DB when *instdb* is True (used
    for conflict resolution; returns None when the atom is not installed).

    Raises DependencyError on malformed atoms or unmet/unsatisfiable
    dependencies.  Side effects: fills self.repository_cache,
    self.inline_option_targets, self.inline_options, self.package_options
    and self.conditional_packages.
    """
    def inline_options_management(inline_options):
        # TODO: inline_options variable must be a set
        # Check inline options, if an option is not available for the
        # package, warn the user.
        # BUGFIX: iterate over a copy — the original iterated the list it
        # was removing from, which skips the element after each removal.
        for inline_option in list(inline_options):
            if not inline_option in package.options:
                out.warn("%s option is not available for %s/%s/%s-%s. So that the option is removing..." % (
                    inline_option, package.repo, package.category, package.name, package.version
                ))
                inline_options.remove(inline_option)
        if inline_options:
            # The requester is the package currently being processed, or its
            # parent when no current package is set.
            target = self.current_package.id if self.current_package is not \
                    None else self.parent_package.id
            my_package = package.category+"/"+package.name+"/"+package.slot
            if target in self.inline_option_targets:
                if my_package in self.inline_option_targets[target]:
                    for option in inline_options:
                        self.inline_option_targets[target][my_package].add(option)
                else:
                    self.inline_option_targets[target][my_package] = set(inline_options)
            else:
                self.inline_option_targets[target] = {my_package: set(inline_options)}
            # Merge the surviving inline options into the per-package
            # bookkeeping structures.
            if package.id in self.inline_options:
                if not package.id in self.package_options:
                    self.package_options[package.id] = set()
                for option in inline_options:
                    if not option in self.inline_options[package.id]:
                        self.inline_options[package.id].append(option)
                        if package.id in self.package_options:
                            self.package_options[package.id].add(option)
            else:
                self.inline_options[package.id] = inline_options
                if package.id in self.package_options:
                    for inline_option in inline_options:
                        self.package_options[package.id].add(inline_option)
                else:
                    self.package_options[package.id] = set(inline_options)

    convenient_arches = utils.get_convenient_arches(self.conf.arch)
    current_package = self.parent_package if self.parent_package is not \
            None else self.current_package
    database = self.repodb if instdb is False else self.instdb
    slot = None
    # Version-comparison flags: >=, <=, <, >, ==
    gte, lte, lt, gt, et = False, False, False, False, False
    slot_parsed = package.split(":")
    if len(slot_parsed) == 2:
        data, slot = slot_parsed
    elif len(slot_parsed) > 2:
        # BUGFIX: this branch previously interpolated `data`, which is not
        # bound yet on this path and raised NameError instead of reporting
        # the malformed atom; report the raw atom instead.
        out.error("%s invalid dependency in %s.py" % (package, self.current_package))
        # Use and exception
        raise DependencyError
    else:
        data = package
    if ">=" == data[:2]:
        gte = True
        pkgname = data[2:]
    elif "<=" == data[:2]:
        lte = True
        pkgname = data[2:]
    elif "<" == data[:1]:
        lt = True
        pkgname = data[1:]
    elif ">" == data[:1]:
        gt = True
        pkgname = data[1:]
    elif "==" == data[:2]:
        et = True
        pkgname = data[2:]
    else:
        # No version constraint: plain "category/name[,inline options]".
        category, name = data.split("/")
        inline_options = self.parse_inline_options(name)
        if inline_options:
            name = name[:name.index("[")]
        if (category, name) in self.repository_cache:
            results = self.repository_cache[(category, name)]
        else:
            results = database.find_package(package_name=name, package_category=category)
            self.repository_cache[(category, name)] = results
        slot = self.get_convenient_slot(results, slot)
        if not results:
            if instdb:
                # Conflict lookup against an uninstalled package: no-op.
                return
            current_package = current_package.repo+"/"+current_package.category+\
                    "/"+current_package.name+"-"+current_package.version+":"+current_package.slot
            out.error("unmet dependency: %s depends on %s" % (out.color(current_package, \
                    "red"), out.color(package, "red")))
            raise DependencyError
        try:
            package = utils.get_convenient_package(
                results,
                self.locked_packages,
                self.custom_arch_requests,
                convenient_arches,
                self.instdb,
                slot
            )
        except UnavailablePackage:
            for result in results:
                out.error("%s/%s/%s-%s:%s {%s} is unavailable for your arch(%s)." % (result.repo, result.category, \
                        result.name, result.version, result.slot, result.arch, self.conf.arch))
            out.write("\n")
            # NOTE(review): both lines below print the requester; the second
            # was probably meant to print the dependency — left as-is.
            out.write("%s %s/%s/%s-%s:%s {%s}\n" % (out.color("->", "brightyellow"), current_package.repo, \
                    current_package.category, current_package.name, current_package.version, \
                    current_package.slot, current_package.arch))
            out.write(" %s %s/%s/%s-%s:%s {%s}\n" % (out.color("->", "brightyellow"), current_package.repo, \
                    current_package.category, current_package.name, current_package.version, \
                    current_package.slot, current_package.arch))
            raise DependencyError
        except LockedPackage:
            out.error("these package(s) is/are locked by the system administrator:")
            for result in results:
                out.error_notify("%s/%s/%s-%s:%s {%s}" % (result.repo, result.category, \
                        result.name, result.version, result.slot, result.arch))
            out.write("\n")
            out.write("%s %s/%s/%s-%s:%s {%s}\n" % (out.color("->", "brightyellow"), current_package.repo, \
                    current_package.category, current_package.name, current_package.version, \
                    current_package.slot, current_package.arch))
            out.write(" %s %s/%s/%s-%s:%s {%s}\n" % (out.color("->", "brightyellow"), current_package.repo, \
                    current_package.category, current_package.name, current_package.version, \
                    current_package.slot, current_package.arch))
            raise DependencyError
        # Set some variables to manage inline options
        inline_options_management(inline_options)
        return package

    # Version-constrained atom: parse name/version and filter candidates.
    category, name = pkgname.split("/")
    inline_options = self.parse_inline_options(name)
    if inline_options:
        name = name[:name.index("[")]
    name, version = utils.parse_pkgname(name)
    if (category, name) in self.repository_cache:
        results = self.repository_cache[(category, name)]
    else:
        results = database.find_package(package_name=name, package_category=category)
        self.repository_cache[(category, name)] = results
    slot = self.get_convenient_slot(results, slot)
    packages = []
    decision_point = {}
    owner_package = current_package.repo+"/"+current_package.category+\
            "/"+current_package.name+"-"+current_package.version
    if gte:
        decision_point = {"type": ">=", "version": version, \
                "owner_package": owner_package, "owner_id": current_package.id}
        for result in results:
            comparison = utils.vercmp(result.version, version)
            if comparison == 1 or comparison == 0:
                packages.append(result)
    elif lte:
        decision_point = {"type": "<=", "version": version, \
                "owner_package": owner_package, "owner_id": current_package.id}
        for result in results:
            comparison = utils.vercmp(result.version, version)
            if comparison == -1 or comparison == 0:
                packages.append(result)
    elif lt:
        decision_point = {"type": "<", "version": version, \
                "owner_package": owner_package, "owner_id": current_package.id}
        for result in results:
            comparison = utils.vercmp(result.version, version)
            if comparison == -1:
                packages.append(result)
    elif gt:
        decision_point = {"type": ">", "version": version, \
                "owner_package": owner_package, "owner_id": current_package.id}
        for result in results:
            comparison = utils.vercmp(result.version, version)
            if comparison == 1:
                packages.append(result)
    elif et:
        decision_point = {"type": "==", "version": version, \
                "owner_package": owner_package, "owner_id": current_package.id}
        for result in results:
            comparison = utils.vercmp(result.version, version)
            if comparison == 0:
                packages.append(result)
    if not packages:
        out.error("unmet dependency: %s/%s/%s-%s:%s {%s} depends on %s" % \
                (current_package.repo, \
                current_package.category, \
                current_package.name, \
                current_package.version, \
                current_package.slot, \
                current_package.arch, \
                out.color(package, "red")))
        raise DependencyError
    try:
        package = utils.get_convenient_package(
            results if not packages else packages,
            self.locked_packages,
            self.custom_arch_requests,
            convenient_arches,
            self.instdb,
            slot
        )
    except UnavailablePackage:
        for result in results:
            out.error("%s/%s/%s-%s:%s {%s}is unavailable for your arch(%s)." % (result.repo, result.category, \
                    result.name, result.version, result.slot, result.arch, self.conf.arch))
        out.write("\n")
        out.write("%s %s/%s/%s-%s:%s {%s}\n" % (out.color("->", "brightyellow"), current_package.repo, \
                current_package.category, current_package.name, current_package.version, \
                current_package.slot, current_package.arch))
        out.write(" %s %s/%s/%s-%s:%s {%s}\n" % (out.color("->", "brightyellow"), current_package.repo, \
                current_package.category, current_package.name, current_package.version, \
                current_package.slot, current_package.arch))
        raise DependencyError
    except LockedPackage:
        out.error("these package(s) is/are locked by the system administrator:")
        for result in results:
            out.error_notify("%s/%s/%s-%s:%s {%s}" % (result.repo, result.category, \
                    result.name, result.version, result.slot, result.arch))
        out.write("\n")
        out.write("%s %s/%s/%s-%s:%s {%s}\n" % (out.color("->", "brightyellow"), current_package.repo, \
                current_package.category, current_package.name, current_package.version, \
                current_package.slot, current_package.arch))
        out.write(" %s %s/%s/%s-%s:%s {%s}\n" % (out.color("->", "brightyellow"), current_package.repo, \
                current_package.category, current_package.name, current_package.version, \
                current_package.slot, current_package.arch))
        raise DependencyError
    # Set some variables to manage inline options
    inline_options_management(inline_options)
    # Record the version constraint so later passes can re-check it.
    if package.id in self.conditional_packages:
        self.conditional_packages[package.id].append(decision_point)
    else:
        self.conditional_packages[package.id] = [decision_point]
    return package
def update_package(self, repo_path, category, my_pkg, my_version = None, update = False):
    """Parse every spec under repo_path/category/my_pkg and (re)insert it
    into the repository database.

    When *update* is True the existing rows for each spec are deleted
    first.  Broken specs are reported and skipped; invalid metadata aborts
    lpms.  Increments self.packages_num per processed spec.
    """
    dataset = LCollect()
    # Register some variables to use after
    self.env.repo = os.path.basename(repo_path)
    self.env.category = category
    dataset.repo = self.env.repo
    dataset.category = category
    os.chdir(os.path.join(repo_path, category, my_pkg))
    for pkg in glob.glob("*"+cst.spec_suffix):
        script_path = os.path.join(repo_path, category, my_pkg, pkg)
        self.env.name, self.env.version = utils.parse_pkgname(pkg.split(cst.spec_suffix)[0])
        dataset.name = self.env.name
        dataset.version = self.env.version
        # FIXME: We must develop a upper-class or environment to
        # use that cases to prevent code duplication
        # Begins code duplication
        interphase = re.search(r'-r[0-9][0-9]', self.env.version)
        if not interphase:
            interphase = re.search(r'-r[0-9]', self.env.version)
        self.env.raw_version = self.env.version
        self.env.revision = ""
        # Now, set real values of these variables if package revisioned.
        if interphase is not None and interphase.group():
            self.env.raw_version = self.env.version.replace(interphase.group(), "")
            self.env.revision = interphase.group()
        # End of code duplication
        self.env.__dict__["fullname"] = self.env.name+"-"+self.env.version
        if not self.import_script(script_path):
            out.error("an error occured while processing the spec: %s" \
                    % out.color(script_path, "red"))
            out.error("please report the above error messages to the package maintainer.")
            continue
        metadata = utils.metadata_parser(self.env.metadata)
        metadata.update({"name": self.env.name, "version": self.env.version})
        # This method checks metadata integrity. It warn the user and pass
        # the spec if a spec is broken
        self.check_metadata_integrity(metadata)
        # These values are optional
        if not "options" in metadata:
            metadata.update({"options": None})
        if not "slot" in metadata:
            metadata.update({"slot": "0"})
        if not "src_url" in metadata:
            metadata.update({"src_url": None})
        if lpms.getopt("--verbose"):
            out.write(" %s-%s\n" % (self.env.name, self.env.version))
        try:
            dataset.summary = metadata['summary']
            dataset.homepage = metadata['homepage']
            dataset.license = metadata['license']
            dataset.src_uri = metadata['src_url']
            if metadata['options'] is None:
                dataset.options = None
            else:
                dataset.options = metadata['options'].split(" ")
            dataset.slot = metadata['slot']
        except KeyError:
            # BUGFIX: this handler previously interpolated `repo_name`,
            # which is not defined in this scope and raised NameError
            # instead of the intended message; use dataset.repo.
            out.error("%s/%s/%s-%s: invalid metadata" % (dataset.repo, category, \
                    self.env.name, self.env.version))
            out.warn("repository update was failed and the repository database was removed.")
            out.warn("you can run 'lpms --reload-previous-repodb' command to reload previous db version.")
            lpms.terminate("good luck!")
        if update:
            self.repodb.delete_package(package_repo=dataset.repo, package_category=dataset.category, \
                    package_name=self.env.name, package_version=self.env.version)
        static_depends_runtime = []
        static_depends_build = []
        static_depends_postmerge = []
        static_depends_conflict = []
        if 'depends' in self.env.__dict__:
            deps = utils.depends_parser(self.env.depends)
            if 'runtime' in deps:
                static_depends_runtime.extend(deps['runtime'])
            if 'build' in deps:
                static_depends_build.extend(deps['build'])
            if 'common' in deps:
                # "common" dependencies count for both runtime and build.
                static_depends_runtime.extend(deps['common'])
                static_depends_build.extend(deps['common'])
            if 'postmerge' in deps:
                static_depends_postmerge.extend(deps['postmerge'])
            if 'conflict' in deps:
                static_depends_conflict.extend(deps['conflict'])
        optional_depends_runtime = []
        optional_depends_build = []
        optional_depends_postmerge = []
        optional_depends_conflict = []
        for opt in ('opt_common', 'opt_conflict', 'opt_postmerge', 'opt_runtime',
                'opt_build'):
            try:
                deps = utils.parse_opt_deps(getattr(self.env, opt))
                # Hoisted: the kind is the part after "opt_".
                kind = opt.split("_")[1]
                if kind == "runtime":
                    optional_depends_runtime.append(deps)
                elif kind == "build":
                    optional_depends_build.append(deps)
                elif kind == "common":
                    optional_depends_build.append(deps)
                    optional_depends_runtime.append(deps)
                elif kind == "postmerge":
                    optional_depends_postmerge.append(deps)
                elif kind == "conflict":
                    optional_depends_conflict.append(deps)
                del deps
            except AttributeError:
                # The spec simply does not declare this opt_* attribute.
                continue
        dataset.optional_depends_runtime = optional_depends_runtime
        dataset.optional_depends_build = optional_depends_build
        dataset.optional_depends_postmerge = optional_depends_postmerge
        dataset.optional_depends_conflict = optional_depends_conflict
        dataset.static_depends_runtime = static_depends_runtime
        dataset.static_depends_build = static_depends_build
        dataset.static_depends_postmerge = static_depends_postmerge
        dataset.static_depends_conflict = static_depends_conflict
        if metadata['arch'] is not None:
            # One DB row per supported arch.
            arches = metadata['arch'].split(" ")
            for arch in arches:
                dataset.arch = arch
                self.repodb.insert_package(dataset)
        else:
            dataset.arch = None
            self.repodb.insert_package(dataset)
        # remove optional keys
        for key in ('depends', 'options', 'opt_runtime', 'opt_build', \
                'opt_conflict', 'opt_common', 'opt_postmerge'):
            try:
                del self.env.__dict__[key]
            except KeyError:
                pass
        self.packages_num += 1
def main(params):
    """Entry point for the repository-update operation.

    With no params, every available repository is updated inside one
    transaction (after backing up the current DB).  Otherwise params[0]
    selects the target: ".", "repo", "repo/category" or
    "[=]repo/category/name[-version]".  Finally, repositories that are no
    longer available are dropped and the DB connection is closed.
    """
    # determine operation type
    repo_name = None
    if params:
        repo_name = params[0]
    # create operation object
    operation = Update()
    repo_num = 0
    if repo_name is None:
        # firstly, lpms tries to create a copy of current repository database.
        db_backup()
        out.normal("updating repository database...")
        operation.repodb.database.begin_transaction()
        for repo_name in os.listdir(cst.repos):
            if not repo_name in utils.available_repositories():
                continue
            if os.path.isfile(os.path.join(cst.repos, repo_name, "info/repo.conf")):
                out.write(out.color(" * ", "red") + repo_name+"\n")
                operation.update_repository(repo_name)
                repo_num += 1
        operation.repodb.database.commit()
        out.normal("%s repository(ies) is/are updated." % repo_num)
    else:
        if repo_name == ".":
            # Resolve the current working directory to a repository name.
            current_path = os.getcwd()
            for repo_path in [os.path.join(cst.repos, item) \
                    for item in utils.available_repositories()]:
                if current_path == repo_path or len(current_path.split(repo_path)) == 2:
                    # convert it a valid repo_name variable from the path
                    repo_name = current_path.split(cst.repos)[1][1:]
                    break
            if repo_name == ".":
                out.warn("%s does not seem a valid repository path." % \
                        out.color(current_path, "red"))
                lpms.terminate()
        if len(repo_name.split("/")) == 2:
            # "repo/category": update every package directory in the category.
            out.normal("updating %s" % repo_name)
            repo, category = repo_name.split("/")
            repo_path = os.path.join(cst.repos, repo)
            if not repo in utils.available_repositories():
                out.error("%s is not a repository." % out.color(repo, "red"))
                lpms.terminate()
            operation.repodb.database.begin_transaction()
            for pkg in os.listdir(os.path.join(repo_path, category)):
                try:
                    operation.update_package(repo_path, category, pkg, update=True)
                except IntegrityError:
                    continue
            operation.repodb.database.commit()
        elif len(repo_name.split("/")) == 3:
            # "repo/category/name" — a "=" prefix means an exact version
            # was appended to the name.
            version = None
            repo, category, name = repo_name.split("/")
            if repo.startswith("="):
                repo = repo[1:]
                try:
                    name, version = utils.parse_pkgname(name)
                except TypeError:
                    out.error("you should give a version number")
                    lpms.terminate()
            else:
                # A versioned name without the "=" prefix is rejected.
                if utils.parse_pkgname(name) is not None and len(utils.parse_pkgname(name)) == 2:
                    out.error("you must use %s" % (out.color("="+repo_name, "red")))
                    lpms.terminate()
            if not repo in utils.available_repositories():
                out.error("%s is not a repository." % out.color(repo, "red"))
                lpms.terminate()
            repo_path = os.path.join(cst.repos, repo)
            out.normal("updating %s/%s/%s" % (repo, category, name))
            operation.repodb.database.begin_transaction()
            operation.update_package(repo_path, category, name, my_version = version, update = True)
            operation.repodb.database.commit()
        else:
            # Bare repository name: update the whole repository.
            if not repo_name in utils.available_repositories():
                out.error("%s is not a repository." % out.color(repo_name, "red"))
                lpms.terminate()
            repo_dir = os.path.join(cst.repos, repo_name)
            if os.path.isdir(repo_dir):
                repo_path = os.path.join(repo_dir, cst.repo_file)
                if os.path.isfile(repo_path):
                    operation.repodb.database.begin_transaction()
                    out.normal("updating repository: %s" % out.color(repo_name, "green"))
                    operation.update_repository(repo_name)
                    operation.repodb.database.commit()
                else:
                    # Note: "%" binds before "+", so the message reads
                    # "... in <repo_dir>/info".
                    lpms.terminate("repo.conf file could not found in %s" % repo_dir+"/info")
            else:
                lpms.terminate("repo.conf not found in %s" % os.path.join(cst.repos, repo_name))
    out.normal("Total %s packages have been processed." % operation.packages_num)
    # Drop inactive repository from the database
    for name in operation.repodb.get_repository_names():
        if not name in utils.available_repositories():
            operation.repodb.delete_repository(name, commit=True)
            out.warn("%s dropped." % name)
    # Close the database connection
    operation.repodb.database.close()
def update_package(self, repo_path, category, my_pkg, my_version=None, update=False):
    """Parse each spec in repo_path/category/my_pkg and insert it into the
    repository database, deleting the old rows first when *update* is True.

    Broken specs are reported and skipped; invalid metadata terminates
    lpms.  Increments self.packages_num per processed spec.
    """
    dataset = LCollect()
    # Register some variables to use after
    self.env.repo = os.path.basename(repo_path)
    self.env.category = category
    dataset.repo = self.env.repo
    dataset.category = category
    os.chdir(os.path.join(repo_path, category, my_pkg))
    for pkg in glob.glob("*" + cst.spec_suffix):
        script_path = os.path.join(repo_path, category, my_pkg, pkg)
        self.env.name, self.env.version = utils.parse_pkgname(
            pkg.split(cst.spec_suffix)[0])
        dataset.name = self.env.name
        dataset.version = self.env.version
        # FIXME: We must develop a upper-class or environment to
        # use that cases to prevent code duplication
        # Begins code duplication
        # Detect a trailing revision suffix ("-rN" / "-rNN") in the version.
        interphase = re.search(r'-r[0-9][0-9]', self.env.version)
        if not interphase:
            interphase = re.search(r'-r[0-9]', self.env.version)
        self.env.raw_version = self.env.version
        self.env.revision = ""
        # Now, set real values of these variables if package revisioned.
        if interphase is not None and interphase.group():
            self.env.raw_version = self.env.version.replace(
                interphase.group(), "")
            self.env.revision = interphase.group()
        # End of code duplication
        self.env.__dict__[
            "fullname"] = self.env.name + "-" + self.env.version
        if not self.import_script(script_path):
            out.error("an error occured while processing the spec: %s" \
                    % out.color(script_path, "red"))
            out.error(
                "please report the above error messages to the package maintainer."
            )
            continue
        metadata = utils.metadata_parser(self.env.metadata)
        metadata.update({
            "name": self.env.name,
            "version": self.env.version
        })
        # This method checks metadata integrity. It warn the user and pass
        # the spec if a spec is broken
        self.check_metadata_integrity(metadata)
        # These values are optional
        if not "options" in metadata:
            metadata.update({"options": None})
        if not "slot" in metadata:
            metadata.update({"slot": "0"})
        if not "src_url" in metadata:
            metadata.update({"src_url": None})
        if lpms.getopt("--verbose"):
            out.write(" %s-%s\n" % (self.env.name, self.env.version))
        try:
            dataset.summary = metadata['summary']
            dataset.homepage = metadata['homepage']
            dataset.license = metadata['license']
            dataset.src_uri = metadata['src_url']
            if metadata['options'] is None:
                dataset.options = None
            else:
                dataset.options = metadata['options'].split(" ")
            dataset.slot = metadata['slot']
        except KeyError as err:
            # NOTE(review): `repo_name` is not defined in this scope — this
            # line raises NameError when reached (and `err` is unused);
            # probably dataset.repo was intended. Confirm and fix.
            out.error("%s/%s/%s-%s: invalid metadata" % (repo_name, category, \
                    self.env.name, self.env.version))
            out.warn(
                "repository update was failed and the repository database was removed."
            )
            out.warn(
                "you can run 'lpms --reload-previous-repodb' command to reload previous db version."
            )
            lpms.terminate("good luck!")
        if update:
            self.repodb.delete_package(package_repo=dataset.repo, package_category=dataset.category, \
                    package_name=self.env.name, package_version=self.env.version)
        static_depends_runtime = []
        static_depends_build = []
        static_depends_postmerge = []
        static_depends_conflict = []
        if 'depends' in self.env.__dict__.keys():
            deps = utils.depends_parser(self.env.depends)
            if 'runtime' in deps:
                static_depends_runtime.extend(deps['runtime'])
            if 'build' in deps:
                static_depends_build.extend(deps['build'])
            if 'common' in deps:
                # "common" feeds both runtime and build lists.
                static_depends_runtime.extend(deps['common'])
                static_depends_build.extend(deps['common'])
            if 'postmerge' in deps:
                static_depends_postmerge.extend(deps['postmerge'])
            if 'conflict' in deps:
                static_depends_conflict.extend(deps['conflict'])
        optional_depends_runtime = []
        optional_depends_build = []
        optional_depends_postmerge = []
        optional_depends_conflict = []
        for opt in ('opt_common', 'opt_conflict', 'opt_postmerge',
                'opt_runtime', 'opt_build'):
            try:
                deps = utils.parse_opt_deps(getattr(self.env, opt))
                if opt.split("_")[1] == "runtime":
                    optional_depends_runtime.append(deps)
                elif opt.split("_")[1] == "build":
                    optional_depends_build.append(deps)
                elif opt.split("_")[1] == "common":
                    optional_depends_build.append(deps)
                    optional_depends_runtime.append(deps)
                elif opt.split("_")[1] == "postmerge":
                    optional_depends_postmerge.append(deps)
                elif opt.split("_")[1] == "conflict":
                    optional_depends_conflict.append(deps)
                del deps
            except AttributeError:
                # The spec does not declare this opt_* attribute.
                continue
        dataset.optional_depends_runtime = optional_depends_runtime
        dataset.optional_depends_build = optional_depends_build
        dataset.optional_depends_postmerge = optional_depends_postmerge
        dataset.optional_depends_conflict = optional_depends_conflict
        dataset.static_depends_runtime = static_depends_runtime
        dataset.static_depends_build = static_depends_build
        dataset.static_depends_postmerge = static_depends_postmerge
        dataset.static_depends_conflict = static_depends_conflict
        if metadata['arch'] is not None:
            # One insert per declared arch.
            arches = metadata['arch'].split(" ")
            for arch in arches:
                dataset.arch = arch
                self.repodb.insert_package(dataset)
        else:
            dataset.arch = None
            self.repodb.insert_package(dataset)
        # remove optional keys
        for key in ('depends', 'options', 'opt_runtime', 'opt_build', \
                'opt_conflict', 'opt_common', 'opt_postmerge'):
            try:
                del self.env.__dict__[key]
            except KeyError:
                pass
        self.packages_num += 1
def main(params):
    """Entry point for the repository update operation.

    params: optional argument list.  When empty, every available repository
    is updated.  Otherwise params[0] selects the target:
      - "."                  : the repository containing the current directory
      - "repo"               : one whole repository
      - "repo/category"      : every package in one category
      - "repo/category/name" : a single package; prefix the whole spec with
        "=" (e.g. "=repo/category/name-1.0") to pin an exact version.

    Always finishes by dropping repositories that are no longer available
    and closing the database connection.
    """
    repo_name = params[0] if params else None
    operation = Update()

    if repo_name is None:
        _update_all_repositories(operation)
    else:
        if repo_name == ".":
            repo_name = _repo_name_from_cwd()
        parts = repo_name.split("/")
        if len(parts) == 2:
            _update_category(operation, repo_name)
        elif len(parts) == 3:
            _update_single_package(operation, repo_name)
        else:
            _update_whole_repository(operation, repo_name)
        out.normal("Total %s packages have been processed." % operation.packages_num)

    # Drop inactive repository from the database
    for name in operation.repodb.get_repository_names():
        if name not in utils.available_repositories():
            operation.repodb.delete_repository(name, commit=True)
            out.warn("%s dropped." % name)
    # Close the database connection
    operation.repodb.database.close()


def _update_all_repositories(operation):
    """Update every available repository inside one transaction."""
    # firstly, lpms tries to create a copy of current repository database.
    db_backup()
    out.normal("updating repository database...")
    repo_num = 0
    operation.repodb.database.begin_transaction()
    for repo_name in os.listdir(cst.repos):
        if repo_name not in utils.available_repositories():
            continue
        if os.path.isfile(os.path.join(cst.repos, repo_name, "info/repo.conf")):
            out.write(out.color(" * ", "red") + repo_name + "\n")
            operation.update_repository(repo_name)
            repo_num += 1
    operation.repodb.database.commit()
    out.normal("%s repository(ies) is/are updated." % repo_num)


def _repo_name_from_cwd():
    """Translate the current working directory into a repo name, or terminate."""
    current_path = os.getcwd()
    for repo_path in [os.path.join(cst.repos, item)
                      for item in utils.available_repositories()]:
        # The original test, len(current_path.split(repo_path)) == 2, also
        # matched the repository path embedded anywhere inside the cwd; a
        # prefix test expresses the actual intent.
        if current_path == repo_path or current_path.startswith(repo_path + os.sep):
            # convert it a valid repo_name variable from the path
            return current_path.split(cst.repos)[1][1:]
    out.warn("%s does not seem a valid repository path." %
             out.color(current_path, "red"))
    lpms.terminate()


def _require_repository(repo):
    """Terminate with an error message unless repo is an available repository."""
    if repo not in utils.available_repositories():
        out.error("%s is not a repository." % out.color(repo, "red"))
        lpms.terminate()


def _update_category(operation, repo_name):
    """Update every package under a 'repo/category' spec."""
    out.normal("updating %s" % repo_name)
    repo, category = repo_name.split("/")
    repo_path = os.path.join(cst.repos, repo)
    _require_repository(repo)
    operation.repodb.database.begin_transaction()
    for pkg in os.listdir(os.path.join(repo_path, category)):
        try:
            operation.update_package(repo_path, category, pkg, update=True)
        except IntegrityError:
            # skip packages that are already present; keep updating the rest
            continue
    operation.repodb.database.commit()


def _update_single_package(operation, repo_name):
    """Update one package: 'repo/category/name' or '=repo/category/name-version'."""
    version = None
    repo, category, name = repo_name.split("/")
    if repo.startswith("="):
        # "=" pins an exact version; parse_pkgname must yield (name, version)
        repo = repo[1:]
        try:
            name, version = utils.parse_pkgname(name)
        except TypeError:
            out.error("you should give a version number")
            lpms.terminate()
    else:
        # parse once (the original called parse_pkgname twice for the same value)
        parsed = utils.parse_pkgname(name)
        if parsed is not None and len(parsed) == 2:
            out.error("you must use %s" % (out.color("=" + repo_name, "red")))
            lpms.terminate()
    _require_repository(repo)
    repo_path = os.path.join(cst.repos, repo)
    out.normal("updating %s/%s/%s" % (repo, category, name))
    operation.repodb.database.begin_transaction()
    operation.update_package(repo_path, category, name, my_version=version, update=True)
    operation.repodb.database.commit()


def _update_whole_repository(operation, repo_name):
    """Update a complete repository selected by its plain name."""
    _require_repository(repo_name)
    repo_dir = os.path.join(cst.repos, repo_name)
    if not os.path.isdir(repo_dir):
        lpms.terminate("repo.conf not found in %s" % os.path.join(cst.repos, repo_name))
    repo_path = os.path.join(repo_dir, cst.repo_file)
    if not os.path.isfile(repo_path):
        # message grammar fixed ("could not found" -> "could not be found")
        lpms.terminate("repo.conf file could not be found in %s" % repo_dir + "/info")
    operation.repodb.database.begin_transaction()
    out.normal("updating repository: %s" % out.color(repo_name, "green"))
    operation.update_repository(repo_name)
    operation.repodb.database.commit()