def update_package(self, repo_path, category, my_pkg, my_version=None, update=False):
    """Scan every spec file under repo_path/category/my_pkg and insert its
    metadata and dependency data into the repository database.

    Args:
        repo_path:  filesystem path of the repository root.
        category:   category name (subdirectory of the repository).
        my_pkg:     package directory name inside the category.
        my_version: unused here; kept for interface compatibility.
        update:     when True, delete any existing database rows for each
                    package version before re-inserting them.

    Side effects: changes the process working directory, mutates self.env,
    writes to the repository database and increments self.packages_num.
    On broken metadata it terminates the whole program via lpms.terminate().
    """
    dataset = LCollect()
    # Register some variables to use after
    self.env.repo = os.path.basename(repo_path)
    self.env.category = category
    dataset.repo = self.env.repo
    dataset.category = category
    os.chdir(os.path.join(repo_path, category, my_pkg))
    for pkg in glob.glob("*" + cst.spec_suffix):
        script_path = os.path.join(repo_path, category, my_pkg, pkg)
        self.env.name, self.env.version = utils.parse_pkgname(
            pkg.split(cst.spec_suffix)[0])
        dataset.name = self.env.name
        dataset.version = self.env.version
        # FIXME: We must develop an upper-class or environment to
        # use these cases to prevent code duplication
        # Begins code duplication
        # Detect a trailing revision tag (-rN or -rNN) in the version string.
        interphase = re.search(r'-r[0-9][0-9]', self.env.version)
        if not interphase:
            interphase = re.search(r'-r[0-9]', self.env.version)
        self.env.raw_version = self.env.version
        self.env.revision = ""
        # Now, set real values of these variables if the package is revisioned.
        if interphase is not None and interphase.group():
            self.env.raw_version = self.env.version.replace(interphase.group(), "")
            self.env.revision = interphase.group()
        # End of code duplication
        self.env.__dict__["fullname"] = self.env.name + "-" + self.env.version
        if not self.import_script(script_path):
            out.error("an error occured while processing the spec: %s" \
                    % out.color(script_path, "red"))
            out.error("please report the above error messages to the package maintainer.")
            continue
        metadata = utils.metadata_parser(self.env.metadata)
        metadata.update({"name": self.env.name, "version": self.env.version})
        # This method checks metadata integrity.
        # It warns the user and passes the spec if a spec is broken
        self.check_metadata_integrity(metadata)
        # These values are optional
        if not "options" in metadata:
            metadata.update({"options": None})
        if not "slot" in metadata:
            metadata.update({"slot": "0"})
        if not "src_url" in metadata:
            metadata.update({"src_url": None})
        if lpms.getopt("--verbose"):
            out.write("  %s-%s\n" % (self.env.name, self.env.version))
        try:
            dataset.summary = metadata['summary']
            dataset.homepage = metadata['homepage']
            dataset.license = metadata['license']
            dataset.src_uri = metadata['src_url']
            if metadata['options'] is None:
                dataset.options = None
            else:
                dataset.options = metadata['options'].split(" ")
            dataset.slot = metadata['slot']
        except KeyError:
            # BUGFIX: the original referenced an undefined name `repo_name`
            # here, which raised NameError and masked the intended message.
            out.error("%s/%s/%s-%s: invalid metadata" % (dataset.repo, category, \
                    self.env.name, self.env.version))
            out.warn("repository update was failed and the repository database was removed.")
            out.warn("you can run 'lpms --reload-previous-repodb' command to reload previous db version.")
            lpms.terminate("good luck!")
        if update:
            # Re-insert mode: drop the stale rows for this exact version first.
            self.repodb.delete_package(package_repo=dataset.repo,
                    package_category=dataset.category, \
                    package_name=self.env.name, package_version=self.env.version)
        # Static (unconditional) dependencies, grouped by phase.
        static_depends_runtime = []
        static_depends_build = []
        static_depends_postmerge = []
        static_depends_conflict = []
        if 'depends' in self.env.__dict__:
            deps = utils.depends_parser(self.env.depends)
            if 'runtime' in deps:
                static_depends_runtime.extend(deps['runtime'])
            if 'build' in deps:
                static_depends_build.extend(deps['build'])
            if 'common' in deps:
                # 'common' dependencies apply to both runtime and build.
                static_depends_runtime.extend(deps['common'])
                static_depends_build.extend(deps['common'])
            if 'postmerge' in deps:
                static_depends_postmerge.extend(deps['postmerge'])
            if 'conflict' in deps:
                static_depends_conflict.extend(deps['conflict'])
        # Optional (option-conditional) dependencies, grouped by phase.
        optional_depends_runtime = []
        optional_depends_build = []
        optional_depends_postmerge = []
        optional_depends_conflict = []
        for opt in ('opt_common', 'opt_conflict', 'opt_postmerge',
                'opt_runtime', 'opt_build'):
            try:
                deps = utils.parse_opt_deps(getattr(self.env, opt))
                kind = opt.split("_")[1]
                if kind == "runtime":
                    optional_depends_runtime.append(deps)
                elif kind == "build":
                    optional_depends_build.append(deps)
                elif kind == "common":
                    # 'common' feeds both the build and runtime buckets.
                    optional_depends_build.append(deps)
                    optional_depends_runtime.append(deps)
                elif kind == "postmerge":
                    optional_depends_postmerge.append(deps)
                elif kind == "conflict":
                    optional_depends_conflict.append(deps)
                del deps
            except AttributeError:
                # The spec simply did not define this opt_* attribute.
                continue
        dataset.optional_depends_runtime = optional_depends_runtime
        dataset.optional_depends_build = optional_depends_build
        dataset.optional_depends_postmerge = optional_depends_postmerge
        dataset.optional_depends_conflict = optional_depends_conflict
        dataset.static_depends_runtime = static_depends_runtime
        dataset.static_depends_build = static_depends_build
        dataset.static_depends_postmerge = static_depends_postmerge
        dataset.static_depends_conflict = static_depends_conflict
        if metadata['arch'] is not None:
            # One database row per supported architecture.
            for arch in metadata['arch'].split(" "):
                dataset.arch = arch
                self.repodb.insert_package(dataset)
        else:
            dataset.arch = None
            self.repodb.insert_package(dataset)
        # Remove optional keys so they do not leak into the next spec.
        for key in ('depends', 'options', 'opt_runtime', 'opt_build', \
                'opt_conflict', 'opt_common', 'opt_postmerge'):
            try:
                del self.env.__dict__[key]
            except KeyError:
                pass
        self.packages_num += 1
def update_package(self, repo_path, category, my_pkg, my_version=None, update=False):
    """Parse each spec file in repo_path/category/my_pkg and record the
    package's metadata and dependencies in the repository database.

    NOTE(review): this definition duplicates an earlier `update_package`
    in the same file (Python keeps the later one) — confirm the duplicate
    is intentional and remove one copy.

    Args:
        repo_path:  filesystem path of the repository root.
        category:   category (subdirectory) the package lives in.
        my_pkg:     package directory name.
        my_version: unused; retained for interface compatibility.
        update:     when True, existing rows for each version are deleted
                    before insertion.

    Side effects: chdir into the package directory, mutates self.env,
    writes to self.repodb, increments self.packages_num; terminates the
    program via lpms.terminate() on invalid metadata.
    """
    dataset = LCollect()
    # Register some variables to use after
    self.env.repo = os.path.basename(repo_path)
    self.env.category = category
    dataset.repo = self.env.repo
    dataset.category = category
    os.chdir(os.path.join(repo_path, category, my_pkg))
    for pkg in glob.glob("*" + cst.spec_suffix):
        script_path = os.path.join(repo_path, category, my_pkg, pkg)
        self.env.name, self.env.version = utils.parse_pkgname(
            pkg.split(cst.spec_suffix)[0])
        dataset.name = self.env.name
        dataset.version = self.env.version
        # FIXME: We must develop an upper-class or environment to
        # use these cases to prevent code duplication
        # Begins code duplication
        # A version may carry a revision suffix: try two digits, then one.
        interphase = re.search(r'-r[0-9][0-9]', self.env.version)
        if not interphase:
            interphase = re.search(r'-r[0-9]', self.env.version)
        self.env.raw_version = self.env.version
        self.env.revision = ""
        # Now, set real values of these variables if the package is revisioned.
        if interphase is not None and interphase.group():
            self.env.raw_version = self.env.version.replace(
                interphase.group(), "")
            self.env.revision = interphase.group()
        # End of code duplication
        self.env.__dict__[
            "fullname"] = self.env.name + "-" + self.env.version
        if not self.import_script(script_path):
            out.error("an error occured while processing the spec: %s" \
                    % out.color(script_path, "red"))
            out.error(
                "please report the above error messages to the package maintainer."
            )
            continue
        metadata = utils.metadata_parser(self.env.metadata)
        metadata.update({
            "name": self.env.name,
            "version": self.env.version
        })
        # This method checks metadata integrity.
        # It warns the user and passes the spec if a spec is broken
        self.check_metadata_integrity(metadata)
        # These values are optional
        if not "options" in metadata:
            metadata.update({"options": None})
        if not "slot" in metadata:
            metadata.update({"slot": "0"})
        if not "src_url" in metadata:
            metadata.update({"src_url": None})
        if lpms.getopt("--verbose"):
            out.write("  %s-%s\n" % (self.env.name, self.env.version))
        try:
            dataset.summary = metadata['summary']
            dataset.homepage = metadata['homepage']
            dataset.license = metadata['license']
            dataset.src_uri = metadata['src_url']
            if metadata['options'] is None:
                dataset.options = None
            else:
                dataset.options = metadata['options'].split(" ")
            dataset.slot = metadata['slot']
        except KeyError:
            # BUGFIX: original used undefined `repo_name`, raising NameError
            # instead of printing this diagnostic; use dataset.repo.
            out.error("%s/%s/%s-%s: invalid metadata" % (dataset.repo, category, \
                    self.env.name, self.env.version))
            out.warn(
                "repository update was failed and the repository database was removed."
            )
            out.warn(
                "you can run 'lpms --reload-previous-repodb' command to reload previous db version."
            )
            lpms.terminate("good luck!")
        if update:
            # Replace mode: purge stale rows for this exact version first.
            self.repodb.delete_package(package_repo=dataset.repo,
                    package_category=dataset.category, \
                    package_name=self.env.name,
                    package_version=self.env.version)
        # Unconditional dependencies, bucketed by phase.
        static_depends_runtime = []
        static_depends_build = []
        static_depends_postmerge = []
        static_depends_conflict = []
        if 'depends' in self.env.__dict__:
            deps = utils.depends_parser(self.env.depends)
            if 'runtime' in deps:
                static_depends_runtime.extend(deps['runtime'])
            if 'build' in deps:
                static_depends_build.extend(deps['build'])
            if 'common' in deps:
                # 'common' counts for both runtime and build.
                static_depends_runtime.extend(deps['common'])
                static_depends_build.extend(deps['common'])
            if 'postmerge' in deps:
                static_depends_postmerge.extend(deps['postmerge'])
            if 'conflict' in deps:
                static_depends_conflict.extend(deps['conflict'])
        # Option-conditional dependencies, bucketed by phase.
        optional_depends_runtime = []
        optional_depends_build = []
        optional_depends_postmerge = []
        optional_depends_conflict = []
        for opt in ('opt_common', 'opt_conflict', 'opt_postmerge',
                    'opt_runtime', 'opt_build'):
            try:
                deps = utils.parse_opt_deps(getattr(self.env, opt))
                phase = opt.split("_")[1]
                if phase == "runtime":
                    optional_depends_runtime.append(deps)
                elif phase == "build":
                    optional_depends_build.append(deps)
                elif phase == "common":
                    # 'common' feeds both buckets.
                    optional_depends_build.append(deps)
                    optional_depends_runtime.append(deps)
                elif phase == "postmerge":
                    optional_depends_postmerge.append(deps)
                elif phase == "conflict":
                    optional_depends_conflict.append(deps)
                del deps
            except AttributeError:
                # The spec did not define this opt_* attribute; skip it.
                continue
        dataset.optional_depends_runtime = optional_depends_runtime
        dataset.optional_depends_build = optional_depends_build
        dataset.optional_depends_postmerge = optional_depends_postmerge
        dataset.optional_depends_conflict = optional_depends_conflict
        dataset.static_depends_runtime = static_depends_runtime
        dataset.static_depends_build = static_depends_build
        dataset.static_depends_postmerge = static_depends_postmerge
        dataset.static_depends_conflict = static_depends_conflict
        if metadata['arch'] is not None:
            # One row per supported architecture.
            for arch in metadata['arch'].split(" "):
                dataset.arch = arch
                self.repodb.insert_package(dataset)
        else:
            dataset.arch = None
            self.repodb.insert_package(dataset)
        # Remove optional keys so state does not leak into the next spec.
        for key in ('depends', 'options', 'opt_runtime', 'opt_build', \
                'opt_conflict', 'opt_common', 'opt_postmerge'):
            try:
                del self.env.__dict__[key]
            except KeyError:
                pass
        self.packages_num += 1