def show_info(self, package):
    """Print the recorded build information for an installed package.

    Reads the build-info row for ``package.id`` from the installed-packages
    database and prints one labelled line per field.  ``items`` is indexed
    1-based in lockstep with ``template`` (enumerate start=1); items[1] and
    items[2] are assumed to be UNIX timestamps -- TODO confirm against the
    database schema.
    """
    items = self.instdb.database.get_package_build_info(package.id)
    template = (
        'Start_Time', 'End Time', 'Requestor', 'Requestor ID', 'HOST',
        'CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'JOBS', 'CC', 'CXX'
    )
    out.normal("Build information for %s/%s/%s-%s {%s:%s}" % (package.repo,
        package.category, package.name, package.version, package.slot, package.arch))
    for index, item in enumerate(template, 1):
        # First two fields are timestamps and get special rendering.
        if index in (1, 2):
            out.write("%s: %s\n" % (out.color(item, "green"),
                datetime.datetime.fromtimestamp(items[index]).strftime('%Y-%m-%d %H:%M:%S')))
            if index == 2:
                # Elapsed build time: end minus start, shown in minutes
                # when it reaches a full minute, otherwise in seconds.
                delta = datetime.datetime.fromtimestamp(items[2]) - \
                        datetime.datetime.fromtimestamp(items[1])
                operation_time = str(round(float(delta.seconds)/60, 1))+" minutes" if delta.seconds >= 60 \
                        else str(delta.seconds)+" seconds"
                out.write("%s: %s\n" % (out.color("Operation Time", "green"), operation_time))
            continue
        # Remaining fields are printed verbatim from the database row.
        out.write("%s: %s\n" % (out.color(item, "green"), items[index]))
def parse_traceback(exception_type=None):
    '''Parse exceptions and show nice and more readable error messages'''
    # NOTE(review): this function references `self`, `operation` and
    # `operation_order` although its signature has no `self` -- it is
    # presumably a nested function closing over an enclosing method's
    # scope; confirm against the full source file.
    out.write(out.color(">>", "brightred") + " %s/%s/%s-%s\n" % (self.environment.repo,
        self.environment.category, self.environment.name, self.environment.version))
    exc_type, exc_value, exc_traceback = sys.exc_info()
    formatted_lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
    if not self.environment.debug:
        # Compact mode: find the first "File ..." frame whose trailing word
        # is a known build stage and print only its line number plus the
        # final (exception message) line of the traceback.
        for item in formatted_lines:
            item = item.strip()
            if item.startswith("File"):
                regex = re.compile(r'(\w+)\S*$')
                regex = regex.search(item)
                if regex is None:
                    continue
                if regex.group() in operation_order:
                    # Strip every non-digit/non-dot character, leaving the
                    # line number embedded in the frame description.
                    line = re.compile(r'[^\d.]+')
                    line = line.sub('', item)
                    out.write("%s %s " % (out.color("on line %s:" % line, "red"),
                        formatted_lines[-1]))
                    break
    else:
        # Debug mode: dump the full traceback untouched.
        traceback.print_exc()
    out.error("an error occurred when running the %s function." % out.color(operation, "red"))
    return False
def run(self):
    """Resolve each package atom given on the command line and show it.

    Atoms may be ``repo/category/name``, ``category/name`` or ``name``.
    Terminates the process on an unparsable atom or a lookup miss.
    """
    if lpms.getopt("--help") or len(self.params) == 0:
        self.usage()
    for param in self.params:
        # Rebinds param from the raw string to its slash-split components.
        param = param.split("/")
        if len(param) == 3:
            myrepo, mycategory, myname = param
            packages = self.repodb.find_package(package_name=myname,
                    package_repo=myrepo, package_category=mycategory)
        elif len(param) == 2:
            mycategory, myname = param
            packages = self.repodb.find_package(package_name=myname,
                    package_category=mycategory)
        elif len(param) == 1:
            packages = self.repodb.find_package(package_name=param[0])
        else:
            out.error("%s seems invalid." % out.color("/".join(param), "brightred"))
            lpms.terminate()
        if not packages:
            out.error("%s not found!" % out.color("/".join(param), "brightred"))
            lpms.terminate()
        # Show time!
        self.show_package(packages)
def run(self):
    """Look up every package atom from the command line and display it.

    Accepted atom shapes: repo/category/name, category/name, plain name.
    Invalid atoms and missing packages terminate the process.
    """
    if lpms.getopt("--help") or len(self.params) == 0:
        self.usage()
    for raw_param in self.params:
        param = raw_param.split("/")
        atom_length = len(param)
        if atom_length == 3:
            myrepo, mycategory, myname = param
            packages = self.repodb.find_package(
                package_name=myname,
                package_repo=myrepo,
                package_category=mycategory,
            )
        elif atom_length == 2:
            mycategory, myname = param
            packages = self.repodb.find_package(
                package_name=myname,
                package_category=mycategory,
            )
        elif atom_length == 1:
            packages = self.repodb.find_package(package_name=param[0])
        else:
            out.error("%s seems invalid." % out.color("/".join(param), "brightred"))
            lpms.terminate()
        if not packages:
            out.error("%s not found!" % out.color("/".join(param), "brightred"))
            lpms.terminate()
        # Show time!
        self.show_package(packages)
def perform_operation(self): utils.xterm_title("(%s/%s) lpms: merging %s/%s-%s from %s" % (self.environment.index, self.environment.count, self.environment.category, self.environment.name, self.environment.version, self.environment.repo)) # create $info_file_name.gz archive and remove info file self.create_info_archive() # merge the package self.merge_package() # clean the previous version if it is exists self.clean_obsolete_content() # write to database self.write_db() # create or update /usr/share/info/dir self.update_info_index() if self.backup: out.write("%s%s configuration file changed. Use %s to fix these files.\n" % (out.color(" > ", "green"), len(self.backup), \ out.color("merge-conf", "red"))) if shelltools.is_exists(cst.lock_file): shelltools.remove_file(cst.lock_file) return True, self.environment
def main():
    """List every repository found on disk with its state and metadata.

    For each directory under ``cst.repos`` that contains a repo
    configuration file, prints whether the repository is enabled, its
    system name, and the optional name/summary/maintainer fields (warning
    when a field is missing).
    """
    available_repositories = utils.available_repositories()
    for item in os.listdir(cst.repos):
        repo_conf = os.path.join(cst.repos, item, cst.repo_file)
        # Skip directories without a repo configuration file.
        if not os.access(repo_conf, os.F_OK):
            continue
        with open(repo_conf) as data:
            data = conf.ReadConfig(data.read().splitlines(), delimiter="@")
        if item in available_repositories:
            out.normal("%s [%s]" % (item, out.color("enabled", "brightgreen")))
        else:
            out.normal("%s [%s]" % (item, out.color("disabled", "brightred")))
        out.notify("system name: %s" % item)
        # The three optional metadata fields share an identical
        # notify-or-warn pattern; drive it from a table instead of
        # repeating the hasattr branch three times.
        for attribute, label in (("name", "development name"),
                                 ("summary", "summary"),
                                 ("maintainer", "maintainer")):
            if hasattr(data, attribute):
                out.notify("%s: %s" % (label, getattr(data, attribute)))
            else:
                out.warn("%s is not defined!" % label)
        out.write("\n")
def main(self):
    """Print search hits for ``self.keyword``, highlighting the match.

    Each hit is printed as ``repo/category/name-version -- path``.  The
    keyword is compiled as a case-insensitive regex to highlight its
    occurrences in the path; when the keyword is not a valid regex the
    path is printed without highlighting.
    """
    out.normal("searching for %s\n" % self.keyword)
    for (repo, category, name, version, path) in self.search():
        try:
            # Original built "(%s)" % "|".join([self.keyword]), which for a
            # single keyword is just the keyword itself.  A keyword with
            # regex metacharacters may raise re.error -- the narrow except
            # below (was a bare except) falls back to an unhighlighted path.
            replace = re.compile("(%s)" % self.keyword, re.I)
            out.write("%s/%s/%s-%s -- %s\n" % (
                out.color(repo, "green"), out.color(category, "green"),
                out.color(name, "green"), out.color(version, "green"),
                replace.sub(out.color(r"\1", "brightred"), path)))
        except re.error:
            out.write("%s/%s/%s-%s -- %s\n" % (
                out.color(repo, "green"), out.color(category, "green"),
                out.color(name, "green"), out.color(version, "green"),
                path))
def confirm(text):
    """Ask the user a yes/no question on stdin, up to five times.

    An empty answer, "y" or "yes" confirms; "n" or "no" declines; anything
    else reprints the prompt.  Returns True on confirmation, False on
    decline.

    Bug fix: the original fell off the end of the loop after five
    unrecognized answers and implicitly returned None; now returns False.
    """
    turns = 5
    while turns:
        turns -= 1
        out.warn(text+"["+out.color("yes", "green")+"/"+out.color("no", "red")+"]")
        answer = sys.stdin.readline().strip()
        if answer in ("yes", "y", ""):
            return True
        elif answer in ("no", "n"):
            return False
        out.write(out.color("Sorry, response " + answer + " not understood! yes/y or no/n\n", "red"))
    return False
def main(self):
    """List the installed files of the package(s) named by ``self.pkgname``.

    The atom may be repo/category/name, category/name or a bare name;
    the name part may carry a version suffix (split by
    ``utils.parse_pkgname``).  Symlinks are printed as ``link -> target``;
    files that live under a symlinked directory are additionally printed
    with the symlinked path highlighted.
    """
    parsed = self.pkgname.split("/")
    if len(parsed) == 3:
        repo, category, name = parsed
        name, version = utils.parse_pkgname(name)
        packages = self.instdb.find_package(
            package_repo=repo,
            package_category=category,
            package_name=name,
            package_version=version
        )
    elif len(parsed) == 2:
        category, name = parsed
        name, version = utils.parse_pkgname(name)
        packages = self.instdb.find_package(
            package_category=category,
            package_name=name,
            package_version=version
        )
    elif len(parsed) == 1:
        name, version = utils.parse_pkgname(self.pkgname)
        packages = self.instdb.find_package(
            package_name=name,
            package_version=version
        )
    else:
        out.error("%s could not be recognized." % self.pkgname)
        lpms.terminate()
    if not packages:
        out.error("%s not installed." % self.pkgname)
        lpms.terminate()
    for package in packages:
        # Maps realpath-of-symlinked-dir -> symlink path, discovered as
        # symlinked directories are encountered in the content listing.
        symdirs = {}
        out.normal("%s/%s/%s-%s" % (package.repo, package.category,
            package.name, package.version))
        content = self.filesdb.get_paths_by_package(package.name,
            category=package.category, version=package.version)
        for item in content:
            item = item[0].encode('UTF-8')
            if os.path.islink(item):
                out.write("%s -> %s\n" % (out.color(item, "green"), os.readlink(item)))
                if os.path.isdir(os.path.realpath(item)):
                    symdirs[os.path.realpath(item)+"/"] = item+"/"
            else:
                out.write(item+"\n")
            # NOTE(review): indentation reconstructed -- this block is
            # assumed to run per content item; it only matches items seen
            # after their containing symlinked directory was recorded.
            if symdirs:
                for symdir in symdirs:
                    if item.startswith(symdir):
                        out.write("%s -> %s\n" % (out.color(item.replace(symdir,
                            symdirs[symdir]), "brightwhite"), out.color(item, "brightwhite")))
def usage(self):
    """Print the top-level lpms help text and exit with status 0.

    Shows basic invocation, then the build-related and other argument
    groups.  The two groups were rendered by duplicated loops; the
    rendering is now factored into a single private helper.
    """
    def _print_arguments(arguments):
        # Render one argument per line; arguments with a short form get a
        # two-column layout, the rest a single wider column.
        for argument in arguments:
            if hasattr(argument, "short"):
                out.write(('%-29s %-10s : %s\n' % \
                        (out.color(argument.arg, 'green'), \
                        out.color(argument.short, 'green'), \
                        argument.description)))
            else:
                out.write(('%-32s : %s\n' % \
                        (out.color(argument.arg, 'green'), \
                        argument.description)))

    out.normal("lpms -- %s Package Management System %s\n" % \
            (out.color("L", "red"), out.color("v"+__VERSION__, "green")))
    out.write("In order to build a package:\n\n")
    out.write(" # lpms <package-name> <extra-command>\n\n")
    out.write("To see extra commands use --help parameter.\n\n")
    out.write("Build related arguments:\n")
    _print_arguments(self.build_arguments)
    out.write("\nOther arguments:\n")
    _print_arguments(self.other_arguments)
    # Do nothing after showing help message
    sys.exit(0)
def confirm(text):
    """Prompt the user with a yes/no question, allowing five attempts.

    "yes"/"y"/empty answer -> True; "no"/"n" -> False; anything else
    reprompts.  Fixed defect: exhausting all five attempts previously
    returned None implicitly -- it now returns False explicitly.
    """
    for _attempt in range(5):
        out.warn(text + "[" + out.color("yes", "green") + "/" +
                 out.color("no", "red") + "]")
        answer = sys.stdin.readline().strip()
        if answer in ("yes", "y", ""):
            return True
        if answer in ("no", "n"):
            return False
        out.write(
            out.color(
                "Sorry, response " + answer + " not understood! yes/y or no/n\n",
                "red"))
    return False
def get_build_libraries(self):
    """Resolve the package's build libraries to spec files and import them.

    Library entries of the form "repo/name" are resolved inside that
    repo's ``libraries`` directory; bare names resolve against the current
    package's repo -- unless the list has grown while importing (an
    imported library may append further libraries), in which case the
    bare name inherits the repo of the parent entry that pulled it in.
    Deduplicated names end up back in ``self.environment.libraries``.

    NOTE(review): this iterates ``self.environment.libraries`` while
    imported scripts may be appending to it; the lib_index/current_length
    bookkeeping exists to track those appends.  Behavior preserved as-is.
    """
    result = set()
    lib_index = None
    current_length = first_length = len(self.environment.libraries)
    for lib in self.environment.libraries:
        if len(lib.split("/")) == 2:
            # Explicit "repo/name" form.
            lib_source, lib_name = lib.split("/")
            libfile = os.path.join(cst.repos, lib_source, "libraries",
                                   lib_name + ".py")
            result.add(lib_name)
        else:
            if len(self.environment.libraries) > first_length:
                # The list grew: this bare entry was appended by a
                # previously imported library ("parent"); inherit its repo
                # when the parent used the "repo/name" form.
                parent = self.environment.libraries[lib_index]
                if len(parent.split("/")) == 2:
                    parents_repo = parent.split("/")[0]
                    result.add(lib)
                    libfile = os.path.join(cst.repos, parents_repo,
                                           "libraries", lib + ".py")
                else:
                    result.add(lib)
                    libfile = os.path.join(cst.repos, self.environment.repo,
                                           "libraries", lib + ".py")
            else:
                result.add(lib)
                libfile = os.path.join(cst.repos, self.environment.repo,
                                       "libraries", lib + ".py")
        if not os.path.isfile(libfile):
            out.error("build library not found: %s" % out.color(libfile, "red"))
            lpms.terminate()
        # import the script
        if not self.import_script(libfile):
            out.error("an error occured while processing the library: %s" \
                    % out.color(libfile, "red"))
            out.error(
                "please report the above error messages to the library maintainer."
            )
            lpms.terminate()
        if len(self.environment.libraries) > current_length:
            # Remember which entry caused the growth for the parent lookup.
            lib_index = self.environment.libraries.index(lib)
            current_length = len(self.environment.libraries)
    self.environment.libraries = list(result)
def run(self, repo): keyword = "["+repo+"]" # import repo.conf self.read_conf_file() if keyword in self.data: first = self.data.index(keyword) for line in self.data[first+1:]: if line.startswith("["): continue if self._type is None and line.startswith("type"): self._type = line.split("@")[1].strip() if self._type == 'local': return elif self.remote is None and line.startswith("remote"): self.remote = line.split("@")[1].strip() if self._type == "git": from lpms.syncers import git as syncer lpms.logger.info("synchronizing %s from %s" % (repo, self.remote)) out.notify("synchronizing %s from %s" % (out.color(repo, "green"), self.remote)) syncer.run(repo, self.remote)
def run_extract(self):
    # if the environment has no extract_plan variable, doesn't run extract function
    # NOTE(review): when extract_nevertheless is set and true, the
    # extract_plan check is skipped entirely -- confirm intended.
    if not hasattr(self.environment, "extract_nevertheless") or not self.environment.extract_nevertheless:
        if not hasattr(self.environment, "extract_plan"):
            return
    target = os.path.dirname(self.environment.build_dir)
    # Marker file recording that this source tree was already unpacked.
    extracted_file = os.path.join(os.path.dirname(target), ".extracted")
    if os.path.isfile(extracted_file):
        if self.environment.force_extract:
            # --force-extract: drop the marker and re-extract below.
            shelltools.remove_file(extracted_file)
        else:
            out.write("%s %s/%s-%s had been already extracted.\n" % (out.color(">>", "brightyellow"),
                self.environment.category, self.environment.name, self.environment.version))
            return True
    utils.xterm_title("lpms: extracting %s/%s/%s-%s" % (self.environment.repo,
        self.environment.category, self.environment.name, self.environment.version))
    out.notify("extracting archive(s) to %s" % os.path.dirname(self.environment.build_dir))
    # now, extract the archives
    self.run_stage("extract")
    out.notify("%s has been extracted." % self.environment.fullname)
    shelltools.touch(extracted_file)
    if self.environment.stage == "extract":
        # The user only asked for the extract stage; stop here.
        lpms.terminate()
def download(self, url, location=None, report_hook=None):
    """Open *url* with urllib2; return False when the request fails.

    NOTE(review): as visible here the success path neither returns nor
    uses ``response``, and ``location``/``report_hook`` are unused -- the
    rest of the body is presumably outside this chunk; confirm against the
    full source.  Python 2 except syntax; ``e`` is bound but unused.
    """
    # ui.debug("URL: "+str(url))
    try:
        response = urllib2.urlopen(url)
    except urllib2.URLError, e:
        out.error("%s cannot be downloaded" % out.color(url, "brightwhite"))
        return False
def run(self, repo):
    """Synchronize *repo* based on its section in repo.conf.

    Reads the section's type@/remote@ entries; "local" repositories are
    left untouched, "git" repositories are handed to the git syncer.
    """
    section_header = "[%s]" % repo
    # import repo.conf
    self.read_conf_file()
    if section_header in self.data:
        start = self.data.index(section_header)
        for entry in self.data[start + 1:]:
            # A new "[...]" header ends this repository's section body.
            if entry.startswith("["):
                continue
            if self._type is None and entry.startswith("type"):
                self._type = entry.split("@")[1].strip()
                if self._type == 'local':
                    return
            elif self.remote is None and entry.startswith("remote"):
                self.remote = entry.split("@")[1].strip()
        if self._type == "git":
            from lpms.syncers import git as syncer
            lpms.logger.info("synchronizing %s from %s" % (repo, self.remote))
            out.notify("synchronizing %s from %s" %
                       (out.color(repo, "green"), self.remote))
            syncer.run(repo, self.remote)
def system(cmd, show=False, stage=None, sandbox=None):
    """Run a shell command through run_cmd with sandbox/verbosity policy.

    The sandbox defaults to the configuration file value and can be
    forced on/off from the command line.  Returns True on success; on
    failure returns False, or ``(False, output+err)`` when *stage* is set
    and both streams are non-empty.
    """
    cfg = conf.LPMSConfig()
    if sandbox is None:
        sandbox = True if cfg.sandbox else False
    # override 'sandbox' variable if the user wants to modifiy from cli
    if lpms.getopt('--enable-sandbox'):
        sandbox = True
    elif lpms.getopt('--disable-sandbox'):
        sandbox = False
    if lpms.getopt("--verbose"):
        ret, output, err = run_cmd(cmd, True)
    elif (not cfg.print_output or lpms.getopt("--quiet")) \
            and not show:
        # Quiet mode: suppress command output unless the caller insists.
        ret, output, err = run_cmd(cmd, show=False, enable_sandbox=sandbox)
    else:
        ret, output, err = run_cmd(cmd, show=True, enable_sandbox=sandbox)
    if ret != 0:
        if not conf.LPMSConfig().print_output or lpms.getopt("--quiet"):
            # Output was suppressed above, so surface the errors now.
            out.brightred("\n>> error messages:\n")
            out.write(err)
        out.warn("command failed: %s" % out.color(cmd, "red"))
        if stage and output and err:
            return False, output + err
        return False
    return True
def system(cmd, show=False, stage=None, sandbox=None):
    """Execute *cmd* via run_cmd, applying sandbox and verbosity policy.

    Sandbox defaults from configuration, overridable by the
    --enable-sandbox/--disable-sandbox command-line switches.  Returns
    True on success, False on failure -- or (False, combined output) when
    *stage* is given and both output streams are non-empty.
    """
    config = conf.LPMSConfig()
    if sandbox is None:
        sandbox = bool(config.sandbox)
    # Command-line switches take precedence over the configuration file.
    if lpms.getopt("--enable-sandbox"):
        sandbox = True
    elif lpms.getopt("--disable-sandbox"):
        sandbox = False
    if lpms.getopt("--verbose"):
        ret, output, err = run_cmd(cmd, True)
    else:
        suppress = (not config.print_output or lpms.getopt("--quiet")) and not show
        ret, output, err = run_cmd(cmd, show=not suppress, enable_sandbox=sandbox)
    if ret == 0:
        return True
    if not conf.LPMSConfig().print_output or lpms.getopt("--quiet"):
        # The command's own output was hidden, so show the errors now.
        out.brightred("\n>> error messages:\n")
        out.write(err)
    out.warn("command failed: %s" % out.color(cmd, "red"))
    if stage and output and err:
        return False, output + err
    return False
def remove(self):
    """Forward the removal request to the API; exit quietly when the
    requested package does not exist."""
    try:
        api.remove_package(self.request.names, self.request.instruction)
    except PackageNotFound as err:
        missing = out.color(err.message, "red")
        out.write(">> %s could not be found.\n" % missing)
        sys.exit(0)
def main(self):
    """Print the file contents of the installed package(s) in
    ``self.pkgname``.

    Accepts repo/category/name, category/name or bare-name atoms; the
    name may embed a version (split via ``utils.parse_pkgname``).
    Symlinks print as ``link -> target``; paths under symlinked
    directories are additionally echoed with the symlink substituted.
    """
    parsed = self.pkgname.split("/")
    if len(parsed) == 3:
        repo, category, name = parsed
        name, version = utils.parse_pkgname(name)
        packages = self.instdb.find_package(
            package_repo=repo,
            package_category=category,
            package_name=name,
            package_version=version
        )
    elif len(parsed) == 2:
        category, name = parsed
        name, version = utils.parse_pkgname(name)
        packages = self.instdb.find_package(package_category=category,
                                            package_name=name,
                                            package_version=version)
    elif len(parsed) == 1:
        name, version = utils.parse_pkgname(self.pkgname)
        packages = self.instdb.find_package(package_name=name,
                                            package_version=version)
    else:
        out.error("%s could not be recognized." % self.pkgname)
        lpms.terminate()
    if not packages:
        out.error("%s not installed." % self.pkgname)
        lpms.terminate()
    for package in packages:
        # realpath-of-symlinked-dir (with trailing slash) -> symlink path.
        symdirs = {}
        out.normal("%s/%s/%s-%s" % (package.repo, package.category,
                                    package.name, package.version))
        content = self.filesdb.get_paths_by_package(
            package.name, category=package.category, version=package.version
        )
        for item in content:
            item = item[0].encode("UTF-8")
            if os.path.islink(item):
                out.write("%s -> %s\n" % (out.color(item, "green"), os.readlink(item)))
                if os.path.isdir(os.path.realpath(item)):
                    symdirs[os.path.realpath(item) + "/"] = item + "/"
            else:
                out.write(item + "\n")
            # NOTE(review): reconstructed indentation -- this is assumed to
            # run once per item, matching only items listed after their
            # symlinked parent directory was recorded above.
            if symdirs:
                for symdir in symdirs:
                    if item.startswith(symdir):
                        out.write(
                            "%s -> %s\n" % (
                                out.color(item.replace(symdir, symdirs[symdir]), "brightwhite"),
                                out.color(item, "brightwhite"),
                            )
                        )
def set_parser(set_name):
    """Resolve a package set name to its .set file and return its entries.

    Collects matching set files from every repository plus the user set
    directory.  With multiple candidates the user is asked to pick one
    interactively (Q/q aborts).  Returns the file's non-empty,
    non-comment lines, or an empty list when the set is unknown.
    """
    sets = []
    for repo in available_repositories():
        repo_set_file = os.path.join(cst.repos, repo, "info/sets",
                                     "%s.set" % set_name)
        if os.path.isfile((repo_set_file)):
            sets.append(repo_set_file)
    user_set_file = "%s/%s.set" % (cst.user_sets_dir, set_name)
    if os.path.isfile(user_set_file):
        sets.append(user_set_file)
    if len(sets) > 1:
        # Same set name exists in several places: let the user choose.
        out.normal("ambiguous for %s\n" % out.color(set_name, "green"))
        def ask():
            for c, s in enumerate(sets):
                out.write(" " + out.color(str(c + 1), "green") + ") " + s + "\n")
            out.write("\nselect one of them:\n")
            out.write("to exit, press Q or q.\n")
        while True:
            ask()
            answer = sys.stdin.readline().strip()
            if answer == "Q" or answer == "q":
                lpms.terminate()
            elif answer.isalpha():
                out.warn("please give a number.")
                continue
            try:
                # Menu is 1-based; the list index is answer-1.
                set_file = sets[int(answer) - 1]
                break
            except (IndexError, ValueError):
                out.warn("invalid command.")
                continue
    elif len(sets) == 1:
        set_file = sets[0]
    else:
        out.warn("%s not found!" % out.color(set_name, "red"))
        return []
    # file() is the Python 2 builtin alias for open().
    return [line for line in file(set_file).read().strip().split("\n") \
            if not line.startswith("#") and line != ""]
def list_disk_pkgs(repo, category):
    '''Lists pkgnames in the disk using repo and category name

    Returns the entries of <repos>/<repo>/<category> that contain at
    least one spec (*.py) file; warns and returns an empty list when the
    directory is missing or empty.
    '''
    packages = []
    source_dir = os.path.join(cst.repos, repo, category)
    if not os.path.isdir(source_dir):
        out.warn("%s does not exist." % out.color(source_dir, "red"))
        return packages
    sources = os.listdir(source_dir)
    if not sources:
        out.warn("%s seems empty." % out.color(source_dir, "red"))
        return packages
    # An entry counts as a package when its directory holds a spec file;
    # comprehension replaces the original append loop.
    return [source for source in sources
            if glob.glob(os.path.join(source_dir, source) + "/*.py")]
def check_metadata_integrity(self, metadata):
    """Ensure the mandatory metadata fields are present.

    Raises IntegrityError (after printing an error naming the package and
    the missing field) when any of summary/license/arch is absent.
    Idiom fix: ``not field in metadata`` -> ``field not in metadata``.
    """
    required_fields = ('summary', 'license', 'arch')
    for field in required_fields:
        if field not in metadata:
            item = "%s/%s/%s-%s" % (self.env.repo, self.env.category,
                                    self.env.name, self.env.version)
            out.error("An integrity error has been found in %s: %s field must be defined in metadata." \
                    % (item, out.color(field, "red")))
            raise IntegrityError
def list_disk_pkgs(repo, category):
    '''Lists pkgnames in the disk using repo and category name'''
    found = []
    category_dir = os.path.join(cst.repos, repo, category)
    # Bail out with a warning when the category directory is absent.
    if not os.path.isdir(category_dir):
        out.warn("%s does not exist." % out.color(category_dir, "red"))
        return found
    entries = os.listdir(category_dir)
    if not entries:
        out.warn("%s seems empty." % out.color(category_dir, "red"))
        return found
    for entry in entries:
        # A package directory is one that carries at least one *.py spec.
        spec_pattern = os.path.join(category_dir, entry) + "/*.py"
        if glob.glob(spec_pattern):
            found.append(entry)
    return found
def check_metadata_integrity(self, metadata):
    """Verify that summary, license and arch are defined in *metadata*.

    Prints an error identifying the package and the missing field, then
    raises IntegrityError.  Fixed the unidiomatic ``not field in``
    membership test.
    """
    required_fields = ('summary', 'license', 'arch')
    for field in required_fields:
        if field not in metadata:
            item = self.env.repo+"/"+self.env.category+"/"+self.env.name+"-"+self.env.version
            out.error("An integrity error has been found in %s: %s field must be defined in metadata." \
                    % (item, out.color(field, "red")))
            raise IntegrityError
def read_news(self, news_id):
    """Display one news message by its numeric index and mark it read.

    Prints the from/summary/date/priority headers followed by the body.
    Errors out (without raising) on a non-numeric or out-of-range id.
    Fixed garbled error string: "message found not found ..." ->
    "message not found ...".
    """
    try:
        news_id = int(news_id)
        repo, metadata, message = self.cursor.data[news_id]
    except ValueError:
        out.error("invalid id: %s" % news_id)
        return
    except IndexError:
        out.error("message not found with this id: %d" % news_id)
        return
    out.write(out.color("from", "green")+": "+metadata["from"]+"\n")
    out.write(out.color("summary", "green")+": "+metadata["summary"]+"\n")
    out.write(out.color("date", "green")+": "+metadata["date"]+"\n")
    out.write(out.color("priority", "green")+": "+metadata["priority"]+"\n")
    out.write(message+"\n")
    # Record the item as read so list_news stops starring it.
    self.cursor.mark_as_read("%s/%s" % (repo, metadata["summary"]))
def external_fetcher(self, command, download_plan, location):
    """Download every URL in *download_plan* with an external command.

    *command* is expected to take "<partfile> <url>" appended; downloads
    go to *location* (or the source cache) as .part files renamed on
    success.  The command may be an absolute path or resolved via $PATH;
    missing commands terminate lpms.

    NOTE(review): fetch() changes the process CWD and never restores it
    (the restore is commented out).
    """
    # run command
    def fetch(command, download_plan, location):
        #current = os.getcwd()
        if location is not None:
            os.chdir(location)
        else:
            os.chdir(config.src_cache)
        for url in download_plan:
            localfile = os.path.basename(url)
            # Download to a .part file, rename only on success.
            partfile = localfile + ".part"
            output = shelltools.system(command + " " + partfile + " " + url,
                                       show=True,
                                       sandbox=False)
            if not output:
                out.error(url + " cannot be downloaded")
                return False
            else:
                shelltools.move(partfile, localfile)
        #os.chdir(current)
        return True

    # parse fetch command
    realcommand = command.split(" ")[0]
    isexist = False
    if realcommand.startswith("/"):
        # Absolute path: verify it exists, then run it directly.
        if not os.path.isfile(realcommand):
            out.error(
                out.color("EXTERNAL FETCH COMMAND: ", "red") + realcommand +
                " not found!")
            lpms.terminate()
        return fetch(command, download_plan, location)
    else:
        # Resolve through $PATH; returns on the first directory hit.
        for syspath in os.environ["PATH"].split(":"):
            if os.path.isfile(os.path.join(syspath, realcommand)):
                # this is no good
                isexist = True
                return fetch(command, download_plan, location)
        # Only reached when no $PATH entry contained the command.
        if not isexist:
            out.error(
                out.color("EXTERNAL FETCH COMMAND: ", "red") + realcommand +
                " not found!")
            lpms.terminate()
def list_news(self):
    """Print a table of all news items, starring unread ones.

    Each row shows the item's index (used by read_news), a green star for
    unread items, the repository, priority and summary.  Idiom fixes:
    the hand-rolled counter is replaced by enumerate and the membership
    test uses ``not in``.
    """
    self.cursor.get_all_news()
    if not self.cursor.data:
        out.warn("no readable news.")
        return
    out.normal("readable messages listed:")
    out.write("index repo priority summary\n")
    out.write("===============================================\n")
    for index, news in enumerate(self.cursor.data):
        repo, metadata = news[:-1]
        # Unread items (not yet in read_items) get a bright green star.
        if "%s/%s" % (repo, metadata["summary"]) not in self.read_items:
            read = out.color("*", "brightgreen")
        else:
            read = ""
        out.write("[%s]%s\t%-15s\t%-12s %s\n" % (out.color(str(index), "green"),
            read, repo, metadata["priority"], metadata["summary"]))
def collision_check():
    # TODO: This is a temporary solution. collision_check function
    # must be a reusable part for using in remove operation
    #
    # NOTE(review): references `environment` and `self.config` though it
    # takes no parameters and no `self` -- presumably a nested function
    # closing over an enclosing method's scope; confirm in full source.
    #
    # Returns False when real collisions exist and collision protection
    # is active (unless --force-file-collision), True otherwise.
    out.normal("checking file collisions...")
    lpms.logger.info("checking file collisions")
    collision_object = file_collisions.CollisionProtect(
        environment.category,
        environment.name,
        environment.slot,
        real_root=environment.real_root,
        source_dir=environment.install_dir)
    collision_object.handle_collisions()
    if collision_object.orphans:
        # Orphans: files on disk owned by no package; this package adopts
        # them.  Only the first 100 are listed.
        out.write(
            out.color(" > ", "brightyellow") +
            "these files are orphan. the package will adopt the files:\n"
        )
        index = 0
        for orphan in collision_object.orphans:
            out.notify(orphan)
            index += 1
            if index > 100:
                # FIXME: the files must be logged
                out.write(
                    out.color(" > ", "brightyellow") + "...and many others.")
                break
    if collision_object.collisions:
        out.write(
            out.color(" > ", "brightyellow") + "file collisions detected:\n")
        for item in collision_object.collisions:
            (category, name, slot, version), path = item
            out.write(out.color(" -- ", "red")+category+"/"+name+"-"\
                    +version+":"+slot+" -> "+path+"\n")
    if collision_object.collisions and self.config.collision_protect:
        if environment.force_file_collision:
            out.warn(
                "Disregarding these collisions, you have been warned!")
        else:
            return False
    return True
def set_parser(set_name):
    """Locate the .set file for *set_name* and return its package lines.

    Candidates come from each repository's info/sets directory and the
    user set directory.  Several candidates trigger an interactive menu
    (Q/q quits lpms); no candidate yields an empty list.  Comment lines
    and blanks are filtered from the returned entries.
    """
    candidates = []
    for repo in available_repositories():
        repo_set_file = os.path.join(cst.repos, repo, "info/sets",
                                     "%s.set" % set_name)
        if os.path.isfile((repo_set_file)):
            candidates.append(repo_set_file)
    user_set_file = "%s/%s.set" % (cst.user_sets_dir, set_name)
    if os.path.isfile(user_set_file):
        candidates.append(user_set_file)

    if not candidates:
        out.warn("%s not found!" % out.color(set_name, "red"))
        return []

    if len(candidates) == 1:
        chosen = candidates[0]
    else:
        out.normal("ambiguous for %s\n" % out.color(set_name, "green"))

        def show_menu():
            # 1-based menu of every candidate path.
            for number, candidate in enumerate(candidates):
                out.write(" " + out.color(str(number + 1), "green") + ") " +
                          candidate + "\n")
            out.write("\nselect one of them:\n")
            out.write("to exit, press Q or q.\n")

        while True:
            show_menu()
            answer = sys.stdin.readline().strip()
            if answer in ("Q", "q"):
                lpms.terminate()
            elif answer.isalpha():
                out.warn("please give a number.")
                continue
            try:
                chosen = candidates[int(answer) - 1]
                break
            except (IndexError, ValueError):
                out.warn("invalid command.")
                continue

    lines = file(chosen).read().strip().split("\n")
    return [line for line in lines
            if not line.startswith("#") and line != ""]
def usage(self):
    """Show help for the database search command, then terminate lpms."""
    out.normal("Search given keywords in database")
    out.green("General Usage:\n")
    out.write(" $ lpms -s <keyword>\n")
    out.write("\nOther options:\n")
    for entry in help_output:
        # Only well-formed (option, description) pairs are rendered.
        if len(entry) != 2:
            continue
        option, description = entry
        out.write("%-28s: %s\n" % (out.color(option, "green"), description))
    lpms.terminate()
def run(self):
    """Resolve ``self.package`` to installed packages and print their
    build information.

    The atom may be repo/category/name, category/name or a bare name;
    terminates lpms on an unparsable atom or a lookup miss.
    """
    package = self.package.split("/")
    if len(package) == 3:
        myrepo, mycategory, myname = package
        packages = self.instdb.find_package(package_name=myname,
                                            package_repo=myrepo,
                                            package_category=mycategory)
    elif len(package) == 2:
        mycategory, myname = package
        packages = self.instdb.find_package(package_name=myname,
                                            package_category=mycategory)
    elif len(package) == 1:
        packages = self.instdb.find_package(package_name=package[0])
    else:
        out.error("%s seems invalid." % out.color("/".join(package), "brightred"))
        lpms.terminate()
    if not packages:
        out.error("%s not found!" % out.color("/".join(package), "brightred"))
        lpms.terminate()
    # Rebinds `package` from the split atom to each match.
    for package in packages:
        self.show_info(package)
def main(self):
    """Search for ``self.keyword`` and print each hit with highlighting.

    Hits print as ``repo/category/name-version -- path`` with keyword
    occurrences in the path highlighted via a case-insensitive regex.
    Fixed defect: the bare ``except:`` is narrowed to ``re.error`` -- the
    only plausible failure is a keyword containing invalid regex syntax,
    in which case the path is printed without highlighting.
    """
    out.normal("searching for %s\n" % self.keyword)
    for (repo, category, name, version, path) in self.search():
        try:
            # "|".join([kw]) in the original is identical to kw itself.
            replace = re.compile("(%s)" % self.keyword, re.I)
            out.write(
                "%s/%s/%s-%s -- %s\n" %
                (out.color(repo, "green"), out.color(category, "green"),
                 out.color(name, "green"), out.color(version, "green"),
                 replace.sub(out.color(r"\1", "brightred"), path)))
        except re.error:
            out.write(
                "%s/%s/%s-%s -- %s\n" %
                (out.color(repo, "green"), out.color(category, "green"),
                 out.color(name, "green"), out.color(version, "green"),
                 path))
def reload_previous_repodb():
    """Restore the first on-disk repositorydb backup over the live database.

    Backup files live next to the repos directory and are named
    "repositorydb.<x>.<timestamp>".  The first match is copied back and
    its timestamp echoed; with no match an error is printed.
    """
    backup_dir = os.path.dirname(cst.repos)
    for candidate in os.listdir(backup_dir):
        if not (candidate.startswith("repositorydb") and candidate.count(".") == 2):
            continue
        shelltools.copy(os.path.join(backup_dir, candidate), cst.repositorydb_path)
        from datetime import datetime
        stamp = candidate.split(".")[-1]
        previous_date = datetime.fromtimestamp(float(stamp)).strftime('%Y-%m-%d %H:%M:%S')
        out.normal("loading previous database copy: %s" % out.color(previous_date, "red"))
        return
    out.error("no repodb backup found.")
def read_news(self, news_id):
    """Show a single news message selected by index, then mark it read.

    Emits the from/summary/date/priority headers and the message body.
    A non-integer id or an out-of-range index prints an error and
    returns.  Fixed the garbled IndexError message ("message found not
    found ..." -> "message not found ...").
    """
    try:
        news_id = int(news_id)
        repo, metadata, message = self.cursor.data[news_id]
    except ValueError:
        out.error("invalid id: %s" % news_id)
        return
    except IndexError:
        out.error("message not found with this id: %d" % news_id)
        return
    out.write(out.color("from", "green") + ": " + metadata["from"] + "\n")
    out.write(
        out.color("summary", "green") + ": " + metadata["summary"] + "\n")
    out.write(out.color("date", "green") + ": " + metadata["date"] + "\n")
    out.write(
        out.color("priority", "green") + ": " + metadata["priority"] + "\n")
    out.write(message + "\n")
    # Prevent the star in list_news on subsequent listings.
    self.cursor.mark_as_read("%s/%s" % (repo, metadata["summary"]))
def get_build_libraries(self):
    """Locate and import every build library named by the environment.

    "repo/name" entries resolve within that repo's libraries directory;
    bare names resolve against the current repo unless the library list
    grew during the loop (imported libraries can append entries), in
    which case the new bare entry inherits the appending parent's repo.

    NOTE(review): the list is iterated while imported scripts may append
    to it; lib_index/current_length track that growth.  Kept verbatim.
    """
    result = set()
    lib_index = None
    current_length = first_length = len(self.environment.libraries)
    for lib in self.environment.libraries:
        if len(lib.split("/")) == 2:
            # Explicit "repo/name" entry.
            lib_source, lib_name = lib.split("/")
            libfile = os.path.join(cst.repos, lib_source, "libraries", lib_name+".py")
            result.add(lib_name)
        else:
            if len(self.environment.libraries) > first_length:
                # Entry appended mid-loop: inherit the repo of the parent
                # entry that triggered the growth when it was repo/name.
                parent = self.environment.libraries[lib_index]
                if len(parent.split("/")) == 2:
                    parents_repo = parent.split("/")[0]
                    result.add(lib)
                    libfile = os.path.join(cst.repos, parents_repo, "libraries", lib+".py")
                else:
                    result.add(lib)
                    libfile = os.path.join(cst.repos, self.environment.repo, "libraries", lib+".py")
            else:
                result.add(lib)
                libfile = os.path.join(cst.repos, self.environment.repo, "libraries", lib+".py")
        if not os.path.isfile(libfile):
            out.error("build library not found: %s" % out.color(libfile, "red"))
            lpms.terminate()
        # import the script
        if not self.import_script(libfile):
            out.error("an error occured while processing the library: %s" \
                    % out.color(libfile, "red"))
            out.error("please report the above error messages to the library maintainer.")
            lpms.terminate()
        if len(self.environment.libraries) > current_length:
            # Remember which entry caused the list to grow.
            lib_index = self.environment.libraries.index(lib)
            current_length = len(self.environment.libraries)
    self.environment.libraries = list(result)
def fetcher_ui(self, bytes_so_far, total_size, filename):
    """Render a single-line download progress indicator on stdout."""
    # our ui :) no progress bar or others...
    so_far_kb = str(bytes_so_far / 1024) + "kb"
    total_kb = str(total_size / 1024) + "kb"
    percent = round((float(bytes_so_far) / total_size) * 100, 2)
    eta = self.estimated_time((bytes_so_far / 1024), (total_size / 1024),
                              (time.time() - self.begining))
    sys.stdout.write("\r%s %s/%s (%0.2f%%) %s" % (
        out.color(filename, "brightwhite"), so_far_kb, total_kb, percent, eta))
    if bytes_so_far >= total_size:
        # Completed: move to the next line instead of overwriting.
        sys.stdout.write('\n')
    sys.stdout.flush()
def run(self):
    """Resolve ``self.package`` against the installed database and print
    build information for every match.

    Accepted atoms: repo/category/name, category/name, bare name.
    Unparsable atoms and lookup misses terminate lpms.
    """
    atom = self.package.split("/")
    component_count = len(atom)
    if component_count == 3:
        myrepo, mycategory, myname = atom
        packages = self.instdb.find_package(
            package_name=myname,
            package_repo=myrepo,
            package_category=mycategory,
        )
    elif component_count == 2:
        mycategory, myname = atom
        packages = self.instdb.find_package(
            package_name=myname,
            package_category=mycategory,
        )
    elif component_count == 1:
        packages = self.instdb.find_package(package_name=atom[0])
    else:
        out.error("%s seems invalid." % out.color("/".join(atom), "brightred"))
        lpms.terminate()
    if not packages:
        out.error("%s not found!" % out.color("/".join(atom), "brightred"))
        lpms.terminate()
    for match in packages:
        self.show_info(match)
def show_info(self, package):
    """Print the stored build information of one installed package.

    ``items`` is the build-info row keyed by ``package.id``; it is read
    1-based alongside ``template`` (enumerate start=1).  items[1]/items[2]
    are assumed to be UNIX start/end timestamps -- TODO confirm schema.
    """
    items = self.instdb.database.get_package_build_info(package.id)
    template = (
        "Start_Time",
        "End Time",
        "Requestor",
        "Requestor ID",
        "HOST",
        "CFLAGS",
        "CXXFLAGS",
        "LDFLAGS",
        "JOBS",
        "CC",
        "CXX",
    )
    out.normal(
        "Build information for %s/%s/%s-%s {%s:%s}"
        % (package.repo, package.category, package.name, package.version, package.slot, package.arch)
    )
    for index, item in enumerate(template, 1):
        # Timestamps (fields 1 and 2) get date formatting; after the end
        # time, the elapsed build duration is derived and printed too.
        if index in (1, 2):
            out.write(
                "%s: %s\n"
                % (
                    out.color(item, "green"),
                    datetime.datetime.fromtimestamp(items[index]).strftime("%Y-%m-%d %H:%M:%S"),
                )
            )
            if index == 2:
                delta = datetime.datetime.fromtimestamp(items[2]) - datetime.datetime.fromtimestamp(items[1])
                # Minutes once a full minute has elapsed, else seconds.
                operation_time = (
                    str(round(float(delta.seconds) / 60, 1)) + " minutes"
                    if delta.seconds >= 60
                    else str(delta.seconds) + " seconds"
                )
                out.write("%s: %s\n" % (out.color("Operation Time", "green"), operation_time))
            continue
        # All remaining fields print verbatim.
        out.write("%s: %s\n" % (out.color(item, "green"), items[index]))
def fetcher_ui(self, bytes_so_far, total_size, filename):
    """Draw the one-line download progress display.

    NOTE(review): divides by total_size -- a server that reports size 0
    would raise ZeroDivisionError here; confirm callers guarantee a
    positive total_size.
    """
    # our ui :) no progress bar or others...
    sys.stdout.write(
        "\r%s %s/%s (%0.2f%%) %s" % (
            out.color(filename, "brightwhite"),
            str(bytes_so_far/1024) + "kb",
            str(total_size/1024) + "kb",
            round((float(bytes_so_far) / total_size)*100, 2),
            self.estimated_time((bytes_so_far/1024), (total_size/1024),
                (time.time()-self.begining)))
    )
    if bytes_so_far >= total_size:
        # Download finished: terminate the status line.
        sys.stdout.write('\n')
    sys.stdout.flush()
def parse_traceback(exception_type=None):
    '''Parse exceptions and show nice and more readable error messages'''
    # NOTE(review): `self`, `operation` and `operation_order` are free
    # names here (no `self` parameter) -- presumably this is a closure
    # inside a method; verify against the complete source file.
    out.write(out.color(">>", "brightred")+" %s/%s/%s-%s\n" % (self.environment.repo,
        self.environment.category, self.environment.name, self.environment.version))
    exc_type, exc_value, exc_traceback = sys.exc_info()
    formatted_lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
    if not self.environment.debug:
        # Non-debug: show only the line number of the first frame whose
        # trailing word names a known build stage, plus the exception line.
        for item in formatted_lines:
            item = item.strip()
            if item.startswith("File"):
                regex = re.compile(r'(\w+)\S*$')
                regex = regex.search(item)
                if regex is None:
                    continue
                if regex.group() in operation_order:
                    # Keep only digits/dots: the frame's line number.
                    line = re.compile(r'[^\d.]+')
                    line = line.sub('', item)
                    out.write("%s %s " % (out.color("on line %s:" % line, "red"),
                        formatted_lines[-1]))
                    break
    else:
        traceback.print_exc()
    out.error("an error occurred when running the %s function." % out.color(operation, "red"))
    return False
def reload_previous_repodb():
    """Copy the first repositorydb backup found back over the live database.

    Backups are siblings of the repos directory named
    "repositorydb.<x>.<timestamp>"; the timestamp suffix is echoed as a
    human-readable date.  Prints an error when no backup exists.
    """
    dirname = os.path.dirname(cst.repos)
    for _file in os.listdir(dirname):
        # Backup naming convention: exactly two dots, trailing timestamp.
        if _file.startswith("repositorydb") and _file.count(".") == 2:
            shelltools.copy(os.path.join(dirname, _file), cst.repositorydb_path)
            from datetime import datetime
            timestamp = _file.split(".")[-1]
            previous_date = datetime.fromtimestamp(
                float(timestamp)).strftime('%Y-%m-%d %H:%M:%S')
            out.normal("loading previous database copy: %s" %
                       out.color(previous_date, "red"))
            # Stop after restoring the first matching backup.
            return
    out.error("no repodb backup found.")
def gnome2_icon_cache_update(*args, **kwargs):
    """Refresh the GTK+ icon cache for the hicolor theme (or a custom target)."""
    # Defaults: quiet (-q), check timestamps (-t), force regeneration (-f).
    parameters = " ".join(args) if args else "-q -t -f"
    target = kwargs["target"] if kwargs and "target" in kwargs \
            else "/usr/share/icons/hicolor"
    out.notify("updating GTK+ icon cache...")
    command = "gtk-update-icon-cache %s %s" % (parameters, target)
    if not shelltools.system(command, sandbox=False):
        out.write(out.color("\n\tFAILED\n", "red"))
def external_fetcher(self, command, download_plan, location):
    """Download every URL in download_plan with a user supplied fetch command."""
    def fetch(command, download_plan, location):
        # Work inside the target directory (src_cache when none is given).
        # NOTE: the working directory is intentionally left changed afterwards,
        # matching the long-standing behavior of this helper.
        os.chdir(location if location is not None else config.src_cache)
        for url in download_plan:
            localfile = os.path.basename(url)
            partfile = localfile + ".part"
            # Download into a ".part" file first, rename only on success.
            if not shelltools.system(command + " " + partfile + " " + url,
                                     show=True, sandbox=False):
                out.error(url + " cannot be downloaded")
                return False
            shelltools.move(partfile, localfile)
        return True

    # The executable is the first word of the fetch command line.
    realcommand = command.split(" ")[0]
    if realcommand.startswith("/"):
        # Absolute path: must point at an existing regular file.
        if not os.path.isfile(realcommand):
            out.error(out.color("EXTERNAL FETCH COMMAND: ", "red") + realcommand + " not found!")
            lpms.terminate()
        return fetch(command, download_plan, location)
    # Relative name: search each PATH entry for the executable.
    for syspath in os.environ["PATH"].split(":"):
        if os.path.isfile(os.path.join(syspath, realcommand)):
            return fetch(command, download_plan, location)
    out.error(out.color("EXTERNAL FETCH COMMAND: ", "red") + realcommand + " not found!")
    lpms.terminate()
def mangle_spec(self):
    # TODO: Use more convenient exceptions for error states.
    '''Compiles the spec file and imports its content to lpms' build environment.'''
    # The spec must exist and be readable before we try to import it.
    if not os.path.isfile(self.internals.env.spec_file):
        out.error("%s could not be found!" % self.internals.env.spec_file)
        raise BuildError
    elif not os.access(self.internals.env.spec_file, os.R_OK):
        out.error("%s is not readable!" % self.internals.env.spec_file)
        raise BuildError
    # TODO: Use a more proper name for import_script
    if not self.internals.import_script(self.internals.env.spec_file):
        # Fixed typo in the user-facing message: "occured" -> "occurred".
        out.error("an error occurred while processing the spec: %s" \
                % out.color(self.internals.env.spec_file, "red"))
        # TODO: Here, show package maintainer and bugs_to
        out.error("please report the above error messages to the package maintainer.")
        raise BuildError
def standard_extract():
    """ Runs standard extract procedure """
    # Extract every archive in extract_plan from src_cache into the parent
    # directory of build_dir.
    target = os.path.dirname(build_dir)
    for url in extract_plan:
        out.write(" %s %s\n" % (out.color(">", "green"), \
                os.path.join(cfg.LPMSConfig().src_cache, \
                os.path.basename(url))))
        archive_path = os.path.join(cfg.LPMSConfig().src_cache, \
                os.path.basename(url))
        try:
            # 'partial' is expected to come from the enclosing (spec) scope;
            # when the spec did not define it, referencing it raises NameError
            # and the except branch below performs a full extraction instead.
            # Entries equal to "#" are treated as placeholders and skipped.
            partial = [atom.strip() for atom in partial.split(" ") if atom != "#"]
            archive.extract(str(archive_path), str(target), partial)
        except NameError:
            archive.extract(str(archive_path), str(target))
def select_pkgs(self):
    """Collect installed packages whose repository version differs from the
    installed one (candidates for upgrade) and remember packages that no
    longer exist in any repository."""
    for pkg in self.instdb.get_all_packages():
        self.repo, self.category, self.name, self.version, self.slot = pkg
        # Query the repository database only once per package; the original
        # code issued the same find_package() twice and could append the same
        # missing package to notfound_pkg two times.
        repository_items = self.repodb.find_package(package_name=self.name, \
                package_category=self.category)
        if not repository_items:
            # Installed, but absent from every repository: remember it once.
            if not (self.category, self.name) in self.notfound_pkg:
                self.notfound_pkg.append((self.category, self.name))
            continue
        # Collect available package versions keyed by slot value.
        available_versions = {}
        for item in repository_items:
            available_versions.setdefault(item.slot, []).append(item.version)
        # Compare the installed version against the best candidate in its slot.
        for item in repository_items:
            if item.slot == self.slot:
                best_version = utils.best_version(available_versions[item.slot])
                if utils.vercmp(best_version, self.version) != 0:
                    self.packages.append(os.path.join(self.category, self.name) \
                            + ":" + self.slot)
                break
    if self.notfound_pkg:
        out.write("%s: the following packages were installed but they could not be found in the database:\n\n" \
                % out.color("WARNING", "brightyellow"))
        for no_category, no_name in self.notfound_pkg:
            out.notify("%s/%s" % (no_category, no_name))
        out.write("\n")
def standard_extract():
    """ Runs standard extract procedure """
    # Destination is the directory that contains build_dir.
    target = os.path.dirname(build_dir)
    for url in extract_plan:
        out.write(" %s %s\n" % (out.color(">", "green"), \
                os.path.join(cfg.LPMSConfig().src_cache, \
                os.path.basename(url))))
        archive_path = os.path.join(cfg.LPMSConfig().src_cache, \
                os.path.basename(url))
        try:
            # Deliberate NameError probe: 'partial' comes from the enclosing
            # spec scope. If the spec defined it, extract only the listed
            # members ("#" entries are placeholders); otherwise the NameError
            # drops us into the full-extraction branch below.
            partial = [ atom.strip() for atom in partial.split(" ") if atom != "#" ]
            archive.extract(str(archive_path), str(target), partial)
        except NameError:
            archive.extract(str(archive_path), str(target))
def mangle_spec(self):
    # TODO: Use more convenient exceptions for error states.
    '''Compiles the spec file and imports its content to lpms' build environment.'''
    # Validate the spec file before importing it.
    if not os.path.isfile(self.internals.env.spec_file):
        out.error("%s could not be found!" % self.internals.env.spec_file)
        raise BuildError
    elif not os.access(self.internals.env.spec_file, os.R_OK):
        out.error("%s is not readable!" % self.internals.env.spec_file)
        raise BuildError
    # TODO: Use a more proper name for import_script
    if not self.internals.import_script(self.internals.env.spec_file):
        # Fixed typo in the user-facing message: "occured" -> "occurred".
        out.error("an error occurred while processing the spec: %s" \
                % out.color(self.internals.env.spec_file, "red"))
        # TODO: Here, show package maintainer and bugs_to
        out.error(
            "please report the above error messages to the package maintainer."
        )
        raise BuildError
def select_pkgs(self):
    """Find installed packages with a differing repository version (upgrade
    candidates) and record those missing from the repository database."""
    for pkg in self.instdb.get_all_packages():
        self.repo, self.category, self.name, self.version, self.slot = pkg
        # Single repository lookup per package. The original code queried
        # find_package() twice and the second miss path appended to
        # notfound_pkg unconditionally, producing duplicate entries.
        repository_items = self.repodb.find_package(package_name=self.name, \
                package_category=self.category)
        if not repository_items:
            # Package was installed from outside the known repositories.
            if not (self.category, self.name) in self.notfound_pkg:
                self.notfound_pkg.append((self.category, self.name))
            continue
        # Group the available versions by slot.
        available_versions = {}
        for item in repository_items:
            available_versions.setdefault(item.slot, []).append(item.version)
        # Compare installed vs best available version within the same slot.
        for item in repository_items:
            if item.slot == self.slot:
                best_version = utils.best_version(available_versions[item.slot])
                if utils.vercmp(best_version, self.version) != 0:
                    self.packages.append(os.path.join(self.category, self.name)+":"+self.slot)
                break
    if self.notfound_pkg:
        out.write("%s: the following packages were installed but they could not be found in the database:\n\n" % \
                out.color("WARNING", "brightyellow"))
        for no_category, no_name in self.notfound_pkg:
            out.notify("%s/%s" % (no_category, no_name))
        out.write("\n")
def extract_lzma(self, path):
    """Extract a .lzma/.xz tarball into self.location using the tar binary."""
    if not utils.executable_path("tar"):
        lpms.terminate("please check app-arch/tar package")
    current = os.getcwd()
    os.chdir(self.location)
    # BUGFIX: the extraction options must be passed as "-xvf". The original
    # "tar --lzma xvf <path>" treated the bare word "xvf" as a file operand,
    # because old-style bundled options are only recognized as tar's FIRST
    # argument (which "--lzma" already occupies here).
    cmd = utils.executable_path("tar") + " --lzma -xvf %s" % path
    if path.endswith(".xz"):
        # "Jxvf" is tar's first argument in this form, so old-style bundled
        # options apply and it works as-is.
        cmd = utils.executable_path("tar") + " Jxvf %s" % path
    result = subprocess.Popen(cmd, shell=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output, err = result.communicate()
    if result.returncode != 0:
        out.error("could not extract: %s" % out.color(path, "red"))
        print(output + err)
        # Restore the working directory before bailing out.
        os.chdir(current)
        lpms.terminate()
    os.chdir(current)
def update_repository(self, repo_name):
    """Rebuild the database records of an entire repository from disk."""
    # Directories that never contain package categories.
    exceptions = ['scripts', 'licenses', 'news', 'info', 'libraries', '.git', '.svn']
    # Start from a clean slate: drop every record belonging to this repo.
    self.repodb.database.delete_repository(repo_name, commit=True)
    repo_path = os.path.join(cst.repos, repo_name)
    for category in os.listdir(repo_path):
        target_directory = os.path.join(repo_path, category)
        # Skip bookkeeping entries and anything that is not a directory.
        if category in exceptions or not os.path.isdir(target_directory):
            continue
        packages = os.listdir(target_directory)
        # Category metadata file is not a package.
        if "info.xml" in packages:
            packages.remove("info.xml")
        if lpms.getopt("--verbose"):
            out.notify("%s" % out.color(category, "brightwhite"))
        for my_pkg in packages:
            try:
                self.update_package(repo_path, category, my_pkg)
            except IntegrityError:
                # Skip packages that violate database integrity constraints.
                continue
def run_extract(self):
    # if the environment has no extract_plan variable, doesn't run extract function
    # NOTE: when extract_nevertheless is set (workaround for #208, see
    # perform_operation), the extract_plan check is deliberately skipped.
    if not hasattr(self.environment, "extract_nevertheless") or not self.environment.extract_nevertheless:
        if not hasattr(self.environment, "extract_plan"): return
    target = os.path.dirname(self.environment.build_dir)
    # Marker file recording that the archives were already extracted.
    extracted_file = os.path.join(os.path.dirname(target), ".extracted")
    if os.path.isfile(extracted_file):
        if self.environment.force_extract:
            # --force-extract: drop the marker and extract again.
            shelltools.remove_file(extracted_file)
        else:
            out.write("%s %s/%s-%s had been already extracted.\n" % (out.color(">>", "brightyellow"), \
                self.environment.category, self.environment.name, self.environment.version))
            return True
    utils.xterm_title("lpms: extracting %s/%s/%s-%s" % (self.environment.repo, self.environment.category, \
            self.environment.name, self.environment.version))
    out.notify("extracting archive(s) to %s" % os.path.dirname(self.environment.build_dir))
    # now, extract the archives
    self.run_stage("extract")
    out.notify("%s has been extracted." % self.environment.fullname)
    # Leave the marker so a later run can skip extraction.
    shelltools.touch(extracted_file)
    # When the user asked only for the extract stage, stop here.
    if self.environment.stage == "extract":
        lpms.terminate()
def perform_operation(self):
    '''Handles command line arguments and drive building operation'''
    # Returns (True, build_environment) on success; terminates the process
    # on download failure.
    self.set_environment_variables()
    # Check /proc and /dev. These filesystems must be mounted
    # to perform operations properly.
    for item in ('/proc', '/dev'):
        if not os.path.ismount(item):
            out.warn("%s is not mounted. You have been warned." % item)
    # clean source code extraction directory if it is wanted
    # TODO: check the following condition when resume functionality is back
    if self.instruction.clean_tmp:
        if self.instruction.resume_build is not None:
            out.warn("clean-tmp is disabled because of resume-build is enabled.")
        else:
            self.clean_temporary_directory()
    # we want to save starting time of the build operation to calculate building time
    # The starting point of logging
    lpms.logger.info("starting build (%s/%s) %s/%s/%s-%s" % (
        self.instruction.index,
        self.instruction.count,
        self.internals.env.repo,
        self.internals.env.category,
        self.internals.env.name,
        self.internals.env.version
        )
    )
    out.normal("(%s/%s) building %s/%s from %s" % (
        self.instruction.index,
        self.instruction.count,
        out.color(self.internals.env.category, "green"),
        out.color(self.internals.env.name+"-"+self.internals.env.version, "green"),
        self.internals.env.repo
        )
    )
    if self.internals.env.sandbox:
        lpms.logger.info("sandbox enabled build")
        out.notify("sandbox is enabled")
    else:
        lpms.logger.warning("sandbox disabled build")
        out.warn_notify("sandbox is disabled")
    # fetch packages which are in download_plan list
    if self.internals.env.src_url is not None:
        # preprocess url shortcuts such as $name, $version and etc
        self.parse_src_url_field()
        # if the package is revisioned, override build_dir and install_dir.
        # remove revision number from these variables.
        if self.revisioned:
            for variable in ("build_dir", "install_dir"):
                # Strip the revision suffix from the directory basename.
                new_variable = "".join(os.path.basename(getattr(self.internals.env, \
                        variable)).split(self.revision))
                setattr(self.internals.env, variable, \
                        os.path.join(os.path.dirname(getattr(self.internals.env, \
                        variable)), new_variable))
        utils.xterm_title("lpms: downloading %s/%s/%s-%s" % (
            self.internals.env.repo,
            self.internals.env.category,
            self.internals.env.name,
            self.internals.env.version
            )
        )
        self.prepare_download_plan(self.internals.env.applied_options)
        if not fetcher.URLFetcher().run(self.download_plan):
            lpms.terminate("\nplease check the spec")
    if self.internals.env.applied_options is not None and self.internals.env.applied_options:
        out.notify("applied options: %s" % " ".join(self.internals.env.applied_options))
    if self.internals.env.src_url is None and not self.extract_plan \
            and hasattr(self.internals.env, "extract"):
        # Workaround for #208
        # Spec defines an extract() stage but there is nothing to download or
        # extract; force run_extract to execute anyway.
        self.internals.env.extract_nevertheless = True
    # Remove previous sandbox log if it is exist.
    if os.path.exists(cst.sandbox_log):
        shelltools.remove_file(cst.sandbox_log)
    # Enter the building directory
    os.chdir(self.internals.env.build_dir)
    # Manage ccache
    if hasattr(self.config, "ccache") and self.config.ccache:
        if utils.drive_ccache(config=self.config):
            out.notify("ccache is enabled.")
        else:
            out.warn("ccache could not be enabled. so you should check dev-util/ccache")
    # Record the build start time for duration reporting.
    self.internals.env.start_time = time.time()
    return True, self.internals.env