def clean_obsolete_content(self):
    '''Cleans obsolete content which belongs to previous installs'''
    if self.instdb.find_package(package_name=self.environment.name, \
            package_category=self.environment.category, \
            package_slot=self.environment.slot):
        obsolete = self.compare_different_versions()
        if not obsolete:
            return
        out.normal("cleaning obsolete content")
        directories = []
        for item in obsolete:
            target = os.path.join(self.environment.real_root, item[0][1:])
            if not os.path.exists(target):
                continue
            if os.path.islink(target):
                os.unlink(target)
            elif os.path.isfile(target):
                shelltools.remove_file(target)
            else:
                directories.append(target)
        # Reverse so that children are handled before their parents
        directories.reverse()
        for directory in directories:
            if not os.listdir(directory):
                # Remove the directory if it contains nothing
                shelltools.remove_dir(directory)
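# A minimal standalone sketch of the pruning idea used in
# clean_obsolete_content(): reversing the collected directory list guarantees
# that children are checked before their parents, so a directory emptied in an
# earlier step can itself be removed in a later one. prune_empty_dirs() is a
# hypothetical helper, not part of lpms.
import os

def prune_empty_dirs(candidates):
    '''Removes empty directories, deepest first.'''
    for directory in reversed(list(candidates)):
        if os.path.isdir(directory) and not os.listdir(directory):
            os.rmdir(directory)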
def run_extract(self):
    # If the environment has no extract_plan variable, don't run the extract function
    if not hasattr(self.environment, "extract_nevertheless") or \
            not self.environment.extract_nevertheless:
        if not hasattr(self.environment, "extract_plan"):
            return
    target = os.path.dirname(self.environment.build_dir)
    extracted_file = os.path.join(os.path.dirname(target), ".extracted")
    if os.path.isfile(extracted_file):
        if self.environment.force_extract:
            shelltools.remove_file(extracted_file)
        else:
            out.write("%s %s/%s-%s has already been extracted.\n" % (out.color(">>", "brightyellow"), \
                    self.environment.category, self.environment.name, self.environment.version))
            return True
    utils.xterm_title("lpms: extracting %s/%s/%s-%s" % (self.environment.repo, self.environment.category, \
            self.environment.name, self.environment.version))
    out.notify("extracting archive(s) to %s" % os.path.dirname(self.environment.build_dir))
    # Now, extract the archives
    self.run_stage("extract")
    out.notify("%s has been extracted." % self.environment.fullname)
    shelltools.touch(extracted_file)
    if self.environment.stage == "extract":
        lpms.terminate()
def perform_operation(self):
    utils.xterm_title("(%s/%s) lpms: merging %s/%s-%s from %s" % (self.environment.index, \
            self.environment.count, self.environment.category, self.environment.name, \
            self.environment.version, self.environment.repo))
    # Create the $info_file_name.gz archive and remove the plain info file
    self.create_info_archive()
    # Merge the package
    self.merge_package()
    # Clean up the previous version if it exists
    self.clean_obsolete_content()
    # Write to the database
    self.write_db()
    # Create or update /usr/share/info/dir
    self.update_info_index()
    if self.backup:
        out.write("%s%s configuration file(s) changed. Use %s to fix these files.\n" % (out.color(" > ", "green"), \
                len(self.backup), out.color("merge-conf", "red")))
    if shelltools.is_exists(cst.lock_file):
        shelltools.remove_file(cst.lock_file)
    return True, self.environment
def linstall(parameters='', arg='install'):
    '''Runs the standard installation command with the given parameters'''
    args = 'make prefix=%(prefix)s/%(defaultprefix)s \
            datadir=%(prefix)s/%(data)s \
            infodir=%(prefix)s/%(info)s \
            localstatedir=%(prefix)s/%(localstate)s \
            mandir=%(prefix)s/%(man)s \
            sysconfdir=%(prefix)s/%(conf)s \
            %(parameters)s \
            %(argument)s' % {
                'prefix': install_dir,
                'defaultprefix': cst.prefix,
                'man': cst.man,
                'info': cst.info,
                'localstate': cst.localstate,
                'conf': cst.conf,
                'data': cst.data,
                'parameters': parameters,
                'argument': arg,
            }
    # Collapse the whitespace introduced by the line continuations
    args = " ".join([member for member in args.split(" ") if member != ""])
    out.notify("running %s" % args)
    if not system(args):
        raise BuildError("linstall failed.")
    else:
        # Remove /usr/share/info/dir if it exists
        dir_file = "%s/usr/share/info/dir" % install_dir
        if os.path.isfile(dir_file):
            shelltools.remove_file(dir_file)
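# For illustration, assuming install_dir="/tmp/img" and the usual FHS-style
# constants, the template in linstall() expands to something like the string
# below; the join/split squeeze then collapses the continuation padding into
# single spaces. The values here are assumptions, not lpms defaults.
demo = ('make prefix=/tmp/img/usr     datadir=/tmp/img/usr/share '
        'infodir=/tmp/img/usr/share/info     sysconfdir=/tmp/img/etc   install')
print(" ".join([member for member in demo.split(" ") if member != ""]))
# -> make prefix=/tmp/img/usr datadir=/tmp/img/usr/share infodir=/tmp/img/usr/share/info sysconfdir=/tmp/img/etc install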
def run_post_install(self):
    if self.environment.no_configure or self.environment.real_root != cst.root:
        out.warn_notify("skipping post_install function...")
        pkg_data = (self.environment.repo, self.environment.category, \
                self.environment.name, self.environment.version)
        pending_file = os.path.join(self.environment.real_root, cst.configure_pending_file)
        shelltools.makedirs(os.path.dirname(pending_file))
        if not os.path.exists(pending_file):
            with open(pending_file, "wb") as _data:
                pickle.dump([pkg_data], _data)
        else:
            data = []
            with open(pending_file, "rb") as _data:
                pending_packages = pickle.load(_data)
            if not pkg_data in pending_packages:
                data.append(pkg_data)
            data.extend(pending_packages)
            shelltools.remove_file(pending_file)
            with open(pending_file, "wb") as _data:
                pickle.dump(data, _data)
        return
    # The sandbox must be disabled
    self.environment.sandbox = False
    self.run_stage("post_install")
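# A self-contained sketch of the pending-file pattern used by
# run_post_install(): load the pickled list if the file exists, prepend the
# new entry when it is missing, and rewrite the file. add_pending() and its
# arguments are hypothetical.
import os
import pickle

def add_pending(pending_file, pkg_data):
    pending = []
    if os.path.exists(pending_file):
        with open(pending_file, "rb") as _data:
            pending = pickle.load(_data)
    if pkg_data not in pending:
        pending.insert(0, pkg_data)
    with open(pending_file, "wb") as _data:
        pickle.dump(pending, _data)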
def calculate_hashes(self):
    def write_archive_hash(urls, file_name):
        name, version = utils.parse_pkgname(file_name)
        for url in utils.parse_url_tag(urls, name, version):
            archive_name = os.path.basename(url)
            archive_path = os.path.join(conf.LPMSConfig().src_cache, archive_name)
            if not os.access(archive_path, os.F_OK):
                fetcher.URLFetcher().run([url])
            sha1 = utils.sha1sum(archive_path)
            shelltools.echo("hashes", "%s %s %s" % (archive_name, sha1, \
                    os.path.getsize(archive_path)))

    # Note the trailing comma: ('hashes') without it is just a string
    excepts = ('hashes',)
    shelltools.remove_file("hashes")
    if len(self.files) == 0:
        self.files = os.listdir(self.current_dir)
    for f in self.files:
        if f in excepts:
            continue
        if f.endswith(cst.spec_suffix):
            out.normal("processing %s" % f)
            shelltools.echo("hashes", "%s %s %s" % (f, utils.sha1sum(f), os.path.getsize(f)))
            content = utils.import_script(f)
            metadata = utils.metadata_parser(content["metadata"])
            if "src_url" in metadata:
                write_archive_hash(metadata["src_url"], f)
            elif "src_url" in content:
                write_archive_hash(content["src_url"], f)
            else:
                lpms.terminate("src_url was not defined in spec")
            del content
        elif os.path.isdir(f):
            for l in os.listdir(os.path.join(self.current_dir, f)):
                path = os.path.join(f, l)
                out.normal("processing %s" % path)
                shelltools.echo("hashes", "%s %s %s" % (path, utils.sha1sum(path), \
                        os.path.getsize(path)))
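# Each "hashes" line written above has the form "<name> <sha1> <size>".
# A sketch of producing one such line with only the standard library,
# assuming utils.sha1sum() is a plain SHA-1 over the file contents:
import hashlib
import os

def hash_line(path):
    sha1 = hashlib.sha1()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            sha1.update(chunk)
    return "%s %s %s" % (os.path.basename(path), sha1.hexdigest(),
                         os.path.getsize(path))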
def main(pkgname, real_root):
    instdb = dbapi.InstallDB()
    filesdb = dbapi.FilesDB()
    # Start the remove operation
    repo, category, name, version = pkgname
    # Initialize the Remove class
    rmpkg = Remove(repo, category, name, version, real_root)
    lpms.logger.info("removing %s/%s/%s-%s from %s" % \
            (repo, category, name, version, rmpkg.real_root))
    out.normal("removing %s/%s/%s-%s from %s" % \
            (repo, category, name, version, rmpkg.real_root))
    # Remove the package content
    rmpkg.remove_content()
    # Remove entries from the database
    package_id = instdb.find_package(package_repo=repo, package_category=category, \
            package_name=name, package_version=version).get(0).id
    instdb.database.delete_build_info(package_id)
    instdb.delete_conditional_versions(package_id=package_id)
    instdb.delete_inline_options(package_id=package_id)
    instdb.delete_package(package_repo=repo, package_category=category, \
            package_name=name, package_version=version, commit=True)
    # Remove paths from the files table
    filesdb.delete_item_by_pkgdata(category, name, version, commit=True)
    # Unlock
    if shelltools.is_exists(cst.lock_file):
        shelltools.remove_file(cst.lock_file)
def create_info_archive(self):
    info_path = os.path.join(self.environment.install_dir, cst.info)
    if not os.path.isdir(info_path):
        return
    for item in os.listdir(info_path):
        info_file = os.path.join(info_path, item)
        with open(info_file, 'rb') as content:
            with gzip.open(info_file + ".gz", 'wb') as output:
                output.writelines(content)
        self.info_files.append(os.path.join(self.environment.real_root, \
                cst.info, item) + ".gz")
        shelltools.remove_file(info_file)
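# gzip.open() returns a file-like object, and a file opened in 'rb' mode
# iterates over its raw lines, so the writelines() pairing above streams the
# info page into the archive without loading it whole. An equivalent one-shot
# sketch using shutil:
import gzip
import shutil

def gzip_file(path):
    with open(path, 'rb') as src, gzip.open(path + ".gz", 'wb') as dst:
        shutil.copyfileobj(src, dst)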
def makesym(source, target, ignore_fix_target=False):
    if not ignore_fix_target:
        target = fix_target_path(target)
    if len(target.split("/")) > 1:
        shelltools.makedirs(os.path.dirname(target))
    # Remove the target if it is already a symlink
    # TODO: Should we remove the file if it is a regular file or directory?
    if os.path.islink(target):
        shelltools.remove_file(target)
    shelltools.make_symlink(source, target)
def raw_install(parameters='', arg='install'):
    '''Runs the installation command with only the given parameters'''
    out.notify("running make %s %s" % (parameters, arg))
    if not system("make %s %s" % (parameters, arg)):
        raise BuildError("raw_install failed.")
    else:
        # Remove /usr/share/info/dir if it exists
        dir_file = "%s/usr/share/info/dir" % install_dir
        if os.path.isfile(dir_file):
            shelltools.remove_file(dir_file)
def db_backup():
    import time
    from lpms.shelltools import copy, remove_file

    # Remove the previous database backup
    dirname = os.path.join(cst.root, cst.db_path)
    for _file in os.listdir(dirname):
        if _file.startswith("repositorydb") and _file.count(".") == 2:
            remove_file(os.path.join(dirname, _file))
    # Create a backup with a UNIX timestamp suffix
    timestamp = int(time.time())
    repositorydb = os.path.join(cst.root, cst.db_path, cst.repositorydb) + cst.db_prefix
    copy(repositorydb, repositorydb + ".%s" % timestamp)
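# The backup name is the live database file plus a UNIX-timestamp suffix,
# e.g. "repositorydb.db.1718000000" (assuming the base name resolves to
# repositorydb.db); the _file.count(".") == 2 test above is what recognizes
# such older backups and removes them on the next run.
import time
print("repositorydb.db" + ".%s" % int(time.time()))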
def mark_as_read(self, item):
    if not os.path.isfile(self.news_read_file):
        with open(self.news_read_file, "wb") as raw_data:
            pickle.dump([item], raw_data)
    else:
        with open(self.news_read_file, "rb") as raw_data:
            data = pickle.load(raw_data)
        if not item in data:
            data.append(item)
        shelltools.remove_file(self.news_read_file)
        with open(self.news_read_file, "wb") as raw_data:
            pickle.dump(data, raw_data)
def system(*args, **kwargs):
    result = shelltools.system(" ".join(args), stage=current_stage, \
            sandbox=False if current_stage in cst.sandbox_exception_stages else None)
    if isinstance(result, bool):
        return result
    if len(result) == 2:
        if result[1]:
            logfile = "%s/build.log" % dirname(dirname(build_dir))
            if isfile(logfile):
                shelltools.remove_file(logfile)
            echo(result[1], logfile)
            out.normal("for detailed output, view %s" % logfile)
        return result[0]
    return result
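# shelltools.system() evidently returns either a bare bool or an
# (exit_status, output) pair; system() above normalizes both shapes.
# A caller-side sketch of the same normalization, with a hypothetical runner:
def run(cmd, runner):
    result = runner(cmd)
    if isinstance(result, bool):
        return result
    status, output = result
    if output:
        print("command output:\n%s" % output)
    return status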
def remove_content(self):
    dirs = []
    for _file in self.filesdb.get_paths_by_package(self.name, \
            category=self.category, version=self.version):
        _file = _file[0]
        target = os.path.join(self.real_root, _file[1:])
        if os.path.dirname(_file[1:]) == cst.info:
            utils.update_info_index(target, dir_path=os.path.join(self.real_root, \
                    cst.info, "dir"), delete=True)
        if os.path.islink(target):
            os.unlink(target)
        elif os.path.isfile(target):
            shelltools.remove_file(target)
        else:
            dirs.append(target)
    # Remove child directories before their parents
    dirs.reverse()
    for target in dirs:
        if os.path.isdir(target) and not os.listdir(target):
            shelltools.remove_dir(target)
def configure_pending(packages, instruct):
    '''Configures packages that were not configured after the merge operation'''
    if not utils.check_root(msg=False):
        lpms.terminate("you must be root.")
    root = instruct["real_root"]
    if not root:
        root = cst.root
        instruct["real_root"] = root
    pending_file = os.path.join(root, cst.configure_pending_file)
    failed = []
    if not os.access(pending_file, os.F_OK):
        lpms.terminate("there are no pending packages.")
    with open(pending_file, 'rb') as data:
        pending_packages = pickle.load(data)
    for package in pending_packages:
        repo, category, name, version = package
        spec = os.path.join(cst.repos, repo, category, name) + "/" + name + "-" + version + ".py"
        out.normal("configuring %s/%s/%s-%s" % (repo, category, name, version))
        if not os.access(spec, os.R_OK):
            out.warn("%s does not exist or is not readable. skipping..." % spec)
            failed.append(package)
            continue
        if not initpreter.InitializeInterpreter(package, instruct, \
                ['post_install']).initialize():
            out.warn("%s/%s/%s-%s could not be configured." % (repo, category, name, version))
            failed.append(package)
    shelltools.remove_file(pending_file)
    if failed:
        with open(pending_file, 'wb') as data:
            pickle.dump(failed, data)
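# The spec path built in configure_pending() follows the layout
# "<repos>/<repo>/<category>/<name>/<name>-<version>.py". With hypothetical
# values (the repos root below is an assumption):
import os
repo, category, name, version = "main", "app-editors", "nano", "2.2.6"
spec = os.path.join("/var/lib/lpms/repos", repo, category, name,
                    "%s-%s.py" % (name, version))
print(spec)  # /var/lib/lpms/repos/main/app-editors/nano/nano-2.2.6.py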
def clean_temporary_directory(self):
    '''Cleans the temporary directory which contains the source code and build environment.'''
    def clean(target):
        for item in shelltools.listdir(target):
            path = os.path.join(target, item)
            if os.path.isdir(path):
                shelltools.remove_dir(path)
            else:
                shelltools.remove_file(path)

    # Don't remove these directories, which are located in work_dir
    exceptions = ('install', 'source')
    if shelltools.listdir(self.internals.env.build_dir):
        clean(self.internals.env.build_dir)
    if shelltools.listdir(self.internals.env.install_dir):
        clean(self.internals.env.install_dir)
    # Now clean work_dir
    for item in shelltools.listdir(self.internals.env.work_dir):
        if not item in exceptions:
            path = os.path.join(self.internals.env.work_dir, item)
            if os.path.isdir(path):
                shelltools.remove_dir(path)
            else:
                shelltools.remove_file(path)
from lpms.db import dbapi

filesdb = dbapi.FilesDB()
installdb = dbapi.InstallDB()
cmdline = sys.argv

if "--help" in cmdline:
    out.normal("A tool to create the reverse depends database from scratch.")
    out.write("To hide the output, use the '--quiet' parameter.\n")

if not "--quiet" in cmdline:
    out.normal("creating reverse depends database...")

if os.path.exists("/var/db/lpms/reverse_depends.db"):
    shelltools.remove_file("/var/db/lpms/reverse_depends.db")
reversedb = dbapi.ReverseDependsDB()

for package in installdb.get_all_names():
    reverse_repo, reverse_category, reverse_name = package
    versions = []
    map(lambda ver: versions.extend(ver), \
            installdb.get_version(reverse_name, pkg_category=reverse_category).values())
    for reverse_version in versions:
        depends = installdb.get_depends(reverse_repo, reverse_category, \
                reverse_name, reverse_version)
        if not depends:
            print package, depends
            continue
        for dep in depends["runtime"]:
            if len(dep) != 5:
                continue
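# The map/extend idiom above flattens a {slot: [versions, ...]} dict into one
# list. An equivalent comprehension, shown with hypothetical data (ordering
# follows the dict's iteration order):
versions_by_slot = {"0": ["1.0", "1.2"], "2": ["2.1"]}
versions = [version for slot_versions in versions_by_slot.values()
            for version in slot_versions]
print(versions)  # e.g. ['1.0', '1.2', '2.1']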
from lpms.db import dbapi

filesdb = dbapi.FilesDB()
installdb = dbapi.InstallDB()
cmdline = sys.argv

if "--help" in cmdline:
    out.normal("A tool to create the file relations database from scratch.")
    out.write("To hide the output, use the '--quiet' parameter.\n")

if not "--quiet" in cmdline:
    out.normal("creating file relations database...")

if os.path.exists("/var/db/lpms/file_relations.db"):
    shelltools.remove_file("/var/db/lpms/file_relations.db")
relationsdb = dbapi.FileRelationsDB()

for package in installdb.get_all_names():
    repo, category, name = package
    versions = []
    map(lambda ver: versions.extend(ver), \
            installdb.get_version(name, pkg_category=category).values())
    for version in versions:
        content = filesdb.list_files(category, name, version)
        for file_path in content["file"]:
            if os.path.exists(file_path) and os.access(file_path, os.X_OK):
                if utils.get_mimetype(file_path) in ('application/x-executable', \
                        'application/x-archive', 'application/x-sharedlib'):
                    if not "--quiet" in cmdline:
def merge_package(self):
    '''Moves files to the target destination in the safest way.'''
    def get_perms(path):
        '''Gets the permissions of the given path; it may be a file or a directory'''
        return {"uid": utils.get_uid(path),
                "gid": utils.get_gid(path),
                "mod": utils.get_mod(path)
        }
    out.normal("%s/%s/%s-%s:%s is merging to %s" % (self.environment.repo, self.environment.category, \
            self.environment.name, self.environment.version, self.environment.slot, \
            self.environment.real_root))
    # Remove the files db entries for this package:slot if they exist
    self.filesdb.delete_item_by_pkgdata(self.environment.category, self.environment.name, \
            self.environment.previous_version, commit=True)
    # Remove the file_relations db entries for this package:slot if they exist
    self.file_relationsdb.delete_item_by_pkgdata(self.environment.category, \
            self.environment.name, self.environment.previous_version, commit=True)
    # Merge the package, now
    walk_iter = os.walk(self.environment.install_dir, followlinks=True)
    while True:
        try:
            parent, directories, files = next(walk_iter)
            # TODO: Check the target path's permissions for writing or reading
            # Remove install_dir from parent to get the real parent path
            pruned_parent = parent.replace(self.environment.install_dir, "")
            # Create directories
            for directory in directories:
                source = os.path.join(parent, directory)
                target = os.path.join(self.environment.real_root, pruned_parent, directory)
                real_target = "/".join([pruned_parent, directory])
                if self.is_parent_symlink(target):
                    break
                if os.path.islink(source):
                    self.symlinks.append(target + "/")
                    realpath = os.path.realpath(source)
                    if os.path.islink(target):
                        shelltools.remove_file(target)
                    # Create the real directory
                    if len(realpath.split(self.environment.install_dir)) > 1:
                        realpath = realpath.split(self.environment.install_dir)[1][1:]
                    shelltools.makedirs(os.path.join(self.environment.real_root, realpath))
                    # Make the symlink
                    if os.path.isdir(target):
                        shelltools.remove_dir(target)
                    elif os.path.isfile(target):
                        shelltools.remove_file(target)
                    shelltools.make_symlink(os.readlink(source), target)
                else:
                    if os.path.isfile(target):
                        # TODO: Rename this file and warn the user
                        shelltools.remove_file(target)
                    shelltools.makedirs(target)
                # Get permissions
                perms = get_perms(source)
                # If the path is a symlink, skip the permission handling
                if not os.path.islink(source):
                    # Set permissions
                    shelltools.set_id(target, perms["uid"], perms["gid"])
                    shelltools.set_mod(target, perms["mod"])
                    # TODO: Common items?
                    # Add the item to filesdb
                    self.append_filesdb("dir", real_target, perms)
                else:
                    # Add the item to filesdb
                    self.append_filesdb("link", real_target, perms, \
                            realpath=os.path.realpath(source))

            # Merge regular files to the target.
            # First, handle reserved files
            reserve_files = []
            if self.environment.reserve_files:
                if isinstance(self.environment.reserve_files, basestring):
                    reserve_files.extend([f_item for f_item in \
                            self.environment.reserve_files.split(" ") if f_item != ""])
                elif isinstance(self.environment.reserve_files, (list, tuple)):
                    reserve_files.extend(self.environment.reserve_files)

            if os.path.isfile(os.path.join(cst.user_dir, cst.protect_file)):
                with open(os.path.join(cst.user_dir, cst.protect_file)) as data:
                    for rf in data.readlines():
                        if not rf.startswith("#"):
                            reserve_files.append(rf.strip())

            # Here we are starting to merge
            for _file in files:
                source = os.path.join(parent, _file)
                target = os.path.join(self.environment.real_root, pruned_parent, _file)
                real_target = "/".join([pruned_parent, _file])
                if self.is_parent_symlink(target):
                    break
                # Keep file relations for handling reverse dependencies later
                if os.path.exists(source) and os.access(source, os.X_OK):
                    if utils.get_mimetype(source) in self.binary_filetypes:
                        self.file_relationsdb.append_query((
                            self.environment.repo,
                            self.environment.category,
                            self.environment.name,
                            self.environment.version,
                            target,
                            file_relations.get_depends(source)))
                # Strip binary files to keep them smaller
                if self.strip_debug_symbols and utils.get_mimetype(source) in self.binary_filetypes:
                    utils.run_strip(source)
                if self.environment.ignore_reserve_files:
                    reserve_files = []
                    self.environment.reserve_files = True

                def add_file_item():
                    # Prevent code duplication
                    if not os.path.islink(target):
                        shelltools.set_id(target, perms["uid"], perms["gid"])
                        shelltools.set_mod(target, perms["mod"])
                        self.append_filesdb("file", real_target, perms, \
                                sha1sum=utils.sha1sum(target), \
                                size=utils.get_size(source, dec=True))
                    else:
                        self.append_filesdb("link", real_target, perms, \
                                realpath=os.path.realpath(source))

                if self.environment.reserve_files is not False:
                    conf_file = os.path.join(pruned_parent, _file)
                    isconf = (_file.endswith(".conf") or _file.endswith(".cfg"))

                    def is_reserve():
                        if self.environment.ignore_reserve_files:
                            return False
                        elif not conf_file in reserve_files:
                            return False
                        return True

                    if os.path.exists(target) and not is_reserve():
                        if pruned_parent[0:4] == "/etc" or isconf:
                            if os.path.isfile(conf_file) and \
                                    utils.sha1sum(source) != utils.sha1sum(conf_file):
                                self.append_merge_conf(conf_file)
                                target = target + ".lpms-backup"
                                self.backup.append(target)

                    if os.path.exists(target) and is_reserve():
                        # The file is reserved; add it to filesdb
                        add_file_item()
                        # We don't need the following operations
                        continue

                if os.path.islink(source):
                    sha1 = False
                    realpath = os.readlink(source)
                    if self.environment.install_dir in realpath:
                        realpath = realpath.split(self.environment.install_dir)[1]
                    if os.path.isdir(target):
                        shelltools.remove_dir(target)
                    elif os.path.isfile(target) or os.path.islink(target):
                        shelltools.remove_file(target)
                    shelltools.make_symlink(realpath, target)
                else:
                    sha1 = utils.sha1sum(source)
                    perms = get_perms(source)
                    shelltools.move(source, target)
                # Add it to filesdb
                add_file_item()
        except StopIteration:
            break

    self.file_relationsdb.insert_query(commit=True)
    self.filesdb.insert_query(commit=True)
    lpms.logger.info("%s/%s has been merged to %s." \
            % (self.environment.category, self.environment.fullname, \
            self.environment.real_root))
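# utils.get_uid/get_gid/get_mod are project helpers. A plausible stand-in for
# the perms dict built by get_perms() above, using only the standard library
# (an assumption, not the project's actual implementation):
import os
import stat

def get_perms(path):
    st = os.lstat(path)
    return {"uid": st.st_uid,
            "gid": st.st_gid,
            "mod": stat.S_IMODE(st.st_mode)}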
def perform_operation(self):
    '''Handles command line arguments and drives the build operation'''
    self.set_environment_variables()
    # Check /proc and /dev. These filesystems must be mounted
    # to perform operations properly.
    for item in ('/proc', '/dev'):
        if not os.path.ismount(item):
            out.warn("%s is not mounted. You have been warned." % item)
    # Clean the source code extraction directory if requested
    # TODO: check the following condition when resume functionality is back
    if self.instruction.clean_tmp:
        if self.instruction.resume_build is not None:
            out.warn("clean-tmp is disabled because resume-build is enabled.")
        else:
            self.clean_temporary_directory()
    # Save the starting time of the build operation to calculate the build time.
    # The starting point of logging
    lpms.logger.info("starting build (%s/%s) %s/%s/%s-%s" % (
        self.instruction.index,
        self.instruction.count,
        self.internals.env.repo,
        self.internals.env.category,
        self.internals.env.name,
        self.internals.env.version
        )
    )
    out.normal("(%s/%s) building %s/%s from %s" % (
        self.instruction.index,
        self.instruction.count,
        out.color(self.internals.env.category, "green"),
        out.color(self.internals.env.name + "-" + self.internals.env.version, "green"),
        self.internals.env.repo
        )
    )
    if self.internals.env.sandbox:
        lpms.logger.info("sandbox enabled build")
        out.notify("sandbox is enabled")
    else:
        lpms.logger.warning("sandbox disabled build")
        out.warn_notify("sandbox is disabled")
    # Fetch the packages in the download_plan list
    if self.internals.env.src_url is not None:
        # Preprocess url shortcuts such as $name, $version and so on
        self.parse_src_url_field()
        # If the package is revisioned, override build_dir and install_dir,
        # removing the revision number from these variables.
        if self.revisioned:
            for variable in ("build_dir", "install_dir"):
                new_variable = "".join(os.path.basename(getattr(self.internals.env, \
                        variable)).split(self.revision))
                setattr(self.internals.env, variable, \
                        os.path.join(os.path.dirname(getattr(self.internals.env, \
                        variable)), new_variable))
        utils.xterm_title("lpms: downloading %s/%s/%s-%s" % (
            self.internals.env.repo,
            self.internals.env.category,
            self.internals.env.name,
            self.internals.env.version
            )
        )
        self.prepare_download_plan(self.internals.env.applied_options)
        if not fetcher.URLFetcher().run(self.download_plan):
            lpms.terminate("\nplease check the spec")
    if self.internals.env.applied_options is not None and self.internals.env.applied_options:
        out.notify("applied options: %s" % " ".join(self.internals.env.applied_options))
    if self.internals.env.src_url is None and not self.extract_plan \
            and hasattr(self.internals.env, "extract"):
        # Workaround for #208
        self.internals.env.extract_nevertheless = True
    # Remove the previous sandbox log if it exists
    if os.path.exists(cst.sandbox_log):
        shelltools.remove_file(cst.sandbox_log)
    # Enter the build directory
    os.chdir(self.internals.env.build_dir)
    # Manage ccache
    if hasattr(self.config, "ccache") and self.config.ccache:
        if utils.drive_ccache(config=self.config):
            out.notify("ccache is enabled.")
        else:
            out.warn("ccache could not be enabled, so you should check dev-util/ccache")
    self.internals.env.start_time = time.time()
    return True, self.internals.env
def rmfile(target):
    paths = glob.glob(fix_target_path(target))
    if not paths:
        raise BuildError("no file matched pattern: %s" % fix_target_path(target))
    for path in paths:
        shelltools.remove_file(path)
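# Usage sketch for rmfile(): the pattern is rebased onto the install root by
# fix_target_path() and then expanded by glob, so a spec can remove several
# files at once. The pattern below is hypothetical:
#
#     rmfile("/usr/share/doc/%s-%s/INSTALL*" % (name, version))
#
# An empty match list raises BuildError instead of passing silently.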
    # Signature inferred from the body; the original def line is missing here.
    def has_path(self, category, name, version, path):
        if not self.is_installed(category, name, version):
            raise NotInstalled("%s/%s-%s is not installed." % (category, name, version))
        filesdb_cursor = self.cursor(category, name, version)
        return filesdb_cursor.has_path(path)

if lpms.getopt("--help"):
    print("A script to migrate the old xml based files database to the new sql based one")
    lpms.terminate()

installdb = dbapi.InstallDB()
fapi = FilesAPI()
shelltools.remove_file("/var/db/lpms/filesdb.db")
_filesdb = dbapi.FilesDB()
i = 0
for pkg in installdb.get_all_names():
    repo, category, name = pkg
    versions = installdb.get_version(name, repo_name=repo, pkg_category=category)
    for slot in versions:
        for version in versions[slot]:
            i += 1
            print("%d - %s/%s/%s-%s" % (i, repo, category, name, version))
            content = fapi.list_files(category, name, version)
            for path in content["dirs"]:
                if path == "" or not os.path.exists(path):