def calculate_hashes(self):
    """Regenerate the 'hashes' file for the current directory.

    For every spec file (and every file under subdirectories) writes a
    line of the form ``<name> <sha1> <size>`` to the ``hashes`` file,
    fetching any missing source archives into the source cache first.

    Terminates via lpms.terminate() if a spec defines no src_url.
    """
    def write_archive_hash(urls, file_name):
        # Hash every source archive referenced by the spec, downloading
        # it into the source cache if it is not already there.
        name, version = utils.parse_pkgname(file_name)
        for url in utils.parse_url_tag(urls, name, version):
            archive_name = os.path.basename(url)
            archive_path = os.path.join(conf.LPMSConfig().src_cache, archive_name)
            if not os.access(archive_path, os.F_OK):
                fetcher.URLFetcher().run([url])
            sha1 = utils.sha1sum(archive_path)
            shelltools.echo("hashes", "%s %s %s" % (archive_name, sha1,
                os.path.getsize(archive_path)))

    # BUG FIX: ('hashes') is just the string 'hashes' — "f in excepts"
    # then did a substring test, skipping any file named e.g. "h" or
    # "has".  A one-element tuple needs a trailing comma.
    excepts = ('hashes',)
    shelltools.remove_file("hashes")
    if not self.files:
        self.files = os.listdir(self.current_dir)
    for f in self.files:
        if f in excepts:
            continue
        if f.endswith(cst.spec_suffix):
            out.normal("processing %s" % f)
            shelltools.echo("hashes", "%s %s %s" % (f, utils.sha1sum(f),
                os.path.getsize(f)))
            content = utils.import_script(f)
            # Parse the metadata once instead of twice.
            metadata = utils.metadata_parser(content["metadata"])
            if "src_url" in metadata:
                write_archive_hash(metadata["src_url"], f)
            elif "src_url" in content:
                write_archive_hash(content["src_url"], f)
            else:
                lpms.terminate("src_url was not defined in spec")
            del content
        elif os.path.isdir(f):
            for l in os.listdir(os.path.join(self.current_dir, f)):
                path = os.path.join(f, l)
                out.normal("processing %s" % path)
                shelltools.echo("hashes", "%s %s %s" % (path, utils.sha1sum(path),
                    os.path.getsize(path)))
def calculate_hashes(self):
    """Rebuild the 'hashes' manifest for this directory.

    Emits ``<name> <sha1> <size>`` lines for each spec file and for
    files inside subdirectories; source archives referenced by a spec
    are fetched into the cache before being hashed.

    Terminates via lpms.terminate() when a spec lacks src_url.
    """
    def write_archive_hash(urls, file_name):
        # Download (if needed) and record every archive named by the spec.
        name, version = utils.parse_pkgname(file_name)
        for url in utils.parse_url_tag(urls, name, version):
            archive_name = os.path.basename(url)
            archive_path = os.path.join(conf.LPMSConfig().src_cache, archive_name)
            if not os.access(archive_path, os.F_OK):
                fetcher.URLFetcher().run([url])
            sha1 = utils.sha1sum(archive_path)
            shelltools.echo("hashes", "%s %s %s" % (archive_name, sha1,
                os.path.getsize(archive_path)))

    # BUG FIX: the original ('hashes') is a parenthesized string, so the
    # membership test degenerated to substring matching ("h" in "hashes"
    # is True).  Use a real one-element tuple.
    excepts = ('hashes',)
    shelltools.remove_file("hashes")
    if not self.files:
        self.files = os.listdir(self.current_dir)
    for f in self.files:
        if f in excepts:
            continue
        if f.endswith(cst.spec_suffix):
            out.normal("processing %s" % f)
            shelltools.echo("hashes", "%s %s %s" % (f, utils.sha1sum(f),
                os.path.getsize(f)))
            content = utils.import_script(f)
            # Avoid parsing the metadata block twice.
            metadata = utils.metadata_parser(content["metadata"])
            if "src_url" in metadata:
                write_archive_hash(metadata["src_url"], f)
            elif "src_url" in content:
                write_archive_hash(content["src_url"], f)
            else:
                lpms.terminate("src_url was not defined in spec")
            del content
        elif os.path.isdir(f):
            for l in os.listdir(os.path.join(self.current_dir, f)):
                path = os.path.join(f, l)
                out.normal("processing %s" % path)
                shelltools.echo("hashes", "%s %s %s" % (path, utils.sha1sum(path),
                    os.path.getsize(path)))
def write_archive_hash(urls, file_name):
    """Record ``<archive> <sha1> <size>`` lines in the 'hashes' file.

    Resolves the archive URLs for *file_name*'s package, fetching any
    archive that is not yet present in the source cache.
    """
    name, version = utils.parse_pkgname(file_name)
    for url in utils.parse_url_tag(urls, name, version):
        archive = os.path.basename(url)
        cached = os.path.join(conf.LPMSConfig().src_cache, archive)
        # Pull the archive into the cache when it is absent.
        if not os.access(cached, os.F_OK):
            fetcher.URLFetcher().run([url])
        checksum = utils.sha1sum(cached)
        size = os.path.getsize(cached)
        shelltools.echo("hashes", "%s %s %s" % (archive, checksum, size))
def add_file_item():
    # Prevent code duplication: record the current item in filesdb.
    # Symlinks are stored with their resolved path; regular files get
    # their ownership/mode applied and their hash and size recorded.
    if os.path.islink(target):
        self.append_filesdb("link", real_target, perms,
                            realpath=os.path.realpath(source))
    else:
        shelltools.set_id(target, perms["uid"], perms["gid"])
        shelltools.set_mod(target, perms["mod"])
        self.append_filesdb("file", real_target, perms,
                            sha1sum=utils.sha1sum(target),
                            size=utils.get_size(source, dec=True))
def write_archive_hash(urls, file_name):
    """Append one ``<archive> <sha1> <size>`` line per source archive.

    Archives missing from the source cache are downloaded before
    hashing.
    """
    pkg_name, pkg_version = utils.parse_pkgname(file_name)
    cache_dir = conf.LPMSConfig().src_cache
    for url in utils.parse_url_tag(urls, pkg_name, pkg_version):
        basename = os.path.basename(url)
        local_path = os.path.join(cache_dir, basename)
        if not os.access(local_path, os.F_OK):
            # Not cached yet — fetch it first.
            fetcher.URLFetcher().run([url])
        shelltools.echo("hashes", "%s %s %s" % (
            basename, utils.sha1sum(local_path), os.path.getsize(local_path)))
def merge_package(self):
    '''Moves files to the target destination in the most safest way.

    Walks self.environment.install_dir (the sandbox image) and replays
    its directories, symlinks and regular files under
    self.environment.real_root, while:
      * recording every merged item in filesdb,
      * keeping file relations of executable binaries for
        reverse-dependency handling,
      * backing up reserved/protected configuration files as
        "<target>.lpms-backup" instead of overwriting them.
    '''
    def get_perms(path):
        '''Get permissions of given path, it may be file or directory'''
        return {"uid": utils.get_uid(path),
                "gid": utils.get_gid(path),
                "mod": utils.get_mod(path)
                }

    out.normal("%s/%s/%s-%s:%s is merging to %s" % (
        self.environment.repo, self.environment.category,
        self.environment.name, self.environment.version,
        self.environment.slot, self.environment.real_root))
    # Remove files db entries for this package:slot if it exists
    self.filesdb.delete_item_by_pkgdata(self.environment.category,
            self.environment.name, self.environment.previous_version,
            commit=True)
    # Remove file_relations db entries for this package:slot if it exists
    self.file_relationsdb.delete_item_by_pkgdata(self.environment.category,
            self.environment.name, self.environment.previous_version,
            commit=True)
    # Merge the package, now
    walk_iter = os.walk(self.environment.install_dir, followlinks=True)
    while True:
        try:
            parent, directories, files = next(walk_iter)
            # TODO: Check the target path's permissions for writing or reading
            # Remove install_dir from parent to get real parent path
            pruned_parent = parent.replace(self.environment.install_dir, "")
            # create directories
            for directory in directories:
                source = os.path.join(parent, directory)
                target = os.path.join(self.environment.real_root,
                        pruned_parent, directory)
                real_target = "/".join([pruned_parent, directory])
                # Skip the rest of this level once a parent symlink is hit.
                if self.is_parent_symlink(target):
                    break
                if os.path.islink(source):
                    self.symlinks.append(target+"/")
                    realpath = os.path.realpath(source)
                    if os.path.islink(target):
                        shelltools.remove_file(target)
                    # create real directory
                    # (strip the install_dir prefix to get the in-root path)
                    if len(realpath.split(self.environment.install_dir)) > 1:
                        realpath = realpath.split(self.environment.install_dir)[1][1:]
                    shelltools.makedirs(os.path.join(
                        self.environment.real_root, realpath))
                    # make symlink — clear whatever currently occupies target
                    if os.path.isdir(target):
                        shelltools.remove_dir(target)
                    elif os.path.isfile(target):
                        shelltools.remove_file(target)
                    shelltools.make_symlink(os.readlink(source), target)
                else:
                    if os.path.isfile(target):
                        # TODO: Rename this file and warn the user
                        shelltools.remove_file(target)
                    shelltools.makedirs(target)
                # Get permissions
                perms = get_perms(source)
                # if path is a symlink, pass permission mumbo-jumbos
                if not os.path.islink(source):
                    # Set permissions
                    shelltools.set_id(target, perms["uid"], perms["gid"])
                    shelltools.set_mod(target, perms["mod"])
                    # TODO: Common items?
                    # Add the item to filesdb
                    self.append_filesdb("dir", real_target, perms)
                else:
                    # Add the item to filesdb
                    self.append_filesdb("link", real_target, perms,
                            realpath=os.path.realpath(source))
            # Merge regular files to the target
            # Firstly, handle reserved files
            reserve_files = []
            if self.environment.reserve_files:
                # NOTE(review): basestring is Python 2-only.
                if isinstance(self.environment.reserve_files, basestring):
                    reserve_files.extend([f_item for f_item in
                            self.environment.reserve_files.split(" ")
                            if f_item != ""])
                elif isinstance(self.environment.reserve_files, list) or \
                        isinstance(self.environment.reserve_files, tuple):
                    reserve_files.extend(self.environment.reserve_files)
            # User-level protect file contributes additional reserved paths;
            # '#' lines are comments.
            if os.path.isfile(os.path.join(cst.user_dir, cst.protect_file)):
                with open(os.path.join(cst.user_dir, cst.protect_file)) as data:
                    for rf in data.readlines():
                        if not rf.startswith("#"):
                            reserve_files.append(rf.strip())
            # Here we are starting to merge
            for _file in files:
                source = os.path.join(parent, _file)
                target = os.path.join(self.environment.real_root,
                        pruned_parent, _file)
                real_target = "/".join([pruned_parent, _file])
                if self.is_parent_symlink(target):
                    break
                # Keep file relations for using after to handle reverse dependencies
                if os.path.exists(source) and os.access(source, os.X_OK):
                    if utils.get_mimetype(source) in self.binary_filetypes:
                        self.file_relationsdb.append_query((
                            self.environment.repo,
                            self.environment.category,
                            self.environment.name,
                            self.environment.version,
                            target,
                            file_relations.get_depends(source)))
                    # Strip binary files and keep them smaller
                    if self.strip_debug_symbols and \
                            utils.get_mimetype(source) in self.binary_filetypes:
                        utils.run_strip(source)
                if self.environment.ignore_reserve_files:
                    reserve_files = []
                    self.environment.reserve_files = True

                def add_file_item():
                    # Prevent code duplication
                    if not os.path.islink(target):
                        shelltools.set_id(target, perms["uid"], perms["gid"])
                        shelltools.set_mod(target, perms["mod"])
                        self.append_filesdb("file", real_target, perms,
                                sha1sum=utils.sha1sum(target),
                                size = utils.get_size(source, dec=True)
                                )
                    else:
                        self.append_filesdb("link", real_target, perms,
                                realpath=os.path.realpath(source))

                if self.environment.reserve_files is not False:
                    conf_file = os.path.join(pruned_parent, _file)
                    isconf = (_file.endswith(".conf") or _file.endswith(".cfg"))
                    def is_reserve():
                        if self.environment.ignore_reserve_files:
                            return False
                        elif not conf_file in reserve_files:
                            return False
                        return True
                    if os.path.exists(target) and not is_reserve():
                        # Existing config under /etc (or *.conf/*.cfg) that
                        # changed: divert the merge to a .lpms-backup file.
                        if pruned_parent[0:4] == "/etc" or isconf:
                            if os.path.isfile(conf_file) and \
                                    utils.sha1sum(source) != utils.sha1sum(conf_file):
                                self.append_merge_conf(conf_file)
                                target = target+".lpms-backup"
                                self.backup.append(target)
                    if os.path.exists(target) and is_reserve():
                        # The file is reserved.
                        # Adds to filesdb
                        add_file_item()
                        # We don't need the following operations
                        continue
                if os.path.islink(source):
                    sha1 = False
                    realpath = os.readlink(source)
                    if self.environment.install_dir in realpath:
                        realpath = realpath.split(self.environment.install_dir)[1]
                    if os.path.isdir(target):
                        shelltools.remove_dir(target)
                    elif os.path.isfile(target) or os.path.islink(target):
                        shelltools.remove_file(target)
                    shelltools.make_symlink(realpath, target)
                else:
                    sha1 = utils.sha1sum(source)
                    perms = get_perms(source)
                    shelltools.move(source, target)
                # Adds to filesdb
                # NOTE(review): reconstructed indentation — add_file_item()
                # handles both the link and the regular-file case internally,
                # so it is placed after the if/else; confirm against history.
                add_file_item()
        except StopIteration as err:
            # Walk exhausted; err is unused.
            break
    self.file_relationsdb.insert_query(commit=True)
    self.filesdb.insert_query(commit=True)
    lpms.logger.info("%s/%s has been merged to %s." % (
        self.environment.category, self.environment.fullname,
        self.environment.real_root))
def merge_package(self):
    '''Moves files to the target destination in the most safest way.

    Replays the sandbox image under self.environment.install_dir into
    self.environment.real_root: directories and symlinks first, then
    regular files.  Every merged item is appended to filesdb; executable
    binaries additionally get their dependency relations recorded.
    Reserved configuration files are diverted to "<target>.lpms-backup".
    '''
    def get_perms(path):
        '''Get permissions of given path, it may be file or directory'''
        return {
            "uid": utils.get_uid(path),
            "gid": utils.get_gid(path),
            "mod": utils.get_mod(path)
        }

    out.normal("%s/%s/%s-%s:%s is merging to %s" % (
        self.environment.repo, self.environment.category,
        self.environment.name, self.environment.version,
        self.environment.slot, self.environment.real_root))
    # Remove files db entries for this package:slot if it exists
    self.filesdb.delete_item_by_pkgdata(self.environment.category,
            self.environment.name, self.environment.previous_version,
            commit=True)
    # Remove file_relations db entries for this package:slot if it exists
    self.file_relationsdb.delete_item_by_pkgdata(self.environment.category,
            self.environment.name, self.environment.previous_version,
            commit=True)
    # Merge the package, now
    walk_iter = os.walk(self.environment.install_dir, followlinks=True)
    while True:
        try:
            parent, directories, files = next(walk_iter)
            # TODO: Check the target path's permissions for writing or reading
            # Remove install_dir from parent to get real parent path
            pruned_parent = parent.replace(self.environment.install_dir, "")
            # create directories
            for directory in directories:
                source = os.path.join(parent, directory)
                target = os.path.join(self.environment.real_root,
                        pruned_parent, directory)
                real_target = "/".join([pruned_parent, directory])
                if self.is_parent_symlink(target):
                    break
                if os.path.islink(source):
                    self.symlinks.append(target + "/")
                    realpath = os.path.realpath(source)
                    if os.path.islink(target):
                        shelltools.remove_file(target)
                    # create real directory
                    if len(realpath.split(
                            self.environment.install_dir)) > 1:
                        realpath = realpath.split(
                            self.environment.install_dir)[1][1:]
                    shelltools.makedirs(
                        os.path.join(self.environment.real_root, realpath))
                    # make symlink
                    if os.path.isdir(target):
                        shelltools.remove_dir(target)
                    elif os.path.isfile(target):
                        shelltools.remove_file(target)
                    shelltools.make_symlink(os.readlink(source), target)
                else:
                    if os.path.isfile(target):
                        # TODO: Rename this file and warn the user
                        shelltools.remove_file(target)
                    shelltools.makedirs(target)
                # Get permissions
                perms = get_perms(source)
                # if path is a symlink, pass permission mumbo-jumbos
                if not os.path.islink(source):
                    # Set permissions
                    shelltools.set_id(target, perms["uid"], perms["gid"])
                    shelltools.set_mod(target, perms["mod"])
                    # TODO: Common items?
                    # Add the item to filesdb
                    self.append_filesdb("dir", real_target, perms)
                else:
                    # Add the item to filesdb
                    self.append_filesdb("link", real_target, perms,
                            realpath=os.path.realpath(source))
            # Merge regular files to the target
            # Firstly, handle reserved files
            reserve_files = []
            if self.environment.reserve_files:
                # NOTE(review): basestring exists only on Python 2.
                if isinstance(self.environment.reserve_files, basestring):
                    reserve_files.extend([f_item for f_item in
                            self.environment.reserve_files.split(" ")
                            if f_item != ""])
                elif isinstance(self.environment.reserve_files, list) or isinstance(
                        self.environment.reserve_files, tuple):
                    reserve_files.extend(self.environment.reserve_files)
            # Lines from the user protect file extend the reserved set.
            if os.path.isfile(os.path.join(cst.user_dir, cst.protect_file)):
                with open(os.path.join(cst.user_dir, cst.protect_file)) as data:
                    for rf in data.readlines():
                        if not rf.startswith("#"):
                            reserve_files.append(rf.strip())
            # Here we are starting to merge
            for _file in files:
                source = os.path.join(parent, _file)
                target = os.path.join(self.environment.real_root,
                        pruned_parent, _file)
                real_target = "/".join([pruned_parent, _file])
                if self.is_parent_symlink(target):
                    break
                # Keep file relations for using after to handle reverse dependencies
                if os.path.exists(source) and os.access(source, os.X_OK):
                    if utils.get_mimetype(source) in self.binary_filetypes:
                        self.file_relationsdb.append_query(
                            (self.environment.repo,
                             self.environment.category,
                             self.environment.name,
                             self.environment.version,
                             target,
                             file_relations.get_depends(source)))
                    # Strip binary files and keep them smaller
                    if self.strip_debug_symbols and \
                            utils.get_mimetype(source) in self.binary_filetypes:
                        utils.run_strip(source)
                if self.environment.ignore_reserve_files:
                    reserve_files = []
                    self.environment.reserve_files = True

                def add_file_item():
                    # Prevent code duplication
                    if not os.path.islink(target):
                        shelltools.set_id(target, perms["uid"], perms["gid"])
                        shelltools.set_mod(target, perms["mod"])
                        self.append_filesdb("file", real_target, perms,
                                sha1sum=utils.sha1sum(target),
                                size = utils.get_size(source, dec=True)
                                )
                    else:
                        self.append_filesdb("link", real_target, perms,
                                realpath=os.path.realpath(source))

                if self.environment.reserve_files is not False:
                    conf_file = os.path.join(pruned_parent, _file)
                    isconf = (_file.endswith(".conf") or _file.endswith(".cfg"))
                    def is_reserve():
                        if self.environment.ignore_reserve_files:
                            return False
                        elif not conf_file in reserve_files:
                            return False
                        return True
                    if os.path.exists(target) and not is_reserve():
                        # Changed config under /etc (or *.conf/*.cfg):
                        # divert the merge to a .lpms-backup file.
                        if pruned_parent[0:4] == "/etc" or isconf:
                            if os.path.isfile(conf_file) and utils.sha1sum(
                                    source) != utils.sha1sum(conf_file):
                                self.append_merge_conf(conf_file)
                                target = target + ".lpms-backup"
                                self.backup.append(target)
                    if os.path.exists(target) and is_reserve():
                        # The file is reserved.
                        # Adds to filesdb
                        add_file_item()
                        # We don't need the following operations
                        continue
                if os.path.islink(source):
                    sha1 = False
                    realpath = os.readlink(source)
                    if self.environment.install_dir in realpath:
                        realpath = realpath.split(
                            self.environment.install_dir)[1]
                    if os.path.isdir(target):
                        shelltools.remove_dir(target)
                    elif os.path.isfile(target) or os.path.islink(target):
                        shelltools.remove_file(target)
                    shelltools.make_symlink(realpath, target)
                else:
                    sha1 = utils.sha1sum(source)
                    perms = get_perms(source)
                    shelltools.move(source, target)
                # Adds to filesdb
                # NOTE(review): indentation reconstructed — add_file_item()
                # covers both branches (it checks islink(target) itself),
                # so it sits after the if/else; confirm against history.
                add_file_item()
        except StopIteration as err:
            # Walk finished; err is unused.
            break
    self.file_relationsdb.insert_query(commit=True)
    self.filesdb.insert_query(commit=True)
    lpms.logger.info("%s/%s has been merged to %s." % (
        self.environment.category, self.environment.fullname,
        self.environment.real_root))
# Populate _filesdb from the package content listing: one query per
# directory and per file, skipping entries that no longer exist on disk.
# Query columns: (repo, category, name, version, path, type, size, gid,
# mod, uid, sha1sum, realpath).
for path in content["dirs"]:
    if path == "" or not os.path.exists(path):
        if path != "":
            print("\t%s not found" % path)
        continue
    uid = utils.get_uid(path)
    gid = utils.get_gid(path)
    mod = utils.get_mod(path)
    if not os.path.islink(path):
        _filesdb.append_query((repo, category, name, version, path,
                "dir", None, gid, mod, uid, None, None))
    else:
        _filesdb.append_query((repo, category, name, version, path,
                "link", None, gid, mod, uid, None, os.path.realpath(path)))

for path in content["file"]:
    if path == "" or not os.path.exists(path):
        if path != "":
            print("\t%s not found" % path)
        continue
    uid = utils.get_uid(path)
    gid = utils.get_gid(path)
    mod = utils.get_mod(path)
    if not os.path.islink(path):
        size = utils.get_size(path, dec=True)
        sha1sum = utils.sha1sum(path)
        _filesdb.append_query((repo, category, name, version, path,
                "file", size, gid, mod, uid, sha1sum, None))
    else:
        # FIX: record the symlink's resolved target in the realpath
        # column, consistent with the directory branch above (the
        # original stored None here).
        _filesdb.append_query((repo, category, name, version, path,
                "link", None, gid, mod, uid, None, os.path.realpath(path)))

_filesdb.insert_query(commit=True)