def _run(self): """ Execute the action. Return an exit status. """ self.setup() unpack_dir = self._meta['unpackdir'] if not self._meta.get('fetch_path'): try: os.makedirs(unpack_dir, 0o755) const_setup_directory(unpack_dir) except (OSError, IOError) as err: if err.errno != errno.EEXIST: self._entropy.output( "%s: %s" % ( blue(_("Fetch path setup error")), err, ), importance = 1, level = "info", header = red(" ## ") ) return 1 exit_st = 0 for method in self._meta['phases']: exit_st = method() if exit_st != 0: break return exit_st
def _run(self): """ Execute the action. Return an exit status. """ self.setup() unpack_dir = self._meta['unpackdir'] if not self._meta.get('fetch_path'): try: os.makedirs(unpack_dir, 0o755) const_setup_directory(unpack_dir) except (OSError, IOError) as err: if err.errno != errno.EEXIST: self._entropy.output("%s: %s" % ( blue(_("Fetch path setup error")), err, ), importance=1, level="info", header=red(" ## ")) return 1 exit_st = 0 for method in self._meta['phases']: exit_st = method() if exit_st != 0: break return exit_st
def _quickpkg(self, entropy_client, inst_repo):
    """
    Solo Pkg Quickpkg command.

    Rebuild Entropy package files from installed packages, storing them
    into the save directory. Return 0 on success, 1 when the save
    directory is broken or no package matched, 3 when a package file
    could not be generated.
    """
    requested = self._nsargs.packages
    savedir = self._nsargs.savedir

    if not os.path.isdir(savedir) and not os.path.exists(savedir):
        # this is validated by the parser
        # but not in case of no --savedir provided
        const_setup_directory(savedir)
    if not os.path.exists(savedir):
        entropy_client.output(
            "%s: %s" % (
                brown(_("broken directory path")),
                savedir,),
            level="error", importance=1)
        return 1

    package_ids = self._scan_packages(
        entropy_client, inst_repo, requested)
    if not package_ids:
        return 1

    for package_id in package_ids:
        atom = inst_repo.retrieveAtom(package_id)
        entropy_client.output(
            "%s: %s" % (
                teal(_("generating package")),
                purple(atom),),
            header=brown(" @@ "),
            back=True)

        metadata = inst_repo.getPackageData(package_id)
        generated_path = entropy_client.generate_package(
            metadata, save_directory=savedir)
        if generated_path is None:
            entropy_client.output(
                "%s: %s" % (
                    darkred(_("package file creation error")),
                    blue(atom),),
                level="error", importance=1)
            return 3

        entropy_client.output(
            "[%s] %s: %s" % (
                darkgreen(atom),
                teal(_("package generated")),
                purple(generated_path),),
            header=brown(" ## "))

    return 0
def _extract(self, entropy_client):
    """
    Solo Pkg Extract command.

    Dump the embedded Entropy metadata of each given package file into
    a sibling ".db" file inside the save directory. Return 0 on
    success, 1 on broken save directory or extraction failure.
    """
    package_files = self._nsargs.files
    savedir = self._nsargs.savedir

    if not os.path.isdir(savedir) and not os.path.exists(savedir):
        # this is validated by the parser
        # but not in case of no --savedir provided
        const_setup_directory(savedir)
    if not os.path.exists(savedir):
        entropy_client.output(
            "%s: %s" % (
                brown(_("broken directory path")),
                savedir,),
            level="error", importance=1)
        return 1

    for pkg_file in package_files:
        entropy_client.output(
            "%s: %s" % (
                teal(_("working on package file")),
                purple(pkg_file)),
            header=darkred(" @@ "),
            back=True)

        metadata_path = os.path.join(
            savedir, os.path.basename(pkg_file) + ".db")
        extracted = entropy.tools.dump_entropy_metadata(
            pkg_file, metadata_path)
        if not extracted:
            entropy_client.output(
                "%s: %s" % (
                    teal(_("error during metadata extraction")),
                    purple(pkg_file)),
                header=darkred(" @@ "),
                level="error", importance=1)
            return 1

        entropy_client.output(
            "%s: %s" % (
                teal(_("metadata file generated")),
                purple(metadata_path)),
            header=darkred(" @@ "))

    return 0
def _file_lock_create(self, lock_path, blocking=False, shared=False):
    """
    Create and allocate the lock file pointed by lock_data structure.

    @param lock_path: path of the lock file to create/open
    @type lock_path: string
    @keyword blocking: if True, wait until the lock is acquired
    @type blocking: bool
    @keyword shared: if True, acquire a shared (read) lock instead of
        an exclusive one
    @type shared: bool
    @return: (True, FlockFile) on success, (False, None) when the lock
        could not be acquired (non-blocking) or the path is unusable
    @rtype: tuple
    """
    lock_dir = os.path.dirname(lock_path)
    try:
        os.makedirs(lock_dir, 0o775)
    except OSError as err:
        # an already existing directory is fine
        if err.errno != errno.EEXIST:
            raise
    const_setup_directory(lock_dir)

    try:
        fmode = 0o664
        if shared:
            fd = os.open(lock_path, os.O_CREAT | os.O_RDONLY, fmode)
        else:
            fd = os.open(lock_path, os.O_CREAT | os.O_APPEND, fmode)
    except OSError as err:
        if err.errno in (errno.ENOENT, errno.EACCES):
            # cannot get lock or dir doesn't exist
            return False, None
        raise

    # ensure that entropy group can write on that
    try:
        const_setup_file(lock_path, etpConst['entropygid'], 0o664)
    except OSError:
        pass

    acquired = False
    flock_f = None
    try:
        flock_f = FlockFile(lock_path, fd=fd)
        if blocking:
            if shared:
                flock_f.acquire_shared()
            else:
                flock_f.acquire_exclusive()
            acquired = True
            return True, flock_f

        # non blocking
        if shared:
            acquired = flock_f.try_acquire_shared()
        else:
            acquired = flock_f.try_acquire_exclusive()
        if not acquired:
            return False, None
        return True, flock_f
    finally:
        # BUGFIX: the previous implementation returned (False, None)
        # without releasing the file descriptor when a non-blocking
        # acquisition failed (and leaked it when acquire raised).
        # Close the FlockFile in every failure path, mirroring the
        # other _file_lock_create variants.
        if not acquired and flock_f is not None:
            try:
                flock_f.close()
            except (OSError, IOError):
                pass
def _inflate(self, entropy_client):
    """
    Solo Pkg Inflate command.

    Turn Source Package Manager package files into Entropy package
    files: copy each file into the save directory, extract its SPM
    metadata, compute the final Entropy download path/name, and attach
    the generated Entropy metadata to the package file in place.
    Return 0 on success, 1 when the save directory is broken.
    """
    files = self._nsargs.files
    savedir = self._nsargs.savedir

    if not os.path.isdir(savedir) and not os.path.exists(savedir):
        # this is validated by the parser
        # but not in case of no --savedir provided
        const_setup_directory(savedir)
    if not os.path.exists(savedir):
        entropy_client.output(
            "%s: %s" % (
                brown(_("broken directory path")),
                savedir,),
            level="error", importance=1)
        return 1

    spm = entropy_client.Spm()
    for _file in files:
        entropy_client.output(
            "%s: %s" % (
                teal(_("working on package file")),
                purple(_file)),
            header=darkred(" @@ "),
            back=True)

        file_name = os.path.basename(_file)
        package_path = os.path.join(savedir, file_name)
        if os.path.realpath(_file) != os.path.realpath(package_path):
            # make a copy first
            shutil.copy2(_file, package_path)

        pkg_data = spm.extract_package_metadata(package_path)
        entropy_client.output(
            "%s: %s" % (
                teal(_("package file extraction complete")),
                purple(package_path)),
            header=darkred(" @@ "),
            back=True)

        # append development revision number
        # and create final package file name
        sha1 = None
        signatures = pkg_data.get('signatures')
        if signatures is not None:
            sha1 = signatures['sha1']
        pkg_data['revision'] = etpConst['spmetprev']

        download_dirpath = entropy.tools.create_package_dirpath(
            pkg_data['branch'], nonfree=False, restricted=False)
        download_name = entropy.dep.create_package_relative_path(
            pkg_data['category'],
            pkg_data['name'],
            pkg_data['version'],
            pkg_data['versiontag'],
            ext=etpConst['packagesext'],
            revision=pkg_data['revision'],
            sha1=sha1)
        pkg_data['download'] = download_dirpath + os.path.sep + \
            download_name

        # migrate to the proper format
        final_path = os.path.join(savedir, download_name)
        if package_path != final_path:
            try:
                os.makedirs(os.path.dirname(final_path))
            except OSError as err:
                # BUGFIX: os.makedirs() signals an already existing
                # directory with EEXIST; the previous check tolerated
                # only EISDIR and therefore crashed on re-runs when
                # the target directory was already present.
                if err.errno not in (errno.EISDIR, errno.EEXIST):
                    raise
            shutil.move(package_path, final_path)
            package_path = final_path

        tmp_fd, tmp_path = const_mkstemp(
            prefix="equo.smart.inflate.",
            dir=savedir)
        os.close(tmp_fd)

        # attach entropy metadata to package file
        repo = entropy_client.open_generic_repository(tmp_path)
        repo.initializeRepository()
        _package_id = repo.addPackage(
            pkg_data, revision=pkg_data['revision'])
        repo.commit()
        repo.close()

        entropy_client.output(
            "%s: %s" % (
                teal(_("package metadata generation complete")),
                purple(package_path)),
            header=darkred(" @@ "),
            back=True)

        entropy.tools.aggregate_entropy_metadata(
            package_path, tmp_path)
        os.remove(tmp_path)

        entropy_client.output(
            "%s: %s" % (
                teal(_("package file generated at")),
                purple(package_path)),
            header=darkred(" @@ "))

    return 0
def path_lock(self, path):
    """
    Given a path, return a FlockFile object that can be used for
    inter-process synchronization purposes.

    @param path: path to protect with a file lock
    @type path: string
    @return: a FlockFile object instance
    @rtype: entropy.misc.FlockFile
    """
    lock_path = path + "._entropy_package.lock"
    # make sure the parent directory exists with Entropy permissions
    const_setup_directory(os.path.dirname(path))

    def _notify_waiting(lock_obj, is_exclusive):
        # called while the lock is being waited for
        msg = _("Acquiring exclusive lock on") if is_exclusive \
            else _("Acquiring shared lock on")
        self._entropy.output(
            "%s %s ..." % (
                darkred(msg),
                darkgreen(lock_obj.get_path()),
            ),
            level="warning",  # use stderr, avoid breaking --quiet
            back=True,
            importance=0)

    def _notify_acquired(lock_obj, is_exclusive):
        # called once the lock has been obtained
        msg = _("Acquired exclusive lock on") if is_exclusive \
            else _("Acquired shared lock on")
        self._entropy.output(
            "%s %s" % (
                darkred(msg),
                darkgreen(lock_obj.get_path()),
            ),
            level="warning",  # use stderr, avoid breaking --quiet
            back=True,
            importance=0)

    class PackageFlockFile(FlockFile):
        """
        FlockFile subclass that tolerates lock errors caused by
        filesystems without proper locking support (NFS?).
        """

        _ALLOWED_ERRORS = (errno.EPERM, errno.ENOSYS, errno.ENOLCK)

        def __init__(self, *args, **kwargs):
            super(PackageFlockFile, self).__init__(*args, **kwargs)
            self._wait_msg_cb = _notify_waiting
            self._acquired_msg_cb = _notify_acquired

        def acquire_shared(self):
            """
            Avoid failures if lock cannot be acquired due to
            filesystem limitations (NFS?).
            """
            try:
                return super(PackageFlockFile, self).acquire_shared()
            except (OSError, IOError) as exc:
                if exc.errno in self._ALLOWED_ERRORS:
                    sys.stderr.write(
                        "PackageFlockFile(%s).shared: lock error: %s\n" % (
                            self._path, exc))
                    return None
                raise

        def try_acquire_shared(self):
            """
            Avoid failures if lock cannot be acquired due to
            filesystem limitations (NFS?).
            """
            try:
                return super(PackageFlockFile, self).try_acquire_shared()
            except (OSError, IOError) as exc:
                if exc.errno in self._ALLOWED_ERRORS:
                    sys.stderr.write(
                        "PackageFlockFile(%s).try_shared: lock error: %s\n" % (
                            self._path, exc))
                    return True
                raise

        def acquire_exclusive(self):
            """
            Avoid failures if lock cannot be acquired due to
            filesystem limitations (NFS?).
            """
            try:
                return super(PackageFlockFile, self).acquire_exclusive()
            except (OSError, IOError) as exc:
                if exc.errno in self._ALLOWED_ERRORS:
                    sys.stderr.write(
                        "PackageFlockFile(%s).exclusive: lock error: %s\n" % (
                            self._path, exc))
                    return None
                raise

        def try_acquire_exclusive(self):
            """
            Avoid failures if lock cannot be acquired due to
            filesystem limitations (NFS?).
            """
            try:
                return super(PackageFlockFile, self).try_acquire_exclusive()
            except (OSError, IOError) as exc:
                if exc.errno in self._ALLOWED_ERRORS:
                    sys.stderr.write(
                        "PackageFlockFile(%s).try_excl: lock error: %s\n" % (
                            self._path, exc))
                    return True
                raise

    return PackageFlockFile(lock_path)
def _file_lock_create(self, lock_path, blocking=False, shared=False):
    """
    Create and allocate the lock file pointed by lock_data structure.

    Return (True, FlockFile) on success, (False, None) when the lock
    cannot be acquired or the lock path is unusable.
    """
    lock_dir = os.path.dirname(lock_path)
    try:
        const_setup_directory(lock_dir)
    except OSError as err:
        const_debug_write(
            __name__,
            "Error in const_setup_directory %s: %s" % (lock_dir, err))
        # we may just not have the perms to create the dir.
        if err.errno != errno.EPERM:
            raise

    open_flags = os.O_CREAT | (os.O_RDONLY if shared else os.O_APPEND)
    try:
        fd = os.open(lock_path, open_flags, 0o664)
    except OSError as err:
        if err.errno in (errno.ENOENT, errno.EACCES):
            # cannot get lock or dir doesn't exist
            return False, None
        raise

    # ensure that entropy group can write on that
    try:
        const_setup_file(lock_path, etpConst['entropygid'], 0o664)
    except OSError:
        pass

    got_lock = False
    lock_f = None
    try:
        lock_f = FlockFile(lock_path, fd=fd)
        if blocking:
            if shared:
                lock_f.acquire_shared()
            else:
                lock_f.acquire_exclusive()
            got_lock = True
            return True, lock_f

        # non blocking
        if shared:
            got_lock = lock_f.try_acquire_shared()
        else:
            got_lock = lock_f.try_acquire_exclusive()
        if not got_lock:
            return False, None
        return True, lock_f

    except Exception:
        # do not leak the descriptor if anything above blew up
        if lock_f is not None:
            try:
                lock_f.close()
            except (OSError, IOError):
                pass
            lock_f = None
        raise
    finally:
        # also release it on the clean "could not acquire" path
        if not got_lock and lock_f is not None:
            try:
                lock_f.close()
            except (OSError, IOError):
                pass
            lock_f = None
def _file_lock_create(self, lock_path, blocking=False, shared=False):
    """
    Create and allocate the lock file pointed by lock_data structure.

    Returns (True, FlockFile) on success, (False, None) when the lock
    cannot be acquired (non-blocking mode) or the path is unusable.
    """
    lock_dir = os.path.dirname(lock_path)
    try:
        const_setup_directory(lock_dir)
    except OSError as err:
        const_debug_write(
            __name__,
            "Error in const_setup_directory %s: %s" % (
                lock_dir, err))
        # we may just not have the perms to create the dir.
        if err.errno != errno.EPERM:
            raise

    try:
        fmode = 0o664
        # a shared lock only needs read access; exclusive opens for
        # append so the file content is never clobbered
        if shared:
            fd = os.open(lock_path, os.O_CREAT | os.O_RDONLY, fmode)
        else:
            fd = os.open(lock_path, os.O_CREAT | os.O_APPEND, fmode)
    except OSError as err:
        if err.errno in (errno.ENOENT, errno.EACCES):
            # cannot get lock or dir doesn't exist
            return False, None
        raise

    # ensure that entropy group can write on that
    try:
        const_setup_file(lock_path, etpConst['entropygid'], 0o664)
    except OSError:
        pass

    acquired = False
    flock_f = None
    try:
        flock_f = FlockFile(lock_path, fd=fd)
        if blocking:
            if shared:
                flock_f.acquire_shared()
            else:
                flock_f.acquire_exclusive()
            acquired = True
            return True, flock_f

        # non blocking
        if shared:
            acquired = flock_f.try_acquire_shared()
        else:
            acquired = flock_f.try_acquire_exclusive()
        if not acquired:
            return False, None
        return True, flock_f
    except Exception:
        # close and forget the lock object so the finally clause
        # below does not try to close it a second time
        if flock_f is not None:
            try:
                flock_f.close()
            except (OSError, IOError):
                pass
            flock_f = None
        raise
    finally:
        # release the descriptor on every path that did not hand the
        # lock back to the caller
        if not acquired and flock_f is not None:
            try:
                flock_f.close()
            except (OSError, IOError):
                pass
            flock_f = None
def _inflate(self, entropy_client):
    """
    Solo Pkg Inflate command.

    Convert Source Package Manager package files into Entropy package
    files inside the save directory: copy, extract SPM metadata,
    compute the final download path, attach the Entropy metadata.
    Return 0 on success, 1 when the save directory is broken.
    """
    package_files = self._nsargs.files
    savedir = self._nsargs.savedir

    if not os.path.isdir(savedir) and not os.path.exists(savedir):
        # this is validated by the parser
        # but not in case of no --savedir provided
        const_setup_directory(savedir)
    if not os.path.exists(savedir):
        entropy_client.output(
            "%s: %s" % (
                brown(_("broken directory path")),
                savedir,),
            level="error", importance=1)
        return 1

    spm = entropy_client.Spm()
    for source_file in package_files:
        entropy_client.output(
            "%s: %s" % (
                teal(_("working on package file")),
                purple(source_file)),
            header=darkred(" @@ "), back=True)

        package_path = os.path.join(
            savedir, os.path.basename(source_file))
        same_file = os.path.realpath(source_file) == \
            os.path.realpath(package_path)
        if not same_file:
            # make a copy first
            shutil.copy2(source_file, package_path)

        pkg_data = spm.extract_package_metadata(package_path)
        entropy_client.output(
            "%s: %s" % (
                teal(_("package file extraction complete")),
                purple(package_path)),
            header=darkred(" @@ "), back=True)

        # append development revision number
        # and create final package file name
        signatures = pkg_data.get('signatures')
        sha1 = signatures['sha1'] if signatures is not None else None
        pkg_data['revision'] = etpConst['spmetprev']

        download_dirpath = entropy.tools.create_package_dirpath(
            pkg_data['branch'], nonfree=False, restricted=False)
        download_name = entropy.dep.create_package_relative_path(
            pkg_data['category'],
            pkg_data['name'],
            pkg_data['version'],
            pkg_data['versiontag'],
            ext=etpConst['packagesext'],
            revision=pkg_data['revision'],
            sha1=sha1)
        pkg_data['download'] = download_dirpath + os.path.sep + \
            download_name

        # migrate to the proper format
        final_path = os.path.join(savedir, download_name)
        if package_path != final_path:
            try:
                os.makedirs(os.path.dirname(final_path))
            except OSError as err:
                # the target directory may legitimately exist already
                if err.errno not in (errno.EISDIR, errno.EEXIST):
                    raise
            shutil.move(package_path, final_path)
            package_path = final_path

        tmp_fd, tmp_path = const_mkstemp(
            prefix="equo.smart.inflate.", dir=savedir)
        os.close(tmp_fd)

        # attach entropy metadata to package file
        repo = entropy_client.open_generic_repository(tmp_path)
        repo.initializeRepository()
        repo.addPackage(pkg_data, revision=pkg_data['revision'])
        repo.commit()
        repo.close()

        entropy_client.output(
            "%s: %s" % (
                teal(_("package metadata generation complete")),
                purple(package_path)),
            header=darkred(" @@ "), back=True)

        entropy.tools.aggregate_entropy_metadata(
            package_path, tmp_path)
        os.remove(tmp_path)

        entropy_client.output(
            "%s: %s" % (
                teal(_("package file generated at")),
                purple(package_path)),
            header=darkred(" @@ "))

    return 0
def path_lock(self, path):
    """
    Given a path, return a FlockFile object that can be used for
    inter-process synchronization purposes.

    @param path: path to protect with a file lock
    @type path: string
    @return: a FlockFile object instance
    @rtype: entropy.misc.FlockFile
    """
    # the lock file lives next to the protected path
    lock_path = path + "._entropy_package.lock"
    path_dir = os.path.dirname(path)
    # make sure the parent directory exists with proper permissions
    const_setup_directory(path_dir)

    def wait_msg_cb(obj, exclusive):
        # invoked by FlockFile while waiting for the lock
        if exclusive:
            msg = _("Acquiring exclusive lock on")
        else:
            msg = _("Acquiring shared lock on")
        self._entropy.output(
            "%s %s ..." % (
                darkred(msg),
                darkgreen(obj.get_path()),
            ),
            level="warning",  # use stderr, avoid breaking --quiet
            back=True,
            importance=0)

    def acquired_msg_cb(obj, exclusive):
        # invoked by FlockFile once the lock has been obtained
        if exclusive:
            msg = _("Acquired exclusive lock on")
        else:
            msg = _("Acquired shared lock on")
        self._entropy.output(
            "%s %s" % (
                darkred(msg),
                darkgreen(obj.get_path()),
            ),
            level="warning",  # use stderr, avoid breaking --quiet
            back=True,
            importance=0)

    class PackageFlockFile(FlockFile):

        # lock errors tolerated below: these typically come from
        # filesystems without proper locking support (e.g. NFS)
        _ALLOWED_ERRORS = (errno.EPERM, errno.ENOSYS, errno.ENOLCK)

        def __init__(self, *args, **kwargs):
            super(PackageFlockFile, self).__init__(*args, **kwargs)
            # route FlockFile status messages through Entropy output
            self._wait_msg_cb = wait_msg_cb
            self._acquired_msg_cb = acquired_msg_cb

        def acquire_shared(self):
            """
            Avoid failures if lock cannot be acquired due to
            filesystem limitations (NFS?).
            """
            try:
                return super(PackageFlockFile, self).acquire_shared()
            except (OSError, IOError) as err:
                if err.errno not in self._ALLOWED_ERRORS:
                    raise
                # tolerated error: report on stderr, pretend success
                sys.stderr.write(
                    "PackageFlockFile(%s).shared: lock error: %s\n"
                    % (self._path, err))

        def try_acquire_shared(self):
            """
            Avoid failures if lock cannot be acquired due to
            filesystem limitations (NFS?).
            """
            try:
                return super(PackageFlockFile, self).try_acquire_shared()
            except (OSError, IOError) as err:
                if err.errno not in self._ALLOWED_ERRORS:
                    raise
                # tolerated error: report on stderr, pretend acquired
                sys.stderr.write(
                    "PackageFlockFile(%s).try_shared: lock error: %s\n"
                    % (self._path, err))
                return True

        def acquire_exclusive(self):
            """
            Avoid failures if lock cannot be acquired due to
            filesystem limitations (NFS?).
            """
            try:
                return super(PackageFlockFile, self).acquire_exclusive()
            except (OSError, IOError) as err:
                if err.errno not in self._ALLOWED_ERRORS:
                    raise
                # tolerated error: report on stderr, pretend success
                sys.stderr.write(
                    "PackageFlockFile(%s).exclusive: lock error: %s\n"
                    % (self._path, err))

        def try_acquire_exclusive(self):
            """
            Avoid failures if lock cannot be acquired due to
            filesystem limitations (NFS?).
            """
            try:
                return super(PackageFlockFile, self).try_acquire_exclusive()
            except (OSError, IOError) as err:
                if err.errno not in self._ALLOWED_ERRORS:
                    raise
                # tolerated error: report on stderr, pretend acquired
                sys.stderr.write(
                    "PackageFlockFile(%s).try_excl: lock error: %s\n"
                    % (self._path, err))
                return True

    return PackageFlockFile(lock_path)
def _quickpkg(self, entropy_client):
    """
    Solo Pkg Quickpkg command.

    List the installed packages that match the given names, optionally
    ask for confirmation, then rebuild an Entropy package file for
    each into the save directory. Return 0 on success (or user abort),
    1 on broken directory or no match, 3 on generation failure.
    """
    requested = self._nsargs.packages
    ask = self._ask
    savedir = self._nsargs.savedir

    if not os.path.isdir(savedir) and not os.path.exists(savedir):
        # this is validated by the parser
        # but not in case of no --savedir provided
        const_setup_directory(savedir)
    if not os.path.exists(savedir):
        entropy_client.output(
            "%s: %s" % (
                brown(_("broken directory path")),
                savedir,),
            level="error", importance=1)
        return 1

    entropy_repository = entropy_client.installed_repository()
    pkg_matches = self._scan_packages(
        entropy_client, requested, installed=True)
    if not pkg_matches:
        return 1

    entropy_client.output(
        "%s:" % (
            brown(_("This is the list of packages "
                    "that would be considered")),
        ))
    for pkg_id, pkg_repo in pkg_matches:
        repo = entropy_client.open_repository(pkg_repo)
        entropy_client.output(
            "[%s] %s" % (
                brown(pkg_repo),
                darkgreen(repo.retrieveAtom(pkg_id)),),
            header=" ")

    if ask:
        answer = entropy_client.ask_question(
            _("Would you like to continue ?"))
        if answer == _("No"):
            return 0

    for pkg_id, pkg_repo in pkg_matches:
        repo = entropy_client.open_repository(pkg_repo)
        atom = repo.retrieveAtom(pkg_id)
        entropy_client.output(
            "%s: %s" % (
                teal(_("generating package")),
                purple(atom),),
            header=brown(" @@ "),
            back=True)

        metadata = repo.getPackageData(pkg_id)
        generated_path = entropy_client.generate_package(
            metadata, save_directory=savedir)
        if generated_path is None:
            entropy_client.output(
                "%s: %s" % (
                    darkred(_("package file creation error")),
                    blue(atom),),
                level="error", importance=1)
            return 3

        entropy_client.output(
            "[%s] %s: %s" % (
                darkgreen(atom),
                teal(_("package generated")),
                purple(generated_path),),
            header=brown(" ## "))

    return 0