def try_cleanup_cache(self, package_directory=None, current_id=None):
    """Attempt to clean up the package cache in a given directory.

    :param package_directory: directory containing cached packages
    :param current_id: optional id of the package to exclude from
        the set of deleted packages
    """
    if not package_directory:
        return

    try:
        pkg_ids_listed = set(os.listdir(package_directory))
    except OSError:
        # No directory for this package, probably someone already
        # deleted everything. Nothing to delete anyway.
        return

    # If current_id was given, make sure it is not considered for removal.
    pkg_ids_listed -= {current_id}

    for pkg_id in pkg_ids_listed:
        stale_directory = os.path.join(package_directory, pkg_id)

        # Skip stray entries that are not package directories.
        if not os.path.isdir(stale_directory):
            continue

        usage_lock_path = os.path.join(
            self._cache_directory, '{}_usage.lock'.format(pkg_id))
        ipc_lock = m_utils.ExclusiveInterProcessLock(
            path=usage_lock_path, sleep_func=eventlet.sleep)

        try:
            with usage_mem_locks[pkg_id].write_lock(False) as acquired:
                if not acquired:
                    # The package is in use by another deployment in this
                    # process; do nothing, whoever holds the lock will
                    # delete it.
                    continue

                acquired_ipc_lock = ipc_lock.acquire(blocking=False)
                if not acquired_ipc_lock:
                    # The package is in use by another deployment in a
                    # different process; do nothing, whoever holds the
                    # lock will delete it.
                    continue

                shutil.rmtree(stale_directory, ignore_errors=True)
                ipc_lock.release()

                for lock_type in ('usage', 'download'):
                    lock_path = os.path.join(
                        self._cache_directory,
                        '{}_{}.lock'.format(pkg_id, lock_type))
                    try:
                        os.remove(lock_path)
                    except OSError:
                        LOG.warning(
                            _LW("Couldn't delete lock file: "
                                "{}").format(lock_path))
        except RuntimeError:
            # Couldn't upgrade the read lock to a write lock; move on.
            continue
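
# --- Illustrative sketch (not part of the loader) ---------------------------
# A minimal, standalone illustration of the two-level locking pattern used by
# try_cleanup_cache(): a per-package in-process lock plus an inter-process
# file lock, both acquired non-blocking so that a package in use is simply
# skipped rather than blocking the cleanup pass. The names (_mem_locks,
# remove_if_unused) are hypothetical, and the file lock here is a plain
# POSIX fcntl.flock instead of m_utils.ExclusiveInterProcessLock and the
# eventlet-based read/write locks used above.
import collections
import fcntl
import os
import shutil
import threading

_mem_locks = collections.defaultdict(threading.Lock)  # per-package, this process


def remove_if_unused(cache_dir, pkg_id):
    """Delete cache_dir/pkg_id only if no one in any process is using it."""
    mem_lock = _mem_locks[pkg_id]
    if not mem_lock.acquire(blocking=False):
        return False  # busy in this process; skip
    try:
        lock_path = os.path.join(cache_dir, '{}_usage.lock'.format(pkg_id))
        with open(lock_path, 'w') as lock_file:
            try:
                # Non-blocking exclusive lock; raises if another process
                # currently holds the lock for this package.
                fcntl.flock(lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
            except OSError:
                return False  # busy in another process; skip
            shutil.rmtree(os.path.join(cache_dir, pkg_id), ignore_errors=True)
            return True
    finally:
        mem_lock.release()
# ---------------------------------------------------------------------------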
def _get_package_by_definition(self, package_def):
    package_id = package_def.id
    package_directory = os.path.join(
        self._cache_directory,
        package_def.fully_qualified_name,
        getattr(package_def, 'version', '0.0.0'),
        package_id)

    if os.path.isdir(package_directory):
        try:
            return load_utils.load_from_dir(package_directory)
        except pkg_exc.PackageLoadError:
            LOG.exception(
                _LE('Unable to load package from cache. Clean-up.'))
            shutil.rmtree(package_directory, ignore_errors=True)

    # The package is not in the cache yet; try to download it.
    download_lock_path = os.path.join(
        self._cache_directory, '{}_download.lock'.format(package_id))
    download_ipc_lock = m_utils.ExclusiveInterProcessLock(
        path=download_lock_path, sleep_func=eventlet.sleep)

    with download_mem_locks[package_id].write_lock(), download_ipc_lock:
        # NOTE(kzaitsev): if two concurrent threads/processes raced here,
        # one of them may have already downloaded this package. Re-check
        # before downloading.
        if os.path.isdir(package_directory):
            try:
                return load_utils.load_from_dir(package_directory)
            except pkg_exc.PackageLoadError:
                LOG.error(
                    _LE('Unable to load package from cache. Clean-up.'))
                shutil.rmtree(package_directory, ignore_errors=True)

        # Attempt the download itself.
        try:
            LOG.debug("Attempting to download package {} {}".format(
                package_def.fully_qualified_name, package_id))
            package_data = self.client.packages.download(package_id)
        except muranoclient_exc.HTTPException as e:
            msg = 'Error loading package id {0}: {1}'.format(
                package_id, str(e))
            exc_info = sys.exc_info()
            six.reraise(pkg_exc.PackageLoadError,
                        pkg_exc.PackageLoadError(msg),
                        exc_info[2])

        package_file = None
        try:
            with tempfile.NamedTemporaryFile(delete=False) as package_file:
                package_file.write(package_data)

            with load_utils.load_from_file(
                    package_file.name,
                    target_dir=package_directory,
                    drop_dir=False) as app_package:
                LOG.info(
                    _LI("Successfully downloaded and unpacked package "
                        "{} {}").format(
                        package_def.fully_qualified_name, package_id))
                self._downloaded.append(app_package)

                self.try_cleanup_cache(
                    os.path.split(package_directory)[0],
                    current_id=package_id)
                return app_package
        except IOError:
            msg = 'Unable to extract package data for %s' % package_id
            exc_info = sys.exc_info()
            six.reraise(pkg_exc.PackageLoadError,
                        pkg_exc.PackageLoadError(msg),
                        exc_info[2])
        finally:
            try:
                if package_file:
                    os.remove(package_file.name)
            except OSError:
                pass
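
# --- Illustrative sketch (not part of the loader) ---------------------------
# A minimal, standalone illustration of the double-checked caching pattern
# used by _get_package_by_definition(): check the cache, take a per-key lock,
# re-check under the lock, and only then pay for the expensive fetch. The
# names (_download_locks, fetch_cached, fetch_fn) are hypothetical; the real
# loader additionally holds an inter-process lock and unpacks the payload
# into a package directory.
import collections
import os
import threading

_download_locks = collections.defaultdict(threading.Lock)  # per-package


def fetch_cached(cache_dir, pkg_id, fetch_fn):
    """Return the cached payload path for pkg_id, downloading it at most once."""
    target = os.path.join(cache_dir, pkg_id)
    if os.path.exists(target):
        return target  # fast path: already cached
    with _download_locks[pkg_id]:
        # Another thread may have finished the download while we waited.
        if os.path.exists(target):
            return target
        data = fetch_fn(pkg_id)  # expensive call, done exactly once per miss
        tmp = target + '.part'
        with open(tmp, 'wb') as f:
            f.write(data)
        # Publish into the cache; atomic on POSIX within one filesystem.
        os.rename(tmp, target)
        return target
# ---------------------------------------------------------------------------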