def schedule_delete_from_backend(uri, options, context, image_id, **kwargs):
    """
    Given a uri and a time, schedule the deletion of an image.
    """
    use_delay = config.get_option(options, 'delayed_delete', type='bool',
                                  default=False)
    if not use_delay:
        registry.update_image_metadata(context, image_id,
                                       {'status': 'deleted'})
        try:
            return delete_from_backend(uri, **kwargs)
        except (UnsupportedBackend, exception.NotFound):
            msg = _("Failed to delete image from store (%(uri)s).") % locals()
            logger.error(msg)

    datadir = config.get_option(options, 'scrubber_datadir')
    scrub_time = config.get_option(options, 'scrub_time', type='int',
                                   default=0)
    delete_time = time.time() + scrub_time
    file_path = os.path.join(datadir, str(image_id))
    utils.safe_mkdirs(datadir)

    if os.path.exists(file_path):
        msg = _("Image id %(image_id)s already queued for delete") % {
                'image_id': image_id}
        raise exception.Duplicate(msg)

    with open(file_path, 'w') as f:
        f.write('\n'.join([uri, str(int(delete_time))]))
    os.chmod(file_path, 0600)
    os.utime(file_path, (delete_time, delete_time))

    registry.update_image_metadata(context, image_id,
                                   {'status': 'pending_delete'})

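# Illustrative sketch (not part of the original module): the queue file
# written above stores the image URI on the first line and the integer
# delete time on the second. A hypothetical scrubber-side helper could
# recover that pair like this.
def read_scrub_queue_file(file_path):
    """Return the (uri, delete_time) pair stored in a scrub queue file."""
    with open(file_path) as f:
        uri, delete_time = f.read().split('\n')
    return uri, int(delete_time)
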
def configure_db(options):
    """
    Establish the database, create an engine if needed, and
    register the models.

    :param options: Mapping of configuration options
    """
    global _ENGINE
    global logger
    if not _ENGINE:
        debug = config.get_option(
            options, 'debug', type='bool', default=False)
        verbose = config.get_option(
            options, 'verbose', type='bool', default=False)
        timeout = config.get_option(
            options, 'sql_idle_timeout', type='int', default=3600)
        _ENGINE = create_engine(options['sql_connection'],
                                pool_recycle=timeout)
        logger = logging.getLogger('sqlalchemy.engine')
        if debug:
            logger.setLevel(logging.DEBUG)
        elif verbose:
            logger.setLevel(logging.INFO)
        models.register_models(_ENGINE)

def configure_db(options):
    """
    Establish the database, create an engine if needed, and
    register the models.

    :param options: Mapping of configuration options
    """
    global _ENGINE, sa_logger, logger
    if not _ENGINE:
        debug = config.get_option(
            options, 'debug', type='bool', default=False)
        verbose = config.get_option(
            options, 'verbose', type='bool', default=False)
        timeout = config.get_option(
            options, 'sql_idle_timeout', type='int', default=3600)
        sql_connection = config.get_option(options, 'sql_connection')
        try:
            _ENGINE = create_engine(sql_connection, pool_recycle=timeout)
        except Exception, err:
            msg = _("Error configuring registry database with supplied "
                    "sql_connection '%(sql_connection)s'. "
                    "Got error:\n%(err)s") % locals()
            logger.error(msg)
            raise
        sa_logger = logging.getLogger('sqlalchemy.engine')
        if debug:
            sa_logger.setLevel(logging.DEBUG)
        elif verbose:
            sa_logger.setLevel(logging.INFO)
        models.register_models(_ENGINE)

def configure_db(options):
    """
    Establish the database, create an engine if needed, and
    register the models.

    :param options: Mapping of configuration options
    """
    global _ENGINE, sa_logger, logger
    if not _ENGINE:
        debug = config.get_option(options, 'debug', type='bool',
                                  default=False)
        verbose = config.get_option(options, 'verbose', type='bool',
                                    default=False)
        timeout = config.get_option(options, 'sql_idle_timeout', type='int',
                                    default=3600)
        sql_connection = config.get_option(options, 'sql_connection')
        try:
            _ENGINE = create_engine(sql_connection, pool_recycle=timeout)
        except Exception, err:
            msg = _("Error configuring registry database with supplied "
                    "sql_connection '%(sql_connection)s'. "
                    "Got error:\n%(err)s") % locals()
            logger.error(msg)
            raise
        sa_logger = logging.getLogger('sqlalchemy.engine')
        if debug:
            sa_logger.setLevel(logging.DEBUG)
        elif verbose:
            sa_logger.setLevel(logging.INFO)
        models.register_models(_ENGINE)

def create_bucket_if_missing(bucket, s3_conn, options):
    """
    Creates a missing bucket in S3 if the
    ``s3_store_create_bucket_on_put`` option is set.

    :param bucket: Name of bucket to create
    :param s3_conn: Connection to S3
    :param options: Option mapping
    """
    from boto.exception import S3ResponseError
    try:
        s3_conn.get_bucket(bucket)
    except S3ResponseError, e:
        if e.status == httplib.NOT_FOUND:
            add_bucket = config.get_option(options,
                                's3_store_create_bucket_on_put',
                                type='bool', default=False)
            if add_bucket:
                try:
                    s3_conn.create_bucket(bucket)
                except S3ResponseError, e:
                    msg = ("Failed to add bucket to S3.\n"
                           "Got error from S3: %(e)s" % locals())
                    raise glance.store.BackendException(msg)
            else:
                msg = ("The bucket %(bucket)s does not exist in "
                       "S3. Please set the "
                       "s3_store_create_bucket_on_put option "
                       "to add bucket to S3 automatically." % locals())
                raise glance.store.BackendException(msg)

def create_container_if_missing(container, swift_conn, options):
    """
    Creates a missing container in Swift if the
    ``swift_store_create_container_on_put`` option is set.

    :param container: Name of container to create
    :param swift_conn: Connection to Swift
    :param options: Option mapping
    """
    try:
        swift_conn.head_container(container)
    except swift_client.ClientException, e:
        if e.http_status == httplib.NOT_FOUND:
            add_container = config.get_option(options,
                                'swift_store_create_container_on_put',
                                type='bool', default=False)
            if add_container:
                try:
                    swift_conn.put_container(container)
                except swift_client.ClientException, e:
                    msg = _("Failed to add container to Swift.\n"
                            "Got error from Swift: %(e)s") % locals()
                    raise glance.store.BackendException(msg)
            else:
                msg = (_("The container %(container)s does not exist in "
                         "Swift. Please set the "
                         "swift_store_create_container_on_put option "
                         "to add container to Swift automatically.")
                       % locals())
                raise glance.store.BackendException(msg)

def __init__(self, options, strategy=None):
    strategy = config.get_option(options, "notifier_strategy",
                                 type="str", default="default")
    try:
        self.strategy = self.STRATEGIES[strategy](options)
    except KeyError:
        raise exception.InvalidNotifierStrategy(strategy=strategy)

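# Illustrative sketch (an assumption, not taken from this section): STRATEGIES,
# looked up above, is expected to be a mapping on the notifier class from a
# configured strategy name to the class implementing it. The placeholder
# strategy below is hypothetical and exists only to make the shape of that
# mapping concrete.
class NoopStrategy(object):
    """Hypothetical strategy whose instances just hold the options mapping."""

    def __init__(self, options):
        self.options = options


STRATEGIES = {
    "default": NoopStrategy,
}
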
def configure_registry_client(options):
    """
    Sets up a registry client for use in registry lookups

    :param options: Configuration options coming from controller
    """
    global _CLIENT_KWARGS, _CLIENT_HOST, _CLIENT_PORT, _METADATA_ENCRYPTION_KEY
    try:
        host = options['registry_host']
        port = int(options['registry_port'])
    except (TypeError, ValueError):
        msg = _("Configuration option was not valid")
        logger.error(msg)
        raise exception.BadRegistryConnectionConfiguration(msg)
    except KeyError:
        msg = _("Could not find required configuration option")
        logger.error(msg)
        raise exception.BadRegistryConnectionConfiguration(msg)

    use_ssl = config.get_option(options, 'registry_client_protocol',
                                default='http').lower() == 'https'
    key_file = options.get('registry_client_key_file')
    cert_file = options.get('registry_client_cert_file')
    ca_file = options.get('registry_client_ca_file')

    _METADATA_ENCRYPTION_KEY = options.get('metadata_encryption_key')
    _CLIENT_HOST = host
    _CLIENT_PORT = port
    _CLIENT_KWARGS = {'use_ssl': use_ssl,
                      'key_file': key_file,
                      'cert_file': cert_file,
                      'ca_file': ca_file}

def configure_registry_client(options):
    """
    Sets up a registry client for use in registry lookups

    :param options: Configuration options coming from controller
    """
    global _CLIENT_KWARGS, _CLIENT_HOST, _CLIENT_PORT
    try:
        host = options["registry_host"]
        port = int(options["registry_port"])
    except (TypeError, ValueError):
        msg = _("Configuration option was not valid")
        logger.error(msg)
        raise exception.BadRegistryConnectionConfiguration(msg)
    except KeyError:
        msg = _("Could not find required configuration option")
        logger.error(msg)
        raise exception.BadRegistryConnectionConfiguration(msg)

    use_ssl = config.get_option(options, "registry_client_protocol",
                                default="http").lower() == "https"
    key_file = options.get("registry_client_key_file")
    cert_file = options.get("registry_client_cert_file")
    ca_file = options.get("registry_client_ca_file")

    _CLIENT_HOST = host
    _CLIENT_PORT = port
    _CLIENT_KWARGS = {"use_ssl": use_ssl,
                      "key_file": key_file,
                      "cert_file": cert_file,
                      "ca_file": ca_file}

def __init__(self, options):
    logger.info("Initializing scrubber with options: %s" % options)
    self.options = options
    scrub_time = config.get_option(options, 'scrub_time', type='int',
                                   default=0)
    logger.info("Scrub interval set to %s seconds" % scrub_time)
    self.scrub_time = datetime.timedelta(seconds=scrub_time)
    db_api.configure_db(options)

def __init__(self, options):
    logger.info(_("Initializing scrubber with options: %s") % options)
    self.options = options
    self.datadir = config.get_option(options, 'scrubber_datadir')
    self.cleanup = config.get_option(options, 'cleanup_scrubber',
                                     type='bool', default=False)

    host = config.get_option(options, 'registry_host')
    port = config.get_option(options, 'registry_port', type='int')
    self.registry = client.RegistryClient(host, port)

    utils.safe_mkdirs(self.datadir)

    if self.cleanup:
        self.cleanup_time = config.get_option(options,
                                              'cleanup_scrubber_time',
                                              type='int', default=86400)
    store.create_stores(options)

def __init__(self, options):
    logger.info(_("Initializing scrubber with options: %s") % options)
    self.options = options
    scrub_time = config.get_option(options, "scrub_time", type="int",
                                   default=0)
    logger.info(_("Scrub interval set to %s seconds") % scrub_time)
    self.scrub_time = datetime.timedelta(seconds=scrub_time)
    db_api.configure_db(options)
    store.create_stores(options)

def __init__(self, options):
    logger.info(_("Initializing scrubber with options: %s") % options)
    self.options = options
    scrub_time = config.get_option(options, 'scrub_time', type='int',
                                   default=0)
    logger.info(_("Scrub interval set to %s seconds") % scrub_time)
    self.scrub_time = datetime.timedelta(seconds=scrub_time)
    db_api.configure_db(options)
    store.create_stores(options)

def configure_db(options):
    """
    Establish the database, create an engine if needed, and
    register the models.

    :param options: Mapping of configuration options
    """
    global _ENGINE, sa_logger, logger
    if not _ENGINE:
        debug = config.get_option(
            options, 'debug', type='bool', default=False)
        verbose = config.get_option(
            options, 'verbose', type='bool', default=False)
        timeout = config.get_option(
            options, 'sql_idle_timeout', type='int', default=3600)
        sql_connection = config.get_option(options, 'sql_connection')
        connection_dict = sqlalchemy.engine.url.make_url(sql_connection)
        engine_args = {'pool_recycle': timeout,
                       'echo': False,
                       'convert_unicode': True
                       }
        if 'mysql' in connection_dict.drivername:
            engine_args['listeners'] = [MySQLPingListener()]
        try:
            _ENGINE = create_engine(sql_connection, **engine_args)
        except Exception, err:
            msg = _("Error configuring registry database with supplied "
                    "sql_connection '%(sql_connection)s'. "
                    "Got error:\n%(err)s") % locals()
            logger.error(msg)
            raise
        sa_logger = logging.getLogger('sqlalchemy.engine')
        if debug:
            sa_logger.setLevel(logging.DEBUG)
        elif verbose:
            sa_logger.setLevel(logging.INFO)
        models.register_models(_ENGINE)

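# Illustrative sketch (an assumption, not code from this section):
# MySQLPingListener, attached as a pool listener above, is not defined here.
# A minimal SQLAlchemy pool listener with the same intent could look like the
# class below: it pings each MySQL connection as it is checked out of the
# pool and asks the pool to reconnect when the server has gone away.
from sqlalchemy.exc import DisconnectionError


class MySQLPingListener(object):
    """Ensure MySQL connections checked out of the pool are still alive."""

    def checkout(self, dbapi_con, con_record, con_proxy):
        try:
            dbapi_con.cursor().execute('select 1')
        except dbapi_con.OperationalError, ex:
            if ex.args[0] in (2006, 2013, 2014, 2045, 2055):
                # "MySQL server has gone away" and similar errors: raising
                # DisconnectionError tells the pool to drop this connection
                # and retry with a fresh one.
                raise DisconnectionError("Database server went away")
            raise
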
def schedule_delete_from_backend(uri, options, context, image_id, **kwargs):
    """
    Given a uri and a time, schedule the deletion of an image.
    """
    use_delay = config.get_option(options, 'delayed_delete', type='bool',
                                  default=False)
    if not use_delay:
        registry.update_image_metadata(context, image_id,
                                       {'status': 'deleted'})
        try:
            return delete_from_backend(uri, **kwargs)
        except (UnsupportedBackend, exception.NotFound):
            msg = _("Failed to delete image from store (%(uri)s).") % locals()
            logger.error(msg)

    datadir = config.get_option(options, 'scrubber_datadir')
    scrub_time = config.get_option(options, 'scrub_time', type='int',
                                   default=0)
    delete_time = time.time() + scrub_time
    file_path = os.path.join(datadir, str(image_id))
    utils.safe_mkdirs(datadir)

    if os.path.exists(file_path):
        msg = _("Image id %(image_id)s already queued for delete") % {
                'image_id': image_id}
        raise exception.Duplicate(msg)

    with open(file_path, 'w') as f:
        f.write('\n'.join([uri, str(int(delete_time))]))
    os.chmod(file_path, 0600)
    os.utime(file_path, (delete_time, delete_time))

    registry.update_image_metadata(context, image_id,
                                   {'status': 'pending_delete'})

class Store(glance.store.base.Store):
    """An implementation of the swift backend adapter."""

    EXAMPLE_URL = "swift://<USER>:<KEY>@<AUTH_ADDRESS>/<CONTAINER>/<FILE>"

    CHUNKSIZE = 65536

    def configure(self):
        """
        Configure the Store to use the stored configuration options

        Any store that needs special configuration should implement
        this method. If the store was not able to successfully configure
        itself, it should raise `exception.BadStoreConfiguration`
        """
        self.auth_address = self._option_get('swift_store_auth_address')
        self.user = self._option_get('swift_store_user')
        self.key = self._option_get('swift_store_key')
        self.container = self.options.get('swift_store_container',
                                          DEFAULT_CONTAINER)
        try:
            if self.options.get('swift_store_large_object_size'):
                self.large_object_size = int(
                    self.options.get('swift_store_large_object_size')
                    ) * (1024 * 1024)  # Size specified in MB in conf files
            else:
                self.large_object_size = DEFAULT_LARGE_OBJECT_SIZE

            if self.options.get('swift_store_large_object_chunk_size'):
                self.large_object_chunk_size = int(
                    self.options.get('swift_store_large_object_chunk_size')
                    ) * (1024 * 1024)  # Size specified in MB in conf files
            else:
                self.large_object_chunk_size = DEFAULT_LARGE_OBJECT_CHUNK_SIZE
        except Exception, e:
            reason = _("Error in configuration options: %s") % e
            logger.error(reason)
            raise exception.BadStoreConfiguration(store_name="swift",
                                                  reason=reason)

        self.scheme = 'swift+https'
        if self.auth_address.startswith('http://'):
            self.scheme = 'swift+http'
            self.full_auth_address = self.auth_address
        elif self.auth_address.startswith('https://'):
            self.full_auth_address = self.auth_address
        else:  # Defaults https
            self.full_auth_address = 'https://' + self.auth_address

        self.snet = config.get_option(
            self.options, 'swift_enable_snet', type='bool', default=False)

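    # Illustrative sketch (an assumption, not shown in this section):
    # _option_get, used repeatedly above, would be a small helper that treats
    # a missing option as a store configuration error, roughly as follows.
    def _option_get(self, param):
        result = self.options.get(param)
        if not result:
            reason = _("Could not find %(param)s in configuration "
                       "options.") % locals()
            logger.error(reason)
            raise exception.BadStoreConfiguration(store_name="swift",
                                                  reason=reason)
        return result
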
def make_context(self, *args, **kwargs):
    """
    Create a context with the given arguments.
    """
    # Determine the context class to use
    ctxcls = RequestContext
    if 'context_class' in self.options:
        ctxcls = utils.import_class(self.options['context_class'])

    # Determine whether to use tenant or owner
    owner_is_tenant = config.get_option(self.options, 'owner_is_tenant',
                                        type='bool', default=True)
    kwargs.setdefault('owner_is_tenant', owner_is_tenant)

    return ctxcls(*args, **kwargs)

def schedule_delete_from_backend(uri, options, context, id, **kwargs):
    """
    Given a uri and a time, schedule the deletion of an image.
    """
    use_delay = config.get_option(options, 'delayed_delete', type='bool',
                                  default=False)
    if not use_delay:
        registry.update_image_metadata(options, context, id,
                                       {'status': 'deleted'})
        try:
            return delete_from_backend(uri, **kwargs)
        except (UnsupportedBackend, exception.NotFound):
            msg = "Failed to delete image from store (%s). "
            logger.error(msg % uri)

    registry.update_image_metadata(options, context, id,
                                   {'status': 'pending_delete'})

def schedule_delete_from_backend(uri, options, context, image_id, **kwargs):
    """
    Given a uri and a time, schedule the deletion of an image.
    """
    use_delay = config.get_option(options, 'delayed_delete', type='bool',
                                  default=False)
    if not use_delay:
        registry.update_image_metadata(options, context, image_id,
                                       {'status': 'deleted'})
        try:
            return delete_from_backend(uri, **kwargs)
        except (UnsupportedBackend, exception.NotFound):
            msg = _("Failed to delete image from store (%(uri)s).") % locals()
            logger.error(msg)

    registry.update_image_metadata(options, context, image_id,
                                   {'status': 'pending_delete'})

def configure(self):
    """
    Configure the Store to use the stored configuration options

    Any store that needs special configuration should implement
    this method. If the store was not able to successfully configure
    itself, it should raise `exception.BadStoreConfiguration`
    """
    self.auth_address = self._option_get('swift_store_auth_address')
    self.user = self._option_get('swift_store_user')
    self.key = self._option_get('swift_store_key')
    self.container = self.options.get('swift_store_container',
                                      DEFAULT_SWIFT_CONTAINER)
    self.scheme = 'swift+https'
    if self.auth_address.startswith('http://'):
        self.scheme = 'swift+http'
        self.full_auth_address = self.auth_address
    elif self.auth_address.startswith('https://'):
        self.full_auth_address = self.auth_address
    else:  # Defaults https
        self.full_auth_address = 'https://' + self.auth_address

    self.snet = config.get_option(
        self.options, 'swift_enable_snet', type='bool', default=False)

def enabled(self):
    return config.get_option(self.options, "image_cache_enabled",
                             type="bool", default=False)

def max_size(self):
    default = 1 * 1024 * 1024 * 1024  # 1 GB
    return config.get_option(self.options, 'image_cache_max_size_bytes',
                             type='int', default=default)

def percent_extra_to_free(self):
    return config.get_option(self.options,
                             'image_cache_percent_extra_to_free',
                             type='float', default=0.05)

def _get_option(self, name, datatype, default):
    """Retrieve a configuration option."""
    return config.get_option(self._options, name, type=datatype,
                             default=default)

def max_size(self):
    default = 1 * 1024 * 1024 * 1024  # 1 GB
    return config.get_option(
        self.options, 'image_cache_max_size_bytes',
        type='int', default=default)

def percent_extra_to_free(self):
    return config.get_option(
        self.options, 'image_cache_percent_extra_to_free',
        type='float', default=0.05)

def configure(self):
    self.snet = config.get_option(
        self.options, 'swift_enable_snet', type='bool', default=False)

def enabled(self):
    return config.get_option(self.options, 'image_cache_enabled',
                             type='bool', default=False)
