def _wrapper(global_config, **local_conf):
    """Build and return the Barbican WSGI app with services initialized.

    Wraps an app factory ``func`` (from the enclosing scope): sets up the
    queue client, oslo logging, and the database engine before delegating
    to ``func``; optionally wraps the result with the New Relic agent.
    """
    # Queuing initialization -- client mode; this process does not
    # consume tasks itself.
    queue.init(CONF, is_server_side=False)

    # Configure oslo logging and configuration services.
    log.setup(CONF, 'barbican')
    LOG = log.getLogger(__name__)

    # Optional remote debugger hook -- presumably a no-op unless
    # configured; confirm in barbican.common.config.
    config.setup_remote_pydev_debug()

    # Initializing the database engine and session factory before the app
    # starts ensures we don't lose requests due to lazy initialization of
    # db connections.
    try:
        repositories.setup_database_engine_and_factory(
            initialize_secret_stores=True
        )
        repositories.commit()
    except Exception:
        LOG.exception('Failed to sync secret_stores table.')
        repositories.rollback()
        raise

    wsgi_app = func(global_config, **local_conf)

    # NOTE(review): newrelic_loaded looks like a module-level flag set at
    # import time -- confirm.
    if newrelic_loaded:
        wsgi_app = newrelic.agent.WSGIApplicationWrapper(wsgi_app)
    LOG.info('Barbican app created and initialized')
    return wsgi_app
def _create_generic_order_for_put(self):
    """Create a real order to modify and perform PUT actions on

    This makes sure that a project exists for our order and that there is
    an order within the database. This is a little hacky due to issues
    testing certificate order types.
    """
    # Create a plain key order through the API first.
    create_resp, order_uuid = create_order(
        self.app,
        order_type='key',
        meta=generic_key_meta
    )
    self.assertEqual(202, create_resp.status_int)

    # Then rewrite it in the DB as a pending certificate order so that
    # PUT actions may be performed against it.
    db_order = order_repo.get(order_uuid, self.project_id)
    db_order.type = 'certificate'
    db_order.status = models.States.PENDING
    db_order.meta = {'nope': 'nothing'}
    db_order.save()
    repositories.commit()

    return order_uuid
def test_should_raise_for_pycrypto_stored_key_no_private_key(self):
    """Stored-key cert request must fail when the private key is gone."""
    # Generate a fresh RSA key pair and serialize both halves to PEM.
    private_key = rsa.generate_private_key(public_exponent=65537,
                                           key_size=2048,
                                           backend=default_backend())
    public_key = private_key.public_key()

    private_key_pem = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption())
    self.private_key_value = base64.b64encode(private_key_pem)

    public_key_pem = public_key.public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.PKCS1)
    self.public_key_value = base64.b64encode(public_key_pem)

    # The mocked store plugin serves secrets via stored_key_side_effect,
    # which presumably reads the *_value attributes set above -- confirm.
    self.store_plugin.get_secret.side_effect = self.stored_key_side_effect
    self.result.status = cert_man.CertificateStatus.WAITING_FOR_CA

    # Delete the private key so the stored-key request cannot resolve it.
    secret_repo.delete_entity_by_id(self.private_key.id,
                                    self.external_project_id)
    # We need to commit deletions or we'll get deleted objects with deleted
    # set to True. This is caused by SQLAlchemy's identity mapping and our
    # use of scoped_session.
    repositories.commit()

    self.order.meta.update(self.stored_key_meta)
    self.assertRaises(excep.StoredKeyPrivateKeyNotFound,
                      cert_res.issue_certificate_request,
                      self.order,
                      self.project,
                      self.result_follow_on)
def wrapper(*args, **kwargs):
    """Invoke the wrapped task ``fn``, managing the DB transaction.

    In non-server mode the task runs directly; in server mode it runs
    inside a commit-on-success / rollback-on-error transaction, and the
    session is always cleared afterwards.
    """
    fn_name = getattr(fn, '__name__', '????')
    if not queue.is_server_side():
        # Non-server mode directly invokes tasks.
        fn(*args, **kwargs)
        LOG.info(u._LI("Completed worker task: '%s'"), fn_name)
    else:
        # Manage session/transaction.
        # NOTE(review): no repositories.start() here, unlike a sibling
        # wrapper elsewhere -- presumably the session is started by the
        # caller; confirm.
        try:
            fn(*args, **kwargs)
            repositories.commit()
            LOG.info(u._LI("Completed worker task: '%s'"), fn_name)
        except Exception:
            """NOTE: Wrapped functions must process with care!

            Exceptions that reach here will revert the entire transaction,
            including any updates made to entities such as setting error
            codes and error messages.
            """
            LOG.exception(
                u._LE("Problem seen processing worker task: '%s'"),
                fn_name
            )
            repositories.rollback()
        finally:
            repositories.clear()
def test_rollback_with_error_during_project_cleanup(self, mock_delete,
                                                    mock_handle_error):
    """Entities must survive intact when project cleanup fails mid-way."""
    self._init_memory_db_setup()
    secret = self._create_secret_for_project(self.project1_data)
    self.assertIsNotNone(secret)

    secret_id = secret.id
    project1_id = self.project1_data.id

    # Sanity check: exactly one secret and one KEK datum exist up front.
    secret_repo = rep.get_secret_repository()
    db_secrets = secret_repo.get_project_entities(project1_id)
    self.assertEqual(1, len(db_secrets))
    self.assertEqual(secret.id, db_secrets[0].id)

    kek_repo = rep.get_kek_datum_repository()
    db_kek = kek_repo.get_project_entities(project1_id)
    self.assertEqual(1, len(db_kek))

    # Commit changes made so far before creating rollback scenario
    rep.commit()

    handle_error_mock = mock.MagicMock()
    self.task.handler_error = handle_error_mock

    # NOTE(review): self.project_id1 (external id) differs from
    # project1_id (internal DB id) -- presumably intentional; confirm
    # against the fixture definitions.
    self.assertRaises(exception.BarbicanException,
                      self.task.process,
                      project_id=self.project_id1,
                      resource_type='project',
                      operation_type='deleted')

    mock_handle_error.assert_called_once_with(
        self.project1_data,
        500,
        mock.ANY,
        mock.ANY,
        operation_type='deleted',
        project_id=mock.ANY,
        resource_type='project',
    )
    args, kwargs = mock_handle_error.call_args
    self.assertEqual(500, args[1])
    self.assertEqual(self.project_id1, kwargs['project_id'])
    self.assertEqual('project', kwargs['resource_type'])
    self.assertEqual('deleted', kwargs['operation_type'])

    # Make sure entities are still present after rollback
    db_secrets = secret_repo.get_project_entities(project1_id)
    self.assertEqual(1, len(db_secrets))
    self.assertEqual(secret_id, db_secrets[0].id)

    db_kek = kek_repo.get_project_entities(project1_id)
    self.assertEqual(1, len(db_kek))

    project_repo = rep.get_project_repository()
    db_project = project_repo.get_project_entities(project1_id)
    self.assertEqual(1, len(db_project))
def test_rollback_with_error_during_project_cleanup(self, mock_delete,
                                                    mock_handle_error):
    """Entities must survive intact when project cleanup fails mid-way."""
    self._init_memory_db_setup()
    rep.start()
    secret = self._create_secret_for_project(self.project1_data)
    self.assertIsNotNone(secret)

    secret_id = secret.id
    project1_id = self.project1_data.id

    # Sanity check: one secret, one tenant-secret link, one KEK datum.
    db_secrets = self.repos.secret_repo.get_project_entities(project1_id)
    self.assertEqual(1, len(db_secrets))
    self.assertEqual(secret.id, db_secrets[0].id)

    db_tenant_secret = self.repos.tenant_secret_repo.get_project_entities(
        project1_id)
    self.assertEqual(1, len(db_tenant_secret))

    db_kek = self.repos.kek_repo.get_project_entities(project1_id)
    self.assertEqual(1, len(db_kek))

    # Commit changes made so far before creating the rollback scenario.
    rep.commit()

    task = consumer.KeystoneEventConsumer()
    handle_error_mock = mock.MagicMock()
    task.handler_error = handle_error_mock

    # NOTE(review): self.project_id1 (external id) differs from
    # project1_id (internal DB id) -- presumably intentional; confirm.
    self.assertRaises(exception.BarbicanException,
                      task.process,
                      project_id=self.project_id1,
                      resource_type='project',
                      operation_type='deleted')

    mock_handle_error.assert_called()
    args, kwargs = mock_handle_error.call_args
    self.assertEqual(500, args[1])
    self.assertEqual(self.project_id1, kwargs['project_id'])
    self.assertEqual('project', kwargs['resource_type'])
    self.assertEqual('deleted', kwargs['operation_type'])

    # Make sure entities are still present after rollback
    db_secrets = self.repos.secret_repo.get_project_entities(project1_id)
    self.assertEqual(1, len(db_secrets))
    self.assertEqual(secret_id, db_secrets[0].id)

    db_tenant_secret = self.repos.tenant_secret_repo.get_project_entities(
        project1_id)
    self.assertEqual(1, len(db_tenant_secret))

    db_kek = self.repos.kek_repo.get_project_entities(project1_id)
    self.assertEqual(1, len(db_kek))

    db_tenant = self.repos.tenant_repo.get_project_entities(project1_id)
    self.assertEqual(1, len(db_tenant))
def on_post(self, external_project_id, **kwargs):
    """Handle POST of a new typed order: validate, persist, enqueue.

    :param external_project_id: Keystone project id of the caller
    :returns: dict with the new order's href; sets 202 + Location header
    """
    project = res.get_or_create_project(external_project_id)

    body = api.load_body(pecan.request,
                         validator=self.type_order_validator)

    order_type = body.get('type')
    order_meta = body.get('meta')
    request_type = order_meta.get('request_type')
    LOG.debug('Processing order type %s, request type %s',
              order_type, request_type)

    if order_type == models.OrderType.CERTIFICATE:
        # Certificate orders require a valid CA; stored-key requests
        # additionally reference an existing RSA container.
        validators.validate_ca_id(project.id, body.get('meta'))
        if request_type == 'stored-key':
            container_ref = order_meta.get('container_ref')
            validators.validate_stored_key_rsa_container(
                external_project_id, container_ref, pecan.request)

    self.quota_enforcer.enforce(project)

    new_order = models.Order()
    new_order.meta = body.get('meta')
    new_order.type = order_type
    new_order.project_id = project.id

    # Context may be absent -- presumably in unauthenticated pipelines;
    # confirm.
    request_id = None
    ctxt = controllers._get_barbican_context(pecan.request)
    if ctxt:
        new_order.creator_id = ctxt.user
        request_id = ctxt.request_id

    self.order_repo.create_from(new_order)

    # Grab our id before commit due to obj expiration from sqlalchemy
    order_id = new_order.id

    # Force commit to avoid async issues with the workers
    repo.commit()

    self.queue.process_type_order(order_id=order_id,
                                  project_id=external_project_id,
                                  request_id=request_id)

    url = hrefs.convert_order_to_href(order_id)

    pecan.response.status = 202
    pecan.response.headers['Location'] = url

    return {'order_ref': url}
def set_project_quotas(self, external_project_id, parsed_project_quotas):
    """Persist quota values for a project, creating the project if needed.

    :param external_project_id: ID of project whose quotas are to be set
    :param parsed_project_quotas: quota values to save in database
    :return: None
    """
    target_project = res.get_or_create_project(external_project_id)
    self.repo.create_or_update_by_project_id(
        target_project.id, parsed_project_quotas)
    # Flush to the DB right away so another thread (e.g. the quota
    # enforcer) sees the updated values without async surprises.
    repo.commit()
def process(self, *args, **kwargs):
    """Process a Keystone event inside a dedicated database transaction.

    Starts a fresh session, delegates the event handling to the parent
    class, and commits on success.  Any exception rolls back the entire
    transaction before being re-raised; the session is always cleared.

    :raises: whatever the parent ``process`` raises, unchanged
    """
    try:
        rep.start()
        super(KeystoneEventConsumer, self).process(*args, **kwargs)
        rep.commit()
    except Exception:
        # NOTE: Exceptions that reach here revert the entire transaction.
        # No need to log an error message as that is already done earlier.
        # (Was a no-op bare string in the original; a comment is clearer.)
        rep.rollback()
        # Bare re-raise preserves the original traceback; the original
        # 'raise e' needlessly rebound and re-raised the exception object.
        raise
    finally:
        rep.clear()
def test_return_400_on_put_when_order_is_active(self):
    """A PUT against an ACTIVE order must be rejected with 400."""
    order_uuid = self._create_generic_order_for_put()

    # Flip the order to ACTIVE in the DB so modification is disallowed.
    active_order = order_repo.get(order_uuid, self.project_id)
    active_order.status = models.States.ACTIVE
    active_order.save()
    repositories.commit()

    put_resp = self.app.put_json(
        '/orders/{0}'.format(order_uuid),
        {'type': 'certificate', 'meta': {}},
        headers={'Content-Type': 'application/json'},
        expect_errors=True
    )
    self.assertEqual(400, put_resp.status_int)
def refresh_ca_table(self):
    """Refreshes the CertificateAuthority table."""
    refreshed_any_plugin = False
    for cert_plugin in plugin_utils.get_active_plugins(self):
        full_plugin_name = utils.generate_fullname_for(cert_plugin)
        _cas, _offset, _limit, total = self.ca_repo.get_by_create_date(
            plugin_name=full_plugin_name, suppress_exception=True)
        if total >= 1:
            # Entries already present for this plugin; nothing to do.
            continue
        # No rows found: the plugin has either never been queried or its
        # entries have expired, so pull fresh CA info from it.
        self.update_ca_info(cert_plugin)
        refreshed_any_plugin = True
    if refreshed_any_plugin:
        # Commit right away so other threads see the refreshed rows.
        repos.commit()
def test_create_should_fail_when_ca_not_in_defined_project_ca_ids(self):
    """Ordering against a CA outside the project's CA list is forbidden."""
    # Bind the first CA to the project, making it the only permitted one.
    project_ca_repo.create_from(
        models.ProjectCertificateAuthority(
            self.project.id,
            self.available_ca_ids[0]
        )
    )
    repositories.commit()

    # Point the order at a CA the project does NOT define.
    self.certificate_meta['ca_id'] = self.available_ca_ids[1]

    create_resp, order_uuid = create_order(
        self.app,
        order_type='certificate',
        meta=self.certificate_meta,
        expect_errors=True
    )
    self.assertEqual(403, create_resp.status_int)
def wrapper(*args, **kwargs):
    """Run the wrapped task ``fn``; in server mode inside its own
    commit-on-success / rollback-on-error database transaction."""
    if not queue.is_server_side():
        fn(*args, **kwargs)  # Non-server mode directly invokes tasks.
    else:
        # Start the database session.
        repositories.start()
        # Manage session/transaction.
        try:
            fn(*args, **kwargs)
            repositories.commit()
        except Exception:
            """NOTE: Wrapped functions must process with care!

            Exceptions that reach here will revert the entire transaction,
            including any updates made to entities such as setting error
            codes and error messages.
            """
            repositories.rollback()
        finally:
            repositories.clear()
def sync_secret_stores(sql_url, verbose, log_file):
    """Command to sync secret stores table with config .

    :param sql_url: sql connection string to connect to a database
    :param verbose: If True, log and print more information
    :param log_file: If set, override the log_file configured
    :raises: any failure from the DB setup/commit, after rollback
    """
    if verbose:
        # The verbose flag prints out log events to the screen, otherwise
        # the log events will only go to the log file
        CONF.set_override('debug', True)

    if log_file:
        CONF.set_override('log_file', log_file)

    LOG.info("Syncing the secret_stores table with barbican.conf")
    log.setup(CONF, 'barbican')

    try:
        if sql_url:
            CONF.set_override('sql_connection', sql_url)
        repo.setup_database_engine_and_factory(
            initialize_secret_stores=True)
        repo.commit()
    except Exception:
        LOG.exception('Failed to sync secret_stores table.')
        repo.rollback()
        # Bare re-raise preserves the traceback; the original 'raise ex'
        # rebound the exception needlessly.
        raise
    finally:
        # Undo every override so subsequent commands see pristine config.
        if verbose:
            CONF.clear_override('debug')
        if log_file:
            CONF.clear_override('log_file')
        repo.clear()
        if sql_url:
            CONF.clear_override('sql_connection')
        log.setup(CONF, 'barbican')  # reset the overrides
def _enqueue_task(self, task):
    """Re-enqueue the specified task."""
    # Defaults so the error log below is meaningful even if reading the
    # task's attributes is what failed.
    retry_task_name = 'N/A'
    retry_args = 'N/A'
    retry_kwargs = 'N/A'

    # Start a new isolated database transaction just for this task.
    repositories.start()
    try:
        # Invoke queue client to place retried RPC task on queue.
        retry_task_name = task.retry_task
        retry_args = task.retry_args
        retry_kwargs = task.retry_kwargs
        retry_method = getattr(self.queue, retry_task_name)
        retry_method(*retry_args, **retry_kwargs)

        # Remove the retry record from the queue.
        task.status = models.States.ACTIVE
        self.order_retry_repo.delete_entity_by_id(task.id, None)

        repositories.commit()

        LOG.debug(
            "(Enqueued method '{0}' with args '{1}' and "
            "kwargs '{2}')".format(
                retry_task_name, retry_args, retry_kwargs))
    except Exception:
        # Best-effort: log and roll back, but do not propagate -- one bad
        # retry record must not abort processing of the others.
        LOG.exception(
            u._LE(
                "Problem enqueuing method '%(name)s' with args '%(args)s' "
                "and kwargs '%(kwargs)s'."),
            {
                'name': retry_task_name,
                'args': retry_args,
                'kwargs': retry_kwargs
            }
        )
        repositories.rollback()
    finally:
        repositories.clear()
def setUp(self):
    """Seed the DB with two project-visible CAs plus one foreign CA."""
    super(WhenCreatingCertificateOrders, self).setUp()

    self.certificate_meta = {
        'request': 'XXXXXX'
    }

    # Make sure we have a project
    self.project = resources.get_or_create_project(self.project_id)

    # Create CA's in the db
    self.available_ca_ids = []
    for _ in range(2):
        parsed_ca_info = {
            'plugin_name': 'plugin_name',
            'plugin_ca_id': 'plugin_name ca_id1',
            'name': 'plugin name',
            'description': 'Master CA for default plugin',
            'ca_signing_certificate': 'XXXXX',
            'intermediates': 'YYYYY'
        }
        stored_ca = ca_repo.create_from(
            models.CertificateAuthority(parsed_ca_info))
        self.available_ca_ids.append(stored_ca.id)

    # A CA owned by a different project, for access-restriction tests.
    foreign_project = resources.get_or_create_project('foreign_project')
    foreign_parsed_ca_info = {
        'project_id': foreign_project.id,
        'plugin_name': 'plugin_name',
        'plugin_ca_id': 'plugin_name ca_id1',
        'name': 'plugin name',
        'description': 'Master CA for default plugin',
        'ca_signing_certificate': 'XXXXX',
        'intermediates': 'YYYYY'
    }
    stored_foreign_ca = ca_repo.create_from(
        models.CertificateAuthority(foreign_parsed_ca_info))
    self.foreign_ca_id = stored_foreign_ca.id

    repositories.commit()
def test_can_add_new_cert_order_with_ca_id_project_ca_defined(self):
    """An order naming a CA the project defines must be accepted."""
    # Register the first available CA as a project CA.
    project_ca_repo.create_from(
        models.ProjectCertificateAuthority(
            self.project.id,
            self.available_ca_ids[0]
        )
    )
    repositories.commit()

    # Point the order at that same, project-defined CA.
    self.certificate_meta['ca_id'] = self.available_ca_ids[0]

    create_resp, order_uuid = create_order(
        self.app,
        order_type='certificate',
        meta=self.certificate_meta
    )
    self.assertEqual(202, create_resp.status_int)

    stored_order = order_repo.get(order_uuid, self.project_id)
    self.assertIsInstance(stored_order, models.Order)
def setUp(self):
    """Build the shared fixture for certificate-request tests.

    Creates a project, two CAs, three secrets (private/public/passphrase),
    two RSA containers referencing them, and a certificate order, plus the
    mocked cert/store plugins the tests drive.
    """
    super(BaseCertificateRequestsTestCase, self).setUp()
    self.external_project_id = "56789"
    self.project = res.get_or_create_project(self.external_project_id)
    project_repo.save(self.project)

    self.barbican_meta_dto = mock.MagicMock()
    self.order_meta = {}
    self.plugin_meta = {}
    self.barbican_meta = {}

    # Result DTO shared by both mocked cert-plugin calls below.
    self.result = cert_man.ResultDTO(
        cert_man.CertificateStatus.WAITING_FOR_CA
    )
    self.result_follow_on = common.FollowOnProcessingStatusDTO()

    self.cert_plugin = mock.MagicMock()
    self.cert_plugin.issue_certificate_request.return_value = self.result
    self.cert_plugin.check_certificate_status.return_value = self.result

    self.store_plugin = mock.MagicMock()

    parsed_ca = {
        'plugin_name': "cert_plugin",
        'plugin_ca_id': "XXXX",
        'name': "test ca",
        'description': 'Test CA',
        'ca_signing_certificate': 'ZZZZZ',
        'intermediates': 'YYYYY'
    }
    self.ca = models.CertificateAuthority(parsed_ca)
    ca_repo.create_from(self.ca)
    self.ca_id = self.ca.id

    # second ca for testing
    parsed_ca = {
        'plugin_name': "cert_plugin",
        'plugin_ca_id': "XXXX2",
        'name': "test ca2",
        'description': 'Test CA2',
        'ca_signing_certificate': 'ZZZZZ2',
        'intermediates': 'YYYYY2'
    }
    self.ca2 = models.CertificateAuthority(parsed_ca)
    ca_repo.create_from(self.ca2)
    self.ca_id2 = self.ca2.id

    # data for preferred CA and global preferred CA tests
    # add those to the repo in those tests
    # NOTE(review): both use self.project.id and self.ca_id -- presumably
    # intentional for the "global" case too; confirm.
    self.pref_ca = models.PreferredCertificateAuthority(
        self.project.id,
        self.ca_id)

    self.global_pref_ca = models.PreferredCertificateAuthority(
        self.project.id,
        self.ca_id)

    # data for stored key cases
    self.private_key = models.Secret()
    self.private_key.secret_type = 'PRIVATE'
    self.private_key.project_id = self.project.id
    secret_repo.create_from(self.private_key)

    self.public_key = models.Secret()
    self.public_key.secret_type = 'PUBLIC'
    self.public_key.project_id = self.project.id
    secret_repo.create_from(self.public_key)

    self.passphrase = models.Secret()
    self.passphrase.secret_type = 'PASSPHRASE'
    self.passphrase.project_id = self.project.id
    secret_repo.create_from(self.passphrase)

    # Secret payloads the mocked store plugin hands back; individual
    # tests overwrite these as needed.
    self.private_key_value = None
    self.public_key_value = "public_key"
    self.passphrase_value = None

    self.parsed_container_with_passphrase = {
        'name': 'container name',
        'type': 'rsa',
        'secret_refs': [
            {'name': 'private_key',
             'secret_ref': 'https://localhost/secrets/' +
                           self.private_key.id},
            {'name': 'public_key',
             'secret_ref': 'https://localhost/secrets/' +
                           self.public_key.id},
            {'name': 'private_key_passphrase',
             'secret_ref': 'https://localhost/secrets/' +
                           self.passphrase.id}
        ]
    }

    self.parsed_container = {
        'name': 'container name',
        'type': 'rsa',
        'secret_refs': [
            {'name': 'private_key',
             'secret_ref': 'https://localhost/secrets/' +
                           self.private_key.id},
            {'name': 'public_key',
             'secret_ref': 'https://localhost/secrets/' +
                           self.public_key.id}
        ]
    }

    self.container_with_passphrase = models.Container(
        self.parsed_container_with_passphrase)
    self.container_with_passphrase.project_id = self.project.id
    container_repo.create_from(self.container_with_passphrase)

    self.container = models.Container(self.parsed_container)
    self.container.project_id = self.project.id
    container_repo.create_from(self.container)

    repositories.commit()

    self.stored_key_meta = {
        cert_man.REQUEST_TYPE:
            cert_man.CertificateRequestType.STORED_KEY_REQUEST,
        "container_ref":
            "https://localhost/containers/" + self.container.id,
        "subject_dn": "cn=host.example.com,ou=dev,ou=us,o=example.com"
    }

    self.order = models.Order()
    self.order.meta = self.order_meta
    self.order.project_id = self.project.id
    self.order.order_barbican_meta = self.barbican_meta
    self.order.type = 'certificate'
    order_repo.create_from(self.order)

    self._config_cert_plugin()
    self._config_store_plugin()
    self._config_cert_event_plugin()
    self._config_save_meta_plugin()
    self._config_get_meta_plugin()
    self._config_save_barbican_meta_plugin()
    self._config_get_barbican_meta_plugin()
    self._config_barbican_meta_dto()
def clean_command(sql_url, min_num_days, do_clean_unassociated_projects,
                  do_soft_delete_expired_secrets, verbose, log_file):
    """Clean command to clean up the database.

    :param sql_url: sql connection string to connect to a database
    :param min_num_days: clean up soft deletions older than this date
    :param do_clean_unassociated_projects: If True, clean up
    unassociated projects
    :param do_soft_delete_expired_secrets: If True, soft delete secrets
    that have expired
    :param verbose: If True, log and print more information
    :param log_file: If set, override the log_file configured
    :raises: any failure from the cleanup, after the DB is rolled back
    """
    if verbose:
        # The verbose flag prints out log events to the screen, otherwise
        # the log events will only go to the log file
        CONF.set_override('debug', True)

    if log_file:
        CONF.set_override('log_file', log_file)

    LOG.info("Cleaning up soft deletions in the barbican database")
    log.setup(CONF, 'barbican')

    cleanup_total = 0
    current_time = timeutils.utcnow()
    stop_watch = timeutils.StopWatch()
    stop_watch.start()
    try:
        if sql_url:
            CONF.set_override('sql_connection', sql_url)
        repo.setup_database_engine_and_factory()

        if do_clean_unassociated_projects:
            cleanup_total += cleanup_unassociated_projects()

        if do_soft_delete_expired_secrets:
            cleanup_total += soft_delete_expired_secrets(
                threshold_date=current_time)

        # A negative min_num_days means no age threshold: clean everything
        # soft-deleted up to right now.
        threshold_date = None
        if min_num_days >= 0:
            threshold_date = current_time - datetime.timedelta(
                days=min_num_days)
        else:
            threshold_date = current_time
        cleanup_total += cleanup_all(threshold_date=threshold_date)

        repo.commit()
    except Exception:
        LOG.exception('Failed to clean up soft deletions in database.')
        repo.rollback()
        cleanup_total = 0  # rollback happened, no entries affected
        # Bare re-raise preserves the traceback; the original 'raise ex'
        # rebound the exception needlessly.
        raise
    finally:
        stop_watch.stop()
        elapsed_time = stop_watch.elapsed()

        # Undo every override so subsequent commands see pristine config.
        if verbose:
            CONF.clear_override('debug')
        if log_file:
            CONF.clear_override('log_file')
        repo.clear()
        if sql_url:
            CONF.clear_override('sql_connection')
        log.setup(CONF, 'barbican')  # reset the overrides

        LOG.info("Cleaning of database affected %s entries",
                 cleanup_total)
        LOG.info('DB clean up finished in %s seconds', elapsed_time)