def test_download_file_url(self):
    # NOTE: direct_url is only exposed by the v2 Glance API.
    class MyGlanceStubClient(stubs.StubGlanceClient):
        """A client that returns a file:// direct URL.

        A source file is created once, at class-definition time, and
        filled with 10 KiB of random data; its path is exposed as the
        class attribute ``s_tmpfname``.
        """
        (outfd, s_tmpfname) = tempfile.mkstemp(prefix='directURLsrc')
        outf = os.fdopen(outfd, 'wb')
        # Context manager so the random-data source is closed (the
        # original leaked the /dev/urandom file handle).
        with open('/dev/urandom', 'rb') as inf:
            for i in range(10):
                outf.write(inf.read(1024))
        outf.close()

        def get(self, image_id):
            # BUG FIX: the URL was built as 'file://%s' + path, which
            # left a literal '%s' embedded in the scheme; concatenate
            # the path directly instead.
            return type('GlanceTestDirectUrlMeta', (object, ),
                        {'direct_url': 'file://' + self.s_tmpfname})

    stub_context = context.RequestContext(auth_token=True)
    stub_context.user_id = 'fake'
    stub_context.project_id = 'fake'
    stub_client = MyGlanceStubClient()
    (outfd, tmpfname) = tempfile.mkstemp(prefix='directURLdst')
    # Binary mode: the downloaded image payload is raw bytes; a
    # text-mode handle would fail under Python 3.
    writer = os.fdopen(outfd, 'wb')
    stub_service = service.GlanceImageService(stub_client,
                                              context=stub_context,
                                              version=2)
    image_id = 1  # doesn't matter
    self.config(allowed_direct_url_schemes=['file'], group='glance')
    stub_service.download(image_id, writer)
    writer.close()

    # Compare the source and the downloaded file byte-for-byte.
    rc = filecmp.cmp(tmpfname, stub_client.s_tmpfname)
    self.assertTrue(rc,
                    "The file %s and %s should be the same" %
                    (tmpfname, stub_client.s_tmpfname))
    os.remove(stub_client.s_tmpfname)
    os.remove(tmpfname)
def test_delete_flow_dns_exception_with_retry_and_fail(self):
    """Deleting a service whose DNS delete keeps failing exhausts retries."""
    service_id = str(uuid.uuid4())
    domains_old = domain.Domain(domain='cdn.poppy.org')
    current_origin = origin.Origin(origin='poppy.org')
    service_obj = service.Service(service_id=service_id,
                                  name='poppy cdn service',
                                  domains=[domains_old],
                                  origins=[current_origin],
                                  flavor_id='cdn')
    kwargs = {
        'project_id': json.dumps(str(uuid.uuid4())),
        'service_id': json.dumps(service_id),
        'time_seconds': [i * self.time_factor
                         for i in range(self.total_retries)],
        'provider_details': json.dumps(
            {k: v.to_dict()
             for k, v in service_obj.provider_details.items()}),
        'context_dict': context_utils.RequestContext().to_dict()
    }
    (service_controller, storage_controller,
     dns_controller, ssl_cert_controller) = self.all_controllers()

    with MonkeyPatchControllers(service_controller,
                                dns_controller,
                                storage_controller,
                                ssl_cert_controller,
                                memoized_controllers.task_controllers):
        self.patch_delete_flow(service_controller, storage_controller,
                               dns_controller)
        dns_responder_returns = self.dns_exceptions_only()
        # Use the public ``side_effect`` API instead of poking the
        # private ``_mock_side_effect`` attribute; each call to
        # ``delete`` yields the next (failing) DNS responder.
        dns_controller.delete = mock.Mock(
            side_effect=(dns_responder
                         for dns_responder in dns_responder_returns))
        engines.run(delete_service.delete_service(), store=kwargs)
def _test_rpc_call(self, method):
    """Exercise *method* on PluginApi and verify the RPC 'call' plumbing.

    Dispatches with method-appropriate fake arguments and checks that
    the mocked transport value is returned (and, for
    get_ports_by_vnic_type_and_host, that the call was built with the
    right keyword arguments).
    """
    agent = rpc.PluginApi('fake_topic')
    ctxt = oslo_context.RequestContext(user_id='fake_user',
                                       project_id='fake_project')
    expect_val = 'foo'
    with mock.patch.object(agent.client, 'call') as mock_call,\
            mock.patch.object(agent.client, 'prepare') as mock_prepare:
        mock_prepare.return_value = agent.client
        mock_call.return_value = expect_val
        func_obj = getattr(agent, method)
        if method == 'tunnel_sync':
            actual_val = func_obj(ctxt, 'fake_tunnel_ip')
        elif method == 'get_ports_by_vnic_type_and_host':
            actual_val = func_obj(ctxt, 'fake_vnic_type', 'fake_host')
            mock_call.assert_called_once_with(
                ctxt, 'get_ports_by_vnic_type_and_host',
                host='fake_host', vnic_type='fake_vnic_type')
        else:
            actual_val = func_obj(ctxt, 'fake_device', 'fake_agent_id')
        # CONSISTENCY FIX: assertEqual(expected, observed) -- matches
        # the argument order used by the other tests in this file.
        self.assertEqual(expect_val, actual_val)
def test_purge_flow_normal(self):
    """Happy-path run of the purge-service taskflow."""
    service_id = str(uuid.uuid4())
    old_domain = domain.Domain(domain='cdn.poppy.org')
    the_origin = origin.Origin(origin='poppy.org')
    service_obj = service.Service(service_id=service_id,
                                  name='poppy cdn service',
                                  domains=[old_domain],
                                  origins=[the_origin],
                                  flavor_id='cdn')
    # Serialize provider details the same way the flow expects them.
    provider_details = dict([(k, v.to_dict())
                             for k, v in
                             service_obj.provider_details.items()])
    store = {
        'project_id': json.dumps(str(uuid.uuid4())),
        'service_id': json.dumps(service_id),
        'provider_details': json.dumps(provider_details),
        'purge_url': 'cdn.poppy.org',
        'hard': json.dumps(True),
        'service_obj': json.dumps(service_obj.to_dict()),
        'context_dict': context_utils.RequestContext().to_dict(),
    }
    (service_controller, storage_controller,
     dns_controller, ssl_cert_controller) = self.all_controllers()

    with MonkeyPatchControllers(service_controller,
                                dns_controller,
                                storage_controller,
                                ssl_cert_controller,
                                memoized_controllers.task_controllers):
        self.patch_purge_flow(service_controller, storage_controller,
                              dns_controller)
        engines.run(purge_service.purge_service(), store=store)
def start(self):
    """Start the RPC service.

    Brings up the oslo.messaging RPC server for this conductor manager,
    wires up signal handling, initializes the manager's host state, and
    starts the periodic task loop under an admin context.
    """
    super(RPCService, self).start()
    # Admin context handed to the periodic tasks below.
    admin_context = context.RequestContext('admin', 'admin', is_admin=True)

    target = messaging.Target(topic=self.topic, server=self.host)
    endpoints = [self.manager]
    serializer = objects_base.IronicObjectSerializer()
    self.rpcserver = rpc.get_server(target, endpoints, serializer)
    self.rpcserver.start()

    self.handle_signal()
    self.manager.init_host()
    # Dynamic timer: the manager decides the delay between runs, capped
    # by CONF.periodic_interval.
    self.tg.add_dynamic_timer(
        self.manager.periodic_tasks,
        periodic_interval_max=CONF.periodic_interval,
        context=admin_context)

    LOG.info(_LI('Created RPC server for service %(service)s on host '
                 '%(host)s.'),
             {'service': self.topic, 'host': self.host})
def test_update_device_list_unsupported(self):
    """Fallback path when the server lacks the bulk update RPC.

    When prepare() raises UnsupportedVersion, update_device_list must
    fall back to per-device update_device_up/down calls and aggregate
    successes and failures into the combined result dict.
    """
    rpcapi = agent_rpc.PluginApi(topics.PLUGIN)
    ctxt = oslo_context.RequestContext('fake_user', 'fake_project')
    up_devices = ['fake_device1', 'fake_device2']
    down_devices = ['fake_device3', 'fake_device4']
    expected = {
        'devices_up': ['fake_device2'],
        'failed_devices_up': ['fake_device1'],
        'devices_down': [{'device': 'fake_device3', 'exists': True}],
        'failed_devices_down': ['fake_device4'],
    }
    # First "up" device fails, second succeeds.
    rpcapi.update_device_up = mock.Mock(
        side_effect=[Exception('fake_device1 fails'), None])
    # First "down" device succeeds, second fails.
    rpcapi.update_device_down = mock.Mock(
        side_effect=[{'device': 'fake_device3', 'exists': True},
                     Exception('fake_device4 fails')])
    with mock.patch.object(rpcapi.client, 'call'),\
            mock.patch.object(rpcapi.client, 'prepare') as prepare_mock:
        prepare_mock.side_effect = oslo_messaging.UnsupportedVersion(
            'test')
        result = rpcapi.update_device_list(ctxt, up_devices,
                                           down_devices,
                                           'fake_agent_id', 'fake_host')
        self.assertEqual(expected, result)
def test_service_state_flow_normal(self):
    """Enable then disable a service through the state-update flows."""
    service_id = str(uuid.uuid4())
    cdn_domain = domain.Domain(domain='cdn.poppy.org')
    cdn_origin = origin.Origin(origin='poppy.org')
    service_obj = service.Service(service_id=service_id,
                                  name='poppy cdn service',
                                  domains=[cdn_domain],
                                  origins=[cdn_origin],
                                  flavor_id='cdn')
    retry_schedule = [i * self.time_factor
                      for i in range(self.total_retries)]
    enable_kwargs = {
        'project_id': json.dumps(str(uuid.uuid4())),
        'state': 'enable',
        'service_obj': json.dumps(service_obj.to_dict()),
        'time_seconds': retry_schedule,
        'context_dict': context_utils.RequestContext().to_dict(),
    }
    # The disable store is identical except for the target state.
    disable_kwargs = dict(enable_kwargs, state='disable')
    (service_controller, storage_controller,
     dns_controller, ssl_cert_controller) = self.all_controllers()

    with MonkeyPatchControllers(service_controller,
                                dns_controller,
                                storage_controller,
                                ssl_cert_controller,
                                memoized_controllers.task_controllers):
        self.patch_service_state_flow(service_controller,
                                      storage_controller,
                                      dns_controller)
        engines.run(update_service_state.enable_service(),
                    store=enable_kwargs)
        engines.run(update_service_state.disable_service(),
                    store=disable_kwargs)
def dolog(self, request, context):
    """Log entry/exit (and failure) of the wrapped GRPC handler ``f``.

    Sets this thread's request context from the Ember config, logs the
    request parameters, invokes the handler, and logs either the timing
    of the response or the failure details before re-raising.
    """
    # Set this thread's context.
    context_utils.RequestContext(
        overwrite=True,
        user_id=CONF.EMBER_CONFIG['user_id'],
        project_id=CONF.EMBER_CONFIG['project_id'],
        project_name=CONF.PROJECT_NAME)

    start = datetime.utcnow()
    LOG.info('=> GRPC %s%s' % (f.__name__, _get_idempotent_id(request)))
    if request.ListFields():
        msg = ' params:\n%s' % tab(request)
    else:
        msg = 'out params'  # renders as "Without params" below
    LOG.debug('With%s' % msg)

    try:
        result = f(self, request, context)
    except Exception as exc:
        end = datetime.utcnow()
        # A set gRPC status code means the handler aborted the call
        # deliberately; otherwise record the unexpected traceback.
        if context._state.code:
            code = str(context._state.code)[11:]
            details = context._state.details
            tback = ''
        else:
            code = 'Unexpected exception'
            details = getattr(exc, 'message', '-')
            tback = '\n' + tab(traceback.format_exc())
        LOG.error('!! GRPC %s failed in %.0fs with %s (%s)%s' %
                  (f.__name__, (end - start).total_seconds(), code,
                   details, tback))
        raise

    end = datetime.utcnow()
    LOG.info('<= GRPC %s%s served in %.0fs' %
             (f.__name__, _get_response_id(result),
              (end - start).total_seconds()))
    str_result = tab(result) if str(result) else 'nothing'
    # BUG FIX: str_result is already tab-indented (or the literal
    # 'nothing'); tab()-ing it a second time double-indented every line.
    LOG.debug('Returns:\n%s' % str_result)
    return result
def before(self, state):
    # Placeholder value; overwritten with a real RequestContext below.
    state.request.context = 'context'
    # Debug dump of the incoming request headers (Python 2 print).
    print dict(state.request.headers)
    # NOTE(review): these values look like header *names* rather than
    # values read from the headers -- presumably placeholders; verify
    # against the real hook before relying on them.
    creds = {
        'user_name': 'X-User-Name',
        'user': '******',
        'project_name': 'X-Project-Name',
        'tenant': 'X-Project-Id',
        'domain': 'X-User-Domain-Id',
        'domain_name': 'X-User-Domain-Name',
        'auth_token': 'X-Auth-Token',
        'roles': ['admin'],
    }
    #is_admin = policy.check('is_admin', creds, creds)
    # Policy check disabled: every request is currently treated as
    # admin.
    is_admin = True
    state.request.context = context.RequestContext(is_admin=is_admin,
                                                   **creds)
    print state.request.context
    #print 'I am hook'
    # NOTE(review): the stray triple-quote below appears to open a
    # commented-out region that continues past this chunk.
    '''
def test_subtask_thread_local(self):
    """Security context and locks, if set, propagates to WrapperTasks."""
    def verify_no_ctx(wrapper):
        # No context has been set yet, so subtasks must see none.
        self.assertIsNone(ctx.get_current())
    tx.FeedTask('test_no_context', lpar.LPAR.getter(
        self.adpt)).add_functor_subtask(verify_no_ctx).execute()

    def verify_ctx(wrapper):
        # The context created below must be visible inside the subtask.
        _context = ctx.get_current()
        self.assertIsNotNone(_context)
        self.assertEqual('123', _context.request_id)
        # Copy the base set of locks to expect.  NOTE: ``locks`` is
        # resolved late from the enclosing scope -- it is assigned
        # *after* this function is defined, which is fine because the
        # closure is only called during execute().
        our_locks = list(locks)
        # Add our wrappers uuid since that will be set also.
        our_locks.append(wrapper.uuid)
        self.assertEqual(set(our_locks), set(tx._get_locks()))

    # Creating the context registers it as the thread-local current one.
    ctx.RequestContext(request_id='123')
    locks = ['L123', 'L456', 'L789']
    tx._set_locks(locks)
    tx.FeedTask('test_set_context', lpar.LPAR.getter(
        self.adpt)).add_functor_subtask(verify_ctx).execute()

    # Context propagates even if FeedTask is executed in a subthread, as
    # long as our executor is used.
    # Make two to ensure they're run in separate threads
    ft1 = tx.FeedTask('subthread1', lpar.LPAR.getter(
        self.adpt)).add_functor_subtask(verify_ctx)
    ft2 = tx.FeedTask('subthread2', lpar.LPAR.getter(
        self.adpt)).add_functor_subtask(verify_ctx)
    # Without the context-propagating executor the subtask assertions
    # fail, surfacing as a WrappedFailure from the parallel engine.
    self.assertRaises(tf_ex.WrappedFailure, tf_eng.run,
                      tf_uf.Flow('subthread_flow').add(ft1, ft2),
                      engine='parallel')
    # With ContextThreadPoolExecutor the same flow succeeds.
    tf_eng.run(tf_uf.Flow('subthread_flow').add(ft1, ft2),
               engine='parallel',
               executor=tx.ContextThreadPoolExecutor(2))
def setUp(self):
    """Build a Keystone-v3-authenticated context for the Barbican tests.

    Authenticates with the configured identity credentials, resolves the
    project id by name, and stores a RequestContext carrying the token.
    """
    super(BarbicanKeyManagerTestCase, self).setUp()
    password_auth = v3.Password(
        auth_url=CONF.identity.auth_url,
        username=CONF.identity.username,
        password=CONF.identity.password,
        project_name=CONF.identity.project_name,
        user_domain_name=CONF.identity.user_domain_name,
        project_domain_name=CONF.identity.project_domain_name)
    sess = session.Session(auth=password_auth)
    keystone_client = client.Client(session=sess)
    matching_projects = keystone_client.projects.list(
        name=CONF.identity.project_name)
    self.ctxt = context.RequestContext(
        auth_token=password_auth.auth_ref.auth_token,
        tenant=matching_projects[0].id)
def test_create_ssl_certificate_normal(self, mock_creds, mock_dns_client):
    """Happy-path run of the create-ssl-certificate taskflow."""
    providers = ['cdn_provider']
    cert_obj = ssl_certificate.SSLCertificate('cdn', 'mytestsite.com',
                                              'san')
    store = {
        'providers_list_json': json.dumps(providers),
        'project_id': json.dumps(str(uuid.uuid4())),
        'cert_obj_json': json.dumps(cert_obj.to_dict()),
        'context_dict': context_utils.RequestContext().to_dict(),
    }
    (service_controller,
     storage_controller,
     dns_controller) = self.all_controllers()

    with MonkeyPatchControllers(service_controller,
                                dns_controller,
                                storage_controller,
                                memoized_controllers.task_controllers):
        self.patch_create_ssl_certificate_flow(service_controller,
                                               storage_controller,
                                               dns_controller)
        engines.run(create_ssl_certificate.create_ssl_certificate(),
                    store=store)
def load_certificates_data(cert_mngr, listener, context=None):
    """Load TLS certificate data from the listener.

    :param cert_mngr: certificate manager used to fetch certificates
    :param listener: listener whose certificate references are resolved
    :param context: optional request context; one scoped to the
                    listener's project is created when not supplied
    :returns: dict with 'tls_cert' (default cert or None) and
              'sni_certs' (list of SNI certs, possibly empty)
    """
    if not context:
        context = oslo_context.RequestContext(
            project_id=listener.project_id)

    default_cert = None
    if listener.tls_certificate_id:
        default_cert = _map_cert_tls_container(
            cert_mngr.get_cert(context,
                               listener.tls_certificate_id,
                               check_only=True))

    sni_list = []
    for sni_container in (listener.sni_containers or []):
        sni_list.append(_map_cert_tls_container(
            cert_mngr.get_cert(context,
                               sni_container.tls_container_id,
                               check_only=True)))

    return {'tls_cert': default_cert, 'sni_certs': sni_list}
def _test_rpc_api(self, rpcapi, topic, method, rpc_method, **kwargs):
    """Generic check that *method* drives the messaging client correctly.

    Verifies prepare() receives the expected version/fanout/topic
    keywords and that *rpc_method* ('call' or 'cast') is issued with the
    original context and kwargs; 'call' is expected to return a value,
    'cast' is not.
    """
    ctxt = oslo_context.RequestContext('fake_user', 'fake_project')
    expected_retval = 'foo' if rpc_method == 'call' else None
    expected_version = kwargs.pop('version', None)
    fanout = kwargs.pop('fanout', False)

    with mock.patch.object(rpcapi.client, rpc_method) as rpc_mock,\
            mock.patch.object(rpcapi.client, 'prepare') as prepare_mock:
        prepare_mock.return_value = rpcapi.client
        rpc_mock.return_value = expected_retval

        retval = getattr(rpcapi, method)(ctxt, **kwargs)

        # Only truthy options should have been forwarded to prepare().
        expected_prepare_kwargs = {
            key: value
            for key, value in (('version', expected_version),
                               ('fanout', fanout),
                               ('topic', topic))
            if value
        }
        prepare_mock.assert_called_once_with(**expected_prepare_kwargs)

        self.assertEqual(retval, expected_retval)
        rpc_mock.assert_called_once_with(ctxt, method, **kwargs)
def test_get_devices_details_list_and_failed_devices_unsupported(self):
    """Fallback path when the server cannot handle the bulk details RPC.

    With prepare() raising UnsupportedVersion, details are fetched per
    device; devices whose lookup raises land in 'failed_devices'.
    """
    rpcapi = agent_rpc.PluginApi(topics.PLUGIN)
    ctxt = oslo_context.RequestContext('fake_user', 'fake_project')
    queried = ['fake_device1', 'fake_device2']
    dev2_details = {'device': 'fake_device2',
                    'network_id': 'net_id',
                    'port_id': 'port_id',
                    'admin_state_up': True}
    expected = {'devices': [dev2_details],
                'failed_devices': ['fake_device1']}
    # First lookup raises, second returns real details.
    rpcapi.get_device_details = mock.Mock(
        side_effect=[Exception('fake_device1 fails'), dev2_details])
    with mock.patch.object(rpcapi.client, 'call'),\
            mock.patch.object(rpcapi.client, 'prepare') as prepare_mock:
        prepare_mock.side_effect = oslo_messaging.UnsupportedVersion(
            'test')
        result = rpcapi.get_devices_details_list_and_failed_devices(
            ctxt, queried, 'fake_agent_id', 'fake_host')
        self.assertEqual(expected, result)
def _encrypt_data(context, data):
    """Store *data* as an opaque secret in the key manager.

    Builds Keystone credentials from the KEY_MANAGER settings, stores
    the payload via castellan, and returns the id of the new secret.

    :raises castellan_exception.KeyManagerError: when credentials are
        missing from the murano-dashboard config or the store fails.
    """
    try:
        # TODO(pbourke): move auth construction into common area if it
        # ends up been required in other areas
        km_conf = settings.KEY_MANAGER
        auth = identity.V3Password(
            auth_url=km_conf['auth_url'],
            username=km_conf['username'],
            user_domain_name=km_conf['user_domain_name'],
            password=km_conf['password'],
            project_name=km_conf['project_name'],
            project_domain_name=km_conf['project_domain_name']
        )
    except (KeyError, AttributeError) as e:
        LOG.exception(e)
        msg = ('Could not find valid key manager credentials in the '
               'murano-dashboard config. encryptData yaql function not '
               'available')
        raise castellan_exception.KeyManagerError(message_arg=msg)

    sess = session.Session(auth=auth)
    auth_context = _oslo_context.RequestContext(
        auth_token=auth.get_token(sess),
        tenant=auth.get_project_id(sess))
    options.set_defaults(cfg.CONF,
                         auth_endpoint=settings.KEY_MANAGER['auth_url'])
    options.set_defaults(cfg.CONF, barbican_endpoint_type='internal')
    manager = key_manager.API()
    try:
        # TODO(pbourke): while we feel opaque data should cover the most
        # common use case, we may want to add support for other secret
        # types in the future (see https://goo.gl/tZhfqe)
        stored_key_id = manager.store(auth_context,
                                      opaque_data.OpaqueData(data))
    except castellan_exception.KeyManagerError as e:
        LOG.exception(e)
        raise
    return stored_key_id
def test_create_flow_dns_exception(self):
    """DNS provider errors during create are surfaced by the flow."""
    providers = ['cdn_provider']
    kwargs = {
        'providers_list_json': json.dumps(providers),
        'project_id': json.dumps(str(uuid.uuid4())),
        'auth_token': json.dumps(str(uuid.uuid4())),
        'service_id': json.dumps(str(uuid.uuid4())),
        'time_seconds': [i * self.time_factor
                         for i in range(self.total_retries)],
        'context_dict': context_utils.RequestContext().to_dict()
    }
    (service_controller, storage_controller,
     dns_controller, ssl_cert_controller) = self.all_controllers()

    with MonkeyPatchControllers(service_controller,
                                dns_controller,
                                storage_controller,
                                ssl_cert_controller,
                                memoized_controllers.task_controllers):
        self.patch_create_flow(service_controller, storage_controller,
                               dns_controller)
        # Use the public ``return_value`` API instead of poking the
        # private ``_mock_return_value`` attribute.
        dns_controller.create = mock.Mock(return_value={
            'cdn_provider': {
                'error': 'Whoops!',
                'error_class': 'tests.unit.distributed_task'
                               '.taskflow.test_flows.DNSException'
            }
        })
        engines.run(create_service.create_service(), store=kwargs)
def __init__(self, namespaces, pollster_list, group_prefix=None):
    """Create the agent manager.

    :param namespaces: pollster namespace(s) to load; a single string
        is normalized to a one-element list
    :param pollster_list: fnmatch patterns selecting pollsters by name
    :param group_prefix: optional prefix for coordination group names
    :raises PollsterListForbidden: when a pollster list is combined
        with a coordination backend
    """
    super(AgentManager, self).__init__()

    def _match(pollster):
        """Find out if pollster name matches to one of the list."""
        return any(fnmatch.fnmatch(pollster.name, pattern)
                   for pattern in pollster_list)

    # features of using coordination and pollster-list are exclusive,
    # and cannot be used at one moment to avoid both samples
    # duplication and samples being lost
    if pollster_list and cfg.CONF.coordination.backend_url:
        raise PollsterListForbidden()

    # IDIOM FIX: isinstance() instead of an exact type() comparison,
    # so list subclasses are accepted too.
    if not isinstance(namespaces, list):
        namespaces = [namespaces]

    # we'll have default ['compute', 'central'] here if no namespaces
    # will be passed
    extensions = (self._extensions('poll', namespace).extensions
                  for namespace in namespaces)
    if pollster_list:
        extensions = (itertools.ifilter(_match, exts)
                      for exts in extensions)

    self.extensions = list(itertools.chain(*list(extensions)))
    self.discovery_manager = self._extensions('discover')
    self.context = context.RequestContext('admin', 'admin', is_admin=True)
    self.partition_coordinator = coordination.PartitionCoordinator()

    # Compose coordination group prefix.
    # We'll use namespaces as the basement for this partitioning.
    namespace_prefix = '-'.join(sorted(namespaces))
    self.group_prefix = ('%s-%s' % (namespace_prefix, group_prefix)
                         if group_prefix else namespace_prefix)
def _setup_rpc(self):
    """Wire up RPC: consumers for the core/L3 topics plus agent notifiers."""
    # RPC support
    self.service_topics = {svc_constants.CORE: topics.PLUGIN,
                           svc_constants.L3_ROUTER_NAT: topics.L3PLUGIN}
    # Plugin-side context used while servicing agent RPC callbacks.
    self.rpc_context = oslo_context.RequestContext('neutron', 'neutron',
                                                   is_admin=False)
    self.conn = n_rpc.create_connection(new=True)
    # Callback endpoints shared by every consumed topic.
    self.endpoints = [BridgeRpcCallbacks(),
                      securitygroups_rpc.SecurityGroupServerRpcCallback(),
                      dhcp_rpc.DhcpRpcCallback(),
                      l3_rpc.L3RpcCallback(),
                      agents_db.AgentExtRpcCallback(),
                      metadata_rpc.MetadataRpcCallback()]
    for svc_topic in self.service_topics.values():
        self.conn.create_consumer(svc_topic, self.endpoints, fanout=False)
    # Consume from all consumers in threads
    self.conn.consume_in_threads()
    self.notifier = AgentNotifierApi(topics.AGENT)
    # Notifiers used to push resource events out to DHCP and L3 agents.
    self.agent_notifiers[q_const.AGENT_TYPE_DHCP] = (
        dhcp_rpc_agent_api.DhcpAgentNotifyAPI()
    )
    self.agent_notifiers[q_const.AGENT_TYPE_L3] = (
        l3_rpc_agent_api.L3AgentNotifyAPI()
    )
def test_deprecated_args(self):
    """Deprecated kwargs populate the *_id attrs; alias reads warn.

    Constructing with the old names (user, tenant, domain, ...) must be
    silent, while each subsequent read of a deprecated alias attribute
    emits one deprecation warning when warning logging is enabled.
    """
    user = uuid.uuid4().hex
    tenant = uuid.uuid4().hex
    domain = uuid.uuid4().hex
    user_domain = uuid.uuid4().hex
    project_domain = uuid.uuid4().hex
    ctx = context.RequestContext(user=user,
                                 tenant=tenant,
                                 domain=domain,
                                 user_domain=user_domain,
                                 project_domain=project_domain)
    # Construction alone must not have warned.
    self.assertEqual(0, len(self.warnings))
    # Deprecated kwargs land in the modern *_id attributes.
    self.assertEqual(user, ctx.user_id)
    self.assertEqual(tenant, ctx.project_id)
    self.assertEqual(domain, ctx.domain_id)
    self.assertEqual(user_domain, ctx.user_domain_id)
    self.assertEqual(project_domain, ctx.project_domain_id)
    # Reading the modern attributes must not warn either.
    self.assertEqual(0, len(self.warnings))

    # Each deprecated-alias read bumps the warning count by one
    # (guarded: warning emission can be globally disabled).
    self.assertEqual(user, ctx.user)
    if context._log_deprecation_warnings:
        self.assertEqual(1, len(self.warnings))
    self.assertEqual(tenant, ctx.tenant)
    if context._log_deprecation_warnings:
        self.assertEqual(2, len(self.warnings))
    self.assertEqual(domain, ctx.domain)
    if context._log_deprecation_warnings:
        self.assertEqual(3, len(self.warnings))
    self.assertEqual(user_domain, ctx.user_domain)
    if context._log_deprecation_warnings:
        self.assertEqual(4, len(self.warnings))
    self.assertEqual(project_domain, ctx.project_domain)
    if context._log_deprecation_warnings:
        self.assertEqual(5, len(self.warnings))
def setUp(self):
    """Set up a v2 Glance service over a stub client plus the Swift
    temp-URL configuration used by these tests."""
    super(TestGlanceSwiftTempURL, self).setUp()
    client = stubs.StubGlanceClient()
    self.context = context.RequestContext()
    self.context.auth_token = 'fake'
    # Positional args: (client, version=2, context).
    self.service = service.GlanceImageService(client, 2, self.context)
    self.config(swift_temp_url_key='correcthorsebatterystaple',
                group='glance')
    self.config(swift_endpoint_url='https://swift.example.com',
                group='glance')
    self.config(swift_account='AUTH_a422b2-91f3-2f46-74b7-d7c9e8958f5d30',
                group='glance')
    self.config(swift_api_version='v1', group='glance')
    self.config(swift_container='glance', group='glance')
    self.config(swift_temp_url_duration=1200, group='glance')
    self.config(swift_store_multiple_containers_seed=0, group='glance')
    # NOTE: a stray no-op ``self.config()`` call (no options, no group)
    # was removed here.
    self.fake_image = {
        'id': '757274c4-2856-4bd2-bb20-9a4a231e187b'
    }
def test_published_no_mock(self):
    """Publish samples over real in-process RPC and verify receipt.

    The publisher sends self.test_data to the metering topic; the
    collector's endpoint must be called exactly once with a payload
    whose counter names match the published samples.
    """
    publisher = msg_publisher.RPCPublisher(netutils.urlsplit('rpc://'))

    endpoint = mock.MagicMock(['record_metering_data'])
    collector = messaging.get_rpc_server(
        self.transport, self.CONF.publisher_rpc.metering_topic, endpoint)
    # Stop the collector as soon as the first batch of data arrives.
    endpoint.record_metering_data.side_effect = (
        lambda *args, **kwds: collector.stop())

    collector.start()
    eventlet.sleep()
    publisher.publish_samples(context.RequestContext(), self.test_data)
    collector.wait()

    class Matcher(object):
        # Equality hook used by mock's call comparison below: the
        # received payload "equals" this matcher when its counter names
        # line up with self.test_data (closed over from the test).
        @staticmethod
        def __eq__(data):
            for i, sample_item in enumerate(data):
                if sample_item['counter_name'] != self.test_data[i].name:
                    return False
            return True

    endpoint.record_metering_data.assert_called_once_with(
        mock.ANY, data=Matcher())
def load_certificates_data(cert_mngr, obj, context=None):
    """Load TLS certificate data from the listener/pool.

    :param cert_mngr: certificate manager used to fetch certificates
    :param obj: listener or pool carrying certificate references
    :param context: optional request context; one scoped to the
                    object's project is created when not supplied
    :returns: dict with 'tls_cert' and 'sni_certs'
    :raises exceptions.CertificateRetrievalException: when a referenced
        certificate cannot be fetched
    """
    if not context:
        context = oslo_context.RequestContext(project_id=obj.project_id)

    def _fetch(container_id):
        # Wrap retrieval so every failure is logged and re-raised as a
        # CertificateRetrievalException naming the offending reference.
        try:
            return _map_cert_tls_container(
                cert_mngr.get_cert(context, container_id,
                                   check_only=True))
        except Exception as e:
            LOG.warning('Unable to retrieve certificate: %s due to %s.',
                        container_id, str(e))
            raise exceptions.CertificateRetrievalException(
                ref=container_id)

    tls_cert = None
    if obj.tls_certificate_id:
        tls_cert = _fetch(obj.tls_certificate_id)

    sni_certs = []
    if getattr(obj, 'sni_containers', None):
        sni_certs = [_fetch(sni_cont.tls_container_id)
                     for sni_cont in obj.sni_containers]

    return {'tls_cert': tls_cert, 'sni_certs': sni_certs}
def fill_context(self, request):
    """Populate the request's auth context and oslo RequestContext.

    Determines how the caller is authorized (pre-set admin env flag,
    legacy admin token, user token, or trusted-issuer tokenless X.509)
    and stores the resulting auth context both on the request
    environment and in the thread-local request context used by the
    logging layer.
    """
    # The request context stores itself in thread-local memory for
    # logging.
    request_context = oslo_context.RequestContext(
        request_id=request.environ.get('openstack.request_id'))

    if authorization.AUTH_CONTEXT_ENV in request.environ:
        msg = _LW('Auth context already exists in the request '
                  'environment; it will be used for authorization '
                  'instead of creating a new one.')
        LOG.warning(msg)
        return

    # NOTE(gyee): token takes precedence over SSL client certificates.
    # This will preserve backward compatibility with the existing
    # behavior. Tokenless authorization with X.509 SSL client
    # certificate is effectively disabled if no trusted issuers are
    # provided.
    if request.environ.get(core.CONTEXT_ENV, {}).get('is_admin', False):
        # Admin-ness was already established upstream via the environ.
        request_context.is_admin = True
        auth_context = {}
    elif CONF.admin_token and request.user_token == CONF.admin_token:
        # Legacy shared admin token path (deprecated).
        versionutils.report_deprecated_feature(
            LOG,
            _LW('build_auth_context middleware checking for the admin '
                'token is deprecated as of the Mitaka release and will '
                'be '
                'removed in the O release. If your deployment requires '
                'use of the admin token, update keystone-paste.ini so '
                'that admin_token_auth is before build_auth_context in '
                'the paste pipelines, otherwise remove the '
                'admin_token_auth middleware from the paste pipelines.'))
        request_context.is_admin = True
        auth_context = {}
    elif request.token_auth.has_user_token:
        # Regular user-token path: derive the auth context from the
        # validated token data.
        request_context.auth_token = request.user_token
        ref = token_model.KeystoneToken(token_id=request.user_token,
                                        token_data=request.token_info)
        auth_context = authorization.token_to_auth_context(ref)
    elif self._validate_trusted_issuer(request):
        # Tokenless X.509 path, only when the client cert issuer is
        # trusted.
        auth_context = self._build_tokenless_auth_context(request)
    else:
        LOG.debug('There is either no auth token in the request or '
                  'the certificate issuer is not trusted. No auth '
                  'context will be set.')
        return

    # The attributes of request_context are put into the logs. This is
    # a common pattern for all the OpenStack services. In all the other
    # projects these are IDs, so set the attributes to IDs here rather
    # than the name.
    request_context.user = auth_context.get('user_id')
    request_context.tenant = auth_context.get('project_id')
    request_context.domain = auth_context.get('domain_id')
    request_context.user_domain = auth_context.get('user_domain_id')
    request_context.project_domain = auth_context.get('project_domain_id')
    # Re-register the (now populated) context as the thread-local one.
    request_context.update_store()

    LOG.debug('RBAC: auth_context: %s', auth_context)
    request.environ[authorization.AUTH_CONTEXT_ENV] = auth_context
This example requires the following modules to be installed.

$ pip install oslo.context oslo.log

More information can be found at:

  https://docs.openstack.org/oslo.context/latest/user/index.html
"""
from oslo_config import cfg
from oslo_context import context
from oslo_log import log as logging

CONF = cfg.CONF
DOMAIN = "demo"

logging.register_options(CONF)
# Show user/tenant/project-domain in each log line's identity field.
CONF.logging_user_identity_format = "%(user)s/%(tenant)s@%(project_domain)s"
logging.setup(CONF, DOMAIN)
LOG = logging.getLogger(__name__)

LOG.info("Message without context")
# ids in Openstack are 32 characters long
# For readability a shorter id value is used
#
# NOTE: the created RequestContext registers itself as the thread-local
# current context, so the logger picks it up without an explicit
# reference being kept.
context.RequestContext(request_id='req-abc',
                       user='******',
                       tenant='d6134462',
                       project_domain='a6b9360e')
LOG.info("Message with context")
def test_store_current(self):
    """Creating a RequestContext stores it as the thread-local current."""
    created = context.RequestContext()
    self.assertIs(context.get_current(), created)
    # Clearing the cached context must leave no current context behind.
    fixture.ClearRequestContext()._remove_cached_context()
    self.assertIsNone(context.get_current())
import datetime

import mock
from oslo_context import context as ctx
from oslo_policy import policy
import testtools

from esi_leap.api.controllers.v1.contract import ContractsController
from esi_leap.common import exception
from esi_leap.common import statuses
from esi_leap.objects import contract
from esi_leap.objects import offer
from esi_leap.resource_objects.test_node import TestNode
from esi_leap.tests.api import base as test_api_base

# Canned request contexts (and their policy-value dicts) representing
# the roles exercised by the contract API tests.
admin_ctx = ctx.RequestContext(project_id='adminid',
                               roles=['admin'])
admin_ctx_dict = admin_ctx.to_policy_values()

owner_ctx = ctx.RequestContext(project_id='ownerid',
                               roles=['owner'])
owner_ctx_dict = owner_ctx.to_policy_values()

lessee_ctx = ctx.RequestContext(project_id="lesseeid",
                                roles=['lessee'])
lessee_ctx_dict = lessee_ctx.to_policy_values()

# Project with no recognized role, for negative authorization tests.
random_ctx = ctx.RequestContext(project_id='randomid',
                                roles=['randomrole'])
random_ctx_dict = random_ctx.to_policy_values()

# Secondary owner/lessee projects for cross-project access checks.
owner_ctx_2 = ctx.RequestContext(project_id='ownerid2',
                                 roles=['owner'])
lessee_ctx_2 = ctx.RequestContext(project_id="lesseeid2",
                                  roles=['lessee'])

# Common start timestamp shared by the test contract/offer fixtures.
start = datetime.datetime(2016, 7, 16)
start_time=now - timedelta(days=2), end_time=now + timedelta(days=1), duration=16400, status="available", server_config_query={'foo': 'bar'}, cost=11.5) test_bid_data_3 = dict(server_quantity=2, start_time=now - timedelta(days=2), end_time=now + timedelta(days=1), duration=16400, status="available", server_config_query={'foo': 'bar'}, cost=11.5) admin_context = ctx.RequestContext(is_admin=True) scoped_context = ctx.RequestContext(is_admin=False, project_id='1234') scoped_context_2 = ctx.RequestContext(is_admin=False, project_id='7788') def test_offer_get_all(app, db, session): api.offer_create(test_offer_data, scoped_context) api.offer_create(test_offer_data_2, scoped_context) api.offer_create(test_offer_data_3, scoped_context_2) assert len(api.offer_get_all(scoped_context)) == 3 def test_offer_get_all_by_project_id(app, db, session):
def setUp(self):
    """Create a non-admin request context and an empty policy target."""
    super(PolicyFileTestCase, self).setUp()
    # Non-admin context so policy checks exercise the regular-user path.
    self.context = context.RequestContext(user='******',
                                          tenant='fake',
                                          is_admin=False)
    self.target = {}
from oslo_config import cfg
from oslo_log import log as logging
from oslo_context import context

CONF = cfg.CONF
DOMAIN = "demo"

logging.register_options(CONF)
logging.setup(CONF, DOMAIN)
LOG = logging.getLogger(DOMAIN)

LOG.info("Message without context")
# NOTE: a freshly created RequestContext registers itself as the
# thread-local "current" context, so subsequent log records pick it up
# implicitly.
con = context.RequestContext()
LOG.info("Message with context")
# Creating another context replaces the thread-local current one.
cont_new = context.RequestContext(user='******',
                                  tenant='test_project',
                                  project_domain='test_domain')
LOG.info("Message with new context")
# Passing context= explicitly presumably overrides the thread-local
# context for this record -- TODO confirm against the oslo.log adapter.
LOG.info("Message with con", context=con)
LOG.info("Message with new")