Example #1
def build_token_values_v2(access, admin_domain_id):
    token_data = access['token']

    token_expires_at = timeutils.parse_isotime(token_data['expires'])

    # Trim off the microseconds because the revocation event only has
    # expirations accurate to the second.
    token_expires_at = token_expires_at.replace(microsecond=0)

    token_values = {
        'expires_at': timeutils.normalize_time(token_expires_at),
        'issued_at': timeutils.normalize_time(
            timeutils.parse_isotime(token_data['issued_at'])),
        'audit_id': token_data.get('audit_ids', [None])[0],
        'audit_chain_id': token_data.get('audit_ids', [None])[-1],
    }

    token_values['user_id'] = access.get('user', {}).get('id')

    project = token_data.get('tenant')
    if project is not None:
        token_values['project_id'] = project['id']
    else:
        token_values['project_id'] = None

    token_values['identity_domain_id'] = admin_domain_id
    token_values['assignment_domain_id'] = admin_domain_id

    role_list = []
    # Roles are by ID in metadata and by name in the user section
    roles = access.get('metadata', {}).get('roles', [])
    for role in roles:
        role_list.append(role)
    token_values['roles'] = role_list
    return token_values
Example #2
    def _is_valid_token(self, token):
        """Verify the token is valid format and has not expired."""
        current_time = timeutils.normalize_time(timeutils.utcnow())

        try:
            # Get the data we need from the correct location (V2 and V3 tokens
            # differ in structure; try V3 first, then fall back to V2)
            token_data = token.get('token', token.get('access'))
            expires_at = token_data.get('expires_at',
                                        token_data.get('expires'))
            if not expires_at:
                expires_at = token_data['token']['expires']
            expiry = timeutils.normalize_time(
                timeutils.parse_isotime(expires_at))
        except Exception:
            LOG.exception(_LE('Unexpected error or malformed token '
                              'determining token expiry: %s'), token)
            raise exception.TokenNotFound(_('Failed to validate token'))

        if current_time < expiry:
            self.check_revocation(token)
            # Token has not expired and has not been revoked.
            return None
        else:
            raise exception.TokenNotFound(_('Failed to validate token'))
Example #3
 def assertServerUsage(self, server, launched_at, terminated_at):
     resp_launched_at = timeutils.parse_isotime(
         server.get('%slaunched_at' % self.prefix))
     self.assertEqual(timeutils.normalize_time(resp_launched_at),
                      launched_at)
     resp_terminated_at = timeutils.parse_isotime(
         server.get('%sterminated_at' % self.prefix))
     self.assertEqual(timeutils.normalize_time(resp_terminated_at),
                      terminated_at)
Example #4
    def _add_to_revocation_list(self, data, lock):
        filtered_list = []
        revoked_token_data = {}

        current_time = self._get_current_time()
        expires = data['expires']

        if isinstance(expires, six.string_types):
            expires = timeutils.parse_isotime(expires)

        expires = timeutils.normalize_time(expires)

        if expires < current_time:
            LOG.warning(_LW('Token `%s` is expired, not adding to the '
                            'revocation list.'), data['id'])
            return

        revoked_token_data['expires'] = utils.isotime(expires,
                                                      subsecond=True)
        revoked_token_data['id'] = data['id']

        token_list = self._get_key_or_default(self.revocation_key, default=[])
        if not isinstance(token_list, list):
            # NOTE(morganfainberg): In the case that the revocation list is not
            # in a format we understand, reinitialize it. This is an attempt to
            # not allow the revocation list to be completely broken if
            # somehow the key is changed outside of keystone (e.g. memcache
            # that is shared by multiple applications). Logging occurs at error
            # level so that the cloud administrators have some awareness that
            # the revocation_list needed to be cleared out. In all, this should
            # be recoverable. Keystone cannot prevent external applications
            # from changing a key in some backends; however, it is possible to
            # gracefully handle and notify of this event.
            LOG.error(_LE('Reinitializing revocation list due to error '
                          'in loading revocation list from backend.  '
                          'Expected `list` type got `%(type)s`. Old '
                          'revocation list data: %(list)r'),
                      {'type': type(token_list), 'list': token_list})
            token_list = []

        # NOTE(morganfainberg): on revocation, clean up the expired entries to
        # keep the list of revoked tokens to a minimum.
        for token_data in token_list:
            try:
                expires_at = timeutils.normalize_time(
                    timeutils.parse_isotime(token_data['expires']))
            except ValueError:
                LOG.warning(_LW('Removing `%s` from revocation list due to '
                                'invalid expires data in revocation list.'),
                            token_data.get('id', 'INVALID_TOKEN_DATA'))
                continue
            if expires_at > current_time:
                filtered_list.append(token_data)
        filtered_list.append(revoked_token_data)
        self._set_key(self.revocation_key, filtered_list, lock)
Example #5
    def test_touch_conductor(self, mock_utcnow):
        test_time = datetime.datetime(2000, 1, 1, 0, 0)
        mock_utcnow.return_value = test_time
        c = self._create_test_cdr()
        self.assertEqual(test_time, timeutils.normalize_time(c.updated_at))

        test_time = datetime.datetime(2000, 1, 1, 0, 1)
        mock_utcnow.return_value = test_time
        self.dbapi.touch_conductor(c.hostname)
        c = self.dbapi.get_conductor(c.hostname)
        self.assertEqual(test_time, timeutils.normalize_time(c.updated_at))
Example #6
    def test_token_model_v2(self):
        token_data = token_model.KeystoneToken(uuid.uuid4().hex, self.v2_sample_token)
        self.assertIs(token_model.V2, token_data.version)
        expires = timeutils.normalize_time(timeutils.parse_isotime(self.v2_sample_token["access"]["token"]["expires"]))
        issued = timeutils.normalize_time(timeutils.parse_isotime(self.v2_sample_token["access"]["token"]["issued_at"]))
        self.assertEqual(expires, token_data.expires)
        self.assertEqual(issued, token_data.issued)
        self.assertEqual(self.v2_sample_token["access"]["user"]["id"], token_data.user_id)
        self.assertEqual(self.v2_sample_token["access"]["user"]["name"], token_data.user_name)
        self.assertEqual(CONF.identity.default_domain_id, token_data.user_domain_id)
        self.assertEqual("Default", token_data.user_domain_name)
        self.assertEqual(CONF.identity.default_domain_id, token_data.project_domain_id)
        self.assertEqual("Default", token_data.project_domain_name)
        self.assertEqual(self.v2_sample_token["access"]["trust"]["id"], token_data.trust_id)
        self.assertEqual(self.v2_sample_token["access"]["trust"]["trustor_user_id"], token_data.trustor_user_id)
        self.assertEqual(self.v2_sample_token["access"]["trust"]["impersonation"], token_data.trust_impersonation)
        self.assertEqual(self.v2_sample_token["access"]["trust"]["trustee_user_id"], token_data.trustee_user_id)
        # Project Scoped Token
        self.assertEqual(self.v2_sample_token["access"]["token"]["tenant"]["id"], token_data.project_id)
        self.assertEqual(self.v2_sample_token["access"]["token"]["tenant"]["name"], token_data.project_name)
        self.assertTrue(token_data.project_scoped)
        self.assertTrue(token_data.scoped)
        self.assertTrue(token_data.trust_scoped)
        self.assertEqual([r["name"] for r in self.v2_sample_token["access"]["user"]["roles"]], token_data.role_names)
        token_data["token"].pop("tenant")
        self.assertFalse(token_data.scoped)
        self.assertFalse(token_data.project_scoped)
        self.assertFalse(token_data.domain_scoped)
        self.assertRaises(exception.UnexpectedError, getattr, token_data, "project_id")
        self.assertRaises(exception.UnexpectedError, getattr, token_data, "project_name")
        self.assertRaises(exception.UnexpectedError, getattr, token_data, "project_domain_id")
        self.assertRaises(exception.UnexpectedError, getattr, token_data, "project_domain_name")
        # No Domain Scoped tokens in V2
        self.assertRaises(NotImplementedError, getattr, token_data, "domain_id")
        self.assertRaises(NotImplementedError, getattr, token_data, "domain_name")
        token_data["domain"] = {"id": uuid.uuid4().hex, "name": uuid.uuid4().hex}
        self.assertRaises(NotImplementedError, getattr, token_data, "domain_id")
        self.assertRaises(NotImplementedError, getattr, token_data, "domain_name")
        self.assertFalse(token_data.domain_scoped)

        token_data["token"]["audit_ids"] = [uuid.uuid4().hex]
        self.assertEqual(token_data.audit_chain_id, token_data["token"]["audit_ids"][0])
        token_data["token"]["audit_ids"].append(uuid.uuid4().hex)
        self.assertEqual(token_data.audit_chain_id, token_data["token"]["audit_ids"][1])
        self.assertEqual(token_data.audit_id, token_data["token"]["audit_ids"][0])
        del token_data["token"]["audit_ids"]
        self.assertIsNone(token_data.audit_id)
        self.assertIsNone(token_data.audit_chain_id)
Example #7
    def test_token_model_v3(self):
        token_data = token_model.KeystoneToken(uuid.uuid4().hex, self.v3_sample_token)
        self.assertIs(token_model.V3, token_data.version)
        expires = timeutils.normalize_time(timeutils.parse_isotime(self.v3_sample_token["token"]["expires_at"]))
        issued = timeutils.normalize_time(timeutils.parse_isotime(self.v3_sample_token["token"]["issued_at"]))
        self.assertEqual(expires, token_data.expires)
        self.assertEqual(issued, token_data.issued)
        self.assertEqual(self.v3_sample_token["token"]["user"]["id"], token_data.user_id)
        self.assertEqual(self.v3_sample_token["token"]["user"]["name"], token_data.user_name)
        self.assertEqual(self.v3_sample_token["token"]["user"]["domain"]["id"], token_data.user_domain_id)
        self.assertEqual(self.v3_sample_token["token"]["user"]["domain"]["name"], token_data.user_domain_name)
        self.assertEqual(self.v3_sample_token["token"]["project"]["domain"]["id"], token_data.project_domain_id)
        self.assertEqual(self.v3_sample_token["token"]["project"]["domain"]["name"], token_data.project_domain_name)
        self.assertEqual(self.v3_sample_token["token"]["OS-TRUST:trust"]["id"], token_data.trust_id)
        self.assertEqual(self.v3_sample_token["token"]["OS-TRUST:trust"]["trustor_user_id"], token_data.trustor_user_id)
        self.assertEqual(self.v3_sample_token["token"]["OS-TRUST:trust"]["trustee_user_id"], token_data.trustee_user_id)
        # Project Scoped Token
        self.assertRaises(exception.UnexpectedError, getattr, token_data, "domain_id")
        self.assertRaises(exception.UnexpectedError, getattr, token_data, "domain_name")
        self.assertFalse(token_data.domain_scoped)
        self.assertEqual(self.v3_sample_token["token"]["project"]["id"], token_data.project_id)
        self.assertEqual(self.v3_sample_token["token"]["project"]["name"], token_data.project_name)
        self.assertTrue(token_data.project_scoped)
        self.assertTrue(token_data.scoped)
        self.assertTrue(token_data.trust_scoped)
        self.assertEqual([r["id"] for r in self.v3_sample_token["token"]["roles"]], token_data.role_ids)
        self.assertEqual([r["name"] for r in self.v3_sample_token["token"]["roles"]], token_data.role_names)
        token_data.pop("project")
        self.assertFalse(token_data.project_scoped)
        self.assertFalse(token_data.scoped)
        self.assertRaises(exception.UnexpectedError, getattr, token_data, "project_id")
        self.assertRaises(exception.UnexpectedError, getattr, token_data, "project_name")
        self.assertFalse(token_data.project_scoped)
        domain_id = uuid.uuid4().hex
        domain_name = uuid.uuid4().hex
        token_data["domain"] = {"id": domain_id, "name": domain_name}
        self.assertEqual(domain_id, token_data.domain_id)
        self.assertEqual(domain_name, token_data.domain_name)
        self.assertTrue(token_data.domain_scoped)

        token_data["audit_ids"] = [uuid.uuid4().hex]
        self.assertEqual(token_data.audit_id, token_data["audit_ids"][0])
        self.assertEqual(token_data.audit_chain_id, token_data["audit_ids"][0])
        token_data["audit_ids"].append(uuid.uuid4().hex)
        self.assertEqual(token_data.audit_chain_id, token_data["audit_ids"][1])
        del token_data["audit_ids"]
        self.assertIsNone(token_data.audit_id)
        self.assertIsNone(token_data.audit_chain_id)
Example #8
def build_token_values(token_data):
    token_values = {
        'expires_at': timeutils.normalize_time(
            timeutils.parse_isotime(token_data['expires_at'])),
        'issued_at': timeutils.normalize_time(
            timeutils.parse_isotime(token_data['issued_at']))}

    user = token_data.get('user')
    if user is not None:
        token_values['user_id'] = user['id']
        token_values['identity_domain_id'] = user['domain']['id']
    else:
        token_values['user_id'] = None
        token_values['identity_domain_id'] = None

    project = token_data.get('project', token_data.get('tenant'))
    if project is not None:
        token_values['project_id'] = project['id']
        token_values['assignment_domain_id'] = project['domain']['id']
    else:
        token_values['project_id'] = None
        token_values['assignment_domain_id'] = None

    role_list = []
    roles = token_data.get('roles')
    if roles is not None:
        for role in roles:
            role_list.append(role['id'])
    token_values['roles'] = role_list

    trust = token_data.get('OS-TRUST:trust')
    if trust is None:
        token_values['trust_id'] = None
        token_values['trustor_id'] = None
        token_values['trustee_id'] = None
    else:
        token_values['trust_id'] = trust['id']
        token_values['trustor_id'] = trust['trustor_user']['id']
        token_values['trustee_id'] = trust['trustee_user']['id']

    oauth1 = token_data.get('OS-OAUTH1')
    if oauth1 is None:
        token_values['consumer_id'] = None
        token_values['access_token_id'] = None
    else:
        token_values['consumer_id'] = oauth1['consumer_id']
        token_values['access_token_id'] = oauth1['access_token_id']
    return token_values
Example #9
    def _get_volumes(self, context, period_start, period_stop,
                     tenant_id=None, detailed=False, metadata=None):
        """Returns a list of volumes

        :param context: cinder context from request
        :param period_start: Datetime start
        :param period_stop: Datetime stop
        :param tenant_id: String|None Id of a tenant
        :param detailed: Optionally include detailed volume info
        :param metadata: Dict|None Dictionary of metadata search terms
        """
        volumes = self._volume_api_get_all(context, period_start,
                                           period_stop, tenant_id, metadata)
        rval = {}
        for volume in volumes:
            info = {}
            info['hours'] = self._hours_for(volume, period_start, period_stop)
            # size in GB
            info['size'] = volume['size']
            info['volume_id'] = volume['id']
            info['display_name'] = volume['display_name']
            info['started_at'] = \
                timeutils.normalize_time(volume['launched_at'])
            info['project_id'] = volume['project_id']
            info['ended_at'] = (
                timeutils.normalize_time(volume['terminated_at']) if
                volume['terminated_at'] else None
            )
            info['status'] = volume['status']
            info['attach_status'] = volume['attach_status']

            if info['project_id'] not in rval:
                summary = {}
                summary['project_id'] = info['project_id']
                if detailed:
                    summary['volume_usages'] = []
                summary['total_gb_usage'] = 0
                summary['total_hours'] = 0
                summary['start'] = timeutils.normalize_time(period_start)
                summary['stop'] = timeutils.normalize_time(period_stop)
                rval[info['project_id']] = summary

            summary = rval[info['project_id']]
            summary['total_gb_usage'] += info['size'] * info['hours']
            summary['total_hours'] += info['hours']
            if detailed:
                summary['volume_usages'].append(info)
        return rval.values()
Example #10
    def record_metering_data(self, data):
        # We may have received only one counter on the wire
        if not data:
            return
        if not isinstance(data, list):
            data = [data]

        for meter in data:
            LOG.debug(
                "metering data %(counter_name)s " "for %(resource_id)s @ %(timestamp)s: %(counter_volume)s",
                {
                    "counter_name": meter["counter_name"],
                    "resource_id": meter["resource_id"],
                    "timestamp": meter.get("timestamp", "NO TIMESTAMP"),
                    "counter_volume": meter["counter_volume"],
                },
            )
            # Convert the timestamp to a datetime instance.
            # Storage engines are responsible for converting
            # that value to something they can store.
            if meter.get("timestamp"):
                ts = timeutils.parse_isotime(meter["timestamp"])
                meter["timestamp"] = timeutils.normalize_time(ts)
        try:
            self.meter_conn.record_metering_data_batch(data)
        except Exception as err:
            LOG.error(_LE("Failed to record %(len)s: %(err)s."), {"len": len(data), "err": err})
            raise
Example #11
 def test_process_bind_param_mysql(self):
     dialect = mock.Mock()
     dialect.name = 'mysql'
     value = timeutils.utcnow(True)
     expected_value = timeutils.normalize_time(value)
     result = self.sqltype.process_bind_param(value, dialect)
     self.assertEqual(expected_value, result)
Example #12
    def record_metering_data(self, data):
        # We may have received only one counter on the wire
        if not isinstance(data, list):
            data = [data]

        for meter in data:
            LOG.debug(
                "metering data %(counter_name)s " "for %(resource_id)s @ %(timestamp)s: %(counter_volume)s",
                {
                    "counter_name": meter["counter_name"],
                    "resource_id": meter["resource_id"],
                    "timestamp": meter.get("timestamp", "NO TIMESTAMP"),
                    "counter_volume": meter["counter_volume"],
                },
            )
            if publisher_utils.verify_signature(meter, self.conf.publisher.telemetry_secret):
                try:
                    # Convert the timestamp to a datetime instance.
                    # Storage engines are responsible for converting
                    # that value to something they can store.
                    if meter.get("timestamp"):
                        ts = timeutils.parse_isotime(meter["timestamp"])
                        meter["timestamp"] = timeutils.normalize_time(ts)
                    self.meter_conn.record_metering_data(meter)
                except Exception as err:
                    LOG.exception(_LE("Failed to record metering data: %s"), err)
                    # raise the exception to propagate it up in the chain.
                    raise
            else:
                LOG.warning(_LW("message signature invalid, discarding message: %r"), meter)
Example #13
    def _check_status_complete(self, started_at, wait_secs):
        def simulated_effort():
            client_name = self.properties[self.CLIENT_NAME]
            self.entity = self.properties[self.ENTITY_NAME]
            if client_name and self.entity:
                # Allow the user to set the value to a real resource id.
                entity_id = self.data().get('value') or self.resource_id
                try:
                    obj = getattr(self.client(name=client_name), self.entity)
                    obj.get(entity_id)
                except Exception as exc:
                    LOG.debug('%s.%s(%s) %s' % (client_name, self.entity,
                                                entity_id, six.text_type(exc)))
            else:
                # just sleep some more
                eventlet.sleep(1)

        if isinstance(started_at, six.string_types):
            started_at = timeutils.parse_isotime(started_at)

        started_at = timeutils.normalize_time(started_at)
        waited = timeutils.utcnow() - started_at
        LOG.info("Resource %s waited %s/%s seconds",
                 self.name, waited, wait_secs)

        # wait_secs < 0 is an infinite wait time.
        if wait_secs >= 0 and waited > datetime.timedelta(seconds=wait_secs):
            fail_prop = self.properties[self.FAIL]
            if fail_prop and self.action != self.DELETE:
                raise ValueError("Test Resource failed %s" % self.name)
            return True

        simulated_effort()
        return False
Example #14
    def record_metering_data(self, data):
        # We may have received only one counter on the wire
        if not data:
            return
        if not isinstance(data, list):
            data = [data]

        for meter in data:
            LOG.debug(
                'metering data %(counter_name)s '
                'for %(resource_id)s @ %(timestamp)s: %(counter_volume)s',
                {'counter_name': meter['counter_name'],
                 'resource_id': meter['resource_id'],
                 'timestamp': meter.get('timestamp', 'NO TIMESTAMP'),
                 'counter_volume': meter['counter_volume']})
            # Convert the timestamp to a datetime instance.
            # Storage engines are responsible for converting
            # that value to something they can store.
            if meter.get('timestamp'):
                ts = timeutils.parse_isotime(meter['timestamp'])
                meter['timestamp'] = timeutils.normalize_time(ts)
        try:
            self.conn.record_metering_data_batch(data)
        except Exception as err:
            LOG.error(_LE('Failed to record %(len)s: %(err)s.'),
                      {'len': len(data), 'err': err})
            raise
Example #15
    def test_trusted_filter_update_cache_timezone(self, req_mock):
        oat_data = {"hosts": [{"host_name": "node1",
                                    "trust_lvl": "untrusted",
                                    "vtime": "2012-09-09T05:10:40-04:00"}]}
        req_mock.return_value = requests.codes.OK, oat_data
        extra_specs = {'trust:trusted_host': 'untrusted'}
        spec_obj = objects.RequestSpec(
            context=mock.sentinel.ctx,
            flavor=objects.Flavor(memory_mb=1024,
                                  extra_specs=extra_specs))
        host = fakes.FakeHostState('host1', 'node1', {})

        timeutils.set_time_override(
            timeutils.normalize_time(
                timeutils.parse_isotime("2012-09-09T09:10:40Z")))

        self.filt_cls.host_passes(host, spec_obj)  # Fill the caches

        req_mock.reset_mock()
        self.filt_cls.host_passes(host, spec_obj)
        self.assertFalse(req_mock.called)

        req_mock.reset_mock()
        timeutils.advance_time_seconds(
            CONF.trusted_computing.attestation_auth_timeout - 10)
        self.filt_cls.host_passes(host, spec_obj)
        self.assertFalse(req_mock.called)

        timeutils.clear_time_override()
Example #16
    def _iso_to_datetime(self, expiration):
        """Convert ISO formatted string to datetime."""
        if isinstance(expiration, six.string_types):
            expiration_iso = timeutils.parse_isotime(expiration.strip())
            expiration = timeutils.normalize_time(expiration_iso)

        return expiration
Example #17
def get_time_since_last_update(cluster):
    cluster_updated_at = timeutils.normalize_time(
        timeutils.parse_isotime(cluster.updated_at))
    current_time = timeutils.utcnow()
    spacing = timeutils.delta_seconds(cluster_updated_at,
                                      current_time)
    return spacing
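As a usage note: timeutils.delta_seconds returns the gap as a float number of seconds, so the helper above works with any object exposing an ISO 8601 updated_at. A minimal sketch with a made-up stand-in for the cluster record:

import collections

# Stand-in for a cluster record whose updated_at is an ISO 8601 string
# (illustrative only, not the project's real model).
FakeCluster = collections.namedtuple('FakeCluster', ['updated_at'])

spacing = get_time_since_last_update(FakeCluster(updated_at='2024-03-01T12:00:00Z'))
print(spacing)  # float seconds elapsed since 2024-03-01 12:00 UTC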
Example #18
    def process_bind_param(self, value, dialect):
        if value is None:
            return None
        if dialect.name == 'mysql':
            return timeutils.normalize_time(value)

        return value
Example #19
def sanitize_timestamp(timestamp):
    """Return a naive utc datetime object."""
    if not timestamp:
        return timestamp
    if not isinstance(timestamp, datetime.datetime):
        timestamp = timeutils.parse_isotime(timestamp)
    return timeutils.normalize_time(timestamp)
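sanitize_timestamp distills the parse_isotime/normalize_time pattern used throughout these examples: parse an ISO 8601 string, then drop the timezone by converting to naive UTC. A minimal sketch with a made-up offset timestamp:

from oslo_utils import timeutils

raw = '2024-03-01T07:30:00-05:00'            # hypothetical tz-aware input
aware = timeutils.parse_isotime(raw)         # timezone-aware datetime (UTC-5)
naive_utc = timeutils.normalize_time(aware)  # naive datetime expressed in UTC

print(naive_utc)  # 2024-03-01 12:30:00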
Example #20
    def test_create_token(self):

        creds = self.os.credentials
        user_id = creds.user_id
        username = creds.username
        password = creds.password
        user_domain_id = creds.user_domain_id

        # 'user_domain_id' needs to be specified otherwise tempest.lib assumes
        # it to be 'default'
        token_id, resp = self.non_admin_token.get_token(
            user_id=user_id,
            user_domain_id=user_domain_id,
            password=password,
            auth_data=True)

        self.assertNotEmpty(token_id)
        self.assertIsInstance(token_id, six.string_types)

        now = timeutils.utcnow()
        expires_at = timeutils.normalize_time(
            timeutils.parse_isotime(resp['expires_at']))
        self.assertGreater(resp['expires_at'],
                           resp['issued_at'])
        self.assertGreater(expires_at, now)

        subject_id = resp['user']['id']
        self.assertEqual(subject_id, user_id)

        subject_name = resp['user']['name']
        self.assertEqual(subject_name, username)

        self.assertEqual(resp['methods'][0], 'password')
Example #21
    def record_metering_data(self, data):
        # We may have received only one counter on the wire
        if not isinstance(data, list):
            data = [data]

        for meter in data:
            LOG.debug(_(
                'metering data %(counter_name)s '
                'for %(resource_id)s @ %(timestamp)s: %(counter_volume)s')
                % ({'counter_name': meter['counter_name'],
                    'resource_id': meter['resource_id'],
                    'timestamp': meter.get('timestamp', 'NO TIMESTAMP'),
                    'counter_volume': meter['counter_volume']}))
            if publisher_utils.verify_signature(
                    meter, self.conf.publisher.telemetry_secret):
                try:
                    # Convert the timestamp to a datetime instance.
                    # Storage engines are responsible for converting
                    # that value to something they can store.
                    if meter.get('timestamp'):
                        ts = timeutils.parse_isotime(meter['timestamp'])
                        meter['timestamp'] = timeutils.normalize_time(ts)
                    self.meter_conn.record_metering_data(meter)
                except Exception as err:
                    LOG.exception(_('Failed to record metering data: %s'),
                                  err)
                    # raise the exception to propagate it up in the chain.
                    raise
            else:
                LOG.warning(_(
                    'message signature invalid, discarding message: %r'),
                    meter)
Example #22
    def record_events(self, events):
        if not isinstance(events, list):
            events = [events]

        event_list = []
        for ev in events:
            if publisher_utils.verify_signature(
                    ev, self.conf.publisher.telemetry_secret):
                try:
                    event_list.append(
                        models.Event(
                            message_id=ev['message_id'],
                            event_type=ev['event_type'],
                            generated=timeutils.normalize_time(
                                timeutils.parse_isotime(ev['generated'])),
                            traits=[models.Trait(
                                    name, dtype,
                                    models.Trait.convert_value(dtype, value))
                                    for name, dtype, value in ev['traits']],
                            raw=ev.get('raw', {}))
                    )
                except Exception:
                    LOG.exception(_LE("Error processing event and it will be "
                                      "dropped: %s"), ev)
            else:
                LOG.warning(_LW(
                    'event signature invalid, discarding event: %s'), ev)
        self.event_conn.record_events(event_list)
Example #23
    def update_software_deployment(self, cnxt, deployment_id, config_id,
                                   input_values, output_values, action,
                                   status, status_reason, updated_at):
        update_data = {}
        if config_id:
            update_data['config_id'] = config_id
        if input_values:
            update_data['input_values'] = input_values
        if output_values:
            update_data['output_values'] = output_values
        if action:
            update_data['action'] = action
        if status:
            update_data['status'] = status
        if status_reason:
            update_data['status_reason'] = status_reason
        if updated_at:
            update_data['updated_at'] = timeutils.normalize_time(
                timeutils.parse_isotime(updated_at))
        else:
            update_data['updated_at'] = timeutils.utcnow()

        sd = software_deployment_object.SoftwareDeployment.update_by_id(
            cnxt, deployment_id, update_data)

        # only push metadata if this update resulted in the config_id
        # changing, since metadata is just a list of configs
        if config_id:
            self._push_metadata_software_deployments(
                cnxt, sd.server_id, sd.stack_user_project_id)

        return api.format_software_deployment(sd)
Example #24
def _list_hosts(req, service=None):
    """Returns a summary list of hosts."""
    curr_time = timeutils.utcnow(with_timezone=True)
    context = req.environ['cinder.context']
    filters = {'disabled': False}
    services = objects.ServiceList.get_all(context, filters)
    zone = ''
    if 'zone' in req.GET:
        zone = req.GET['zone']
    if zone:
        services = [s for s in services if s['availability_zone'] == zone]
    hosts = []
    for host in services:
        delta = curr_time - (host.updated_at or host.created_at)
        alive = abs(delta.total_seconds()) <= CONF.service_down_time
        status = (alive and "available") or "unavailable"
        active = 'enabled'
        if host.disabled:
            active = 'disabled'
        LOG.debug('status, active and update: %s, %s, %s',
                  status, active, host.updated_at)
        updated_at = host.updated_at
        if updated_at:
            updated_at = timeutils.normalize_time(updated_at)
        hosts.append({'host_name': host.host,
                      'service': host.topic,
                      'zone': host.availability_zone,
                      'service-status': status,
                      'service-state': active,
                      'last-update': updated_at,
                      })
    if service:
        hosts = [host for host in hosts
                 if host['service'] == service]
    return hosts
Example #25
 def test_update_node_provision(self, mock_utcnow):
     mocked_time = datetime.datetime(2000, 1, 1, 0, 0)
     mock_utcnow.return_value = mocked_time
     node = utils.create_test_node()
     res = self.dbapi.update_node(node.id, {'provision_state': 'fake'})
     self.assertEqual(mocked_time,
                      timeutils.normalize_time(res['provision_updated_at']))
Example #26
 def list(self):
     """Show a list of all cinder services."""
     ctxt = context.get_admin_context()
     services = objects.ServiceList.get_all(ctxt)
     print_format = "%-16s %-36s %-16s %-10s %-5s %-20s %-12s %-15s"
     print(print_format % (_('Binary'),
                           _('Host'),
                           _('Zone'),
                           _('Status'),
                           _('State'),
                           _('Updated At'),
                           _('RPC Version'),
                           _('Object Version')))
     for svc in services:
         alive = utils.service_is_up(svc)
         art = ":-)" if alive else "XXX"
         status = 'enabled'
         if svc.disabled:
             status = 'disabled'
         updated_at = svc.updated_at
         if updated_at:
             updated_at = timeutils.normalize_time(updated_at)
         rpc_version = (svc.rpc_current_version or
                        rpc.LIBERTY_RPC_VERSIONS.get(svc.binary, ''))
         object_version = (svc.object_current_version or 'liberty')
         print(print_format % (svc.binary, svc.host.partition('.')[0],
                               svc.availability_zone, status, art,
                               updated_at, rpc_version,
                               object_version))
Example #27
 def get_events(self, event_filter):
     iclient = es.client.IndicesClient(self.conn)
     indices = iclient.get_mapping('%s_*' % self.index_name).keys()
     if indices:
         filter_args = self._make_dsl_from_filter(indices, event_filter)
         results = self.conn.search(fields=['_id', 'timestamp',
                                            '_type', '_source'],
                                    sort='timestamp:asc',
                                    **filter_args)
         trait_mappings = {}
         for record in results['hits']['hits']:
             trait_list = []
             if not record['_type'] in trait_mappings:
                 trait_mappings[record['_type']] = list(
                     self.get_trait_types(record['_type']))
             for key in record['_source']['traits'].keys():
                 value = record['_source']['traits'][key]
                 for t_map in trait_mappings[record['_type']]:
                     if t_map['name'] == key:
                         dtype = t_map['data_type']
                         break
                 trait_list.append(models.Trait(
                     name=key, dtype=dtype,
                     value=models.Trait.convert_value(dtype, value)))
             gen_ts = timeutils.normalize_time(timeutils.parse_isotime(
                 record['_source']['timestamp']))
             yield models.Event(message_id=record['_id'],
                                event_type=record['_type'],
                                generated=gen_ts,
                                traits=sorted(
                                    trait_list,
                                    key=operator.attrgetter('dtype')),
                                raw=record['_source']['raw'])
Example #28
 def list_revoke_events(self, request):
     since = request.params.get('since')
     last_fetch = None
     if since:
         try:
             last_fetch = timeutils.normalize_time(
                 timeutils.parse_isotime(since))
         except ValueError:
             raise exception.ValidationError(
                 message=_('invalid date format %s') % since)
     # FIXME(notmorgan): The revocation events cannot have resource options
     # added to them or lazy-loaded relationships as long as to_dict
     # is called outside of an active session context. This API is unused
     # and should be deprecated in the near future. Fix this before adding
     # resource_options or any lazy-loaded relationships to the revocation
     # events themselves.
     events = self.revoke_api.list_events(last_fetch=last_fetch)
     # Build the links by hand as the standard controller calls require ids
     response = {'events': [event.to_dict() for event in events],
                 'links': {
                     'next': None,
                     'self': RevokeController.base_url(
                         request.context_dict,
                         path=request.context_dict['path']),
                     'previous': None}
                 }
     return response
Example #29
    def test_time_string_to_float_conversions(self):
        payload_cls = token_formatters.BasePayload

        original_time_str = utils.isotime(subsecond=True)
        time_obj = timeutils.parse_isotime(original_time_str)
        expected_time_float = (
            (timeutils.normalize_time(time_obj) -
             datetime.datetime.utcfromtimestamp(0)).total_seconds())

        # NOTE(lbragstad): The token expiration time for Fernet tokens is
        # passed in the payload of the token. This is different from the token
        # creation time, which is handled by Fernet and doesn't support
        # subsecond precision because it is a timestamp integer.
        self.assertIsInstance(expected_time_float, float)

        actual_time_float = payload_cls._convert_time_string_to_float(
            original_time_str)
        self.assertIsInstance(actual_time_float, float)
        self.assertEqual(expected_time_float, actual_time_float)

        # Generate expected_time_str using the same time float. Using
        # original_time_str from utils.isotime will occasionally fail due to
        # floating point rounding differences.
        time_object = datetime.datetime.utcfromtimestamp(actual_time_float)
        expected_time_str = utils.isotime(time_object, subsecond=True)

        actual_time_str = payload_cls._convert_float_to_time_string(
            actual_time_float)
        self.assertEqual(expected_time_str, actual_time_str)
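Stripped of the Fernet payload helpers, the conversion exercised above is just seconds since the Unix epoch as a float. A standalone sketch, with an arbitrary subsecond timestamp chosen so the round trip is exact:

import datetime

from oslo_utils import timeutils

time_str = '2024-03-01T12:30:00.500000+00:00'  # hypothetical timestamp
time_obj = timeutils.normalize_time(timeutils.parse_isotime(time_str))
epoch = datetime.datetime.utcfromtimestamp(0)

as_float = (time_obj - epoch).total_seconds()   # float seconds since the epoch
restored = datetime.datetime.utcfromtimestamp(as_float)
assert restored == time_obj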
Example #30
    def test_create_token(self):

        token_client = self.non_admin_token_client

        # get a token for the user
        creds = self.os.credentials
        username = creds.username
        password = creds.password
        tenant_name = creds.tenant_name

        body = token_client.auth(username, password, tenant_name)

        self.assertNotEmpty(body['token']['id'])
        self.assertIsInstance(body['token']['id'], six.string_types)

        now = timeutils.utcnow()
        expires_at = timeutils.normalize_time(
            timeutils.parse_isotime(body['token']['expires']))
        self.assertGreater(expires_at, now)

        self.assertEqual(body['token']['tenant']['id'],
                         creds.credentials.tenant_id)
        self.assertEqual(body['token']['tenant']['name'],
                         tenant_name)

        self.assertEqual(body['user']['id'], creds.credentials.user_id)
Example #31
        def moving_window(x):
            msec = datetime.timedelta(milliseconds=1)
            zero = datetime.timedelta(seconds=0)
            half_span = datetime.timedelta(seconds=window / 2)
            start = timeutils.normalize_time(data.index[0])
            stop = timeutils.normalize_time(data.index[-1] +
                                            datetime.timedelta(
                                                seconds=min_grain))
            # min_grain addition necessary since each bin of rolled-up data
            # is indexed by leftmost timestamp of bin.

            left = half_span if center else zero
            right = 2 * half_span - left - msec
            # msec subtraction is so we don't include right endpoint in slice.

            x = timeutils.normalize_time(x)

            if x - left >= start and x + right <= stop:
                dslice = data[x - left:x + right]

                if center and dslice.size % 2 == 0:
                    return func([
                        func(data[x - msec - left:x - msec + right]),
                        func(data[x + msec - left:x + msec + right])
                    ])

                # (NOTE) atmalagon: the msec shift here is so that we have two
                # consecutive windows; one centered at time x - msec,
                # and one centered at time x + msec. We then average the
                # aggregates from the two windows; this result is centered
                # at time x. Doing this double average is a way to return a
                # centered average indexed by a timestamp that existed in
                # the input data (which wouldn't be the case for an even number
                # of points if we did only one centered average).

            else:
                return numpy.nan
            if dslice.size < min_size:
                return numpy.nan
            return func(dslice)
Example #32
    def _get_value_as_type(self, forced_type=None):
        """Convert metadata value to the specified data type.

        This method is called during metadata query to help convert the
        querying metadata to the data type specified by user. If there is no
        data type given, the metadata will be parsed by ast.literal_eval to
        try to do a smart converting.

        NOTE (flwang) Using "_" as prefix to avoid an InvocationError raised
        from wsmeext/sphinxext.py. It's OK to call it outside the Query class.
        Because the "public" side of that class is actually the outside of the
        API, and the "private" side is the API implementation. The method is
        only used in the API implementation, so it's OK.

        :returns: metadata value converted with the specified data type.
        """
        type = forced_type or self.type
        try:
            converted_value = self.value
            if not type:
                try:
                    converted_value = ast.literal_eval(self.value)
                except (ValueError, SyntaxError):
                    # Unable to convert the metadata value automatically
                    # let it default to self.value
                    pass
            else:
                if type not in self._supported_types:
                    # Types must be explicitly declared so the
                    # correct type converter may be used. Subclasses
                    # of Query may define _supported_types and
                    # _type_converters to define their own types.
                    raise TypeError()
                converted_value = self._type_converters[type](self.value)
                if isinstance(converted_value, datetime.datetime):
                    converted_value = timeutils.normalize_time(converted_value)
        except ValueError:
            msg = ('Unable to convert the value {value} to the expected '
                   'data type {type}.').format(value=self.value, type=type)
            raise ClientSideError(msg)
        except TypeError:
            msg = ('The data type {type} is not supported. The supported '
                   'data type list is: {supported}').format(
                       type=type, supported=self._supported_types)
            raise ClientSideError(msg)
        except Exception:
            msg = ('Unexpected exception converting {value} to '
                   'the expected data type {type}.').format(value=self.value,
                                                            type=type)
            raise ClientSideError(msg)
        return converted_value
Example #33
    def to_event(self, priority, notification_body):
        event_type = notification_body['event_type']
        message_id = notification_body['metadata']['message_id']
        when = timeutils.normalize_time(timeutils.parse_isotime(
            notification_body['metadata']['timestamp']))

        traits = (self.traits[t].to_trait(notification_body)
                  for t in self.traits)
        # Only accept non-None value traits ...
        traits = [trait for trait in traits if trait is not None]
        raw = notification_body if priority in self.raw_levels else {}
        event = models.Event(message_id, event_type, when, traits, raw)
        return event
Example #34
    def test_create_trust(self):
        new_id = uuid.uuid4().hex
        trust_data = self.create_sample_trust(new_id)

        self.assertEqual(new_id, trust_data['id'])
        self.assertEqual(self.trustee['id'], trust_data['trustee_user_id'])
        self.assertEqual(self.trustor['id'], trust_data['trustor_user_id'])
        self.assertGreater(timeutils.normalize_time(trust_data['expires_at']),
                           timeutils.utcnow())

        self.assertEqual([{'id': 'member'},
                          {'id': 'other'},
                          {'id': 'browser'}], trust_data['roles'])
Example #35
    def test_expiration_should_be_normalized_with_new_secret(self):
        target_expiration = '2114-02-28 12:14:44.180394-05:00'
        resp, secret_uuid = create_secret(self.app,
                                          expiration=target_expiration)

        self.assertEqual(201, resp.status_int)

        # Verify that the system normalizes time to UTC
        secret = secrets_repo.get(secret_uuid, self.project_id)
        local_datetime = timeutils.parse_isotime(target_expiration)
        datetime_utc = timeutils.normalize_time(local_datetime)

        self.assertEqual(datetime_utc, secret.expiration)
Example #36
 def validate(self, value, context):
     if not value:
         return True
     try:
         expiration_tz = timeutils.parse_isotime(value.strip())
         expiration = timeutils.normalize_time(expiration_tz)
         if expiration > timeutils.utcnow():
             return True
         raise ValueError(_('Expiration time is out of date.'))
     except Exception as ex:
         self._error_message = (_('Expiration {0} is invalid: {1}').format(
             value, six.text_type(ex)))
     return False
Example #37
    def _compare(test, db, obj):
        for field, value in db.items():
            if not hasattr(obj, field):
                continue

            if field in ('modified_at', 'created_at', 'updated_at',
                         'deleted_at') and db[field]:
                test.assertEqual(db[field],
                                 timeutils.normalize_time(obj[field]))
            elif isinstance(obj[field], obj_base.ObjectListBase):
                test.assertEqual(db[field], obj[field].objects)
            else:
                test.assertEqual(db[field], obj[field])
Example #38
    def _compare(test, db, obj):
        for field, value in db.items():
            if not hasattr(obj, field):
                continue

            if (isinstance(obj.fields[field], fields.DateTimeField) and
               db[field]):
                test.assertEqual(db[field],
                                 timeutils.normalize_time(obj[field]))
            elif isinstance(obj[field], obj_base.ObjectListBase):
                test.assertEqual(db[field], obj[field].objects)
            else:
                test.assertEqual(db[field], obj[field])
Example #39
    def _is_valid_token(self, token, window_seconds=0):
        """Verify the token is valid format and has not expired."""
        current_time = timeutils.normalize_time(timeutils.utcnow())

        try:
            expiry = timeutils.parse_isotime(token.expires_at)
            expiry = timeutils.normalize_time(expiry)

            # add a window in which you can fetch a token beyond expiry
            expiry += datetime.timedelta(seconds=window_seconds)

        except Exception:
            LOG.exception('Unexpected error or malformed token '
                          'determining token expiry: %s', token)
            raise exception.TokenNotFound(_('Failed to validate token'))

        if current_time < expiry:
            self.check_revocation(token)
            # Token has not expired and has not been revoked.
            return None
        else:
            raise exception.TokenNotFound(_('Failed to validate token'))
Example #40
    def _extract_when(body):
        """Extract the generated datetime from the notification."""
        # NOTE: I am keeping the logic the same as it was in the collector.
        # However, *ALL* notifications should have a 'timestamp' field; it's
        # part of the notification envelope spec. If this was put here because
        # some openstack project is generating notifications without a
        # timestamp, then that needs to be filed as a bug with the offending
        # project (mdragon)
        when = body.get('timestamp', body.get('_context_timestamp'))
        if when:
            return timeutils.normalize_time(timeutils.parse_isotime(when))

        return timeutils.utcnow()
Example #41
    def _is_valid_receipt(self, receipt, window_seconds=0):
        """Verify the receipt is valid format and has not expired."""
        current_time = timeutils.normalize_time(timeutils.utcnow())

        try:
            expiry = timeutils.parse_isotime(receipt.expires_at)
            expiry = timeutils.normalize_time(expiry)

            # add a window in which you can fetch a receipt beyond expiry
            expiry += datetime.timedelta(seconds=window_seconds)

        except Exception:
            LOG.exception('Unexpected error or malformed receipt '
                          'determining receipt expiry: %s', receipt)
            raise exception.ReceiptNotFound(
                _('Failed to validate receipt'), receipt_id=receipt.id)

        if current_time < expiry:
            return None
        else:
            raise exception.ReceiptNotFound(
                _('Failed to validate receipt'), receipt_id=receipt.id)
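The window_seconds handling above simply pushes the expiry forward before the comparison. A minimal standalone sketch of that grace-period check (the function name is illustrative, not keystone's API):

import datetime

from oslo_utils import timeutils


def still_within_window(expires_at_iso, window_seconds=0):
    # Compare naive-UTC datetimes, mirroring the token/receipt checks above.
    now = timeutils.normalize_time(timeutils.utcnow())
    expiry = timeutils.normalize_time(timeutils.parse_isotime(expires_at_iso))
    expiry += datetime.timedelta(seconds=window_seconds)
    return now < expiry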
Example #42
def build_token_values_v2(access, default_domain_id):
    token_data = access['token']
    token_values = {
        'expires_at': timeutils.normalize_time(
            timeutils.parse_isotime(token_data['expires'])),
        'issued_at': timeutils.normalize_time(
            timeutils.parse_isotime(token_data['issued_at']))}

    token_values['user_id'] = access.get('user', {}).get('id')

    project = token_data.get('tenant')
    if project is not None:
        token_values['project_id'] = project['id']
    else:
        token_values['project_id'] = None

    token_values['identity_domain_id'] = default_domain_id
    token_values['assignment_domain_id'] = default_domain_id

    trust = token_data.get('trust')
    if trust is None:
        token_values['trust_id'] = None
        token_values['trustor_id'] = None
        token_values['trustee_id'] = None
    else:
        token_values['trust_id'] = trust['id']
        token_values['trustor_id'] = trust['trustor_id']
        token_values['trustee_id'] = trust['trustee_id']

    token_values['consumer_id'] = None
    token_values['access_token_id'] = None

    role_list = []
    # Roles are by ID in metadata and by name in the user section
    roles = access.get('metadata', {}).get('roles', [])
    for role in roles:
        role_list.append(role)
    token_values['roles'] = role_list
    return token_values
Example #43
    def _cache_get(self, token_id):
        """Return token information from cache.

        If the token is invalid, raise exc.InvalidToken;
        return the token only if it is fresh (not expired).
        """

        if not token_id:
            # Nothing to do
            return

        key, context = self._get_cache_key(token_id)

        with self._cache_pool.reserve() as cache:
            serialized = cache.get(key)

        if serialized is None:
            return None

        if isinstance(serialized, six.text_type):
            serialized = serialized.encode('utf8')
        data = self._deserialize(serialized, context)

        # Note that _INVALID_INDICATOR and (data, expires) are the only
        # valid types of serialized cache entries, so there is not
        # a collision with jsonutils.loads(serialized) == None.
        if not isinstance(data, six.string_types):
            data = data.decode('utf-8')
        cached = jsonutils.loads(data)
        if cached == self._INVALID_INDICATOR:
            self._LOG.debug('Cached Token is marked unauthorized')
            raise exc.InvalidToken(_('Token authorization failed'))

        data, expires = cached

        try:
            expires = timeutils.parse_isotime(expires)
        except ValueError:
            # Gracefully handle upgrade of expiration times from *nix
            # timestamps to ISO 8601 formatted dates by ignoring old cached
            # values.
            return

        expires = timeutils.normalize_time(expires)
        utcnow = timeutils.utcnow()
        if utcnow < expires:
            self._LOG.debug('Returning cached token')
            return data
        else:
            self._LOG.debug('Cached Token seems expired')
            raise exc.InvalidToken(_('Token authorization failed'))
Example #44
    def _get_eventlog_collection(self,
                                 marker,
                                 limit,
                                 sort_key,
                                 sort_dir,
                                 expand=False,
                                 resource_url=None,
                                 q=None,
                                 alarms=False,
                                 logs=False,
                                 include_suppress=False):

        if limit and limit < 0:
            raise wsme.exc.ClientSideError(_("Limit must be positive"))
        sort_dir = utils.validate_sort_dir(sort_dir)
        kwargs = {}
        if q is not None:
            for i in q:
                if i.op == 'eq':
                    if i.field == 'start' or i.field == 'end':
                        val = timeutils.normalize_time(
                            timeutils.parse_isotime(
                                i.value).replace(tzinfo=None))
                        i.value = val.isoformat()
                    kwargs[i.field] = i.value

        evtType = _getEventType(alarms, logs)
        kwargs["evtType"] = evtType
        kwargs["include_suppress"] = include_suppress

        if marker:
            marker_obj = objects.event_log.get_by_uuid(pecan.request.context,
                                                       marker)

            ilog = pecan.request.dbapi.event_log_get_list(
                limit,
                marker_obj,
                sort_key=sort_key,
                sort_dir=sort_dir,
                evtType=evtType,
                include_suppress=include_suppress)
        else:
            kwargs['limit'] = limit
            ilog = pecan.request.dbapi.event_log_get_all(**kwargs)

        return EventLogCollection.convert_with_links(ilog,
                                                     limit,
                                                     url=resource_url,
                                                     expand=expand,
                                                     sort_key=sort_key,
                                                     sort_dir=sort_dir)
Example #45
    def put(self, request_token_id):
        ENFORCER.enforce_call(action='identity:authorize_request_token')
        roles = (flask.request.get_json(force=True, silent=True)
                 or {}).get('roles', [])
        validation.lazy_validate(schema.request_token_authorize, roles)
        ctx = flask.request.environ[context.REQUEST_CONTEXT_ENV]
        if ctx.is_delegated_auth:
            raise exception.Forbidden(
                _('Cannot authorize a request token with a token issued via '
                  'delegation.'))

        req_token = PROVIDERS.oauth_api.get_request_token(request_token_id)

        expires_at = req_token['expires_at']
        if expires_at:
            now = timeutils.utcnow()
            expires = timeutils.normalize_time(
                timeutils.parse_isotime(expires_at))
            if now > expires:
                raise exception.Unauthorized(_('Request token is expired'))

        authed_roles = _normalize_role_list(roles)

        # verify the authorizing user has the roles
        try:
            auth_context = flask.request.environ[
                authorization.AUTH_CONTEXT_ENV]
            user_token_ref = auth_context['token']
        except KeyError:
            LOG.warning("Couldn't find the auth context.")
            raise exception.Unauthorized()

        user_id = user_token_ref.user_id
        project_id = req_token['requested_project_id']
        user_roles = PROVIDERS.assignment_api.get_roles_for_user_and_project(
            user_id, project_id)
        cred_set = set(user_roles)

        if not cred_set.issuperset(authed_roles):
            msg = _('authorizing user does not have role required')
            raise exception.Unauthorized(message=msg)

        # create list of just the id's for the backend
        role_ids = list(authed_roles)

        # finally authorize the token
        authed_token = PROVIDERS.oauth_api.authorize_request_token(
            request_token_id, user_id, role_ids)

        to_return = {'token': {'oauth_verifier': authed_token['verifier']}}
        return to_return
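
The expiry check in the middle of this handler (parse, normalize, compare with utcnow()) can be lifted into a small helper. A minimal sketch assuming only oslo.utils; the helper name and the sample values are illustrative, not part of keystone's API.

import datetime

from oslo_utils import timeutils


def is_expired(expires_at):
    """Return True if the ISO 8601 timestamp lies in the past (UTC)."""
    if not expires_at:
        # Mirror the handler above: no recorded expiry means not expired.
        return False
    expires = timeutils.normalize_time(timeutils.parse_isotime(expires_at))
    return timeutils.utcnow() > expires


print(is_expired('2000-01-01T00:00:00Z'))                       # True
print(is_expired((timeutils.utcnow() +
                  datetime.timedelta(hours=1)).isoformat()))    # False
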
Example #46
0
    def _update_cache_entry(self, state):
        entry = {}

        host = state['host_name']
        entry['trust_lvl'] = state['trust_lvl']

        try:
            # Normalize as naive object to interoperate with utcnow().
            entry['vtime'] = timeutils.normalize_time(
                timeutils.parse_isotime(state['vtime']))
        except ValueError:
            try:
                # Mt. Wilson does not necessarily return an ISO8601 formatted
                # `vtime`, so we should try to parse it as a string formatted
                # datetime.
                vtime = timeutils.parse_strtime(state['vtime'], fmt="%c")
                entry['vtime'] = timeutils.normalize_time(vtime)
            except ValueError:
                # Mark the system as untrusted if we get an invalid vtime.
                entry['trust_lvl'] = 'unknown'
                entry['vtime'] = timeutils.utcnow()

        self.compute_nodes[host] = entry
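
The nested try/except above falls back from ISO 8601 to the C-locale %c format before giving up. A standalone sketch of that fallback chain; the function name and sample inputs are illustrative.

from oslo_utils import timeutils


def parse_vtime(vtime):
    """Parse vtime as ISO 8601, then as the locale's %c format.

    Returns a naive UTC datetime, or None when neither format matches
    (the caller above then marks the host's trust level as unknown).
    """
    try:
        return timeutils.normalize_time(timeutils.parse_isotime(vtime))
    except ValueError:
        try:
            return timeutils.normalize_time(
                timeutils.parse_strtime(vtime, fmt='%c'))
        except ValueError:
            return None


print(parse_vtime('2021-06-01T12:00:00Z'))             # ISO 8601 path
print(parse_vtime(timeutils.utcnow().strftime('%c')))  # %c fallback path
print(parse_vtime('not-a-date'))                       # None
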
Example #47
0
 def _create_token(self, token_id, token_data):
     try:
         if isinstance(token_data['expires'], six.string_types):
             token_data['expires'] = timeutils.normalize_time(
                 timeutils.parse_isotime(token_data['expires']))
         self._persistence.create_token(token_id, token_data)
     except Exception:
         exc_info = sys.exc_info()
         # an identical token may have been created already.
         # if so, return the token_data as it is also identical
         try:
             self._persistence.get_token(token_id)
         except exception.TokenNotFound:
             six.reraise(*exc_info)
Example #48
0
File: core.py Project: Boye-Z/123
 def process_bind_param(self, value, dialect):
     if value is None:
         return value
     else:
         if not isinstance(value, datetime.datetime):
             raise ValueError(_('Programming Error: value to be stored '
                                'must be a datetime object.'))
         value = timeutils.normalize_time(value)
         value = value.replace(tzinfo=pytz.UTC)
         # NOTE(morgan): We are casting this to an int, and ensuring we
         # preserve microsecond data by moving the decimal. This is easier
         # than being concerned with the differences in Numeric types in
         # different SQL backends.
         return int((value - self.epoch).total_seconds() * 1000000)
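
The NOTE(morgan) comment describes storing a UTC datetime as integer microseconds past an epoch by "moving the decimal". A standalone round-trip sketch of that encoding, assuming the epoch is the Unix epoch (the column type above keeps it as self.epoch); the helper names are illustrative.

import datetime

import pytz
from oslo_utils import timeutils

EPOCH = datetime.datetime.fromtimestamp(0, tz=pytz.UTC)


def to_db_int(value):
    """Encode a datetime as integer microseconds since the epoch."""
    value = timeutils.normalize_time(value)   # naive UTC
    value = value.replace(tzinfo=pytz.UTC)    # re-attach UTC for arithmetic
    return int((value - EPOCH).total_seconds() * 1000000)


def from_db_int(stored):
    """Decode the stored integer back into a naive UTC datetime."""
    value = EPOCH + datetime.timedelta(microseconds=stored)
    return value.replace(tzinfo=None)


stamp = datetime.datetime(2021, 6, 1, 12, 0, 0, 123456)
encoded = to_db_int(stamp)
print(encoded)                # 1622548800123456
print(from_db_int(encoded))   # 2021-06-01 12:00:00.123456
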
Example #49
0
    def will_expire_soon(self, best_before=BEST_BEFORE_SECONDS):
        """Determines if expiration is about to occur.

        :return: boolean : true if expiration is within the given duration

        """
        norm_expires = timeutils.normalize_time(self.expires)
        soon = (timeutils.utcnow() + datetime.timedelta(seconds=best_before))
        expiring = norm_expires < soon

        if expiring:
            logger.debug("Token expiring at %s", norm_expires)

        return expiring
Example #50
0
    def test_create_token(self):
        """Test creating token for user"""
        creds = self.os_primary.credentials
        user_id = creds.user_id
        username = creds.username
        password = creds.password
        user_domain_id = creds.user_domain_id

        # 'user_domain_id' needs to be specified otherwise tempest.lib assumes
        # it to be 'default'
        token_id, resp = self.non_admin_token.get_token(
            user_id=user_id,
            username=username,
            user_domain_id=user_domain_id,
            password=password,
            auth_data=True)

        self.assertNotEmpty(token_id)
        self.assertIsInstance(token_id, str)

        now = timeutils.utcnow()
        expires_at = timeutils.normalize_time(
            timeutils.parse_isotime(resp['expires_at']))
        self.assertGreater(resp['expires_at'],
                           resp['issued_at'])
        self.assertGreater(expires_at, now)

        subject_id = resp['user']['id']
        if user_id:
            self.assertEqual(subject_id, user_id)
        else:
            # Expect a user ID, but don't know what it will be.
            self.assertIsNotNone(subject_id, 'Expected user ID in token.')

        subject_name = resp['user']['name']

        if username:
            # NOTE: Resource names that are case-sensitive in keystone
            # depend on backends such as MySQL or LDAP, which are
            # case-insensitive but case-preserving. The resource name is
            # returned as it is stored in the backend, not as it was
            # requested. Compare both names lower-cased to avoid
            # failures on different backends.
            self.assertEqual(subject_name.lower(), username.lower())
        else:
            # Expect a user name, but don't know what it will be
            self.assertIsNotNone(subject_name, 'Expected user name in token.')

        self.assertEqual(resp['methods'][0], 'password')
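
The test compares expires_at and issued_at as raw strings, but compares expires_at with utcnow() only after parsing. The raw-string comparison is sound as long as both timestamps share one fixed-width, zero-padded UTC format, in which case lexicographic order matches chronological order; the parsed comparison works regardless. A small illustration with made-up values:

from oslo_utils import timeutils

# Hypothetical token timestamps in a fixed-width UTC format.
issued_at = '2021-06-01T12:00:00.000000Z'
expires_at = '2021-06-01T13:00:00.000000Z'

# Same format, zero-padded: string comparison matches chronological order.
print(expires_at > issued_at)     # True

# The equivalent comparison on parsed, normalized datetimes.
issued = timeutils.normalize_time(timeutils.parse_isotime(issued_at))
expires = timeutils.normalize_time(timeutils.parse_isotime(expires_at))
print(expires > issued)           # True
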
Example #51
0
    def authorize_request_token(self, request, request_token_id, roles):
        """An authenticated user is going to authorize a request token.

        As a security precaution, the requested roles must match those in
        the request token. Because this is in a CLI-only world at the moment,
        there is not another easy way to make sure the user knows which roles
        are being requested before authorizing.
        """
        if request.context.is_delegated_auth:
            raise exception.Forbidden(
                _('Cannot authorize a request token'
                  ' with a token issued via delegation.'))

        req_token = self.oauth_api.get_request_token(request_token_id)

        expires_at = req_token['expires_at']
        if expires_at:
            now = timeutils.utcnow()
            expires = timeutils.normalize_time(
                timeutils.parse_isotime(expires_at))
            if now > expires:
                raise exception.Unauthorized(_('Request token is expired'))

        # put the roles in a set for easy comparison
        authed_roles = set()
        for role in roles:
            authed_roles.add(role['id'])

        # verify the authorizing user has the roles
        user_token = authorization.get_token_ref(request.context_dict)
        user_id = user_token.user_id
        project_id = req_token['requested_project_id']
        user_roles = self.assignment_api.get_roles_for_user_and_project(
            user_id, project_id)
        cred_set = set(user_roles)

        if not cred_set.issuperset(authed_roles):
            msg = _('authorizing user does not have role required')
            raise exception.Unauthorized(message=msg)

        # create list of just the id's for the backend
        role_ids = list(authed_roles)

        # finally authorize the token
        authed_token = self.oauth_api.authorize_request_token(
            request_token_id, user_id, role_ids)

        to_return = {'token': {'oauth_verifier': authed_token['verifier']}}
        return to_return
Example #52
0
 def sample(self, ctxt, publisher_id, event_type, payload, metadata):
     events = [
         models.Event(message_id=ev['message_id'],
                      event_type=ev['event_type'],
                      generated=timeutils.normalize_time(
                          timeutils.parse_isotime(ev['generated'])),
                      traits=[
                          models.Trait(
                              name, dtype,
                              models.Trait.convert_value(dtype, value))
                          for name, dtype, value in ev['traits']
                      ]) for ev in payload
     ]
     with self.publish_context as p:
         p(events)
Example #53
0
    def will_expire_soon(self, stale_duration=None):
        """Determines if expiration is about to occur.

        :returns: true if expiration is within the given duration
        :rtype: boolean

        """
        stale_duration = (STALE_TOKEN_DURATION
                          if stale_duration is None else stale_duration)
        norm_expires = timeutils.normalize_time(self.expires)
        # (gyee) should we move auth_token.will_expire_soon() to timeutils
        # instead of duplicating code here?
        soon = (timeutils.utcnow() +
                datetime.timedelta(seconds=stale_duration))
        return norm_expires < soon
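
Examples #49 and #53 hand-roll the same "expires within N seconds" comparison, and the (gyee) note above asks whether it belongs in timeutils. oslo.utils does ship timeutils.is_soon() for this, and Example #54 below uses it. A minimal side-by-side sketch with an illustrative expiry and window:

import datetime

from oslo_utils import timeutils

STALE_TOKEN_DURATION = 30  # seconds; illustrative window

# Hypothetical expiry one minute from now.
expires = timeutils.utcnow() + datetime.timedelta(minutes=1)

# Hand-rolled check, as in Examples #49 and #53 ...
soon = timeutils.utcnow() + datetime.timedelta(seconds=STALE_TOKEN_DURATION)
print(timeutils.normalize_time(expires) < soon)           # False

# ... and the library helper that wraps the same comparison.
print(timeutils.is_soon(expires, STALE_TOKEN_DURATION))   # False
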
Example #54
0
    def _is_token_valid(self, region_name):
        try:
            keystone = \
                self.os_clients_dict[region_name]['keystone'].keystone_client
            if (not self._identity_tokens
                    or region_name not in self._identity_tokens
                    or not self._identity_tokens[region_name]):
                self._identity_tokens[region_name] = \
                    keystone.tokens.validate(keystone.session.get_token())
                LOG.info("Get new token for subcloud %s expires_at=%s" %
                         (region_name,
                          self._identity_tokens[region_name]['expires_at']))
                # Reset the cached dictionary
                self.os_clients_dict[region_name] = \
                    collections.defaultdict(dict)
                return False

            token = \
                keystone.tokens.validate(self._identity_tokens[region_name])
            if token != self._identity_tokens[region_name]:
                LOG.info("updating token %s to %s" %
                         (self._identity_tokens[region_name], token))
                self._identity_tokens[region_name] = token
                self.os_clients_dict[region_name] = \
                    collections.defaultdict(dict)
                return False

        except Exception as exception:
            LOG.info('_is_token_valid handle: %s', str(exception))
            # Reset the cached dictionary
            self.os_clients_dict[region_name] = collections.defaultdict(dict)
            self._identity_tokens[region_name] = None
            return False

        expiry_time = timeutils.normalize_time(
            timeutils.parse_isotime(
                self._identity_tokens[region_name]['expires_at']))
        if timeutils.is_soon(expiry_time, STALE_TOKEN_DURATION):
            LOG.info("The cached keystone token for subcloud %s "
                     "will expire soon %s" %
                     (region_name,
                      self._identity_tokens[region_name]['expires_at']))
            # Reset the cached dictionary
            self.os_clients_dict[region_name] = collections.defaultdict(dict)
            self._identity_tokens[region_name] = None
            return False
        else:
            return True
Example #55
0
    def test_time_string_to_int_conversions(self):
        payload_cls = token_formatters.BasePayload

        expected_time_str = utils.isotime()
        time_obj = timeutils.parse_isotime(expected_time_str)
        expected_time_int = (
            (timeutils.normalize_time(time_obj) -
             datetime.datetime.utcfromtimestamp(0)).total_seconds())

        actual_time_int = payload_cls._convert_time_string_to_int(
            expected_time_str)
        self.assertEqual(expected_time_int, actual_time_int)

        actual_time_str = payload_cls._convert_int_to_time_string(
            actual_time_int)
        self.assertEqual(expected_time_str, actual_time_str)
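
The test above converts an ISO 8601 string to seconds since the Unix epoch and back. A standalone sketch of that mapping, assuming UTC throughout; the helper names are illustrative and not the fernet formatter's actual API.

import datetime

from oslo_utils import timeutils

EPOCH = datetime.datetime.utcfromtimestamp(0)


def time_string_to_float(time_str):
    """ISO 8601 string -> float seconds since the Unix epoch."""
    time_obj = timeutils.parse_isotime(time_str)
    return (timeutils.normalize_time(time_obj) - EPOCH).total_seconds()


def float_to_datetime(seconds):
    """Seconds since the Unix epoch -> naive UTC datetime."""
    return EPOCH + datetime.timedelta(seconds=seconds)


as_float = time_string_to_float('2021-06-01T12:00:00Z')
print(as_float)                     # 1622548800.0
print(float_to_datetime(as_float))  # 2021-06-01 12:00:00
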
Example #56
0
    def publish_samples(self, samples):
        if not isinstance(samples, list):
            samples = [samples]

        # Transform the Sample objects into a list of dicts
        meters = [
            utils.meter_message_from_counter(
                sample, cfg.CONF.publisher.telemetry_secret)
            for sample in samples
        ]

        for meter in meters:
            if meter.get('timestamp'):
                ts = timeutils.parse_isotime(meter['timestamp'])
                meter['timestamp'] = timeutils.normalize_time(ts)
            self.meter_conn.record_metering_data(meter)
Example #57
0
    def _extract_expiration(self, json_data, schema_name):
        """Extracts and returns the expiration date from the JSON data."""
        expiration = None
        expiration_raw = json_data.get('expiration', None)
        if expiration_raw and expiration_raw.strip():
            try:
                expiration_tz = timeutils.parse_isotime(expiration_raw)
                expiration = timeutils.normalize_time(expiration_tz)
            except ValueError:
                LOG.exception("Problem parsing expiration date")
                raise exception.InvalidObject(schema=schema_name,
                                              reason=u._("Invalid date "
                                                         "for 'expiration'"),
                                              property="expiration")

        return expiration
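
The extractor above treats a missing or blank 'expiration' as "no expiration" and converts a parse failure into a schema violation. A minimal standalone version of the same flow, raising a plain ValueError in place of barbican's InvalidObject; the function name is illustrative.

from oslo_utils import timeutils


def extract_expiration(json_data):
    """Return 'expiration' as a naive UTC datetime, or None if absent."""
    raw = json_data.get('expiration')
    if not raw or not raw.strip():
        return None
    try:
        return timeutils.normalize_time(timeutils.parse_isotime(raw))
    except ValueError:
        raise ValueError("Invalid date for 'expiration': %r" % raw)


print(extract_expiration({'expiration': '2031-01-01T00:00:00Z'}))  # datetime
print(extract_expiration({'expiration': '   '}))                   # None
# extract_expiration({'expiration': 'bogus'})   # would raise ValueError
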
Example #58
0
 def _confirm_telemetry_sample(self, server, sample):
     """Check this sample matches the expected resource metadata."""
     # Confirm display_name
     self.assertEqual(server['name'],
                      sample['resource_metadata']['display_name'])
     # Confirm instance_type of flavor
     flavor = sample['resource_metadata'].get(
         'flavor.name', sample['resource_metadata'].get('instance_type'))
     self.assertEqual(server['flavor'], flavor)
     # Confirm the oldest sample was created before upgrade.
     if OPTS.mode == 'check':
         oldest_timestamp = timeutils.normalize_time(
             timeutils.parse_isotime(sample['timestamp']))
         self.assertTrue(
             oldest_timestamp < JAVELIN_START,
             'timestamp should come before start of second javelin run')
Example #59
0
 def assertReportedEventMatchesRecorded(self, event, sample, before_time):
     after_time = timeutils.utcnow()
     event_issued_before = timeutils.normalize_time(
         timeutils.parse_isotime(event['issued_before']))
     self.assertTrue(
         before_time <= event_issued_before,
         'invalid event issued_before time; %s is not later than %s.' %
         (timeutils.isotime(event_issued_before, subsecond=True),
          timeutils.isotime(before_time, subsecond=True)))
     self.assertTrue(
         event_issued_before <= after_time,
         'invalid event issued_before time; %s is not earlier than %s.' %
         (timeutils.isotime(event_issued_before, subsecond=True),
          timeutils.isotime(after_time, subsecond=True)))
     del (event['issued_before'])
     self.assertEqual(sample, event)
Example #60
0
 def sample(self, ctxt, publisher_id, event_type, payload, metadata):
     events = [
         models.Event(
             message_id=ev['message_id'],
             event_type=ev['event_type'],
             generated=timeutils.normalize_time(
                 timeutils.parse_isotime(ev['generated'])),
             traits=[models.Trait(name, dtype,
                                  models.Trait.convert_value(dtype, value))
                     for name, dtype, value in ev['traits']],
             raw=ev.get('raw', {}))
         for ev in payload if publisher_utils.verify_signature(
             ev, cfg.CONF.publisher.telemetry_secret)
     ]
     with self.publish_context as p:
         p(events)