Example #1

# imports assumed from the synapseclient test context (module paths as of
# synapseclient 2.x; hedged, not verified against this exact excerpt):
import datetime
from unittest import mock

from synapseclient import Synapse
from synapseclient.core import utils
from synapseclient.core.sts_transfer import StsTokenStore, _TokenCache
from synapseclient.core.utils import datetime_to_iso
def test_time_manipulation():
    round_tripped_datetime = utils.datetime_to_iso(
        utils.from_unix_epoch_time_secs(
            utils.to_unix_epoch_time_secs(
                utils.iso_to_datetime("2014-12-10T19:09:34.000Z"))))
    assert "2014-12-10T19:09:34.000Z" == round_tripped_datetime

    round_tripped_datetime = utils.datetime_to_iso(
        utils.from_unix_epoch_time_secs(
            utils.to_unix_epoch_time_secs(
                utils.iso_to_datetime("1969-04-28T23:48:34.123Z"))))
    assert "1969-04-28T23:48:34.123Z" == round_tripped_datetime

    # check that rounding down to the nearest millisecond works
    round_tripped_datetime = utils.datetime_to_iso(
        utils.from_unix_epoch_time_secs(
            utils.to_unix_epoch_time_secs(
                utils.iso_to_datetime("1969-04-28T23:48:34.999499Z"))))
    assert "1969-04-28T23:48:34.999Z" == round_tripped_datetime

    # check that rounding up carries across a second (and day) boundary
    round_tripped_datetime = utils.datetime_to_iso(
        utils.from_unix_epoch_time_secs(
            utils.to_unix_epoch_time_secs(
                utils.iso_to_datetime("1969-04-27T23:59:59.999999Z"))))
    assert "1969-04-28T00:00:00.000Z" == round_tripped_datetime
    # (this and the following methods are excerpted from test classes; the
    # mock_* parameters are presumably injected by mock.patch decorators
    # stripped from the excerpt)
    def test_synapse_client__discrete_sts_token_stores(self, mock_fetch_token):
        """Verify that two Synapse objects will not share the same cached tokens"""
        syn1 = Synapse(skip_checks=True)
        syn2 = Synapse(skip_checks=True)

        expected_token = {
            'awsAccessKeyId': 'ABC',
            'awsSecretAccessKey': '456',
            'expiration': datetime_to_iso(
                datetime.datetime.utcnow() + datetime.timedelta(hours=12)
            ),
        }
        mock_fetch_token.return_value = expected_token

        synapse_id = 'syn_123'
        permission = 'read_write'

        token = syn1.get_sts_storage_token(synapse_id, permission)
        assert expected_token == token
        assert mock_fetch_token.call_count == 1

        token = syn1.get_sts_storage_token(synapse_id, permission)
        assert expected_token == token

        # should have been satisfied from cache, not fetched again
        assert mock_fetch_token.call_count == 1

        # but now fetching from a separate synapse object should not be satisfied from a common cache
        token = syn2.get_sts_storage_token(synapse_id, permission)
        assert expected_token == token
        assert mock_fetch_token.call_count == 2
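
# Hedged sketch of the isolation asserted above (assumed design, not the
# verified Synapse internals): each client builds its own token cache in
# __init__; a shared module-level cache would have kept call_count at 1
# for the syn2 request.
class _ClientSketch:
    def __init__(self):
        self._sts_token_store = {}  # instance-local, never shared

    def get_sts_storage_token(self, entity_id, permission, fetch_fn):
        # fetch_fn stands in for the REST call; hypothetical parameter
        key = (entity_id, permission)
        if key not in self._sts_token_store:
            self._sts_token_store[key] = fetch_fn(entity_id, permission)
        return self._sts_token_store[key]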
    def test_old_tokens_pruned(self, mock_datetime):
        """Verify that tokens that do not have the remaining min_life before their expiration are not returned
        and are pruned as new tokens are added."""

        # we mock datetime and drop subseconds so results are deterministic
        # to the second as they go back and forth between parsing etc.
        utc_now = datetime.datetime.utcnow().replace(microsecond=0)
        mock_datetime.datetime.utcnow = mock.Mock(return_value=utc_now)

        token_cache = _TokenCache(1000)

        # this token should be immediately pruned
        token_cache['syn_1'] = {
            'expiration': datetime_to_iso(utc_now - datetime.timedelta(seconds=1))
        }
        assert 0 == len(token_cache)

        token_cache['syn_2'] = {
            'expiration': datetime_to_iso(utc_now + datetime.timedelta(seconds=1))
        }
        token_cache['syn_3'] = {
            'expiration': datetime_to_iso(utc_now + datetime.timedelta(minutes=1))
        }
        token_cache['syn_4'] = {
            'expiration': datetime_to_iso(utc_now + datetime.timedelta(hours=1))
        }

        # all the additional keys should still be there
        assert ['syn_2', 'syn_3', 'syn_4'] == list(token_cache.keys())

        # if we set a new key in the future any keys that are expired at that time should be pruned
        mock_datetime.datetime.utcnow = mock.Mock(
            return_value=utc_now + datetime.timedelta(minutes=30))

        token_cache['syn_5'] = {
            'expiration': datetime_to_iso(utc_now + datetime.timedelta(days=1))
        }
        assert ['syn_4', 'syn_5'] == list(token_cache.keys())
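
# Hedged sketch of the pruning behavior exercised above (assumed, not the
# actual _TokenCache source): expired entries are dropped every time a new
# token is stored, which is why syn_1 disappears immediately and syn_2/syn_3
# vanish once the clock advances past their expirations.
import collections

def _parse_iso_sketch(s):
    # hypothetical parser matching datetime_to_iso's "...%fZ" output
    return datetime.datetime.strptime(s, "%Y-%m-%dT%H:%M:%S.%fZ")

class _PruningCacheSketch(collections.OrderedDict):
    def __setitem__(self, key, token):
        super().__setitem__(key, token)
        now = datetime.datetime.utcnow()
        for k in [k for k, v in self.items()
                  if _parse_iso_sketch(v['expiration']) <= now]:
            del self[k]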
    @classmethod  # restored: the cls first argument implies a classmethod
    def _make_credentials(cls, bucket_items=None):
        credentials = {
            'accessKeyId': 'foo',
            'secretAccessKey': 'bar',
            'sessionToken': 'baz',
            'expiration': datetime_to_iso(cls._utcnow),
        }
        if bucket_items:
            credentials.update(bucket_items)

        return credentials

    def test_fetch_and_cache_token(self):
        entity_id = 'syn_1'
        token_store = StsTokenStore()
        min_remaining_life = datetime.timedelta(hours=1)

        expiration = datetime_to_iso(datetime.datetime.utcnow() +
                                     datetime.timedelta(hours=10))
        read_token = {'accessKeyId': '123', 'expiration': expiration}
        write_token = {'accessKeyId': '456', 'expiration': expiration}

        def synGET(uri):
            if 'read_write' in uri:
                return write_token
            return read_token

        syn = mock.Mock(restGET=mock.Mock(side_effect=synGET))

        token = token_store.get_token(syn, entity_id, 'read_only',
                                      min_remaining_life)
        assert token is read_token
        assert syn.restGET.call_count == 1
        assert f"/entity/{entity_id}/sts?permission=read_only" == syn.restGET.call_args[
            0][0]

        # getting the token again shouldn't cause it to be fetched again
        token = token_store.get_token(syn, entity_id, 'read_only',
                                      min_remaining_life)
        assert token is read_token
        assert syn.restGET.call_count == 1

        # however fetching a read_write token should cause a separate fetch
        token = token_store.get_token(syn, entity_id, 'read_write',
                                      min_remaining_life)
        assert token is write_token
        assert syn.restGET.call_count == 2
        assert f"/entity/{entity_id}/sts?permission=read_write" == syn.restGET.call_args[
            0][0]

        # but that should also be cached now
        token = token_store.get_token(syn, entity_id, 'read_write',
                                      min_remaining_life)
        assert token is write_token
        assert syn.restGET.call_count == 2
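
# Hedged sketch of the lookup pattern this test exercises (assumed structure,
# not the actual StsTokenStore internals): tokens are cached per
# (entity, permission) pair, so read_only and read_write are fetched and
# stored independently; the min_remaining_life check is omitted for brevity.
def _get_token_sketch(syn, cache, entity_id, permission):
    key = (entity_id, permission)
    token = cache.get(key)
    if token is None:
        token = syn.restGET(f"/entity/{entity_id}/sts?permission={permission}")
        cache[key] = token
    return token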
    def test_max_size(self):
        """Verify a token cache will not exceed the specified number of keys, ejecting FIFO as needed"""
        max_size = 5
        ejections = 3

        token_cache = _TokenCache(max_size)
        token = {
            'expiration': datetime_to_iso(
                datetime.datetime.utcnow() + datetime.timedelta(days=1)
            )
        }

        for i in range(max_size + ejections):
            token_cache[f"syn_{i}"] = token

        expected_keys = [f"syn_{i}" for i in range(ejections, max_size + ejections)]
        assert expected_keys == list(token_cache.keys())
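
# Hedged sketch of the FIFO eviction asserted above (assumed, not the actual
# _TokenCache source): once the cache exceeds max_size, the oldest-inserted
# keys are ejected first, leaving syn_3..syn_7 when 8 tokens are stored.
class _FifoCacheSketch(collections.OrderedDict):
    def __init__(self, max_size):
        super().__init__()
        self._max_size = max_size

    def __setitem__(self, key, value):
        super().__setitem__(key, value)
        while len(self) > self._max_size:
            self.popitem(last=False)  # drop the oldest insertion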
def epoch_time_to_iso(epoch_time):
    """
    Convert seconds since the unix epoch to a string in ISO format
    """
    if epoch_time is None:
        return None
    return utils.datetime_to_iso(utils.from_unix_epoch_time_secs(epoch_time))
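
# Usage of the helper above; the epoch value is derived from the first
# round-trip assertion in test_time_manipulation (1418238574 seconds is
# 2014-12-10T19:09:34Z):
assert epoch_time_to_iso(1418238574) == "2014-12-10T19:09:34.000Z"
assert epoch_time_to_iso(None) is None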