Example #1
def test_clear_metrics_cache(test_client, complete_admin_auth_token):
    def clear_metrics():
        return test_client.post(
            '/api/v1/metrics/clear_cache/',
            headers=dict(Authorization=complete_admin_auth_token,
                         Accept='application/json'),
        )

    # Clear once up front so the cache starts from a known-empty state
    clear_metrics()

    # Create 4 fake metrics
    red.set('2_metrics_fake_metric1', '123')
    red.set('2_metrics_fake_metric2', '123')
    red.set('2_metrics_fake_metric3', '123')
    red.set('2_metrics_fake_metric4', '123')

    # Now clear them for real
    response = clear_metrics()

    # Make sure that exactly the 4 fake metrics were removed from the cache
    assert response.json['data']['removed_entries'] == 4

    # And check that they're well and truly gone from the cache!
    assert not red.get('2_metrics_fake_metric1')
    assert not red.get('2_metrics_fake_metric2')
    assert not red.get('2_metrics_fake_metric3')
    assert not red.get('2_metrics_fake_metric4')
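
For context, the clear_cache endpoint under test isn't shown here. A minimal sketch of what such a handler might do, assuming keys follow the `{org}_metrics_{name}` pattern implied by the test and that the response reports a removed-entries count (both are assumptions, not the project's actual implementation):

def clear_metrics_cache():
    # scan_iter avoids blocking Redis on large keyspaces, unlike KEYS
    removed = 0
    for key in red.scan_iter(match='*_metrics_*'):
        red.delete(key)
        removed += 1
    return {'data': {'removed_entries': removed}}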
Example #2
def execute_with_partial_history_cache(metric_name,
                                       query,
                                       object_model,
                                       strategy,
                                       enable_cache=True,
                                       group_by=None,
                                       query_name=''):
    # enable_cache pass-through: skip caching for dumb strategies, or when filters are active
    if strategy in dumb_strategies or not enable_cache:
        return _handle_combinatory_strategy(query, None, strategy)
    if query_name:
        metric_name = metric_name + '_' + query_name
    # Redis object names
    if g.get('query_organisations'):
        ORG_STRING = get_metrics_org_string(g.query_organisations)
    else:
        ORG_STRING = get_metrics_org_string(g.active_organisation.id)
    CURRENT_MAX_ID = f'{ORG_STRING}_{object_model.__table__.name}_max_id'
    HIGHEST_ID_CACHED = f'{ORG_STRING}_{metric_name}_{group_by}_max_cached_id'
    CACHE_RESULT = f'{ORG_STRING}_{metric_name}_{group_by}'
    # Check that the provided combinatory strategy is valid
    if strategy not in valid_strategies:
        raise Exception(f'Invalid combinatory strategy {strategy} requested.')

    # Getting the current maximum ID in the database. Also caching it so we don't have to
    # get it from the DB many times in the same request
    current_max_id = red.get(CURRENT_MAX_ID)
    if not current_max_id:
        # scalar() returns the max ID directly, or None when the table is empty
        current_max_id = db.session.query(
            db.func.max(object_model.id)).scalar() or 0

        red.set(CURRENT_MAX_ID, current_max_id, 10)

    # Gets cache results since the last time the metrics were fetched
    highest_id_in_cache = int(red.get(HIGHEST_ID_CACHED) or 0)
    cache_result = _load_cache(CACHE_RESULT)
    # If there's no cache (either it's a new attribute we're tracking, or the cache is corrupted)
    # then we should pull results starting at id=0
    if cache_result:
        filtered_query = query.filter(object_model.id > highest_id_in_cache)
    else:
        filtered_query = query

    # Combine results
    result = _handle_combinatory_strategy(filtered_query, cache_result,
                                          strategy)
    # Updates the cache with new data
    _store_cache(CACHE_RESULT, result)
    red.set(HIGHEST_ID_CACHED, current_max_id, config.METRICS_CACHE_TIMEOUT)

    return result
Example #3
def rate_limit(key, rate):
    """Function to impose ratelimits on an API. Pass it a key, and it'll only allow that key `rate` times per hour
    key: Key on which to rate limit (like a username)
    rate: Number of allowed requests per hour
    
    Returns True if rate limited
    """

    # Don't ratelimit our unit tests!
    if current_app.config['IS_TEST']:
        return False

    # Check if the key has been tried recently
    if red.exists(key):
        attempts = int(red.get(key))
        if attempts > rate:
            # If rate limited, return how long (in minutes) until the key can be
            # tried again; round up to at least 1 so the result is always truthy
            ttl = max(int(red.ttl(key) / 60), 1)
            return ttl
        red.set(key, attempts + 1, keepttl=True)
        return False
    # Add key to redis to start tracking rates
    red.setex(
        key,
        3600,  # 1 Hour
        value=1)
    return False
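
A hedged usage sketch for the rate limiter above, inside a hypothetical Flask login view (the route, the per-username key format, and the 10-per-hour limit are all illustrative assumptions):

from flask import request, make_response

@app.route('/api/v1/auth/request_api_token/', methods=['POST'])
def hypothetical_login():
    # Key on the username so each account gets its own hourly budget
    minutes_left = rate_limit(f"login_{request.json.get('username')}", rate=10)
    if minutes_left:
        return make_response(
            {'message': f'Too many attempts. Try again in {minutes_left} minutes.'}), 429
    # ... normal login flow continues here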
Example #4
def _set_user_gps_from_location(user_id: int, location: str):
    """
    Wrapped version for testing
    """

    # Try to load the location from the redis cache to avoid hitting OSM too much
    cached_tuple_string = red.get(redis_location_key(location))
    if cached_tuple_string:
        gps_tuple = json.loads(cached_tuple_string)
    else:
        gps_tuple = osm_location_to_gps_lookup(location)
        if not gps_tuple:
            logg.warning(
                f'GPS for location not found on OSM for user {user_id}')
            return

        red.set(redis_location_key(location), json.dumps(gps_tuple))

    lat, lng = gps_tuple

    user = User.query.get(user_id)
    if not user:
        capture_message(f'User not found for id {user_id}')
        return

    user.lat = lat
    user.lng = lng

    db.session.commit()
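
Both GPS examples depend on a redis_location_key helper that isn't shown. A minimal sketch of a plausible implementation (the exact key prefix is an assumption):

def redis_location_key(location: str) -> str:
    # Namespace raw location strings so they can't collide with other cache keys
    return f'location_{location}'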
Example #5
def _load_cache(key):
    cached_object = red.get(key)
    if not cached_object:
        return None
    try:
        return pickle.loads(cached_object)
    except Exception:
        # Corrupted or unreadable cache entry: evict it and treat as a miss
        red.delete(key)
        return None
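
Examples #2 and #6 call a _store_cache counterpart that isn't shown. A minimal sketch consistent with the pickle-based loader above (reusing the METRICS_CACHE_TIMEOUT expiry from Example #2 is an assumption):

def _store_cache(key, value):
    # pickle lets arbitrary result structures round-trip through Redis
    red.set(key, pickle.dumps(value), config.METRICS_CACHE_TIMEOUT)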
Example #6
def execute_with_partial_history_cache(metric_name, query, object_model,
                                       strategy, disable_cache=False):
    # disable_cache pass-through: skip caching when filters are active
    if disable_cache:
        return _handle_combinatory_strategy(query, None, strategy)

    # Redis object names
    CURRENT_MAX_ID = f'{g.active_organisation.id}_{object_model.__table__.name}_max_id'
    HIGHEST_ID_CACHED = f'{g.active_organisation.id}_{metric_name}_max_cached_id'
    CACHE_RESULT = f'{g.active_organisation.id}_{metric_name}'

    # Check that the provided combinatory strategy is valid
    if strategy not in valid_strategies:
        raise Exception(f'Invalid combinatory strategy {strategy} requested.')

    # Getting the current maximum ID in the database. Also caching it so we don't have to
    # get it from the DB many times in the same request
    current_max_id = red.get(CURRENT_MAX_ID)
    if not current_max_id:
        # scalar() returns the max ID directly, or None when the table is empty
        current_max_id = db.session.query(db.func.max(object_model.id)).scalar() or 0
        red.set(CURRENT_MAX_ID, current_max_id, 10)

    # Gets cache results since the last time the metrics were fetched
    highest_id_in_cache = int(red.get(HIGHEST_ID_CACHED) or 0)
    cache_result = _load_cache(CACHE_RESULT)
    # If there's no cache (either it's a new attribute we're tracking, or the cache is corrupted)
    # then we should pull results starting at id=0
    if cache_result:
        filtered_query = query.filter(object_model.id > highest_id_in_cache)
    else:
        filtered_query = query

    # Combine results
    result = _handle_combinatory_strategy(filtered_query, cache_result, strategy)

    # Updates the cache with new data
    _store_cache(CACHE_RESULT, result)
    red.set(HIGHEST_ID_CACHED, current_max_id)

    return result
Example #7
def _set_user_gps_from_location(user_id: int,
                                location: str,
                                skip_cache=False,
                                user_obj=None):
    """
    Wrapped version for testing
    :param user_id: the user ID to set the location for
    :param location: the location string to geocode for the user
    :param skip_cache: flag to skip reading from the cache (will still write to it)
    :param user_obj: the user object to set location for (Optional)

    """
    user = user_obj or User.query.get(user_id)

    if not user:
        capture_message(f'User not found for id {user_id}')
        return

    # Add country to location lookup if it's not already there
    country = user.default_organisation.country if user.default_organisation else None
    query_location = location
    if country and location and country not in location:
        query_location = f'{location}, {country}'

    # Try load location from redis cache to avoid hitting OSM too much
    cached_tuple_string = None
    if not skip_cache:
        cached_tuple_string = red.get(redis_location_key(query_location))

    if cached_tuple_string:
        gps_tuple = tuple(json.loads(cached_tuple_string))
    else:
        gps_tuple = osm_location_to_gps_lookup(
            query_location) or get_location_from_peers(location)
        if not gps_tuple:
            logg.warning(
                f'GPS for location "{query_location}" not found on OSM or amongst peers for user {user_id}'
            )
            return

        red.set(redis_location_key(query_location), json.dumps(gps_tuple))

    lat, lng = gps_tuple

    user.lat = lat
    user.lng = lng

    db.session.commit()
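
A brief usage sketch for the function above (the worker context and the location string are illustrative):

# e.g. from an async worker, forcing a fresh OSM lookup for one user
_set_user_gps_from_location(user.id, 'Nairobi', skip_cache=True)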
Example #8
def test_dataset_api(test_client, authed_sempo_admin_user):
    data = {
        "data": [
            {
                "0": "Alf",
                "1": "Melmac",
                "2": "19027192211"
            },
            {
                "0": "Alf",
                "1": "Tanner",
                "2": "19027192211" # Same phone number, should trigger update
            },
            {
                "0": "Willie",
                "1": "Tanner",
                "2": "19027192222"
            }
        ],
        "headerPositions": {
            "0": "first_name",
            "1": "last_name",
            "2": "phone"
        },
        "country": "",
        "saveName": "",
        "isVendor": False
    }

    auth = get_complete_auth_token(authed_sempo_admin_user)
    
    response = test_client.post(
        f"/api/v1/dataset/",
        headers=dict(
            Authorization=auth,
            Accept='application/json',
        ),
        json=data
    )

    redis_id = get_job_key(authed_sempo_admin_user.id, response.json['task_uuid'])
    status = json.loads(red.get(redis_id))
    assert status['message'] == 'success'
    assert status['percent_complete'] == 100.0
    assert status['diagnostics'] == [['User Created', 200], ['User Updated', 200], ['User Created', 200]]
Example #9
def cached_funds_available(allowed_cache_age_seconds=60):
    """
    IF refreshing cache THEN:
        return: [current blockchain balance] - [all transfers with blockchain state pending or unknown]
        save to cache: [funds available], [ID of highest transfer used in cache], [cache creation datetime]
    ELSE
        return: [funds available at last cache] - [all non-failed transfers since last cache]
    Max Txn ID is a simple way to determine whether txn was used in cache or not, and thus needs to be accounted for
    :param allowed_cache_age_seconds: how long between checking the blockchain for external funds added or removed
    :return: amount of funds available
    """
    token = g.active_organisation.org_level_transfer_account.token

    balance_wei = bt.get_wallet_balance(
        g.active_organisation.org_level_transfer_account.blockchain_address,
        token)

    # NOTE: this early return short-circuits the caching logic below, leaving it
    # unreachable; the function currently always returns the live balance
    return token.token_amount_to_system(balance_wei)

    refresh_cache = False
    funds_available_cache = red.get('funds_available_cache')

    try:
        parsed_cache = json.loads(funds_available_cache)

        last_updated_datetime = datetime.datetime.fromtimestamp(
            float(parsed_cache['last_updated']))

        earliest_allowed = datetime.datetime.utcnow() - datetime.timedelta(
            seconds=allowed_cache_age_seconds)
        if last_updated_datetime < earliest_allowed:
            refresh_cache = True

    except Exception:
        refresh_cache = True

    if refresh_cache:

        master_wallet_balance = bt.get_wallet_balance()

        highest_transfer_id_checked = 0
        required_blockchain_statuses = ['PENDING', 'UNKNOWN']

    else:
        cached_funds_available = parsed_cache['cached_funds_available']
        highest_transfer_id_checked = parsed_cache[
            'highest_transfer_id_checked']
        required_blockchain_statuses = ['PENDING', 'UNKNOWN', 'COMPLETE']

    new_disbursements = (CreditTransfer.query
        .filter(CreditTransfer.transfer_type == TransferTypeEnum.PAYMENT)
        .filter(CreditTransfer.transfer_subtype == TransferSubTypeEnum.DISBURSEMENT)
        .filter(CreditTransfer.transfer_status == TransferStatusEnum.COMPLETE)
        .filter(CreditTransfer.id > highest_transfer_id_checked)
        .filter(CreditTransfer.created > datetime.datetime.utcnow() -
                datetime.timedelta(hours=36))
        .all())

    local_disbursement_value = 0
    for disbursement in new_disbursements:

        status = disbursement.blockchain_status

        if status in required_blockchain_statuses:
            local_disbursement_value += disbursement.transfer_amount

    if refresh_cache:

        balance = master_wallet_balance - local_disbursement_value

        if len(new_disbursements) > 0:
            highest_transfer_id_checked = new_disbursements[-1].id
        else:
            all_transfers = CreditTransfer.query.all()
            if len(all_transfers) > 0:
                highest_transfer_id_checked = all_transfers[-1].id
            else:
                highest_transfer_id_checked = 0

        cache_data = {
            'cached_funds_available': balance,
            'highest_transfer_id_checked': highest_transfer_id_checked,
            'last_updated': datetime.datetime.utcnow().timestamp()
        }

        red.set('funds_available_cache', json.dumps(cache_data))

        return balance

    else:

        balance = cached_funds_available - local_disbursement_value

        return balance
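
The staleness check in Examples #9 and #11 is an instance of a general pattern: store the payload next to a last_updated timestamp and recompute once the entry is older than the allowed age. A stripped-down sketch of that pattern (all names here are illustrative):

import datetime
import json

def get_with_staleness_check(key, max_age_seconds, recompute):
    cached = red.get(key)
    if cached:
        parsed = json.loads(cached)
        age = datetime.datetime.utcnow().timestamp() - parsed['last_updated']
        if age <= max_age_seconds:
            return parsed['value']
    # Missing or stale: recompute, then re-stamp the cache entry
    value = recompute()
    red.set(key, json.dumps({
        'value': value,
        'last_updated': datetime.datetime.utcnow().timestamp(),
    }))
    return value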
Example #10
def get_job_result(user_id, func_uuid):
    key = get_job_key(user_id, func_uuid)
    return red.get(key)
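
Examples #8 and #10 both rely on get_job_key, which isn't shown. A minimal sketch (the key format is an assumption):

def get_job_key(user_id, func_uuid):
    # Scope job-status entries by user so task UUIDs can't leak across accounts
    return f'JOB_{user_id}_{func_uuid}'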
Example #11
def master_wallet_funds_available(allowed_cache_age_seconds=60):
    """
    IF refreshing cache THEN:
        return: [current blockchain balance] - [all transfers with blockchain state pending or unknown]
        save to cache: [funds available at last cache], [ID of highest transfer used in cache], [cache creation datetime]
    ELSE
        return: [funds available at last cache] - [all non-failed transfers since cache created]
    :param allowed_cache_age_seconds: how long between checking the blockchain for external funds added or removed
    :return: amount of funds available
    """

    refresh_cache = False
    funds_available_cache = red.get('funds_available_cache')

    try:
        parsed_cache = json.loads(funds_available_cache)

        last_updated_datetime = datetime.datetime.fromtimestamp(
            float(parsed_cache['last_updated']))

        earliest_allowed = datetime.datetime.utcnow() - datetime.timedelta(
            seconds=allowed_cache_age_seconds)
        if last_updated_datetime < earliest_allowed:
            refresh_cache = True

    except Exception:
        refresh_cache = True

    if refresh_cache:

        blockchain_task = celery_app.signature(
            'worker.celery_tasks.get_master_balance')

        result = blockchain_task.apply_async()

        try:
            master_wallet_balance = result.wait(timeout=6,
                                                propagate=True,
                                                interval=0.5)

        except Exception as e:
            print(e)
            sentry.captureException()
            raise BlockchainError("Blockchain Error")

        finally:
            result.forget()

        highest_transfer_id_checked = 0
        required_blockchain_statuses = ['PENDING', 'UNKNOWN']

    else:
        cached_funds_available = parsed_cache['cached_funds_available']
        highest_transfer_id_checked = parsed_cache[
            'highest_transfer_id_checked']
        required_blockchain_statuses = ['PENDING', 'UNKNOWN', 'COMPLETE']

    new_disbursements = (models.CreditTransfer.query
        .filter(models.CreditTransfer.transfer_type ==
                models.TransferTypeEnum.DISBURSEMENT)
        .filter(models.CreditTransfer.transfer_status ==
                models.TransferStatusEnum.COMPLETE)
        .filter(models.CreditTransfer.id > highest_transfer_id_checked)
        .filter(models.CreditTransfer.created > datetime.datetime.utcnow() -
                datetime.timedelta(hours=36))
        .all())

    local_disbursement_value = 0
    for disbursement in new_disbursements:

        status = disbursement.blockchain_status

        if status in required_blockchain_statuses:
            local_disbursement_value += disbursement.transfer_amount

    if refresh_cache:

        balance = master_wallet_balance - local_disbursement_value

        if len(new_disbursements) > 0:
            highest_transfer_id_checked = new_disbursements[-1].id
        else:
            all_transfers = models.CreditTransfer.query.all()
            if len(all_transfers) > 0:
                highest_transfer_id_checked = all_transfers[-1].id
            else:
                highest_transfer_id_checked = 0

        cache_data = {
            'cached_funds_available': balance,
            'highest_transfer_id_checked': highest_transfer_id_checked,
            'last_updated': datetime.datetime.utcnow().timestamp()
        }

        red.set('funds_available_cache', json.dumps(cache_data))

        return balance

    else:

        balance = cached_funds_available - local_disbursement_value

        return balance