Example no. 1
def deflate_trending_users(event, context):
    total_cnt, deflated_cnt = user_manager.trending_deflate()
    with LogLevelContext(logger, logging.INFO):
        logger.info(
            f'Trending users deflated: {deflated_cnt} out of {total_cnt}')
    deleted_cnt = user_manager.trending_delete_tail(total_cnt)
    with LogLevelContext(logger, logging.INFO):
        logger.info(
            f'Trending users removed: {deleted_cnt} out of {total_cnt}')
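
Every handler in these examples wraps its one allowed INFO line in LogLevelContext. The helper itself is not shown in these excerpts; a minimal sketch of what it presumably does (temporarily raise the logger's level, then restore it) could look like this:

import logging

class LogLevelContext:
    "Temporarily set a logger to the given level, restoring the previous level on exit"

    def __init__(self, logger, level):
        self.logger = logger
        self.level = level

    def __enter__(self):
        self.previous_level = self.logger.level
        self.logger.setLevel(self.level)
        return self.logger

    def __exit__(self, exc_type, exc_value, exc_traceback):
        self.logger.setLevel(self.previous_level)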
Example no. 2
def dispatch(event, context):
    "Top-level dispatch of appsync event to the correct handler"
    # it is a sin that python has no dictionary destructuring assignment
    gql = get_gql_details(event)
    field = gql.get('field')
    caller_user_id = gql.get('callerUserId')
    arguments = gql.get('arguments')
    source = gql.get('source')

    handler = routes.get_handler(field)
    if not handler:
        # should not be able to get here
        msg = f'No handler for field `{field}` found'
        logger.exception(msg)
        raise Exception(msg)

    # we suppress INFO logging, except this message
    with LogLevelContext(logger, logging.INFO):
        logger.info(f'Handling AppSync GQL resolution of `{field}`')

    try:
        resp = handler(caller_user_id, arguments, source, context)
    except ClientException as err:
        msg = 'ClientError: ' + str(err)
        logger.warning(msg)
        return {'error': {'message': msg, 'data': err.data, 'info': err.info}}

    return {'success': resp}
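
The dispatcher relies on get_gql_details() to unpack what the AppSync request mapping template put into the event. Its implementation is not included here; assuming the template forwards the keys used above at the top level of the event, a sketch might be:

def get_gql_details(event):
    "Extract the GQL resolution details packed into the AppSync event"
    # assumption: the request mapping template forwards these keys at the top level;
    # adjust the lookups if they are nested differently in the real event
    return {
        'field': event.get('field'),
        'callerUserId': event.get('callerUserId'),
        'arguments': event.get('arguments'),
        'source': event.get('source'),
    }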
Example no. 3
def video_post_processed(event, context):
    # we suppress INFO logging, except this message
    with LogLevelContext(logger, logging.INFO):
        logger.info('Handling S3 Object Created (video post processed) event')

    path = urllib.parse.unquote(event['Records'][0]['s3']['object']['key'])
    _, _, post_id, _, _ = path.split('/')

    # strongly consistent because we may have just added the post to dynamo
    post = post_manager.get_post(post_id, strongly_consistent=True)
    if not post:
        logger.warning(f'Unable to find post `{post_id}`, ignoring upload')
        return

    if post.status != PostStatus.PROCESSING:
        logger.warning(
            f'Post `{post_id}` is not in PROCESSING status: `{post.status}`, ignoring'
        )
        return

    try:
        post.finish_processing_video_upload()
    except Exception as err:
        post.error(str(err))
        if not isinstance(err, PostException):
            raise err
        logger.warning(str(err))
Example no. 4
def video_post_uploaded(event, context):
    # The boto s3 client seems to deal with non-urlencoded object keys everywhere, but
    # apparently this falls outside that scope: the event emitter passes us a urlencoded path.
    path = urllib.parse.unquote(event['Records'][0]['s3']['object']['key'])
    size_bytes = event['Records'][0]['s3']['object']['size']

    # we suppress INFO logging, except this message
    with LogLevelContext(logger, logging.INFO):
        logger.info('Handling S3 Object Created (video post uploaded) event', extra={'s3_key': path})

    _, _, post_id, _ = path.split('/')

    # strongly consistent because we may have just added the post to dynamo
    post = post_manager.get_post(post_id, strongly_consistent=True)
    if not post:
        logger.warning(f'Unable to find post `{post_id}`, ignoring upload')
        return

    if post.status != PostStatus.PENDING:
        logger.warning(f'Post `{post_id}` is not in PENDING status: `{post.status}`, ignoring upload')
        return

    max_size_bytes = 2 * 1024 * 1024 * 1024  # 2GB, as spec'd via chat
    if size_bytes > max_size_bytes:
        logger.warning(f'Received upload of `{size_bytes}` bytes which exceeds max size for post `{post_id}`')
        post.error()

    try:
        post.start_processing_video_upload()
    except Exception as err:
        post.error(str(err))
        if not isinstance(err, PostException):
            raise err
        logger.warning(str(err))
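
For reference, the handler only reads the object key and size from the first record of a standard S3 "Object Created" event. A hand-built event for a local smoke test might look like the following; the key layout and filename are illustrative, chosen to match the four-segment split above:

sample_event = {
    'Records': [{
        's3': {
            'object': {
                'key': 'some-user-id/post/some-post-id/video-original.mov',  # post id is the third segment
                'size': 1024,
            },
        },
    }],
}
video_post_uploaded(sample_event, None)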
Example no. 5
def dispatch(event, context):
    "Top-level dispatch of appsync event to the correct handler"
    # it is a sin that python has no dictionary destructuring assignment
    client = get_client_details(event)
    gql = get_gql_details(event)

    field = gql['field']
    handler = routes.get_handler(field)
    if not handler:
        # should not be able to get here
        msg = f'No handler for field `{field}` found'
        logger.exception(msg)
        raise Exception(msg)

    # we suppress INFO logging, except this message
    with LogLevelContext(logger, logging.INFO):
        logger.info(f'Handling AppSync GQL resolution of `{field}`')

    try:
        data = handler(
            gql['callerUserId'],
            gql['arguments'],
            source=gql['source'],
            context=context,
            event=event,
            client=client,
        )
    except ClientException as err:
        logger.warning(str(err))
        return {'error': err.serialize()}

    return {'data': data}
Example no. 6
def video_post_uploaded(event, context):
    # we suppress INFO logging, except this message
    with LogLevelContext(logger, logging.INFO):
        logger.info('Handling S3 Object Created (video post uploaded) event')

    path = urllib.parse.unquote(event['Records'][0]['s3']['object']['key'])
    _, _, post_id, _ = path.split('/')

    # strongly consistent because we may have just added the post to dynamo
    post = post_manager.get_post(post_id, strongly_consistent=True)
    if not post:
        logger.warning(f'Unable to find post `{post_id}`, ignoring upload')
        return

    if post.status != PostStatus.PENDING:
        logger.warning(
            f'Post `{post_id}` is not in PENDING status: `{post.status}`, ignoring upload'
        )
        return

    size_bytes = event['Records'][0]['s3']['object']['size']
    max_size_bytes = 2 * 1024 * 1024 * 1024  # 2GB, as spec'd via chat
    if size_bytes > max_size_bytes:
        logger.warning(
            f'Received upload of `{size_bytes}` bytes which exceeds max size for post `{post_id}`'
        )
        post.error()

    try:
        post.start_processing_video_upload()
    except Exception as err:
        post.error(str(err))
        if not isinstance(err, PostException):
            raise err
        logger.warning(str(err))
Example no. 7
def dispatch(event, context):
    "Top-level dispatch of appsync event to the correct handler"
    # it is a sin that python has no dictionary destructuring assignment
    client = get_client_details(event)
    gql = get_gql_details(event)
    field = gql.get('field')
    caller_user_id = gql.get('callerUserId')
    arguments = gql.get('arguments')
    source = gql.get('source')

    handler = routes.get_handler(field)
    if not handler:
        # should not be able to get here
        msg = f'No handler for field `{field}` found'
        logger.exception(msg)
        raise Exception(msg)

    # we suppress INFO logging, except this message
    with LogLevelContext(logger, logging.INFO):
        logger.info(f'Handling AppSync GQL resolution of `{field}`')

    try:
        # Once support for direct-to-lambda resolvers lands, would be good to simplify this interface
        # to match that. https://github.com/sid88in/serverless-appsync-plugin/pull/350
        resp = handler(caller_user_id,
                       arguments,
                       source=source,
                       context=context,
                       client=client)
    except ClientException as err:
        msg = 'ClientError: ' + str(err)
        logger.warning(msg)
        return {'error': {'message': msg, 'data': err.data, 'info': err.info}}

    return {'success': resp}
Example no. 8
def pre_sign_up(event, context):
    with LogLevelContext(logger, logging.INFO):
        logger.info('Handling Cognito PreSignUp event')

    validate_username_format(event)
    validate_user_attribute_lowercase(event, 'email')

    # AWS doesn't let you set preferred_username in this call because the user isn't confirmed yet
    # validate_user_attribute_lowercase(event, 'preferred_username')

    client_id = event['callerContext']['clientId']
    if client_id == COGNITO_TESTING_CLIENT_ID:
        # make sure users created by the testing client are marked as such
        # so they can be identified and deleted later on, if testing cleanup doesn't catch them
        family_name = get_user_attribute(event, 'family_name')
        if family_name != 'TESTER':
            raise CognitoClientException(
                f'Invalid family_name: `{family_name}`')

        # testing client is allowed to optionally auto-confirm & verify users
        # so they can login without receiving an email/text
        if (event['request'].get('clientMetadata') or {}).get('autoConfirmUser'):
            event['response']['autoConfirmUser'] = True
            if get_user_attribute(event, 'email'):
                event['response']['autoVerifyEmail'] = True
            if get_user_attribute(event, 'phone_number'):
                event['response']['autoVerifyPhone'] = True

    return event
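
The get_user_attribute() helper is not shown in these excerpts. Cognito triggers carry the submitted attributes under request.userAttributes, so a plausible sketch is:

def get_user_attribute(event, name):
    "Return the named user attribute from a Cognito trigger event, or None if absent"
    return (event['request'].get('userAttributes') or {}).get(name)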
Example no. 9
def define_auth_challenge(event, context):
    with LogLevelContext(logger, logging.INFO):
        logger.info('Handling Cognito DefineAuthChallenge event')
    # Log the user in, no need to challenge them. Note that
    # custom auth is restricted to only the backend user pool client
    event['response']['issueTokens'] = True
    return event
Example no. 10
def pre_auth(event, context):
    with LogLevelContext(logger, logging.INFO):
        logger.info('Handling Cognito PreAuth event')

    # if the user doesn't exist in the user pool or is unconfirmed
    # cognito appears to create a random uuid as their 'userName'
    validate_user_attribute_lowercase(event, 'email')
    validate_user_attribute_lowercase(event, 'preferred_username')
    return event
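
The validate_user_attribute_lowercase() helper used by the PreSignUp and PreAuth triggers is also not included here; assuming it raises the same CognitoClientException seen above when an attribute contains uppercase characters, a sketch might be:

def validate_user_attribute_lowercase(event, name):
    "Raise if the named user attribute is present and not all lowercase"
    value = (event['request'].get('userAttributes') or {}).get(name)
    if value and value != value.lower():
        raise CognitoClientException(f'User attribute `{name}` is not lowercase: `{value}`')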
Example no. 11
def send_user_notifications(event, context):
    if not USER_NOTIFICATIONS_ENABLED:
        with LogLevelContext(logger, logging.INFO):
            logger.info('User notifications disabled')
        return
    only_usernames = USER_NOTIFICATIONS_ONLY_USERNAMES.split(' ') if USER_NOTIFICATIONS_ONLY_USERNAMES else None
    with LogLevelContext(logger, logging.INFO):
        logger.info(
            f'Preparing to send notifications as needed to users: {only_usernames or "all"}'
        )
    now = pendulum.now('utc')
    total_cnt, success_cnt = card_manager.notify_users(
        now=now, only_usernames=only_usernames)
    with LogLevelContext(logger, logging.INFO):
        logger.info(
            f'User notifications sent successfully: {success_cnt} out of {total_cnt}'
        )
Example no. 12
def process_records(event, context):
    for record in event['Records']:

        name = record['eventName']
        pk = deserialize(record['dynamodb']['Keys']['partitionKey'])
        sk = deserialize(record['dynamodb']['Keys']['sortKey'])
        old_item = {
            k: deserialize(v)
            for k, v in record['dynamodb'].get('OldImage', {}).items()
        }
        new_item = {
            k: deserialize(v)
            for k, v in record['dynamodb'].get('NewImage', {}).items()
        }

        with LogLevelContext(logger, logging.INFO):
            logger.info(f'{name}: `{pk}` / `{sk}` starting processing')

        # we still have some pks in an old (& deprecated) format with more than one item_id in the pk
        pk_prefix, item_id = pk.split('/')[:2]
        sk_prefix = sk.split('/')[0]

        item_kwargs = {
            k: v
            for k, v in {
                'new_item': new_item,
                'old_item': old_item
            }.items() if v
        }
        for func in dispatch.search(pk_prefix, sk_prefix, name, old_item, new_item):
            with LogLevelContext(logger, logging.INFO):
                logger.info(f'{name}: `{pk}` / `{sk}` running: {func}')
            try:
                func(item_id, **item_kwargs)
            except Exception as err:
                logger.exception(str(err))
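
The deserialize() helper converts DynamoDB-typed attribute values from the stream record into plain Python values. Its implementation is not shown here; boto3 ships a TypeDeserializer that does exactly this, so one likely shape is:

from boto3.dynamodb.types import TypeDeserializer

_deserializer = TypeDeserializer()

def deserialize(attribute_value):
    "Convert a DynamoDB-typed value (e.g. {'S': 'foo'}, {'N': '42'}) into a plain python value"
    return _deserializer.deserialize(attribute_value)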
Example no. 13
def handle_appstore_server_notification(event, context):
    with LogLevelContext(logger, logging.INFO):
        logger.info('handle_appstore_server_notification() called')

    body_str = event.get('body')

    if body_str:
        appstore_server_response = json.loads(body_str)
        logger.warning(f'Appstore server response {appstore_server_response}')
        # https://developer.apple.com/documentation/appstoreservernotifications/responsebody
        if appstore_server_response.get('unified_receipt'):
            appstore_manager.add_transaction(appstore_server_response['unified_receipt'])

    return {
        'statusCode': 200,
    }
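
The handler only looks at the unified_receipt field of the notification body (v1 of Apple's App Store Server Notifications, per the linked docs). An illustrative invocation, with a placeholder receipt, would be:

import json

sample_event = {
    'body': json.dumps({
        'notification_type': 'INITIAL_BUY',
        'unified_receipt': {'latest_receipt': 'base64-encoded-receipt-placeholder'},
    }),
}
handle_appstore_server_notification(sample_event, None)  # -> {'statusCode': 200}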
Example no. 14
def create_dating_chat(event, context):
    with LogLevelContext(logger, logging.INFO):
        logger.info('create_dating_chat() called')

    user_id = event['userId']
    chat_id = event['chatId']
    match_user_id = event['matchUserId']
    message_text = event['messageText']

    # Create direct chat with system message
    now = pendulum.now('utc')
    chat = chat_manager.add_direct_chat(chat_id, user_id, match_user_id, now=now)
    chat_message_manager.add_system_message(chat_id, message_text, user_ids=[user_id, match_user_id], now=now)

    chat.refresh_item(strongly_consistent=True)
    return chat.item
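
The event here carries plain top-level keys rather than an AppSync or S3 shape, which suggests a direct invocation. An illustrative payload (ids and message text made up) would be:

sample_event = {
    'userId': 'user-id-1',
    'chatId': 'chat-id-1',
    'matchUserId': 'user-id-2',
    'messageText': 'You matched with each other!',
}
create_dating_chat(sample_event, None)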
Example no. 15
def image_post_uploaded(event, context):
    # we suppress INFO logging, except this message
    with LogLevelContext(logger, logging.INFO):
        logger.info('Handling S3 Object Created (image post uploaded) event')

    # Avoid firing on creation of other images (profile photo, album art)
    # Once images are moved to their new path at {userId}/post/{postId}/image/{size}.jpg,
    # the s3 object created event suffix filter should be expanded to '/image/native.jpg'
    # and this check removed (currently set to '/native.jpg').
    path = urllib.parse.unquote(event['Records'][0]['s3']['object']['key'])
    if 'post' not in path:
        return

    # At this point we have triggered this event because of:
    #   - video post poster images
    #   - image upload for image posts schema version 0
    #   - image upload for image posts schema version 1
    post_id = path.split('/')[2]

    # strongly consistent because we may have just added the post to dynamo
    post = post_manager.get_post(post_id, strongly_consistent=True)
    if not post:
        logger.warning(f'Unable to find post `{post_id}`, ignoring upload')
        return

    if post.type != PostType.IMAGE:
        logger.warning(
            f'Fired for video post `{post_id}` poster image, ignoring')
        return

    if post.status != PostStatus.PENDING:
        logger.warning(
            f'Post `{post_id}` is not in PENDING status: `{post.status}`, ignoring upload'
        )
        return

    try:
        post.process_image_upload()
    except Exception as err:
        post.error(str(err))
        if not isinstance(err, PostException):
            raise err
        logger.warning(str(err))
Example no. 16
def send_amplitude_event(event, context):
    with LogLevelContext(logger, logging.INFO):
        logger.info('send_amplitude_event() called')

    body_str = event.get('body')
    status_code = 200
    if body_str:
        amplitude_body = json.loads(body_str)
        user_id = amplitude_body.get('userId')
        event_name = amplitude_body.get('type')
        event_payload = amplitude_body.get('payload')

        if user_id and event_name and event_payload:
            amplitude_client.attr_log_event(user_id, event_name, event_payload)
        else:
            status_code = 400

    return {
        'statusCode': status_code,
    }
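
send_amplitude_event expects an API-Gateway-style event whose JSON body carries userId, type and payload; a body missing any of those three fields yields a 400. An illustrative request (ids and event name made up):

import json

sample_event = {
    'body': json.dumps({
        'userId': 'example-user-id',
        'type': 'EXAMPLE_EVENT',
        'payload': {'source': 'lambda-smoke-test'},
    }),
}
send_amplitude_event(sample_event, None)  # -> {'statusCode': 200}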
Example no. 17
def custom_message(event, context):
    with LogLevelContext(logger, logging.INFO):
        logger.info('Handling Cognito CustomMessage event',
                    extra={'event': event})

    if event['triggerSource'] in ('CustomMessage_SignUp',
                                  'CustomMessage_ResendCode'):
        user_id = event['userName']
        code = event['request']['codeParameter']
        deepurl = f'https://real.app/confirm/email/{user_id}/{code}'
        event['response']['smsMessage'] = f'Welcome to REAL. Your confirmation code is {code}'
        event['response']['emailSubject'] = 'Welcome to REAL'
        event['response']['emailMessage'] = (
            f'Welcome to REAL. Tap <a href="{deepurl}">here</a> to confirm your account. '
            f'Should you need it, your confirmation code is {code}.')

    if event['triggerSource'] == 'CustomMessage_ForgotPassword':
        user_id = event['userName']
        code = event['request']['codeParameter']
        deepurl = f'https://real.app/confirm/forgot/{user_id}/{code}'
        event['response']['smsMessage'] = f'Your REAL password reset code is {code}'
        event['response']['emailSubject'] = 'Your REAL password reset link'
        event['response']['emailMessage'] = (
            f'Tap <a href="{deepurl}">here</a> to choose a new REAL password. '
            f'Should you need it, your password reset code is {code}.')

    if event['triggerSource'] in ('CustomMessage_UpdateUserAttribute',
                                  'CustomMessage_VerifyUserAttribute'):
        user_id = event['userName']
        code = event['request']['codeParameter']
        deepurl = f'https://real.app/confirm/email/{user_id}/{code}'
        event['response']['smsMessage'] = f'Your REAL confirmation code is {code}'
        event['response']['emailSubject'] = 'Your REAL confirmation link'
        event['response']['emailMessage'] = (
            f'Tap <a href="{deepurl}">here</a> to confirm your email address with REAL. '
            f'Should you need it, your confirmation code is {code}.')

    return event
Example no. 18
def auto_disable_dating(event, context):
    cnt = user_manager.clear_expired_dating_status()
    with LogLevelContext(logger, logging.INFO):
        logger.info(f'Disabled user dating status: {cnt}')
Example no. 19
def update_appstore_subscriptions(event, context):
    cnt = appstore_manager.update_subscriptions()
    with LogLevelContext(logger, logging.INFO):
        logger.info(f'AppStore subscriptions updated: {cnt}')
Example no. 20
def update_user_ages(event, context):
    total_cnt, updated_cnt = user_manager.update_ages()
    with LogLevelContext(logger, logging.INFO):
        logger.info(f'User ages updated: {updated_cnt} out of {total_cnt}')
Example no. 21
def send_dating_matches_notification(event, context):
    cnt = user_manager.send_dating_matches_notification()
    with LogLevelContext(logger, logging.INFO):
        logger.info(f'Sent dating matches push notification: {cnt}')
Example no. 22
def clear_expired_user_subscriptions(event, context):
    cnt = user_manager.clear_expired_subscriptions()
    with LogLevelContext(logger, logging.INFO):
        logger.info(f'Expired user subscriptions cleared: {cnt}')
Example no. 23
        'field': field,
        'callerUserId': caller_user_id,
        'arguments': arguments,
        'source': source,
    }

    client = {}
    if (version := headers.get('x-real-version')):
        client['version'] = version
    if (device := headers.get('x-real-device')):
        client['device'] = device
    if (system := headers.get('x-real-system')):
        client['system'] = system

    # we suppress INFO logging, except this message
    with LogLevelContext(logger, logging.INFO):
        logger.info(f'Handling AppSync GQL resolution of `{field}`',
                    extra={
                        'gql': gql_details,
                        'client': client
                    })

    try:
        resp = handler(caller_user_id, arguments, source, context)
    except ClientException as err:
        msg = 'ClientError: ' + str(err)
        logger.warning(msg)
        return {'error': {'message': msg, 'data': err.data, 'info': err.info}}

    return {'success': resp}
Example no. 24
def garbage_collect_albums(event, context):
    cnt = album_manager.garbage_collect()
    with LogLevelContext(logger, logging.INFO):
        logger.info(f'Albums garbage collected: {cnt}')
Example no. 25
def detect_bad_words(event, context):
    comment_manager.clear_comment_bad_words()
    chat_message_manager.clear_chat_message_bad_words()
    with LogLevelContext(logger, logging.INFO):
        logger.info('Detect bad words in comments & chat messages')