Example #1
0
def save_deal(deal_id):
    '''
    Bookmark a deal for the current user so the user can refer to it
    at a later time.

    Returns a JSON response with 'status' ('success' or 'error') and,
    on error, a 'message' explaining why.
    '''
    msg = {}
    user = get_current_user()
    if str(deal_id) in user.deals_saved:
        msg['status'] = 'error'
        msg['message'] = 'you cannot save the same deal twice'
    else:
        # store the id as a string so the membership check above (which
        # compares against str(deal_id)) actually matches on future calls;
        # the original appended the raw deal_id, so the duplicate guard
        # never fired for non-string ids
        user.deals_saved.append(str(deal_id))
        user.save()
        # noting that current user has bookmarked this deal
        set_user_action_as_completed('save', deal_id, user.sequence_num)
        # invalidate the cached "bookmarked" lists for every sort order
        for sort in sorts:
            r.delete("".join([user.name, '_', 'bookmarked', '_', sort]))
        msg['status'] = 'success'
    return jsonify(msg)
Example #2
0
def vote_deal(deal_id):
    '''
    Register an upvote on a deal by the current user:
        1) increases the deal's num_votes by 1
        2) records the deal id in the user's deals_voted list

    Returns a JSON response with 'status' ('success' or 'error');
    aborts with 404 if the synchronous update fails.
    '''
    msg = {}
    user = get_current_user()
    # guard against double voting by checking the same list we append to
    # below; the original checked user.votes while appending to
    # user.deals_voted, so the duplicate-vote guard inspected the wrong list
    if str(deal_id) in user.deals_voted:
        msg['status'] = 'error'
        msg['message'] = 'you cannot vote for the same deal twice'
        return jsonify(msg)

    try:
        # we want the user to see that his or her vote was counted right
        # away w/o any delay, hence the synchronous updates below duplicate
        # part of celery_tasks.upvote
        user.deals_voted.append(str(deal_id))
        user.save()
        deal_queryset = Deal.objects(id=deal_id)
        deal_queryset.update_one(inc__num_votes=1)
        deal = deal_queryset.first()
        # flushing redis cache to reflect the new vote count
        remove_deal(deal.sequence_num)
        # update redis cache: noting that current user has voted this deal
        set_user_action_as_completed('vote', deal_id, user.sequence_num)
        for sort in sorts:
            r.delete("".join([user.name, '_', 'liked', '_', sort]))
        # update mongodb asynchronously
        celery_tasks.upvote.delay(deal_id, user.id, request.remote_addr)
        msg['status'] = 'success'
        return jsonify(msg)
    except Exception as e:
        # print() works in both py2 and py3, unlike the original `print e`
        print(e)
        abort(404)
Example #3
0
def store_list_of_deals(key, deal_seq_nums):
    '''
    Store a list of deal sequence numbers that corresponds to a deal query
    into redis. This reduces the need to query mongodb for the list of
    deals matching that query, e.g. "get all deals in electronics less
    than 1 wk old and sorted by popularity".

    A simple setnx-based lock guards the update so that concurrent threads
    do not interleave writes to the same key. If the lock cannot be
    acquired, we skip caching entirely; callers fall back to querying
    mongohq (or whatever datastore) instead. It is slower, but the result
    is still correct.
    '''
    if len(deal_seq_nums) == 0:
        # push the sentinel 'None' to record that this key has no deals,
        # so the empty result is cached too
        pipe = r.pipeline()
        pipe.rpush(key, 'None')
        pipe.expire(key, cache_timeout)
        pipe.execute()
    else:
        # setnx acts as a lock to prevent race conditions between threads
        # updating the same key. The lock carries an expiration so that if
        # the lock owner dies before releasing it, redis releases it
        # automatically.
        #
        # NOTE(review): acquiring the lock and setting its expiration are
        # two separate commands, so a thread dying between them would
        # leave a lock with no TTL. An atomic `SET key val NX EX seconds`
        # would close that gap -- confirm the redis client supports it.
        lock_expiration = 3  # seconds -- NOTE(review): the original
        # comment said "3 minutes", but redis EXPIRE takes seconds;
        # confirm the intended TTL (an earlier note mentioned 10 seconds)
        if r.setnx('lock', 'obtained'):  # attempt to obtain lock
            r.expire('lock', lock_expiration)
            try:
                pipe = r.pipeline()
                for num in deal_seq_nums:
                    pipe.rpush(key, num)
                pipe.expire(key, cache_timeout)
                pipe.rpush(set_of_deal_list_keys, key)
                pipe.execute()
            finally:
                # release the lock even if the pipeline raised, instead of
                # holding it until the expiration fires
                r.delete('lock')
Example #4
0
def post_deal():
    '''
    Handle the deal-submission form.

    GET renders the submission form; POST validates it, creates the Deal
    (with an implicit upvote by the author), updates the redis cache and
    the user's records, and returns a JSON payload with a redirect url.
    Validation failures return the per-field errors as JSON; any other
    HTTP method aborts with 404.
    '''
    form = Deal_Form()
    # NOTE(review): the logged-in check is disabled; presumably
    # authentication is enforced elsewhere -- confirm before relying on it
    # if current_user is None:
    #     msg = {"status": "error", "message": "user not logged in"}
    #     return msg
    if request.method == 'GET':
        return render_template('post_deal.html', form=form)
    elif request.method == 'POST':
        if form.validate_on_submit():
            title = request.form.get('title')
            # str methods instead of the deprecated string-module
            # functions (string.strip/string.lower were removed in py3)
            title = title.strip()
            # keeps short_title_length - 1 characters; TODO confirm the
            # off-by-one is intentional
            short_title = title[0:short_title_length - 1]
            short_title = string_to_url_fix(short_title)
            category = request.form.get('categories')
            category = category.lower()
            location = request.form.get('location', None)
            if location == "":
                location = None
            if location and is_amazon_url(location):
                location = gen_amazon_affiliate_url(location)
            description = request.form.get('description', None)
            user = get_current_user()
            ip = request.remote_addr
            new_deal = Deal(title=title, short_title=short_title,
                            location=location, category=category,
                            description=description, author_id=str(user.id),
                            num_votes=1, ip=ip)
            new_deal.save()

            new_deal_id = new_deal.id
            # updating redis cache
            store_deal(new_deal)
            insert_new_deal_into_list(category, new_deal.sequence_num)
            set_user_action_as_completed('vote', new_deal_id, user.sequence_num)
            for sort in sorts:
                r.delete("".join([user.name, '_', 'shared', '_', sort]))
            # updating mongodb or datastore
            user.deals_voted.append(str(new_deal_id))
            user.deals_submitted.append(str(new_deal_id))
            user.save()
            celery_tasks.upvote.delay(new_deal_id, user.id, request.remote_addr)

            # building this deal's url so we can redirect the user
            # (renamed from `next`, which shadowed the builtin)
            next_url = Href('/')
            next_url = next_url('deals', new_deal.sequence_num,
                                new_deal.short_title)
            msg = {'status': 'success', 'redirect': next_url}
            return jsonify(msg)
        else:
            # if the form returns errors, return them to the user via js
            msg = {"status": "error"}
            if form.title.errors:
                msg["title_error"] = form.title.errors[0]
            if form.location.errors:
                msg["location_error"] = form.location.errors[0]
            if form.categories.errors:
                msg["category_error"] = form.categories.errors[0]
            if form.description.errors:
                msg["description_error"] = form.description.errors[0]
            return jsonify(msg)
    else:
        abort(404)
Example #5
0
def store_list_of_deals(key, deal_seq_nums):
    '''
    Store a list of deal sequence numbers that corresponds to a deal query
    into redis. This reduces the need to query mongodb for the list of
    deals matching that query, e.g. "get all deals in electronics less
    than 1 wk old and sorted by popularity".

    A simple setnx-based lock guards the update so that concurrent threads
    do not interleave writes to the same key. If the lock cannot be
    acquired, we skip caching entirely; callers fall back to querying
    mongohq (or whatever datastore) instead. It is slower, but the result
    is still correct.
    '''
    if len(deal_seq_nums) == 0:
        # push the sentinel 'None' to record that this key has no deals,
        # so the empty result is cached too
        pipe = r.pipeline()
        pipe.rpush(key, 'None')
        pipe.expire(key, cache_timeout)
        pipe.execute()
    else:
        # setnx acts as a lock to prevent race conditions between threads
        # updating the same key. The lock carries an expiration so that if
        # the lock owner dies before releasing it, redis releases it
        # automatically.
        #
        # NOTE(review): acquiring the lock and setting its expiration are
        # two separate commands, so a thread dying between them would
        # leave a lock with no TTL. An atomic `SET key val NX EX seconds`
        # would close that gap -- confirm the redis client supports it.
        lock_expiration = 3  # seconds -- NOTE(review): the original
        # comment said "3 minutes", but redis EXPIRE takes seconds;
        # confirm the intended TTL (an earlier note mentioned 10 seconds)
        if r.setnx('lock', 'obtained'):  # attempt to obtain lock
            r.expire('lock', lock_expiration)
            try:
                pipe = r.pipeline()
                for num in deal_seq_nums:
                    pipe.rpush(key, num)
                pipe.expire(key, cache_timeout)
                pipe.rpush(set_of_deal_list_keys, key)
                pipe.execute()
            finally:
                # release the lock even if the pipeline raised, instead of
                # holding it until the expiration fires
                r.delete('lock')