Example #1
0
def cast_vote(user, thing, direction, **data):
    """Register a vote and queue it for processing."""
    update_vote_lookups(user, thing, direction)

    # Payload consumed later by the async queue processor.
    vote_data = dict(
        user_id=user._id,
        thing_fullname=thing._fullname,
        direction=direction,
        date=int(epoch_timestamp(datetime.now(g.tz))),
    )

    ip = getattr(request, "ip", None)
    data['ip'] = ip
    if ip is not None:
        data['org'] = organization_by_ips(ip)
    vote_data['data'] = data

    # Give plugins a chance to attach extra data before the vote is queued.
    hooks.get_hook("vote.get_vote_data").call(
        data=vote_data["data"],
        user=user,
        thing=thing,
        request=request,
        context=c,
    )

    # The vote event will actually be sent from an async queue processor, so
    # we need to pull out the context data at this point
    if not g.running_as_script:
        vote_data["event_data"] = {
            "context": Event.get_context_data(request, c),
            "sensitive": Event.get_sensitive_context_data(request, c),
        }

    amqp.add_item(thing.vote_queue_name, json.dumps(vote_data))
Example #2
0
def push(action, payload):
    """Serialize an action/payload pair and enqueue it on the DFP queue."""
    g.log.debug("%s: queuing action \"%s\"" % (DFP_QUEUE, action))
    body = {
        "action": action,
        "payload": payload,
    }
    amqp.add_item(DFP_QUEUE, json.dumps(body))
Example #3
0
def add_to_subreddit_query_q(link):
    """Queue a link for subreddit query updates, sharding if enabled."""
    if not g.shard_subreddit_query_queues:
        queue_name = "subreddit_query_q"
    else:
        # spread load across ten queues keyed by subreddit id
        queue_name = "subreddit_query_%s_q" % (link.sr_id % 10)
    amqp.add_item(queue_name, link._fullname)
Example #4
0
def push(action, payload):
    """Queue an action and its payload for the DFP processor."""
    g.log.debug("%s: queuing action \"%s\"" % (DFP_QUEUE, action))
    amqp.add_item(DFP_QUEUE, json.dumps({
        "action": action,
        "payload": payload,
    }))
Example #5
0
def add_to_author_query_q(link):
    """Queue a link for author query updates, sharding if enabled."""
    if g.shard_author_query_queues:
        # ten shards keyed by author id
        shard = link.author_id % 10
        target = "author_query_%s_q" % shard
    else:
        target = "author_query_q"
    amqp.add_item(target, link._fullname)
Example #6
0
def changed(things):
    """Indicate to solrsearch that a given item should be updated"""
    # accepts a single thing or an iterable of things
    for item in tup(things):
        amqp.add_item('searchchanges_q',
                      item._fullname,
                      message_id=item._fullname)
Example #7
0
def new_comment(comment, inbox_rels):
    """Update cached comment listings and queue the comment for processing.

    `inbox_rels` are the Inbox relations (if any) created for the comment's
    recipients; each is inserted into the owner's inbox listing and the
    owner is flagged as having unread items.
    """
    author = Account._byID(comment.author_id)
    job = [get_comments(author, "new", "all")]
    if comment._deleted:
        job.append(get_all_comments())
        add_queries(job, delete_items=comment)
    else:
        # if comment._spam:
        #    sr = Subreddit._byID(comment.sr_id)
        #    job.append(get_spam_comments(sr))
        add_queries(job, insert_items=comment)
        amqp.add_item("new_comment", comment._fullname)
        # without an amqp broker the comment tree must be updated inline
        if not g.amqp_host:
            l = Link._byID(comment.link_id, data=True)
            add_comment_tree(comment, l)

    # note that get_all_comments() is updated by the amqp process
    # r2.lib.db.queries.run_new_comments

    if inbox_rels:
        for inbox_rel in tup(inbox_rels):
            inbox_owner = inbox_rel._thing1
            if inbox_rel._name == "inbox":
                add_queries([get_inbox_comments(inbox_owner)], insert_items=inbox_rel)
            else:
                # non-"inbox" relations are treated as self replies
                add_queries([get_inbox_selfreply(inbox_owner)], insert_items=inbox_rel)
            set_unread(comment, inbox_owner, True)
Example #8
0
def queue_vote(user, thing, dir, ip, organic=False, cheater=False, store=True):
    """Cache the vote for immediate UI feedback and queue it for storage."""
    # set the vote in memcached so the UI gets updated immediately
    cached = '1' if dir is True else '0' if dir is None else '-1'
    g.cache.set(prequeued_vote_key(user, thing), cached)

    # queue the vote to be stored unless told not to
    if not store:
        return
    if not g.amqp_host:
        handle_vote(user, thing, dir, ip, organic)
        return

    fastlane_links = g.live_config["fastlane_links"]
    if isinstance(thing, Link):
        if thing._id36 in fastlane_links:
            qname = vote_fastlane_q
        else:
            qname = vote_link_q
    elif isinstance(thing, Comment):
        # a comment is fastlaned when its parent link is
        if utils.to36(thing.link_id) in fastlane_links:
            qname = vote_fastlane_q
        else:
            qname = vote_comment_q
    else:
        log.warning(
            "%s tried to vote on %r. that's not a link or comment!",
            user, thing)
        return

    payload = pickle.dumps(
        (user._id, thing._fullname, dir, ip, organic, cheater))
    amqp.add_item(qname, payload)
Example #9
0
def new_comment(comment, inbox_rels):
    """Update the author's cached comment listings and queue the comment.

    `inbox_rels` are the Inbox relations (if any) created for recipients;
    each is inserted into the owner's inbox listing and the owner is
    flagged as having unread items.
    """
    author = Account._byID(comment.author_id)
    job = [get_comments(author, 'new', 'all')]
    if comment._deleted:
        job.append(get_all_comments())
        add_queries(job, delete_items=comment)
    else:
        #if comment._spam:
        #    sr = Subreddit._byID(comment.sr_id)
        #    job.append(get_spam_comments(sr))
        add_queries(job, insert_items=comment)
        amqp.add_item('new_comment', comment._fullname)
        # without an amqp broker the comment tree must be updated inline
        if not g.amqp_host:
            l = Link._byID(comment.link_id, data=True)
            add_comment_tree(comment, l)

    # note that get_all_comments() is updated by the amqp process
    # r2.lib.db.queries.run_new_comments

    if inbox_rels:
        for inbox_rel in tup(inbox_rels):
            inbox_owner = inbox_rel._thing1
            if inbox_rel._name == "inbox":
                add_queries([get_inbox_comments(inbox_owner)],
                            insert_items=inbox_rel)
            else:
                # non-"inbox" relations are treated as self replies
                add_queries([get_inbox_selfreply(inbox_owner)],
                            insert_items=inbox_rel)
            set_unread(comment, inbox_owner, True)
Example #10
0
def new_comment(comment, inbox_rels):
    """Update cached comment listings for a new or deleted comment.

    Spam comments are additionally inserted into the subreddit's spam
    listing. `inbox_rels` are the Inbox relations created for recipients.
    """
    author = Account._byID(comment.author_id)
    job = [get_comments(author, 'new', 'all')]
    if comment._deleted:
        job.append(get_all_comments())
        add_queries(job, delete_items = comment)
    else:
        if comment._spam:
            sr = Subreddit._byID(comment.sr_id)
            job.append(get_spam_comments(sr))
        add_queries(job, insert_items = comment)
        amqp.add_item('new_comment', comment._fullname)
        # without an amqp broker the comment tree must be updated inline
        if not g.amqp_host:
            add_comment_tree([comment])

    # note that get_all_comments() is updated by the amqp process
    # r2.lib.db.queries.run_new_comments (to minimise lock contention)

    if inbox_rels:
        for inbox_rel in tup(inbox_rels):
            inbox_owner = inbox_rel._thing1
            if inbox_rel._name == "inbox":
                add_queries([get_inbox_comments(inbox_owner)],
                            insert_items = inbox_rel)
            else:
                # non-"inbox" relations are treated as self replies
                add_queries([get_inbox_selfreply(inbox_owner)],
                            insert_items = inbox_rel)
            set_unread(comment, inbox_owner, True)
def deactivate_orphaned_flight(az_flight_id):
    """Queue deactivation of an orphaned Adzerk flight."""
    g.log.debug("queuing deactivate_orphaned_flight %d" % az_flight_id)

    body = json.dumps({
        "action": "deactivate_orphaned_flight",
        "flight": az_flight_id,
    })
    amqp.add_item("adzerk_q", body)
Example #12
0
def queue_vote(user, thing, dir, ip, organic = False,
               cheater = False, store = True):
    """Cache the vote immediately and queue it for persistent storage.

    Votes on "fastlane" links (and their comments) go to a dedicated
    queue; anything that is not a Link or Comment is rejected with a
    warning.
    """
    # set the vote in memcached so the UI gets updated immediately
    key = prequeued_vote_key(user, thing)
    g.cache.set(key, '1' if dir is True else '0' if dir is None else '-1')
    # queue the vote to be stored unless told not to
    if store:
        if g.amqp_host:
            if isinstance(thing, Link):
                if thing._id36 in g.live_config["fastlane_links"]:
                    qname = vote_fastlane_q
                else:
                    qname = vote_link_q

            elif isinstance(thing, Comment):
                # a comment is fastlaned when its parent link is
                if utils.to36(thing.link_id) in g.live_config["fastlane_links"]:
                    qname = vote_fastlane_q
                else:
                    qname = vote_comment_q
            else:
                log.warning("%s tried to vote on %r. that's not a link or comment!",
                            user, thing)
                return

            amqp.add_item(qname,
                          pickle.dumps((user._id, thing._fullname,
                                        dir, ip, organic, cheater)))
        else:
            # no broker configured: process the vote synchronously
            handle_vote(user, thing, dir, ip, organic)
def run(verbose=True, sleep_time = 60, num_items = 1):
    """Poll for links past the saved cursor and enqueue them for indextank.

    Advances the "indextank_cursor" cache key after each batch and runs
    forever, sleeping `sleep_time` seconds between batches.

    Raises ValueError if the cursor has never been initialized.
    """
    key = "indextank_cursor"
    cursor = g.cache.get(key)
    if cursor is None:
        # refuse to run without a starting point
        raise ValueError("%s is not set!" % key)
    cursor = int(cursor)

    while True:
        if verbose:
            print "Looking for %d items with _id < %d" % (num_items, cursor)
        q = Link._query(sort = desc('_id'),
                        limit = num_items)
        q._after(Link._byID(cursor))
        last_date = None
        for item in q:
            cursor = item._id
            last_date = item._date
            # NOTE(review): transient delivery — messages may be lost on
            # broker restart; presumably acceptable for index freshness
            amqp.add_item('indextank_changes', item._fullname,
                      message_id = item._fullname,
                      delivery_mode = amqp.DELIVERY_TRANSIENT)
        g.cache.set(key, cursor)

        if verbose:
            if last_date:
                last_date = last_date.strftime("%Y-%m-%d")
            print ("Just enqueued %d items. New cursor=%s (%s). Sleeping %d seconds."
                   % (num_items, cursor, last_date, sleep_time))

        sleep(sleep_time)
Example #14
0
def cast_vote(user, thing, direction, **data):
    """Register a vote and queue it for processing."""
    update_vote_lookups(user, thing, direction)

    # payload consumed by the async queue processor
    vote_data = {
        "user_id": user._id,
        "thing_fullname": thing._fullname,
        "direction": direction,
        "date": int(epoch_timestamp(datetime.now(g.tz))),
    }

    # record the vote's origin; ip may be absent outside a web request
    data['ip'] = getattr(request, "ip", None)
    if data['ip'] is not None:
        data['org'] = organization_by_ips(data['ip'])
    vote_data['data'] = data

    # give plugins a chance to attach extra vote data before queueing
    hooks.get_hook("vote.get_vote_data").call(
        data=vote_data["data"],
        user=user,
        thing=thing,
        request=request,
        context=c,
    )

    # The vote event will actually be sent from an async queue processor, so
    # we need to pull out the context data at this point
    if not g.running_as_script:
        vote_data["event_data"] = {
            "context": Event.get_context_data(request, c),
            "sensitive": Event.get_sensitive_context_data(request, c),
        }

    amqp.add_item(thing.vote_queue_name, json.dumps(vote_data))
Example #15
0
def log_text(classification, text=None, level="info"):
    """Send some log text to log_q for appearance in the streamlog.

    This is deprecated. All logging should be done through python's stdlib
    logging library.

    """

    from r2.lib import amqp
    from r2.lib.filters import _force_utf8

    if text is None:
        text = classification

    if level not in ('debug', 'info', 'warning', 'error'):
        print "What kind of loglevel is %s supposed to be?" % level
        level = 'error'

    d = _default_dict()
    d['type'] = 'text'
    d['level'] = level
    d['text'] = _force_utf8(text)
    d['classification'] = classification

    amqp.add_item(QUEUE_NAME, cPickle.dumps(d))
Example #16
0
def log_text(classification, text=None, level="info"):
    """Send some log text to log_q for appearance in the streamlog.

    This is deprecated. All logging should be done through python's stdlib
    logging library.

    """

    from r2.lib import amqp
    from r2.lib.filters import _force_utf8

    # with no text, the classification doubles as the message
    if text is None:
        text = classification

    # unknown levels are coerced to "error" after a complaint
    if level not in ("debug", "info", "warning", "error"):
        print "What kind of loglevel is %s supposed to be?" % level
        level = "error"

    d = _default_dict()
    d["type"] = "text"
    d["level"] = level
    d["text"] = _force_utf8(text)
    d["classification"] = classification

    amqp.add_item(QUEUE_NAME, cPickle.dumps(d))
Example #17
0
def run(verbose=True, sleep_time=60, num_items=1):
    """Poll for links past the saved cursor and enqueue them for indextank.

    Advances the "indextank_cursor" cache key after each batch and runs
    forever, sleeping `sleep_time` seconds between batches.

    Raises ValueError if the cursor has never been initialized.
    """
    key = "indextank_cursor"
    cursor = g.cache.get(key)
    if cursor is None:
        # refuse to run without a starting point
        raise ValueError("%s is not set!" % key)
    cursor = int(cursor)

    while True:
        if verbose:
            print "Looking for %d items with _id < %d" % (num_items, cursor)
        q = Link._query(sort=desc('_id'), limit=num_items)
        q._after(Link._byID(cursor))
        last_date = None
        for item in q:
            cursor = item._id
            last_date = item._date
            # NOTE(review): transient delivery — messages may be lost on
            # broker restart; presumably acceptable for index freshness
            amqp.add_item('indextank_changes',
                          item._fullname,
                          message_id=item._fullname,
                          delivery_mode=amqp.DELIVERY_TRANSIENT)
        g.cache.set(key, cursor)

        if verbose:
            if last_date:
                last_date = last_date.strftime("%Y-%m-%d")
            print(
                "Just enqueued %d items. New cursor=%s (%s). Sleeping %d seconds."
                % (num_items, cursor, last_date, sleep_time))

        sleep(sleep_time)
Example #18
0
def add_to_subreddit_query_q(link):
    """Enqueue a link onto its subreddit's query-update queue."""
    if g.shard_subreddit_query_queues:
        # ten shards keyed by subreddit id
        shard = link.sr_id % 10
        target = "subreddit_query_%s_q" % shard
    else:
        target = "subreddit_query_q"
    amqp.add_item(target, link._fullname)
Example #19
0
def changed(things):
    """Indicate to solrsearch that a given item should be updated"""
    # accepts a single thing or an iterable of things
    for item in tup(things):
        amqp.add_item('searchchanges_q',
                      item._fullname,
                      message_id=item._fullname,
                      delivery_mode=amqp.DELIVERY_TRANSIENT)
Example #20
0
def add_to_author_query_q(link):
    """Enqueue a link onto its author's query-update queue."""
    if not g.shard_author_query_queues:
        queue_name = "author_query_q"
    else:
        # spread load across ten queues keyed by author id
        queue_name = "author_query_%s_q" % (link.author_id % 10)
    amqp.add_item(queue_name, link._fullname)
Example #21
0
    def POST_invite_contributor(self, form, jquery, user, type_and_perms):
        """Invite another user to contribute to the thread.

        Requires the `manage` permission for this thread.  If the recipient
        accepts the invite, they will be granted the permissions specified.

        See also: [/api/live/*thread*/accept_contributor_invite]
        (#POST_api_live_{thread}_accept_contributor_invite), and
        [/api/live/*thread*/rm_contributor_invite]
        (#POST_api_live_{thread}_rm_contributor_invite).

        """
        # validators attach errors to the form; bail on the first bad field
        if form.has_errors("name", errors.USER_DOESNT_EXIST,
                                   errors.NO_USER):
            return
        if form.has_errors("type", errors.INVALID_PERMISSION_TYPE):
            return
        if form.has_errors("permissions", errors.INVALID_PERMISSIONS):
            return

        type, permissions = type_and_perms

        # don't invite someone who is already invited or contributing
        invites = LiveUpdateContributorInvitesByEvent.get_all(c.liveupdate_event)
        if user._id in invites or user._id in c.liveupdate_event.contributors:
            c.errors.add(errors.LIVEUPDATE_ALREADY_CONTRIBUTOR, field="name")
            form.has_errors("name", errors.LIVEUPDATE_ALREADY_CONTRIBUTOR)
            return

        # cap the number of outstanding invites per event
        if len(invites) >= g.liveupdate_invite_quota:
            c.errors.add(errors.LIVEUPDATE_TOO_MANY_INVITES, field="name")
            form.has_errors("name", errors.LIVEUPDATE_TOO_MANY_INVITES)
            return

        LiveUpdateContributorInvitesByEvent.create(
            c.liveupdate_event, user, permissions)
        queries.add_contributor(c.liveupdate_event, user)

        # TODO: make this i18n-friendly when we have such a system for PMs
        send_system_message(
            user,
            subject="invitation to contribute to " + c.liveupdate_event.title,
            body=INVITE_MESSAGE % {
                "title": c.liveupdate_event.title,
                "url": "/live/" + c.liveupdate_event._id,
            },
        )

        # notify async consumers about the new invitation
        amqp.add_item("new_liveupdate_contributor", json.dumps({
            "event_fullname": c.liveupdate_event._fullname,
            "inviter_fullname": c.user._fullname,
            "invitee_fullname": user._fullname,
        }))

        # add the user to the table
        contributor = LiveUpdateContributor(user, permissions)
        user_row = pages.InvitedLiveUpdateContributorTableItem(
            contributor, c.liveupdate_event, editable=True)
        jquery(".liveupdate_contributor_invite-table").show(
            ).find("table").insert_table_rows(user_row)
Example #22
0
def changed(things):
    """Indicate to solrsearch that a given item should be updated"""
    items = tup(things)
    for item in items:
        # transient delivery: the search consumer tolerates missed updates
        amqp.add_item(
            'searchchanges_q',
            item._fullname,
            message_id=item._fullname,
            delivery_mode=amqp.DELIVERY_TRANSIENT,
        )
Example #23
0
def queue_modmail_email(message):
    """Queue a "new_message" email notification for a modmail message."""
    body = json.dumps({
        "event": "new_message",
        "message_id36": message._id36,
    })
    amqp.add_item("modmail_email_q", body)
Example #24
0
def deactivate_overdelivered(link, campaign):
    """Queue deactivation of an overdelivered campaign with Adzerk."""
    g.log.debug('queuing deactivate_overdelivered %s %s' % (link, campaign))
    payload = {
        'action': 'deactivate_overdelivered',
        'link': link._fullname,
        'campaign': campaign._fullname,
    }
    amqp.add_item('adzerk_q', json.dumps(payload))
Example #25
0
    def update_search_index(self, boost_only=False):
        """Queue this object for reindexing in search."""
        payload = {'fullname': self._fullname}
        if boost_only:
            payload['boost_only'] = True

        amqp.add_item('search_changes', pickle.dumps(payload),
                      message_id=self._fullname,
                      delivery_mode=amqp.DELIVERY_TRANSIENT)
Example #26
0
def queue_modmail_email(message):
    """Queue a "new_message" email notification for a modmail message."""
    amqp.add_item(
        "modmail_email_q",
        json.dumps({
            "event": "new_message",
            "message_id36": message._id36,
        }),
    )
def _generate_promo_report(campaign):
    """Queue generation of a lifetime report for a promo campaign."""
    g.log.info("queuing report for campaign %s" % campaign._fullname)
    body = json.dumps({
        "action": "generate_lifetime_campaign_report",
        "campaign_id": campaign._id,
    })
    amqp.add_item("adzerk_reporting_q", body)
def _generate_link_report(link):
    """Queue generation of a daily report for a promoted link."""
    g.log.info("queuing report for link %s" % link._fullname)
    body = json.dumps({
        "action": "generate_daily_link_report",
        "link_id": link._id,
    })
    amqp.add_item("adzerk_reporting_q", body)
Example #29
0
    def update_search_index(self, boost_only=False):
        """Queue this object for reindexing in search.

        boost_only: presumably tells the indexer that only score boosts
            need recomputing, not the full document — confirm with the
            search consumer.
        """
        msg = {'fullname': self._fullname}
        if boost_only:
            msg['boost_only'] = True

        # NOTE(review): transient delivery — message may be lost on broker
        # restart; presumably acceptable for search freshness
        amqp.add_item('search_changes', pickle.dumps(msg),
                      message_id=self._fullname,
                      delivery_mode=amqp.DELIVERY_TRANSIENT)
def deactivate_overdelivered(link, campaign):
    """Queue deactivation of an overdelivered campaign with Adzerk."""
    g.log.debug('queuing deactivate_overdelivered %s %s' % (link, campaign))
    msg = json.dumps({
        'action': 'deactivate_overdelivered',
        'link': link._fullname,
        'campaign': campaign._fullname,
    })
    amqp.add_item('adzerk_q', msg)
def update_adzerk(link, campaign=None):
    """Queue an Adzerk sync for a promoted link and optional campaign."""
    g.log.debug('queuing update_adzerk %s %s' % (link, campaign))
    payload = {
        'action': 'update_adzerk',
        'link': link._fullname,
        # campaign is optional; send None when syncing the link alone
        'campaign': campaign._fullname if campaign else None,
    }
    amqp.add_item('adzerk_q', json.dumps(payload))
def update_adzerk(link, campaign):
    """Queue an Adzerk sync for a promoted link's campaign."""
    g.log.debug('queuing update_adzerk %s %s' % (link, campaign))
    amqp.add_item('adzerk_q', json.dumps({
        'action': 'update_adzerk',
        'link': link._fullname,
        'campaign': campaign._fullname,
    }))
Example #33
0
def Run(offset=0):
    """reddit-job-update_promos: Intended to be run hourly to pull in
    scheduled changes to ads

    """
    # NOTE(review): charges the next hour's offset as well as the current
    # one — presumably to prepare upcoming ads; confirm with charge_pending
    charge_pending(offset=offset + 1)
    charge_pending(offset=offset)
    amqp.add_item(UPDATE_QUEUE, json.dumps(QUEUE_ALL),
                  delivery_mode=amqp.DELIVERY_TRANSIENT)
Example #34
0
    def spam(self,
             things,
             auto=True,
             moderator_banned=False,
             banner=None,
             date=None,
             train_spam=True,
             **kw):
        """Mark things as spam/removed and record ban metadata on them.

        auto: True when the removal was automatic rather than an explicit
            human action.
        moderator_banned: True when a moderator (vs. an admin) removed it.
        banner: the remover's name, or a dict mapping thing fullname ->
            name when things have different removers.
        date: timestamp recorded as banned_at (defaults to now).
        train_spam: whether this removal should count as spam training;
            only affects the audit note recorded in ban_info here.
        """
        from r2.lib.db import queries

        all_things = tup(things)
        # things transitioning from ham to spam, used for author bookkeeping
        new_things = [x for x in all_things if not x._spam]

        Report.accept(all_things, True)

        for t in all_things:
            # promotions are never marked as spam
            if getattr(t, "promoted", None) is not None:
                g.log.debug("Refusing to mark promotion %r as spam" % t)
                continue

            # classify this action for the ban_info audit note
            if not t._spam and train_spam:
                note = 'spam'
            elif not t._spam and not train_spam:
                note = 'remove not spam'
            elif t._spam and not train_spam:
                note = 'confirm spam'
            elif t._spam and train_spam:
                note = 'reinforce spam'

            t._spam = True

            if moderator_banned:
                t.verdict = 'mod-removed'
            elif not auto:
                t.verdict = 'admin-removed'

            # copy so we don't mutate a dict shared with another object
            ban_info = copy(getattr(t, 'ban_info', {}))
            if isinstance(banner, dict):
                ban_info['banner'] = banner[t._fullname]
            else:
                ban_info['banner'] = banner
            ban_info.update(auto=auto,
                            moderator_banned=moderator_banned,
                            banned_at=date or datetime.now(g.tz),
                            **kw)
            ban_info['note'] = note

            t.ban_info = ban_info
            t._commit()

            if auto:
                amqp.add_item("auto_removed", t._fullname)

        if not auto:
            self.author_spammer(new_things, True)
            self.set_last_sr_ban(new_things)

        queries.ban(all_things, filtered=auto)
Example #35
0
def send_broadcast(namespace, message):
    """Broadcast an object to all WebSocket listeners in a namespace.

    The message will be encoded as a JSON object before being sent to the
    client.

    """
    encoded = json.dumps(message)
    amqp.add_item(routing_key=namespace,
                  body=encoded,
                  exchange=_WEBSOCKET_EXCHANGE)
Example #36
0
def deactivate_orphaned_flight(az_flight_id):
    """Queue deactivation of an orphaned Adzerk flight by its Adzerk id."""
    g.log.debug("queuing deactivate_orphaned_flight %d" % az_flight_id)

    amqp.add_item(
        "adzerk_q",
        json.dumps({
            "action": "deactivate_orphaned_flight",
            "flight": az_flight_id,
        }))
Example #37
0
def send_broadcast(namespace, type, payload):
    """Broadcast an object to all WebSocket listeners in a namespace.

    The message type is used to differentiate between different kinds of
    payloads that may be sent. The payload will be encoded as a JSON object
    before being sent to the client.

    """
    body = json.dumps({"type": type, "payload": payload})
    amqp.add_item(routing_key=namespace,
                  body=body,
                  exchange=_WEBSOCKET_EXCHANGE,
                  send_stats=False)
def _generate_link_reports(items):
    """Queue daily report generation for a batch of links and campaigns."""
    links = items["links"]
    campaigns = items["campaigns"]

    fullnames = ",".join(l._fullname for l in links)
    g.log.info("queuing report for link %s" % fullnames)
    body = json.dumps({
        "action": "generate_daily_link_reports",
        "link_ids": [l._id for l in links],
        "campaign_ids": [c._id for c in campaigns],
    })
    amqp.add_item("adzerk_reporting_q", body)
Example #39
0
def changed(things, boost_only=False):
    """Indicate to search that a given item should be updated in the index"""
    # accepts a single thing or an iterable of things
    for item in tup(things):
        body = {'fullname': item._fullname}
        if boost_only:
            body['boost_only'] = True

        amqp.add_item('search_changes', pickle.dumps(body),
                      message_id=item._fullname,
                      delivery_mode=amqp.DELIVERY_TRANSIENT)
Example #40
0
def changed(things, boost_only=False):
    """Indicate to search that a given item should be updated in the index"""
    # accepts a single thing or an iterable of things
    for thing in tup(things):
        msg = {'fullname': thing._fullname}
        if boost_only:
            # presumably tells the indexer only boosts need recomputing —
            # confirm with the search consumer
            msg['boost_only'] = True

        amqp.add_item('search_changes', pickle.dumps(msg),
                      message_id = thing._fullname,
                      delivery_mode = amqp.DELIVERY_TRANSIENT)
Example #41
0
def queue_vote(user, thing, dir, ip, organic=False, cheater=False, store=True):
    """Cache the vote for immediate UI feedback and queue it for storage."""
    # set the vote in memcached so the UI gets updated immediately
    if dir is True:
        cached = "1"
    elif dir is None:
        cached = "0"
    else:
        cached = "-1"
    g.cache.set(prequeued_vote_key(user, thing), cached)

    # queue the vote to be stored unless told not to
    if store:
        if g.amqp_host:
            payload = (user._id, thing._fullname, dir, ip, organic, cheater)
            amqp.add_item("register_vote_q", pickle.dumps(payload))
        else:
            # no broker configured: process the vote synchronously
            handle_vote(user, thing, dir, ip, organic)
Example #42
0
def new_comment(comment, inbox_rels):
    """Update cached comment queries for a new or deleted comment.

    Deleted comments are removed from listings; live ones are inserted
    and queued for async processing. `inbox_rels` are the Inbox relations
    created for recipients ("inbox" for replies, "selfreply" for replies
    to one's own link).
    """
    author = Account._byID(comment.author_id)
    job = [
        get_comments(author, 'new', 'all'),
        get_comments(author, 'top', 'all'),
        get_comments(author, 'controversial', 'all')
    ]

    sr = Subreddit._byID(comment.sr_id)

    with CachedQueryMutator() as m:
        if comment._deleted:
            job_key = "delete_items"
            job.append(get_sr_comments(sr))
            m.delete(get_all_comments(), [comment])
        else:
            job_key = "insert_items"
            if comment._spam:
                m.insert(get_spam_comments(sr), [comment])
            if was_spam_filtered(comment):
                m.insert(get_spam_filtered_comments(sr), [comment])

            # NOTE(review): comments on fastlane links go to a dedicated
            # queue — presumably to isolate hot threads; confirm
            if utils.to36(comment.link_id) in g.live_config["fastlane_links"]:
                amqp.add_item('new_fastlane_comment', comment._fullname)
            else:
                amqp.add_item('new_comment', comment._fullname)

            # without an amqp broker the comment tree must be updated inline
            if not g.amqp_host:
                add_comment_tree([comment])

        job_dict = {job_key: comment}
        add_queries(job, **job_dict)

        # note that get_all_comments() is updated by the amqp process
        # r2.lib.db.queries.run_new_comments (to minimise lock contention)

        if inbox_rels:
            for inbox_rel in tup(inbox_rels):
                inbox_owner = inbox_rel._thing1
                if inbox_rel._name == "inbox":
                    query = get_inbox_comments(inbox_owner)
                elif inbox_rel._name == "selfreply":
                    query = get_inbox_selfreply(inbox_owner)
                else:
                    raise ValueError("wtf is " + inbox_rel._name)

                # deleted comments clear the inbox entry instead of adding it
                if not comment._deleted:
                    m.insert(query, [inbox_rel])
                else:
                    m.delete(query, [inbox_rel])

                set_unread(comment,
                           inbox_owner,
                           unread=not comment._deleted,
                           mutator=m)
Example #43
0
def queue_blocked_muted_email(sr, parent, sender_email, incoming_email_id):
    """Queue a "blocked_muted" modmail notification email."""
    body = json.dumps({
        "event": "blocked_muted",
        "subreddit_id36": sr._id36,
        "parent_id36": parent._id36,
        "sender_email": sender_email,
        "incoming_email_id": incoming_email_id,
    })
    amqp.add_item("modmail_email_q", body)
Example #44
0
def queue_blocked_muted_email(sr, parent, sender_email, incoming_email_id):
    """Queue a "blocked_muted" modmail notification email."""
    amqp.add_item(
        "modmail_email_q",
        json.dumps({
            "event": "blocked_muted",
            "subreddit_id36": sr._id36,
            "parent_id36": parent._id36,
            "sender_email": sender_email,
            "incoming_email_id": incoming_email_id,
        }),
    )
Example #45
0
def log_exception(e, e_type, e_value, e_traceback):
    """Queue an exception record for the streamlog."""
    record = _default_dict()

    record['type'] = 'exception'
    record['traceback'] = traceback.extract_tb(e_traceback)
    record['exception_type'] = e.__class__.__name__
    # cap the description so huge exception strings don't bloat the queue
    record['exception_desc'] = str(e)[:10000]

    amqp.add_item(Q, pickle.dumps(record))
Example #46
0
def notify_mention(user, thing):
    """Record a username mention: inbox relation, queue item, cached queries."""
    try:
        inbox_rel = Inbox._add(user, thing, "mention")
        amqp.add_item("new_mention", inbox_rel._fullname)
    except CreationError:
        # this mention was already inserted, ignore it
        g.log.error("duplicate mention for (%s, %s)", user, thing)
        return

    with query_cache.CachedQueryMutator() as mutator:
        mutator.insert(queries.get_inbox_comment_mentions(user), [inbox_rel])
        queries.set_unread(thing, user, unread=True, mutator=mutator)
Example #47
0
    def unspam(self,
               things,
               moderator_unbanned=True,
               unbanner=None,
               train_spam=True,
               insert=True):
        """Approve things (clear their spam flag) and record who did it.

        moderator_unbanned: True when a moderator (vs. an admin) approved.
        unbanner: name of the approver, recorded in ban_info.
        train_spam: NOTE(review): accepted but never read in this body —
            confirm whether it should influence anything.
        insert: passed through to queries.unban.
        """
        from r2.lib.db import queries

        things = tup(things)

        # We want to make unban-all moderately efficient, so when
        # mass-unbanning, we're going to skip the code below on links that
        # are already not banned.  However, when someone manually clicks
        # "approve" on an unbanned link, and there's just one, we want do
        # want to run the code below. That way, the little green checkmark
        # will have the right mouseover details, the reports will be
        # cleared, etc.

        if len(things) > 1:
            things = [x for x in things if x._spam]

        Report.accept(things, False)
        # count spam messages returning to each recipient's inbox
        inbox_adjustment_counter = Counter()
        for t in things:
            # copy so we don't mutate a dict shared with another object
            ban_info = copy(getattr(t, 'ban_info', {}))
            ban_info['unbanned_at'] = datetime.now(g.tz)
            if unbanner:
                ban_info['unbanner'] = unbanner
            if ban_info.get('reset_used', None) == None:
                ban_info['reset_used'] = False
            else:
                ban_info['reset_used'] = True
            t.ban_info = ban_info

            if isinstance(t, Message) and t._spam and t.to_id:
                inbox_adjustment_counter[t.to_id] += 1
            t._spam = False

            if moderator_unbanned:
                t.verdict = 'mod-approved'
            else:
                t.verdict = 'admin-approved'
            t._commit()

            # notify async consumers of the approval
            if isinstance(t, Comment):
                amqp.add_item("approved_comment", t._fullname)
            elif isinstance(t, Link):
                amqp.add_item("approved_link", t._fullname)

        self.author_spammer(things, False)
        self.set_last_sr_ban(things)
        queries.unban(things, insert)
        self.adjust_inbox_counts(inbox_adjustment_counter)
Example #48
0
def valid_login(name, password):
    """Validate a login against LDAP, auto-registering the account locally.

    NOTE(review): this code has several smells worth confirming before
    relying on it — bare `except:` clauses that swallow all errors, a
    hard-coded LDAP host and DN template, a magic `la == 97` bind-success
    check (presumably ldap.RES_BIND), and the local password being forced
    to the literal "******" for LDAP-backed accounts.
    """
    try:
        name = name.lower()
        # LDAP CNs use spaces where local usernames use dots
        nameq = name.replace("."," ")
        con = ldap.initialize('LDAP://10.5.30.30')
        dn="CN="+nameq+",OU=Employees,OU=SH,DC=ap,DC=akqa,DC=local"
        pw= password
        la,lb = con.simple_bind_s(dn,pw)
    except :
        # any failure (bad credentials, unreachable server) leaves la = 1,
        # which fails the la == 97 check below
        la = 1      
    try:             
        if  la==97:
            #name = "jnraingame"
            # local accounts backed by LDAP all store this placeholder
            password = "******"
            try:
                nametemp = name
                name = name.replace(".","_")        
                a = Account._by_name(name)
            except NotFound:
                # first successful LDAP login: create the local account
                email = nametemp+"@akqa.com"
                user = register(name, password, request.ip)
                #VRatelimit.ratelimit(rate_ip = True, prefix = "rate_register_")   
                to_set = {}
                seconds = g.RATELIMIT*60
                expire_time = datetime.now(g.tz) + timedelta(seconds = seconds)
                to_set['user' + str(user._id36)] = expire_time
                g.cache.set_multi(to_set, prefix = "rate_register_", time = seconds)        
                to_set['ip' + str(request.ip)] = expire_time
                g.cache.set_multi(to_set, prefix = "rate_register_", time = seconds)                
                if email:
                    user.email = email
                user.pref_lang = c.lang
                if c.content_langs == 'all':
                    user.pref_content_langs = 'all'
                else:
                    langs = list(c.content_langs)
                    langs.sort()
                    user.pref_content_langs = tuple(langs)
                d = c.user._dirties.copy()
                user._commit()       
                amqp.add_item('new_account', user._fullname)
                c.user = user   
           

        a = Account._by_name(name)
    except :
        # NOTE(review): swallows every error, including missing accounts
        return False


    if not a._loaded: a._load()
    if a._banned:
        return False
    return valid_password(a, password)
Example #49
0
def notify_mention(user, thing):
    """Record a username mention of ``user`` in ``thing`` and notify them."""
    try:
        rel = Inbox._add(user, thing, "mention")
        amqp.add_item('new_mention', rel._fullname)
    except CreationError:
        # A mention relation for this pair already exists; skip it.
        g.log.error("duplicate mention for (%s, %s)", user, thing)
        return

    with query_cache.CachedQueryMutator() as mutator:
        mutator.insert(queries.get_inbox_comment_mentions(user), [rel])
        queries.set_unread(thing, user, unread=True, mutator=mutator)
Example #50
0
def add_to_domain_query_q(link):
    """Enqueue ``link`` for domain-listing query processing."""
    parsed = UrlParser(link.url)
    if not parsed.domain_permutations():
        # Nothing to index for links without a usable domain.
        return

    if g.shard_domain_query_queues:
        # Spread the load across ten sharded queues, keyed by hostname.
        queue_name = "domain_query_%s_q" % (hash(parsed.hostname) % 10)
    else:
        queue_name = "domain_query_q"
    amqp.add_item(queue_name, link._fullname)
Example #51
0
def add_to_domain_query_q(link):
    """Put the link's fullname on the appropriate domain query queue."""
    url = UrlParser(link.url)
    if not url.domain_permutations():
        return  # no valid domains found

    queue = "domain_query_q"
    if g.shard_domain_query_queues:
        # Ten shards, selected by a hash of the hostname.
        shard = hash(url.hostname) % 10
        queue = "domain_query_%s_q" % shard
    amqp.add_item(queue, link._fullname)
Example #52
0
def new_comment(comment, inbox_rels):
    """Refresh cached listings and queues after a comment is created or deleted.

    comment -- the Comment that changed (insert when live, delete when
        comment._deleted is set).
    inbox_rels -- optional Inbox relation(s) for users who should be notified.
    """
    author = Account._byID(comment.author_id)
    # The author's profile comment listings are touched for both inserts and
    # deletes.
    job = [
        get_comments(author, "new", "all"),
        get_comments(author, "top", "all"),
        get_comments(author, "controversial", "all"),
    ]

    sr = Subreddit._byID(comment.sr_id)

    with CachedQueryMutator() as m:
        if comment._deleted:
            job_key = "delete_items"
            job.append(get_sr_comments(sr))
            m.delete(get_all_comments(), [comment])
        else:
            job_key = "insert_items"
            if comment._spam:
                m.insert(get_spam_comments(sr), [comment])
            if was_spam_filtered(comment):
                m.insert(get_spam_filtered_comments(sr), [comment])

            # Fastlaned links get a dedicated queue so a single busy thread
            # doesn't starve normal comment processing.
            if utils.to36(comment.link_id) in g.live_config["fastlane_links"]:
                amqp.add_item("new_fastlane_comment", comment._fullname)
            else:
                amqp.add_item("new_comment", comment._fullname)

            # Without an amqp host there is no async consumer, so update the
            # comment tree inline.
            if not g.amqp_host:
                add_comment_tree([comment])

        job_dict = {job_key: comment}
        add_queries(job, **job_dict)

        # note that get_all_comments() is updated by the amqp process
        # r2.lib.db.queries.run_new_comments (to minimise lock contention)

        if inbox_rels:
            for inbox_rel in tup(inbox_rels):
                inbox_owner = inbox_rel._thing1
                if inbox_rel._name == "inbox":
                    query = get_inbox_comments(inbox_owner)
                elif inbox_rel._name == "selfreply":
                    query = get_inbox_selfreply(inbox_owner)
                else:
                    raise ValueError("wtf is " + inbox_rel._name)

                # Mirror the comment's deleted state in each recipient's inbox
                # and flip their unread flag accordingly.
                if not comment._deleted:
                    m.insert(query, [inbox_rel])
                else:
                    m.delete(query, [inbox_rel])

                set_unread(comment, inbox_owner, unread=not comment._deleted, mutator=m)
Example #53
0
    def spam(self, things, auto=True, moderator_banned=False,
             banner=None, date=None, train_spam=True, **kw):
        """Mark things as spam/removed and propagate the change.

        things -- a thing or sequence of things to remove.
        auto -- True when the removal was automatic (e.g. spam filter).
        moderator_banned -- True when a subreddit moderator removed it.
        banner -- who removed it; a string, or a dict keyed by fullname.
        date -- removal timestamp; defaults to now.
        train_spam -- whether this removal should train the spam filter.
        """
        from r2.lib.db import queries

        all_things = tup(things)
        # Things not already marked spam; used for author bookkeeping below.
        new_things = [x for x in all_things if not x._spam]

        Report.accept(all_things, True)

        for t in all_things:
            # Promoted content is never spammed from here.
            if getattr(t, "promoted", None) is not None:
                g.log.debug("Refusing to mark promotion %r as spam" % t)
                continue

            # Classify the removal for the ban note; the four combinations of
            # current spam state x train_spam are exhaustive.
            if not t._spam and train_spam:
                note = 'spam'
            elif not t._spam and not train_spam:
                note = 'remove not spam'
            elif t._spam and not train_spam:
                note = 'confirm spam'
            elif t._spam and train_spam:
                note = 'reinforce spam'

            t._spam = True

            if moderator_banned:
                t.verdict = 'mod-removed'
            elif not auto:
                t.verdict = 'admin-removed'

            # Copy ban_info so we never mutate a dict shared between things.
            ban_info = copy(getattr(t, 'ban_info', {}))
            if isinstance(banner, dict):
                ban_info['banner'] = banner[t._fullname]
            else:
                ban_info['banner'] = banner
            ban_info.update(auto=auto,
                            moderator_banned=moderator_banned,
                            banned_at=date or datetime.now(g.tz),
                            **kw)
            ban_info['note'] = note

            t.ban_info = ban_info
            t._commit()

            if auto:
                amqp.add_item("auto_removed", t._fullname)

        # Manual removals also update author spammer state and the
        # subreddit's last-ban bookkeeping.
        if not auto:
            self.author_spammer(new_things, True)
            self.set_last_sr_ban(new_things)

        queries.ban(all_things, filtered=auto)
Example #54
0
def Run(offset=0, verbose=True):
    """reddit-job-update_promos: Intended to be run hourly to pull in
    scheduled changes to ads
    
    """

    if verbose:
        print "promote.py:Run() - amqp.add_item()"
    amqp.add_item(UPDATE_QUEUE, json.dumps(QUEUE_ALL),
                  delivery_mode=amqp.DELIVERY_TRANSIENT)
    amqp.worker.join()
    if verbose:
        print "promote.py:Run() - finished"
Example #55
0
def queue_vote(user, thing, dir, ip, organic=False, cheater=False, store=True):
    """Record a vote in the cache immediately and queue it for persistence."""
    # Reflect the vote in memcached right away so the UI updates instantly:
    # True -> '1', None -> '0', False -> '-1'.
    cached_value = '1' if dir is True else '0' if dir is None else '-1'
    g.cache.set(prequeued_vote_key(user, thing), cached_value)

    # Queue the vote to be stored unless told not to.
    if not store:
        return
    if g.amqp_host:
        payload = pickle.dumps(
            (user._id, thing._fullname, dir, ip, organic, cheater))
        amqp.add_item('register_vote_q', payload)
    else:
        # No queue available; process the vote synchronously.
        handle_vote(user, thing, dir, ip, organic)
Example #56
0
def send_broadcast(namespace, type, payload):
    """Broadcast an object to all WebSocket listeners in a namespace.

    The message type is used to differentiate between different kinds of
    payloads that may be sent. The payload will be encoded as a JSON object
    before being sent to the client.

    """
    body = json.dumps({
        "type": type,
        "payload": payload,
    })
    amqp.add_item(routing_key=namespace, body=body,
                  exchange=_WEBSOCKET_EXCHANGE)
Example #57
0
    def report(self, exc_data):
        """Ship a pickled exception record to the log queue.

        Expected/operational exceptions are silently skipped.
        """
        from r2.lib import amqp

        if issubclass(exc_data.exception_type, self._operational_exceptions()):
            return

        record = _default_dict()
        record["type"] = "exception"
        record["exception_type"] = exc_data.exception_type.__name__
        record["exception_desc"] = exc_data.exception_value
        # use the format that log_q expects; same as traceback.extract_tb
        record["traceback"] = [
            (frame.filename, frame.lineno, frame.name,
             frame.get_source_line().strip())
            for frame in exc_data.frames
        ]

        amqp.add_item(QUEUE_NAME, cPickle.dumps(record))
def deactivate_link(link):
    """Queue deactivation of the link's adzerk campaign.

    Deactivating the adzerk campaign will deactivate its associated flights.
    """
    if not hasattr(link, 'adzerk_campaign_id'):
        # Link can get voided without having been sent to adzerk if its
        # start date is several days in the future
        return

    g.log.debug('queuing deactivate_link %s' % link)
    message = json.dumps({
        'action': 'deactivate_link',
        'link': link._fullname,
    })
    amqp.add_item('adzerk_q', message)
def deactivate_campaign(link, campaign):
    """Queue deactivation of the adzerk flight backing ``campaign``."""
    sent_to_adzerk = (hasattr(link, 'adzerk_campaign_id') and
                      hasattr(campaign, 'adzerk_flight_id'))
    if not sent_to_adzerk:
        # Campaign can get voided without having been sent to adzerk if its
        # start date is several days in the future
        return

    g.log.debug('queuing deactivate_campaign %s' % link)
    payload = {
        'action': 'deactivate_campaign',
        'link': link._fullname,
        'campaign': campaign._fullname,
    }
    amqp.add_item('adzerk_q', json.dumps(payload))