def charge_campaign(link, campaign):
    """Charge the authorized transaction for a campaign so it can go live.

    No-ops if the campaign is already charged or needs no charge. If the
    authorization hold has expired, the transaction is voided and logged
    instead of charged.
    """
    # Already charged, or free/house campaign that needs no payment.
    if charged_or_not_needed(campaign):
        return

    user = Account._byID(link.author_id)
    success, reason = authorize.charge_transaction(user, campaign.trans_id,
                                                   campaign._id)
    if not success:
        if reason == authorize.TRANSACTION_NOT_FOUND:
            # authorization hold has expired
            original_trans_id = campaign.trans_id
            campaign.trans_id = NO_TRANSACTION
            campaign._commit()
            text = ('voided expired transaction for %s: (trans_id: %d)'
                    % (campaign, original_trans_id))
            PromotionLog.add(link, text)
        return

    hooks.get_hook('promote.edit_campaign').call(link=link, campaign=campaign)

    # A successful charge moves a not-yet-live link into the pending state.
    if not is_promoted(link):
        update_promote_status(link, PROMOTE_STATUS.pending)

    emailer.queue_promo(link, campaign.total_budget_dollars, campaign.trans_id)
    text = ('auth charge for campaign %s, trans_id: %d'
            % (campaign._id, campaign.trans_id))
    PromotionLog.add(link, text)
def delete_campaign(link, campaign):
    """Remove a campaign: drop its schedule, void its payment, delete it,
    log the deletion, and notify the delete_campaign hook."""
    PromotionWeights.delete(link, campaign)
    void_campaign(link, campaign, reason='deleted_campaign')
    campaign.delete()
    PromotionLog.add(link, 'deleted campaign %s' % campaign._id)
    hooks.get_hook('promote.delete_campaign').call(link=link, campaign=campaign)
def make_daily_promotions():
    """Daily promotion job: charge pending campaigns, promote scheduled
    links, and mark promotions that are no longer scheduled as finished."""
    # charge campaigns so they can go live
    charge_pending(offset=0)
    charge_pending(offset=1)

    # promote links and record ids of promoted links
    link_ids = set()
    for campaign, link in get_scheduled_promos(offset=0):
        link_ids.add(link._id)
        promote_link(link, campaign)

    # expire finished links: anything still marked promoted but absent from
    # today's schedule is done
    q = Link._query(Link.c.promote_status == PROMOTE_STATUS.promoted, data=True)
    q = q._filter(not_(Link.c._id.in_(link_ids)))
    for link in q:
        update_promote_status(link, PROMOTE_STATUS.finished)
        emailer.finished_promo(link)

    # update subverbifys with promos
    all_live_promo_srnames(_update=True)

    _mark_promos_updated()
    finalize_completed_campaigns(daysago=1)
    hooks.get_hook('promote.make_daily_promotions').call(offset=0)
def use(self):
    """Render this preload data source, notifying the js_preload.use hook.

    Returns the rendered data source, or the empty string when there is
    no preloaded data.
    """
    hooks.get_hook("js_preload.use").call(js_preload=self)
    if not self.data:
        return ''
    return js.DataSource.use(self)
def _on_create(self): hooks.get_hook("oauth2.create_token").call(token=self) # update the by-user view if self.user_id: self._by_user_view()._set_values(str(self.user_id), {self._id: ''}) return super(OAuth2AccessToken, self)._on_create()
def toggle_pause_campaign(link, campaign, should_pause):
    """Pause or resume a campaign, log the action, and fire the edit hook."""
    campaign.paused = should_pause
    campaign._commit()

    if should_pause:
        verb = 'paused'
    else:
        verb = 'resumed'
    PromotionLog.add(link, '%s campaign %s' % (verb, campaign._id))

    hooks.get_hook('promote.edit_campaign').call(link=link, campaign=campaign)
def review_fraud(link, is_fraud):
    """Record an admin's fraud determination for a promoted link.

    Fraudulent links are rejected (without notifying why) and reported to
    the fraud_identified hook with the reviewing admin.
    """
    link.fraud = is_fraud
    link._commit()
    PromotionLog.add(
        link, "marked as fraud" if is_fraud else "resolved as not fraud")
    # Either way the payment-flagged state is resolved.
    queries.unset_payment_flagged_link(link)

    if is_fraud:
        reject_promotion(link, "fraud", notify_why=False)
        hooks.get_hook("promote.fraud_identified").call(link=link,
                                                        sponsor=c.user)
def add_target_fields(self, target):
    """Add event fields describing `target` (the Thing being acted upon).

    No-op when target is falsy. Emits id/fullname/age/type plus
    author, url, parent-link, and creation-time fields where applicable,
    then gives plugins a chance to add more via the hook.
    """
    if not target:
        return
    from v1.models import Comment, Link, Message

    self.add("target_id", target._id)
    self.add("target_fullname", target._fullname)
    self.add("target_age_seconds", target._age.total_seconds())

    target_type = target.__class__.__name__.lower()
    # Self posts are reported as their own type rather than "link".
    if target_type == "link" and target.is_self:
        target_type = "self"
    self.add("target_type", target_type)

    # If the target is an Account or Subverbify (or has a "name" attr),
    # add the target_name
    if hasattr(target, "name"):
        self.add("target_name", target.name)

    # Add info about the target's author for comments, links, & messages
    if isinstance(target, (Comment, Link, Message)):
        author = target.author_slow
        if target._deleted or author._deleted:
            self.add("target_author_id", 0)
            self.add("target_author_name", "[deleted]")
        else:
            self.add("target_author_id", author._id)
            self.add("target_author_name", author.name)

    # Add info about the url being linked to for link posts
    if isinstance(target, Link):
        self.add_text("target_title", target.title)
        if not target.is_self:
            self.add("target_url", target.url)
            self.add("target_url_domain", target.link_domain())

    # Add info about the link being commented on for comments
    if isinstance(target, Comment):
        link_fullname = Link._fullname_from_id36(to36(target.link_id))
        self.add("link_id", target.link_id)
        self.add("link_fullname", link_fullname)

    # Add info about when target was originally posted for links/comments
    if isinstance(target, (Comment, Link)):
        self.add("target_created_ts", to_epoch_milliseconds(target._date))

    hooks.get_hook("eventcollector.add_target_fields").call(
        event=self,
        target=target,
    )
def _set_media(link, force=False, **kwargs): sr = link.subverbify_slow # Do not process thumbnails for quarantined subverbifys if sr.quarantine: return if not link.is_self: if not force and (link.has_thumbnail or link.media_object): return if not force and link.promoted: return scrape_url = _get_scrape_url(link) if not scrape_url: if link.preview_object: # If the user edited out an image from a self post, we need to make # sure to remove its metadata. link.set_preview_object(None) link._commit() return youtube_scraper = feature.is_enabled("youtube_scraper", subverbify=sr.name) media = _scrape_media(scrape_url, force=force, use_youtube_scraper=youtube_scraper, **kwargs) if media and not link.promoted: # While we want to add preview images to self posts for the new apps, # let's not muck about with the old-style thumbnails in case that # breaks assumptions. if not link.is_self: link.thumbnail_url = media.thumbnail_url link.thumbnail_size = media.thumbnail_size link.set_media_object(media.media_object) link.set_secure_media_object(media.secure_media_object) link.set_preview_object(media.preview_object) link._commit() hooks.get_hook("scraper.set_media").call(link=link) if media.media_object or media.secure_media_object: amqp.add_item("new_media_embed", link._fullname)
def __init__(self, vote, effects=None):
    """Initialize a new set of vote effects.

    If a dict of previously-determined effects are passed in as `effects`,
    those will be used instead of calculating the effects.
    """
    self.note_codes = {}
    self.validator = None

    if effects:
        # NOTE: the known keys are popped out of the caller's dict; whatever
        # remains becomes other_effects.
        self.affects_score = effects.pop("affects_score")
        self.affects_karma = effects.pop("affects_karma")
        self.other_effects = effects
    else:
        hook = hooks.get_hook("vote.get_validator")
        self.validator = hook.call_until_return(vote=vote, effects=self)

        self.affects_score = self.determine_affects_score(vote)
        self.affects_karma = self.determine_affects_karma(vote)
        self.other_effects = self.determine_other_effects(vote)

    # karma delta this vote contributes: +1 up, -1 down, 0 otherwise
    self.karma_change = 0
    if self.affects_karma:
        if vote.is_upvote:
            self.karma_change = 1
        elif vote.is_downvote:
            self.karma_change = -1
def cast_vote(user, thing, direction, **data):
    """Register a vote and queue it for processing."""
    # Only links and comments are voteable.
    if not isinstance(thing, (Link, Comment)):
        return

    update_vote_lookups(user, thing, direction)

    vote_data = {
        "user_id": user._id,
        "thing_fullname": thing._fullname,
        "direction": direction,
        "date": int(epoch_timestamp(datetime.now(g.tz))),
    }

    data['ip'] = getattr(request, "ip", None)
    if data['ip'] is not None:
        data['org'] = organization_by_ips(data['ip'])
    vote_data['data'] = data

    hooks.get_hook("vote.get_vote_data").call(
        data=vote_data["data"],
        user=user,
        thing=thing,
        request=request,
        context=c,
    )

    # The vote event will actually be sent from an async queue processor, so
    # we need to pull out the context data at this point
    if not g.running_as_script:
        vote_data["event_data"] = {
            "context": Event.get_context_data(request, c),
            "sensitive": Event.get_sensitive_context_data(request, c),
        }

    try:
        vote_dump = json.dumps(vote_data)
    except UnicodeDecodeError:
        # best-effort: drop the vote rather than crash on bad input
        g.log.error("Got weird unicode in the vote data: %r", vote_data)
        return

    if isinstance(thing, Link):
        queue = "vote_link_q"
    elif isinstance(thing, Comment):
        queue = "vote_comment_q"

    amqp.add_item(queue, vote_dump)
def cache_poisoning_event(self, poison_info, request=None, context=None):
    """Create a 'cache_poisoning_server' event for event-collector

    poison_info: Details from the client about the poisoning event
    request, context: Should be pylons.request & pylons.c respectively
    """
    poisoner_name = poison_info.pop("poisoner_name")

    event = Event(
        topic="cache_poisoning_events",
        event_type="ss.cache_poisoning",
        request=request,
        context=context,
        data=poison_info,
        truncatable_field="resp_headers",
    )

    # Default blame; may be overridden below by stronger evidence.
    event.add("poison_blame_guess", "proxy")

    resp_headers = poison_info["resp_headers"]
    if resp_headers:
        # Check if the caching headers we got back match the current policy
        cache_policy = poison_info["cache_policy"]
        headers_valid = cache_headers_valid(cache_policy, resp_headers)
        event.add("cache_headers_valid", headers_valid)

    # try to determine what kind of poisoning we're dealing with
    if poison_info["source"] == "web":
        # Do we think they logged in the usual way, or do we think they
        # got poisoned with someone else's session cookie?
        valid_login_hook = hooks.get_hook("poisoning.guess_valid_login")
        if valid_login_hook.call_until_return(poisoner_name=poisoner_name):
            # Maybe a misconfigured local Squid proxy + multiple
            # clients?
            event.add("poison_blame_guess", "local_proxy")
            event.add("poison_credentialed_guess", False)
        elif (context.user_is_loggedin and
                context.user.name == poisoner_name):
            # Guess we got poisoned with a cookie-bearing response.
            event.add("poison_credentialed_guess", True)
        else:
            event.add("poison_credentialed_guess", False)
    elif poison_info["source"] == "mweb":
        # All mweb responses contain an OAuth token, so we have to assume
        # whoever got this response can perform actions as the poisoner
        event.add("poison_credentialed_guess", True)
    else:
        raise Exception("Unsupported source in cache_poisoning_event")

    # Check if the CF-Cache-Status header is present (this header is not
    # present if caching is disallowed.) If it is, the CDN caching rules
    # are all jacked up.
    if resp_headers and "cf-cache-status" in resp_headers:
        event.add("poison_blame_guess", "cdn")

    self.save_event(event)
def revoke(self):
    """Revokes (invalidates) this access token."""
    self.revoked = True
    self._commit()

    # Remove the token from the per-user index as well.
    if self.user_id:
        try:
            tba = self._by_user_view()._byID(self.user_id)
            del tba[self._id]
        except (tdb_cassandra.NotFound, KeyError):
            # Not fatal, since self.check_valid() will still be False.
            pass
        else:
            tba._commit()

    hooks.get_hook("oauth2.revoke_token").call(token=self)
def new_campaign(link, dates, target, frequency_cap,
                 priority, location, platform, mobile_os, ios_devices,
                 ios_version_range, android_devices, android_version_range,
                 total_budget_pennies, cost_basis, bid_pennies):
    """Create and schedule a new campaign for a promoted link.

    Returns the created PromoCampaign. Authors flagged with
    complimentary_promos get the campaign for free (non-house only).
    """
    campaign = PromoCampaign.create(
        link, target, dates[0], dates[1], frequency_cap, priority,
        location, platform, mobile_os, ios_devices, ios_version_range,
        android_devices, android_version_range, total_budget_pennies,
        cost_basis, bid_pennies)
    PromotionWeights.add(link, campaign)
    PromotionLog.add(link, 'campaign %s created' % campaign._id)

    if not campaign.is_house:
        author = Account._byID(link.author_id, data=True)
        if getattr(author, "complimentary_promos", False):
            free_campaign(link, campaign, c.user)

    hooks.get_hook('promote.new_campaign').call(link=link, campaign=campaign)
    return campaign
def add_props(cls, user, wrapped):
    """Populate render-time properties on wrapped voteable things."""
    from v1.lib.wrapped import CachedVariable

    for item in wrapped:
        # insert replacement variable for timesince to allow for
        # caching of thing templates
        item.display = CachedVariable("display")
        item.timesince = CachedVariable("timesince")
        item.childlisting = CachedVariable("childlisting")

        score_fmt = getattr(item, "score_fmt", Score.number_only)
        item.display_score = map(score_fmt, item.voting_score)

        # Cacheable things render placeholder score variables instead of
        # the real (per-viewer) scores, which are kept in render_score.
        if item.cachable:
            item.render_score = item.display_score
            item.display_score = map(CachedVariable,
                                     ["scoredislikes", "scoreunvoted",
                                      "scorelikes"])

    hooks.get_hook("add_props").call(items=wrapped)
def get_context_data(self, request, context):
    """Extract common data from the current request and context

    This is generally done explicitly in `__init__`, but is done by hand for
    votes before the request context is lost by the queuing.

    request, context: Should be pylons.request & pylons.c respectively
    """
    data = {}

    if context.user_is_loggedin:
        data["user_id"] = context.user._id
        data["user_name"] = context.user.name
    else:
        # logged-out users are identified by their loid, when present
        if context.loid:
            data.update(context.loid.to_dict())

    oauth2_client = getattr(context, "oauth2_client", None)
    if oauth2_client:
        data["oauth2_client_id"] = oauth2_client._id
        data["oauth2_client_name"] = oauth2_client.name
        data["oauth2_client_app_type"] = oauth2_client.app_type

    data["geoip_country"] = get_request_location(request, context)
    data["domain"] = request.host
    data["user_agent"] = request.user_agent
    data["user_agent_parsed"] = request.parsed_agent.to_dict()

    http_referrer = request.headers.get("Referer", None)
    if http_referrer:
        data["referrer_url"] = http_referrer
        data["referrer_domain"] = domain(http_referrer)

    hooks.get_hook("eventcollector.context_data").call(
        data=data,
        user=context.user,
        request=request,
        context=context,
    )

    return data
def process_message(msg):
    """Consume one queued link vote: create the Vote, update the voter's
    liked/disliked state, and refresh the affected cached queries."""
    vote_data = json.loads(msg.body)

    hook = hooks.get_hook('vote.validate_vote_data')
    if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
        # Corrupt records in the queue. Ignore them.
        print "Ignoring invalid vote by %s on %s %s" % (
                vote_data.get('user_id', '<unknown>'),
                vote_data.get('thing_fullname', '<unknown>'),
                vote_data)
        return

    timer = g.stats.get_timer("link_vote_processor")
    timer.start()

    user = Account._byID(vote_data.pop("user_id"))
    link = Link._by_fullname(vote_data.pop("thing_fullname"))

    # create the vote and update the voter's liked/disliked under lock so
    # that the vote state and cached query are consistent
    lock_key = "vote-%s-%s" % (user._id36, link._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, link, vote_data)

        try:
            vote = Vote(
                user,
                link,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.exception("Invalid type: %r", e.message)
            return

        vote.commit()
        timer.intermediate("create_vote_object")

        update_user_liked(vote)
        timer.intermediate("voter_likes")

        # Only re-queue listing updates for votes that count, on links
        # that are still visible.
        vote_valid = vote.is_automatic_initial_vote or vote.effects.affects_score
        link_valid = not (link._spam or link._deleted)
        if vote_valid and link_valid:
            add_to_author_query_q(link)
            add_to_subverbify_query_q(link)
            add_to_domain_query_q(link)

    timer.stop()
    timer.flush()
def handle_login(controller, form, responder, user, rem=None,
                 signature=None, **kwargs):
    """Validate a login attempt, emitting a login_attempt event for every
    outcome (error name on failure, None on success)."""
    def _event(error):
        g.events.login_event(
            'login_attempt',
            error_msg=error,
            user_name=request.urlvars.get('url_user'),
            remember_me=rem,
            signature=signature,
            request=request,
            context=c)

    if signature and not signature.is_valid():
        _event(error="SIGNATURE")
        abort(403)

    hook_error = hooks.get_hook("account.login").call_until_return(
        responder=responder,
        request=request,
        context=c,
    )
    # if any of the hooks returned an error, abort the login. The
    # set_error in this case also needs to exist in the hook.
    if hook_error:
        _event(error=hook_error)
        return

    exempt_ua = (request.user_agent and
                 any(ua in request.user_agent
                     for ua in g.config.get('exempt_login_user_agents', ())))
    if (errors.LOGGED_IN, None) in c.errors:
        if user == c.user or exempt_ua:
            # Allow funky clients to re-login as the current user.
            c.errors.remove((errors.LOGGED_IN, None))
        else:
            _event(error='LOGGED_IN')
            abort(verbify_http_error(409, errors.LOGGED_IN))

    if responder.has_errors("ratelimit", errors.RATELIMIT):
        _event(error='RATELIMIT')
    elif responder.has_errors("passwd", errors.WRONG_PASSWORD):
        _event(error='WRONG_PASSWORD')
    else:
        controller._login(responder, user, rem)
        _event(error=None)
def apply_effects(self): """Apply the effects of the vote to the thing that was voted on.""" # remove the old vote if self.previous_vote and self.previous_vote.affected_thing_attr: self.thing._incr(self.previous_vote.affected_thing_attr, -1) # add the new vote if self.affected_thing_attr: self.thing._incr(self.affected_thing_attr, 1) if self.effects.affects_karma: change = self.effects.karma_change if self.previous_vote: change -= self.previous_vote.effects.karma_change if change: self.thing.author_slow.incr_karma( kind=self.thing.affects_karma_type, sr=self.thing.subverbify_slow, amt=change, ) hooks.get_hook("vote.apply_effects").call(vote=self)
def can_create_subverbify(self):
    """Whether this account may create a subverbify.

    A hook may decide outright (returning a non-None verdict); otherwise
    the account must be old enough and have sufficient link OR comment
    karma.
    """
    verdict = hooks.get_hook("account.can_create_subverbify").call_until_return(
        account=self)
    if verdict is not None:
        return verdict

    old_enough = self._age >= timedelta(
        days=g.live_config["create_sr_account_age_days"])
    if not old_enough:
        return False

    has_karma = (self.link_karma >= g.live_config["create_sr_link_karma"] or
                 self.comment_karma >= g.live_config["create_sr_comment_karma"])
    return has_karma
def needs_captcha(self):
    """Whether this account must solve a captcha.

    False when captchas are globally disabled, when a hook grants an
    exemption, or when either karma total meets its exemption threshold.
    """
    if g.disable_captcha:
        return False

    # A hook may grant an exemption for this account.
    if hooks.get_hook("account.is_captcha_exempt").call_until_return(
            account=self):
        return False

    # Established accounts are exempt once either karma threshold is met.
    exempt = (
        self.link_karma >= g.live_config["captcha_exempt_link_karma"] or
        self.comment_karma >= g.live_config["captcha_exempt_comment_karma"]
    )
    return not exempt
def get_media_embed(media_object):
    """Build a media embed for *media_object*, or None if none applies."""
    if not isinstance(media_object, dict):
        return

    # Give plugins first crack at producing the embed.
    from_hook = hooks.get_hook("scraper.media_embed").call_until_return(
        media_object=media_object)
    if from_hook:
        return from_hook

    object_type = media_object.get("type")
    if object_type == "custom":
        return _make_custom_media_embed(media_object)

    if "oembed" in media_object:
        if object_type == "youtube.com":
            return _YouTubeScraper.media_embed(media_object)
        return _EmbedlyScraper.media_embed(media_object)
def for_url(cls, url, autoplay=False, maxwidth=600, use_youtube_scraper=False):
    """Choose the best scraper implementation for *url*.

    Order of preference: plugin-supplied scraper, the YouTube scraper
    (when enabled and matching), embedly (when a service pattern
    matches), then the thumbnail-only fallback.
    """
    custom = hooks.get_hook("scraper.factory").call_until_return(url=url)
    if custom:
        return custom

    if use_youtube_scraper and _YouTubeScraper.matches(url):
        return _YouTubeScraper(url, maxwidth=maxwidth)

    if any(pattern.match(url) for pattern in _fetch_embedly_services()):
        return _EmbedlyScraper(url, autoplay=autoplay, maxwidth=maxwidth)

    return _ThumbnailOnlyScraper(url)
def failed_payment_method(user, link):
    """Record a failed payment attempt against the promoter's account."""
    user._incr('num_failed_payments')
    hooks.get_hook('promote.failed_payment').call(user=user, link=link)
def edit_campaign(link, campaign, dates, target, frequency_cap,
                  priority, location, total_budget_pennies, cost_basis,
                  bid_pennies, platform='desktop', mobile_os=None,
                  ios_devices=None, ios_version_range=None,
                  android_devices=None, android_version_range=None):
    """Apply edits to a campaign, logging every changed attribute.

    Each attribute that differs from the stored value is recorded in
    `changed` as (old, new) so the edit can be written to the
    PromotionLog. Changing the budget voids the existing payment
    authorization, since the authorized amount no longer matches.
    """
    changed = {}
    if dates[0] != campaign.start_date or dates[1] != campaign.end_date:
        original = '%s to %s' % (campaign.start_date, campaign.end_date)
        edited = '%s to %s' % (dates[0], dates[1])
        changed['dates'] = (original, edited)
        campaign.start_date = dates[0]
        campaign.end_date = dates[1]
    if target != campaign.target:
        changed['target'] = (campaign.target, target)
        campaign.target = target
    if frequency_cap != campaign.frequency_cap:
        changed['frequency_cap'] = (campaign.frequency_cap, frequency_cap)
        campaign.frequency_cap = frequency_cap
    if priority != campaign.priority:
        changed['priority'] = (campaign.priority.name, priority.name)
        campaign.priority = priority
    if location != campaign.location:
        changed['location'] = (campaign.location, location)
        campaign.location = location
    if platform != campaign.platform:
        changed["platform"] = (campaign.platform, platform)
        campaign.platform = platform
    if mobile_os != campaign.mobile_os:
        changed["mobile_os"] = (campaign.mobile_os, mobile_os)
        campaign.mobile_os = mobile_os
    if ios_devices != campaign.ios_devices:
        changed['ios_devices'] = (campaign.ios_devices, ios_devices)
        campaign.ios_devices = ios_devices
    if android_devices != campaign.android_devices:
        changed['android_devices'] = (campaign.android_devices,
                                      android_devices)
        campaign.android_devices = android_devices
    if ios_version_range != campaign.ios_version_range:
        changed['ios_version_range'] = (campaign.ios_version_range,
                                        ios_version_range)
        campaign.ios_version_range = ios_version_range
    if android_version_range != campaign.android_version_range:
        changed['android_version_range'] = (campaign.android_version_range,
                                            android_version_range)
        campaign.android_version_range = android_version_range
    if total_budget_pennies != campaign.total_budget_pennies:
        # FIX: budget edits were previously applied (and the authorization
        # voided) without being recorded in `changed`, so they never
        # appeared in the PromotionLog entry below. Record them like every
        # other attribute.
        changed['total_budget_pennies'] = (campaign.total_budget_pennies,
                                           total_budget_pennies)
        # the authorized amount is stale once the budget changes
        void_campaign(link, campaign, reason='changed_budget')
        campaign.total_budget_pennies = total_budget_pennies
    if cost_basis != campaign.cost_basis:
        changed['cost_basis'] = (campaign.cost_basis, cost_basis)
        campaign.cost_basis = cost_basis
    if bid_pennies != campaign.bid_pennies:
        changed['bid_pennies'] = (campaign.bid_pennies, bid_pennies)
        campaign.bid_pennies = bid_pennies

    change_strs = map(lambda t: '%s: %s -> %s' % (t[0], t[1][0], t[1][1]),
                      changed.iteritems())
    change_text = ', '.join(change_strs)
    campaign._commit()

    # update the index
    PromotionWeights.reschedule(link, campaign)

    if not campaign.is_house:
        # make it a freebie, if applicable
        author = Account._byID(link.author_id, True)
        if getattr(author, "complimentary_promos", False):
            free_campaign(link, campaign, c.user)

    # record the changes
    if change_text:
        PromotionLog.add(link, 'edited %s: %s' % (campaign, change_text))

    hooks.get_hook('promote.edit_campaign').call(link=link, campaign=campaign)
def js_config(extra_config=None):
    """Build the client-side r.config dictionary for the current request.

    Collects user, site, cache-poisoning-canary, and event-collector
    settings, lets the js_config hook amend the result, and returns it.
    """
    logged = c.user_is_loggedin and c.user.name
    user_id = c.user_is_loggedin and c.user._id
    user_in_timeout = c.user_is_loggedin and c.user.in_timeout
    sodium = bool(logged and c.user.sodium)
    controller_name = request.environ['pylons.routes_dict']['controller']
    action_name = request.environ['pylons.routes_dict']['action']
    route_name = controller_name + '.' + action_name

    cache_policy = "loggedout_www"
    if c.user_is_loggedin:
        cache_policy = "loggedin_www_new"

    # Canary for detecting cache poisoning
    poisoning_canary = None
    poisoning_report_mac = None
    if logged:
        if "pc" in c.cookies and len(c.cookies["pc"].value) == 2:
            poisoning_canary = c.cookies["pc"].value
            poisoning_report_mac = make_poisoning_report_mac(
                poisoner_canary=poisoning_canary,
                poisoner_name=logged,
                poisoner_id=user_id,
                cache_policy=cache_policy,
                source="web",
                route_name=route_name,
            )

    # MAC over the route name so the client can prove which page it's on
    mac = hmac.new(g.secrets["action_name"], route_name, hashlib.sha1)
    verification = mac.hexdigest()

    cur_subverbify = ""
    cur_sr_fullname = ""
    cur_listing = ""
    listing_over_18 = False
    pref_no_profanity = not logged or c.user.pref_no_profanity

    pref_media_preview = c.user.pref_media_preview
    if not feature.is_enabled("autoexpand_media_previews"):
        expando_preference = None
    elif pref_media_preview == "subverbify":
        expando_preference = "subverbify_default"
    elif pref_media_preview == "on":
        expando_preference = "auto_expand"
    else:
        expando_preference = "do_not_expand"

    pref_beta = c.user.pref_beta
    nsfw_media_acknowledged = logged and c.user.nsfw_media_acknowledged

    if isinstance(c.site, Subverbify) and not c.default_sr:
        cur_subverbify = c.site.name
        cur_sr_fullname = c.site._fullname
        cur_listing = cur_subverbify
        listing_over_18 = c.site.over_18
    elif isinstance(c.site, DefaultSR):
        cur_listing = "frontpage"
    elif isinstance(c.site, FakeSubverbify):
        cur_listing = c.site.name

    # Test collector endpoint/keys in debug mode, production otherwise.
    if g.debug:
        events_collector_url = g.events_collector_test_url
        events_collector_key = g.secrets['events_collector_test_js_key']
        events_collector_secret = g.secrets['events_collector_test_js_secret']
    else:
        events_collector_url = g.events_collector_url
        events_collector_key = g.secrets['events_collector_js_key']
        events_collector_secret = g.secrets['events_collector_js_secret']

    config = {
        # is the user logged in?
        "logged": logged,
        # logged in user's id
        "user_id": user_id,
        # is user in timeout?
        "user_in_timeout": user_in_timeout,
        # the subverbify's name (for posts)
        "post_site": cur_subverbify,
        "cur_site": cur_sr_fullname,
        "cur_listing": cur_listing,
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # they're welcome to try to override this in the DOM because we just
        # disable the features server-side if applicable
        'store_visits': sodium and c.user.pref_store_visits,
        # current domain
        "cur_domain": get_domain(subverbify=False, no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(subverbify=False),
        "stats_domain": g.stats_domain or '',
        "stats_sample_rate": g.stats_sample_rate or 0,
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        "media_domain": g.media_domain,
        # does the client only want to communicate over HTTPS?
        "https_forced": feature.is_enabled("force_https"),
        # debugging?
        "debug": g.debug,
        "poisoning_canary": poisoning_canary,
        "poisoning_report_mac": poisoning_report_mac,
        "cache_policy": cache_policy,
        "send_logs": g.live_config["frontend_logging"],
        "server_time": math.floor(time.time()),
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubverbify),
        "tracker_url": "",  # overridden below if configured
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "eventtracker_url": g.eventtracker_url,
        "anon_eventtracker_url": g.anon_eventtracker_url,
        "events_collector_url": events_collector_url,
        "events_collector_key": events_collector_key,
        "events_collector_secret": events_collector_secret,
        "feature_screenview_events": feature.is_enabled('screenview_events'),
        "static_root": static(''),
        "over_18": bool(c.over18),
        "listing_over_18": listing_over_18,
        "expando_preference": expando_preference,
        "pref_no_profanity": pref_no_profanity,
        "pref_beta": pref_beta,
        "nsfw_media_acknowledged": nsfw_media_acknowledged,
        "new_window": logged and bool(c.user.pref_newwindow),
        "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'],
        "sodium": sodium,
        "has_subscribed": logged and c.user.has_subscribed,
        "is_sponsor": logged and c.user_is_sponsor,
        "pageInfo": {
            "verification": verification,
            "actionName": route_name,
        },
        "facebook_app_id": g.live_config["facebook_app_id"],
        "feature_new_report_dialog": feature.is_enabled('new_report_dialog'),
        "email_verified": logged and c.user.email and c.user.email_verified,
    }

    if g.tracker_url:
        config["tracker_url"] = tracking.get_pageview_pixel_url()

    if g.uncompressedJS:
        config["uncompressedJS"] = True

    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config
def process_message(msg):
    """Consume one queued comment vote: create the Vote, update the
    author's comment listings, and (periodically) refresh comment scores."""
    from v1.lib.comment_tree import write_comment_scores
    from v1.lib.db.queries import (
        add_queries,
        add_to_commentstree_q,
        get_comments,
    )
    from v1.models.builder import get_active_sort_orders_for_link

    vote_data = json.loads(msg.body)

    hook = hooks.get_hook('vote.validate_vote_data')
    if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
        # Corrupt records in the queue. Ignore them.
        print "Ignoring invalid vote by %s on %s %s" % (
                vote_data.get('user_id', '<unknown>'),
                vote_data.get('thing_fullname', '<unknown>'),
                vote_data)
        return

    timer = g.stats.get_timer("comment_vote_processor")
    timer.start()

    user = Account._byID(vote_data.pop("user_id"))
    comment = Comment._by_fullname(vote_data.pop("thing_fullname"))

    print "Processing vote by %s on %s %s" % (user, comment, vote_data)

    try:
        vote = Vote(
            user,
            comment,
            direction=vote_data["direction"],
            date=datetime.utcfromtimestamp(vote_data["date"]),
            data=vote_data["data"],
            event_data=vote_data.get("event_data"),
        )
    except TypeError as e:
        # a vote on an invalid type got in the queue, just skip it
        g.log.exception("Invalid type: %r", e.message)
        return

    vote.commit()
    timer.intermediate("create_vote_object")

    # Nothing further to do for votes that don't count or comments that
    # are no longer visible.
    vote_invalid = (not vote.effects.affects_score and
                    not vote.is_automatic_initial_vote)
    comment_invalid = comment._spam or comment._deleted
    if vote_invalid or comment_invalid:
        timer.stop()
        timer.flush()
        return

    author = Account._byID(comment.author_id)
    add_queries(
        queries=[get_comments(author, sort, 'all') for sort in SORTS],
        insert_items=comment,
    )
    timer.intermediate("author_queries")

    update_threshold = g.live_config['comment_vote_update_threshold']
    update_period = g.live_config['comment_vote_update_period']
    skip_score_update = (comment.num_votes > update_threshold and
                         comment.num_votes % update_period != 0)

    # skip updating scores if this was the automatic initial vote. those
    # updates will be handled by new_comment. Also only update scores
    # periodically once a comment has many votes.
    if not vote.is_automatic_initial_vote and not skip_score_update:
        # check whether this link is using precomputed sorts, if it is
        # we'll need to push an update to commentstree_q
        link = Link._byID(comment.link_id)
        if get_active_sort_orders_for_link(link):
            # send this comment to commentstree_q where we will update
            # CommentScoresByLink, CommentTree (noop), and CommentOrderer
            add_to_commentstree_q(comment)
        else:
            # the link isn't using precomputed sorts, so just update the
            # scores
            write_comment_scores(link, [comment])
        timer.intermediate("update_scores")

    timer.stop()
    timer.flush()
def handle_register(controller, form, responder, name, email,
                    password, rem=None, newsletter_subscribe=False,
                    sponsor=False, signature=None, **kwargs):
    """Validate a registration attempt and create the account.

    Emits a register_attempt event for every outcome (error name on
    failure, None on success).
    """
    def _event(error):
        g.events.login_event(
            'register_attempt',
            error_msg=error,
            user_name=request.urlvars.get('url_user'),
            email=request.POST.get('email'),
            remember_me=rem,
            newsletter=newsletter_subscribe,
            signature=signature,
            request=request,
            context=c)

    if signature and not signature.is_valid():
        _event(error="SIGNATURE")
        abort(403)

    # Validation chain: the first failing check wins and is the one
    # reported in the event.
    if responder.has_errors('user', errors.USERNAME_TOO_SHORT):
        _event(error='USERNAME_TOO_SHORT')
    elif responder.has_errors('user', errors.USERNAME_INVALID_CHARACTERS):
        _event(error='USERNAME_INVALID_CHARACTERS')
    elif responder.has_errors('user', errors.USERNAME_TAKEN_DEL):
        _event(error='USERNAME_TAKEN_DEL')
    elif responder.has_errors('user', errors.USERNAME_TAKEN):
        _event(error='USERNAME_TAKEN')
    elif responder.has_errors('email', errors.BAD_EMAIL):
        _event(error='BAD_EMAIL')
    elif responder.has_errors('passwd', errors.SHORT_PASSWORD):
        _event(error='SHORT_PASSWORD')
    elif responder.has_errors('passwd', errors.BAD_PASSWORD):
        # BAD_PASSWORD is set when SHORT_PASSWORD is set
        _event(error='BAD_PASSWORD')
    elif responder.has_errors('passwd2', errors.BAD_PASSWORD_MATCH):
        _event(error='BAD_PASSWORD_MATCH')
    elif responder.has_errors('ratelimit', errors.RATELIMIT):
        _event(error='RATELIMIT')
    elif (not g.disable_captcha and
            responder.has_errors('captcha', errors.BAD_CAPTCHA)):
        _event(error='BAD_CAPTCHA')
    elif newsletter_subscribe and not email:
        c.errors.add(errors.NEWSLETTER_NO_EMAIL, field="email")
        form.has_errors("email", errors.NEWSLETTER_NO_EMAIL)
        _event(error='NEWSLETTER_NO_EMAIL')
    elif sponsor and not email:
        c.errors.add(errors.SPONSOR_NO_EMAIL, field="email")
        form.has_errors("email", errors.SPONSOR_NO_EMAIL)
        _event(error='SPONSOR_NO_EMAIL')
    else:
        try:
            user = register(name, password, request.ip)
        except AccountExists:
            # Race: the name was taken between validation and creation.
            c.errors.add(errors.USERNAME_TAKEN, field="user")
            form.has_errors("user", errors.USERNAME_TAKEN)
            _event(error='USERNAME_TAKEN')
            return

        VRatelimit.ratelimit(rate_ip=True, prefix="rate_register_")

        # anything else we know (email, languages)?
        if email:
            user.set_email(email)
            emailer.verify_email(user)

        user.pref_lang = c.lang
        user._commit()

        amqp.add_item('new_account', user._fullname)

        hooks.get_hook("account.registered").call(user=user)

        reject = hooks.get_hook("account.spotcheck").call(account=user)
        if any(reject):
            _event(error='ACCOUNT_SPOTCHECK')
            return

        if newsletter_subscribe and email:
            try:
                newsletter.add_subscriber(email, source="register")
            except newsletter.NewsletterError as e:
                # best-effort; a newsletter failure shouldn't block signup
                g.log.warning("Failed to subscribe: %r" % e)

        controller._login(responder, user, rem)
        _event(error=None)
def update_promote_status(link, status):
    """Set a link's promote status and notify the edit_promotion hook."""
    queries.set_promote_status(link, status)
    hooks.get_hook('promote.edit_promotion').call(link=link)
def watcher(data, stat):
    """ZooKeeper watch callback: decode the node payload into self.data.

    Payloads prefixed with "gzip" are zlib-compressed JSON; empty/missing
    payloads load as an empty dict. Fires the live_config update hook.
    NOTE(review): closes over `self` from the enclosing scope.
    """
    if data and data.startswith("gzip"):
        data = zlib.decompress(data[len("gzip"):])
    self.data = json.loads(data or '{}')
    hooks.get_hook("worker.live_config.update").call()