def GET_policy_page(self, page, requested_rev):
    """Render a site policy page (privacy policy, user agreement, or
    content policy) from its backing wiki page, optionally at a
    specific historical revision."""
    # compact (mobile) render style doesn't support this layout; send
    # the visitor to the raw wiki page instead
    if c.render_style == 'compact':
        self.redirect('/wiki/' + page)

    # map the URL slug to the configured wiki page and a display name
    if page == 'privacypolicy':
        wiki_name = g.wiki_page_privacy_policy
        pagename = _('privacy policy')
    elif page == 'useragreement':
        wiki_name = g.wiki_page_user_agreement
        pagename = _('user agreement')
    elif page == 'contentpolicy':
        wiki_name = g.wiki_page_content_policy
        pagename = _('content policy')
    else:
        abort(404)

    wp = WikiPage.get(Frontpage, wiki_name)
    revs = list(wp.get_revisions())

    # collapse minor edits into revisions with reasons
    rev_info = []
    last_edit = None
    for rev in revs:
        if rev.is_hidden:
            continue
        # remember the first (newest) edit of a run of minor edits
        if not last_edit:
            last_edit = rev
        # a revision with a reason closes the current run
        if rev._get('reason'):
            rev_info.append({
                'id': str(last_edit._id),
                'title': rev._get('reason'),
            })
            last_edit = None

    if requested_rev:
        try:
            display_rev = WikiRevision.get(requested_rev, wp._id)
        except (tdb_cassandra.NotFound, WikiBadRevision):
            abort(404)
    else:
        # default to the most recent revision
        display_rev = revs[0]

    doc_html = wikimarkdown(display_rev.content, include_toc=False)
    soup = BeautifulSoup(doc_html.decode('utf-8'))
    toc = generate_table_of_contents(soup, prefix='section')
    self._number_sections(soup)
    self._linkify_headings(soup)

    content = PolicyView(
        body_html=unsafe(soup),
        toc_html=unsafe(toc),
        revs=rev_info,
        display_rev=str(display_rev._id),
    )
    return PolicyPage(
        pagename=pagename,
        content=content,
    ).render()
def POST_message(self, level, logs):
    """Record error reports sent by the web frontend, tagged for statsd."""
    # Whitelist tags to keep the frontend from creating too many keys in statsd
    valid_frontend_log_tags = {"unknown", "jquery-migrate-bad-html"}

    # prevent simple CSRF by requiring a custom header
    if not request.headers.get("X-Loggit"):
        abort(403)

    uid = c.user._id if c.user_is_loggedin else "-"

    # only accept a maximum of 3 entries per request
    for entry in logs[:3]:
        # skip malformed entries missing the required fields
        if "msg" not in entry or "url" not in entry:
            continue

        candidate = entry.get("tag")
        tag = candidate if candidate in valid_frontend_log_tags else "unknown"
        g.stats.simple_event("frontend.error." + tag)

        g.log.warning(
            "[web frontend] %s: %s | U: %s FP: %s UA: %s",
            level, entry["msg"], uid, entry["url"], request.user_agent)

    VRatelimit.ratelimit(rate_user=False, rate_ip=True,
                         prefix="rate_weblog_", seconds=10)
def GET_authorize(self, response_type, client, redirect_uri, scope, state,
                  duration):
    """First step in [OAuth 2.0](http://oauth.net/2/) authentication.

    End users will be prompted for their credentials (username/password)
    and asked if they wish to authorize the application identified by
    the **client_id** parameter with the permissions specified by the
    **scope** parameter.  They are then redirected to the endpoint on
    the client application's side specified by **redirect_uri**.

    If the user granted permission to the application, the response will
    contain a **code** parameter with a temporary authorization code
    which can be exchanged for an access token at
    [/api/v1/access_token](#api_method_access_token).

    **redirect_uri** must match the URI configured for the client in the
    [app preferences](/prefs/apps).  All errors will show a 400 error
    page along with some information on what option was wrong.
    """
    self._check_employee_grants(client, scope)

    # Check redirect URI first; it will ensure client exists
    self._check_redirect_uri(client, redirect_uri)
    self._check_response_type_and_scope(response_type, scope)
    self._check_client_type_and_duration(response_type, client, duration)

    # any validation failure above lands in c.errors
    if c.errors:
        abort(BadRequestError(errors.INVALID_OPTION))

    return OAuth2AuthorizationPage(client, redirect_uri, scope, state,
                                   duration, response_type).render()
def set_multireddit():
    """Resolve the requested multireddit (if any) into c.site.

    Reads the route match to find a multi path; logged-in users hitting
    their own /user/<name>/m/<multi> URLs are redirected to /me/m/.
    """
    routes_dict = request.environ["pylons.routes_dict"]
    if "multipath" in routes_dict:
        multipath = routes_dict["multipath"].lower()
        multi_id = None

        if c.user_is_loggedin and routes_dict.get("my_multi"):
            # /me/m/<multi>: the owner is the current user
            multi_id = "/user/%s/m/%s" % (c.user.name.lower(), multipath)
        elif "username" in routes_dict:
            username = routes_dict["username"].lower()

            if c.user_is_loggedin:
                # redirect /user/foo/m/... to /me/m/... for user foo.
                if username == c.user.name.lower():
                    # trim off multi id
                    url_parts = request.path_qs.split("/")[5:]
                    url_parts.insert(0, "/me/m/%s" % multipath)
                    abort(302, location="/".join(url_parts))

            multi_id = "/user/%s/m/%s" % (username, multipath)

        if multi_id:
            try:
                c.site = LabeledMulti._byID(multi_id)
            except tdb_cassandra.NotFound:
                abort(404)
def POST_message(self, level, logs):
    """Log frontend-reported errors, counting them by whitelisted tag."""
    # Whitelist tags to keep the frontend from creating too many keys in statsd
    valid_frontend_log_tags = {
        'unknown',
        'jquery-migrate-bad-html',
    }

    # prevent simple CSRF by requiring a custom header
    if not request.headers.get('X-Loggit'):
        abort(403)

    uid = c.user._id if c.user_is_loggedin else '-'

    # only accept a maximum of 3 entries per request
    for record in logs[:3]:
        # ignore entries missing the required fields
        if not ('msg' in record and 'url' in record):
            continue

        tag = record.get('tag')
        if tag not in valid_frontend_log_tags:
            tag = 'unknown'
        g.stats.simple_event('frontend.error.' + tag)

        g.log.warning('[web frontend] %s: %s | U: %s FP: %s UA: %s',
                      level, record['msg'], uid, record['url'],
                      request.user_agent)

    VRatelimit.ratelimit(rate_user=False, rate_ip=True,
                         prefix="rate_weblog_", seconds=10)
def GET_crossdomain(self):
    """Serve the Flash cross-domain policy as crossdomain.xml only."""
    # Our middleware is weird and won't let us add a route for just
    # '/crossdomain.xml', so 404 any extension other than xml.
    if request.environ.get('extension', None) == 'xml':
        response.content_type = "text/x-cross-domain-policy"
        return CrossDomain().render(style='xml')
    abort(404)
def abort_if_not_modified(self, last_modified, private=True,
                          max_age=timedelta(0), must_revalidate=True):
    """Check If-Modified-Since and abort(304) if appropriate."""
    # never serve conditional responses from logged-in pages unless the
    # cache explicitly allows it
    if c.user_is_loggedin and not c.allow_loggedin_cache:
        return

    # HTTP timestamps round to nearest second. truncate this value for
    # comparisons.
    last_modified = last_modified.replace(microsecond=0)

    response.headers['last-modified'] = http_utils.http_date_str(last_modified)

    directives = ['private'] if private else []
    directives.append('max-age=%d' % max_age.total_seconds())
    if must_revalidate:
        directives.append('must-revalidate')
    response.headers['cache-control'] = ', '.join(directives)

    since = request.if_modified_since
    if since and since >= last_modified:
        abort(304, 'not modified')
def ratelimit_agent(agent):
    """Throttle the given user agent to one request per second."""
    key = 'rate_agent_' + agent
    if not g.cache.get(key):
        # first hit this second: set the one-second flag and allow through
        g.cache.set(key, 't', time=1)
    else:
        request.environ['retry_after'] = 1
        abort(429)
def ratelimit_agent(agent):
    """Allow a user agent at most one request per second."""
    key = "rate_agent_" + agent
    if not g.cache.get(key):
        # no flag yet for this second: record one and let the request pass
        g.cache.set(key, "t", time=1)
        return
    request.environ["retry_after"] = 1
    abort(429)
def ratelimit_agent(agent, limit=10, slice_size=10):
    """Allow at most `limit` requests per agent per `slice_size` seconds."""
    # the seconds-based key below only distinguishes slices up to a minute
    slice_size = min(slice_size, 60)
    time_slice, retry_after = _get_ratelimit_timeslice(slice_size)
    key = "rate_agent_" + agent + time.strftime("_%S", time_slice)
    # create the counter for this slice if it doesn't exist (add is a no-op
    # when the key is already present)
    g.cache.add(key, 0, time=slice_size + 1)
    hits = g.cache.incr(key)
    if hits > limit:
        request.environ['retry_after'] = retry_after
        abort(429)
def run_sitewide_ratelimits(self):
    """Ratelimit users and add ratelimit headers to the response.

    Headers added are:
        X-Ratelimit-Used: Number of requests used in this period
        X-Ratelimit-Remaining: Number of requests left to use
        X-Ratelimit-Reset: Approximate number of seconds to end of period

    This function only has an effect if one of g.RL_SITEWIDE_ENABLED or
    g.RL_OAUTH_SITEWIDE_ENABLED are set to 'true' in the app
    configuration

    If the ratelimit is exceeded, a 429 response will be sent, unless
    the app configuration has g.ENFORCE_RATELIMIT off.  Headers will be
    sent even on aborted requests.
    """
    if c.cdn_cacheable or not is_api():
        # No ratelimiting or headers for:
        # * Web requests (HTML)
        # * CDN requests (logged out via www.reddit.com)
        return
    elif c.oauth_user and g.RL_OAUTH_SITEWIDE_ENABLED:
        max_reqs = g.RL_OAUTH_MAX_REQS
        period = g.RL_OAUTH_RESET_SECONDS
        # Convert client_id to ascii str for use as memcache key
        client_id = c.oauth2_access_token.client_id.encode("ascii")
        # OAuth2 ratelimits are per user-app combination
        key = 'siterl-oauth-' + c.user._id36 + ":" + client_id
    elif g.RL_SITEWIDE_ENABLED:
        max_reqs = g.RL_MAX_REQS
        period = g.RL_RESET_SECONDS
        # API (non-oauth) limits are per-ip
        key = 'siterl-api-' + request.ip
    else:
        # Not in a context where sitewide ratelimits are on
        return

    period_start, retry_after = _get_ratelimit_timeslice(period)
    key += time.strftime("-%H%M%S", period_start)

    # create a counter for this time slice if absent (no-op otherwise)
    g.ratelimitcache.add(key, 0, time=retry_after + 1)

    # Increment the key to track the current request
    recent_reqs = g.ratelimitcache.incr(key)
    reqs_remaining = max(0, max_reqs - recent_reqs)

    c.ratelimit_headers = {
        "X-Ratelimit-Used": str(recent_reqs),
        "X-Ratelimit-Reset": str(retry_after),
        "X-Ratelimit-Remaining": str(reqs_remaining),
    }

    if reqs_remaining <= 0 and g.ENFORCE_RATELIMIT:
        # For non-abort situations, the headers will be added in post(),
        # to avoid including them in a pagecache
        response.headers.update(c.ratelimit_headers)
        abort(429)
def handle_login(controller, form, responder, user,
                 rem=None, signature=None, **kwargs):
    """Shared login flow: signature/hook/captcha checks, then log in.

    Emits a login_attempt event for every outcome (success or the first
    error found).
    """
    # check captcha before login (if any) since its answer might
    # change once c.user is set.
    captcha_shown = not signature and need_provider_captcha("login")

    def _event(error, captcha_shown=captcha_shown):
        g.events.login_event(
            'login_attempt',
            error_msg=error,
            user_name=request.urlvars.get('url_user'),
            remember_me=rem,
            signature=signature,
            captcha_shown=captcha_shown,
            request=request,
            context=c)

    if signature and not signature.is_valid():
        _event(error="SIGNATURE")
        abort(403)

    hook_error = hooks.get_hook("account.login").call_until_return(
        responder=responder,
        request=request,
        context=c,
    )
    # if any of the hooks returned an error, abort the login. The
    # set_error in this case also needs to exist in the hook.
    if hook_error:
        _event(error=hook_error)
        return

    exempt_ua = (request.user_agent and
                 any(ua in request.user_agent
                     for ua in g.config.get('exempt_login_user_agents', ())))

    if (errors.LOGGED_IN, None) in c.errors:
        if user == c.user or exempt_ua:
            # Allow funky clients to re-login as the current user.
            c.errors.remove((errors.LOGGED_IN, None))
        else:
            _event(error='LOGGED_IN')
            abort(reddit_http_error(409, errors.LOGGED_IN))

    if responder.has_errors("ratelimit", errors.RATELIMIT):
        _event(error='RATELIMIT')
    elif responder.has_errors("passwd", errors.WRONG_PASSWORD):
        _event(error='WRONG_PASSWORD')
    # last but not least, we have to check the captcha
    elif (not signature and
          not g.disable_captcha and
          not valid_provider_captcha(responder, "login")):
        _event(error='BAD_CAPTCHA')
    else:
        controller._login(responder, user, rem)
        _event(error=None)
def abort_with_error(error, code=None):
    """Abort the request with a reddit-formatted HTTP error.

    Uses `code` when given, otherwise the error's own status code;
    raises ValueError if neither is available.
    """
    status = code or error.code
    if not status:
        raise ValueError("Error %r missing status code" % error)
    abort(
        reddit_http_error(
            code=status,
            error_name=error.name,
            explanation=error.message,
            fields=error.fields,
        )
    )
def _check_client_type_and_duration(self, response_type, client, duration):
    """Enforce implicit-grant restrictions for token responses."""
    # the checks below only apply to the implicit grant flow
    if response_type != "token":
        return

    # Prevent "confidential" clients from distributing tokens
    # in a non-confidential manner
    if client.is_confidential():
        abort(BadRequestError(errors.OAUTH2_CONFIDENTIAL_TOKEN))

    # implicit grant -> No refresh tokens allowed
    if duration != "temporary":
        abort(BadRequestError(errors.OAUTH2_NO_REFRESH_TOKENS_ALLOWED))
def pre(self):
    """Per-request setup: force JSON + HTTPS, authenticate the client."""
    # no point serving this endpoint when the databases are read-only
    if g.disallow_db_writes:
        abort(403)

    set_extension(request.environ, "json")
    MinimalController.pre(self)
    require_https()

    # CORS preflight (OPTIONS) requests carry no client credentials
    if request.method != "OPTIONS":
        c.oauth2_client = self._get_client_auth()
def redirect_to_host(hostname, path=None):
    """Redirect to the given host (and path, defaulting to the current one).

    Uses a 307 so the request method is retained.
    """
    u = UrlParser(request.path if path is None else path)
    u.hostname = hostname
    abort(307, location=u.unparse())
def on_validation_error(self, error):
    """Translate a validation error into an aborted HTTP response."""
    if not error.code:
        raise ValueError('Error %r missing status code' % error)

    http_error = reddit_http_error(
        code=error.code,
        error_name=error.name,
        explanation=error.message,
        fields=error.fields,
    )
    abort(http_error)
def _get_client_auth(self):
    """Authenticate the OAuth2 client via HTTP Basic credentials.

    Returns the client on success; aborts with a 401 challenge on any
    failure (missing/bad credentials, unknown client, secret mismatch).
    """
    auth = request.headers.get("Authorization")
    try:
        client_id, client_secret = parse_http_basic(auth)
        client = OAuth2Client.get_token(client_id)
        require(client)
        # constant-time comparison avoids leaking the secret via timing
        require(constant_time_compare(client.secret, client_secret))
    except RequirementException:
        abort(401, headers=[("WWW-Authenticate", 'Basic realm="reddit"')])
    else:
        return client
def ratelimit_agent(agent, limit=10, slice_size=10):
    """Allow at most `limit` requests per agent per `slice_size` seconds."""
    # the "_%S" key suffix below only works for slices up to one minute
    slice_size = min(slice_size, 60)

    slice_index, remainder = map(int, divmod(time.time(), slice_size))
    slice_start = time.gmtime(slice_index * slice_size)
    key = "rate_agent_" + agent + time.strftime("_%S", slice_start)

    # create the per-slice counter if it isn't there yet (no-op otherwise)
    g.cache.add(key, 0, time=slice_size + 1)
    if g.cache.incr(key) > limit:
        request.environ['retry_after'] = slice_size - remainder
        abort(429)
def ratelimit_agent(agent):
    """Allow at most 10 requests per agent in each 10-second slice."""
    SLICE_SIZE = 10

    slice_index, remainder = map(int, divmod(time.time(), SLICE_SIZE))
    slice_start = time.gmtime(slice_index * SLICE_SIZE)
    key = "rate_agent_" + agent + time.strftime("_%S", slice_start)

    # create the per-slice counter if absent, then count this request.
    # NOTE: SLICE_SIZE doubles as the request limit here.
    g.cache.add(key, 0, time=SLICE_SIZE + 1)
    if g.cache.incr(key) > SLICE_SIZE:
        request.environ['retry_after'] = SLICE_SIZE - remainder
        abort(429)
def GET_listing(self, num, after, before, count, is_embed):
    """Render the paginated listing of updates for a live event."""
    reverse = False
    if before:
        # paginating backwards: walk the stream in reverse from `before`
        reverse = True
        after = before

    query = LiveUpdateStream.query([c.liveupdate_event._id],
                                   count=num, reverse=reverse)
    if after:
        query.column_start = after
    builder = LiveUpdateBuilder(query=query, skip=True,
                                reverse=reverse, num=num, count=count)
    listing = pages.LiveUpdateListing(builder)
    wrapped_listing = listing.listing()

    content = pages.LiveUpdateEventPage(
        event=c.liveupdate_event,
        listing=wrapped_listing,
        show_sidebar=not is_embed,
    )

    # preload event/listing JSON for the client-side app
    c.js_preload.set_wrapped(
        "/live/" + c.liveupdate_event._id + "/about.json",
        Wrapped(c.liveupdate_event),
    )
    c.js_preload.set_wrapped(
        "/live/" + c.liveupdate_event._id + ".json",
        wrapped_listing,
    )

    # don't generate a url unless this is the main page of an event
    websocket_url = None
    if c.liveupdate_event.state == "live" and not after and not before:
        websocket_url = websockets.make_url(
            "/live/" + c.liveupdate_event._id, max_age=24 * 60 * 60)

    if not is_embed:
        return pages.LiveUpdatePage(
            content=content,
            websocket_url=websocket_url,
            page_classes=['liveupdate-event'],
        ).render()
    else:
        # ensure we're off the cookie domain before allowing embedding
        if request.host != g.media_domain:
            abort(404)
        c.allow_framing = True

        return pages.LiveUpdateEmbed(
            content=content,
            websocket_url=websocket_url,
            page_classes=['liveupdate-event'],
        ).render()
def search_fail(self, exception):
    """Log a search backend failure and respond 503 with a retry hint."""
    from r2.lib.search import SearchException
    # NOTE(review): this assumes SearchException is a *tuple* of exception
    # classes (tuple + tuple concatenation); if it were a single class this
    # line would raise TypeError -- confirm against r2.lib.search.
    if isinstance(exception, SearchException + (socket.error,)):
        g.log.error("Search Error: %s" % repr(exception))

    errpage = pages.RedditError(_("search failed"),
                                strings.search_failed)
    request.environ['usable_error_content'] = errpage.render()
    # hint clients to back off for a minute before retrying
    request.environ['retry_after'] = 60
    abort(503)
def POST_strike_update(self, form, jquery, update):
    """Mark a live update as stricken and broadcast the change."""
    if form.has_errors("id", errors.NO_THING_ID):
        return

    # only editors or the update's own author may strike it
    is_author = (c.user_is_loggedin and update.author_id == c.user._id)
    if not (c.liveupdate_permissions.allow("edit") or is_author):
        abort(403)

    update.stricken = True
    # write the modified update back into the event's stream
    LiveUpdateStream.add_update(c.liveupdate_event, update)

    _broadcast(type="strike", payload=update._fullname)
def set_subreddit():
    """Resolve the request's subreddit (or multi/domain listing) into c.site."""
    # the r parameter gets added by javascript for POST requests so we
    # can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get("r"))
    domain = request.environ.get("domain")

    # only idempotent requests may be served stale data
    can_stale = request.method.upper() in ("GET", "HEAD")

    c.site = Frontpage
    if not sr_name:
        # check for cnames
        cname = request.environ.get("legacy-cname")
        if cname:
            sr = Subreddit._by_domain(cname) or Frontpage
            domain = g.domain
            if g.domain_prefix:
                domain = ".".join((g.domain_prefix, domain))
            redirect_to("http://%s%s" % (domain, sr.path), _code=301)
    elif sr_name == "r":
        # reddits
        c.site = Sub
    elif "+" in sr_name:
        # multi-style path: /r/a+b+c
        sr_names = sr_name.split("+")
        srs = set(Subreddit._by_name(sr_names, stale=can_stale).values())
        if All in srs:
            c.site = All
        elif Friends in srs:
            c.site = Friends
        else:
            # fake subreddits can't be combined into a multi
            srs = [sr for sr in srs if not isinstance(sr, FakeSubreddit)]
            if len(srs) == 0:
                c.site = MultiReddit([], sr_name)
            elif len(srs) == 1:
                c.site = srs.pop()
            else:
                sr_ids = [sr._id for sr in srs]
                c.site = MultiReddit(sr_ids, sr_name)
    else:
        try:
            c.site = Subreddit._by_name(sr_name, stale=can_stale)
        except NotFound:
            # unknown name: offer a search if it's at least well-formed
            sr_name = chksrname(sr_name)
            if sr_name:
                redirect_to("/reddits/search?q=%s" % sr_name)
            elif not c.error_page and not request.path.startswith("/api/login/"):
                abort(404)

    # if we didn't find a subreddit, check for a domain listing
    if not sr_name and isinstance(c.site, DefaultSR) and domain:
        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
def GET_pixel(self, event, **kwargs):
    """Serve the tracking pixel and record the associated activity event."""
    if request.environ.get("extension") != "png":
        abort(404)

    record_activity(event)

    response.content_type = "image/png"
    # defeat caching so every page view re-requests the pixel
    response.headers["Cache-Control"] = "no-cache, max-age=0"
    response.headers["Pragma"] = "no-cache"
    response.headers["Expires"] = "Thu, 01 Jan 1970 00:00:00 GMT"
    return self._pixel_contents
def cross_domain_handler(self, *args, **kwargs):
    """Dispatch a cross-domain request, hoisting the response into a
    cookie when the client asked for cookie polling."""
    if request.params.get("hoist") != "cookie":
        # normal CORS path
        self.check_cors()
        return fn(self, *args, **kwargs)

    # Cookie polling response
    if not cors_perms["origin_check"](g.origin):
        abort(403)

    name = request.environ["pylons.routes_dict"]["action_name"]
    resp = fn(self, *args, **kwargs)
    c.cookies.add('hoist_%s' % name, ''.join(tup(resp)))
    response.content_type = 'text/html'
    return ""
def GET_pixel(self, event):
    """Serve the activity-tracking pixel for a live event."""
    if request.environ.get("extension") != "png":
        abort(404)

    event_id = event[:50]  # some very simple poor-man's validation
    # anonymized visitor id derived from ip + user agent
    user_id = hashlib.sha1(request.ip + request.user_agent).hexdigest()
    ActiveVisitorsByLiveUpdateEvent.touch(event_id, user_id)

    response.content_type = "image/png"
    # defeat caching so every page view pings the pixel again
    response.headers["Cache-Control"] = "no-cache, max-age=0"
    response.headers["Pragma"] = "no-cache"
    response.headers["Expires"] = "Thu, 01 Jan 1970 00:00:00 GMT"
    return self._pixel_contents
def POST_use_item(self, item_name, target):
    """Consume an item from the user's inventory and apply it to target."""
    try:
        inventory.consume_item(c.user, item_name)
    except inventory.NoSuchItemError:
        abort(400)

    # using any item counts as participating
    c.user.f2p = "participated"
    c.user._commit()

    item = items.get_item(item_name)
    # NOTE(review): the item is consumed *before* the target is validated,
    # so an invalid target still costs the user the item -- confirm whether
    # target validation should happen before consume_item.
    if not item.is_target_valid(target):
        abort(400)
    item.on_use(c.user, target)

    return json.dumps(c.state_changes)
def POST_message(self, level, logs):
    """Record client-side error reports sent by the web frontend.

    Logs up to three entries per request and applies a per-IP
    ratelimit.  Malformed entries are skipped.
    """
    # prevent simple CSRF by requiring a custom header
    if not request.headers.get('X-Loggit'):
        abort(403)

    uid = c.user._id if c.user_is_loggedin else '-'

    # only accept a maximum of 3 entries per request
    for log in logs[:3]:
        # guard against malformed client payloads: the sibling versions of
        # this handler skip entries missing 'msg'/'url' instead of letting
        # log['msg'] raise KeyError (which would 500 the request)
        if 'msg' not in log or 'url' not in log:
            continue
        g.log.warning('[web frontend] %s: %s | U: %s FP: %s UA: %s',
                      level, log['msg'], uid, log['url'],
                      request.user_agent)

    VRatelimit.ratelimit(rate_user=False, rate_ip=True,
                         prefix="rate_weblog_", seconds=10)
def __before__(self):
    """Controller setup for error pages; tolerates setup failures so the
    error page itself can always render."""
    try:
        c.error_page = True
        RedditController.__before__(self)
    except (HTTPMovedPermanently, HTTPFound):
        # ignore an attempt to redirect from an error page
        pass
    except Exception as e:
        handle_awful_failure("ErrorController.__before__: %r" % e)

    # c.error_page is special-cased in a couple places to bypass
    # c.site checks. We shouldn't allow the user to get here other
    # than through `middleware.py:error_mapper`.
    if not request.environ.get('pylons.error_call'):
        abort(403, "direct access to error controller disallowed")
def POST_zendeskreply(self):
    """Mailgun webhook: turn a Zendesk email reply into a modmail message."""
    request_body = request.POST
    recipient = request_body["recipient"]
    sender_email = request_body["sender"]
    from_ = request_body["from"]
    # NOTE(review): subject and body_plain are extracted but never used
    # below -- confirm whether they can be dropped.
    subject = request_body["subject"]
    body_plain = request_body["body-plain"]
    stripped_text = request_body["stripped-text"]
    timestamp = request_body["timestamp"]
    token = request_body["token"]
    signature = request_body["signature"]
    email_id = request_body["Message-Id"]

    if not validate_mailgun_webhook(timestamp, token, signature):
        # per Mailgun docs send a 406 so the message won't be retried
        abort(406, "invalid signature")

    # the reply-to address encodes the id36 of the message replied to
    message_id36 = parse_and_validate_reply_to_address(recipient)

    if not message_id36:
        # per Mailgun docs send a 406 so the message won't be retried
        abort(406, "invalid message")

    parent = Message._byID36(message_id36, data=True)
    to = Account._byID(parent.author_id, data=True)
    sr = Subreddit._byID(parent.sr_id, data=True)

    # drop the boilerplate Zendesk prefix if present
    if stripped_text.startswith(ZENDESK_PREFIX):
        stripped_text = stripped_text[len(ZENDESK_PREFIX):].lstrip()

    # truncate very long bodies instead of rejecting them
    if len(stripped_text) > 10000:
        body = stripped_text[:10000] + "\n\n--snipped--"
    else:
        body = stripped_text

    try:
        markdown_souptest(body)
    except SoupError:
        g.log.warning("bad markdown in modmail email: %s", body)
        abort(406, "invalid body")

    if parent.get_muted_user_in_conversation():
        queue_blocked_muted_email(sr, parent, sender_email, email_id)
        return

    # keep the subject consistent
    message_subject = parent.subject
    if not message_subject.startswith("re: "):
        message_subject = "re: " + message_subject

    # from_ is like '"NAME (GROUP)" <*****@*****.**>'
    match = re.search("\"(?P<name>\w+) [\w ()]*\"", from_)

    # default to replying as the subreddit via the system user, unless
    # the sender maps to a moderator with mail permissions
    from_sr = True
    author = Account.system_user()
    if match and match.group("name") in g.live_config['modmail_account_map']:
        zendesk_name = match.group("name")
        moderator_name = g.live_config['modmail_account_map'][zendesk_name]
        moderator = Account._by_name(moderator_name)
        if sr.is_moderator_with_perms(moderator, "mail"):
            author = moderator
            from_sr = False

    message, inbox_rel = Message._new(
        author=author,
        to=to,
        subject=message_subject,
        body=body,
        ip='0.0.0.0',
        parent=parent,
        sr=sr,
        from_sr=from_sr,
        can_send_email=False,
        sent_via_email=True,
        email_id=email_id,
    )
    message._commit()

    queries.new_message(message, inbox_rel)

    g.stats.simple_event("mailgun.incoming.success")
    g.stats.simple_event("modmail_email.incoming_email")
def _auth_error(self, code, error):
    """Abort with a Bearer WWW-Authenticate challenge naming the error."""
    challenge = 'Bearer realm="reddit", error="%s"' % error
    abort(code, headers=[("WWW-Authenticate", challenge)])
def abort404(self):
    """Terminate the request with a 404 Not Found."""
    abort(404, "not found")
def POST_report_cache_poisoning(
    self,
    report_mac,
    poisoner_name,
    poisoner_id,
    poisoner_canary,
    victim_canary,
    render_time,
    route_name,
    url,
    source,
    cache_policy,
    resp_headers,
):
    """Report an instance of cache poisoning and its details"""
    self.OPTIONS_report_cache_poisoning()

    if c.errors:
        abort(400)

    # prevent simple CSRF by requiring a custom header
    if not request.headers.get('X-Loggit'):
        abort(403)

    # Eh? Why are you reporting this if the canaries are the same?
    if poisoner_canary == victim_canary:
        abort(400)

    # recompute the MAC we originally issued; a mismatch means the report
    # was forged or tampered with
    expected_mac = make_poisoning_report_mac(
        poisoner_canary=poisoner_canary,
        poisoner_name=poisoner_name,
        poisoner_id=poisoner_id,
        cache_policy=cache_policy,
        source=source,
        route_name=route_name,
    )
    if not constant_time_compare(report_mac, expected_mac):
        abort(403)

    if resp_headers:
        try:
            resp_headers = json.loads(resp_headers)
            # Verify this is a JSON map of `header_name => [value, ...]`
            if not isinstance(resp_headers, dict):
                abort(400)
            for hdr_name, hdr_vals in resp_headers.iteritems():
                if not isinstance(hdr_name, basestring):
                    abort(400)
                if not all(isinstance(h, basestring) for h in hdr_vals):
                    abort(400)
        except ValueError:
            abort(400)

    if not resp_headers:
        resp_headers = {}

    poison_info = dict(
        poisoner_name=poisoner_name,
        poisoner_id=str(poisoner_id),
        # Convert the JS timestamp to a standard one
        # NOTE(review): the direction of this conversion (x1000) vs. the
        # comment above is unclear -- confirm the unit the event pipeline
        # expects (JS timestamps are milliseconds).
        render_time=render_time * 1000,
        route_name=route_name,
        url=url,
        source=source,
        cache_policy=cache_policy,
        resp_headers=resp_headers,
    )

    # For immediate feedback when tracking the effects of caching changes
    g.stats.simple_event("cache.poisoning.%s.%s" % (source, cache_policy))

    # For longer-term diagnosing of caching issues
    g.events.cache_poisoning_event(poison_info, request=request, context=c)

    VRatelimit.ratelimit(rate_ip=True, prefix="rate_poison_", seconds=10)

    return self.api_wrapper({})
def POST_edit_campaign(self, form, jquery, link, campaign_id36,
                       start, end, bid, target, priority, location,
                       platform, mobile_os):
    """Create or edit a promotion campaign after a long chain of
    validations (platform, targeting, dates, bid, roadblocks,
    inventory).  Validation failures populate c.errors/form and return
    early without saving."""
    if not link:
        return

    if platform in ('mobile', 'all') and not mobile_os:
        c.errors.add(errors.BAD_PROMO_MOBILE_OS, field='mobile_os')
        form.set_error(errors.BAD_PROMO_MOBILE_OS, 'mobile_os')
        return

    if platform == 'mobile' and priority.cpm:
        c.errors.add(errors.BAD_PROMO_MOBILE_PRIORITY, field='priority')
        form.set_error(errors.BAD_PROMO_MOBILE_PRIORITY, 'priority')
        return

    # only sponsors may run non-desktop campaigns
    if not (c.user_is_sponsor or platform == 'desktop'):
        return abort(403, 'forbidden')

    if platform == 'desktop':
        mobile_os = None

    if not target:
        # run form.has_errors to populate the errors in the response
        form.has_errors('sr', errors.SUBREDDIT_NOEXIST,
                        errors.SUBREDDIT_NOTALLOWED,
                        errors.SUBREDDIT_REQUIRED)
        form.has_errors('collection', errors.COLLECTION_NOEXIST)
        form.has_errors('targeting', errors.INVALID_TARGET)
        return

    if not allowed_location_and_target(location, target):
        return abort(403, 'forbidden')

    cpm = PromotionPrices.get_price(c.user, target, location)

    if (form.has_errors('startdate', errors.BAD_DATE) or
            form.has_errors('enddate', errors.BAD_DATE)):
        return

    min_start, max_start, max_end = promote.get_date_limits(
        link, c.user_is_sponsor)

    if campaign_id36:
        promo_campaign = PromoCampaign._byID36(campaign_id36)
        # the start date of an already-paid, already-running campaign is
        # locked once it has passed the minimum start date
        if (promote.is_promoted(link) and
                promo_campaign.start_date.date() <= min_start and
                start != promo_campaign.start_date and
                promo_campaign.is_paid):
            c.errors.add(errors.START_DATE_CANNOT_CHANGE, field='startdate')
            form.has_errors('startdate', errors.START_DATE_CANNOT_CHANGE)
            return
    elif start.date() < min_start:
        c.errors.add(errors.DATE_TOO_EARLY,
                     msg_params={'day': min_start.strftime("%m/%d/%Y")},
                     field='startdate')
        form.has_errors('startdate', errors.DATE_TOO_EARLY)
        return

    if start.date() > max_start:
        c.errors.add(errors.DATE_TOO_LATE,
                     msg_params={'day': max_start.strftime("%m/%d/%Y")},
                     field='startdate')
        form.has_errors('startdate', errors.DATE_TOO_LATE)
        return

    if end.date() > max_end:
        c.errors.add(errors.DATE_TOO_LATE,
                     msg_params={'day': max_end.strftime("%m/%d/%Y")},
                     field='enddate')
        form.has_errors('enddate', errors.DATE_TOO_LATE)
        return

    if end < start:
        c.errors.add(errors.BAD_DATE_RANGE, field='enddate')
        form.has_errors('enddate', errors.BAD_DATE_RANGE)
        return

    # Limit the number of PromoCampaigns a Link can have
    # Note that the front end should prevent the user from getting
    # this far
    existing_campaigns = list(PromoCampaign._by_link(link._id))
    if len(existing_campaigns) > g.MAX_CAMPAIGNS_PER_LINK:
        c.errors.add(errors.TOO_MANY_CAMPAIGNS,
                     msg_params={'count': g.MAX_CAMPAIGNS_PER_LINK},
                     field='title')
        form.has_errors('title', errors.TOO_MANY_CAMPAIGNS)
        return

    campaign = None
    if campaign_id36:
        try:
            campaign = PromoCampaign._byID36(campaign_id36, data=True)
        except NotFound:
            pass

        # a deleted campaign or one belonging to another link is invalid
        if campaign and (campaign._deleted or link._id != campaign.link_id):
            campaign = None

        if not campaign:
            return abort(404, 'not found')

    if priority.cpm:
        # sponsors have no bid bounds; everyone else is clamped to config
        min_bid = 0 if c.user_is_sponsor else g.min_promote_bid
        max_bid = None if c.user_is_sponsor else g.max_promote_bid

        if bid is None or bid < min_bid or (max_bid and bid > max_bid):
            c.errors.add(errors.BAD_BID, field='bid',
                         msg_params={'min': min_bid,
                                     'max': max_bid or g.max_promote_bid})
            form.has_errors('bid', errors.BAD_BID)
            return

        # you cannot edit the bid of a live ad unless it's a freebie
        if (campaign and
                bid != campaign.bid and
                promote.is_live_promo(link, campaign) and
                not campaign.is_freebie()):
            c.errors.add(errors.BID_LIVE, field='bid')
            form.has_errors('bid', errors.BID_LIVE)
            return
    else:
        bid = 0.   # Set bid to 0 as dummy value

    is_frontpage = (not target.is_collection and
                    target.subreddit_name == Frontpage.name)

    if not target.is_collection and not is_frontpage:
        # targeted to a single subreddit, check roadblock
        sr = target.subreddits_slow[0]
        roadblock = PromotedLinkRoadblock.is_roadblocked(sr, start, end)
        if roadblock and not c.user_is_sponsor:
            msg_params = {"start": roadblock[0].strftime('%m/%d/%Y'),
                          "end": roadblock[1].strftime('%m/%d/%Y')}
            c.errors.add(errors.OVERSOLD, field='sr',
                         msg_params=msg_params)
            form.has_errors('sr', errors.OVERSOLD)
            return

    # Check inventory
    campaign = campaign if campaign_id36 else None
    if not priority.inventory_override:
        oversold = has_oversold_error(form, campaign, start, end, bid, cpm,
                                      target, location)
        if oversold:
            return

    dates = (start, end)
    if campaign:
        promote.edit_campaign(link, campaign, dates, bid, cpm, target,
                              priority, location, platform, mobile_os)
    else:
        campaign = promote.new_campaign(link, dates, bid, cpm, target,
                                        priority, location, platform,
                                        mobile_os)
    rc = RenderableCampaign.from_campaigns(link, campaign)
    jquery.update_campaign(campaign._fullname, rc.render_html())
def require_domain(required_domain):
    """Abort 403 unless the request host is the domain (or a subdomain)."""
    if is_subdomain(request.host, required_domain):
        return
    abort(ForbiddenError(errors.WRONG_DOMAIN))
def POST_delete_campaign(self, form, jquery, l, campaign):
    """Delete a promo campaign after confirming it belongs to the link."""
    belongs_to_link = campaign and l and l._id == campaign.link_id
    if not belongs_to_link:
        return abort(404, 'not found')
    promote.delete_campaign(l, campaign)
def handle_register(controller, form, responder, name, email, password,
                    rem=None, newsletter_subscribe=False,
                    sponsor=False, signature=None, **kwargs):
    """Shared registration flow: validate the form, create the account,
    run post-registration hooks, then log the new user in.

    Emits a register_attempt event for every outcome (success or the
    first error found).
    """
    def _event(error):
        g.events.login_event(
            'register_attempt',
            error_msg=error,
            user_name=request.urlvars.get('url_user'),
            email=request.POST.get('email'),
            remember_me=rem,
            newsletter=newsletter_subscribe,
            signature=signature,
            request=request,
            context=c)

    if signature and not signature.is_valid():
        _event(error="SIGNATURE")
        abort(403)

    if feature.is_enabled('registration_disabled'):
        _event(error="DISABLED")
        abort(403)

    # the first matching validation error wins; only one event is emitted
    if responder.has_errors('user', errors.USERNAME_TOO_SHORT):
        _event(error='USERNAME_TOO_SHORT')
    elif responder.has_errors('user', errors.USERNAME_INVALID_CHARACTERS):
        _event(error='USERNAME_INVALID_CHARACTERS')
    elif responder.has_errors('user', errors.USERNAME_TAKEN_DEL):
        _event(error='USERNAME_TAKEN_DEL')
    elif responder.has_errors('user', errors.USERNAME_TAKEN):
        _event(error='USERNAME_TAKEN')
    elif responder.has_errors('email', errors.BAD_EMAIL):
        _event(error='BAD_EMAIL')
    elif responder.has_errors('passwd', errors.SHORT_PASSWORD):
        _event(error='SHORT_PASSWORD')
    elif responder.has_errors('passwd', errors.BAD_PASSWORD):
        # BAD_PASSWORD is set when SHORT_PASSWORD is set
        _event(error='BAD_PASSWORD')
    elif responder.has_errors('passwd2', errors.BAD_PASSWORD_MATCH):
        _event(error='BAD_PASSWORD_MATCH')
    elif responder.has_errors('ratelimit', errors.RATELIMIT):
        _event(error='RATELIMIT')
    elif (not g.disable_captcha and
            responder.has_errors('captcha', errors.BAD_CAPTCHA)):
        _event(error='BAD_CAPTCHA')
    elif newsletter_subscribe and not email:
        c.errors.add(errors.NEWSLETTER_NO_EMAIL, field="email")
        form.has_errors("email", errors.NEWSLETTER_NO_EMAIL)
        _event(error='NEWSLETTER_NO_EMAIL')
    elif sponsor and not email:
        c.errors.add(errors.SPONSOR_NO_EMAIL, field="email")
        form.has_errors("email", errors.SPONSOR_NO_EMAIL)
        _event(error='SPONSOR_NO_EMAIL')
    else:
        try:
            user = register(name, password, request.ip)
        except AccountExists:
            # racy duplicate registration: surface as USERNAME_TAKEN
            c.errors.add(errors.USERNAME_TAKEN, field="user")
            form.has_errors("user", errors.USERNAME_TAKEN)
            _event(error='USERNAME_TAKEN')
            return

        VRatelimit.ratelimit(rate_ip=True, prefix="rate_register_")

        # anything else we know (email, languages)?
        if email:
            user.set_email(email)
            emailer.verify_email(user)

        user.pref_lang = c.lang
        user._commit()

        amqp.add_item('new_account', user._fullname)

        hooks.get_hook("account.registered").call(user=user)
        reject = hooks.get_hook("account.spotcheck").call(account=user)
        if any(reject):
            _event(error='ACCOUNT_SPOTCHECK')
            return

        if newsletter_subscribe and email:
            try:
                newsletter.add_subscriber(email, source="register")
            except newsletter.NewsletterError as e:
                g.log.warning("Failed to subscribe: %r" % e)

        controller._login(responder, user, rem)

        # CUSTOM: Auto Subscribe All, calling subscribe_defaults() asap
        if feature.is_enabled('auto_subscribe_all'):
            Subreddit.subscribe_defaults(c.user)

        _event(error=None)
def handle_error(self, code, reason=None, **data):
    """Abort the request with a reddit-formatted HTTP error response."""
    abort(reddit_http_error(code, reason, **data))
def set_subreddit():
    """Resolve the request's subreddit (or multi/filtered/domain listing)
    into c.site."""
    #the r parameter gets added by javascript for POST requests so we
    #can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get('r'))
    domain = request.environ.get("domain")

    # only idempotent requests may be served stale data
    can_stale = request.method.upper() in ('GET', 'HEAD')

    c.site = Frontpage
    if not sr_name:
        #check for cnames
        cname = request.environ.get('legacy-cname')
        if cname:
            sr = Subreddit._by_domain(cname) or Frontpage
            domain = g.domain
            if g.domain_prefix:
                domain = ".".join((g.domain_prefix, domain))
            redirect_to('http://%s%s' % (domain, sr.path), _code=301)
    elif sr_name == 'r':
        #reddits
        c.site = Sub
    elif '+' in sr_name:
        # multi-style path: /r/a+b+c
        sr_names = sr_name.split('+')
        srs = Subreddit._by_name(sr_names, stale=can_stale).values()
        if All in srs:
            c.site = All
        elif Friends in srs:
            c.site = Friends
        else:
            # fake subreddits can't be combined into a multi
            srs = [sr for sr in srs if not isinstance(sr, FakeSubreddit)]
            multi_path = '/r/' + sr_name
            if not srs:
                c.site = MultiReddit(multi_path, [])
            elif len(srs) == 1:
                c.site = srs[0]
            else:
                c.site = MultiReddit(multi_path, srs)
    elif '-' in sr_name:
        # filtered listing: /r/all-sub1-sub2 (only "all" supports filters)
        sr_names = sr_name.split('-')
        if not sr_names[0].lower() == All.name.lower():
            redirect_to("/subreddits/search?q=%s" % sr_name)
        srs = Subreddit._by_name(sr_names[1:], stale=can_stale).values()
        srs = [sr for sr in srs if not isinstance(sr, FakeSubreddit)]
        if not srs:
            c.site = All
        else:
            c.site = AllMinus(srs)
    else:
        try:
            c.site = Subreddit._by_name(sr_name, stale=can_stale)
        except NotFound:
            # unknown name: offer a search if it's at least well-formed
            sr_name = chksrname(sr_name)
            if sr_name:
                redirect_to("/subreddits/search?q=%s" % sr_name)
            elif not c.error_page and not request.path.startswith(
                    "/api/login/"):
                abort(404)

    #if we didn't find a subreddit, check for a domain listing
    if not sr_name and isinstance(c.site, DefaultSR) and domain:
        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
def abort403(self):
    """Stop processing and return HTTP 403 ("forbidden") to the client."""
    status, message = 403, "forbidden"
    abort(status, message)
def GET_listing(self, num, after, before, count, is_embed, style_sr):
    """Get a list of updates posted in this thread.

    See also: [/api/live/*thread*/update](#POST_api_live_{thread}_update).
    """
    # preemptively record activity for clients that don't send pixel pings.
    # this won't capture their continued visit, but will at least show a
    # correct activity count for short lived connections.
    record_activity(c.liveupdate_event._id)

    # paginating backwards ("before") is done by reversing the query
    reverse = False
    if before:
        reverse = True
        after = before

    query = LiveUpdateStream.query([c.liveupdate_event._id],
                                   count=num, reverse=reverse)
    if after:
        query.column_start = after
    builder = LiveUpdateBuilder(query=query, skip=True,
                                reverse=reverse, num=num, count=count)
    listing = pages.LiveUpdateListing(builder)
    wrapped_listing = listing.listing()

    # look up whether this user has already reported the event so the
    # report UI can reflect it; anonymous users can't report
    if c.user_is_loggedin:
        report_type = LiveUpdateReportsByAccount.get_report(
            c.user, c.liveupdate_event)
    else:
        report_type = None

    content = pages.LiveUpdateEventApp(
        event=c.liveupdate_event,
        listing=wrapped_listing,
        show_sidebar=not is_embed,
        report_type=report_type,
    )

    # preload the about/listing JSON so the frontend app doesn't have to
    # re-fetch what we already rendered
    c.js_preload.set_wrapped(
        "/live/" + c.liveupdate_event._id + "/about.json",
        Wrapped(c.liveupdate_event),
    )

    c.js_preload.set_wrapped(
        "/live/" + c.liveupdate_event._id + ".json",
        wrapped_listing,
    )

    if not is_embed:
        return pages.LiveUpdateEventAppPage(
            content=content,
            page_classes=['liveupdate-app'],
        ).render()
    else:
        # ensure we're off the cookie domain before allowing embedding
        if request.host != g.media_domain:
            abort(404)
        c.allow_framing = True

        # interstitial redirects and nsfw settings are funky on the media
        # domain. just disable nsfw embeds.
        if c.liveupdate_event.nsfw:
            embed_page = pages.LiveUpdateEventEmbed(
                content=pages.LiveUpdateNSFWEmbed(),
            )
            request.environ["usable_error_content"] = embed_page.render()
            abort(403)

        embed_page = pages.LiveUpdateEventEmbed(
            content=content,
            page_classes=['liveupdate-app'],
        )

        # only allow subreddit styling of the embed when the styling
        # subreddit is not private
        if style_sr and getattr(style_sr, "type", "private") != "private":
            c.can_apply_styles = True
            c.allow_styles = True
            embed_page.subreddit_stylesheet_url = \
                Reddit.get_subreddit_stylesheet_url(style_sr)

        return embed_page.render()
def handle_register(
    controller, form, responder, name, email,
    password, rem=None, newsletter_subscribe=False,
    sponsor=False, signature=None, **kwargs
):
    """Validate a registration attempt and create the account on success.

    Every outcome emits a 'register_attempt' login event carrying the
    error name (or None for success). On success the new user is
    committed, announced over amqp/hooks, optionally subscribed to the
    newsletter, and logged in via controller._login.
    """
    # check captcha before register (if any) since its answer might
    # change once c.user is set.
    captcha_shown = not signature and need_provider_captcha("register")

    def _event(error, captcha_shown=captcha_shown):
        # record the attempt (successful or not) for analytics
        g.events.login_event(
            'register_attempt',
            error_msg=error,
            user_name=request.urlvars.get('url_user'),
            email=request.POST.get('email'),
            remember_me=rem,
            newsletter=newsletter_subscribe,
            captcha_shown=captcha_shown,
            signature=signature,
            request=request,
            context=c)

    # a present-but-invalid client signature is treated as hostile
    if signature and not signature.is_valid():
        _event(error="SIGNATURE")
        abort(403)

    # walk the validators in priority order; only the first matching
    # error is reported for a given attempt
    if responder.has_errors('user', errors.USERNAME_TOO_SHORT):
        _event(error='USERNAME_TOO_SHORT')

    elif responder.has_errors('user', errors.USERNAME_INVALID_CHARACTERS):
        _event(error='USERNAME_INVALID_CHARACTERS')

    elif responder.has_errors('user', errors.USERNAME_TAKEN_DEL):
        _event(error='USERNAME_TAKEN_DEL')

    elif responder.has_errors('user', errors.USERNAME_TAKEN):
        _event(error='USERNAME_TAKEN')

    elif responder.has_errors('email', errors.BAD_EMAIL):
        _event(error='BAD_EMAIL')

    elif responder.has_errors('passwd', errors.SHORT_PASSWORD):
        _event(error='SHORT_PASSWORD')

    elif responder.has_errors('passwd', errors.BAD_PASSWORD):
        # BAD_PASSWORD is set when SHORT_PASSWORD is set
        _event(error='BAD_PASSWORD')

    elif responder.has_errors('passwd2', errors.BAD_PASSWORD_MATCH):
        _event(error='BAD_PASSWORD_MATCH')

    elif responder.has_errors('ratelimit', errors.RATELIMIT):
        _event(error='RATELIMIT')

    elif newsletter_subscribe and not email:
        c.errors.add(errors.NEWSLETTER_NO_EMAIL, field="email")
        form.has_errors("email", errors.NEWSLETTER_NO_EMAIL)
        _event(error='NEWSLETTER_NO_EMAIL')

    elif sponsor and not email:
        c.errors.add(errors.SPONSOR_NO_EMAIL, field="email")
        form.has_errors("email", errors.SPONSOR_NO_EMAIL)
        _event(error='SPONSOR_NO_EMAIL')

    # last but not least, we have to check the captcha
    elif (not signature and not g.disable_captcha and
            not valid_provider_captcha(responder, "register")):
        _event(error='BAD_CAPTCHA')

    else:
        try:
            user = register(name, password, request.ip)
        except AccountExists:
            # register() raced with another request for the same name
            c.errors.add(errors.USERNAME_TAKEN, field="user")
            form.has_errors("user", errors.USERNAME_TAKEN)
            _event(error='USERNAME_TAKEN')
            return

        VRatelimit.ratelimit(rate_ip=True, prefix="rate_register_")

        # anything else we know (email, languages)?
        if email:
            user.set_email(email)
            emailer.verify_email(user)

        user.pref_lang = c.lang

        # flag fresh accounts for the onboarding flow when enabled
        if (is_api("html") and
                feature.is_enabled("new_user_onboarding", user=user)):
            user.has_been_onboarded = False

        user._commit()

        amqp.add_item('new_account', user._fullname)

        hooks.get_hook("account.registered").call(user=user)

        reject = hooks.get_hook("account.spotcheck").call(account=user)
        if any(reject):
            _event(error='ACCOUNT_SPOTCHECK')
            return

        if newsletter_subscribe and email:
            # best-effort: a newsletter failure must not block signup
            try:
                newsletter.add_subscriber(email, source="register")
            except newsletter.NewsletterError as e:
                g.log.warning("Failed to subscribe: %r" % e)

        controller._login(responder, user, rem)

        _event(error=None)
def POST_edit_campaign(self, form, jquery, link, campaign_id36,
                       dates, bid, sr, targeting, priority, location):
    """Create or update a promotion campaign on a promoted link.

    Validates dates, bid, geotargeting, roadblocks and inventory before
    delegating to promote.edit_campaign / promote.new_campaign and
    re-rendering the campaign row in the page.
    """
    if not link:
        return

    start, end = dates or (None, None)

    if location and sr and not c.user_is_sponsor:
        # only sponsors can geotarget on subreddits
        location = None

    # CPM depends on geotargeting granularity; without geotargeting the
    # author's personal self-serve rate applies
    if location and location.metro:
        cpm = g.cpm_selfserve_geotarget_metro.pennies
    elif location:
        cpm = g.cpm_selfserve_geotarget_country.pennies
    else:
        author = Account._byID(link.author_id, data=True)
        cpm = author.cpm_selfserve_pennies

    if (form.has_errors('startdate', errors.BAD_DATE,
                        errors.DATE_TOO_EARLY, errors.DATE_TOO_LATE) or
            form.has_errors('enddate', errors.BAD_DATE,
                            errors.DATE_TOO_EARLY, errors.DATE_TOO_LATE,
                            errors.BAD_DATE_RANGE)):
        return

    # check that start is not so late that authorization hold will expire
    if not c.user_is_sponsor:
        max_start = promote.get_max_startdate()
        if start > max_start:
            c.errors.add(
                errors.DATE_TOO_LATE,
                msg_params={'day': max_start.strftime("%m/%d/%Y")},
                field='startdate')
            form.has_errors('startdate', errors.DATE_TOO_LATE)
            return

    # Limit the number of PromoCampaigns a Link can have
    # Note that the front end should prevent the user from getting
    # this far
    existing_campaigns = list(PromoCampaign._by_link(link._id))
    if len(existing_campaigns) > g.MAX_CAMPAIGNS_PER_LINK:
        c.errors.add(errors.TOO_MANY_CAMPAIGNS,
                     msg_params={'count': g.MAX_CAMPAIGNS_PER_LINK},
                     field='title')
        form.has_errors('title', errors.TOO_MANY_CAMPAIGNS)
        return

    campaign = None
    if campaign_id36:
        try:
            campaign = PromoCampaign._byID36(campaign_id36)
        except NotFound:
            pass
    # refuse to edit a campaign that belongs to a different link
    if campaign and link._id != campaign.link_id:
        return abort(404, 'not found')

    if priority.cpm:
        # sponsors get unbounded bids; everyone else is clamped
        min_bid = 0 if c.user_is_sponsor else g.min_promote_bid
        max_bid = None if c.user_is_sponsor else g.max_promote_bid

        if bid is None or bid < min_bid or (max_bid and bid > max_bid):
            c.errors.add(errors.BAD_BID, field='bid', msg_params={
                'min': min_bid,
                'max': max_bid or g.max_promote_bid,
            })
            form.has_errors('bid', errors.BAD_BID)
            return

        # you cannot edit the bid of a live ad unless it's a freebie
        if (campaign and bid != campaign.bid and
                promote.is_live_promo(link, campaign) and
                not campaign.is_freebie()):
            c.errors.add(errors.BID_LIVE, field='bid')
            form.has_errors('bid', errors.BID_LIVE)
            return
    else:
        bid = 0.  # Set bid to 0 as dummy value

    if targeting == 'one':
        if form.has_errors('sr', errors.SUBREDDIT_NOEXIST,
                           errors.SUBREDDIT_NOTALLOWED,
                           errors.SUBREDDIT_REQUIRED):
            # checking to get the error set in the form, but we can't
            # check for rate-limiting if there's no subreddit
            return

        # sponsors may override a roadblocked subreddit/date range
        roadblock = PromotedLinkRoadblock.is_roadblocked(sr, start, end)
        if roadblock and not c.user_is_sponsor:
            msg_params = {
                "start": roadblock[0].strftime('%m/%d/%Y'),
                "end": roadblock[1].strftime('%m/%d/%Y'),
            }
            c.errors.add(errors.OVERSOLD, field='sr',
                         msg_params=msg_params)
            form.has_errors('sr', errors.OVERSOLD)
            return
    elif targeting == 'none':
        sr = None

    # Check inventory
    campaign = campaign if campaign_id36 else None
    if not priority.inventory_override:
        oversold = has_oversold_error(form, campaign, start, end, bid,
                                      cpm, sr, location)
        if oversold:
            return

    if campaign:
        promote.edit_campaign(link, campaign, dates, bid, cpm, sr,
                              priority, location)
    else:
        campaign = promote.new_campaign(link, dates, bid, cpm, sr,
                                        priority, location)
    rc = RenderableCampaign.from_campaigns(link, campaign)
    jquery.update_campaign(campaign._fullname, rc.render_html())
def set_subreddit():
    """Resolve the request's subreddit (or multi/domain listing) into c.site.

    Newer variant: supports 'all-…' and 'mod-…' exclusion syntax via
    AllMinus/ModMinus, and redirects internationalized domain listings
    to their IDNA-encoded form.
    """
    # the r parameter gets added by javascript for POST requests so we
    # can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get('r'))
    domain = request.environ.get("domain")
    # only GET/HEAD requests are allowed to be served stale subreddit data
    can_stale = request.method.upper() in ('GET', 'HEAD')

    # default until a more specific site is found below
    c.site = Frontpage
    if not sr_name:
        # check for cnames: legacy custom domains get a permanent
        # redirect to the canonical subreddit path on our own domain
        cname = request.environ.get('legacy-cname')
        if cname:
            sr = Subreddit._by_domain(cname) or Frontpage
            domain = g.domain
            if g.domain_prefix:
                domain = ".".join((g.domain_prefix, domain))
            redirect_to('http://%s%s' % (domain, sr.path), _code=301)
    elif sr_name == 'r':
        # /r/r is the listing of all reddits
        c.site = Sub
    elif '+' in sr_name:
        # '+'-joined names form a multireddit; All/Friends take priority
        # if present anywhere in the list
        sr_names = sr_name.split('+')
        srs = Subreddit._by_name(sr_names, stale=can_stale).values()
        if All in srs:
            c.site = All
        elif Friends in srs:
            c.site = Friends
        else:
            # fake subreddits cannot be combined into a multi
            srs = [sr for sr in srs if not isinstance(sr, FakeSubreddit)]
            multi_path = '/r/' + sr_name
            if not srs:
                c.site = MultiReddit(multi_path, [])
            elif len(srs) == 1:
                c.site = srs[0]
            else:
                c.site = MultiReddit(multi_path, srs)
    elif '-' in sr_name:
        # 'base-foo-bar': only All and Mod support exclusion; anything
        # else with a '-' is treated as a search query
        sr_names = sr_name.split('-')
        # NOTE(review): exclude_sr_names is unused; the exclusions are
        # read back out of `srs` after popping the base name below
        base_sr_name, exclude_sr_names = sr_names[0], sr_names[1:]
        srs = Subreddit._by_name(sr_names, stale=can_stale)
        base_sr = srs.pop(base_sr_name, None)
        exclude_srs = [
            sr for sr in srs.itervalues()
            if not isinstance(sr, FakeSubreddit)
        ]

        if base_sr == All:
            if exclude_srs:
                c.site = AllMinus(exclude_srs)
            else:
                c.site = All
        elif base_sr == Mod:
            if exclude_srs:
                c.site = ModMinus(exclude_srs)
            else:
                c.site = Mod
        else:
            redirect_to("/subreddits/search?q=%s" % sr_name)
    else:
        try:
            c.site = Subreddit._by_name(sr_name, stale=can_stale)
        except NotFound:
            # normalize the name; if it is still plausible offer a
            # search, otherwise 404 (except on error pages and the
            # login API, which must keep working)
            sr_name = chksrname(sr_name)
            if sr_name:
                redirect_to("/subreddits/search?q=%s" % sr_name)
            elif not c.error_page and not request.path.startswith(
                    "/api/login/"):
                abort(404)

    # if we didn't find a subreddit, check for a domain listing
    if not sr_name and isinstance(c.site, DefaultSR) and domain:
        # Redirect IDN to their IDNA name if necessary
        try:
            idna = _force_unicode(domain).encode("idna")
            if idna != domain:
                redirect_to("/domain/%s%s" % (
                    idna, request.environ["PATH_INFO"]))
        except UnicodeError:
            domain = ''  # Ensure valid_ascii_domain fails
        if not c.error_page and not valid_ascii_domain.match(domain):
            abort(404)
        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
def _abort_oauth_error(self, error):
    """Count the OAuth2 failure in statsd, then abort with a 400 response."""
    event_name = 'oauth2.errors.%s' % error
    g.stats.simple_event(event_name)
    abort(BadRequestError(error))
def POST_update_pay(self, form, jquery, link, campaign, customer_id,
                    pay_id, edit, address, creditcard):
    """Save/update a payment profile and authorize a campaign's charge.

    Validates inventory and campaign dates, optionally creates or edits
    the authorize.net payment profile, then attempts the authorization
    hold; reports failures back through the form status text.
    """
    if not g.authorizenetapi:
        return

    if not link or not campaign or link._id != campaign.link_id:
        return abort(404, 'not found')

    # Check inventory
    if campaign_has_oversold_error(form, campaign):
        return

    # check the campaign dates are still valid (user may have created
    # the campaign a few days ago)
    min_start, max_start, max_end = promote.get_date_limits(
        link, c.user_is_sponsor)

    if campaign.start_date.date() > max_start:
        msg = _("please change campaign start date to %(date)s or earlier")
        date = format_date(max_start, format="short", locale=c.locale)
        msg %= {'date': date}
        form.set_text(".status", msg)
        return

    if campaign.start_date.date() < min_start:
        msg = _("please change campaign start date to %(date)s or later")
        date = format_date(min_start, format="short", locale=c.locale)
        msg %= {'date': date}
        form.set_text(".status", msg)
        return

    # no existing profile id means this is a brand new payment method
    new_payment = not pay_id

    address_modified = new_payment or edit
    if address_modified:
        address_fields = ["firstName", "lastName", "company", "address",
                          "city", "state", "zip", "country", "phoneNumber"]
        card_fields = ["cardNumber", "expirationDate", "cardCode"]

        if (form.has_errors(address_fields, errors.BAD_ADDRESS) or
                form.has_errors(card_fields, errors.BAD_CARD)):
            return

        pay_id = edit_profile(c.user, address, creditcard, pay_id)

        if pay_id:
            promote.new_payment_method(user=c.user,
                                       ip=request.ip,
                                       address=address,
                                       link=link)

    if pay_id:
        success, reason = promote.auth_campaign(link, campaign, c.user,
                                                pay_id)

        if success:
            hooks.get_hook("promote.campaign_paid").call(
                link=link, campaign=campaign)

            # reusing a saved profile: fetch its billing address from
            # authorize.net for the payment record
            if not address and g.authorizenetapi:
                profiles = get_account_info(c.user).paymentProfiles
                profile = {p.customerPaymentProfileId: p
                           for p in profiles}[pay_id]
                address = profile.billTo

            promote.successful_payment(link, campaign, request.ip, address)

            jquery.payment_redirect(promote.promo_edit_url(link),
                                    new_payment, campaign.bid)
            return
        else:
            promote.failed_payment_method(c.user, link)
            msg = reason or _("failed to authenticate card. sorry.")
            form.set_text(".status", msg)
    else:
        # profile creation/edit itself failed
        promote.failed_payment_method(c.user, link)
        form.set_text(".status", _("failed to authenticate card. sorry."))
def _check_redirect_uri(self, client, redirect_uri): if not redirect_uri or not client or redirect_uri != client.redirect_uri: abort(ForbiddenError(errors.OAUTH2_INVALID_REDIRECT_URI))
def ratelimit_throttled():
    """Reject (HTTP 429) requests from IPs currently on the throttle list."""
    client_ip = request.ip.strip()
    if is_throttled(client_ip):
        abort(429)
def POST_edit_campaign(self, form, jquery, link, campaign_id36,
                       dates, bid, target, priority, location):
    """Create or update a campaign with subreddit/collection/frontpage
    targeting.

    Validates target, location permissions, dates, bid, roadblocks and
    inventory before delegating to promote.edit_campaign /
    promote.new_campaign and re-rendering the campaign row.
    """
    if not link:
        return

    if not target:
        # run form.has_errors to populate the errors in the response
        form.has_errors('sr', errors.SUBREDDIT_NOEXIST,
                        errors.SUBREDDIT_NOTALLOWED,
                        errors.SUBREDDIT_REQUIRED)
        form.has_errors('collection', errors.COLLECTION_NOEXIST)
        form.has_errors('targeting', errors.INVALID_TARGET)
        return

    start, end = dates or (None, None)

    is_frontpage = (not target.is_collection and
                    target.subreddit_name == Frontpage.name)

    if location:
        if c.user_is_sponsor:
            non_cpm_collection = target.is_collection and not priority.cpm
            is_subreddit = not target.is_collection

            if not (is_frontpage or non_cpm_collection or is_subreddit):
                # sponsors can location target the frontpage, collections
                # at non-cpm priority, or subreddits
                return abort(403, 'forbidden')
        else:
            if not is_frontpage:
                # regular users can only location target the frontpage
                return abort(403, 'forbidden')

    # CPM selection: metro geotarget > special collection price >
    # collection/frontpage flat rate > author's self-serve rate
    if location and location.metro:
        cpm = g.cpm_selfserve_geotarget_metro.pennies
    elif (target.is_collection and
            target.collection.name == "technology buffs"):
        # special price override -- technology collection is more expensive
        author = Account._byID(link.author_id, data=True)
        cpm = author.cpm_selfserve_pennies
    elif target.is_collection or is_frontpage:
        cpm = g.cpm_selfserve_collection.pennies
    else:
        author = Account._byID(link.author_id, data=True)
        cpm = author.cpm_selfserve_pennies

    if (form.has_errors('startdate', errors.BAD_DATE,
                        errors.DATE_TOO_EARLY, errors.DATE_TOO_LATE) or
            form.has_errors('enddate', errors.BAD_DATE,
                            errors.DATE_TOO_EARLY, errors.DATE_TOO_LATE,
                            errors.BAD_DATE_RANGE)):
        return

    # check that start is not so late that authorization hold will expire
    if not c.user_is_sponsor:
        max_start = promote.get_max_startdate()
        if start > max_start:
            c.errors.add(errors.DATE_TOO_LATE,
                         msg_params={'day': max_start.strftime("%m/%d/%Y")},
                         field='startdate')
            form.has_errors('startdate', errors.DATE_TOO_LATE)
            return

    # Limit the number of PromoCampaigns a Link can have
    # Note that the front end should prevent the user from getting
    # this far
    existing_campaigns = list(PromoCampaign._by_link(link._id))
    if len(existing_campaigns) > g.MAX_CAMPAIGNS_PER_LINK:
        c.errors.add(errors.TOO_MANY_CAMPAIGNS,
                     msg_params={'count': g.MAX_CAMPAIGNS_PER_LINK},
                     field='title')
        form.has_errors('title', errors.TOO_MANY_CAMPAIGNS)
        return

    campaign = None
    if campaign_id36:
        try:
            campaign = PromoCampaign._byID36(campaign_id36)
        except NotFound:
            pass
    # refuse to edit a campaign that belongs to a different link
    if campaign and link._id != campaign.link_id:
        return abort(404, 'not found')

    if priority.cpm:
        # sponsors get unbounded bids; everyone else is clamped
        min_bid = 0 if c.user_is_sponsor else g.min_promote_bid
        max_bid = None if c.user_is_sponsor else g.max_promote_bid

        if bid is None or bid < min_bid or (max_bid and bid > max_bid):
            c.errors.add(errors.BAD_BID, field='bid',
                         msg_params={'min': min_bid,
                                     'max': max_bid or g.max_promote_bid})
            form.has_errors('bid', errors.BAD_BID)
            return

        # you cannot edit the bid of a live ad unless it's a freebie
        if (campaign and bid != campaign.bid and
                promote.is_live_promo(link, campaign) and
                not campaign.is_freebie()):
            c.errors.add(errors.BID_LIVE, field='bid')
            form.has_errors('bid', errors.BID_LIVE)
            return
    else:
        bid = 0.  # Set bid to 0 as dummy value

    if not target.is_collection and not is_frontpage:
        # targeted to a single subreddit, check roadblock
        sr = target.subreddits_slow[0]
        roadblock = PromotedLinkRoadblock.is_roadblocked(sr, start, end)
        if roadblock and not c.user_is_sponsor:
            msg_params = {"start": roadblock[0].strftime('%m/%d/%Y'),
                          "end": roadblock[1].strftime('%m/%d/%Y')}
            c.errors.add(errors.OVERSOLD, field='sr',
                         msg_params=msg_params)
            form.has_errors('sr', errors.OVERSOLD)
            return

    # Check inventory
    campaign = campaign if campaign_id36 else None
    if not priority.inventory_override:
        oversold = has_oversold_error(form, campaign, start, end, bid,
                                      cpm, target, location)
        if oversold:
            return

    if campaign:
        promote.edit_campaign(link, campaign, dates, bid, cpm, target,
                              priority, location)
    else:
        campaign = promote.new_campaign(link, dates, bid, cpm, target,
                                        priority, location)
    rc = RenderableCampaign.from_campaigns(link, campaign)
    jquery.update_campaign(campaign._fullname, rc.render_html())
def require_https():
    """Abort with HTTPS_REQUIRED unless the request arrived over a secure
    connection."""
    if c.secure:
        return
    abort(ForbiddenError(errors.HTTPS_REQUIRED))
def process_response(cls):
    """Parse a Stripe webhook request body into a (status, Webhook) pair.

    Returns ('dummy', None) for events that should be ignored, (status,
    None) for no-op event types, and raises ValueError for statuses we
    don't recognize (or charges missing a buyer, subject to the
    replication-lag grace period below).
    """
    event_dict = json.loads(request.body)
    event = stripe.Event.construct_from(event_dict, g.STRIPE_SECRET_KEY)
    status = event.type

    if status == 'invoice.created':
        # sent 1 hr before a subscription is charged or immediately for
        # a new subscription
        invoice = event.data.object
        customer_id = invoice.customer
        account = account_from_stripe_customer_id(customer_id)
        # if the charge hasn't been attempted (meaning this is 1 hr before
        # the charge) check that the account can receive the gold
        # NOTE(review): the `(account and account._banned)` clause also
        # fires for banned accounts, not just missing ones, despite the
        # error message below — confirm intent before simplifying
        if (not invoice.attempted and
                (not account or (account and account._banned))):
            # there's no associated account - delete the subscription
            # to cancel the charge
            g.log.error('no account for stripe invoice: %s', invoice)
            try:
                customer = stripe.Customer.retrieve(customer_id)
                customer.delete()
            except stripe.InvalidRequestError:
                pass
    elif status == 'invoice.payment_failed':
        invoice = event.data.object
        customer_id = invoice.customer
        buyer = account_from_stripe_customer_id(customer_id)
        webhook = Webhook(subscr_id=customer_id, buyer=buyer)
        return status, webhook

    event_type = cls.event_type_mappings.get(status)
    if not event_type:
        raise ValueError('Stripe: unrecognized status %s' % status)
    elif event_type == 'noop':
        return status, None

    charge = event.data.object
    description = charge.description
    invoice_id = charge.invoice
    transaction_id = 'S%s' % charge.id
    pennies = charge.amount
    months, days = months_and_days_from_pennies(pennies)

    if status == 'charge.failed' and invoice_id:
        # we'll get an additional failure notification event of
        # "invoice.payment_failed", don't double notify
        return 'dummy', None
    elif status == 'charge.failed' and not description:
        # create_customer can POST successfully but fail to create a
        # customer because the card is declined. This will trigger a
        # 'charge.failed' notification but without description so we can't
        # do anything with it
        return 'dummy', None
    elif invoice_id:
        # subscription charge - special handling
        customer_id = charge.customer
        buyer = account_from_stripe_customer_id(customer_id)
        if not buyer:
            charge_date = datetime.fromtimestamp(charge.created, tz=g.tz)

            # don't raise exception if charge date is within the past hour
            # db replication lag may cause the account lookup to fail
            if charge_date < timeago('1 hour'):
                raise ValueError('no buyer for charge: %s' % charge.id)
            else:
                abort(404, "not found")
        webhook = Webhook(transaction_id=transaction_id,
                          subscr_id=customer_id, pennies=pennies,
                          months=months, goldtype='autorenew',
                          buyer=buyer)
        return status, webhook
    else:
        # one-off gold purchase: description encodes "passthrough-buyer"
        try:
            passthrough, buyer_name = description.split('-', 1)
        except (AttributeError, ValueError):
            g.log.error('stripe_error on charge: %s', charge)
            raise
        webhook = Webhook(passthrough=passthrough,
                          transaction_id=transaction_id,
                          pennies=pennies, months=months)
        return status, webhook