Code example #1
class WebLogController(RedditController):
    on_validation_error = staticmethod(abort_with_error)

    @validate(
        VRatelimit(rate_user=False, rate_ip=True, prefix='rate_weblog_'),
        level=VOneOf('level', ('error',)),
        logs=VValidatedJSON('logs',
            VValidatedJSON.ArrayOf(VValidatedJSON.Object({
                'msg': VPrintable('msg', max_length=256),
                'url': VPrintable('url', max_length=256),
            }))
        ),
    )
    def POST_message(self, level, logs):
        # prevent simple CSRF by requiring a custom header
        if not request.headers.get('X-Loggit'):
            abort(403)

        uid = c.user._id if c.user_is_loggedin else '-'

        # only accept a maximum of 3 entries per request
        for log in logs[:3]:
            g.log.warning('[web frontend] %s: %s | U: %s FP: %s UA: %s',
                          level, log['msg'], uid, log['url'],
                          request.user_agent)

        VRatelimit.ratelimit(rate_user=False, rate_ip=True,
                             prefix="rate_weblog_", seconds=10)
Code example #2
File: web.py Project: znanl/reddit
class WebLogController(RedditController):
    on_validation_error = staticmethod(abort_with_error)

    @csrf_exempt
    @validate(
        VRatelimit(rate_user=False, rate_ip=True, prefix='rate_weblog_'),
        level=VOneOf('level', ('error', )),
        logs=VValidatedJSON(
            'logs',
            VValidatedJSON.ArrayOf(
                VValidatedJSON.PartialObject({
                    'msg': VPrintable('msg', max_length=256),
                    'url': VPrintable('url', max_length=256),
                    'tag': VPrintable('tag', max_length=32),
                }))),
    )
    def POST_message(self, level, logs):
        # Whitelist tags to keep the frontend from creating too many keys in statsd
        valid_frontend_log_tags = {
            'unknown',
            'jquery-migrate-bad-html',
        }

        # prevent simple CSRF by requiring a custom header
        if not request.headers.get('X-Loggit'):
            abort(403)

        uid = c.user._id if c.user_is_loggedin else '-'

        # only accept a maximum of 3 entries per request
        for log in logs[:3]:
            if 'msg' not in log or 'url' not in log:
                continue

            tag = 'unknown'

            if log.get('tag') in valid_frontend_log_tags:
                tag = log['tag']

            g.stats.simple_event('frontend.error.' + tag)

            g.log.warning('[web frontend] %s: %s | U: %s FP: %s UA: %s', level,
                          log['msg'], uid, log['url'], request.user_agent)

        VRatelimit.ratelimit(rate_user=False,
                             rate_ip=True,
                             prefix="rate_weblog_",
                             seconds=10)
Code example #3
File: web.py Project: vivekbarsagadey/reddit
class WebLogController(RedditController):
    on_validation_error = staticmethod(abort_with_error)

    @csrf_exempt
    @validate(
        VRatelimit(rate_user=False, rate_ip=True, prefix='rate_weblog_'),
        level=VOneOf('level', ('error', )),
        logs=VValidatedJSON(
            'logs',
            VValidatedJSON.ArrayOf(
                VValidatedJSON.PartialObject({
                    'msg': VPrintable('msg', max_length=256),
                    'url': VPrintable('url', max_length=256),
                    'tag': VPrintable('tag', max_length=32),
                }))),
    )
    def POST_message(self, level, logs):
        # Whitelist tags to keep the frontend from creating too many keys in statsd
        valid_frontend_log_tags = {
            'unknown',
            'jquery-migrate-bad-html',
        }

        # prevent simple CSRF by requiring a custom header
        if not request.headers.get('X-Loggit'):
            abort(403)

        uid = c.user._id if c.user_is_loggedin else '-'

        # only accept a maximum of 3 entries per request
        for log in logs[:3]:
            if 'msg' not in log or 'url' not in log:
                continue

            tag = 'unknown'

            if log.get('tag') in valid_frontend_log_tags:
                tag = log['tag']

            g.stats.simple_event('frontend.error.' + tag)

            g.log.warning('[web frontend] %s: %s | U: %s FP: %s UA: %s', level,
                          log['msg'], uid, log['url'], request.user_agent)

        VRatelimit.ratelimit(rate_user=False,
                             rate_ip=True,
                             prefix="rate_weblog_",
                             seconds=10)

    def OPTIONS_report_cache_poisoning(self):
        """Send CORS headers for cache poisoning reports."""
        if "Origin" not in request.headers:
            return
        origin = request.headers["Origin"]
        parsed_origin = UrlParser(origin)
        if not is_subdomain(parsed_origin.hostname, g.domain):
            return
        response.headers["Access-Control-Allow-Origin"] = origin
        response.headers["Access-Control-Allow-Methods"] = "POST"
        response.headers["Access-Control-Allow-Headers"] = \
            "Authorization, X-Loggit, "
        response.headers["Access-Control-Allow-Credentials"] = "false"
        response.headers['Access-Control-Expose-Headers'] = \
            self.COMMON_REDDIT_HEADERS

    @csrf_exempt
    @validate(
        VRatelimit(rate_user=False, rate_ip=True, prefix='rate_poison_'),
        report_mac=VPrintable('report_mac', 255),
        poisoner_name=VPrintable('poisoner_name', 255),
        poisoner_id=VInt('poisoner_id'),
        poisoner_canary=VPrintable('poisoner_canary', 2, min_length=2),
        victim_canary=VPrintable('victim_canary', 2, min_length=2),
        render_time=VInt('render_time'),
        route_name=VPrintable('route_name', 255),
        url=VPrintable('url', 2048),
        # To differentiate between web and mweb in the future
        source=VOneOf('source', ('web', 'mweb')),
        cache_policy=VOneOf(
            'cache_policy',
            ('loggedin_www', 'loggedin_www_new', 'loggedin_mweb')),
        # JSON-encoded response headers from when our script re-requested
        # the poisoned page
        resp_headers=nop('resp_headers'),
    )
    def POST_report_cache_poisoning(
        self,
        report_mac,
        poisoner_name,
        poisoner_id,
        poisoner_canary,
        victim_canary,
        render_time,
        route_name,
        url,
        source,
        cache_policy,
        resp_headers,
    ):
        """Report an instance of cache poisoning and its details"""

        self.OPTIONS_report_cache_poisoning()

        if c.errors:
            abort(400)

        # prevent simple CSRF by requiring a custom header
        if not request.headers.get('X-Loggit'):
            abort(403)

        # Eh? Why are you reporting this if the canaries are the same?
        if poisoner_canary == victim_canary:
            abort(400)

        expected_mac = make_poisoning_report_mac(
            poisoner_canary=poisoner_canary,
            poisoner_name=poisoner_name,
            poisoner_id=poisoner_id,
            cache_policy=cache_policy,
            source=source,
            route_name=route_name,
        )
        if not constant_time_compare(report_mac, expected_mac):
            abort(403)

        if resp_headers:
            try:
                resp_headers = json.loads(resp_headers)
                # Verify this is a JSON map of `header_name => [value, ...]`
                if not isinstance(resp_headers, dict):
                    abort(400)
                for hdr_name, hdr_vals in resp_headers.iteritems():
                    if not isinstance(hdr_name, basestring):
                        abort(400)
                    if not all(isinstance(h, basestring) for h in hdr_vals):
                        abort(400)
            except ValueError:
                abort(400)

        if not resp_headers:
            resp_headers = {}

        poison_info = dict(
            poisoner_name=poisoner_name,
            poisoner_id=str(poisoner_id),
            # Convert the JS timestamp to a standard one
            render_time=render_time * 1000,
            route_name=route_name,
            url=url,
            source=source,
            cache_policy=cache_policy,
            resp_headers=resp_headers,
        )

        # For immediate feedback when tracking the effects of caching changes
        g.stats.simple_event("cache.poisoning.%s.%s" % (source, cache_policy))
        # For longer-term diagnosing of caching issues
        g.events.cache_poisoning_event(poison_info, request=request, context=c)

        VRatelimit.ratelimit(rate_ip=True, prefix="rate_poison_", seconds=10)

        return self.api_wrapper({})
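
When resp_headers is supplied, POST_report_cache_poisoning above only accepts a JSON object mapping each header name to a list of string values, as enforced by the isinstance checks before the report is recorded. A sketch of a value that would pass that check; the header names and values are placeholders:

import json

resp_headers_json = json.dumps({
    'Cache-Control': ['private, s-maxage=0'],
    'Content-Type': ['text/html; charset=UTF-8'],
})
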
Code example #4
from r2.lib.jsontemplates import (
    LabeledMultiJsonTemplate,
    LabeledMultiDescriptionJsonTemplate,
)
from r2.lib.errors import errors, RedditError

multi_sr_data_json_spec = VValidatedJSON.Object({
    'name': VSubredditName('name', allow_language_srs=True),
})

multi_json_spec = VValidatedJSON.Object({
    'visibility': VOneOf('visibility', ('private', 'public')),
    'subreddits': VValidatedJSON.ArrayOf(multi_sr_data_json_spec),
})

multi_description_json_spec = VValidatedJSON.Object({
    'body_md': VMarkdownLength('body_md', max_length=10000, empty_error=None),
})


class MultiApiController(RedditController):
    on_validation_error = staticmethod(abort_with_error)

    def pre(self):
        set_extension(request.environ, "json")
        RedditController.pre(self)
Code example #5
File: multi.py Project: yangzxstar/reddit
from r2.lib.pages.things import wrap_things
from r2.lib.jsontemplates import (
    LabeledMultiJsonTemplate,
    LabeledMultiDescriptionJsonTemplate,
)
from r2.lib.errors import errors, RedditError


multi_sr_data_json_spec = VValidatedJSON.Object({
    'name': VSubredditName('name', allow_language_srs=True),
})


multi_json_spec = VValidatedJSON.Object({
    'visibility': VOneOf('visibility', ('private', 'public')),
    'subreddits': VValidatedJSON.ArrayOf(multi_sr_data_json_spec),
})


multi_description_json_spec = VValidatedJSON.Object({
    'body_md': VMarkdownLength('body_md', max_length=10000, empty_error=None),
})


class MultiApiController(RedditController):
    on_validation_error = staticmethod(abort_with_error)

    def pre(self):
        set_extension(request.environ, "json")
        RedditController.pre(self)
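
multi_json_spec above describes a JSON object with a 'visibility' value of either 'private' or 'public' and a 'subreddits' array whose entries each name a subreddit. A sketch of a document matching that spec; the subreddit names are placeholders:

multi_payload = {
    'visibility': 'private',
    'subreddits': [
        {'name': 'pics'},
        {'name': 'programming'},
    ],
}
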
Code example #6
class WebLogController(RedditController):
    on_validation_error = staticmethod(abort_with_error)

    @csrf_exempt
    @validate(
        VRatelimit(rate_user=False, rate_ip=True, prefix='rate_weblog_'),
        level=VOneOf('level', ('error', )),
        logs=VValidatedJSON(
            'logs',
            VValidatedJSON.ArrayOf(
                VValidatedJSON.PartialObject({
                    'msg': VPrintable('msg', max_length=256),
                    'url': VPrintable('url', max_length=256),
                    'tag': VPrintable('tag', max_length=32),
                }))),
    )
    def POST_message(self, level, logs):
        # Whitelist tags to keep the frontend from creating too many keys in statsd
        valid_frontend_log_tags = {
            'unknown',
            'jquery-migrate-bad-html',
        }

        # prevent simple CSRF by requiring a custom header
        if not request.headers.get('X-Loggit'):
            abort(403)

        uid = c.user._id if c.user_is_loggedin else '-'

        # only accept a maximum of 3 entries per request
        for log in logs[:3]:
            if 'msg' not in log or 'url' not in log:
                continue

            tag = 'unknown'

            if log.get('tag') in valid_frontend_log_tags:
                tag = log['tag']

            g.stats.simple_event('frontend.error.' + tag)

            g.log.warning('[web frontend] %s: %s | U: %s FP: %s UA: %s', level,
                          log['msg'], uid, log['url'], request.user_agent)

        VRatelimit.ratelimit(rate_user=False,
                             rate_ip=True,
                             prefix="rate_weblog_",
                             seconds=10)

    @csrf_exempt
    @validate(
        # set default to invalid number so we can ignore it later.
        dns_timing=VFloat('dnsTiming', min=0, num_default=-1),
        tcp_timing=VFloat('tcpTiming', min=0, num_default=-1),
        request_timing=VFloat('requestTiming', min=0, num_default=-1),
        response_timing=VFloat('responseTiming', min=0, num_default=-1),
        dom_loading_timing=VFloat('domLoadingTiming', min=0, num_default=-1),
        dom_interactive_timing=VFloat('domInteractiveTiming',
                                      min=0,
                                      num_default=-1),
        dom_content_loaded_timing=VFloat('domContentLoadedTiming',
                                         min=0,
                                         num_default=-1),
        action_name=VPrintable('actionName', max_length=256),
        verification=VPrintable('verification', max_length=256),
    )
    def POST_timings(self, action_name, verification, **kwargs):
        lookup = {
            'dns_timing': 'dns',
            'tcp_timing': 'tcp',
            'request_timing': 'request',
            'response_timing': 'response',
            'dom_loading_timing': 'dom_loading',
            'dom_interactive_timing': 'dom_interactive',
            'dom_content_loaded_timing': 'dom_content_loaded',
        }

        if not (action_name and verification):
            abort(422)

        expected_mac = hmac.new(g.secrets["action_name"], action_name,
                                hashlib.sha1).hexdigest()

        if not constant_time_compare(verification, expected_mac):
            abort(422)

        # action_name comes in the format 'controller.METHOD_action'
        stat_tpl = 'service_time.web.{}.frontend'.format(action_name)
        stat_aggregate = 'service_time.web.frontend'

        for key, name in lookup.iteritems():
            val = kwargs[key]
            if val >= 0:
                g.stats.simple_timing(stat_tpl + '.' + name, val)
                g.stats.simple_timing(stat_aggregate + '.' + name, val)

        abort(204)
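
POST_timings above only accepts reports whose 'verification' parameter is the hex SHA-1 HMAC of the action name keyed with g.secrets["action_name"], so the value has to be minted server side when the page is rendered. A sketch of how a matching value could be computed; the secret and action name here are placeholders, not values from the codebase:

import hashlib
import hmac

action_name = 'hot.GET_listing'            # 'controller.METHOD_action' format
secret = 'placeholder-action-name-secret'  # stands in for g.secrets["action_name"]
verification = hmac.new(secret, action_name, hashlib.sha1).hexdigest()
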
Code example #7
File: user.py Project: shuapeng/reddit
from r2.lib.validator import (
    VList,
    VValidatedJSON,
)
from r2.models import Account, Trophy
import r2.lib.errors as errors
import r2.lib.validator.preferences as vprefs


# Build a partial-object spec from the preference validators, stripping the
# "pref_" prefix from each key and keeping only real account preference attrs.
PREFS_JSON_SPEC = VValidatedJSON.PartialObject({
    k[len("pref_"):]: v for k, v in
    vprefs.PREFS_VALIDATORS.iteritems()
    if k in Account._preference_attrs
})

PREFS_JSON_SPEC.spec["content_langs"] = VValidatedJSON.ArrayOf(
    VContentLang("content_langs")
)


class APIv1UserController(OAuth2ResourceController):
    def pre(self):
        OAuth2ResourceController.pre(self)
        self.authenticate_with_token()
        self.run_sitewide_ratelimits()

    def try_pagecache(self):
        pass

    @staticmethod
    def on_validation_error(error):
        abort_with_error(error, error.code or 400)
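
PREFS_JSON_SPEC is a PartialObject spec, so a request may supply any subset of the preference fields. A hedged sketch of how such a spec could be wired into a handler with VValidatedJSON; the method name, parameter name, and body are illustrative, not taken from this excerpt:

    @validate(
        validated_prefs=VValidatedJSON('json', PREFS_JSON_SPEC),
    )
    def PATCH_prefs(self, validated_prefs):
        # each key/value pair has already passed its preference validator
        for name, value in validated_prefs.iteritems():
            setattr(c.user, 'pref_' + name, value)
        c.user._commit()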