Example #1
    def __init__(self, global_conf, app_conf, paths, **extra):
        """
        Globals acts as a container for objects available throughout
        the life of the application.

        One instance of Globals is created by Pylons during
        application initialization and is available during requests
        via the 'g' variable.

        ``global_conf``
            The same variable used throughout ``config/middleware.py``,
            namely the variables from the ``[DEFAULT]`` section of the
            configuration file.

        ``app_conf``
            The same ``kw`` dictionary used throughout
            ``config/middleware.py``, namely the variables from the
            section in the config file for your application.

        ``extra``
            The configuration returned from ``load_config`` in
            ``config/middleware.py``, which may be of use in the setup of
            your global variables.

        """

        global_conf.setdefault("debug", False)

        self.config = ConfigValueParser(global_conf)
        self.config.add_spec(self.spec)
        self.plugins = PluginLoader(self.config.get("plugins", []))

        self.stats = Stats(self.config.get('statsd_addr'),
                           self.config.get('statsd_sample_rate'))
        self.startup_timer = self.stats.get_timer("app_startup")
        self.startup_timer.start()

        self.paths = paths

        self.running_as_script = global_conf.get('running_as_script', False)
        
        # turn on for language support
        self.lang = getattr(self, 'site_lang', 'en')
        self.languages, self.lang_name = \
            get_active_langs(default_lang=self.lang)

        all_languages = self.lang_name.keys()
        all_languages.sort()
        self.all_languages = all_languages
        
        # set default time zone if one is not set
        tz = global_conf.get('timezone', 'UTC')
        self.tz = pytz.timezone(tz)
        
        dtz = global_conf.get('display_timezone', tz)
        self.display_tz = pytz.timezone(dtz)

        self.startup_timer.intermediate("init")
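
The docstring notes that the Globals instance is exposed to request code as ``g``. A minimal sketch of reading it from a Pylons controller (the controller is hypothetical, and the import form depends on the Pylons version: older releases expose ``g`` directly, newer ones as ``app_globals``):

    from pylons import app_globals as g

    class StatusController(BaseController):  # hypothetical controller
        def index(self):
            # g is the Globals instance constructed in __init__ above
            return "debug=%s tz=%s" % (g.config.get("debug"), g.tz)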
Example #2
    def __init__(self, global_conf, app_conf, paths, **extra):
        """
        Globals acts as a container for objects available throughout
        the life of the application.

        One instance of Globals is created by Pylons during
        application initialization and is available during requests
        via the 'g' variable.

        ``global_conf``
            The same variable used throughout ``config/middleware.py``,
            namely the variables from the ``[DEFAULT]`` section of the
            configuration file.

        ``app_conf``
            The same ``kw`` dictionary used throughout
            ``config/middleware.py``, namely the variables from the
            section in the config file for your application.

        ``extra``
            The configuration returned from ``load_config`` in
            ``config/middleware.py``, which may be of use in the setup of
            your global variables.

        """

        global_conf.setdefault("debug", False)

        # reloading site ensures that we have a fresh sys.path to build our
        # working set off of. this means that forked worker processes won't get
        # the sys.path that was current when the master process was spawned
        # meaning that new plugins will be picked up on regular app reload
        # rather than having to restart the master process as well.
        reload(site)
        self.pkg_resources_working_set = pkg_resources.WorkingSet()

        self.config = ConfigValueParser(global_conf)
        self.config.add_spec(self.spec)
        self.plugins = PluginLoader(self.pkg_resources_working_set,
                                    self.config.get("plugins", []))

        self.stats = Stats(self.config.get('statsd_addr'),
                           self.config.get('statsd_sample_rate'))
        self.startup_timer = self.stats.get_timer("app_startup")
        self.startup_timer.start()

        self.paths = paths

        self.running_as_script = global_conf.get('running_as_script', False)

        # turn on for language support
        self.lang = getattr(self, 'site_lang', 'en')
        self.languages, self.lang_name = \
            get_active_langs(default_lang=self.lang)

        all_languages = self.lang_name.keys()
        all_languages.sort()
        self.all_languages = all_languages

        # set default time zone if one is not set
        tz = global_conf.get('timezone', 'UTC')
        self.tz = pytz.timezone(tz)

        dtz = global_conf.get('display_timezone', tz)
        self.display_tz = pytz.timezone(dtz)

        self.startup_timer.intermediate("init")
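
This variant differs from Example #1 by calling ``reload(site)`` and building a fresh ``pkg_resources.WorkingSet`` before loading plugins, so forked workers see packages installed after the master process started. A sketch of how such a working set can be queried for plugin entry points (the entry-point group name "r2.plugin" is an assumption for illustration):

    import pkg_resources

    working_set = pkg_resources.WorkingSet()  # scans the current sys.path
    plugins = {}
    for entry_point in working_set.iter_entry_points("r2.plugin"):
        plugins[entry_point.name] = entry_point.load()  # import plugin class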
Example #3
class Globals(object):
    spec = {
        ConfigValue.int: [
            'db_pool_size',
            'db_pool_overflow_size',
            'page_cache_time',
            'commentpane_cache_time',
            'num_mc_clients',
            'MAX_CAMPAIGNS_PER_LINK',
            'MIN_DOWN_LINK',
            'MIN_UP_KARMA',
            'MIN_DOWN_KARMA',
            'MIN_RATE_LIMIT_KARMA',
            'MIN_RATE_LIMIT_COMMENT_KARMA',
            'HOT_PAGE_AGE',
            'QUOTA_THRESHOLD',
            'ADMIN_COOKIE_TTL',
            'ADMIN_COOKIE_MAX_IDLE',
            'OTP_COOKIE_TTL',
            'hsts_max_age',
            'num_comments',
            'max_comments',
            'max_comments_gold',
            'max_comment_parent_walk',
            'max_sr_images',
            'num_serendipity',
            'sr_dropdown_threshold',
            'comment_visits_period',
            'butler_max_mentions',
            'min_membership_create_community',
            'bcrypt_work_factor',
            'cassandra_pool_size',
            'sr_banned_quota',
            'sr_wikibanned_quota',
            'sr_wikicontributor_quota',
            'sr_moderator_invite_quota',
            'sr_contributor_quota',
            'sr_quota_time',
            'sr_invite_limit',
            'thumbnail_hidpi_scaling',
            'wiki_keep_recent_days',
            'wiki_max_page_length_bytes',
            'wiki_max_page_name_length',
            'wiki_max_page_separators',
            'RL_RESET_MINUTES',
            'RL_OAUTH_RESET_MINUTES',
            'comment_karma_display_floor',
            'link_karma_display_floor',
        ],
        ConfigValue.float: [
            'default_promote_bid',
            'min_promote_bid',
            'max_promote_bid',
            'statsd_sample_rate',
            'querycache_prune_chance',
            'RL_AVG_REQ_PER_SEC',
            'RL_OAUTH_AVG_REQ_PER_SEC',
            'RL_LOGIN_AVG_PER_SEC',
        ],
        ConfigValue.bool: [
            'debug',
            'log_start',
            'sqlprinting',
            'template_debug',
            'reload_templates',
            'uncompressedJS',
            'css_killswitch',
            'db_create_tables',
            'disallow_db_writes',
            'disable_ratelimit',
            'amqp_logging',
            'read_only_mode',
            'disable_wiki',
            'heavy_load_mode',
            'disable_captcha',
            'disable_ads',
            'disable_require_admin_otp',
            'trust_local_proxies',
            'shard_link_vote_queues',
            'shard_commentstree_queues',
            'ENFORCE_RATELIMIT',
            'RL_SITEWIDE_ENABLED',
            'RL_OAUTH_SITEWIDE_ENABLED',
        ],
        ConfigValue.tuple: [
            'plugins',
            'stalecaches',
            'memcaches',
            'lockcaches',
            'permacache_memcaches',
            'rendercaches',
            'pagecaches',
            'memoizecaches',
            'srmembercaches',
            'relcaches',
            'ratelimitcaches',
            'cassandra_seeds',
            'automatic_reddits',
            'hardcache_categories',
            'case_sensitive_domains',
            'known_image_domains',
            'reserved_subdomains',
            'offsite_subdomains',
            'TRAFFIC_LOG_HOSTS',
            'exempt_login_user_agents',
            'timed_templates',
            'autoexpand_media_types',
            'multi_icons',
            'hide_subscribers_srs',
        ],
        ConfigValue.tuple_of(ConfigValue.int): [
            'thumbnail_size',
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.int): [
            'agents',
        ],
        ConfigValue.str: [
            'wiki_page_registration_info',
            'wiki_page_privacy_policy',
            'wiki_page_user_agreement',
            'wiki_page_gold_bottlecaps',
            'fraud_email',
            'feedback_email',
            'share_reply',
            'nerds_email',
            'community_email',
            'smtp_server',
        ],
        ConfigValue.choice(ONE=CL_ONE, QUORUM=CL_QUORUM): [
            'cassandra_rcl',
            'cassandra_wcl',
        ],
        ConfigValue.timeinterval: [
            'ARCHIVE_AGE',
            "vote_queue_grace_period",
        ],
        config_gold_price: [
            'gold_month_price',
            'gold_year_price',
            'cpm_selfserve',
            'cpm_selfserve_geotarget_metro',
            'cpm_selfserve_collection',
        ],
    }
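
    # Illustrative note: each key above is grouped under the ConfigValue
    # coercion applied to it, so after add_spec() runs in __init__ below,
    # self.config returns e.g. ints for the keys listed under
    # ConfigValue.int rather than raw ini strings.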

    live_config_spec = {
        ConfigValue.bool: [
            'frontend_logging',
        ],
        ConfigValue.int: [
            'captcha_exempt_comment_karma',
            'captcha_exempt_link_karma',
            'create_sr_account_age_days',
            'create_sr_comment_karma',
            'create_sr_link_karma',
            'cflag_min_votes',
        ],
        ConfigValue.float: [
            'cflag_lower_bound',
            'cflag_upper_bound',
            'spotlight_interest_sub_p',
            'spotlight_interest_nosub_p',
            'gold_revenue_goal',
            'invalid_key_sample_rate',
        ],
        ConfigValue.tuple: [
            'fastlane_links',
            'listing_chooser_sample_multis',
            'discovery_srs',
            'proxy_gilding_accounts',
        ],
        ConfigValue.str: [
            'listing_chooser_gold_multi',
            'listing_chooser_explore_sr',
        ],
        ConfigValue.dict(ConfigValue.int, ConfigValue.float): [
            'comment_tree_version_weights',
        ],
        ConfigValue.messages: [
            'welcomebar_messages',
            'sidebar_message',
            'gold_sidebar_message',
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.float): [
            'pennies_per_server_second',
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.choice(**PERMISSIONS)): [
            'employees',
        ],
    }

    def __init__(self, global_conf, app_conf, paths, **extra):
        """
        Globals acts as a container for objects available throughout
        the life of the application.

        One instance of Globals is created by Pylons during
        application initialization and is available during requests
        via the 'g' variable.

        ``global_conf``
            The same variable used throughout ``config/middleware.py``,
            namely the variables from the ``[DEFAULT]`` section of the
            configuration file.

        ``app_conf``
            The same ``kw`` dictionary used throughout
            ``config/middleware.py``, namely the variables from the
            section in the config file for your application.

        ``extra``
            The configuration returned from ``load_config`` in
            ``config/middleware.py``, which may be of use in the setup of
            your global variables.

        """

        global_conf.setdefault("debug", False)

        # reloading site ensures that we have a fresh sys.path to build our
        # working set off of. this means that forked worker processes won't get
        # the sys.path that was current when the master process was spawned
        # meaning that new plugins will be picked up on regular app reload
        # rather than having to restart the master process as well.
        reload(site)
        self.pkg_resources_working_set = pkg_resources.WorkingSet()

        self.config = ConfigValueParser(global_conf)
        self.config.add_spec(self.spec)
        self.plugins = PluginLoader(self.pkg_resources_working_set,
                                    self.config.get("plugins", []))

        self.stats = Stats(self.config.get('statsd_addr'),
                           self.config.get('statsd_sample_rate'))
        self.startup_timer = self.stats.get_timer("app_startup")
        self.startup_timer.start()

        self.paths = paths

        self.running_as_script = global_conf.get('running_as_script', False)

        # turn on for language support
        self.lang = getattr(self, 'site_lang', 'en')
        self.languages, self.lang_name = \
            get_active_langs(default_lang=self.lang)

        all_languages = self.lang_name.keys()
        all_languages.sort()
        self.all_languages = all_languages

        # set default time zone if one is not set
        tz = global_conf.get('timezone', 'UTC')
        self.tz = pytz.timezone(tz)

        dtz = global_conf.get('display_timezone', tz)
        self.display_tz = pytz.timezone(dtz)

        self.startup_timer.intermediate("init")

    def __getattr__(self, name):
        if not name.startswith('_') and name in self.config:
            return self.config[name]
        else:
            raise AttributeError
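
    # Illustrative note: the fallthrough above makes parsed ini values
    # readable as attributes, e.g. g.num_mc_clients returns
    # self.config["num_mc_clients"]; underscore-prefixed names still
    # raise AttributeError as usual.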

    def setup(self):
        self.queues = queues.declare_queues(self)

        self.extension_subdomains = dict(
            m="mobile",
            i="compact",
            api="api",
            rss="rss",
            xml="xml",
            json="json",
        )

        ################# PROVIDERS
        self.auth_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.auth",
            self.authentication_provider,
        )
        self.media_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.media",
            self.media_provider,
        )
        self.cdn_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.cdn",
            self.cdn_provider,
        )
        self.startup_timer.intermediate("providers")

        ################# CONFIGURATION
        # AMQP is required
        if not self.amqp_host:
            raise ValueError("amqp_host not set in the .ini")

        if not self.cassandra_seeds:
            raise ValueError("cassandra_seeds not set in the .ini")

        # heavy load mode is read only mode with a different infobar
        if self.heavy_load_mode:
            self.read_only_mode = True

        origin_prefix = self.domain_prefix + "." if self.domain_prefix else ""
        self.origin = "http://" + origin_prefix + self.domain

        self.trusted_domains = set([self.domain])
        if self.https_endpoint:
            https_url = urlparse(self.https_endpoint)
            self.trusted_domains.add(https_url.hostname)

        # load the unique hashed names of files under static
        static_files = os.path.join(self.paths.get('static_files'), 'static')
        names_file_path = os.path.join(static_files, 'names.json')
        if os.path.exists(names_file_path):
            with open(names_file_path) as handle:
                self.static_names = json.load(handle)
        else:
            self.static_names = {}

        # make python warnings go through the logging system
        logging.captureWarnings(capture=True)

        log = logging.getLogger('reddit')

        # when we're a script (paster run) just set up super simple logging
        if self.running_as_script:
            log.setLevel(logging.INFO)
            log.addHandler(logging.StreamHandler())

        # if in debug mode, override the logging level to DEBUG
        if self.debug:
            log.setLevel(logging.DEBUG)

        # attempt to figure out which pool we're in and add that to the
        # LogRecords.
        try:
            with open("/etc/ec2_asg", "r") as f:
                pool = f.read().strip()
            # clean up the pool name since we're putting stuff after "-"
            pool = pool.partition("-")[0]
        except IOError:
            pool = "reddit-app"
        self.log = logging.LoggerAdapter(log, {"pool": pool})

        # set locations
        locations = pkg_resources.resource_stream(__name__,
                                                  "../data/locations.json")
        self.locations = json.loads(locations.read())

        if not self.media_domain:
            self.media_domain = self.domain
        if self.media_domain == self.domain:
            print >> sys.stderr, (
                "Warning: g.media_domain == g.domain. " +
                "This may give untrusted content access to user cookies")
        if self.oauth_domain == self.domain:
            print >> sys.stderr, ("Warning: g.oauth_domain == g.domain. "
                                  "CORS requests to g.domain will be allowed")

        for arg in sys.argv:
            tokens = arg.split("=")
            if len(tokens) == 2:
                k, v = tokens
                self.log.debug("Overriding g.%s to %s" % (k, v))
                setattr(self, k, v)

        self.reddit_host = socket.gethostname()
        self.reddit_pid = os.getpid()

        if hasattr(signal, 'SIGUSR1'):
            # not all platforms have user signals
            signal.signal(signal.SIGUSR1, thread_dump)

        locale.setlocale(locale.LC_ALL, self.locale)

        # Pre-calculate ratelimit values
        self.RL_RESET_SECONDS = self.config["RL_RESET_MINUTES"] * 60
        self.RL_MAX_REQS = int(self.config["RL_AVG_REQ_PER_SEC"] *
                               self.RL_RESET_SECONDS)

        self.RL_OAUTH_RESET_SECONDS = self.config["RL_OAUTH_RESET_MINUTES"] * 60
        self.RL_OAUTH_MAX_REQS = int(self.config["RL_OAUTH_AVG_REQ_PER_SEC"] *
                                     self.RL_OAUTH_RESET_SECONDS)

        self.RL_LOGIN_MAX_REQS = int(self.config["RL_LOGIN_AVG_PER_SEC"] *
                                     self.RL_RESET_SECONDS)
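
        # Illustrative arithmetic with assumed values: RL_RESET_MINUTES=10
        # and RL_AVG_REQ_PER_SEC=0.5 give RL_RESET_SECONDS = 600 and
        # RL_MAX_REQS = int(0.5 * 600) = 300 requests per window.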

        self.startup_timer.intermediate("configuration")

        ################# ZOOKEEPER
        # for now, zookeeper will be an optional part of the stack.
        # if it's not configured, we will grab the expected config from the
        # [live_config] section of the ini file
        zk_hosts = self.config.get("zookeeper_connection_string")
        if zk_hosts:
            from r2.lib.zookeeper import (connect_to_zookeeper, LiveConfig,
                                          LiveList)
            zk_username = self.config["zookeeper_username"]
            zk_password = self.config["zookeeper_password"]
            self.zookeeper = connect_to_zookeeper(zk_hosts,
                                                  (zk_username, zk_password))
            self.live_config = LiveConfig(self.zookeeper, LIVE_CONFIG_NODE)
            self.secrets = fetch_secrets(self.zookeeper)
            self.throttles = LiveList(self.zookeeper,
                                      "/throttles",
                                      map_fn=ipaddress.ip_network,
                                      reduce_fn=ipaddress.collapse_addresses)

            # close our zk connection when the app shuts down
            SHUTDOWN_CALLBACKS.append(self.zookeeper.stop)
        else:
            self.zookeeper = None
            parser = ConfigParser.RawConfigParser()
            parser.optionxform = str
            parser.read([self.config["__file__"]])
            self.live_config = extract_live_config(parser, self.plugins)
            self.secrets = extract_secrets(parser)
            self.throttles = tuple()  # immutable since it's not real
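            # Illustrative ini fallback for this branch (key names from
            # live_config_spec above; the exact format is an assumption):
            #   [live_config]
            #   frontend_logging = true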

        self.startup_timer.intermediate("zookeeper")

        ################# PRIVILEGED USERS
        self.admins = PermissionFilteredEmployeeList(self.live_config,
                                                     type="admin")
        self.sponsors = PermissionFilteredEmployeeList(self.live_config,
                                                       type="sponsor")
        self.employees = PermissionFilteredEmployeeList(self.live_config,
                                                        type="employee")

        ################# MEMCACHE
        num_mc_clients = self.num_mc_clients

        # the main memcache pool. used for most everything.
        memcache = CMemcache(
            self.memcaches,
            min_compress_len=1400,
            num_clients=num_mc_clients,
            binary=True,
        )

        # a pool just used for @memoize results
        memoizecaches = CMemcache(
            self.memoizecaches,
            min_compress_len=50 * 1024,
            num_clients=num_mc_clients,
            binary=True,
        )

        # a pool just for srmember rels
        srmembercaches = CMemcache(
            self.srmembercaches,
            min_compress_len=96,
            num_clients=num_mc_clients,
            binary=True,
        )

        # a pool just for rels
        relcaches = CMemcache(
            self.relcaches,
            min_compress_len=96,
            num_clients=num_mc_clients,
            binary=True,
        )

        ratelimitcaches = CMemcache(
            self.ratelimitcaches,
            min_compress_len=96,
            num_clients=num_mc_clients,
        )

        # a smaller pool of caches used only for distributed locks.
        # TODO: move this to ZooKeeper
        self.lock_cache = CMemcache(self.lockcaches,
                                    binary=True,
                                    num_clients=num_mc_clients)
        self.make_lock = make_lock_factory(self.lock_cache, self.stats)

        # memcaches used in front of the permacache CF in cassandra.
        # XXX: this is a legacy thing; permacache was made when C* didn't have
        # a row cache.
        permacache_memcaches = CMemcache(self.permacache_memcaches,
                                         min_compress_len=1400,
                                         num_clients=num_mc_clients)

        # the stalecache is a memcached local to the current app server used
        # for data that's frequently fetched but doesn't need to be fresh.
        if self.stalecaches:
            stalecaches = CMemcache(self.stalecaches,
                                    binary=True,
                                    num_clients=num_mc_clients)
        else:
            stalecaches = None

        # rendercache holds rendered partial templates.
        rendercaches = CMemcache(
            self.rendercaches,
            noreply=True,
            no_block=True,
            num_clients=num_mc_clients,
            min_compress_len=480,
        )

        # pagecaches hold fully rendered pages
        pagecaches = CMemcache(
            self.pagecaches,
            noreply=True,
            no_block=True,
            num_clients=num_mc_clients,
            min_compress_len=1400,
        )

        self.startup_timer.intermediate("memcache")

        ################# CASSANDRA
        keyspace = "reddit"
        self.cassandra_pools = {
            "main":
            StatsCollectingConnectionPool(keyspace,
                                          stats=self.stats,
                                          logging_name="main",
                                          server_list=self.cassandra_seeds,
                                          pool_size=self.cassandra_pool_size,
                                          timeout=4,
                                          max_retries=3,
                                          prefill=False),
        }

        permacache_cf = Permacache._setup_column_family(
            'permacache',
            self.cassandra_pools[self.cassandra_default_pool],
        )

        self.startup_timer.intermediate("cassandra")

        ################# POSTGRES
        self.dbm = self.load_db_params()
        self.startup_timer.intermediate("postgres")

        ################# CHAINS
        # initialize caches. Any cache-chains built here must be added
        # to cache_chains (closed around by reset_caches) so that they
        # can properly reset their local components
        cache_chains = {}
        localcache_cls = (SelfEmptyingCache
                          if self.running_as_script else LocalCache)

        if stalecaches:
            self.cache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                memcache,
            )
        else:
            self.cache = MemcacheChain((localcache_cls(), memcache))
        cache_chains.update(cache=self.cache)

        if stalecaches:
            self.memoizecache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                memoizecaches,
            )
        else:
            self.memoizecache = MemcacheChain(
                (localcache_cls(), memoizecaches))
        cache_chains.update(memoizecache=self.memoizecache)

        if stalecaches:
            self.srmembercache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                srmembercaches,
            )
        else:
            self.srmembercache = MemcacheChain(
                (localcache_cls(), srmembercaches))
        cache_chains.update(srmembercache=self.srmembercache)

        if stalecaches:
            self.relcache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                relcaches,
            )
        else:
            self.relcache = MemcacheChain((localcache_cls(), relcaches))
        cache_chains.update(relcache=self.relcache)

        self.ratelimitcache = MemcacheChain(
            (localcache_cls(), ratelimitcaches))
        cache_chains.update(ratelimitcache=self.ratelimitcache)

        self.rendercache = MemcacheChain((
            localcache_cls(),
            rendercaches,
        ))
        cache_chains.update(rendercache=self.rendercache)

        self.pagecache = MemcacheChain((
            localcache_cls(),
            pagecaches,
        ))
        cache_chains.update(pagecache=self.pagecache)

        # the thing_cache is used in tdb_cassandra.
        self.thing_cache = CacheChain((localcache_cls(), ), check_keys=False)
        cache_chains.update(thing_cache=self.thing_cache)

        if stalecaches:
            permacache_cache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                permacache_memcaches,
                check_keys=False,
            )
        else:
            permacache_cache = CacheChain(
                (localcache_cls(), permacache_memcaches),
                check_keys=False,
            )
        cache_chains.update(permacache=permacache_cache)

        self.permacache = Permacache(
            permacache_cache,
            permacache_cf,
            lock_factory=self.make_lock,
        )

        # hardcache is used for various things that tend to expire
        # TODO: replace hardcache w/ cassandra stuff
        self.hardcache = HardcacheChain(
            (localcache_cls(), memcache, HardCache(self)),
            cache_negative_results=True,
        )
        cache_chains.update(hardcache=self.hardcache)

        # I know this sucks, but we need non-request-threads to be
        # able to reset the caches, so we need them be able to close
        # around 'cache_chains' without being able to call getattr on
        # 'g'
        def reset_caches():
            for name, chain in cache_chains.iteritems():
                chain.reset()
                if isinstance(chain, StaleCacheChain):
                    chain.stats = StaleCacheStats(self.stats, name)
                else:
                    chain.stats = CacheStats(self.stats, name)

        self.cache_chains = cache_chains

        self.reset_caches = reset_caches
        self.reset_caches()

        self.startup_timer.intermediate("cache_chains")

        # try to set the source control revision numbers
        self.versions = {}
        r2_root = os.path.dirname(os.path.dirname(self.paths["root"]))
        r2_gitdir = os.path.join(r2_root, ".git")
        self.short_version = self.record_repo_version("r2", r2_gitdir)

        if I18N_PATH:
            i18n_git_path = os.path.join(os.path.dirname(I18N_PATH), ".git")
            self.record_repo_version("i18n", i18n_git_path)

        self.startup_timer.intermediate("revisions")

    def setup_complete(self):
        self.startup_timer.stop()
        self.stats.flush()

        if self.log_start:
            self.log.error("%s:%s started %s at %s (took %.02fs)",
                           self.reddit_host, self.reddit_pid,
                           self.short_version,
                           datetime.now().strftime("%H:%M:%S"),
                           self.startup_timer.elapsed_seconds())

    def record_repo_version(self, repo_name, git_dir):
        """Get the currently checked out git revision for a given repository,
        record it in g.versions, and return the short version of the hash."""
        try:
            subprocess.check_output
        except AttributeError:
            # python 2.6 compat
            pass
        else:
            try:
                revision = subprocess.check_output(
                    ["git", "--git-dir", git_dir, "rev-parse", "HEAD"])
            except subprocess.CalledProcessError, e:
                self.log.warning("Unable to fetch git revision: %r", e)
            else:
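
The listing is truncated here, but the guard in ``record_repo_version`` is worth noting: ``subprocess.check_output`` only exists on Python 2.7+, so probing for it and catching AttributeError skips version recording on 2.6 instead of crashing. A standalone sketch of the same technique (the function name is illustrative):

    import subprocess

    def current_revision(git_dir):
        """Return the checked-out commit hash, or None if unavailable."""
        if not hasattr(subprocess, "check_output"):
            return None  # Python < 2.7: no check_output, skip quietly
        try:
            output = subprocess.check_output(
                ["git", "--git-dir", git_dir, "rev-parse", "HEAD"])
        except subprocess.CalledProcessError:
            return None  # git failed (e.g. not a repository)
        return output.strip()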
Example #4
class Globals(object):
    spec = {

        ConfigValue.int: [
            'db_pool_size',
            'db_pool_overflow_size',
            'page_cache_time',
            'commentpane_cache_time',
            'num_mc_clients',
            'MAX_CAMPAIGNS_PER_LINK',
            'MIN_DOWN_LINK',
            'MIN_UP_KARMA',
            'MIN_DOWN_KARMA',
            'MIN_RATE_LIMIT_KARMA',
            'MIN_RATE_LIMIT_COMMENT_KARMA',
            'HOT_PAGE_AGE',
            'QUOTA_THRESHOLD',
            'ADMIN_COOKIE_TTL',
            'ADMIN_COOKIE_MAX_IDLE',
            'OTP_COOKIE_TTL',
            'num_comments',
            'max_comments',
            'max_comments_gold',
            'num_default_reddits',
            'max_sr_images',
            'num_serendipity',
            'sr_dropdown_threshold',
            'comment_visits_period',
            'min_membership_create_community',
            'bcrypt_work_factor',
            'cassandra_pool_size',
            'sr_banned_quota',
            'sr_wikibanned_quota',
            'sr_wikicontributor_quota',
            'sr_moderator_invite_quota',
            'sr_contributor_quota',
            'sr_quota_time',
            'sr_invite_limit',
            'wiki_keep_recent_days',
            'wiki_max_page_length_bytes',
            'wiki_max_page_name_length',
            'wiki_max_page_separators',
            'min_promote_future',
            'max_promote_future',
            'RL_RESET_MINUTES',
            'RL_OAUTH_RESET_MINUTES',
        ],

        ConfigValue.float: [
            'min_promote_bid',
            'max_promote_bid',
            'statsd_sample_rate',
            'querycache_prune_chance',
            'RL_AVG_REQ_PER_SEC',
            'RL_OAUTH_AVG_REQ_PER_SEC',
        ],

        ConfigValue.bool: [
            'debug',
            'log_start',
            'sqlprinting',
            'template_debug',
            'reload_templates',
            'uncompressedJS',
            'css_killswitch',
            'db_create_tables',
            'disallow_db_writes',
            'disable_ratelimit',
            'amqp_logging',
            'read_only_mode',
            'disable_wiki',
            'heavy_load_mode',
            'disable_captcha',
            'disable_ads',
            'disable_require_admin_otp',
            'static_pre_gzipped',
            'static_secure_pre_gzipped',
            'trust_local_proxies',
            'shard_link_vote_queues',
            'shard_commentstree_queues',
            'subreddit_stylesheets_static',
            'ENFORCE_RATELIMIT',
            'RL_SITEWIDE_ENABLED',
            'RL_OAUTH_SITEWIDE_ENABLED',
        ],

        ConfigValue.tuple: [
            'plugins',
            'stalecaches',
            'memcaches',
            'lockcaches',
            'permacache_memcaches',
            'rendercaches',
            'pagecaches',
            'memoizecaches',
            'srmembercaches',
            'ratelimitcaches',
            'cassandra_seeds',
            'admins',
            'sponsors',
            'employees',
            'automatic_reddits',
            'hardcache_categories',
            'case_sensitive_domains',
            'reserved_subdomains',
            'offsite_subdomains',
            'TRAFFIC_LOG_HOSTS',
            'exempt_login_user_agents',
            'timed_templates',
            'autoexpand_media_types',
        ],

        ConfigValue.dict(ConfigValue.str, ConfigValue.int): [
            'agents',
        ],

        ConfigValue.str: [
            'wiki_page_registration_info',
            'wiki_page_privacy_policy',
            'wiki_page_user_agreement',
            'wiki_page_gold_bottlecaps',
        ],

        ConfigValue.choice: {
             'cassandra_rcl': {
                 'ONE': CL_ONE,
                 'QUORUM': CL_QUORUM
             },
             'cassandra_wcl': {
                 'ONE': CL_ONE,
                 'QUORUM': CL_QUORUM
             },
        },

        ConfigValue.timeinterval: [
            'ARCHIVE_AGE',
        ],

        config_gold_price: [
            'gold_month_price',
            'gold_year_price',
            'cpm_selfserve',
            'cpm_selfserve_geotarget_country',
            'cpm_selfserve_geotarget_metro',
        ],
    }

    live_config_spec = {
        ConfigValue.bool: [
            'frontend_logging',
        ],
        ConfigValue.float: [
            'spotlight_interest_sub_p',
            'spotlight_interest_nosub_p',
            'gold_revenue_goal',
        ],
        ConfigValue.tuple: [
            'fastlane_links',
            'listing_chooser_sample_multis',
            'discovery_srs',
        ],
        ConfigValue.str: [
            'listing_chooser_gold_multi',
            'listing_chooser_explore_sr',
        ],
        ConfigValue.dict(ConfigValue.int, ConfigValue.float): [
            'comment_tree_version_weights',
        ],
        ConfigValue.messages: [
            'welcomebar_messages',
            'sidebar_message',
            'gold_sidebar_message',
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.float): [
            'pennies_per_server_second',
        ],
    }

    def __init__(self, global_conf, app_conf, paths, **extra):
        """
        Globals acts as a container for objects available throughout
        the life of the application.

        One instance of Globals is created by Pylons during
        application initialization and is available during requests
        via the 'g' variable.

        ``global_conf``
            The same variable used throughout ``config/middleware.py``,
            namely the variables from the ``[DEFAULT]`` section of the
            configuration file.

        ``app_conf``
            The same ``kw`` dictionary used throughout
            ``config/middleware.py``, namely the variables from the
            section in the config file for your application.

        ``extra``
            The configuration returned from ``load_config`` in
            ``config/middleware.py``, which may be of use in the setup of
            your global variables.

        """

        global_conf.setdefault("debug", False)

        # reloading site ensures that we have a fresh sys.path to build our
        # working set off of. this means that forked worker processes won't get
        # the sys.path that was current when the master process was spawned
        # meaning that new plugins will be picked up on regular app reload
        # rather than having to restart the master process as well.
        reload(site)
        self.pkg_resources_working_set = pkg_resources.WorkingSet()

        self.config = ConfigValueParser(global_conf)
        self.config.add_spec(self.spec)
        self.plugins = PluginLoader(self.pkg_resources_working_set,
                                    self.config.get("plugins", []))

        self.stats = Stats(self.config.get('statsd_addr'),
                           self.config.get('statsd_sample_rate'))
        self.startup_timer = self.stats.get_timer("app_startup")
        self.startup_timer.start()

        self.paths = paths

        self.running_as_script = global_conf.get('running_as_script', False)
        
        # turn on for language support
        self.lang = getattr(self, 'site_lang', 'en')
        self.languages, self.lang_name = \
            get_active_langs(default_lang=self.lang)

        all_languages = self.lang_name.keys()
        all_languages.sort()
        self.all_languages = all_languages
        
        # set default time zone if one is not set
        tz = global_conf.get('timezone', 'UTC')
        self.tz = pytz.timezone(tz)
        
        dtz = global_conf.get('display_timezone', tz)
        self.display_tz = pytz.timezone(dtz)

        self.startup_timer.intermediate("init")

    def __getattr__(self, name):
        if not name.startswith('_') and name in self.config:
            return self.config[name]
        else:
            raise AttributeError

    def setup(self):
        self.queues = queues.declare_queues(self)

        ################# PROVIDERS
        self.media_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.media",
            self.media_provider,
        )
        self.startup_timer.intermediate("providers")

        ################# CONFIGURATION
        # AMQP is required
        if not self.amqp_host:
            raise ValueError("amqp_host not set in the .ini")

        if not self.cassandra_seeds:
            raise ValueError("cassandra_seeds not set in the .ini")

        # heavy load mode is read only mode with a different infobar
        if self.heavy_load_mode:
            self.read_only_mode = True

        origin_prefix = self.domain_prefix + "." if self.domain_prefix else ""
        self.origin = "http://" + origin_prefix + self.domain

        self.trusted_domains = set([self.domain])
        if self.https_endpoint:
            https_url = urlparse(self.https_endpoint)
            self.trusted_domains.add(https_url.hostname)

        # load the unique hashed names of files under static
        static_files = os.path.join(self.paths.get('static_files'), 'static')
        names_file_path = os.path.join(static_files, 'names.json')
        if os.path.exists(names_file_path):
            with open(names_file_path) as handle:
                self.static_names = json.load(handle)
        else:
            self.static_names = {}

        # make python warnings go through the logging system
        logging.captureWarnings(capture=True)

        log = logging.getLogger('reddit')

        # when we're a script (paster run) just set up super simple logging
        if self.running_as_script:
            log.setLevel(logging.INFO)
            log.addHandler(logging.StreamHandler())

        # if in debug mode, override the logging level to DEBUG
        if self.debug:
            log.setLevel(logging.DEBUG)

        # attempt to figure out which pool we're in and add that to the
        # LogRecords.
        try:
            with open("/etc/ec2_asg", "r") as f:
                pool = f.read().strip()
            # clean up the pool name since we're putting stuff after "-"
            pool = pool.partition("-")[0]
        except IOError:
            pool = "reddit-app"
        self.log = logging.LoggerAdapter(log, {"pool": pool})

        # set locations
        self.locations = {}

        if not self.media_domain:
            self.media_domain = self.domain
        if self.media_domain == self.domain:
            print >> sys.stderr, ("Warning: g.media_domain == g.domain. " +
                   "This may give untrusted content access to user cookies")

        for arg in sys.argv:
            tokens = arg.split("=")
            if len(tokens) == 2:
                k, v = tokens
                self.log.debug("Overriding g.%s to %s" % (k, v))
                setattr(self, k, v)

        self.reddit_host = socket.gethostname()
        self.reddit_pid  = os.getpid()

        if hasattr(signal, 'SIGUSR1'):
            # not all platforms have user signals
            signal.signal(signal.SIGUSR1, thread_dump)

        locale.setlocale(locale.LC_ALL, self.locale)

        # Pre-calculate ratelimit values
        self.RL_RESET_SECONDS = self.config["RL_RESET_MINUTES"] * 60
        self.RL_MAX_REQS = int(self.config["RL_AVG_REQ_PER_SEC"] *
                                      self.RL_RESET_SECONDS)

        self.RL_OAUTH_RESET_SECONDS = self.config["RL_OAUTH_RESET_MINUTES"] * 60
        self.RL_OAUTH_MAX_REQS = int(self.config["RL_OAUTH_AVG_REQ_PER_SEC"] *
                                     self.RL_OAUTH_RESET_SECONDS)

        self.startup_timer.intermediate("configuration")

        ################# ZOOKEEPER
        # for now, zookeeper will be an optional part of the stack.
        # if it's not configured, we will grab the expected config from the
        # [live_config] section of the ini file
        zk_hosts = self.config.get("zookeeper_connection_string")
        if zk_hosts:
            from r2.lib.zookeeper import (connect_to_zookeeper,
                                          LiveConfig, LiveList)
            zk_username = self.config["zookeeper_username"]
            zk_password = self.config["zookeeper_password"]
            self.zookeeper = connect_to_zookeeper(zk_hosts, (zk_username,
                                                             zk_password))
            self.live_config = LiveConfig(self.zookeeper, LIVE_CONFIG_NODE)
            self.secrets = fetch_secrets(self.zookeeper)
            self.throttles = LiveList(self.zookeeper, "/throttles",
                                      map_fn=ipaddress.ip_network,
                                      reduce_fn=ipaddress.collapse_addresses)
        else:
            self.zookeeper = None
            parser = ConfigParser.RawConfigParser()
            parser.optionxform = str
            parser.read([self.config["__file__"]])
            self.live_config = extract_live_config(parser, self.plugins)
            self.secrets = extract_secrets(parser)
            self.throttles = tuple()  # immutable since it's not real

        self.startup_timer.intermediate("zookeeper")

        ################# MEMCACHE
        num_mc_clients = self.num_mc_clients

        # the main memcache pool. used for most everything.
        self.memcache = CMemcache(
            self.memcaches,
            min_compress_len=50 * 1024,
            num_clients=num_mc_clients,
        )

        # a pool just used for @memoize results
        memoizecaches = CMemcache(
            self.memoizecaches,
            min_compress_len=50 * 1024,
            num_clients=num_mc_clients,
        )

        # a pool just for srmember rels
        srmembercaches = CMemcache(
            self.srmembercaches,
            min_compress_len=96,
            num_clients=num_mc_clients,
        )

        ratelimitcaches = CMemcache(
            self.ratelimitcaches,
            min_compress_len=96,
            num_clients=num_mc_clients,
        )

        # a smaller pool of caches used only for distributed locks.
        # TODO: move this to ZooKeeper
        self.lock_cache = CMemcache(self.lockcaches,
                                    num_clients=num_mc_clients)
        self.make_lock = make_lock_factory(self.lock_cache, self.stats)

        # memcaches used in front of the permacache CF in cassandra.
        # XXX: this is a legacy thing; permacache was made when C* didn't have
        # a row cache.
        if self.permacache_memcaches:
            permacache_memcaches = CMemcache(self.permacache_memcaches,
                                             min_compress_len=50 * 1024,
                                             num_clients=num_mc_clients)
        else:
            permacache_memcaches = None

        # the stalecache is a memcached local to the current app server used
        # for data that's frequently fetched but doesn't need to be fresh.
        if self.stalecaches:
            stalecaches = CMemcache(self.stalecaches,
                                    num_clients=num_mc_clients)
        else:
            stalecaches = None

        # rendercache holds rendered partial templates.
        rendercaches = CMemcache(
            self.rendercaches,
            noreply=True,
            no_block=True,
            num_clients=num_mc_clients,
            min_compress_len=480,
        )

        # pagecaches hold fully rendered pages
        pagecaches = CMemcache(
            self.pagecaches,
            noreply=True,
            no_block=True,
            num_clients=num_mc_clients,
            min_compress_len=1400,
        )

        self.startup_timer.intermediate("memcache")

        ################# CASSANDRA
        keyspace = "reddit"
        self.cassandra_pools = {
            "main":
                StatsCollectingConnectionPool(
                    keyspace,
                    stats=self.stats,
                    logging_name="main",
                    server_list=self.cassandra_seeds,
                    pool_size=self.cassandra_pool_size,
                    timeout=4,
                    max_retries=3,
                    prefill=False
                ),
        }

        permacache_cf = CassandraCache(
            'permacache',
            self.cassandra_pools[self.cassandra_default_pool],
            read_consistency_level=self.cassandra_rcl,
            write_consistency_level=self.cassandra_wcl
        )

        self.startup_timer.intermediate("cassandra")

        ################# POSTGRES
        self.dbm = self.load_db_params()
        self.startup_timer.intermediate("postgres")

        ################# CHAINS
        # initialize caches. Any cache-chains built here must be added
        # to cache_chains (closed around by reset_caches) so that they
        # can properly reset their local components
        cache_chains = {}
        localcache_cls = (SelfEmptyingCache if self.running_as_script
                          else LocalCache)

        if stalecaches:
            self.cache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                self.memcache,
            )
        else:
            self.cache = MemcacheChain((localcache_cls(), self.memcache))
        cache_chains.update(cache=self.cache)

        if stalecaches:
            self.memoizecache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                memoizecaches,
            )
        else:
            self.memoizecache = MemcacheChain(
                (localcache_cls(), memoizecaches))
        cache_chains.update(memoizecache=self.memoizecache)

        if stalecaches:
            self.srmembercache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                srmembercaches,
            )
        else:
            self.srmembercache = MemcacheChain(
                (localcache_cls(), srmembercaches))
        cache_chains.update(srmembercache=self.srmembercache)

        self.ratelimitcache = MemcacheChain(
                (localcache_cls(), ratelimitcaches))
        cache_chains.update(ratelimitcache=self.ratelimitcache)

        self.rendercache = MemcacheChain((
            localcache_cls(),
            rendercaches,
        ))
        cache_chains.update(rendercache=self.rendercache)

        self.pagecache = MemcacheChain((
            localcache_cls(),
            pagecaches,
        ))
        cache_chains.update(pagecache=self.pagecache)

        # the thing_cache is used in tdb_cassandra.
        self.thing_cache = CacheChain((localcache_cls(),))
        cache_chains.update(thing_cache=self.thing_cache)

        self.permacache = CassandraCacheChain(
            localcache_cls(),
            permacache_cf,
            memcache=permacache_memcaches,
            lock_factory=self.make_lock,
        )
        cache_chains.update(permacache=self.permacache)

        # hardcache is used for various things that tend to expire
        # TODO: replace hardcache w/ cassandra stuff
        self.hardcache = HardcacheChain(
            (localcache_cls(), self.memcache, HardCache(self)),
            cache_negative_results=True,
        )
        cache_chains.update(hardcache=self.hardcache)

        # I know this sucks, but we need non-request-threads to be
        # able to reset the caches, so we need them be able to close
        # around 'cache_chains' without being able to call getattr on
        # 'g'
        def reset_caches():
            for name, chain in cache_chains.iteritems():
                chain.reset()
                chain.stats = CacheStats(self.stats, name)
        self.cache_chains = cache_chains

        self.reset_caches = reset_caches
        self.reset_caches()

        self.startup_timer.intermediate("cache_chains")

        # try to set the source control revision numbers
        self.versions = {}
        r2_root = os.path.dirname(os.path.dirname(self.paths["root"]))
        r2_gitdir = os.path.join(r2_root, ".git")
        self.short_version = self.record_repo_version("r2", r2_gitdir)

        if I18N_PATH:
            i18n_git_path = os.path.join(os.path.dirname(I18N_PATH), ".git")
            self.record_repo_version("i18n", i18n_git_path)

        self.startup_timer.intermediate("revisions")

    def setup_complete(self):
        self.startup_timer.stop()
        self.stats.flush()

        if self.log_start:
            self.log.error(
                "%s:%s started %s at %s (took %.02fs)",
                self.reddit_host,
                self.reddit_pid,
                self.short_version,
                datetime.now().strftime("%H:%M:%S"),
                self.startup_timer.elapsed_seconds()
            )

    def record_repo_version(self, repo_name, git_dir):
        """Get the currently checked out git revision for a given repository,
        record it in g.versions, and return the short version of the hash."""
        try:
            subprocess.check_output
        except AttributeError:
            # python 2.6 compat
            pass
        else:
            try:
                revision = subprocess.check_output(["git",
                                                    "--git-dir", git_dir,
                                                    "rev-parse", "HEAD"])
            except subprocess.CalledProcessError, e:
                self.log.warning("Unable to fetch git revision: %r", e)
            else:
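
Across the larger examples, every cache is assembled as a chain: a per-process local cache in front of shared memcached pools, with an optional stale layer between them. A much-simplified sketch of the read-through idea behind those chains (the real MemcacheChain/StaleCacheChain classes also track stats and support reset):

    class SimpleCacheChain(object):
        """Try caches fastest-first; backfill earlier layers on a hit."""

        def __init__(self, caches):
            self.caches = list(caches)

        def get(self, key):
            for i, cache in enumerate(self.caches):
                value = cache.get(key)
                if value is not None:
                    # backfill the faster layers that already missed
                    for earlier in self.caches[:i]:
                        earlier.set(key, value)
                    return value
            return None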
Example #5
class Globals(object):
    spec = {

        ConfigValue.int: [
            'db_pool_size',
            'db_pool_overflow_size',
            'page_cache_time',
            'commentpane_cache_time',
            'num_mc_clients',
            'MAX_CAMPAIGNS_PER_LINK',
            'MIN_DOWN_LINK',
            'MIN_UP_KARMA',
            'MIN_DOWN_KARMA',
            'MIN_RATE_LIMIT_KARMA',
            'MIN_RATE_LIMIT_COMMENT_KARMA',
            'HOT_PAGE_AGE',
            'ADMIN_COOKIE_TTL',
            'ADMIN_COOKIE_MAX_IDLE',
            'OTP_COOKIE_TTL',
            'hsts_max_age',
            'num_comments',
            'max_comments',
            'max_comments_gold',
            'max_comment_parent_walk',
            'max_sr_images',
            'num_serendipity',
            'comment_visits_period',
            'butler_max_mentions',
            'min_membership_create_community',
            'bcrypt_work_factor',
            'cassandra_pool_size',
            'sr_banned_quota',
            'sr_wikibanned_quota',
            'sr_wikicontributor_quota',
            'sr_moderator_invite_quota',
            'sr_contributor_quota',
            'sr_quota_time',
            'sr_invite_limit',
            'thumbnail_hidpi_scaling',
            'wiki_keep_recent_days',
            'wiki_max_page_length_bytes',
            'wiki_max_page_name_length',
            'wiki_max_page_separators',
            'RL_RESET_MINUTES',
            'RL_OAUTH_RESET_MINUTES',
            'comment_karma_display_floor',
            'link_karma_display_floor',
            'mobile_auth_gild_time',
            'default_total_budget_pennies',
            'min_total_budget_pennies',
            'max_total_budget_pennies',
            'default_bid_pennies',
            'min_bid_pennies',
            'max_bid_pennies',
            'frequency_cap_min',
            'frequency_cap_default',
            'eu_cookie_max_attempts',
        ],

        ConfigValue.float: [
            'statsd_sample_rate',
            'querycache_prune_chance',
            'RL_AVG_REQ_PER_SEC',
            'RL_OAUTH_AVG_REQ_PER_SEC',
            'RL_LOGIN_AVG_PER_SEC',
            'RL_LOGIN_IP_AVG_PER_SEC',
            'RL_SHARE_AVG_PER_SEC',
        ],

        ConfigValue.bool: [
            'debug',
            'log_start',
            'sqlprinting',
            'template_debug',
            'reload_templates',
            'uncompressedJS',
            'css_killswitch',
            'db_create_tables',
            'disallow_db_writes',
            'disable_ratelimit',
            'amqp_logging',
            'read_only_mode',
            'disable_wiki',
            'heavy_load_mode',
            'disable_captcha',
            'disable_ads',
            'disable_require_admin_otp',
            'trust_local_proxies',
            'shard_link_vote_queues',
            'shard_commentstree_queues',
            'authnet_validate',
            'ENFORCE_RATELIMIT',
            'RL_SITEWIDE_ENABLED',
            'RL_OAUTH_SITEWIDE_ENABLED',
            'enable_loggedout_experiments',
        ],

        ConfigValue.tuple: [
            'plugins',
            'stalecaches',
            'memcaches',
            'lockcaches',
            'permacache_memcaches',
            'memoizecaches',
            'srmembercaches',
            'relcaches',
            'ratelimitcaches',
            'hardcache_memcaches',
            'cassandra_seeds',
            'automatic_reddits',
            'hardcache_categories',
            'case_sensitive_domains',
            'known_image_domains',
            'reserved_subdomains',
            'offsite_subdomains',
            'TRAFFIC_LOG_HOSTS',
            'exempt_login_user_agents',
            'timed_templates',
            'autoexpand_media_types',
            'multi_icons',
            'hide_subscribers_srs',
            'mcrouter_addr',
        ],

        ConfigValue.tuple_of(ConfigValue.int): [
            'thumbnail_size',
            'mobile_ad_image_size',
        ],

        ConfigValue.tuple_of(ConfigValue.float): [
            'ios_versions',
            'android_versions',
        ],

        ConfigValue.dict(ConfigValue.str, ConfigValue.int): [
            'user_agent_ratelimit_regexes',
        ],

        ConfigValue.str: [
            'wiki_page_registration_info',
            'wiki_page_privacy_policy',
            'wiki_page_user_agreement',
            'wiki_page_gold_bottlecaps',
            'fraud_email',
            'feedback_email',
            'share_reply',
            'nerds_email',
            'community_email',
            'smtp_server',
            'events_collector_url',
            'events_collector_test_url',
            'search_provider',
        ],

        ConfigValue.choice(ONE=CL_ONE, QUORUM=CL_QUORUM): [
             'cassandra_rcl',
             'cassandra_wcl',
        ],

        ConfigValue.timeinterval: [
            'ARCHIVE_AGE',
            "vote_queue_grace_period",
        ],

        config_gold_price: [
            'gold_month_price',
            'gold_year_price',
            'cpm_selfserve',
            'cpm_selfserve_geotarget_metro',
            'cpm_selfserve_geotarget_country',
            'cpm_selfserve_collection',
        ],

        ConfigValue.baseplate(baseplate_config.Optional(baseplate_config.Endpoint)): [
            "activity_endpoint",
        ],

        ConfigValue.dict(ConfigValue.str, ConfigValue.str): [
            'emr_traffic_tags',
        ],
    }
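
    # Illustrative reading of the spec above: each key is a coercion function
    # and each value lists the ini options it applies to, so an ini line like
    # "num_mc_clients = 10" is parsed with ConfigValue.int into the int 10,
    # and "plugins = about, liveupdate" with ConfigValue.tuple into a tuple
    # (example values here are hypothetical).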

    live_config_spec = {
        ConfigValue.bool: [
            'frontend_logging',
            'mobile_gild_first_login',
            'precomputed_comment_suggested_sort',
        ],
        ConfigValue.int: [
            'captcha_exempt_comment_karma',
            'captcha_exempt_link_karma',
            'create_sr_account_age_days',
            'create_sr_comment_karma',
            'create_sr_link_karma',
            'cflag_min_votes',
            'ads_popularity_threshold',
            'precomputed_comment_sort_min_comments',
            'comment_vote_update_threshold',
            'comment_vote_update_period',
        ],
        ConfigValue.float: [
            'cflag_lower_bound',
            'cflag_upper_bound',
            'spotlight_interest_sub_p',
            'spotlight_interest_nosub_p',
            'gold_revenue_goal',
            'invalid_key_sample_rate',
            'events_collector_vote_sample_rate',
            'events_collector_poison_sample_rate',
            'events_collector_mod_sample_rate',
            'events_collector_quarantine_sample_rate',
            'events_collector_modmail_sample_rate',
            'events_collector_report_sample_rate',
            'events_collector_submit_sample_rate',
            'events_collector_comment_sample_rate',
            'events_collector_use_gzip_chance',
            'https_cert_testing_probability',
            'precomputed_comment_sort_read_chance',
        ],
        ConfigValue.tuple: [
            'fastlane_links',
            'listing_chooser_sample_multis',
            'discovery_srs',
            'proxy_gilding_accounts',
            'mweb_blacklist_expressions',
            'global_loid_experiments',
            'precomputed_comment_sorts',
            'mailgun_domains',
        ],
        ConfigValue.str: [
            'listing_chooser_gold_multi',
            'listing_chooser_explore_sr',
        ],
        ConfigValue.dict(ConfigValue.int, ConfigValue.float): [
            'comment_tree_version_weights',
        ],
        ConfigValue.messages: [
            'welcomebar_messages',
            'sidebar_message',
            'gold_sidebar_message',
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.int): [
            'ticket_groups',
            'ticket_user_fields', 
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.float): [
            'pennies_per_server_second',
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.str): [
            'employee_approved_clients',
            'modmail_forwarding_email',
            'modmail_account_map',
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.choice(**PERMISSIONS)): [
            'employees',
        ],
    }

    def __init__(self, config, global_conf, app_conf, paths, **extra):
        """
        Globals acts as a container for objects available throughout
        the life of the application.

        One instance of Globals is created by Pylons during
        application initialization and is available during requests
        via the 'g' variable.

        ``config``
            The PylonsConfig object passed in from ``config/environment.py``

        ``global_conf``
            The same variable used throughout ``config/middleware.py``
            namely, the variables from the ``[DEFAULT]`` section of the
            configuration file.

        ``app_conf``
            The same ``kw`` dictionary used throughout
            ``config/middleware.py`` namely, the variables from the
            section in the config file for your application.

        ``extra``
            The configuration returned from ``load_config`` in 
            ``config/middleware.py`` which may be of use in the setup of
            your global variables.

        """

        global_conf.setdefault("debug", False)

        # reloading site ensures that we have a fresh sys.path to build our
        # working set off of. this means that forked worker processes won't
        # get the sys.path that was current when the master process was
        # spawned, so new plugins will be picked up on a regular app reload
        # rather than requiring a restart of the master process as well.
        reload(site)
        self.pkg_resources_working_set = pkg_resources.WorkingSet()

        self.config = ConfigValueParser(global_conf)
        self.config.add_spec(self.spec)
        self.plugins = PluginLoader(self.pkg_resources_working_set,
                                    self.config.get("plugins", []))

        self.stats = Stats(self.config.get('statsd_addr'),
                           self.config.get('statsd_sample_rate'))
        self.startup_timer = self.stats.get_timer("app_startup")
        self.startup_timer.start()

        self.baseplate = Baseplate()
        self.baseplate.configure_logging()
        self.baseplate.register(R2BaseplateObserver())

        self.paths = paths

        self.running_as_script = global_conf.get('running_as_script', False)
        
        # turn on for language support
        self.lang = getattr(self, 'site_lang', 'en')
        self.languages, self.lang_name = get_active_langs(
            config, default_lang=self.lang)

        all_languages = self.lang_name.keys()
        all_languages.sort()
        self.all_languages = all_languages
        
        # set default time zone if one is not set
        tz = global_conf.get('timezone', 'UTC')
        self.tz = pytz.timezone(tz)
        
        dtz = global_conf.get('display_timezone', tz)
        self.display_tz = pytz.timezone(dtz)

        self.startup_timer.intermediate("init")

    def __getattr__(self, name):
        if not name.startswith('_') and name in self.config:
            return self.config[name]
        else:
            raise AttributeError("g has no attr %r" % name)
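
    # Illustrative: with the spec above, an access like g.num_mc_clients falls
    # through __getattr__ to self.config["num_mc_clients"] (already coerced to
    # an int by ConfigValue.int), while unknown or underscore-prefixed names
    # raise AttributeError.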

    def setup(self):
        self.env = ''
        if (
            # handle direct invocation of "nosetests"
            "test" in sys.argv[0] or
            # handle "setup.py test" and all permutations thereof.
            "setup.py" in sys.argv[0] and "test" in sys.argv[1:]
        ):
            self.env = "unit_test"

        self.queues = queues.declare_queues(self)

        self.extension_subdomains = dict(
            simple="mobile",
            i="compact",
            api="api",
            rss="rss",
            xml="xml",
            json="json",
        )

        ################# PROVIDERS
        self.auth_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.auth",
            self.authentication_provider,
        )
        self.media_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.media",
            self.media_provider,
        )
        self.cdn_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.cdn",
            self.cdn_provider,
        )
        self.ticket_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.support",
            # TODO: fix this later; it refuses to pick up the
            # g.config['ticket_provider'] value, so hardcoding for now.
            # really, the next uncommented line should be:
            #self.ticket_provider,
            # instead of:
            "zendesk",
        )
        self.image_resizing_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.image_resizing",
            self.image_resizing_provider,
        )
        self.email_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.email",
            self.email_provider,
        )
        self.startup_timer.intermediate("providers")

        ################# CONFIGURATION
        # AMQP is required
        if not self.amqp_host:
            raise ValueError("amqp_host not set in the .ini")

        if not self.cassandra_seeds:
            raise ValueError("cassandra_seeds not set in the .ini")

        # heavy load mode is read only mode with a different infobar
        if self.heavy_load_mode:
            self.read_only_mode = True

        origin_prefix = self.domain_prefix + "." if self.domain_prefix else ""
        self.origin = self.default_scheme + "://" + origin_prefix + self.domain

        self.trusted_domains = set([self.domain])
        if self.https_endpoint:
            https_url = urlparse(self.https_endpoint)
            self.trusted_domains.add(https_url.hostname)

        # load the unique hashed names of files under static
        static_files = os.path.join(self.paths.get('static_files'), 'static')
        names_file_path = os.path.join(static_files, 'names.json')
        if os.path.exists(names_file_path):
            with open(names_file_path) as handle:
                self.static_names = json.load(handle)
        else:
            self.static_names = {}

        # make python warnings go through the logging system
        logging.captureWarnings(capture=True)

        log = logging.getLogger('reddit')

        # when we're a script (paster run) just set up super simple logging
        if self.running_as_script:
            log.setLevel(logging.INFO)
            log.addHandler(logging.StreamHandler())

        # if in debug mode, override the logging level to DEBUG
        if self.debug:
            log.setLevel(logging.DEBUG)

        # attempt to figure out which pool we're in and add that to the
        # LogRecords.
        try:
            with open("/etc/ec2_asg", "r") as f:
                pool = f.read().strip()
            # clean up the pool name since we're putting stuff after "-"
            pool = pool.partition("-")[0]
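            # e.g. an autoscaling group name like "app-2015_10" would yield
            # the pool name "app" (illustrative value, not from the source)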
        except IOError:
            pool = "reddit-app"
        self.log = logging.LoggerAdapter(log, {"pool": pool})

        # set locations
        locations = pkg_resources.resource_stream(__name__,
                                                  "../data/locations.json")
        self.locations = json.loads(locations.read())

        if not self.media_domain:
            self.media_domain = self.domain
        if self.media_domain == self.domain:
            print >> sys.stderr, ("Warning: g.media_domain == g.domain. " +
                   "This may give untrusted content access to user cookies")
        if self.oauth_domain == self.domain:
            print >> sys.stderr, ("Warning: g.oauth_domain == g.domain. "
                    "CORS requests to g.domain will be allowed")

        for arg in sys.argv:
            tokens = arg.split("=")
            if len(tokens) == 2:
                k, v = tokens
                self.log.debug("Overriding g.%s to %s" % (k, v))
                setattr(self, k, v)
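                # e.g. passing the argument "domain=reddit.local" on the
                # command line overrides g.domain for this process
                # (illustrative value)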

        self.reddit_host = socket.gethostname()
        self.reddit_pid  = os.getpid()

        if hasattr(signal, 'SIGUSR1'):
            # not all platforms have user signals
            signal.signal(signal.SIGUSR1, thread_dump)

        locale.setlocale(locale.LC_ALL, self.locale)

        # Pre-calculate ratelimit values
        self.RL_RESET_SECONDS = self.config["RL_RESET_MINUTES"] * 60
        self.RL_MAX_REQS = int(self.config["RL_AVG_REQ_PER_SEC"] *
                                      self.RL_RESET_SECONDS)

        self.RL_OAUTH_RESET_SECONDS = self.config["RL_OAUTH_RESET_MINUTES"] * 60
        self.RL_OAUTH_MAX_REQS = int(self.config["RL_OAUTH_AVG_REQ_PER_SEC"] *
                                     self.RL_OAUTH_RESET_SECONDS)

        self.RL_LOGIN_MAX_REQS = int(self.config["RL_LOGIN_AVG_PER_SEC"] *
                                     self.RL_RESET_SECONDS)
        self.RL_LOGIN_IP_MAX_REQS = int(self.config["RL_LOGIN_IP_AVG_PER_SEC"] *
                                        self.RL_RESET_SECONDS)
        self.RL_SHARE_MAX_REQS = int(self.config["RL_SHARE_AVG_PER_SEC"] *
                                     self.RL_RESET_SECONDS)
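
        # Illustrative arithmetic: with RL_RESET_MINUTES = 10 and
        # RL_AVG_REQ_PER_SEC = 0.5 (hypothetical values), RL_RESET_SECONDS
        # is 600 and RL_MAX_REQS is int(0.5 * 600) = 300 requests per window.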

        # Compile ratelimit regexs
        user_agent_ratelimit_regexes = {}
        for agent_re, limit in self.user_agent_ratelimit_regexes.iteritems():
            user_agent_ratelimit_regexes[re.compile(agent_re)] = limit
        self.user_agent_ratelimit_regexes = user_agent_ratelimit_regexes
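
        # Illustrative use (not from the original code): a request handler
        # could walk these compiled patterns and apply the first match, e.g.
        #   for regex, limit in self.user_agent_ratelimit_regexes.iteritems():
        #       if regex.search(user_agent):
        #           ...  # enforce `limit` for this user agent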

        self.startup_timer.intermediate("configuration")

        ################# ZOOKEEPER
        # for now, zookeeper will be an optional part of the stack.
        # if it's not configured, we will grab the expected config from the
        # [live_config] section of the ini file
        zk_hosts = self.config.get("zookeeper_connection_string")
        if zk_hosts:
            from r2.lib.zookeeper import (connect_to_zookeeper,
                                          LiveConfig, LiveList)
            zk_username = self.config["zookeeper_username"]
            zk_password = self.config["zookeeper_password"]
            self.zookeeper = connect_to_zookeeper(zk_hosts, (zk_username,
                                                             zk_password))
            self.live_config = LiveConfig(self.zookeeper, LIVE_CONFIG_NODE)
            self.secrets = fetch_secrets(self.zookeeper)
            self.throttles = LiveList(self.zookeeper, "/throttles",
                                      map_fn=ipaddress.ip_network,
                                      reduce_fn=ipaddress.collapse_addresses)

            # close our zk connection when the app shuts down
            SHUTDOWN_CALLBACKS.append(self.zookeeper.stop)
        else:
            self.zookeeper = None
            parser = ConfigParser.RawConfigParser()
            parser.optionxform = str
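            # RawConfigParser lowercases option names by default; using str
            # as optionxform preserves the case of live_config keys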
            parser.read([self.config["__file__"]])
            self.live_config = extract_live_config(parser, self.plugins)
            self.secrets = extract_secrets(parser)
            self.throttles = tuple()  # immutable since it's not real

        ################# PRIVILEGED USERS
        self.admins = PermissionFilteredEmployeeList(
            self.live_config, type="admin")
        self.sponsors = PermissionFilteredEmployeeList(
            self.live_config, type="sponsor")
        self.employees = PermissionFilteredEmployeeList(
            self.live_config, type="employee")

        # Store which OAuth clients employees may use, the keys are just for
        # readability.
        self.employee_approved_clients = \
            self.live_config["employee_approved_clients"].values()

        self.startup_timer.intermediate("zookeeper")

        ################# MEMCACHE
        num_mc_clients = self.num_mc_clients

        # the main memcache pool. used for most everything.
        memcaches = CMemcache(
            "main",
            self.memcaches,
            min_compress_len=1400,
            num_clients=num_mc_clients,
            validators=[validate_size_error],
        )

        # a pool just used for @memoize results
        memoizecaches = CMemcache(
            "memoize",
            self.memoizecaches,
            min_compress_len=50 * 1024,
            num_clients=num_mc_clients,
            validators=[validate_size_error],
        )

        # a pool just for srmember rels
        srmembercaches = CMemcache(
            "srmember",
            self.srmembercaches,
            min_compress_len=96,
            num_clients=num_mc_clients,
            validators=[validate_size_error],
        )

        # a pool just for rels
        relcaches = CMemcache(
            "rel",
            self.relcaches,
            min_compress_len=96,
            num_clients=num_mc_clients,
            validators=[validate_size_error],
        )

        ratelimitcaches = CMemcache(
            "ratelimit",
            self.ratelimitcaches,
            min_compress_len=96,
            num_clients=num_mc_clients,
            validators=[validate_size_error],
        )

        # a smaller pool of caches used only for distributed locks.
        self.lock_cache = CMemcache(
            "lock",
            self.lockcaches,
            num_clients=num_mc_clients,
            validators=[validate_size_error],
        )
        self.make_lock = make_lock_factory(self.lock_cache, self.stats)

        # memcaches used in front of the permacache CF in cassandra.
        # XXX: this is a legacy thing; permacache was made when C* didn't have
        # a row cache.
        permacache_memcaches = CMemcache("perma",
                                         self.permacache_memcaches,
                                         min_compress_len=1400,
                                         num_clients=num_mc_clients,
                                         validators=[],)

        # the stalecache is a memcached local to the current app server used
        # for data that's frequently fetched but doesn't need to be fresh.
        if self.stalecaches:
            stalecaches = CMemcache(
                "stale",
                self.stalecaches,
                num_clients=num_mc_clients,
                validators=[validate_size_error],
            )
        else:
            stalecaches = None

        # hardcache memcache pool
        hardcache_memcaches = CMemcache(
            "hardcache",
            self.hardcache_memcaches,
            binary=True,
            min_compress_len=1400,
            num_clients=num_mc_clients,
            validators=[validate_size_error],
        )

        self.startup_timer.intermediate("memcache")

        ################# MCROUTER
        self.mcrouter = Mcrouter(
            "mcrouter",
            self.mcrouter_addr,
            min_compress_len=1400,
            num_clients=1,
        )

        ################# THRIFT-BASED SERVICES
        activity_endpoint = self.config.get("activity_endpoint")
        if activity_endpoint:
            # make ActivityInfo objects rendercache-key friendly
            # TODO: figure out a more general solution for this if
            # we need to do this for other thrift-generated objects
            ActivityInfo.cache_key = lambda self, style: repr(self)

            activity_pool = ThriftConnectionPool(activity_endpoint, timeout=0.1)
            self.baseplate.add_to_context("activity_service",
                ThriftContextFactory(activity_pool, ActivityService.Client))

        self.startup_timer.intermediate("thrift")

        ################# CASSANDRA
        keyspace = "reddit"
        self.cassandra_pools = {
            "main":
                StatsCollectingConnectionPool(
                    keyspace,
                    stats=self.stats,
                    logging_name="main",
                    server_list=self.cassandra_seeds,
                    pool_size=self.cassandra_pool_size,
                    timeout=4,
                    max_retries=3,
                    prefill=False
                ),
        }

        permacache_cf = Permacache._setup_column_family(
            'permacache',
            self.cassandra_pools[self.cassandra_default_pool],
        )

        self.startup_timer.intermediate("cassandra")

        ################# POSTGRES
        self.dbm = self.load_db_params()
        self.startup_timer.intermediate("postgres")

        ################# CHAINS
        # initialize caches. Any cache-chains built here must be added
        # to cache_chains (closed around by reset_caches) so that they
        # can properly reset their local components
        cache_chains = {}
        localcache_cls = (SelfEmptyingCache if self.running_as_script
                          else LocalCache)
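        # (scripts never get per-request cache resets, so SelfEmptyingCache
        # presumably bounds memory by emptying itself when it grows too
        # large; request threads use LocalCache, reset per request)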

        if stalecaches:
            self.cache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                memcaches,
            )
        else:
            self.cache = CacheChain((localcache_cls(), memcaches))
        cache_chains.update(cache=self.cache)

        if stalecaches:
            self.thingcache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                self.mcrouter,
            )
        else:
            self.thingcache = CacheChain((localcache_cls(), self.mcrouter))
        cache_chains.update(thingcache=self.thingcache)

        if stalecaches:
            self.memoizecache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                memoizecaches,
            )
        else:
            self.memoizecache = MemcacheChain(
                (localcache_cls(), memoizecaches))
        cache_chains.update(memoizecache=self.memoizecache)

        if stalecaches:
            self.srmembercache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                srmembercaches,
            )
        else:
            self.srmembercache = MemcacheChain(
                (localcache_cls(), srmembercaches))
        cache_chains.update(srmembercache=self.srmembercache)

        if stalecaches:
            self.relcache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                relcaches,
            )
        else:
            self.relcache = MemcacheChain(
                (localcache_cls(), relcaches))
        cache_chains.update(relcache=self.relcache)

        self.ratelimitcache = MemcacheChain(
                (localcache_cls(), ratelimitcaches))
        cache_chains.update(ratelimitcache=self.ratelimitcache)

        # rendercache holds rendered partial templates.
        self.rendercache = MemcacheChain((
            localcache_cls(),
            self.mcrouter,
        ))
        cache_chains.update(rendercache=self.rendercache)

        # pagecaches hold fully rendered pages (includes comment panes)
        self.pagecache = MemcacheChain((
            localcache_cls(),
            self.mcrouter,
        ))
        cache_chains.update(pagecache=self.pagecache)

        # cassandra_local_cache is used for request-local caching in tdb_cassandra
        self.cassandra_local_cache = localcache_cls()
        cache_chains.update(cassandra_local_cache=self.cassandra_local_cache)

        if stalecaches:
            permacache_cache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                permacache_memcaches,
            )
        else:
            permacache_cache = CacheChain(
                (localcache_cls(), permacache_memcaches),
            )
        cache_chains.update(permacache=permacache_cache)

        self.permacache = Permacache(
            permacache_cache,
            permacache_cf,
            lock_factory=self.make_lock,
        )

        # hardcache is used for various things that tend to expire
        # TODO: replace hardcache w/ cassandra stuff
        self.hardcache = HardcacheChain(
            (localcache_cls(), hardcache_memcaches, HardCache(self)),
            cache_negative_results=True,
        )
        cache_chains.update(hardcache=self.hardcache)

        # I know this sucks, but we need non-request-threads to be
        # able to reset the caches, so we need them to be able to close
        # around 'cache_chains' without being able to call getattr on
        # 'g'
        def reset_caches():
            for name, chain in cache_chains.iteritems():
                if isinstance(chain, TransitionalCache):
                    chain = chain.read_chain

                chain.reset()
                if isinstance(chain, LocalCache):
                    continue
                elif isinstance(chain, StaleCacheChain):
                    chain.stats = StaleCacheStats(self.stats, name)
                else:
                    chain.stats = CacheStats(self.stats, name)
        self.cache_chains = cache_chains

        self.reset_caches = reset_caches
        self.reset_caches()

        self.startup_timer.intermediate("cache_chains")

        # try to set the source control revision numbers
        self.versions = {}
        r2_root = os.path.dirname(os.path.dirname(self.paths["root"]))
        r2_gitdir = os.path.join(r2_root, ".git")
        self.short_version = self.record_repo_version("r2", r2_gitdir)

        if I18N_PATH:
            i18n_git_path = os.path.join(os.path.dirname(I18N_PATH), ".git")
            self.record_repo_version("i18n", i18n_git_path)

        # Initialize the amqp module globals, start the worker, etc.
        r2.lib.amqp.initialize(self)

        self.events = EventQueue()

        self.startup_timer.intermediate("revisions")

    def setup_complete(self):
        self.startup_timer.stop()
        self.stats.flush()

        if self.log_start:
            self.log.error(
                "%s:%s started %s at %s (took %.02fs)",
                self.reddit_host,
                self.reddit_pid,
                self.short_version,
                datetime.now().strftime("%H:%M:%S"),
                self.startup_timer.elapsed_seconds()
            )

        if einhorn.is_worker():
            einhorn.ack_startup()

    def record_repo_version(self, repo_name, git_dir):
        """Get the currently checked out git revision for a given repository,
        record it in g.versions, and return the short version of the hash."""
        try:
            subprocess.check_output
        except AttributeError:
            # python 2.6 compat
            pass
        else:
            try:
                revision = subprocess.check_output(["git",
                                                    "--git-dir", git_dir,
                                                    "rev-parse", "HEAD"])
            except subprocess.CalledProcessError, e:
                self.log.warning("Unable to fetch git revision: %r", e)
            else:
                # (body truncated in this listing; reconstructed from the
                # docstring: record the revision and return the short hash)
                revision = revision.strip()
                self.versions[repo_name] = revision
                return revision[:7]
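
Worth noting about the chains built in this example: a read walks the chain
from the cheapest layer outward, and a hit in a later layer back-fills the
earlier ones. A rough sketch of that read-through pattern (illustrative only;
ReadThroughChain is hypothetical, not the actual CacheChain API):

class ReadThroughChain(object):
    """Illustrative read-through lookup over an ordered list of caches."""

    def __init__(self, caches):
        self.caches = list(caches)  # e.g. [local, stalecache, memcached]

    def get(self, key):
        for i, cache in enumerate(self.caches):
            value = cache.get(key)
            if value is not None:
                # back-fill the faster layers that missed
                for earlier in self.caches[:i]:
                    earlier.set(key, value)
                return value
        return None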
Example #8
class Globals(object):
    spec = {

        ConfigValue.int: [
            'db_pool_size',
            'db_pool_overflow_size',
            'page_cache_time',
            'commentpane_cache_time',
            'num_mc_clients',
            'MIN_DOWN_LINK',
            'MIN_UP_KARMA',
            'MIN_DOWN_KARMA',
            'MIN_RATE_LIMIT_KARMA',
            'MIN_RATE_LIMIT_COMMENT_KARMA',
            'VOTE_AGE_LIMIT',
            'REPLY_AGE_LIMIT',
            'REPORT_AGE_LIMIT',
            'HOT_PAGE_AGE',
            'RATELIMIT',
            'QUOTA_THRESHOLD',
            'ADMIN_COOKIE_TTL',
            'ADMIN_COOKIE_MAX_IDLE',
            'OTP_COOKIE_TTL',
            'num_comments',
            'max_comments',
            'max_comments_gold',
            'num_default_reddits',
            'num_query_queue_workers',
            'max_sr_images',
            'num_serendipity',
            'sr_dropdown_threshold',
            'comment_visits_period',
            'min_membership_create_community',
            'bcrypt_work_factor',
            'cassandra_pool_size',
            'sr_banned_quota',
            'sr_wikibanned_quota',
            'sr_wikicontributor_quota',
            'sr_moderator_quota',
            'sr_contributor_quota',
            'sr_quota_time',
            'wiki_keep_recent_days',
            'wiki_max_page_length_bytes',
            'wiki_max_page_name_length',
            'wiki_max_page_separators',
        ],

        ConfigValue.float: [
            'min_promote_bid',
            'max_promote_bid',
            'statsd_sample_rate',
            'querycache_prune_chance',
        ],

        ConfigValue.bool: [
            'debug',
            'translator',
            'log_start',
            'sqlprinting',
            'template_debug',
            'reload_templates',
            'uncompressedJS',
            'enable_doquery',
            'use_query_cache',
            'write_query_queue',
            'css_killswitch',
            'db_create_tables',
            'disallow_db_writes',
            'exception_logging',
            'disable_ratelimit',
            'amqp_logging',
            'read_only_mode',
            'disable_wiki',
            'heavy_load_mode',
            's3_media_direct',
            'disable_captcha',
            'disable_ads',
            'disable_require_admin_otp',
            'static_pre_gzipped',
            'static_secure_pre_gzipped',
            'trust_local_proxies',
            'shard_link_vote_queues',
        ],

        ConfigValue.tuple: [
            'plugins',
            'stalecaches',
            'memcaches',
            'lockcaches',
            'permacache_memcaches',
            'rendercaches',
            'pagecaches',
            'cassandra_seeds',
            'admins',
            'sponsors',
            'automatic_reddits',
            'agents',
            'allowed_css_linked_domains',
            'authorized_cnames',
            'hardcache_categories',
            's3_media_buckets',
            'allowed_pay_countries',
            'case_sensitive_domains',
            'reserved_subdomains',
        ],

        ConfigValue.choice: {
             'cassandra_rcl': {
                 'ONE': CL_ONE,
                 'QUORUM': CL_QUORUM
             },
             'cassandra_wcl': {
                 'ONE': CL_ONE,
                 'QUORUM': CL_QUORUM
             },
        },

        ConfigValue.days: [
            'MODWINDOW',
        ],

        config_gold_price: [
            'gold_month_price',
            'gold_year_price',
        ],
    }

    live_config_spec = {
        ConfigValue.bool: [
            'frontpage_dart',
        ],
        ConfigValue.float: [
            'spotlight_interest_sub_p',
            'spotlight_interest_nosub_p',
        ],
        ConfigValue.tuple: [
            'sr_discovery_links',
            'fastlane_links',
        ],
        ConfigValue.dict(ConfigValue.int, ConfigValue.float): [
            'comment_tree_version_weights',
        ],
        ConfigValue.messages: [
            'goldvertisement_blurbs',
        ],
    }

    def __init__(self, global_conf, app_conf, paths, **extra):
        """
        Globals acts as a container for objects available throughout
        the life of the application.

        One instance of Globals is created by Pylons during
        application initialization and is available during requests
        via the 'g' variable.

        ``global_conf``
            The same variable used throughout ``config/middleware.py``
            namely, the variables from the ``[DEFAULT]`` section of the
            configuration file.

        ``app_conf``
            The same ``kw`` dictionary used throughout
            ``config/middleware.py`` namely, the variables from the
            section in the config file for your application.

        ``extra``
            The configuration returned from ``load_config`` in 
            ``config/middleware.py`` which may be of use in the setup of
            your global variables.

        """

        global_conf.setdefault("debug", False)

        self.config = ConfigValueParser(global_conf)
        self.config.add_spec(self.spec)
        self.plugins = PluginLoader(self.config.get("plugins", []))

        self.stats = Stats(self.config.get('statsd_addr'),
                           self.config.get('statsd_sample_rate'))
        self.startup_timer = self.stats.get_timer("app_startup")
        self.startup_timer.start()

        self.paths = paths

        self.running_as_script = global_conf.get('running_as_script', False)
        
        # turn on for language support
        self.lang = getattr(self, 'site_lang', 'en')
        self.languages, self.lang_name = \
            get_active_langs(default_lang=self.lang)

        all_languages = self.lang_name.keys()
        all_languages.sort()
        self.all_languages = all_languages
        
        # set default time zone if one is not set
        tz = global_conf.get('timezone', 'UTC')
        self.tz = pytz.timezone(tz)
        
        dtz = global_conf.get('display_timezone', tz)
        self.display_tz = pytz.timezone(dtz)

        self.startup_timer.intermediate("init")

    def __getattr__(self, name):
        if not name.startswith('_') and name in self.config:
            return self.config[name]
        else:
            raise AttributeError

    def setup(self):
        self.queues = queues.declare_queues(self)

        ################# CONFIGURATION
        # AMQP is required
        if not self.amqp_host:
            raise ValueError("amqp_host not set in the .ini")

        # This requirement doesn't *have* to be a requirement, but there are
        # bugs at the moment that will pop up if you violate it
        # XXX: get rid of these options. new query cache is always on.
        if self.write_query_queue and not self.use_query_cache:
            raise Exception("write_query_queue requires use_query_cache")

        if not self.cassandra_seeds:
            raise ValueError("cassandra_seeds not set in the .ini")

        # heavy load mode is read only mode with a different infobar
        if self.heavy_load_mode:
            self.read_only_mode = True

        origin_prefix = self.domain_prefix + "." if self.domain_prefix else ""
        self.origin = "http://" + origin_prefix + self.domain
        self.secure_domains = set([urlparse(self.payment_domain).netloc])

        self.trusted_domains = set([self.domain])
        self.trusted_domains.update(self.authorized_cnames)
        if self.https_endpoint:
            https_url = urlparse(self.https_endpoint)
            self.secure_domains.add(https_url.netloc)
            self.trusted_domains.add(https_url.hostname)
        if getattr(self, 'oauth_domain', None):
            self.secure_domains.add(self.oauth_domain)

        # load the unique hashed names of files under static
        static_files = os.path.join(self.paths.get('static_files'), 'static')
        names_file_path = os.path.join(static_files, 'names.json')
        if os.path.exists(names_file_path):
            with open(names_file_path) as handle:
                self.static_names = json.load(handle)
        else:
            self.static_names = {}

        #setup the logger
        self.log = logging.getLogger('reddit')
        self.log.addHandler(logging.StreamHandler())
        if self.debug:
            self.log.setLevel(logging.DEBUG)
        else:
            self.log.setLevel(logging.INFO)

        # set log level for pycountry which is chatty
        logging.getLogger('pycountry.db').setLevel(logging.CRITICAL)

        if not self.media_domain:
            self.media_domain = self.domain
        if self.media_domain == self.domain:
            print ("Warning: g.media_domain == g.domain. " +
                   "This may give untrusted content access to user cookies")

        for arg in sys.argv:
            tokens = arg.split("=")
            if len(tokens) == 2:
                k, v = tokens
                self.log.debug("Overriding g.%s to %s" % (k, v))
                setattr(self, k, v)

        self.reddit_host = socket.gethostname()
        self.reddit_pid  = os.getpid()

        if hasattr(signal, 'SIGUSR1'):
            # not all platforms have user signals
            signal.signal(signal.SIGUSR1, thread_dump)

        self.startup_timer.intermediate("configuration")

        ################# ZOOKEEPER
        # for now, zookeeper will be an optional part of the stack.
        # if it's not configured, we will grab the expected config from the
        # [live_config] section of the ini file
        zk_hosts = self.config.get("zookeeper_connection_string")
        if zk_hosts:
            from r2.lib.zookeeper import (connect_to_zookeeper,
                                          LiveConfig, LiveList)
            zk_username = self.config["zookeeper_username"]
            zk_password = self.config["zookeeper_password"]
            self.zookeeper = connect_to_zookeeper(zk_hosts, (zk_username,
                                                             zk_password))
            self.live_config = LiveConfig(self.zookeeper, LIVE_CONFIG_NODE)
            self.throttles = LiveList(self.zookeeper, "/throttles",
                                      map_fn=ipaddress.ip_network,
                                      reduce_fn=ipaddress.collapse_addresses)
        else:
            self.zookeeper = None
            parser = ConfigParser.RawConfigParser()
            parser.read([self.config["__file__"]])
            self.live_config = extract_live_config(parser, self.plugins)
            self.throttles = tuple()  # immutable since it's not real
        self.startup_timer.intermediate("zookeeper")

        ################# MEMCACHE
        num_mc_clients = self.num_mc_clients

        # the main memcache pool. used for most everything.
        self.memcache = CMemcache(self.memcaches, num_clients=num_mc_clients)

        # a smaller pool of caches used only for distributed locks.
        # TODO: move this to ZooKeeper
        self.lock_cache = CMemcache(self.lockcaches,
                                    num_clients=num_mc_clients)
        self.make_lock = make_lock_factory(self.lock_cache, self.stats)

        # memcaches used in front of the permacache CF in cassandra.
        # XXX: this is a legacy thing; permacache was made when C* didn't have
        # a row cache.
        if self.permacache_memcaches:
            permacache_memcaches = CMemcache(self.permacache_memcaches,
                                             num_clients=num_mc_clients)
        else:
            permacache_memcaches = None

        # the stalecache is a memcached local to the current app server used
        # for data that's frequently fetched but doesn't need to be fresh.
        if self.stalecaches:
            stalecaches = CMemcache(self.stalecaches,
                                    num_clients=num_mc_clients)
        else:
            stalecaches = None

        # rendercache holds rendered partial templates.
        rendercaches = CMemcache(
            self.rendercaches,
            noreply=True,
            no_block=True,
            num_clients=num_mc_clients,
        )

        # pagecaches hold fully rendered pages
        pagecaches = CMemcache(
            self.pagecaches,
            noreply=True,
            no_block=True,
            num_clients=num_mc_clients,
        )

        self.startup_timer.intermediate("memcache")

        ################# CASSANDRA
        keyspace = "reddit"
        self.cassandra_pools = {
            "main":
                StatsCollectingConnectionPool(
                    keyspace,
                    stats=self.stats,
                    logging_name="main",
                    server_list=self.cassandra_seeds,
                    pool_size=self.cassandra_pool_size,
                    timeout=4,
                    max_retries=3,
                    prefill=False
                ),
        }

        permacache_cf = CassandraCache(
            'permacache',
            self.cassandra_pools[self.cassandra_default_pool],
            read_consistency_level=self.cassandra_rcl,
            write_consistency_level=self.cassandra_wcl
        )

        self.startup_timer.intermediate("cassandra")

        ################# POSTGRES
        event.listens_for(engine.Engine, 'before_cursor_execute')(
            self.stats.pg_before_cursor_execute)
        event.listens_for(engine.Engine, 'after_cursor_execute')(
            self.stats.pg_after_cursor_execute)

        self.dbm = self.load_db_params()
        self.startup_timer.intermediate("postgres")

        ################# CHAINS
        # initialize caches. Any cache-chains built here must be added
        # to cache_chains (closed around by reset_caches) so that they
        # can properly reset their local components
        self.cache_chains = {}
        localcache_cls = (SelfEmptyingCache if self.running_as_script
                          else LocalCache)

        if stalecaches:
            self.cache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                self.memcache,
            )
        else:
            self.cache = MemcacheChain((localcache_cls(), self.memcache))
        self.cache_chains.update(cache=self.cache)

        self.rendercache = MemcacheChain((
            localcache_cls(),
            rendercaches,
        ))
        self.cache_chains.update(rendercache=self.rendercache)

        self.pagecache = MemcacheChain((
            localcache_cls(),
            pagecaches,
        ))
        self.cache_chains.update(pagecache=self.pagecache)

        # the thing_cache is used in tdb_cassandra.
        self.thing_cache = CacheChain((localcache_cls(),))
        self.cache_chains.update(thing_cache=self.thing_cache)

        self.permacache = CassandraCacheChain(
            localcache_cls(),
            permacache_cf,
            memcache=permacache_memcaches,
            lock_factory=self.make_lock,
        )
        self.cache_chains.update(permacache=self.permacache)

        # hardcache is used for various things that tend to expire
        # TODO: replace hardcache w/ cassandra stuff
        self.hardcache = HardcacheChain(
            (localcache_cls(), self.memcache, HardCache(self)),
            cache_negative_results=True,
        )
        self.cache_chains.update(hardcache=self.hardcache)

        # I know this sucks, but we need non-request-threads to be
        # able to reset the caches, so we need them to be able to close
        # around 'cache_chains' without being able to call getattr on
        # 'g'
        cache_chains = self.cache_chains.copy()
        def reset_caches():
            for name, chain in cache_chains.iteritems():
                chain.reset()
                chain.stats = CacheStats(self.stats, name)

        self.reset_caches = reset_caches
        self.reset_caches()

        self.startup_timer.intermediate("cache_chains")

        # try to set the source control revision numbers
        self.versions = {}
        r2_root = os.path.dirname(os.path.dirname(self.paths["root"]))
        r2_gitdir = os.path.join(r2_root, ".git")
        self.short_version = self.record_repo_version("r2", r2_gitdir)

        if I18N_PATH:
            i18n_git_path = os.path.join(os.path.dirname(I18N_PATH), ".git")
            self.record_repo_version("i18n", i18n_git_path)

        self.startup_timer.intermediate("revisions")

    def setup_complete(self):
        self.startup_timer.stop()
        self.stats.flush()

        if self.log_start:
            self.log.error(
                "%s:%s started %s at %s (took %.02fs)",
                self.reddit_host,
                self.reddit_pid,
                self.short_version,
                datetime.now().strftime("%H:%M:%S"),
                self.startup_timer.elapsed_seconds()
            )

    def record_repo_version(self, repo_name, git_dir):
        """Get the currently checked out git revision for a given repository,
        record it in g.versions, and return the short version of the hash."""
        try:
            subprocess.check_output
        except AttributeError:
            # python 2.6 compat
            pass
        else:
            try:
                revision = subprocess.check_output(["git",
                                                    "--git-dir", git_dir,
                                                    "rev-parse", "HEAD"])
            except subprocess.CalledProcessError, e:
                self.log.warning("Unable to fetch git revision: %r", e)
            else:
                # (body truncated in this listing; reconstructed from the
                # docstring: record the revision and return the short hash)
                revision = revision.strip()
                self.versions[repo_name] = revision
                return revision[:7]
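
The ConfigValue.choice entries in these examples (cassandra_rcl and
cassandra_wcl above) map a symbolic string from the ini file onto a constant
such as CL_ONE or CL_QUORUM. A hedged sketch of that kind of coercion
(choice_value is hypothetical; the real ConfigValue implementation is not
shown in this listing):

def choice_value(raw, choices):
    # `choices` maps the allowed ini strings to constants,
    # e.g. {"ONE": CL_ONE, "QUORUM": CL_QUORUM}
    try:
        return choices[raw.strip()]
    except KeyError:
        raise ValueError("%r is not one of %r" % (raw, sorted(choices)))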
Example #9
class Globals(object):
    spec = {

        ConfigValue.int: [
            'db_pool_size',
            'db_pool_overflow_size',
            'page_cache_time',
            'commentpane_cache_time',
            'num_mc_clients',
            'MIN_DOWN_LINK',
            'MIN_UP_KARMA',
            'MIN_DOWN_KARMA',
            'MIN_RATE_LIMIT_KARMA',
            'MIN_RATE_LIMIT_COMMENT_KARMA',
            'VOTE_AGE_LIMIT',
            'REPLY_AGE_LIMIT',
            'REPORT_AGE_LIMIT',
            'HOT_PAGE_AGE',
            'RATELIMIT',
            'QUOTA_THRESHOLD',
            'ADMIN_COOKIE_TTL',
            'ADMIN_COOKIE_MAX_IDLE',
            'OTP_COOKIE_TTL',
            'num_comments',
            'max_comments',
            'max_comments_gold',
            'num_default_reddits',
            'num_query_queue_workers',
            'max_sr_images',
            'num_serendipity',
            'sr_dropdown_threshold',
            'comment_visits_period',
            'min_membership_create_community',
            'bcrypt_work_factor',
            'cassandra_pool_size',
            'sr_banned_quota',
            'sr_wikibanned_quota',
            'sr_wikicontributor_quota',
            'sr_moderator_quota',
            'sr_contributor_quota',
            'sr_quota_time',
            'wiki_keep_recent_days',
            'wiki_max_page_length_bytes',
            'wiki_max_page_name_length',
            'wiki_max_page_separators',
        ],

        ConfigValue.float: [
            'min_promote_bid',
            'max_promote_bid',
            'statsd_sample_rate',
            'querycache_prune_chance',
        ],

        ConfigValue.bool: [
            'debug',
            'translator',
            'log_start',
            'sqlprinting',
            'template_debug',
            'reload_templates',
            'uncompressedJS',
            'enable_doquery',
            'use_query_cache',
            'write_query_queue',
            'css_killswitch',
            'db_create_tables',
            'disallow_db_writes',
            'exception_logging',
            'disable_ratelimit',
            'amqp_logging',
            'read_only_mode',
            'wiki_disabled',
            'heavy_load_mode',
            's3_media_direct',
            'disable_captcha',
            'disable_ads',
            'disable_require_admin_otp',
            'static_pre_gzipped',
            'static_secure_pre_gzipped',
            'trust_local_proxies',
            'shard_link_vote_queues',
        ],

        ConfigValue.tuple: [
            'plugins',
            'stalecaches',
            'memcaches',
            'lockcaches',
            'permacache_memcaches',
            'rendercaches',
            'cassandra_seeds',
            'admins',
            'sponsors',
            'automatic_reddits',
            'agents',
            'allowed_css_linked_domains',
            'authorized_cnames',
            'hardcache_categories',
            's3_media_buckets',
            'allowed_pay_countries',
            'case_sensitive_domains',
            'reserved_subdomains',
        ],

        ConfigValue.choice: {
             'cassandra_rcl': {
                 'ONE': CL_ONE,
                 'QUORUM': CL_QUORUM
             },
             'cassandra_wcl': {
                 'ONE': CL_ONE,
                 'QUORUM': CL_QUORUM
             },
        },

        ConfigValue.days: [
            'MODWINDOW',
        ],

        config_gold_price: [
            'gold_month_price',
            'gold_year_price',
        ],
    }

    live_config_spec = {
        ConfigValue.bool: [
            'frontpage_dart',
        ],
        ConfigValue.float: [
            'spotlight_interest_sub_p',
            'spotlight_interest_nosub_p',
        ],
        ConfigValue.tuple: [
            'sr_discovery_links',
            'fastlane_links',
        ],
    }

    def __init__(self, global_conf, app_conf, paths, **extra):
        """
        Globals acts as a container for objects available throughout
        the life of the application.

        One instance of Globals is created by Pylons during
        application initialization and is available during requests
        via the 'g' variable.

        ``global_conf``
            The same variable used throughout ``config/middleware.py``
            namely, the variables from the ``[DEFAULT]`` section of the
            configuration file.

        ``app_conf``
            The same ``kw`` dictionary used throughout
            ``config/middleware.py`` namely, the variables from the
            section in the config file for your application.

        ``extra``
            The configuration returned from ``load_config`` in 
            ``config/middleware.py`` which may be of use in the setup of
            your global variables.

        """

        global_conf.setdefault("debug", False)

        self.config = ConfigValueParser(global_conf)
        self.config.add_spec(self.spec)
        self.plugins = PluginLoader(self.config.get("plugins", []))

        self.stats = Stats(self.config.get('statsd_addr'),
                           self.config.get('statsd_sample_rate'))
        self.startup_timer = self.stats.get_timer("app_startup")
        self.startup_timer.start()

        self.paths = paths

        self.running_as_script = global_conf.get('running_as_script', False)
        
        # turn on for language support
        self.lang = getattr(self, 'site_lang', 'en')
        self.languages, self.lang_name = \
            get_active_langs(default_lang=self.lang)

        all_languages = self.lang_name.keys()
        all_languages.sort()
        self.all_languages = all_languages
        
        # set default time zone if one is not set
        tz = global_conf.get('timezone', 'UTC')
        self.tz = pytz.timezone(tz)
        
        dtz = global_conf.get('display_timezone', tz)
        self.display_tz = pytz.timezone(dtz)

        self.startup_timer.intermediate("init")

    def __getattr__(self, name):
        if not name.startswith('_') and name in self.config:
            return self.config[name]
        else:
            raise AttributeError

    def setup(self):
        self.queues = queues.declare_queues(self)

        ################# CONFIGURATION
        # AMQP is required
        if not self.amqp_host:
            raise ValueError("amqp_host not set in the .ini")

        # This requirement doesn't *have* to be a requirement, but there are
        # bugs at the moment that will pop up if you violate it
        # XXX: get rid of these options. new query cache is always on.
        if self.write_query_queue and not self.use_query_cache:
            raise Exception("write_query_queue requires use_query_cache")

        if not self.cassandra_seeds:
            raise ValueError("cassandra_seeds not set in the .ini")

        # heavy load mode is read only mode with a different infobar
        if self.heavy_load_mode:
            self.read_only_mode = True

        origin_prefix = self.domain_prefix + "." if self.domain_prefix else ""
        self.origin = "http://" + origin_prefix + self.domain
        self.secure_domains = set([urlparse(self.payment_domain).netloc])

        self.trusted_domains = set([self.domain])
        self.trusted_domains.update(self.authorized_cnames)
        if self.https_endpoint:
            https_url = urlparse(self.https_endpoint)
            self.secure_domains.add(https_url.netloc)
            self.trusted_domains.add(https_url.hostname)
        if getattr(self, 'oauth_domain', None):
            self.secure_domains.add(self.oauth_domain)

        # load the unique hashed names of files under static
        static_files = os.path.join(self.paths.get('static_files'), 'static')
        names_file_path = os.path.join(static_files, 'names.json')
        if os.path.exists(names_file_path):
            with open(names_file_path) as handle:
                self.static_names = json.load(handle)
        else:
            self.static_names = {}

        #setup the logger
        self.log = logging.getLogger('reddit')
        self.log.addHandler(logging.StreamHandler())
        if self.debug:
            self.log.setLevel(logging.DEBUG)
        else:
            self.log.setLevel(logging.INFO)

        # set log level for pycountry which is chatty
        logging.getLogger('pycountry.db').setLevel(logging.CRITICAL)

        if not self.media_domain:
            self.media_domain = self.domain
        if self.media_domain == self.domain:
            print ("Warning: g.media_domain == g.domain. " +
                   "This may give untrusted content access to user cookies")

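        # allow command-line arguments of the form key=value to override
        # attributes on g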
        for arg in sys.argv:
            tokens = arg.split("=")
            if len(tokens) == 2:
                k, v = tokens
                self.log.debug("Overriding g.%s to %s" % (k, v))
                setattr(self, k, v)

        self.reddit_host = socket.gethostname()
        self.reddit_pid  = os.getpid()

        if hasattr(signal, 'SIGUSR1'):
            # not all platforms have user signals
            signal.signal(signal.SIGUSR1, thread_dump)

        self.startup_timer.intermediate("configuration")

        ################# ZOOKEEPER
        # for now, zookeeper will be an optional part of the stack.
        # if it's not configured, we will grab the expected config from the
        # [live_config] section of the ini file
        zk_hosts = self.config.get("zookeeper_connection_string")
        if zk_hosts:
            from r2.lib.zookeeper import (connect_to_zookeeper,
                                          LiveConfig, LiveList)
            zk_username = self.config["zookeeper_username"]
            zk_password = self.config["zookeeper_password"]
            self.zookeeper = connect_to_zookeeper(zk_hosts, (zk_username,
                                                             zk_password))
            self.live_config = LiveConfig(self.zookeeper, LIVE_CONFIG_NODE)
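            # each throttle entry is parsed into an IP network, and
            # overlapping networks are collapsed into the smallest covering set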
            self.throttles = LiveList(self.zookeeper, "/throttles",
                                      map_fn=ipaddress.ip_network,
                                      reduce_fn=ipaddress.collapse_addresses)
        else:
            self.zookeeper = None
            parser = ConfigParser.RawConfigParser()
            parser.read([self.config["__file__"]])
            self.live_config = extract_live_config(parser, self.plugins)
            self.throttles = tuple()  # immutable since it's not real
        self.startup_timer.intermediate("zookeeper")

        ################# MEMCACHE
        num_mc_clients = self.num_mc_clients

        # the main memcache pool. used for most everything.
        self.memcache = CMemcache(self.memcaches, num_clients=num_mc_clients)

        # a smaller pool of caches used only for distributed locks.
        # TODO: move this to ZooKeeper
        self.lock_cache = CMemcache(self.lockcaches,
                                    num_clients=num_mc_clients)
        self.make_lock = make_lock_factory(self.lock_cache, self.stats)

        # memcaches used in front of the permacache CF in cassandra.
        # XXX: this is a legacy thing; permacache was made when C* didn't have
        # a row cache.
        if self.permacache_memcaches:
            permacache_memcaches = CMemcache(self.permacache_memcaches,
                                             num_clients=num_mc_clients)
        else:
            permacache_memcaches = None

        # the stalecache is a memcached local to the current app server used
        # for data that's frequently fetched but doesn't need to be fresh.
        if self.stalecaches:
            stalecaches = CMemcache(self.stalecaches,
                                    num_clients=num_mc_clients)
        else:
            stalecaches = None

        # rendercache holds rendered partial templates as well as fully
        # cached pages.
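        # noreply/no_block make writes fire-and-forget: losing a cached
        # render is cheaper than blocking on memcached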
        rendercaches = CMemcache(
            self.rendercaches,
            noreply=True,
            no_block=True,
            num_clients=num_mc_clients,
        )

        self.startup_timer.intermediate("memcache")

        ################# CASSANDRA
        keyspace = "reddit"
        self.cassandra_pools = {
            "main":
                StatsCollectingConnectionPool(
                    keyspace,
                    stats=self.stats,
                    logging_name="main",
                    server_list=self.cassandra_seeds,
                    pool_size=self.cassandra_pool_size,
                    timeout=2,
                    max_retries=3,
                    prefill=False
                ),
        }

        permacache_cf = CassandraCache(
            'permacache',
            self.cassandra_pools[self.cassandra_default_pool],
            read_consistency_level=self.cassandra_rcl,
            write_consistency_level=self.cassandra_wcl
        )

        self.startup_timer.intermediate("cassandra")

        ################# POSTGRES
        event.listens_for(engine.Engine, 'before_cursor_execute')(
            self.stats.pg_before_cursor_execute)
        event.listens_for(engine.Engine, 'after_cursor_execute')(
            self.stats.pg_after_cursor_execute)

        self.dbm = self.load_db_params()
        self.startup_timer.intermediate("postgres")

        ################# CHAINS
        # initialize caches. Any cache-chains built here must be added
        # to cache_chains (closed around by reset_caches) so that they
        # can properly reset their local components
        self.cache_chains = {}
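        # scripts don't get the per-request cleanup that empties LocalCache,
        # so they use a self-emptying variant to bound memory use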
        localcache_cls = (SelfEmptyingCache if self.running_as_script
                          else LocalCache)

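        # reads check the local cache, then the stalecache (which may return
        # slightly out-of-date values), and finally the shared memcache pool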
        if stalecaches:
            self.cache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                self.memcache,
            )
        else:
            self.cache = MemcacheChain((localcache_cls(), self.memcache))
        self.cache_chains.update(cache=self.cache)

        self.rendercache = MemcacheChain((
            localcache_cls(),
            rendercaches,
        ))
        self.cache_chains.update(rendercache=self.rendercache)

        # the thing_cache is used in tdb_cassandra.
        self.thing_cache = CacheChain((localcache_cls(),))
        self.cache_chains.update(thing_cache=self.thing_cache)

        self.permacache = CassandraCacheChain(
            localcache_cls(),
            permacache_cf,
            memcache=permacache_memcaches,
            lock_factory=self.make_lock,
        )
        self.cache_chains.update(permacache=self.permacache)

        # hardcache is used for various things that tend to expire
        # TODO: replace hardcache w/ cassandra stuff
        self.hardcache = HardcacheChain(
            (localcache_cls(), self.memcache, HardCache(self)),
            cache_negative_results=True,
        )
        self.cache_chains.update(hardcache=self.hardcache)

        # I know this sucks, but we need non-request-threads to be
        # able to reset the caches, so we need them to be able to close
        # around 'cache_chains' without being able to call getattr on
        # 'g'
        cache_chains = self.cache_chains.copy()
        def reset_caches():
            for name, chain in cache_chains.iteritems():
                chain.reset()
                chain.stats = CacheStats(self.stats, name)

        self.reset_caches = reset_caches
        self.reset_caches()

        self.startup_timer.intermediate("cache_chains")

        # try to set the source control revision numbers
        self.versions = {}
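        # paths["root"] is the r2 package directory; two dirnames up is the
        # repository checkout where the .git directory lives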
        r2_root = os.path.dirname(os.path.dirname(self.paths["root"]))
        r2_gitdir = os.path.join(r2_root, ".git")
        self.short_version = self.record_repo_version("r2", r2_gitdir)

        if I18N_PATH:
            i18n_git_path = os.path.join(os.path.dirname(I18N_PATH), ".git")
            self.record_repo_version("i18n", i18n_git_path)

        self.startup_timer.intermediate("revisions")

    def setup_complete(self):
        self.startup_timer.stop()
        self.stats.flush()

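        # logged at ERROR so the startup line is emitted regardless of the
        # configured log level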
        if self.log_start:
            self.log.error(
                "reddit app %s:%s started %s at %s (took %.02f seconds)",
                self.reddit_host,
                self.reddit_pid,
                self.short_version,
                datetime.now(),
                self.startup_timer.elapsed_seconds()
            )

    def record_repo_version(self, repo_name, git_dir):
        """Get the currently checked out git revision for a given repository,
        record it in g.versions, and return the short version of the hash."""
        try:
            subprocess.check_output
        except AttributeError:
            # subprocess.check_output is new in Python 2.7; skip on 2.6
            pass
        else:
            try:
                revision = subprocess.check_output(["git",
                                                    "--git-dir", git_dir,
                                                    "rev-parse", "HEAD"])
            except subprocess.CalledProcessError, e:
                self.log.warning("Unable to fetch git revision: %r", e)
            else:
                revision = revision.strip()
                # record it in g.versions and return the short hash, as the
                # docstring describes
                self.versions[repo_name] = revision
                return revision[:7]

Example #10
    def setup(self):
        # heavy load mode is read only mode with a different infobar
        if self.heavy_load_mode:
            self.read_only_mode = True

        if hasattr(signal, 'SIGUSR1'):
            # not all platforms have user signals
            signal.signal(signal.SIGUSR1, thread_dump)

        # initialize caches. Any cache-chains built here must be added
        # to cache_chains (closed around by reset_caches) so that they
        # can properly reset their local components

        localcache_cls = (SelfEmptyingCache
                          if self.running_as_script else LocalCache)
        num_mc_clients = self.num_mc_clients

        self.cache_chains = {}

        # for now, zookeeper will be an optional part of the stack.
        # if it's not configured, we will grab the expected config from the
        # [live_config] section of the ini file
        zk_hosts = self.config.get("zookeeper_connection_string")
        if zk_hosts:
            from r2.lib.zookeeper import (connect_to_zookeeper, LiveConfig,
                                          LiveList)
            zk_username = self.config["zookeeper_username"]
            zk_password = self.config["zookeeper_password"]
            self.zookeeper = connect_to_zookeeper(zk_hosts,
                                                  (zk_username, zk_password))
            self.live_config = LiveConfig(self.zookeeper, LIVE_CONFIG_NODE)
            self.throttles = LiveList(self.zookeeper,
                                      "/throttles",
                                      map_fn=ipaddress.ip_network,
                                      reduce_fn=ipaddress.collapse_addresses)
        else:
            self.zookeeper = None
            parser = ConfigParser.RawConfigParser()
            parser.read([self.config["__file__"]])
            self.live_config = extract_live_config(parser, self.plugins)
            self.throttles = tuple()  # immutable since it's not real

        self.memcache = CMemcache(self.memcaches, num_clients=num_mc_clients)
        self.lock_cache = CMemcache(self.lockcaches,
                                    num_clients=num_mc_clients)

        self.stats = Stats(self.config.get('statsd_addr'),
                           self.config.get('statsd_sample_rate'))

        event.listens_for(engine.Engine, 'before_cursor_execute')(
            self.stats.pg_before_cursor_execute)
        event.listens_for(engine.Engine, 'after_cursor_execute')(
            self.stats.pg_after_cursor_execute)

        self.make_lock = make_lock_factory(self.lock_cache, self.stats)

        if not self.cassandra_seeds:
            raise ValueError("cassandra_seeds not set in the .ini")

        keyspace = "reddit"
        self.cassandra_pools = {
            "main":
            StatsCollectingConnectionPool(keyspace,
                                          stats=self.stats,
                                          logging_name="main",
                                          server_list=self.cassandra_seeds,
                                          pool_size=self.cassandra_pool_size,
                                          timeout=2,
                                          max_retries=3,
                                          prefill=False),
        }

        perma_memcache = (CMemcache(self.permacache_memcaches,
                                    num_clients=num_mc_clients)
                          if self.permacache_memcaches else None)
        self.permacache = CassandraCacheChain(
            localcache_cls(),
            CassandraCache('permacache',
                           self.cassandra_pools[self.cassandra_default_pool],
                           read_consistency_level=self.cassandra_rcl,
                           write_consistency_level=self.cassandra_wcl),
            memcache=perma_memcache,
            lock_factory=self.make_lock)

        self.cache_chains.update(permacache=self.permacache)

        # hardcache is done after the db info is loaded, and then the
        # chains are reset to use the appropriate initial entries

        if self.stalecaches:
            self.cache = StaleCacheChain(
                localcache_cls(),
                CMemcache(self.stalecaches, num_clients=num_mc_clients),
                self.memcache)
        else:
            self.cache = MemcacheChain((localcache_cls(), self.memcache))
        self.cache_chains.update(cache=self.cache)

        self.rendercache = MemcacheChain(
            (localcache_cls(),
             CMemcache(self.rendercaches,
                       noreply=True,
                       no_block=True,
                       num_clients=num_mc_clients)))
        self.cache_chains.update(rendercache=self.rendercache)

        self.thing_cache = CacheChain((localcache_cls(), ))
        self.cache_chains.update(thing_cache=self.thing_cache)

        # load the database info
        self.dbm = self.load_db_params()

        # can't do this until load_db_params() has been called
        self.hardcache = HardcacheChain(
            (localcache_cls(), self.memcache, HardCache(self)),
            cache_negative_results=True)
        self.cache_chains.update(hardcache=self.hardcache)

        # I know this sucks, but we need non-request-threads to be
        # able to reset the caches, so we need them to be able to close
        # around 'cache_chains' without being able to call getattr on
        # 'g'
        cache_chains = self.cache_chains.copy()

        def reset_caches():
            for name, chain in cache_chains.iteritems():
                chain.reset()
                chain.stats = CacheStats(self.stats, name)

        self.reset_caches = reset_caches
        self.reset_caches()

        # convert the configured MODWINDOW (a number of days) into a timedelta
        self.MODWINDOW = timedelta(self.MODWINDOW)

        self.REDDIT_MAIN = bool(os.environ.get('REDDIT_MAIN'))

        origin_prefix = self.domain_prefix + "." if self.domain_prefix else ""
        self.origin = "http://" + origin_prefix + self.domain
        self.secure_domains = set([urlparse(self.payment_domain).netloc])

        self.trusted_domains = set([self.domain])
        self.trusted_domains.update(self.authorized_cnames)
        if self.https_endpoint:
            https_url = urlparse(self.https_endpoint)
            self.secure_domains.add(https_url.netloc)
            self.trusted_domains.add(https_url.hostname)
        if getattr(self, 'oauth_domain', None):
            self.secure_domains.add(self.oauth_domain)

        # load the unique hashed names of files under static
        static_files = os.path.join(self.paths.get('static_files'), 'static')
        names_file_path = os.path.join(static_files, 'names.json')
        if os.path.exists(names_file_path):
            with open(names_file_path) as handle:
                self.static_names = json.load(handle)
        else:
            self.static_names = {}

        # set up the logger
        self.log = logging.getLogger('reddit')
        self.log.addHandler(logging.StreamHandler())
        if self.debug:
            self.log.setLevel(logging.DEBUG)
        else:
            self.log.setLevel(logging.INFO)

        # set log level for pycountry which is chatty
        logging.getLogger('pycountry.db').setLevel(logging.CRITICAL)

        if not self.media_domain:
            self.media_domain = self.domain
        if self.media_domain == self.domain:
            print("Warning: g.media_domain == g.domain. " +
                  "This may give untrusted content access to user cookies")

        self.reddit_host = socket.gethostname()
        self.reddit_pid = os.getpid()

        for arg in sys.argv:
            tokens = arg.split("=")
            if len(tokens) == 2:
                k, v = tokens
                self.log.debug("Overriding g.%s to %s" % (k, v))
                setattr(self, k, v)

        # if we're going to use the query_queue, we need amqp
        if self.write_query_queue and not self.amqp_host:
            raise Exception("amqp_host must be defined to use the query queue")

        # This requirement doesn't *have* to be a requirement, but there are
        # bugs at the moment that will pop up if you violate it
        if self.write_query_queue and not self.use_query_cache:
            raise Exception("write_query_queue requires use_query_cache")

        # try to set the source control revision numbers
        self.versions = {}
        r2_root = os.path.dirname(os.path.dirname(self.paths["root"]))
        r2_gitdir = os.path.join(r2_root, ".git")
        self.short_version = self.record_repo_version("r2", r2_gitdir)

        if I18N_PATH:
            i18n_git_path = os.path.join(os.path.dirname(I18N_PATH), ".git")
            self.record_repo_version("i18n", i18n_git_path)

        if self.log_start:
            self.log.error("reddit app %s:%s started %s at %s" %
                           (self.reddit_host, self.reddit_pid,
                            self.short_version, datetime.now()))
Example #11
    def setup(self, global_conf):
        # heavy load mode is read only mode with a different infobar
        if self.heavy_load_mode:
            self.read_only_mode = True

        if hasattr(signal, 'SIGUSR1'):
            # not all platforms have user signals
            signal.signal(signal.SIGUSR1, thread_dump)

        # initialize caches. Any cache-chains built here must be added
        # to cache_chains (closed around by reset_caches) so that they
        # can properly reset their local components

        localcache_cls = (SelfEmptyingCache
                          if self.running_as_script else LocalCache)
        num_mc_clients = self.num_mc_clients

        self.cache_chains = {}

        self.memcache = CMemcache(self.memcaches, num_clients=num_mc_clients)
        self.make_lock = make_lock_factory(self.memcache)

        self.stats = Stats(global_conf.get('statsd_addr'),
                           global_conf.get('statsd_sample_rate'))

        if not self.cassandra_seeds:
            raise ValueError("cassandra_seeds not set in the .ini")

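        # two pools: "main" retries failed operations, while "noretries"
        # uses a very short timeout and fails fast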
        keyspace = "reddit"
        self.cassandra_pools = {
            "main":
            StatsCollectingConnectionPool(keyspace,
                                          stats=self.stats,
                                          logging_name="main",
                                          server_list=self.cassandra_seeds,
                                          pool_size=len(self.cassandra_seeds),
                                          timeout=2,
                                          max_retries=3,
                                          prefill=False),
            "noretries":
            StatsCollectingConnectionPool(keyspace,
                                          stats=self.stats,
                                          logging_name="noretries",
                                          server_list=self.cassandra_seeds,
                                          pool_size=len(self.cassandra_seeds),
                                          timeout=.1,
                                          max_retries=0,
                                          prefill=False),
        }

        perma_memcache = (CMemcache(self.permacache_memcaches,
                                    num_clients=num_mc_clients)
                          if self.permacache_memcaches else None)
        self.permacache = CassandraCacheChain(
            localcache_cls(),
            CassandraCache('permacache',
                           self.cassandra_pools[self.cassandra_default_pool],
                           read_consistency_level=self.cassandra_rcl,
                           write_consistency_level=self.cassandra_wcl),
            memcache=perma_memcache,
            lock_factory=self.make_lock)

        self.cache_chains.update(permacache=self.permacache)

        # hardcache is done after the db info is loaded, and then the
        # chains are reset to use the appropriate initial entries

        if self.stalecaches:
            self.cache = StaleCacheChain(
                localcache_cls(),
                CMemcache(self.stalecaches, num_clients=num_mc_clients),
                self.memcache)
        else:
            self.cache = MemcacheChain((localcache_cls(), self.memcache))
        self.cache_chains.update(cache=self.cache)

        self.rendercache = MemcacheChain(
            (localcache_cls(),
             CMemcache(self.rendercaches,
                       noreply=True,
                       no_block=True,
                       num_clients=num_mc_clients)))
        self.cache_chains.update(rendercache=self.rendercache)

        self.servicecache = MemcacheChain(
            (localcache_cls(),
             CMemcache(self.servicecaches, num_clients=num_mc_clients)))
        self.cache_chains.update(servicecache=self.servicecache)

        self.thing_cache = CacheChain((localcache_cls(), ))
        self.cache_chains.update(thing_cache=self.thing_cache)

        # load the database info
        self.dbm = self.load_db_params(global_conf)

        # can't do this until load_db_params() has been called
        self.hardcache = HardcacheChain(
            (localcache_cls(), self.memcache, HardCache(self)),
            cache_negative_results=True)
        self.cache_chains.update(hardcache=self.hardcache)

        # I know this sucks, but we need non-request-threads to be
        # able to reset the caches, so we need them to be able to close
        # around 'cache_chains' without being able to call getattr on
        # 'g'
        cache_chains = self.cache_chains.copy()

        def reset_caches():
            for name, chain in cache_chains.iteritems():
                chain.reset()
                chain.stats = CacheStats(self.stats, name)

        self.reset_caches = reset_caches
        self.reset_caches()

        # set up the query stats collector
        self.stats_collector = QueryStats()

        # convert the configured MODWINDOW (a number of days) into a timedelta
        self.MODWINDOW = timedelta(self.MODWINDOW)

        self.REDDIT_MAIN = bool(os.environ.get('REDDIT_MAIN'))

        origin_prefix = self.domain_prefix + "." if self.domain_prefix else ""
        self.origin = "http://" + origin_prefix + self.domain
        self.secure_domains = set([urlparse(self.payment_domain).netloc])

        self.trusted_domains = set([self.domain])
        self.trusted_domains.update(self.authorized_cnames)
        if self.https_endpoint:
            https_url = urlparse(self.https_endpoint)
            self.secure_domains.add(https_url.netloc)
            self.trusted_domains.add(https_url.hostname)

        # load the unique hashed names of files under static
        static_files = os.path.join(self.paths.get('static_files'), 'static')
        names_file_path = os.path.join(static_files, 'names.json')
        if os.path.exists(names_file_path):
            with open(names_file_path) as handle:
                self.static_names = json.load(handle)
        else:
            self.static_names = {}

        # set up the logger
        self.log = logging.getLogger('reddit')
        self.log.addHandler(logging.StreamHandler())
        if self.debug:
            self.log.setLevel(logging.DEBUG)
        else:
            self.log.setLevel(logging.INFO)

        # set log level for pycountry which is chatty
        logging.getLogger('pycountry.db').setLevel(logging.CRITICAL)

        if not self.media_domain:
            self.media_domain = self.domain
        if self.media_domain == self.domain:
            print("Warning: g.media_domain == g.domain. " +
                  "This may give untrusted content access to user cookies")

        self.reddit_host = socket.gethostname()
        self.reddit_pid = os.getpid()

        for arg in sys.argv:
            tokens = arg.split("=")
            if len(tokens) == 2:
                k, v = tokens
                self.log.debug("Overriding g.%s to %s" % (k, v))
                setattr(self, k, v)

        # the shutdown toggle
        self.shutdown = False

        # if we're going to use the query_queue, we need amqp
        if self.write_query_queue and not self.amqp_host:
            raise Exception("amqp_host must be defined to use the query queue")

        # This requirement doesn't *have* to be a requirement, but there are
        # bugs at the moment that will pop up if you violate it
        if self.write_query_queue and not self.use_query_cache:
            raise Exception("write_query_queue requires use_query_cache")

        # try to set the source control revision number
        try:
            self.version = subprocess.check_output(
                ["git", "rev-parse", "HEAD"])
        except subprocess.CalledProcessError, e:
            self.log.info("Couldn't read source revision (%r)" % e)
            self.version = self.short_version = '(unknown)'
        else:
            # on success, derive the short hash used by startup logging
            self.short_version = self.version[:7]

Example #12
class Globals(object):
    spec = {
        ConfigValue.int: [
            "db_pool_size",
            "db_pool_overflow_size",
            "page_cache_time",
            "commentpane_cache_time",
            "num_mc_clients",
            "MAX_CAMPAIGNS_PER_LINK",
            "MIN_DOWN_LINK",
            "MIN_UP_KARMA",
            "MIN_DOWN_KARMA",
            "MIN_RATE_LIMIT_KARMA",
            "MIN_RATE_LIMIT_COMMENT_KARMA",
            "VOTE_AGE_LIMIT",
            "REPLY_AGE_LIMIT",
            "REPORT_AGE_LIMIT",
            "HOT_PAGE_AGE",
            "RATELIMIT",
            "QUOTA_THRESHOLD",
            "ADMIN_COOKIE_TTL",
            "ADMIN_COOKIE_MAX_IDLE",
            "OTP_COOKIE_TTL",
            "num_comments",
            "max_comments",
            "max_comments_gold",
            "num_default_reddits",
            "max_sr_images",
            "num_serendipity",
            "sr_dropdown_threshold",
            "comment_visits_period",
            "min_membership_create_community",
            "bcrypt_work_factor",
            "cassandra_pool_size",
            "sr_banned_quota",
            "sr_wikibanned_quota",
            "sr_wikicontributor_quota",
            "sr_moderator_invite_quota",
            "sr_contributor_quota",
            "sr_quota_time",
            "sr_invite_limit",
            "wiki_keep_recent_days",
            "wiki_max_page_length_bytes",
            "wiki_max_page_name_length",
            "wiki_max_page_separators",
        ],
        ConfigValue.float: [
            "min_promote_bid",
            "max_promote_bid",
            "statsd_sample_rate",
            "querycache_prune_chance",
        ],
        ConfigValue.bool: [
            "debug",
            "log_start",
            "sqlprinting",
            "template_debug",
            "reload_templates",
            "uncompressedJS",
            "css_killswitch",
            "db_create_tables",
            "disallow_db_writes",
            "disable_ratelimit",
            "amqp_logging",
            "read_only_mode",
            "disable_wiki",
            "heavy_load_mode",
            "s3_media_direct",
            "disable_captcha",
            "disable_ads",
            "disable_require_admin_otp",
            "static_pre_gzipped",
            "static_secure_pre_gzipped",
            "trust_local_proxies",
            "shard_link_vote_queues",
            "shard_commentstree_queues",
        ],
        ConfigValue.tuple: [
            "plugins",
            "stalecaches",
            "memcaches",
            "lockcaches",
            "permacache_memcaches",
            "rendercaches",
            "pagecaches",
            "memoizecaches",
            "cassandra_seeds",
            "admins",
            "sponsors",
            "automatic_reddits",
            "allowed_css_linked_domains",
            "authorized_cnames",
            "hardcache_categories",
            "s3_media_buckets",
            "allowed_pay_countries",
            "case_sensitive_domains",
            "reserved_subdomains",
            "TRAFFIC_LOG_HOSTS",
            "exempt_login_user_agents",
            "timed_templates",
            "sample_multis",
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.int): ["agents"],
        ConfigValue.str: [
            "wiki_page_registration_info",
            "wiki_page_privacy_policy",
            "wiki_page_user_agreement",
        ],
        ConfigValue.choice: {
            "cassandra_rcl": {"ONE": CL_ONE, "QUORUM": CL_QUORUM},
            "cassandra_wcl": {"ONE": CL_ONE, "QUORUM": CL_QUORUM},
        },
        config_gold_price: ["gold_month_price", "gold_year_price"],
    }

    live_config_spec = {
        ConfigValue.bool: ["frontpage_dart"],
        ConfigValue.float: ["spotlight_interest_sub_p", "spotlight_interest_nosub_p"],
        ConfigValue.tuple: ["sr_discovery_links", "fastlane_links"],
        ConfigValue.dict(ConfigValue.int, ConfigValue.float): ["comment_tree_version_weights"],
        ConfigValue.messages: [
            "goldvertisement_blurbs",
            "goldvertisement_has_gold_blurbs",
            "welcomebar_messages",
            "sidebar_message",
            "gold_sidebar_message",
        ],
    }

    def __init__(self, global_conf, app_conf, paths, **extra):
        """
        Globals acts as a container for objects available throughout
        the life of the application.

        One instance of Globals is created by Pylons during
        application initialization and is available during requests
        via the 'g' variable.

        ``global_conf``
            The same variable used throughout ``config/middleware.py``
            namely, the variables from the ``[DEFAULT]`` section of the
            configuration file.

        ``app_conf``
            The same ``kw`` dictionary used throughout
            ``config/middleware.py`` namely, the variables from the
            section in the config file for your application.

        ``extra``
            The configuration returned from ``load_config`` in 
            ``config/middleware.py`` which may be of use in the setup of
            your global variables.

        """

        global_conf.setdefault("debug", False)

        self.config = ConfigValueParser(global_conf)
        self.config.add_spec(self.spec)
        self.plugins = PluginLoader(self.config.get("plugins", []))

        self.stats = Stats(self.config.get("statsd_addr"), self.config.get("statsd_sample_rate"))
        self.startup_timer = self.stats.get_timer("app_startup")
        self.startup_timer.start()

        self.paths = paths

        self.running_as_script = global_conf.get("running_as_script", False)

        # turn on for language support
        self.lang = getattr(self, "site_lang", "en")
        self.languages, self.lang_name = get_active_langs(default_lang=self.lang)

        all_languages = self.lang_name.keys()
        all_languages.sort()
        self.all_languages = all_languages

        # set default time zone if one is not set
        tz = global_conf.get("timezone", "UTC")
        self.tz = pytz.timezone(tz)

        dtz = global_conf.get("display_timezone", tz)
        self.display_tz = pytz.timezone(dtz)

        self.startup_timer.intermediate("init")

    def __getattr__(self, name):
        if not name.startswith("_") and name in self.config:
            return self.config[name]
        else:
            raise AttributeError

    def setup(self):
        self.queues = queues.declare_queues(self)

        ################# CONFIGURATION
        # AMQP is required
        if not self.amqp_host:
            raise ValueError("amqp_host not set in the .ini")

        if not self.cassandra_seeds:
            raise ValueError("cassandra_seeds not set in the .ini")

        # heavy load mode is read only mode with a different infobar
        if self.heavy_load_mode:
            self.read_only_mode = True

        origin_prefix = self.domain_prefix + "." if self.domain_prefix else ""
        self.origin = "http://" + origin_prefix + self.domain
        self.secure_domains = set([urlparse(self.payment_domain).netloc])

        self.trusted_domains = set([self.domain])
        self.trusted_domains.update(self.authorized_cnames)
        if self.https_endpoint:
            https_url = urlparse(self.https_endpoint)
            self.secure_domains.add(https_url.netloc)
            self.trusted_domains.add(https_url.hostname)
        if getattr(self, "oauth_domain", None):
            self.secure_domains.add(self.oauth_domain)

        # load the unique hashed names of files under static
        static_files = os.path.join(self.paths.get("static_files"), "static")
        names_file_path = os.path.join(static_files, "names.json")
        if os.path.exists(names_file_path):
            with open(names_file_path) as handle:
                self.static_names = json.load(handle)
        else:
            self.static_names = {}

        # make python warnings go through the logging system
        logging.captureWarnings(capture=True)

        log = logging.getLogger("reddit")

        # when we're a script (paster run) just set up super simple logging
        if self.running_as_script:
            log.setLevel(logging.INFO)
            log.addHandler(logging.StreamHandler())

        # if in debug mode, override the logging level to DEBUG
        if self.debug:
            log.setLevel(logging.DEBUG)

        # attempt to figure out which pool we're in and add that to the
        # LogRecords.
        try:
            with open("/etc/ec2_asg", "r") as f:
                pool = f.read().strip()
            # clean up the pool name since we're putting stuff after "-"
            pool = pool.partition("-")[0]
        except IOError:
            pool = "reddit-app"
        self.log = logging.LoggerAdapter(log, {"pool": pool})

        # make cssutils use the real logging system
        csslog = logging.getLogger("cssutils")
        cssutils.log.setLog(csslog)

        # load the country list
        countries_file_path = os.path.join(static_files, "countries.json")
        try:
            with open(countries_file_path) as handle:
                self.countries = json.load(handle)
            self.log.debug("Using countries.json.")
        except IOError:
            self.log.warning("Couldn't find countries.json. Using pycountry.")
            self.countries = get_countries_and_codes()

        if not self.media_domain:
            self.media_domain = self.domain
        if self.media_domain == self.domain:
            print("Warning: g.media_domain == g.domain. " + "This may give untrusted content access to user cookies")

        for arg in sys.argv:
            tokens = arg.split("=")
            if len(tokens) == 2:
                k, v = tokens
                self.log.debug("Overriding g.%s to %s" % (k, v))
                setattr(self, k, v)

        self.reddit_host = socket.gethostname()
        self.reddit_pid = os.getpid()

        if hasattr(signal, "SIGUSR1"):
            # not all platforms have user signals
            signal.signal(signal.SIGUSR1, thread_dump)

        self.startup_timer.intermediate("configuration")

        ################# ZOOKEEPER
        # for now, zookeeper will be an optional part of the stack.
        # if it's not configured, we will grab the expected config from the
        # [live_config] section of the ini file
        zk_hosts = self.config.get("zookeeper_connection_string")
        if zk_hosts:
            from r2.lib.zookeeper import connect_to_zookeeper, LiveConfig, LiveList

            zk_username = self.config["zookeeper_username"]
            zk_password = self.config["zookeeper_password"]
            self.zookeeper = connect_to_zookeeper(zk_hosts, (zk_username, zk_password))
            self.live_config = LiveConfig(self.zookeeper, LIVE_CONFIG_NODE)
            self.throttles = LiveList(
                self.zookeeper, "/throttles", map_fn=ipaddress.ip_network, reduce_fn=ipaddress.collapse_addresses
            )
        else:
            self.zookeeper = None
            parser = ConfigParser.RawConfigParser()
            parser.read([self.config["__file__"]])
            self.live_config = extract_live_config(parser, self.plugins)
            self.throttles = tuple()  # immutable since it's not real

        self.startup_timer.intermediate("zookeeper")

        ################# MEMCACHE
        num_mc_clients = self.num_mc_clients

        # the main memcache pool. used for most everything.
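        # values longer than min_compress_len bytes are compressed before
        # being written to memcached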
        self.memcache = CMemcache(self.memcaches, min_compress_len=50 * 1024, num_clients=num_mc_clients)

        # a pool just used for @memoize results
        memoizecaches = CMemcache(self.memoizecaches, min_compress_len=50 * 1024, num_clients=num_mc_clients)

        # a smaller pool of caches used only for distributed locks.
        # TODO: move this to ZooKeeper
        self.lock_cache = CMemcache(self.lockcaches, num_clients=num_mc_clients)
        self.make_lock = make_lock_factory(self.lock_cache, self.stats)

        # memcaches used in front of the permacache CF in cassandra.
        # XXX: this is a legacy thing; permacache was made when C* didn't have
        # a row cache.
        if self.permacache_memcaches:
            permacache_memcaches = CMemcache(
                self.permacache_memcaches, min_compress_len=50 * 1024, num_clients=num_mc_clients
            )
        else:
            permacache_memcaches = None

        # the stalecache is a memcached local to the current app server used
        # for data that's frequently fetched but doesn't need to be fresh.
        if self.stalecaches:
            stalecaches = CMemcache(self.stalecaches, num_clients=num_mc_clients)
        else:
            stalecaches = None

        # rendercache holds rendered partial templates.
        rendercaches = CMemcache(
            self.rendercaches, noreply=True, no_block=True, num_clients=num_mc_clients, min_compress_len=1400
        )

        # pagecaches hold fully rendered pages
        pagecaches = CMemcache(
            self.pagecaches, noreply=True, no_block=True, num_clients=num_mc_clients, min_compress_len=1400
        )

        self.startup_timer.intermediate("memcache")

        ################# CASSANDRA
        keyspace = "reddit"
        self.cassandra_pools = {
            "main": StatsCollectingConnectionPool(
                keyspace,
                stats=self.stats,
                logging_name="main",
                server_list=self.cassandra_seeds,
                pool_size=self.cassandra_pool_size,
                timeout=4,
                max_retries=3,
                prefill=False,
            )
        }

        permacache_cf = CassandraCache(
            "permacache",
            self.cassandra_pools[self.cassandra_default_pool],
            read_consistency_level=self.cassandra_rcl,
            write_consistency_level=self.cassandra_wcl,
        )

        self.startup_timer.intermediate("cassandra")

        ################# POSTGRES
        event.listens_for(engine.Engine, "before_cursor_execute")(self.stats.pg_before_cursor_execute)
        event.listens_for(engine.Engine, "after_cursor_execute")(self.stats.pg_after_cursor_execute)

        self.dbm = self.load_db_params()
        self.startup_timer.intermediate("postgres")

        ################# CHAINS
        # initialize caches. Any cache-chains built here must be added
        # to cache_chains (closed around by reset_caches) so that they
        # can properly reset their local components
        cache_chains = {}
        localcache_cls = SelfEmptyingCache if self.running_as_script else LocalCache

        if stalecaches:
            self.cache = StaleCacheChain(localcache_cls(), stalecaches, self.memcache)
        else:
            self.cache = MemcacheChain((localcache_cls(), self.memcache))
        cache_chains.update(cache=self.cache)

        if stalecaches:
            self.memoizecache = StaleCacheChain(localcache_cls(), stalecaches, memoizecaches)
        else:
            self.memoizecache = MemcacheChain((localcache_cls(), memoizecaches))
        cache_chains.update(memoizecache=self.memoizecache)

        self.rendercache = MemcacheChain((localcache_cls(), rendercaches))
        cache_chains.update(rendercache=self.rendercache)

        self.pagecache = MemcacheChain((localcache_cls(), pagecaches))
        cache_chains.update(pagecache=self.pagecache)

        # the thing_cache is used in tdb_cassandra.
        self.thing_cache = CacheChain((localcache_cls(),))
        cache_chains.update(thing_cache=self.thing_cache)

        self.permacache = CassandraCacheChain(
            localcache_cls(), permacache_cf, memcache=permacache_memcaches, lock_factory=self.make_lock
        )
        cache_chains.update(permacache=self.permacache)

        # hardcache is used for various things that tend to expire
        # TODO: replace hardcache w/ cassandra stuff
        self.hardcache = HardcacheChain((localcache_cls(), self.memcache, HardCache(self)), cache_negative_results=True)
        cache_chains.update(hardcache=self.hardcache)

        # I know this sucks, but we need non-request-threads to be
        # able to reset the caches, so we need them to be able to close
        # around 'cache_chains' without being able to call getattr on
        # 'g'
        def reset_caches():
            for name, chain in cache_chains.iteritems():
                chain.reset()
                chain.stats = CacheStats(self.stats, name)

        self.cache_chains = cache_chains

        self.reset_caches = reset_caches
        self.reset_caches()

        self.startup_timer.intermediate("cache_chains")

        # try to set the source control revision numbers
        self.versions = {}
        r2_root = os.path.dirname(os.path.dirname(self.paths["root"]))
        r2_gitdir = os.path.join(r2_root, ".git")
        self.short_version = self.record_repo_version("r2", r2_gitdir)

        if I18N_PATH:
            i18n_git_path = os.path.join(os.path.dirname(I18N_PATH), ".git")
            self.record_repo_version("i18n", i18n_git_path)

        self.startup_timer.intermediate("revisions")

    def setup_complete(self):
        self.startup_timer.stop()
        self.stats.flush()

        if self.log_start:
            self.log.error(
                "%s:%s started %s at %s (took %.02fs)",
                self.reddit_host,
                self.reddit_pid,
                self.short_version,
                datetime.now().strftime("%H:%M:%S"),
                self.startup_timer.elapsed_seconds(),
            )

    def record_repo_version(self, repo_name, git_dir):
        """Get the currently checked out git revision for a given repository,
        record it in g.versions, and return the short version of the hash."""
        try:
            subprocess.check_output
        except AttributeError:
            # subprocess.check_output is new in Python 2.7; skip on 2.6
            pass
        else:
            try:
                revision = subprocess.check_output(["git", "--git-dir", git_dir, "rev-parse", "HEAD"])
            except subprocess.CalledProcessError, e:
                self.log.warning("Unable to fetch git revision: %r", e)
            else:
                revision = revision.strip()
                # record it in g.versions and return the short hash, as the
                # docstring describes
                self.versions[repo_name] = revision
                return revision[:7]

Example #13
class Globals(object):
    spec = {
        ConfigValue.int: [
            'db_pool_size',
            'db_pool_overflow_size',
            'commentpane_cache_time',
            'num_mc_clients',
            'MAX_CAMPAIGNS_PER_LINK',
            'MIN_DOWN_LINK',
            'MIN_UP_KARMA',
            'MIN_DOWN_KARMA',
            'MIN_RATE_LIMIT_KARMA',
            'MIN_RATE_LIMIT_COMMENT_KARMA',
            'HOT_PAGE_AGE',
            'ADMIN_COOKIE_TTL',
            'ADMIN_COOKIE_MAX_IDLE',
            'OTP_COOKIE_TTL',
            'hsts_max_age',
            'num_comments',
            'max_comments',
            'max_comments_gold',
            'max_comment_parent_walk',
            'max_sr_images',
            'num_serendipity',
            'comment_visits_period',
            'butler_max_mentions',
            'min_membership_create_community',
            'bcrypt_work_factor',
            'cassandra_pool_size',
            'sr_banned_quota',
            'sr_muted_quota',
            'sr_wikibanned_quota',
            'sr_wikicontributor_quota',
            'sr_moderator_invite_quota',
            'sr_contributor_quota',
            'sr_quota_time',
            'sr_invite_limit',
            'thumbnail_hidpi_scaling',
            'wiki_keep_recent_days',
            'wiki_max_page_length_bytes',
            'wiki_max_config_stylesheet_length_bytes',
            'wiki_max_page_name_length',
            'wiki_max_page_separators',
            'RL_RESET_MINUTES',
            'RL_OAUTH_RESET_MINUTES',
            'comment_karma_display_floor',
            'link_karma_display_floor',
            'mobile_auth_gild_time',
            'default_total_budget_pennies',
            'min_total_budget_pennies',
            'max_total_budget_pennies',
            'default_bid_pennies',
            'min_bid_pennies',
            'max_bid_pennies',
            'frequency_cap_min',
            'frequency_cap_default',
            'eu_cookie_max_attempts',
            'captcha_sol_length',
            'captcha_font_size',
            'banner_variants',
            'precompute_limit',
            'precompute_limit_hot',
            'hot_max_links_per_subreddit',
            'fetch_title_max_download_kb',
        ],
        ConfigValue.float: [
            'statsd_sample_rate',
            'querycache_prune_chance',
            'RL_AVG_REQ_PER_SEC',
            'RL_OAUTH_AVG_REQ_PER_SEC',
            'RL_LOGIN_AVG_PER_SEC',
            'RL_LOGIN_IP_AVG_PER_SEC',
            'RL_SHARE_AVG_PER_SEC',
            'tracing_sample_rate',
            'hot_period_seconds',
        ],
        ConfigValue.bool: [
            'debug',
            'log_start',
            'sqlprinting',
            'template_debug',
            'reload_templates',
            'uncompressedJS',
            'css_killswitch',
            'db_create_tables',
            'disallow_db_writes',
            'disable_ratelimit',
            'amqp_logging',
            'read_only_mode',
            'disable_wiki',
            'heavy_load_mode',
            'disable_captcha',
            'disable_ads',
            'disable_require_admin_otp',
            'trust_local_proxies',
            'shard_commentstree_queues',
            'shard_author_query_queues',
            'shard_subreddit_query_queues',
            'shard_domain_query_queues',
            'authnet_validate',
            'ENFORCE_RATELIMIT',
            'RL_SITEWIDE_ENABLED',
            'RL_OAUTH_SITEWIDE_ENABLED',
            'enable_loggedout_experiments',
            'disable_geoip_service',
            'disable_remote_fetch',
            'disable_newsletter',
            'remote_fetch_proxy_enabled',
            'gold_gilding_enabled',
            'sub_muting_enabled',
            'allsr_prefilter_allow_top',
            'site_index_user_configurable',
            'allow_top_affects_new',
            'allow_top_false_subreddits_tab',
            'block_user_show_comments',
            'block_user_show_links',
            'chat_guest_chat_enabled',
            'chat_all',
            'chat_front',
        ],
        ConfigValue.tuple: [
            'plugins',
            'stalecaches',
            'lockcaches',
            'permacache_memcaches',
            'cassandra_seeds',
            'automatic_reddits',
            'hardcache_categories',
            'case_sensitive_domains',
            'known_image_domains',
            'reserved_subdomains',
            'offsite_subdomains',
            'TRAFFIC_LOG_HOSTS',
            'exempt_login_user_agents',
            'autoexpand_media_types',
            'media_preview_domain_whitelist',
            'multi_icons',
            'hide_subscribers_srs',
            'mcrouter_addr',
            'permacache_domain_priority',
        ],
        ConfigValue.tuple_of(ConfigValue.int): [
            'thumbnail_size',
            'preview_image_max_size',
            'preview_image_min_size',
            'mobile_ad_image_size',
        ],
        ConfigValue.tuple_of(ConfigValue.float): [
            'ios_versions',
            'android_versions',
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.int): [
            'user_agent_ratelimit_regexes',
        ],
        ConfigValue.str: [
            'wiki_page_registration_info',
            'wiki_page_privacy_policy',
            'wiki_page_user_agreement',
            'wiki_page_gold_bottlecaps',
            'fraud_email',
            'feedback_email',
            'share_reply',
            'community_email',
            'smtp_server',
            'events_collector_url',
            'events_collector_test_url',
            'search_provider',
            'remote_fetch_proxy_url',
            'brander_community',
            'brander_community_plural',
            'imgur_client_id',
        ],
        ConfigValue.choice(ONE=CL_ONE, QUORUM=CL_QUORUM): [
            'cassandra_rcl',
            'cassandra_wcl',
        ],
        ConfigValue.choice(zookeeper="zookeeper", config="config"): [
            "liveconfig_source",
            "secrets_source",
        ],
        ConfigValue.timeinterval: [
            'ARCHIVE_AGE',
            "vote_queue_grace_period",
        ],
        config_gold_price: [
            'gold_month_price',
            'gold_year_price',
            'cpm_selfserve',
            'cpm_selfserve_geotarget_metro',
            'cpm_selfserve_geotarget_country',
            'cpm_selfserve_collection',
        ],
        ConfigValue.baseplate(
            baseplate_config.Optional(baseplate_config.Endpoint)): [
            "activity_endpoint",
            "tracing_endpoint",
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.str): [
            'emr_traffic_tags',
        ],
    }

    live_config_spec = {
        ConfigValue.bool: [
            'frontend_logging',
            'mobile_gild_first_login',
            'precomputed_comment_suggested_sort',
        ],
        ConfigValue.int: [
            'captcha_exempt_comment_karma',
            'captcha_exempt_link_karma',
            'create_sr_account_age_days',
            'create_sr_comment_karma',
            'create_sr_link_karma',
            'cflag_min_votes',
            'ads_popularity_threshold',
            'precomputed_comment_sort_min_comments',
            'comment_vote_update_threshold',
            'comment_vote_update_period',
            'create_sr_ratelimit_once_per_days',
        ],
        ConfigValue.float: [
            'cflag_lower_bound',
            'cflag_upper_bound',
            'spotlight_interest_sub_p',
            'spotlight_interest_nosub_p',
            'gold_revenue_goal',
            'invalid_key_sample_rate',
            'events_collector_vote_sample_rate',
            'events_collector_poison_sample_rate',
            'events_collector_mod_sample_rate',
            'events_collector_quarantine_sample_rate',
            'events_collector_modmail_sample_rate',
            'events_collector_report_sample_rate',
            'events_collector_submit_sample_rate',
            'events_collector_comment_sample_rate',
            'events_collector_use_gzip_chance',
            'https_cert_testing_probability',
        ],
        ConfigValue.tuple: [
            'fastlane_links',
            'listing_chooser_sample_multis',
            'discovery_srs',
            'proxy_gilding_accounts',
            'mweb_blacklist_expressions',
            'global_loid_experiments',
            'precomputed_comment_sorts',
            'mailgun_domains',
        ],
        ConfigValue.str: [
            'listing_chooser_gold_multi',
            'listing_chooser_explore_sr',
        ],
        ConfigValue.messages: [
            'welcomebar_messages',
            'sidebar_message',
            'gold_sidebar_message',
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.int): [
            'ticket_groups',
            'ticket_user_fields',
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.float): [
            'pennies_per_server_second',
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.str): [
            'employee_approved_clients',
            'mobile_auth_allowed_clients',
            'modmail_forwarding_email',
            'modmail_account_map',
        ],
        ConfigValue.dict(ConfigValue.str, ConfigValue.choice(**PERMISSIONS)): [
            'employees',
        ],
    }

    def __init__(self, config, global_conf, app_conf, paths, **extra):
        """
        Globals acts as a container for objects available throughout
        the life of the application.

        One instance of Globals is created by Pylons during
        application initialization and is available during requests
        via the 'g' variable.

        ``config``
            The PylonsConfig object passed in from ``config/environment.py``

        ``global_conf``
            The same variable used throughout ``config/middleware.py``
            namely, the variables from the ``[DEFAULT]`` section of the
            configuration file.

        ``app_conf``
            The same ``kw`` dictionary used throughout
            ``config/middleware.py`` namely, the variables from the
            section in the config file for your application.

        ``extra``
            The configuration returned from ``load_config`` in 
            ``config/middleware.py`` which may be of use in the setup of
            your global variables.

        """

        global_conf.setdefault("debug", False)

        # reloading site ensures that we have a fresh sys.path to build our
        # working set off of. this means that forked worker processes won't get
        # the sys.path that was current when the master process was spawned
        # meaning that new plugins will be picked up on regular app reload
        # rather than having to restart the master process as well.
        reload(site)
        self.pkg_resources_working_set = pkg_resources.WorkingSet()

        self.config = ConfigValueParser(global_conf)
        self.config.add_spec(self.spec)
        self.plugins = PluginLoader(self.pkg_resources_working_set,
                                    self.config.get("plugins", []))

        self.stats = Stats(self.config.get('statsd_addr'),
                           self.config.get('statsd_sample_rate'))
        self.startup_timer = self.stats.get_timer("app_startup")
        self.startup_timer.start()

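        # wire up baseplate: logging, the r2 request observer, and
        # distributed tracing sampled at tracing_sample_rate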
        self.baseplate = Baseplate()
        self.baseplate.configure_logging()
        self.baseplate.register(R2BaseplateObserver())
        self.baseplate.configure_tracing(
            "r2",
            tracing_endpoint=self.config.get("tracing_endpoint"),
            sample_rate=self.config.get("tracing_sample_rate"),
        )

        self.paths = paths

        self.running_as_script = global_conf.get('running_as_script', False)

        # turn on for language support
        self.lang = getattr(self, 'site_lang', 'en')
        self.languages, self.lang_name = get_active_langs(
            config, default_lang=self.lang)

        all_languages = self.lang_name.keys()
        all_languages.sort()
        self.all_languages = all_languages

        # set default time zone if one is not set
        tz = global_conf.get('timezone', 'UTC')
        self.tz = pytz.timezone(tz)

        dtz = global_conf.get('display_timezone', tz)
        self.display_tz = pytz.timezone(dtz)

        self.startup_timer.intermediate("init")

    def __getattr__(self, name):
        if not name.startswith('_') and name in self.config:
            return self.config[name]
        else:
            raise AttributeError("g has no attr %r" % name)

    def setup(self):
        self.env = ''
        if (
                # handle direct invocation of "nosetests"
                "test" in sys.argv[0] or
                # handle "setup.py test" and all permutations thereof.
                "setup.py" in sys.argv[0] and "test" in sys.argv[1:]):
            self.env = "unit_test"

        self.queues = queues.declare_queues(self)

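        # requests on these subdomains are rendered with the mapped
        # extension/render style (e.g. the "i" subdomain uses "compact")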
        self.extension_subdomains = dict(
            simple="mobile",
            i="compact",
            api="api",
            rss="rss",
            xml="xml",
            json="json",
        )
        # SaidIt CUSTOM
        mobile_v2_subdomain = self.config['extension_subdomain_mobile_v2']
        self.extension_subdomains[mobile_v2_subdomain] = \
            self.config['extension_subdomain_mobile_v2_render_style']
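        # e.g. a request to simple.<g.domain> is rendered with the "mobile"
        # style and i.<g.domain> with "compact" (illustrative reading of
        # the mapping above).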

        ################# PROVIDERS
        self.auth_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.auth",
            self.authentication_provider,
        )
        self.media_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.media",
            self.media_provider,
        )
        self.cdn_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.cdn",
            self.cdn_provider,
        )
        self.ticket_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.support",
            # TODO: fix this later; it refuses to pick up the
            # g.config['ticket_provider'] value, so hardcode for now.
            # the next uncommented line should really be:
            #self.ticket_provider,
            # instead of:
            "zendesk",
        )
        self.image_resizing_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.image_resizing",
            self.image_resizing_provider,
        )
        self.email_provider = select_provider(
            self.config,
            self.pkg_resources_working_set,
            "r2.provider.email",
            self.email_provider,
        )
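        # each select_provider call resolves a provider name from the
        # config (or a hardcoded value) against the setuptools entry points
        # registered under the named group, e.g. "r2.provider.email", using
        # the working set built in __init__ (inferred from the arguments
        # passed above).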
        self.startup_timer.intermediate("providers")

        ################# CONFIGURATION
        # AMQP is required
        if not self.amqp_host:
            raise ValueError("amqp_host not set in the .ini")

        if not self.cassandra_seeds:
            raise ValueError("cassandra_seeds not set in the .ini")

        # heavy load mode is read only mode with a different infobar
        if self.heavy_load_mode:
            self.read_only_mode = True

        origin_prefix = self.domain_prefix + "." if self.domain_prefix else ""
        self.origin = self.default_scheme + "://" + origin_prefix + self.domain

        self.trusted_domains = set([self.domain])
        if self.https_endpoint:
            https_url = urlparse(self.https_endpoint)
            self.trusted_domains.add(https_url.hostname)

        # load the unique hashed names of files under static
        static_files = os.path.join(self.paths.get('static_files'), 'static')
        names_file_path = os.path.join(static_files, 'names.json')
        if os.path.exists(names_file_path):
            with open(names_file_path) as handle:
                self.static_names = json.load(handle)
        else:
            self.static_names = {}
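        # names.json maps canonical asset names to their content-hashed
        # on-disk filenames, e.g. {"reddit.css": "reddit.HASH.css"}
        # (illustrative entry), letting templates emit cache-busting URLs.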

        # make python warnings go through the logging system
        logging.captureWarnings(capture=True)

        log = logging.getLogger('reddit')

        # when we're a script (paster run) just set up super simple logging
        if self.running_as_script:
            log.setLevel(logging.INFO)
            log.addHandler(logging.StreamHandler())

        # if in debug mode, override the logging level to DEBUG
        if self.debug:
            log.setLevel(logging.DEBUG)

        # attempt to figure out which pool we're in and add that to the
        # LogRecords.
        try:
            with open("/etc/ec2_asg", "r") as f:
                pool = f.read().strip()
            # clean up the pool name since we're putting stuff after "-"
            pool = pool.partition("-")[0]
        except IOError:
            pool = "reddit-app"
        self.log = logging.LoggerAdapter(log, {"pool": pool})
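        # every record logged through self.log now carries a "pool"
        # attribute, so a format string like "%(pool)s: %(message)s" can
        # tag each line with the autoscaling group it came from.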

        # set locations
        locations = pkg_resources.resource_stream(__name__,
                                                  "../data/locations.json")
        self.locations = json.load(locations)

        if not self.media_domain:
            self.media_domain = self.domain
        if self.media_domain == self.domain:
            print >> sys.stderr, (
                "Warning: g.media_domain == g.domain. "
                "This may give untrusted content access to user cookies")
        if self.oauth_domain == self.domain:
            print >> sys.stderr, ("Warning: g.oauth_domain == g.domain. "
                                  "CORS requests to g.domain will be allowed")

        for arg in sys.argv:
            tokens = arg.split("=")
            if len(tokens) == 2:
                k, v = tokens
                self.log.debug("Overriding g.%s to %s", k, v)
                setattr(self, k, v)
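        # e.g. passing "read_only_mode=true" on the command line sets
        # g.read_only_mode for this process; note the value arrives as the
        # raw string "true", not a parsed boolean (illustrative key).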

        self.reddit_host = socket.gethostname()
        self.reddit_pid = os.getpid()

        if hasattr(signal, 'SIGUSR1'):
            # not all platforms have user signals
            signal.signal(signal.SIGUSR1, thread_dump)

        locale.setlocale(locale.LC_ALL, self.locale)

        # Pre-calculate ratelimit values
        self.RL_RESET_SECONDS = self.config["RL_RESET_MINUTES"] * 60
        self.RL_MAX_REQS = int(self.config["RL_AVG_REQ_PER_SEC"] *
                               self.RL_RESET_SECONDS)

        self.RL_OAUTH_RESET_SECONDS = self.config["RL_OAUTH_RESET_MINUTES"] * 60
        self.RL_OAUTH_MAX_REQS = int(self.config["RL_OAUTH_AVG_REQ_PER_SEC"] *
                                     self.RL_OAUTH_RESET_SECONDS)

        self.RL_LOGIN_MAX_REQS = int(self.config["RL_LOGIN_AVG_PER_SEC"] *
                                     self.RL_RESET_SECONDS)
        self.RL_LOGIN_IP_MAX_REQS = int(
            self.config["RL_LOGIN_IP_AVG_PER_SEC"] * self.RL_RESET_SECONDS)
        self.RL_SHARE_MAX_REQS = int(self.config["RL_SHARE_AVG_PER_SEC"] *
                                     self.RL_RESET_SECONDS)
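        # worked example with illustrative .ini values: RL_RESET_MINUTES =
        # 10 and RL_AVG_REQ_PER_SEC = 0.5 give RL_RESET_SECONDS = 600 and
        # RL_MAX_REQS = int(0.5 * 600) = 300 requests per window.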

        # Compile ratelimit regexes
        user_agent_ratelimit_regexes = {}
        for agent_re, limit in self.user_agent_ratelimit_regexes.iteritems():
            user_agent_ratelimit_regexes[re.compile(agent_re)] = limit
        self.user_agent_ratelimit_regexes = user_agent_ratelimit_regexes

        self.startup_timer.intermediate("configuration")

        ################# ZOOKEEPER
        zk_hosts = self.config["zookeeper_connection_string"]
        zk_username = self.config["zookeeper_username"]
        zk_password = self.config["zookeeper_password"]
        self.zookeeper = connect_to_zookeeper(zk_hosts,
                                              (zk_username, zk_password))

        self.throttles = IPNetworkLiveList(
            self.zookeeper,
            root="/throttles",
            reduced_data_node="/throttles_reduced",
        )

        parser = ConfigParser.RawConfigParser()
        parser.optionxform = str
        parser.read([self.config["__file__"]])

        if self.config["liveconfig_source"] == "zookeeper":
            self.live_config = LiveConfig(self.zookeeper, LIVE_CONFIG_NODE)
        else:
            self.live_config = extract_live_config(parser, self.plugins)

        if self.config["secrets_source"] == "zookeeper":
            self.secrets = fetch_secrets(self.zookeeper)
        else:
            self.secrets = extract_secrets(parser)

        ################# PRIVILEGED USERS
        self.admins = PermissionFilteredEmployeeList(self.live_config,
                                                     type="admin")
        self.sponsors = PermissionFilteredEmployeeList(self.live_config,
                                                       type="sponsor")
        self.employees = PermissionFilteredEmployeeList(self.live_config,
                                                        type="employee")

        # Store which OAuth clients employees may use; the keys are just
        # for readability.
        self.employee_approved_clients = \
            self.live_config["employee_approved_clients"].values()

        self.mobile_auth_allowed_clients = self.live_config[
            "mobile_auth_allowed_clients"].values()

        self.startup_timer.intermediate("zookeeper")

        ################# MEMCACHE
        num_mc_clients = self.num_mc_clients

        # a smaller pool of caches used only for distributed locks.
        self.lock_cache = CMemcache(
            "lock",
            self.lockcaches,
            num_clients=num_mc_clients,
        )
        self.make_lock = make_lock_factory(self.lock_cache, self.stats)
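        # typical usage (signature assumed): "with g.make_lock('kind',
        # key):" acquires a distributed lock backed by the dedicated
        # memcached pool above.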

        # memcaches used in front of the permacache CF in cassandra.
        # XXX: this is a legacy thing; permacache was made when C* didn't have
        # a row cache.
        permacache_memcaches = CMemcache(
            "perma",
            self.permacache_memcaches,
            min_compress_len=1400,
            num_clients=num_mc_clients,
        )

        # the stalecache is a memcached local to the current app server used
        # for data that's frequently fetched but doesn't need to be fresh.
        if self.stalecaches:
            stalecaches = CMemcache(
                "stale",
                self.stalecaches,
                num_clients=num_mc_clients,
            )
        else:
            stalecaches = None

        self.startup_timer.intermediate("memcache")

        ################# MCROUTER
        self.mcrouter = Mcrouter(
            "mcrouter",
            self.mcrouter_addr,
            min_compress_len=1400,
            num_clients=num_mc_clients,
        )

        ################# THRIFT-BASED SERVICES
        activity_endpoint = self.config.get("activity_endpoint")
        if activity_endpoint:
            # make ActivityInfo objects rendercache-key friendly
            # TODO: figure out a more general solution for this if
            # we need to do this for other thrift-generated objects
            ActivityInfo.cache_key = lambda self, style: repr(self)

            activity_pool = ThriftConnectionPool(activity_endpoint,
                                                 timeout=0.1)
            self.baseplate.add_to_context(
                "activity_service",
                ThriftContextFactory(activity_pool, ActivityService.Client))

        self.startup_timer.intermediate("thrift")

        ################# CASSANDRA
        keyspace = "reddit"
        self.cassandra_pools = {
            "main":
            StatsCollectingConnectionPool(keyspace,
                                          stats=self.stats,
                                          logging_name="main",
                                          server_list=self.cassandra_seeds,
                                          pool_size=self.cassandra_pool_size,
                                          timeout=4,
                                          max_retries=3,
                                          prefill=False),
        }

        permacache_cf = Permacache._setup_column_family(
            'permacache',
            self.cassandra_pools[self.cassandra_default_pool],
        )

        self.startup_timer.intermediate("cassandra")

        ################# POSTGRES
        self.dbm = self.load_db_params()
        self.startup_timer.intermediate("postgres")

        ################# CHAINS
        # initialize caches. Any cache-chains built here must be added
        # to cache_chains (closed around by reset_caches) so that they
        # can properly reset their local components
        cache_chains = {}
        localcache_cls = (SelfEmptyingCache
                          if self.running_as_script else LocalCache)

        if stalecaches:
            self.gencache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                self.mcrouter,
            )
        else:
            self.gencache = CacheChain((localcache_cls(), self.mcrouter))
        cache_chains.update(gencache=self.gencache)
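        # a StaleCacheChain reads local cache -> stalecache -> mcrouter and
        # tolerates slightly out-of-date data from the middle tier; the
        # plain CacheChain skips the stale tier (behavior inferred from the
        # constructor arguments).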

        if stalecaches:
            self.thingcache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                self.mcrouter,
            )
        else:
            self.thingcache = CacheChain((localcache_cls(), self.mcrouter))
        cache_chains.update(thingcache=self.thingcache)

        if stalecaches:
            self.memoizecache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                self.mcrouter,
            )
        else:
            self.memoizecache = MemcacheChain(
                (localcache_cls(), self.mcrouter))
        cache_chains.update(memoizecache=self.memoizecache)

        if stalecaches:
            self.srmembercache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                self.mcrouter,
            )
        else:
            self.srmembercache = MemcacheChain(
                (localcache_cls(), self.mcrouter))
        cache_chains.update(srmembercache=self.srmembercache)

        if stalecaches:
            self.relcache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                self.mcrouter,
            )
        else:
            self.relcache = MemcacheChain((localcache_cls(), self.mcrouter))
        cache_chains.update(relcache=self.relcache)

        self.ratelimitcache = MemcacheChain((localcache_cls(), self.mcrouter))
        cache_chains.update(ratelimitcache=self.ratelimitcache)

        # rendercache holds rendered partial templates.
        self.rendercache = MemcacheChain((
            localcache_cls(),
            self.mcrouter,
        ))
        cache_chains.update(rendercache=self.rendercache)

        # the commentpanecache holds fully rendered comment panes
        self.commentpanecache = MemcacheChain((
            localcache_cls(),
            self.mcrouter,
        ))
        cache_chains.update(commentpanecache=self.commentpanecache)

        # cassandra_local_cache is used for request-local caching in tdb_cassandra
        self.cassandra_local_cache = localcache_cls()
        cache_chains.update(cassandra_local_cache=self.cassandra_local_cache)

        if stalecaches:
            permacache_cache = StaleCacheChain(
                localcache_cls(),
                stalecaches,
                permacache_memcaches,
            )
        else:
            permacache_cache = CacheChain(
                (localcache_cls(), permacache_memcaches))
        cache_chains.update(permacache=permacache_cache)

        self.permacache = Permacache(
            permacache_cache,
            permacache_cf,
            lock_factory=self.make_lock,
        )

        # hardcache is used for various things that tend to expire
        # TODO: replace hardcache w/ cassandra stuff
        self.hardcache = HardcacheChain(
            (localcache_cls(), HardCache(self)),
            cache_negative_results=True,
        )
        cache_chains.update(hardcache=self.hardcache)

        # I know this sucks, but we need non-request threads to be able to
        # reset the caches, so they must be able to close around
        # 'cache_chains' without calling getattr on 'g'.
        def reset_caches():
            for name, chain in cache_chains.iteritems():
                if isinstance(chain, TransitionalCache):
                    chain = chain.read_chain

                chain.reset()
                if isinstance(chain, LocalCache):
                    continue
                elif isinstance(chain, StaleCacheChain):
                    chain.stats = StaleCacheStats(self.stats, name)
                else:
                    chain.stats = CacheStats(self.stats, name)

        self.cache_chains = cache_chains

        self.reset_caches = reset_caches
        self.reset_caches()

        self.startup_timer.intermediate("cache_chains")

        # try to set the source control revision numbers
        self.versions = {}
        r2_root = os.path.dirname(os.path.dirname(self.paths["root"]))
        r2_gitdir = os.path.join(r2_root, ".git")
        self.short_version = self.record_repo_version("r2", r2_gitdir)

        if I18N_PATH:
            i18n_git_path = os.path.join(os.path.dirname(I18N_PATH), ".git")
            self.record_repo_version("i18n", i18n_git_path)

        # Initialize the amqp module globals, start the worker, etc.
        r2.lib.amqp.initialize(self)

        self.events = EventQueue()

        self.startup_timer.intermediate("revisions")

    def setup_complete(self):
        self.startup_timer.stop()
        self.stats.flush()

        if self.log_start:
            self.log.error("%s:%s started %s at %s (took %.02fs)",
                           self.reddit_host, self.reddit_pid,
                           self.short_version,
                           datetime.now().strftime("%H:%M:%S"),
                           self.startup_timer.elapsed_seconds())

        if einhorn.is_worker():
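            # under einhorn, a worker acks once it is ready to serve so the
            # master knows the spawn succeeded; acking only after setup
            # completes avoids advertising a half-initialized worker
            # (summary of the einhorn ack protocol, not from this codebase).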
            einhorn.ack_startup()

    def record_repo_version(self, repo_name, git_dir):
        """Get the currently checked out git revision for a given repository,
        record it in g.versions, and return the short version of the hash."""
        try:
            subprocess.check_output
        except AttributeError:
            # python 2.6 compat
            pass
        else:
            try:
                revision = subprocess.check_output(
                    ["git", "--git-dir", git_dir, "rev-parse", "HEAD"])
            except subprocess.CalledProcessError as e:
                self.log.warning("Unable to fetch git revision: %r", e)
            else: