# NOTE(review): `TupleType` is Python 2 only (it does not exist in Python 3)
# and is not used anywhere in this visible chunk -- confirm it is needed.
from types import TupleType
from repoze.lru import ExpiringLRUCache
from bs4 import BeautifulSoup

# NOTE(review): `logging` and `datetime` are used below but their imports are
# not visible in this chunk -- confirm they are imported elsewhere in the file.
log = logging.getLogger("urltitle")

# Populated by init(); module-level code must not use these before init() runs.
config = None
bot = None

# NOTE(review): unit/meaning not shown in this chunk -- presumably an upper
# bound in seconds on title-fetch lag; confirm against the rest of the file.
TITLE_LAG_MAXIMUM = 10

# Caching for url titles
cache_timeout = 300  # 300 second timeout for cache
cache = ExpiringLRUCache(10, cache_timeout)


def init(botref):
    """Store the bot reference and load this module's configuration section.

    Called once by the bot framework; `config` falls back to an empty dict
    when no "module_urltitle" section exists.
    """
    global config
    global bot
    bot = botref
    config = bot.config.get("module_urltitle", {})


def __get_bs(url):
    # Fetch the content and measure how long it took
    start = datetime.now()
    r = bot.get_url(url)
    end = datetime.now()
    # NOTE(review): this definition continues past the visible chunk -- truncated here.
def make_cache(self, name, maxsize, timeout):
    """Create and register a named ExpiringLRUCache in self.caches.

    Args:
        name: unique key under which the cache is registered.
        maxsize: maximum number of entries the cache may hold.
        timeout: default time-to-live (seconds) for cache entries.

    Raises:
        Exception: if a cache with this name has already been registered.
    """
    if name in self.caches:
        # Fail loudly with a useful message instead of a bare Exception()
        # that gives the caller no hint about what went wrong.
        raise Exception("cache %r already exists" % (name,))
    self.caches[name] = ExpiringLRUCache(maxsize, timeout)
from repoze.lru import CacheMaker, ExpiringLRUCache
from schematics.models import Model
from schematics.transforms import blacklist
from schematics.types import BaseType, StringType, DateTimeType, LongType, FloatType, NumberType, BooleanType
from schematics.types.serializable import serializable

from server import in_request_context
from utils import dotget, epoch

# NOTE(review): `logging`, `re`, and `timedelta` are used below but their
# imports are not visible in this chunk -- confirm they exist elsewhere.
logger = logging.getLogger('wigo.model')

# Matches "{field}" placeholders (non-greedy), capturing the field name.
INDEX_FIELD = re.compile('\{(.*?)\}', re.I)

DEFAULT_EXPIRING_TTL = timedelta(days=20)

# Factory for per-function memoization caches: up to 1000 entries, 60s TTL.
cache_maker = CacheMaker(maxsize=1000, timeout=60)
# Shared model-instance cache: up to 50000 entries, 1 hour TTL.
model_cache = ExpiringLRUCache(50000, 60 * 60)


class JsonType(BaseType):
    """Schematics field type that passes JSON-compatible values through unchanged."""

    def _mock(self, context=None):
        # Placeholder value used by schematics when generating mock data.
        return {}

    def to_native(self, value, context=None):
        # Identity conversion: the stored value is used as-is.
        return value

    def to_primitive(self, value, context=None):
        # Identity conversion: assumed to already be JSON-serializable.
        return value


def field_memoize(field=None):
    # NOTE(review): this definition continues past the visible chunk -- truncated here.
    def inner(f):
# Our libs from ..models import DATE_FORMAT from ..models import Post from ..models import Upvote from ..models import User # Constants ENTRIES_PER_PAGE = 25 MAX_CACHE_ENTRIES = 50 CACHE_TIMEOUT_IN_SECONDS = 3600 # 1 hour # Initialize blueprint blueprint = Blueprint('root', __name__) # Initialize LRU cache cache = ExpiringLRUCache(MAX_CACHE_ENTRIES, default_timeout=CACHE_TIMEOUT_IN_SECONDS) @blueprint.route('/') @blueprint.route('/page/<int:page>') @blueprint.route('/r/<subreddit>') @blueprint.route('/r/<subreddit>/page/<int:page>') @blueprint.route('/posts') @blueprint.route('/posts/<int:year>/<int:month>/<int:day>/<slug>') def index(subreddit=None, page=1, year=None, month=None, day=None, slug=None): app_config = current_app.config env = app_config.get('APP_ENV', 'dev').lower() webpack_dev_server_hostname = app_config.get('WEBPACK_DEV_SERVER_HOSTNAME', '') unminified_filename = 'app/js/index.bundle.js'
# Shutdown flag flipped by the signal handler; presumably polled by the main
# loop for graceful exit -- the loop itself is outside this chunk.
KILLED = False

# NOTE(review): `argparse`, `logging`, `conf`, `log`, and `ExpiringLRUCache`
# are referenced here but their imports are not visible in this chunk.
parser = argparse.ArgumentParser(description='Lisa does smart things with IPs.')
parser.add_argument('--file', default=str(conf.GEOIP_DB_FILE), help='path to mmdb file (default: %s)' % conf.GEOIP_DB_FILE)
parser.add_argument('--log', default=conf.LOG_LEVEL, metavar='LOG_LEVEL', help='Log level (default: %s)' % conf.LOG_LEVEL)
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('--benchmark', action='store_true', help='Process queue then print stats and exit')
args = parser.parse_args()

logging.basicConfig(level=getattr(logging, args.log.upper()), format='%(asctime)s: %(message)s')

# Per-IP counter store: tracks at most 10000 IPs, entries expire after 60s.
rate_limiter = ExpiringLRUCache(10000, 60)


def handle_signals(signum, frame):
    """Signal handler: request a graceful shutdown by setting KILLED."""
    # NOTE: Makes this thing non-thread-safe
    # Should not be too difficult to fix if we
    # need/want threads.
    global KILLED
    KILLED = True
    log.info('Attempting to shut down')


def rate_limit_ip(ip):
    """Return boolean whether the IP is rate limited"""
    # Missing entries (expired or never seen) default to 0 calls.
    calls = rate_limiter.get(ip, 0)
    # NOTE(review): this definition continues past the visible chunk -- truncated here.
    if calls: