def scanPage(self, elements, connection, request, extra={}):
    "Scan the rendered page for missing elements"
    global request_actions, key_fetch_actions, hash_fetch_actions
    elements.update(extra)
    logged_in = self.find_prefix(elements, 'session_') is not None
    data = self.find_prefix(elements, 'page_')['response'].body
    matches = self.specialization_re.findall(data)
    missing_keys = []
    for match in matches:
        # Parse element
        try:
            parts = match.strip().split()
            command = parts[0].lower()
            element_type = parts[1].lower()
            element_id = parts[2]
        except:
            mail.error('Error in scanPage:\n%s' % traceback.format_exc())
            continue
        log.msg("Matched element (%s %s %s)" % (command, element_type, element_id))
        if element_type not in request_actions:
            if element_type in key_fetch_actions or element_type in hash_fetch_actions or logged_in:
                key = self.store.elementHash(request, element_type, element_id)
                if key and key not in missing_keys:
                    missing_keys.append(key)
    if missing_keys:
        log.msg("Fetching missing keys %s" % repr(missing_keys))
        d = self.store.get(missing_keys, request)
        d.addCallback(self.renderPage, connection, request, elements)
    else:
        self.renderPage({}, connection, request, elements)
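# Illustrative only: self.specialization_re is defined elsewhere in this codebase.
# Given the "<& get geo ip &>" directive syntax referenced further down, it is
# presumably something along the lines of
#
#   specialization_re = re.compile(r'<&(.*?)&>', re.S)
#
# so that each match handed to scanPage() is the inner "command type id" text.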
def lineReceived(self, line):
    if not self.active:
        return
    #log.msg('Line: %s' % repr(line))
    if not self.object:
        self.object = HTTPObject(self.object_count)
        self.object.received_on = self.received_on
        self.object_count += 1
    #log.msg('mode: %s' % self.request.mode)
    if self.object.mode == 'status':
        try:
            parts = line.split()
            if parts[0].upper() in ['GET', 'PUT', 'POST', 'DELETE', 'HEAD']:
                # Request start line: METHOD URI PROTOCOL
                self.object.method, self.object.uri, self.object.protocol = parts
            else:
                # Response status line: PROTOCOL STATUS MESSAGE
                self.object.protocol = parts[0]
                self.object.status = int(parts[1])
                self.object.message = ' '.join(parts[2:])
            self.object.mode = 'headers'
            return
        except:
            mail.error("Bad line was: %s\n%s" % (line, traceback.format_exc()))
            try:
                self.sendCode(400)
            except:
                pass
            self.shutdown()
            return
    elif self.object.mode == 'headers':
        if line != '':
            try:
                key, value = line.split(': ')
                if key.lower() == 'cookie':
                    new_cookies = value.split('; ')
                    self.object.cookies.extend(new_cookies)
                    #log.msg('got request cookies %s' % new_cookies)
                elif key.lower() == 'set-cookie':
                    new_cookie = value
                    self.object.cookies.append(new_cookie)
                    #log.msg('got response cookie %s' % new_cookie)
                else:
                    self.object.setHeader(key, value)
            except:
                self.sendCode(400)
                self.shutdown()
                return
        else:
            #log.msg('Headers:\n%s' % repr(self.object.headers))
            length = self.object.getHeader('content-length')
            #log.msg('Got length of %s' % length)
            if length and int(length) > 0:
                self.mode = 'body'
                self.setRawMode()
                #log.msg('Switched to binary mode!')
            else:
                self.factory.objectReceived(self, self.object)
                self.object = None
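# Illustrative input for lineReceived() above (not from the source): the same
# parser accepts both request and response start lines, then headers, then an
# optional body once Content-Length is seen.
#
#   GET /index.html HTTP/1.1          -> method / uri / protocol    ('status' mode)
#   HTTP/1.1 200 OK                   -> protocol / status / message
#   Cookie: session_id=abc; lang=en   -> split on '; ' into self.object.cookies
#   Content-Length: 42                -> stored via self.object.setHeader()
#   (blank line)                      -> switches to raw mode when a body follows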
def setup_log(self, name):
    try:
        self.log_file = open(name, 'a')
        self.log_observer = log.FileLogObserver(self.log_file)
        log.startLoggingWithObserver(self.log_observer.emit)
    except:
        msg = "Error in setup_log:\n%s" % traceback.format_exc()
        print msg
        mail.error(msg)
def signal_handler(self, signo, frame):
    try:
        log.msg('Rotating log %s' % self.log_filename)
        log.removeObserver(self.log_observer.emit)
        self.log_file.close()
        self.setup_log(self.log_filename)
    except:
        msg = "Error in signal_handler:\n%s" % traceback.format_exc()
        print msg
        mail.error(msg)
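# Sketch (assumption, not shown in this fragment): for the rotation above to be
# triggered by an external logrotate, signal_handler() is presumably registered
# for SIGHUP during startup, along the lines of:
#
#   import signal
#   signal.signal(signal.SIGHUP, self.signal_handler)
#
# The (signo, frame) signature matches what the signal module passes to handlers.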
def __init__(self, config):
    self.config = config

    # Memcache Backend
    servers = config.get('backend_memcache').split(',')
    log.msg('Creating connections to backend_memcache servers %s...' % ','.join(servers))
    try:
        self.proto = mc.Mc(servers, pool_size=5)
        log.msg('backend_memcache OK')
    except:
        log.msg('ERROR: Failed to connect to backend_memcache')
        log.msg(traceback.format_exc())

    # Database Backend
    try:
        self.db = adbapi.ConnectionPool(
            "pyPgSQL.PgSQL",
            database=config['backend_dbname'],
            host=config['backend_dbhost'],
            user=config['backend_dbuser'],
            password=config['backend_dbpass'],
            cp_noisy=True,
            cp_reconnect=True,
            cp_min=5,
            cp_max=20,
        )
        log.msg("Connected to db.")
    except ImportError:
        mail.error("Could not import PyPgSQL!\n%s" % traceback.format_exc())
    except:
        mail.error("Unable to connect to backend database.\n%s" % traceback.format_exc())

    # HTTP Backend
    try:
        self.backend_host, self.backend_port = self.config['backend_webserver'].split(':')
        self.backend_port = int(self.backend_port)
    except:
        self.backend_host = self.config['backend_webserver']
        self.backend_port = 80

    # Cache Backend
    log.msg('Initializing cache...')
    cache_type = config['cache_type'].capitalize() + 'Cache'
    self.cache = getattr(cache, cache_type)(config)

    # Memoize variants of a URI
    self.uri_lookup = {}

    # Request pileup queue
    self.pending_requests = {}
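# Sketch of the configuration keys this constructor reads. The key names come from
# the code above; the values are invented examples, and 'port' / 'memory_limit' are
# read by the startup code further down rather than here:
#
#   config = {
#       'backend_memcache':  '10.0.0.1:11211,10.0.0.2:11211',
#       'backend_dbname':    'appdb',
#       'backend_dbhost':    'db.internal',
#       'backend_dbuser':    'app',
#       'backend_dbpass':    'secret',
#       'backend_webserver': 'web.internal:8080',  # host or host:port
#       'cache_type':        'memory',             # resolved to cache.MemoryCache (class name is a guess)
#       'port':              '8000',
#       'memory_limit':      '1000',
#   }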
        val = soft
        break
    log.msg('%s file descriptors available (system max is %s)' % (val, hard))
except:
    log.msg('Error setting fd limit!')
    traceback.print_exc()

# Check memory usage
check_memory(int(config.get('memory_limit', 1000)))

# Start request handler event loop
try:
    import handler
    factory = handler.RequestHandler(config)
    reactor.listenTCP(int(config['port']), factory)
except:
    mail.error('Error starting handler!\n%s' % traceback.format_exc())

shell = telnet.ShellFactory()
shell.username = '******'
shell.password = '******'
try:
    reactor.listenTCP(4040, shell)
    log.msg('Telnet server running on port 4040.')
except:
    log.msg('Telnet server not running.')

# Run
reactor.run()
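# Sketch (assumption): the truncated code just above this fragment presumably walks
# a list of candidate file-descriptor limits and keeps the first one the OS accepts,
# roughly:
#
#   import resource
#   _, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
#   for soft in (hard, 8192, 4096, 1024):
#       try:
#           resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard))
#       except ValueError:
#           continue
#       val = soft
#       break
#
# The candidate values and the use of the resource module are guesses; only
# 'val = soft' / 'break' and the surrounding logging survive in the fragment.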
def __init__(self, config):
    self.config = config

    # Memcache Backend
    servers = config.get('backend_memcache').split(',')
    log.msg('Creating connections to backend_memcache servers %s...' % ','.join(servers))
    try:
        import mc
    except:
        log.msg('Failed to import memcache helper library!')
        log.msg(traceback.format_exc())
        return
    try:
        self.proto = mc.Mc(servers)
        log.msg('backend_memcache OK')
    except:
        log.msg('Failed to create memcache object!')
        log.msg(traceback.format_exc())

    # Viewdb Backend
    servers = config.get('backend_viewdb').split(',')
    log.msg('Creating connections to backend_viewdb servers %s...' % ','.join(servers))
    try:
        import mc
    except:
        log.msg('Failed to import memcache helper library!')
        log.msg(traceback.format_exc())
        return
    try:
        self.viewdb = mc.Mc(servers)
        log.msg('backend_viewdb OK')
    except:
        log.msg('Failed to create memcache object!')
        log.msg(traceback.format_exc())

    # Database Backend
    try:
        self.db = adbapi.ConnectionPool(
            "pyPgSQL.PgSQL",
            database=config['backend_dbname'],
            host=config['backend_dbhost'],
            user=config['backend_dbuser'],
            password=config['backend_dbpass'],
            cp_noisy=True,
            cp_reconnect=True,
            cp_min=5,
            cp_max=20,
        )
        log.msg("Connected to db.")
    except ImportError:
        mail.error("Could not import PyPgSQL!\n%s" % traceback.format_exc())
    except:
        mail.error("Unable to connect to backend database.\n%s" % traceback.format_exc())

    # AB Testing Groups
    self.abTestingGroups = {}
    self.loadAbTestingGroups()

    # HTTP Backend
    try:
        self.backend_host, self.backend_port = self.config['backend_webserver'].split(':')
        self.backend_port = int(self.backend_port)
    except:
        self.backend_host = self.config['backend_webserver']
        self.backend_port = 80

    # Cache Backend
    log.msg('Initializing cache...')
    cache_type = config['cache_type'].capitalize() + 'Cache'
    self.cache = getattr(cache, cache_type)(config)

    # Memoize variants of a URI
    self.uri_lookup = {}

    # Request pileup queue
    self.pending_requests = {}
def specialize(self, expression):
    "Parse an expression and return the result"
    try:
        expression = expression.groups()[0].strip()
        parts = expression.split()
        # Syntax is: command target arg1 arg2 ... argn
        #   command - one of 'get', 'pop', 'if', 'unless', 'incr', 'decr'
        #   target  - one of 'memcache', 'session'
        #   arg[n]  - usually the name of a key
        command, target, args = parts[0].lower(), parts[1], parts[2:]
        #log.msg('command: %s target: %s args: %s' % (command, target, repr(args)))
    except:
        mail.error('Could not parse expression: [%s]' % expression)
        return expression

    # Grab dictionary
    try:
        dictionary = getattr(self, 'current_' + target)
    except:
        dictionary = {}

    # Split plain arguments from filters (anything after a '|')
    actual_args = []
    filters = []
    next_filter = False
    for arg in args:
        if arg == "|":
            next_filter = True
        elif next_filter:
            filters.append(arg)
        else:
            actual_args.append(arg)
    args = actual_args

    return_val = None
    #log.msg('dictionary: %s' % dictionary)

    # Handle commands
    if command == 'get' and len(args) >= 1:
        if len(args) >= 2:
            default = args[1]
        else:
            default = ''
        val = dictionary.get(args[0])
        if not val:
            val = default
        #log.msg('arg: %s val: %s (default %s)' % (args[0], val, default))
        return_val = str(val)
    elif command == 'pop' and len(args) >= 1:
        if len(args) >= 2:
            default = args[1]
        else:
            default = ''
        val = dictionary.get(args[0])
        if not val:
            val = default
        else:
            try:
                getattr(self.store, command + '_delete')(args[0])
            except:
                mail.error('Data store is missing %s_delete' % command)
        return_val = str(val)
    elif command == 'if' and len(args) >= 2:
        if dictionary.get(args[0]):
            return_val = str(args[1])
        elif len(args) >= 3:
            return_val = str(args[2])
        else:
            return_val = ''
    elif command == 'unless' and len(args) >= 2:
        if not dictionary.get(args[0]):
            return_val = str(args[1])
        elif len(args) >= 3:
            return_val = str(args[2])
        else:
            return_val = ''
    elif (command == 'incr' or command == 'decr') and len(args) >= 1:
        try:
            func = getattr(self.store, command + '_' + target)
            set_func = getattr(self.store, 'set_' + target)
        except:
            mail.error('Data store is missing %s_%s or set_%s' % (command, target, target))
        return_val = ''
        val = dictionary.get(args[0])
        if val:
            try:
                func(args[0])
                if command == 'incr':
                    dictionary[args[0]] = int(val) + 1
                else:
                    dictionary[args[0]] = int(val) - 1
            except:
                pass
        elif len(args) >= 2:
            set_func(args[0], args[1])
            dictionary[args[0]] = args[1]
            return_val = ''
    else:
        log.msg('Invalid command: %s' % command)
        return_val = expression

    return self.apply_filters(return_val, filters)
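# Illustrative directives handled by specialize() above (the "<& ... &>" delimiters
# come from the GeoLookup comment below; key names and filter names are made up):
#
#   <& get memcache hit_count 0 &>          -> current_memcache.get('hit_count') or '0'
#   <& pop session flash_message &>         -> value, then self.store.pop_delete('flash_message')
#   <& if session user_id Welcome Guest &>  -> 'Welcome' if user_id is set, else 'Guest'
#   <& unless session user_id LogIn &>      -> 'LogIn' only when user_id is missing
#   <& incr memcache page_views &>          -> bumps the counter, renders as ''
#   <& get memcache hit_count 0 | commas &> -> tokens after '|' are passed to apply_filters()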
from __future__ import with_statement
from twisted.internet import reactor, defer, protocol
from twisted.python import log
import sys, urllib, time, re, traceback, os
import cPickle as pickle
import parser, storage, http, cache, mail

try:
    import GeoIP
    gi = GeoIP.new(GeoIP.GEOIP_MEMORY_CACHE)
except:
    mail.error('Unable to load GeoIP library:\n%s' % traceback.format_exc())

# for adding thousands-separator commas to numeric strings
comma_re = re.compile(r"(?:\d*\.)?\d{1,3}-?")

request_actions = ['page', 'session', 'geo', 'ip']
key_fetch_actions = ['memcache', 'viewdb']
hash_fetch_actions = ['abvalue']
session_actions = ['session', 'favorite', 'subscription', 'unread']

# acts as a fake dictionary for <& get geo ip &>
class GeoLookup:
    def __init__(self, request, connection):
        self.request = request
        self.connection = connection
        self.geos = dict()

    def get(self, ip="ip"):
        if ip not in self.geos: