        else:
            self.dictRules[newkey] = l2

    def check_url_match(self, host, req):
        # Look up rules by the full lowercased URL first, then fall back
        # to the bare host.
        fullurl = host + req
        dickey = fullurl.lower()
        ruleitemlist = None
        if dickey in self.dictRules:
            ruleitemlist = self.dictRules[dickey]
        else:
            dickey = host.lower()
            if dickey in self.dictRules:
                ruleitemlist = self.dictRules[dickey]
        if ruleitemlist is None:
            return
        for ruleitem in ruleitemlist:
            if ruleitem.is_url_match(host, req):
                r = ruleitem.get_redirect_info()
                ruleitem.imatch_count += 1
                return r

if __name__ == '__main__':
    import mylogging
    mylogging.setuplog()
    x = Xconfiger()
    x.init()
    x.check_url_match('11', 'bb')
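# Standalone sketch (not part of the module) of the two-level lookup order
# check_url_match uses: the full lowercased URL wins over the bare host.
# The dict contents below are made up for illustration.
_demo_rules = {'example.com/a': 'full-url rule', 'example.com': 'host rule'}

def _demo_lookup(host, req):
    key = (host + req).lower()
    if key in _demo_rules:
        return _demo_rules[key]
    return _demo_rules.get(host.lower())

assert _demo_lookup('example.com', '/a') == 'full-url rule'
assert _demo_lookup('example.com', '/b') == 'host rule'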
except: print "Error: unable to start start_listen_exit" import ConfigParser import mylogging import basedef import gpwarning import save_log_redis import gpconf if __name__ == '__main__': import logging logging.getLogger("urllib3").setLevel(logging.WARNING) mylogging.setuplog('url_updator.txt') reload(sys).setdefaultencoding("utf8") print 'system encoding: ', sys.getdefaultencoding() gpconf.make_gcs() basedef.GCS.init() basedef.GWARNING = gpwarning.Warning() basedef.GWARNING.init() basedef.GSaveLogRedisPub = save_log_redis.SaveLogging2Redis() basedef.GSaveLogRedisPub.init() start_listen_exit() run_updators() app = web.application(urls, globals()) app.notfound = notfound app.run()
        print msg
        if msg == "ok":
            clean_onexit()
            return

def start_listen_exit():
    import thread
    try:
        thread.start_new_thread(listen_exit, ('listen_exit', ))
    except:
        print "Error: unable to start start_listen_exit"

if __name__ == '__main__':
    mylogging.setuplog('save_log_redis.txt')
    import gpconf
    gpconf.make_gcs()
    basedef.GCS.init()
    cfgobj = basedef.GCS.get_config_obj()
    save_log_pub_redis_num = cfgobj.getint('boot', 'save_log_pub_redis_num')
    save_log_pub_channel = cfgobj.get('boot', 'save_log_pub_channel')
    start_listen_exit()
    obj = SaveLogging2Mysql()
    obj.init([save_log_pub_channel], save_log_pub_redis_num)
    save_con = obj.conn
    obj.go()
    if obj.conn:
        logging.info('do the last db commit')
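# For reference, the [boot] section the startup code above reads via
# getint/get. The option names come from the calls above; the values are
# placeholders, not the real configuration:
#
#   [boot]
#   save_log_pub_redis_num = 0
#   save_log_pub_channel = savelog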
            print 'mysql MYSQL_HOST:', self.MYSQL_HOST
            print 'mysql MYSQL_USR:', self.MYSQL_USR
            print 'mysql MYSQL_PWD:', self.MYSQL_PWD
        else:
            logging.info('url_type_valid_time(days):%s',
                         basedef.GP_URL_TYPE_VALID_TIMES / 3600 / 24)
            logging.warning('not found:%s', cfg)
    except Exception, e:
        logging.error(str(e))
        logging.error(traceback.format_exc())

    def get_direct_info(self, host, req, useragent, referer, short_host=None):
        # Dispatch to the redis-backed matcher when rules live in redis,
        # otherwise fall back to the mysql matcher.
        if self.is_rule_from_redis():
            r = url_redis_matcher.get_direct_info(host, req, useragent, referer, short_host)
        else:
            r = url_mysql_matcher.get_direct_info(host, req)
        return r

def make_gcs():
    basedef.GCS = confserver()

if __name__ == "__main__":
    mylogging.setuplog('gpconfig')
    print netif.interfaces()
    print bindaddr
    socket.connect(bindaddr)
    obj = redis.Redis()
    cache_info = range(max_count)  # pre-sized buffer holding one batch of records
    idx = 0
    while 1:
        rdata = socket.recv()
        if str(rdata).startswith("block"):
            record_block_url(rdata[5:])
            continue
        cache_info[idx] = rdata
        idx += 1
        if idx == max_count:
            idx = 0
            pipe = obj.pipeline()
            pipe.rpush('visitinginfo', *cache_info)
            pipe.execute()  # push the full batch to redis in one round trip
            #logging.info('pipe.execute() result:%s, os.getpid():%s', len(pipe.execute()), os.getpid())

if len(sys.argv) <= 1:
    print 'parse args fail'
else:
    mylogging.setuplog('mqpuller' + str(os.getpid()) + '.txt')
    create_db("redirect_history")
    max_count = 1000
    if len(sys.argv) == 3:
        max_count = int(sys.argv[2])
    start_listen_exit()
    listen(sys.argv[1])
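# A hypothetical consumer for the batches pushed above, not part of this
# project; only the list key 'visitinginfo' comes from the code above. It
# drains the redis list with a blocking pop until the queue goes quiet.
import redis

def drain_visiting_info():
    r = redis.Redis()
    while 1:
        item = r.blpop('visitinginfo', timeout=5)  # (key, value) or None
        if item is None:
            break
        print item[1]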
    ps = redobj.pubsub()
    ps.subscribe("exitpygp")
    for item in ps.listen():
        print item
        if item['type'] != 'message':
            continue
        msg = item['data']
        print msg
        if msg == "ok":
            clean_onexit()
            return

def start_listen_exit():
    import thread
    try:
        thread.start_new_thread(listen_exit, ('listen_exit', ))
    except:
        print "Error: unable to start start_listen_exit"

if __name__ == '__main__':
    mylogging.setuplog('check404.txt')
    gpconf.make_gcs()
    basedef.GCS.init()
    clock = 3
    if len(sys.argv) == 2:
        clock = int(sys.argv[1])
    logging.info('clock:%s', clock)
    start_listen_exit()
    setup_3am_job(clock)
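# The sending side of the shutdown handshake implied by listen_exit above:
# publishing "ok" on the "exitpygp" channel makes every subscribed process
# run clean_onexit() and stop. Both names come from the subscriber code; the
# function name here is a hypothetical wrapper.
import redis

def request_exit():
    redis.Redis().publish('exitpygp', 'ok')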
        path = '/usr/share/nginx/html/info.txt'
    else:
        path = 'g:/info.txt'
    query = '''select count(*) from forgeurls'''
    dbobj.execute(query)
    result = dbobj.fetchall()
    print result
    if result is not None and len(result) > 0:
        logging.info('write count')
        count = result[0][0]
        with open(path, "w") as fp:
            fp.write(str(count))
    time.sleep(60 * 60)

class deamon(object):
    def Start(self):
        try:
            thread.start_new_thread(CollectThread, ("Thread-1", ))
        except:
            print "Error: unable to start thread"

if __name__ == '__main__':
    mylogging.setuplog('collectforurl')
    db.createalltables()
    CollectThread('collectforurl from tx')
        msg = item['data']
        if msg == "ok":
            clean_onexit()
            return

def start_listen_exit():
    try:
        thread.start_new_thread(listen_exit, ('listen_exit',))
    except:
        print "Error: unable to start start_listen_exit"

if __name__ == '__main__':
    mylogging.setuplog('guideprotect')
    reload(sys).setdefaultencoding("utf8")
    if len(sys.argv) >= 2 and sys.argv[1].find('test') != -1:
        basedef.GCFG = 'guideprotect-test.conf'
        logging.info('enter testing mode')
    logging.info('guideprotect up.....')
    ignoremgr.init()
    gpconf.make_gcs()
    basedef.GCS.init()
    basedef.GWARNING = gpwarning.Warning()
    basedef.GWARNING.init()
    basedef.GSaveLogRedisPub = save_log_redis.SaveLogging2Redis()
    basedef.GSaveLogRedisPub.init()