def xss_scan(request, config_level):
    """Probe every GET/POST parameter with XSS payloads from the xss rule file.

    A parameter is flagged (request_stat = 1) when the injected payload is
    reflected verbatim in the response body.  Returns a dict of the form
    {"request_stat": 0|1, "message": "<hit descriptions joined by |,|>"}.
    """
    message = {"request_stat": 0, "message": ""}
    dom = minidom.parse(config.rule_read("xss", get_file_handle=True)).documentElement
    for node in dom.getElementsByTagName('couple'):
        # only rule groups at or below the configured scan level are used
        if int(node.getAttribute("id")) > config_level:
            continue
        payloads = node.getElementsByTagName('requests')[0].childNodes[0].nodeValue.strip()
        for raw_payload in payloads.splitlines():
            # hoisted: the original recomputed strip()/encode() on every use
            payload = raw_payload.strip()
            payload_bytes = payload.encode("utf-8")
            # inject into each query-string parameter
            for param_name in urlparse(request['url']).query.split("&"):
                response = request_payload(request, payload, param_name)
                if payload_bytes in response:
                    message['request_stat'] = 1
                    message['message'] += "payload: %s|#|param: %s|#|findstr: %s|,|" % (
                        payload_bytes, param_name.split("=")[0], payload_bytes)
                    if config.load()['only_one_match'].lower() == "true":
                        return message
            # inject into each POST parameter (skipped entirely for an empty body)
            if request['postdata'] != "":
                for param_name in request['postdata'].split("&"):
                    response = request_payload(request, payload, param_name, postdata=True)
                    if payload_bytes in response:
                        message['request_stat'] = 1
                        message['message'] += "payload: %s|#|param: %s|#|findstr: %s|,|" % (
                            payload_bytes, param_name.split("=")[0], payload_bytes)
                        if config.load()['only_one_match'].lower() == "true":
                            return message
    return message
def main():
    """Run the mix-proxy listener: accept connections and hand each to a thread.

    Binds to mix_addr:mix_port from config; on bind failure flips mix_stat to
    "false" and exits.  The accept loop runs while mix_stat stays "true".
    """
    try:
        addr = config.load()['mix_addr']
        port = int(config.load()['mix_port'])
        bindsocket = socket.socket()
        bindsocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        bindsocket.bind((addr, port))
        bindsocket.listen(300)
    except Exception as e:
        conf = config.load()
        conf['mix_stat'] = "false"
        config.update(conf)
        out.error(e)  # fixed: was `our.error(e)` — a NameError typo
        exit()
    while config.load()['mix_stat'].lower() == "true":
        try:
            connstream, fromaddr = bindsocket.accept()
            t = threading.Thread(target=client_conn, args=(connstream, ))
            t.start()
        except Exception as e:
            out.error("error")
            print(e)
            # close the client socket if accept() got far enough to create it
            if 'connstream' in dir():
                connstream.close()
    bindsocket.close()
def content_deal(headers, host, method, postdata, uri, packet):
    """Filter a captured request and queue it for scanning (Redis backend).

    Requests are dropped when the path has a blacklisted extension, the
    domain is blacklisted, or a whitelist is configured and the domain is
    not on it.  Surviving requests are stored base64(JSON) under the
    "request" hash and their hash pushed onto "waiting" (hsetnx dedupes).
    """
    u = urlparse.urlparse(uri)
    url = uri.split(u.netloc)[-1]
    white_domain = config.load()['white_domain']
    black_domain = config.load()['black_domain']
    black_ext = config.load()['black_ext']
    for ext in black_ext.split(','):
        # fixed: match ".ext" like mongo_insert does; a bare endswith(ext)
        # would also drop paths that merely end in the same letters
        if u.path.lower().endswith("." + ext):
            return
    for domain in black_domain.split(','):
        if u.netloc.lower().split(':')[0].endswith(domain):
            return
    if white_domain != "":
        for domain in white_domain.split(','):
            if not u.netloc.lower().split(':')[0].endswith(domain):
                return
    reqhash = get_hash(host, uri, postdata)
    # requests generated by the scanner itself carry a Gdscan header
    if 'Gdscan' not in headers.keys():
        request = {
            'headers': headers,
            'host': host,
            'method': method,
            'postdata': postdata,
            'url': uri,
            'packet': packet
        }
        b64req = base64.encodestring(json.dumps(request))
        if conn.hsetnx("request", reqhash, b64req):
            conn.lpush("waiting", reqhash)
def sqlibool_scan(request, config_level):
    """Boolean-based SQL injection check driven by compare-pairs from the rule file.

    For each parameter, two "true" payloads (compare1/compare11) and two
    "false" payloads (compare2/compare22) are sent; a hit (request_stat = 2)
    requires each pair to agree with itself while differing from the other.
    """
    message = {"request_stat": 0, "message": ""}
    dom = minidom.parse(config.rule_read("sqlibool", get_file_handle=True)).documentElement
    for node in dom.getElementsByTagName('couple'):
        if int(node.getAttribute("id")) > config_level:
            continue
        for compare in node.getElementsByTagName("compare"):
            compare1 = compare.getElementsByTagName("compare1")[0].childNodes[0].nodeValue
            compare11 = compare.getElementsByTagName("compare11")[0].childNodes[0].nodeValue
            compare2 = compare.getElementsByTagName("compare2")[0].childNodes[0].nodeValue
            compare22 = compare.getElementsByTagName("compare22")[0].childNodes[0].nodeValue
            # query-string parameters
            for param_name in urlparse(request['url']).query.split("&"):
                response1 = request_payload(request, compare1, param_name)
                response2 = request_payload(request, compare2, param_name)
                response22 = request_payload(request, compare22, param_name)
                time.sleep(1)  # prevent time stamp in response
                response11 = request_payload(request, compare11, param_name)
                if response1 == response11 and response2 == response22 and response1 != response2:
                    message['request_stat'] = 2
                    message['message'] += "payload1: %s|#|payload2: %s|#|param: %s|,|" % (
                        compare1.encode('utf-8'), compare2.encode('utf-8'),
                        param_name.split("=")[0])
                    if config.load()['only_one_match'].lower() == "true":
                        return message
            # POST parameters
            for param_name in request['postdata'].split("&"):
                if request['postdata'] == "":
                    break
                response1 = request_payload(request, compare1, param_name, postdata=True)
                response11 = request_payload(request, compare11, param_name, postdata=True)
                response2 = request_payload(request, compare2, param_name, postdata=True)
                response22 = request_payload(request, compare22, param_name, postdata=True)
                if response1 == response11 and response2 == response22 and response1 != response2:
                    message['request_stat'] = 2
                    message['message'] += "payload1: %s|#|payload2: %s|#|param: %s|,|" % (
                        compare1.encode('utf-8'), compare2.encode('utf-8'),
                        param_name.split("=")[0])
                    if config.load()['only_one_match'].lower() == "true":
                        return message
    return message
def main():
    """Start the Tornado web UI: parse options, reset daemon flags, serve the app."""
    define("port", default=int(config.load()["port"]), type=int)
    define("address", default=config.load()["ip"])
    tornado.options.parse_command_line()
    root = os.path.dirname(os.path.abspath(__file__))
    settings = {
        'static_path': os.path.join(root, "web", "static"),
        'template_loader': tornado.template.Loader(os.path.join(root, "web", "templates")),
        'login_url': "/login",
        'debug': True,
    }
    site.addsitedir(os.path.join(root, 'handlers'))
    # mark every background daemon as stopped on a fresh start
    conf = config.load()
    for key in ('scapy_stat', 'tornado_stat', 'scan_stat', 'mix_stat', 'tornado_run_stat'):
        conf[key] = 'false'
    config.update(conf)
    app = make_app(settings)
    app.listen(port=options.port, address=options.address)
    out.good("Web app start at: http://%s:%s" % (options.address, options.port))
    tornado.ioloop.IOLoop.current().start()
def capture(x):
    """Scapy callback: pass raw HTTP request data from a packet to extract().

    Raises to abort the sniffing loop once scapy_stat has been switched off.
    """
    out = config.load()['scapy_out'].lower()
    if config.load()['scapy_stat'].lower() == 'false':
        raise Exception('scapy', 'out')
    raw = x.lastlayer().original
    # NOTE(review): contains "HTTP/" but does not *start* with "HTTP" —
    # presumably this keeps requests and skips responses; confirm
    if 'HTTP/' in raw and raw[0:4] != 'HTTP':
        http = extract(raw, out)
def post(self):
    """Update existing configuration keys from a urlencoded form body.

    Only keys already present in the stored config are accepted; the page is
    re-rendered with the freshly saved configuration.
    """
    conf_all = config.load()
    for pair in self.request.body.split("&"):
        key, sep, raw_value = pair.partition("=")
        if not sep:
            # fixed: a field without '=' used to raise IndexError and abort the handler
            continue
        para = secure.clear(urllib.unquote(key))
        value = secure.clear(urllib.unquote(raw_value))
        if para in conf_all.keys():
            conf_all[para] = value
    config.update(conf_all)
    return self.render("config.html", config=config.load())
def main():
    """Sniff TCP traffic on the configured NIC and feed each packet to capture()."""
    nic = config.load()["scapy_network_card"]  # network adapter name
    try:
        if nic == 'all':
            sniff(filter="tcp", prn=lambda x: capture(x))
        else:
            sniff(iface=nic, filter="tcp", prn=lambda x: capture(x))
    except Exception as e:
        # NOTE(review): `error` is presumably a module-level log helper
        # (other chunks use out.error) — confirm it exists in this module
        error("scapy out!")
        conf = config.load()
        conf['scapy_stat'] = "false"
        config.update(conf)
def check_update():
    """Compare the remote version string with the local one and update on mismatch.

    Returns True when an update was fetched, False otherwise (including on
    any network failure).
    """
    out.good("Checking update...")
    try:
        res = requests.get(config.load()['check_url'], timeout=10)
        # NOTE(review): res.content is bytes on py3 — comparison with the
        # config string may need an explicit decode; confirm target runtime
        version = res.content
        if version != config.load()['version']:
            update()
            return True
        return False
    except Exception:  # narrowed from bare except: keep SystemExit/KeyboardInterrupt alive
        out.error("Can not connect to update server!")
        return False
def common_scan(request, config_level, re_test, scan_type):
    """Generic rule-driven scan: inject payloads, match responses against rules.

    For every payload in the <requests> section the response is matched
    against every line of the <responses> section — as a regex when re_test
    is true, as a plain substring otherwise.  Hits set request_stat = 3.
    Returns {"request_stat": 0|3, "message": "<hit descriptions>"}.
    """
    message = {"request_stat": 0, "message": ""}
    dom = minidom.parse(config.rule_read(scan_type, get_file_handle=True)).documentElement
    for node in dom.getElementsByTagName('couple'):
        if int(node.getAttribute("id")) > config_level:
            continue
        payloads = node.getElementsByTagName('requests')[0].childNodes[0].nodeValue.strip()
        response_rules = node.getElementsByTagName(
            'responses')[0].childNodes[0].nodeValue.strip().splitlines()
        for raw_payload in payloads.splitlines():
            payload = raw_payload.strip()
            # query-string parameters
            for param_name in urlparse(request['url']).query.split("&"):
                response = request_payload(request, payload, param_name)
                if not isinstance(response, str):
                    response = response.decode("utf8", "ignore")
                for response_rule in response_rules:
                    rule = response_rule.strip()
                    hit = re.search(rule, response) if re_test else rule in response
                    if hit:
                        message['request_stat'] = 3
                        message['message'] += "payload: %s|#|param: %s|#|findstr: %s|,|" % (
                            payload.encode('utf-8'), param_name.split("=")[0],
                            rule.encode('utf-8'))
                        if config.load()['only_one_match'].lower() == "true":
                            return message
            # POST parameters
            if request['postdata'] == "":
                continue
            for param_name in request['postdata'].split("&"):
                response = request_payload(request, payload, param_name, postdata=True)
                # fixed: decode like the GET branch — the POST branch used to
                # compare utf-8-encoded rules against an undecoded response,
                # which breaks (TypeError) on py3 where response is bytes
                if not isinstance(response, str):
                    response = response.decode("utf8", "ignore")
                for response_rule in response_rules:
                    rule = response_rule.strip()
                    hit = re.search(rule, response) if re_test else rule in response
                    if hit:
                        message['request_stat'] = 3
                        message['message'] += "payload: %s|#|param: %s|#|findstr: %s|,|" % (
                            payload.encode('utf-8'), param_name.split("=")[0],
                            rule.encode('utf-8'))
                        if config.load()['only_one_match'].lower() == "true":
                            return message
    return message
def size_control():
    """Truncate the session file to at most session_size bytes, keeping leading lines.

    Rewrites the file line by line and stops as soon as the running size
    reaches the configured limit.
    """
    limit = int(config.load()["session_size"])
    if os.path.getsize(SESSION_CONF_FILE) <= limit:
        return
    with open(SESSION_CONF_FILE, 'r') as f:
        lines = f.readlines()
    size = 0
    # `with` guarantees the handle is closed on every exit path
    # (the original leaked it on the early return and left a debug print)
    with open(SESSION_CONF_FILE, 'w') as ff:
        for line in lines:
            size += len(line)
            # fixed: the original compared int against the raw config string
            # (always True on py2, TypeError on py3), so it never truncated
            if size < limit:
                ff.write(line)
            else:
                return
def get(self):
    """Instantiate the Twitter client from the YAML config and log in."""
    config = loader.load("config/twitter.yaml")
    try:
        self.api = twitter.oauth(config['tokens'])
    except tweepy.TweepError as e:  # modernized from py2-only `except X, e` syntax
        logging.info(u"Error while logging into twitter; {error}".format(error=e.message))
def scan_start():
    """Main scanner loop (MongoDB backend): claim waiting items, spawn scan threads.

    Runs while scan_stat is "true"; items without a scannable GET query or
    POST body are marked finished immediately.
    """
    while config.load()['scan_stat'].lower() == "true":
        try:
            # TODO: these two sleeps are unnecessary
            while thread_filled():
                time.sleep(5)
            # take one waiting task and mark it as running
            item = ReqItem()
            item.set_status(ITEM_STATUS.RUNNING)
            reqhash = item.data_obj['hash']
            if not reqhash:
                time.sleep(10)
                continue
            request = item.data_obj['request']
            # TODO: validate the request value
            rules = config.load_rule()['scan_type']
            url = urlparse.urlparse(request['url']).query
            if (request['method'] == "GET" and url != "") or \
                    (request['method'] == "POST" and (request["postdata"] != "" or url != "")):
                t = threading.Thread(target=new_scan, args=(reqhash, item, rules))
                t.start()
            else:
                # non-scannable tasks are marked finished directly
                item.set_status(ITEM_STATUS.FINISHED)
        except Exception as e:  # modernized from py2-only `except Exception, e`
            out.error(str(e))
def get(self):
    """Render the Redis dashboard: queue lengths plus a CSS status class per hash."""
    waiting = conn.lrange("waiting", 0, 15)
    running = conn.lrange("running", 0, 15)
    finished = conn.lrange("finished", 0, 15)
    vulnerable = conn.lrange("vulnerable", 0, 15)
    css_classes = ['success', 'info', 'warning', 'danger']
    stats_all = {}
    for group in (waiting, running, finished, vulnerable):
        for reqhash in group:
            try:
                decode_results = json.loads(
                    base64.b64decode(conn.hget("results", reqhash)))
            except:
                decode_results = {'stat': 0}
            stats_all[reqhash] = css_classes[decode_results['stat']]
    self.render("index.html",
                waiting_num=conn.llen("waiting"),
                running_num=conn.llen("running"),
                finished_num=conn.llen("finished"),
                vulnerable_num=conn.llen("vulnerable"),
                waiting=waiting,
                running=running,
                finished=finished,
                vulnerable=vulnerable,
                time=config.load()["flush_time"],
                stats_all=stats_all)
    return
def get(self):
    """Move every hash from "running" back onto "waiting" (Redis), then confirm."""
    if config.load()['scan_stat'].lower() == 'false':
        return self.write(out.jump("/"))
    # rpoplpush returns None once "running" is drained
    while conn.rpoplpush("running", "waiting"):
        pass
    return self.write(out.alert("reset success!", "/scan_stat?stat=true"))
def get(self):
    """Render the scan-config page: per-rule checkbox state and raw rule text."""
    rule_names = ["sqlireflect", "sqlitime", "xpath", "xss", "sqlibool"]
    # default every rule to unchecked, then flip the enabled ones
    start = {}
    for name in rule_names:
        start[name + "_true"] = ""
        start[name + "_false"] = "checked"
    for name in config.load_rule()["scan_type"]:
        start[name + "_true"] = "checked"
        start[name + "_false"] = ""
    rules = {name: config.rule_read(name) for name in rule_names}
    return self.render("scan_config.html", config=config.load(), start=start,
                       rules=rules, scan_stat=config.load()['scan_stat'])
def post(self):
    """Validate login credentials; on success set a session cookie and redirect to /."""
    account = secure.clear(self.get_argument("account"))
    password = secure.clear(self.get_argument("password"))
    if account == config.load()['account'] and password == config.load()['password']:
        cookie = session.new(self.request.remote_ip)
        expires = int(config.load()["session_expires_time"])
        self.set_cookie("ysrc_token", cookie, expires_days=expires)
        session.update(cookie)
        self.set_header("Location", "/")
        self.set_status(302)
        return
    # wrong credentials: show the failure page
    return self.render("302.html", location="/login",
                       content="Something wrong with you account or password!")
def push():
    """Upload the tracked files to the configured remote folder via scp."""
    conf = load()
    file_list = ' '.join(_files())
    cmd = 'scp %s %s:%s' % (file_list, conf['upload']['ssh'], conf['upload']['folder'])
    code, out, err = shell(cmd)
    if code != 0:
        print('ERROR:', err)
def get(self):
    """Persist the requested scan_stat flag; spawn the scanner daemon when enabling."""
    stat = secure.clear(self.get_argument("stat"))
    config_all = config.load()
    config_all['scan_stat'] = stat
    config.update(config_all)
    if stat.lower() == "true":
        # daemon thread so a hanging scanner never blocks interpreter shutdown
        worker = threading.Thread(target=scan.scan_start, args=())
        worker.setDaemon(True)
        worker.start()
    return self.write(out.jump("/scan_config"))
def get(self):
    """Reset every RUNNING record back to WAITING (MongoDB), then confirm."""
    if config.load()['scan_stat'].lower() == 'false':
        return self.write(out.jump("/"))
    query = {"status": ITEM_STATUS.RUNNING}
    change = {"$set": {"status": ITEM_STATUS.WAITING}}
    conn.update(query, change)
    return self.write(out.alert("reset success!", "/scan_stat?stat=true"))
def new_scan(reqhash, request, rules):
    """Run every enabled rule scanner against one request (Redis backend).

    Results are stored base64(JSON) under "results"; the hash is moved from
    "running" to "finished", and also pushed onto "vulnerable" if any rule
    reported a non-zero stat.
    """
    out.good("start new mission: %s" % reqhash)
    request_message = []
    request_result = {}
    vulnerable = 0
    for rule in rules:
        if config.load()['scan_stat'].lower() == "true":
            # resolve the scanner by name instead of eval(): a plain
            # globals() lookup cannot evaluate arbitrary expressions
            scan_func = globals()[rule + "_scan"]
            message = scan_func(request, int(config.load()['scan_level']))
            request_stat = message['request_stat']
            if request_stat > vulnerable:
                vulnerable = request_stat
            request_message = message['message'].split("|,|")
            request_result[rule] = {"stat": request_stat, "message": request_message}
    request_result['stat'] = vulnerable
    if vulnerable > 0:
        conn.lpush("vulnerable", reqhash)
    conn.hset("results", reqhash, base64.b64encode(json.dumps(request_result).encode()))
    conn.lrem("running", 1, reqhash)
    conn.lpush("finished", reqhash)
def thread_filled():
    """Return True when the count of RUNNING items has reached threads_num.

    :return: True or False
    """
    running_length = ReqItem.status_count(ITEM_STATUS.RUNNING)
    return running_length >= int(config.load()['threads_num'])
def mongo_insert(headers, host, method, postdata, uri, packet):
    """Insert a new scan task into MongoDB after domain/extension filtering.

    Tasks are deduplicated by request hash; requests generated by the
    scanner itself (Gdscan header) are ignored.
    """
    u = urlparse.urlparse(uri)
    url = uri.split(u.netloc)[-1]  # TODO ??? (kept from original; value is unused)
    white_domain = config.load()['white_domain']
    black_domain = config.load()['black_domain']
    black_ext = config.load()['black_ext']
    path_lower = u.path.lower()
    netloc = u.netloc.lower().split(':')[0]
    for ext in black_ext.split(','):
        if path_lower.endswith("." + ext):
            return
    for domain in black_domain.split(','):
        if netloc.endswith(domain):
            return
    if white_domain != "":
        for domain in white_domain.split(','):
            if not netloc.endswith(domain):
                return
    reqhash = get_hash(host, uri, postdata)
    # dedupe by hash
    if 'Gdscan' in headers.keys() or ReqItem.hash_exists(reqhash):
        return
    # document schema
    new_item = {
        'hash': reqhash,
        'request': {
            'headers': headers,
            'host': host,
            'method': method,
            'postdata': postdata,
            'url': uri,
            'packet': packet
        },
        'response': {},
        'status': ITEM_STATUS.WAITING,
        'vulnerable': 0
    }
    conn.insert(new_item)
def pull():
    """Fetch mesh data and store entries whose mac/hostname/node_id matches a target."""
    conf = load()
    mesh = query(conf['connection'])
    now = timestamp()
    targets = [c.lower() for c in conf['targets']]
    for mac, data in mesh.items():
        names = [mac, data.get('hostname'), data.get('node_id')]
        if any(name.lower() in targets for name in names):
            store(now, data)
def post(self):
    """Save the settings for the selected proxy type, then jump back to its page."""
    proxy_type = self.get_argument("type")
    if proxy_type in ("mix_proxy", "scapy", "tornado"):
        conf = config.load()
        if proxy_type == "mix_proxy":
            conf["mix_addr"] = secure.clear(self.get_argument("mix_addr"))
            conf["mix_port"] = secure.clear(self.get_argument("mix_port"))
        elif proxy_type == "scapy":
            conf['scapy_out'] = secure.clear(self.get_argument('scapy_out'))
            conf['scapy_network_card'] = self.get_argument('scapy_network_card')
        else:  # tornado
            conf['tornado_address'] = secure.clear(self.get_argument('tornado_address'))
            conf['tornado_port'] = secure.clear(self.get_argument('tornado_port'))
        config.update(conf)
    return self.write(out.jump("/proxy?type=" + proxy_type))
def push():
    """Copy the tracked files to the configured remote destination over scp."""
    conf = load()
    destination = '{}:{}'.format(conf['upload']['ssh'], conf['upload']['folder'])
    cmd = 'scp {} {}'.format(' '.join(_files()), destination)
    code, out, err = shell(cmd)
    if code != 0:
        print('ERROR:', err)
def get(self):
    """Render the MongoDB dashboard: queue lists plus a CSS status class per hash."""
    def hashes(query):
        # project out only the hash field of matching documents
        return [doc['hash'] for doc in conn.find(query, {'_id': 0, 'hash': 1})]

    waiting = hashes({'status': ITEM_STATUS.WAITING})
    running = hashes({'status': ITEM_STATUS.RUNNING})
    finished = hashes({'status': ITEM_STATUS.FINISHED})
    vulnerable = hashes({'vulnerable': 1})
    css_classes = ['success', 'info', 'warning', 'danger']
    stats_all = {}
    for group in (waiting, running, finished, vulnerable):
        for item_hash in group:  # renamed: `hash` shadowed the builtin
            item = ReqItem(hash=item_hash)
            try:
                stat = item.data_obj['response']['stat']
            except:
                stat = 0
            stats_all[item_hash] = css_classes[stat]
    self.render("index.html",
                waiting_num=len(waiting),
                running_num=len(running),
                finished_num=len(finished),
                vulnerable_num=len(vulnerable),
                waiting=waiting,
                running=running,
                finished=finished,
                vulnerable=vulnerable,
                time=config.load()["flush_time"],
                stats_all=stats_all)
    return
def get(self):
    """Render one page of a Redis request list with pagination and per-request status."""
    list_type = self.get_argument("type")
    try:
        start = int(self.get_argument("start"))
    except:
        start = 0
    page_num = int(config.load()['page_num'])
    length = conn.llen(list_type)
    last = start + page_num - 1
    page_now = start / page_num + 1
    end_page = -1 * ((-1 * length) / page_num)  # ceiling division via negation
    end_num = end_page * page_num - page_num
    # clamp the visible page window to [1, end_page]
    pages_first = page_now - 2 if page_now - 2 >= 1 else 1
    pages_last = page_now + 2 if page_now + 2 <= end_page else end_page
    pages = range(int(pages_first), int(pages_last) + 1)
    content = conn.lrange(list_type, start, last)
    req_content = {}
    for reqhash in content:
        decode_content = json.loads(
            base64.b64decode(conn.hget("request", reqhash)))
        try:
            decode_results = json.loads(
                base64.b64decode(conn.hget("results", reqhash)))
        except:
            decode_results = {'stat': 0}
        req_content[reqhash] = decode_content['method'] + "|" + decode_content['url']
        # split the url in 80 chars
        req_content[reqhash] += "|"
        for i in range(int(len(req_content[reqhash].split("|")[1]) / 80) + 1):
            req_content[reqhash] += req_content[reqhash].split(
                "|")[1][i * 80:i * 80 + 80] + "\n"
        css_classes = ['success', 'info', 'warning', 'danger']
        req_content[reqhash] += "|" + css_classes[decode_results['stat']]
    return self.render("list.html", page_now=page_now, page_num=page_num,
                       pages=pages, content=content, list_type=list_type,
                       length=length, req_content=req_content, end_num=end_num)
def main():
    """Entry point for voctocore: logging, SIGINT handler, config load, run the core."""
    # parse command-line args
    from lib import args
    args.parse()
    from lib.args import Args
    # colorize stderr output when forced, or when auto and attached to a TTY
    docolor = (Args.color == 'always') \
        or (Args.color == 'auto' and sys.stderr.isatty())
    handler = LogHandler(docolor, Args.timestamp)
    logging.root.addHandler(handler)
    # -v maps to INFO, -vv (or more) to DEBUG; default is WARNING
    if Args.verbose >= 2:
        level = logging.DEBUG
    elif Args.verbose == 1:
        level = logging.INFO
    else:
        level = logging.WARNING
    logging.root.setLevel(level)
    # make killable by ctrl-c
    logging.debug('setting SIGINT handler')
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    logging.info('Python Version: %s', sys.version_info)
    logging.info('GStreamer Version: %s', Gst.version())
    logging.debug('loading Config')
    from lib import config
    config.load()
    # init main-class and main-loop
    logging.debug('initializing Voctocore')
    voctocore = Voctocore()
    logging.debug('running Voctocore')
    voctocore.run()
def new_scan(reqhash, item, rules):
    """Run every enabled rule scanner against one task (MongoDB backend).

    The aggregated result is written back onto the item, which is then
    marked finished (and flagged vulnerable when any rule hit).
    """
    request = requests_convert(item.data_obj['request'])
    out.good("start new mission: %s" % reqhash)
    request_message = []
    request_result = {}
    vulnerable = 0
    for rule in rules:
        if config.load()['scan_stat'].lower() == "true":
            # resolve the scanner by name instead of eval(): a plain
            # globals() lookup cannot evaluate arbitrary expressions
            scan_func = globals()[rule + "_scan"]
            message = scan_func(request, int(config.load()['scan_level']))
            request_stat = message['request_stat']
            if request_stat > vulnerable:
                vulnerable = request_stat
            request_message = message['message'].split("|,|")
            request_result[rule] = {
                "stat": request_stat,
                "message": request_message
            }
    request_result['stat'] = vulnerable
    if vulnerable > 0:
        item.mark_vulnerable()              # flag the task as vulnerable
    item.set_result(request_result)         # persist the scan results
    item.set_status(ITEM_STATUS.FINISHED)   # update the task status
def pull():
    """Query the mesh and persist data for nodes whose identifiers match a target."""
    conf = load()
    mesh = query(conf['connection'])
    now = timestamp()
    for mac, data in mesh.items():
        candidates = (mac, data.get('hostname'), data.get('node_id'))
        matched = any(
            name.lower() in [c.lower() for c in conf['targets']]
            for name in candidates
        )
        if matched:
            store(now, data)
def scan_start():
    """Main scanner loop (Redis backend): pop waiting hashes and spawn scan threads.

    Runs while scan_stat is "true"; hashes without a scannable GET query or
    POST body go straight from "running" to "finished".
    """
    while config.load()['scan_stat'].lower() == "true":
        try:
            while thread_filled():
                time.sleep(5)
            reqhash = conn.rpoplpush("waiting", "running")
            if not reqhash:
                time.sleep(10)
                continue
            reqed = conn.hget("request", reqhash)
            request = json.loads(ds(reqed))
            rules = config.load_rule()['scan_type']
            url = urlparse.urlparse(request['url']).query
            if (request['method'] == "GET" and url != "") or \
                    (request['method'] == "POST" and (request["postdata"] != "" or url != "")):
                t = threading.Thread(target=new_scan,
                                     args=(reqhash, requests_convert(request), rules))
                t.start()
            else:
                # nothing scannable: skip straight to finished
                conn.lrem("running", 1, reqhash)
                conn.lpush("finished", reqhash)
        except Exception as e:  # modernized from py2-only `except Exception,e`
            out.error(str(e))
def run():
    """Full CryptoAnalyzer pipeline: load config, fetch data, analyze, exit."""
    logger.info("Starting CryptoAnalyzer..")
    configurations = config.load()
    # fetch market data and persist it to disk
    fetched_data = client.fetch_data(configurations, save=True)
    # weekly means, then maximum relative span over the single best week
    analyze.weekly_mean_prices(fetched_data, configurations, save=True)
    analyze.maximum_relative_spans(fetched_data, configurations, n_highest_weeks=1)
    logger.info("Done. Exiting..")
    exit(0)
def setUpModule():
    """Load the test-environment configuration once, before any test in this module."""
    test_config_path = 'tests/environment/config.ini'
    config.load(test_config_path)
def main():
    """GUI entry point for voctogui: logging, core connection, clock, run the GUI."""
    # parse command-line args
    from lib import args
    args.parse()
    from lib.args import Args
    docolor = (Args.color == 'always') \
        or (Args.color == 'auto' and sys.stderr.isatty())
    from lib.loghandler import LogHandler
    handler = LogHandler(docolor, Args.timestamp)
    logging.root.addHandler(handler)
    # -v maps to INFO, -vv (or more) to DEBUG; default is WARNING
    if Args.verbose >= 2:
        level = logging.DEBUG
    elif Args.verbose == 1:
        level = logging.INFO
    else:
        level = logging.WARNING
    logging.root.setLevel(level)
    # make killable by ctrl-c
    logging.debug('setting SIGINT handler')
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    logging.info('Python Version: %s', sys.version_info)
    logging.info('GStreamer Version: %s', Gst.version())
    logging.debug('loading Config')
    from lib import config
    config.load()
    from lib.config import Config
    # establish a synchronus connection to server
    import lib.connection as Connection
    Connection.establish(
        Args.host if Args.host else Config.get('server', 'host')
    )
    # fetch config from server
    Config.fetchServerConfig()
    # Warn when connecting to a non-local core without preview-encoders enabled
    # The list-comparison is not complete
    # (one could use a local hostname or the local system ip),
    # but it's only here to warn that one might be making a mistake
    use_previews = Config.getboolean('previews', 'enabled') \
        and Config.getboolean('previews', 'use')
    looks_like_localhost = Config.get('server', 'host') in ['::1',
                                                            '127.0.0.1',
                                                            'localhost']
    if not use_previews and not looks_like_localhost:
        logging.warning(
            'Connecting to `%s` (which looks like a remote host) '
            'might not work without enabeling the preview encoders '
            '(set `[previews] enabled=true` on the core) or it might saturate '
            'your ethernet link between the two machines.',
            Config.get('server', 'host')
        )
    import lib.connection as Connection
    import lib.clock as ClockManager
    # obtain network-clock
    ClockManager.obtainClock(Connection.ip)
    # switch connection to nonblocking, event-driven mode
    Connection.enterNonblockingMode()
    # init main-class and main-loop
    # (this binds all event-hander on the Connection)
    logging.debug('initializing Voctogui')
    voctogui = Voctogui()
    # start the Mainloop and show the Window
    logging.debug('running Voctogui')
    voctogui.run()
COLOR_BLUE = (0x00, 0x00, 0xFF) COLOR_GREEN = (0x00, 0xFF, 0x00) COLOR_RED = (0xFF, 0x00, 0x00) COLOR_WHITE = (0xFF, 0xFF, 0xFF) FONT_END = 64 FONT_MINI = 10 FONT_NORMAL = 16 BACKGROUND_COLOR = COLOR_BLACK PLAYER_COLOR = COLOR_BLUE TRAIL_COLOR = COLOR_RED # Config variables # pylint: disable=I0011,C0103 config = config.load() # pylint: disable=I0011,I0012; enable=C0103 ACCEL_SPEED = config["accel_speed"] ENTITY_SIZE = config["entity_size"] GAME_WIDTH = config["screen"]["width"] GAME_HEIGHT = config["screen"]["height"] MAX_ENEMY_COUNT = config["max_enemies"] MAX_TRAIL_COUNT = config["max_trail"] TIME_TO_LOSE = config["lose_time"] # Seconds # pylint: disable=I0011,invalid-name class Direction(Enum): UP = 1 DOWN = 2 LEFT = 3
# it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from lib import config import os config.load() policies_dir = config.get('global', 'policies_dir') policies_ext = config.get('global', 'policies_ext') for f in os.listdir(policies_dir): dirname = os.path.join(policies_dir, f) if os.path.isdir(dirname): for f in os.listdir(dirname): if f.endswith(policies_ext): filename = os.path.join(dirname, f) lines = open(filename).readlines() blocks = filter(lambda x: x.startswith('block('), lines) if blocks: print filename, 'containes the following block lines:' # write each block rule in new line; [:-1] removes last newline print ''.join(map(lambda x: x.strip() + "\n", blocks))[:-1]