import os
from os.path import dirname as dirn

from jq import jq


class Mapping:
    FILE_NAME = 'mapping'
    ROOT_PATH = os.path.realpath(dirn(os.path.abspath(__file__)))
    CONFIG_PATH = os.path.realpath(dirn(dirn(os.path.abspath(__file__)))) + '/config/'
    CFG_JSON_PATH = CONFIG_PATH + FILE_NAME

    def __init__(self, mapping_cfg):
        cfg_path = os.path.normpath(self.ROOT_PATH + "/" + mapping_cfg)
        print("mapping cfg path: ", cfg_path)
        self.mapping_hash = self.mapping_to_hash(cfg_path)

    def remove_ws(self, s):  # parameter renamed from `str` to avoid shadowing the builtin
        return s.strip().replace("'", "")

    def get_first_key(self, my_dict):
        return next(iter(my_dict))

    def mapping_to_hash(self, cfg_path):
        formats = {}
        ary_from = []
        ary_to = []
        val = key = ''
        with open(cfg_path) as data_file:
            for line in data_file:
                if line == '\n':
                    # a blank line closes the current format block
                    jq_program = ', '.join(str(x) for x in ary_from)
                    print("jq: ", jq_program)
                    formats[key] = {val: {"jq": jq_program, "map_to": ary_to}}
                    ary_from = []
                    ary_to = []
                elif line[0] == "'":
                    # a quoted line declares a new format: <key> == <value>
                    format_json = line.split("==")
                    key = self.remove_ws(format_json[0])
                    val = self.remove_ws(format_json[1])
                    formats[key] = {}
                else:
                    # ordinary lines map a jq selector to an output key: <from> -> <to>
                    mapping_json = line.split("->")
                    ary_from.append(self.remove_ws(mapping_json[0]))
                    ary_to.append(self.remove_ws(mapping_json[1]))
        # flush the last block only if the file did not end with a blank line;
        # an unconditional flush would clobber it with empty arrays
        if ary_from:
            jq_program = ', '.join(str(x) for x in ary_from)
            formats[key] = {val: {"jq": jq_program, "map_to": ary_to}}
        return formats

    def map_alert_to_hash(self, idea_alert):
        hash_formated = {}
        for k in self.mapping_hash.keys():
            kk = self.get_first_key(self.mapping_hash[k])
            # `json_idea` was undefined here; the alert being mapped is `idea_alert`
            if jq(k).transform(idea_alert, multiple_output=True)[0] == kk:
                hash_out = self.mapping_hash[k][kk]
                jq_output = jq(hash_out["jq"]).transform(idea_alert, multiple_output=True)
                for x in range(len(jq_output)):
                    hash_formated[hash_out["map_to"][x]] = jq_output[x]
        return hash_formated
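# For reference, a hypothetical config in the format mapping_to_hash() parses;
# the field names below are illustrative assumptions, not from a real
# deployment. A quoted "==" line declares a format, "->" lines map jq
# selectors to output keys, and a blank line closes the block:
#
#   '.Format' == 'IDEA0'
#   .DetectTime -> time
#   .Category -> category
#
# which mapping_to_hash() would turn into:
#
#   {".Format": {"IDEA0": {"jq": ".DetectTime, .Category",
#                          "map_to": ["time", "category"]}}}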
import json
import os
import re
from os.path import dirname as dirn


class MyJson:
    ROOT_PATH = os.path.realpath(dirn(os.path.abspath(__file__)))

    @classmethod
    def load_json_file_with_comments(cls, filename):
        filename = os.path.normpath(cls.ROOT_PATH + "/" + filename)
        try:
            with open(filename) as data_file:
                # join backslash-continued lines, then strip '#' comments
                input_str = re.sub(r'\\\n', '', data_file.read())
                input_str = re.sub(r'#.*\n', '\n', input_str)
            return json.loads(input_str)
        except IOError:
            print("Wrong file or file path", filename)
        except ValueError:
            print("Invalid json", filename)
        except Exception as e:
            print(e)
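# A minimal, self-contained usage sketch of MyJson: write a commented JSON
# file next to this module and load it back. The file name is illustrative.
if __name__ == '__main__':
    sample = '{\n    "threshold": 5,  # alerts per hour\n    "enabled": true\n}\n'
    with open(os.path.join(MyJson.ROOT_PATH, 'sample_cfg.json'), 'w') as f:
        f.write(sample)
    cfg = MyJson.load_json_file_with_comments('sample_cfg.json')
    print(cfg)  # -> {'threshold': 5, 'enabled': True}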
#!/usr/bin/env python
from __future__ import unicode_literals

import os
from os.path import dirname as dirn
import sys

sys.path.insert(0, dirn(dirn(os.path.abspath(__file__))))
import youtube_dl

ZSH_COMPLETION_FILE = "youtube-dl.zsh"
ZSH_COMPLETION_TEMPLATE = "devscripts/zsh-completion.in"


def build_completion(opt_parser):
    opts = [opt for group in opt_parser.option_groups
            for opt in group.option_list]
    opts_file = [opt for opt in opts if opt.metavar == "FILE"]
    opts_dir = [opt for opt in opts if opt.metavar == "DIR"]

    fileopts = []
    for opt in opts_file:
        if opt._short_opts:
            fileopts.extend(opt._short_opts)
        if opt._long_opts:
            fileopts.extend(opt._long_opts)

    diropts = []
    for opt in opts_dir:
        if opt._short_opts:
            diropts.extend(opt._short_opts)
        if opt._long_opts:
            diropts.extend(opt._long_opts)

    flags = [opt.get_opt_string() for opt in opts]

    # fill the template placeholders and write the completion script
    with open(ZSH_COMPLETION_TEMPLATE) as f:
        template = f.read()

    template = template.replace("{{fileopts}}", "|".join(fileopts))
    template = template.replace("{{diropts}}", "|".join(diropts))
    template = template.replace("{{flags}}", " ".join(flags))

    with open(ZSH_COMPLETION_FILE, "w") as f:
        f.write(template)


parser = youtube_dl.parseOpts()[0]
build_completion(parser)
#!/usr/bin/env python
from __future__ import unicode_literals

import os
from os.path import dirname as dirn
import sys

sys.path.insert(0, dirn(dirn(os.path.abspath(__file__))))
import plura_dl

BASH_COMPLETION_FILE = "plura-dl.bash-completion"
BASH_COMPLETION_TEMPLATE = "devscripts/bash-completion.in"


def build_completion(opt_parser):
    opts_flag = []
    for group in opt_parser.option_groups:
        for option in group.option_list:
            # collect every long flag
            opts_flag.append(option.get_opt_string())
    with open(BASH_COMPLETION_TEMPLATE) as f:
        template = f.read()
    with open(BASH_COMPLETION_FILE, "w") as f:
        # substitute the {{flags}} placeholder in the template
        filled_template = template.replace("{{flags}}", " ".join(opts_flag))
        f.write(filled_template)


parser = plura_dl.parseOpts()[0]
build_completion(parser)
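# For context, a sketch of what devscripts/bash-completion.in might contain.
# This is an assumption modeled on similar completion templates, not the
# actual plura-dl file; the generator above only replaces "{{flags}}":
#
#   __plura_dl()
#   {
#       local cur="${COMP_WORDS[COMP_CWORD]}"
#       COMPREPLY=( $(compgen -W "{{flags}}" -- "$cur") )
#   }
#   complete -F __plura_dl plura-dl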
import os
from os.path import dirname as dirn, isfile as isf, join as jn


class DirectoryWalker:
    # forward iterator that traverses a directory tree
    # (preamble restored from the classic DirectoryWalker recipe; the
    #  original snippet began inside __getitem__)
    def __init__(self, directory):
        self.stack = [directory]
        self.files = []
        self.index = 0

    def __getitem__(self, index):
        while True:
            try:
                file = self.files[self.index]
                self.index += 1
            except IndexError:
                # pop next directory from stack
                self.directory = self.stack.pop()
                self.files = os.listdir(self.directory)
                self.index = 0
            else:
                # got a filename
                fullname = jn(self.directory, file)
                if os.path.isdir(fullname) and not os.path.islink(fullname):
                    self.stack.append(fullname)
                return fullname


if __name__ == '__main__':
    path1 = '.'
    for file in DirectoryWalker(path1):
        if isf(file) and file.endswith('.html'):
            dirname1 = dirn(file)
            full_name = file
            dirs = dirname1.split('/')
            last_dir = dirs[-1]
            new_name = last_dir + '.html'
            if not os.path.exists(new_name):
                try:
                    os.rename(file, jn(dirname1, new_name))
                    print('file %s renamed to %s' % (full_name, new_name))
                except OSError as e:
                    print(e)
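# The same traversal with the standard library's os.walk, as a point of
# comparison; a minimal sketch of the rename loop above, not a drop-in
# replacement (it keeps the original's cwd-relative exists() quirk).
import os
from os.path import basename, join

for root, dirs, files in os.walk('.'):
    for name in files:
        if name.endswith('.html'):
            new_name = basename(root) + '.html'
            if not os.path.exists(new_name) and name != new_name:
                os.rename(join(root, name), join(root, new_name))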
from os.path import abspath as absp, dirname as dirn


def find_python_dist_path():
    # parse the path out of the module repr, e.g.
    # "<module 'django' from '/usr/lib/python3/dist-packages/django/__init__.py'>"
    import django
    import re
    django_path = dirn(absp(
        re.search(r".*from '(?P<path>.*)'.*", str(django)).group('path')))
    dist_path = absp(dirn(django_path))
    return dist_path
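# A sturdier variant of the same lookup, assuming only that django is
# importable: the module's __file__ attribute yields the path directly,
# with no parsing of the repr string.
def find_python_dist_path_via_file():
    import django
    from os.path import abspath as absp, dirname as dirn
    # .../dist-packages/django/__init__.py -> .../dist-packages
    return absp(dirn(dirn(django.__file__)))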
import os
import subprocess
from argparse import ArgumentParser
from os.path import abspath as absp, join as joinp, dirname as dirn, basename as basen

site_path = dirn(absp(__file__))
site_name = basen(site_path)
log_path = joinp(site_path, 'log')
args = None  # command-line arguments

vhost = """
<VirtualHost {listen}>
    ServerName {server_name}
    ServerAlias {server_alias}
    ServerAdmin {server_admin}
    DocumentRoot {site_path}

    {aliases}

    WSGIScriptAlias / {site_path}/{site_name}/wsgi.py
    WSGIDaemonProcess {server_alias} python-path={site_path}:{python_path} lang='ko_KR.UTF-8' locale='ko_KR.UTF-8' {processes} {threads} display-name=%{{GROUP}}
    WSGIProcessGroup {server_alias}
    WSGIPassAuthorization On

    <Directory {site_path}/{site_name}>
        <Files wsgi.py>
            Require all granted
        </Files>
    </Directory>
</VirtualHost>
"""
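# A minimal sketch of rendering the vhost template; every value below is a
# placeholder, not a real deployment setting, and the original template was
# truncated after </Directory>, so only the closing </VirtualHost> is assumed.
# Note that {{GROUP}} survives str.format() as the literal %{GROUP} Apache expects.
example = vhost.format(
    listen="*:80",
    server_name="example.com",
    server_alias="example",
    server_admin="admin@example.com",
    site_path=site_path,
    site_name=site_name,
    aliases="Alias /static {}/static".format(site_path),
    python_path="/usr/lib/python3/dist-packages",
    processes="processes=2",
    threads="threads=15",
)
print(example)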
#!/usr/bin/env python
from __future__ import unicode_literals

import os
from os.path import dirname as dirn
import sys

sys.path.append(dirn(dirn(os.path.abspath(__file__))))
import youtube_dl

BASH_COMPLETION_FILE = "youtube-dl.bash-completion"
BASH_COMPLETION_TEMPLATE = "devscripts/bash-completion.in"


def build_completion(opt_parser):
    opts_flag = []
    for group in opt_parser.option_groups:
        for option in group.option_list:
            # collect every long flag
            opts_flag.append(option.get_opt_string())
    with open(BASH_COMPLETION_TEMPLATE) as f:
        template = f.read()
    with open(BASH_COMPLETION_FILE, "w") as f:
        # substitute the {{flags}} placeholder in the template
        filled_template = template.replace("{{flags}}", " ".join(opts_flag))
        f.write(filled_template)


parser = youtube_dl.parseOpts()[0]
build_completion(parser)
#!/usr/bin/env python
from __future__ import unicode_literals

import optparse
import os
from os.path import dirname as dirn
import sys

sys.path.append(dirn(dirn(os.path.abspath(__file__))))
import youtube_dl
from youtube_dl.utils import shell_quote

FISH_COMPLETION_FILE = 'youtube-dl.fish'
FISH_COMPLETION_TEMPLATE = 'devscripts/fish-completion.in'

EXTRA_ARGS = {
    'recode-video': ['--arguments', 'mp4 flv ogg webm mkv', '--exclusive'],

    # Options that need a file parameter
    'download-archive': ['--require-parameter'],
    'cookies': ['--require-parameter'],
    'load-info': ['--require-parameter'],
    'batch-file': ['--require-parameter'],
}


def build_completion(opt_parser):
    commands = []
    for group in opt_parser.option_groups:
        for option in group.option_list:
            # build one fish `complete` invocation per option
            long_option = option.get_opt_string().strip('-')
            complete_options = ['--long-option', long_option]
            if option._short_opts:
                complete_options += ['--short-option', option._short_opts[0].strip('-')]
            if option.help != optparse.SUPPRESS_HELP:
                complete_options += ['--description', option.help]
            complete_options += EXTRA_ARGS.get(long_option, [])
            commands.append('complete --command youtube-dl %s' % shell_quote(complete_options))

    with open(FISH_COMPLETION_TEMPLATE) as f:
        template = f.read()
    filled_template = template.replace('{{commands}}', '\n'.join(commands))
    with open(FISH_COMPLETION_FILE, 'w') as f:
        f.write(filled_template)


parser = youtube_dl.parseOpts()[0]
build_completion(parser)
import glob
import json
import logging
import os
import shutil
import threading
import time
from collections import defaultdict
from datetime import datetime
from os.path import dirname as dirn
# Mapping, IdeaMapping, AlertExtractor, bcolors and DEBUG come from the
# surrounding project modules.


class FolderDispatcher(threading.Thread):
    # logging.basicConfig(filename="example.log", level=logging.DEBUG)
    ROOT_PATH = os.path.realpath(dirn(os.path.abspath(__file__)))
    JSONS_PATH = ROOT_PATH + '/jsons/'
    JSONS_PROCESSED_PATH = ROOT_PATH + '/jsons/processed/'
    JSONS_ERROR_PROCESSED_PATH = ROOT_PATH + '/jsons/error_processed/'

    def __init__(self, shared_array, event, json_path, mapping_cfg):
        threading.Thread.__init__(self)
        self.shared_array = shared_array
        self.shared_thread_event = event
        self.daemon = True
        self.JSONS_PATH = os.path.normpath(self.ROOT_PATH + "/" + json_path)
        self.m = Mapping(mapping_cfg)
        self.mapping_cfg = mapping_cfg
        self.freq_second = defaultdict(float)
        print("FolderDispatcher JSON PATH: " + self.JSONS_PATH)

    def run(self):
        logging.debug('running FolderDispatcher')
        self.folder_dispatcher()

    def reload_cfg(self):
        self.m = Mapping(self.mapping_cfg)

    def move_to_folder(self, src_path, dst_directory):
        if not DEBUG:
            # in production mode processed files are simply deleted
            if os.path.exists(src_path):
                os.remove(src_path)
            return
        if not os.path.exists(dst_directory):
            os.makedirs(dst_directory)
        filename = os.path.basename(src_path)
        dst_path = "{}{}".format(dst_directory, filename)
        shutil.move(src_path, dst_path)

    def move_to_processed_folder(self, path):
        return self.move_to_folder(path, self.JSONS_PROCESSED_PATH)

    def move_to_error_folder(self, path):
        return self.move_to_folder(path, self.JSONS_ERROR_PROCESSED_PATH)

    def folder_dispatcher(self):
        cnt = 0
        while True:
            # use a relative path instead of an absolute one
            for filename in glob.glob(os.path.join(self.JSONS_PATH, '*.json')):
                # if size == 0 there is no data in the file yet: the
                # generator only created it and has not dumped any data
                statinfo = os.stat(filename)
                if statinfo.st_size == 0:
                    continue
                with open(filename) as data_file:
                    try:
                        data = json.load(data_file)
                        print("data: ", data)
                        #idea_alert = self.m.map_alert_to_hash(data)
                        idea_alert = IdeaMapping.map_alert_to_hash(data)
                        da_alert = AlertExtractor.parse_alert(idea_alert)
                        self.freq_second[datetime.now().strftime("%d%m%Y%H:%M:%S")] += 1
                        #if cnt % 1000 == 0: print(self.freq_second)
                        print(bcolors.WARNING + str(cnt) + bcolors.ENDC)
                        cnt += 1
                        #self.move_to_processed_folder(filename)
                        if da_alert is None:
                            continue
                        self.shared_array.append(da_alert)
                    except Exception as e:
                        logging.error("for file: {} error: {}".format(filename, e))
                        print("for file: {} error: {}".format(filename, e))
                        #self.move_to_error_folder(filename)
            self.shared_thread_event.set()
            time.sleep(1)
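# A minimal sketch of driving FolderDispatcher directly; "jsons" and "mapping"
# are placeholder paths for whatever the deployment actually uses.
if __name__ == '__main__':
    shared = []
    ready = threading.Event()
    fd = FolderDispatcher(shared, ready, "jsons", "mapping")
    fd.start()
    ready.wait()   # blocks until the dispatcher finishes its first scan pass
    print(shared)  # da_alert records collected so far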
import logging
import os
import threading
from os.path import dirname as dirn
# AlertDatabase, FolderDispatcher, RabbitMqDispatcher, CaptureHeap, Score,
# CaptureRequest and bcolors come from the surrounding project modules.


class Filter(threading.Thread):
    ROOT_PATH = os.path.realpath(dirn(os.path.abspath(__file__)))

    def __init__(self, argv_param, dispatcher_options, alert_database_cfg, mapping_cfg,
                 time_machine_params, scan_alg_params_cfg, probability_db_file):
        threading.Thread.__init__(self)
        self.shared_array = list()
        self.shared_thread_event = threading.Event()
        self.argv_param = argv_param
        self.alert_database = AlertDatabase(alert_database_cfg, probability_db_file)
        self.global_filter_cnt = 0
        self.global_capture_filter_cnt = 0
        self.dispatcher_options = dispatcher_options
        self.mapping_cfg = mapping_cfg
        self.time_machine_params = time_machine_params
        self.cfg_scan_alg_params = os.path.normpath(self.ROOT_PATH + "/" + scan_alg_params_cfg)
        self.capture_heap = CaptureHeap(scan_alg_params_cfg)
        Score.__init__(scan_alg_params_cfg)
        CaptureRequest(self.time_machine_params)
        self.fd = None
        self.daemon = True

    def reload_cfg(self):
        if self.fd:
            self.fd.reload_cfg()
        if self.alert_database:
            self.alert_database.reload_cfg()
        Score.load_cfg()

    def run(self):
        self.run_filter()

    def run_filter(self):
        logging.debug('running Filter')
        print("self.argv_param:", self.argv_param)
        if self.argv_param == '-f':
            self.fd = FolderDispatcher(self.shared_array, self.shared_thread_event,
                                       self.dispatcher_options[0], self.mapping_cfg)
        elif self.argv_param == '-RMQ':
            self.fd = RabbitMqDispatcher(self.shared_array, self.shared_thread_event,
                                         self.dispatcher_options, self.mapping_cfg)
        self.fd.start()
        self.calculate_price()
        #self.fd.join()

    def calculate_price(self):
        while True:
            if len(self.shared_array) == 0:
                self.shared_thread_event.clear()
                self.shared_thread_event.wait()  # block until the dispatcher signals new data
            else:
                idea_alert = self.shared_array.pop()
                # the idea alert carries several IP address fields; add it to
                # the database and get back the addresses it contained
                ips = self.alert_database.add(idea_alert)
                for ip in ips:
                    print(ip)
                    score = self.alert_database.get_last_score(ip)
                    self.global_filter_cnt += 1
                    category = self.alert_database.get_category_with_max_score_from_last_alert(ip)
                    probability = self.alert_database.get_probability_by_category(category)
                    cnt_hour = self.alert_database.get_category_cnt_by_ip(ip, category)
                    price = self.alert_database.get_static_price(category)
                    score = Score.get_score_alg(cnt_hour, price, probability)
                    capture_params = self.alert_database.get_capture_params(ip)
                    if score >= 1 and self.capture_heap.add_to_heap(capture_params, score):
                        CaptureRequest.send(capture_params)
                        self.global_capture_filter_cnt += 1
                        print(bcolors.WARNING
                              + "{}/{}".format(self.global_capture_filter_cnt, self.global_filter_cnt)
                              + bcolors.ENDC)
import json
import os
from collections import defaultdict
from datetime import datetime, timedelta
from os.path import dirname as dirn

import pytz


class AlertDatabase:
    ROOT_PATH = os.path.realpath(dirn(os.path.abspath(__file__)))

    def __init__(self, cfg_path, probability_db_file):
        self.database = {}
        self.database_cfg = defaultdict(dict)
        self.alert_probability = defaultdict(float)
        self.CFG_JSON_PATH = os.path.normpath(self.ROOT_PATH + "/" + cfg_path)
        self.PROBABILITY_DB_FILE = os.path.normpath(self.ROOT_PATH + "/" + probability_db_file)
        self.load_cfg()
        self.load_probability()

    #☑️ TESTED
    def load_probability(self):
        filename = self.PROBABILITY_DB_FILE
        if os.path.isfile(filename):
            self.alert_probability = defaultdict(float)
            with open(filename) as data_file:
                json_dict = json.load(data_file)
                for k, v in json_dict.items():
                    self.alert_probability[k] = v

    #☑️ TESTED
    def reload_cfg(self):
        self.database_cfg = defaultdict(dict)
        self.load_cfg()

    def get_most_significant_category_from_array(self, category_ary):
        max_score = 0
        max_category = ""
        for category in category_ary:
            score = self.get_static_price(category)
            if max_score < score:
                max_score = score
                max_category = category
        return max_category

    #☑️ TESTED
    def get_static_price_from_cfg(self, category):
        if category == "":
            return -1
        try:
            return self.database_cfg[category]["Score"]
        except Exception:
            # todo: log this in config / send email with json alert
            return self.database_cfg["Default"]["Score"]

    #☑️ TESTED
    def get_static_price(self, category):
        category_score = 0
        if type(category) is list:
            for cat in category:
                category_score = max(self.get_static_price_from_cfg(cat), category_score)
        else:
            category_score = max(self.get_static_price_from_cfg(category), category_score)
        return category_score

    #☑️ TESTED
    def get_category_with_max_score_from_last_alert(self, ip):
        categories = self.get_last_category_array(ip)
        best_category = ""
        best_score = 0
        if type(categories) is list:
            for category in categories:
                score = self.get_static_price_from_cfg(category)
                if score > best_score:
                    best_score = score
                    best_category = category
        else:
            best_category = categories
        return best_category

    def load_cfg(self):
        with open(self.CFG_JSON_PATH) as data_file:
            data = json.load(data_file)
            for cfg_line_dict in data:
                self.load_cfg_recursion(cfg_line_dict, self.database_cfg)

    def load_cfg_recursion(self, dict_in, dict_out, key_acc=""):
        for key, val in dict_in.items():
            if not isinstance(val, dict):
                dict_out[key_acc][key] = val
            else:
                k = (key_acc + "." + key if len(key_acc) > 0 else key)
                self.load_cfg_recursion(val, dict_out, k)

    @classmethod
    def get_time_machine_direction(cls, direction):
        return {
            'S': "src_ip",
            'T': "dst_ip",
            'BS': "bidir_ip",
            'BT': "bidir_ip",
            'BB': "bidir_ip",
        }.get(direction)

    #☑️ TESTED
    def get_capture_params(self, ip):
        sources = []
        targets = []
        last_alert = self.get_last_alert_event(ip)
        if last_alert is None:
            return
        if ip[0] == "S":
            sources.append(ip)
            targets.append(last_alert[3])
        else:
            targets.append(ip)
            sources.append(last_alert[3])
        # copy, so category overrides do not mutate the shared "Default" cfg
        default_parameters = dict(self.database_cfg["Default"])
        categories = self.get_last_category_array(ip)
        category = self.get_most_significant_category_from_array(categories)
        if len(category) > 0:
            default_parameters.update(self.database_cfg[category])
        capture_parameters = {
            "direction": AlertDatabase.get_time_machine_direction(default_parameters["Direction"]),
            "packets": default_parameters["Packets"],
            "timeout": default_parameters["Timeout"],
            "category": category
        }
        capture_requests = []
        if default_parameters["Direction"] in ["S", "BS", "BB"]:
            for source_ip in sources:
                # each request gets its own dict; appending the same dict
                # repeatedly would alias every entry to the last ip_addr
                capture_requests.append(dict(capture_parameters, ip_addr=source_ip))
        if default_parameters["Direction"] in ["T", "BT", "BB"]:
            for target_ip in targets:
                capture_requests.append(dict(capture_parameters, ip_addr=target_ip))
        return capture_requests

    #☑️ TESTED
    def get_ip_prefix(self, ips):
        # list comprehensions instead of map(), so len() and concatenation work
        ip_ary = []
        ip_ary.append(["S" + x for x in ips[0]] if len(ips[0]) > 0 else [])
        ip_ary.append(["T" + x for x in ips[1]] if len(ips[1]) > 0 else [])
        return ip_ary

    # def get_max_score(self, ip):
    #     if ip not in self.database: return 0
    #     return max([x[1] for x in self.database[ip]])

    #☑️ TESTED
    def get_category_cnt_by_ip(self, ip, category):
        if ip in self.database and category in self.database[ip]:
            return self.database[ip][category]
        return 0

    #☑️ TESTED
    def get_categories_by_alert_index(self, ip, idx):
        if ip in self.database:
            return self.database[ip]["alerts"][idx][1]

    #☑️ TESTED
    def get_last_category_array(self, ip):
        category = []
        if ip in self.database and len(self.database[ip]["alerts"]) > 0:
            category = self.get_categories_by_alert_index(ip, -1)
        return category

    #☑️ TESTED
    def get_last_score(self, ip):
        if ip in self.database and len(self.database[ip]["alerts"]) > 0:
            return self.get_static_price(self.get_last_category_array(ip))
        return -1

    #☑️ TESTED
    def get_last_alert_event(self, ip):
        if ip in self.database and len(self.database[ip]["alerts"]) > 0:
            return self.database[ip]["alerts"][-1]
        return None

    def print_database(self):
        print("-----DATABASE-----")
        for key, value in self.database.items():
            print("{} -> {}".format(key, value))
        print("-----DATABASE-----")

    #☑️ TESTED
    def add_to_probability_database(self, categories):
        for category in categories:
            self.alert_probability[category] += 1
            self.alert_probability["cnt"] += 1
            if self.alert_probability["cnt"] % 1000 == 0:
                # persist the counters every 1000 categories seen
                with open(self.PROBABILITY_DB_FILE, 'w') as data_file:
                    json.dump(self.alert_probability, data_file)
        return self.alert_probability

    #☑️ TESTED
    def get_probability_by_category(self, category):
        return self.alert_probability[category] / self.alert_probability["cnt"]

    #❌ todo: test this
    def recalculate_cnt_hour(self, ip):
        # drop alerts older than one hour; rebuild the list rather than
        # deleting while enumerating it, which would skip elements
        date_min = datetime.now(pytz.timezone("UTC")) - timedelta(hours=1)
        kept_alerts = []
        for da_alert in self.database[ip]["alerts"]:
            if date_min > da_alert[0]:
                for category in da_alert[1]:
                    self.database[ip][category] -= 1
                self.database[ip]["cnt"] -= 1
            else:
                kept_alerts.append(da_alert)
        self.database[ip]["alerts"] = kept_alerts

    def parse_category_to_ip(self, ip, category_ary):
        # global per-category counter; expired alerts must decrement it again
        if ip not in self.database:
            return
        for category in category_ary:
            if category not in self.database[ip]:
                self.database[ip][category] = 0
            self.database[ip][category] += 1
        return self.database[ip]

    def get_database(self):
        # renamed: a method named `database` was shadowed by the
        # `self.database` attribute set in __init__
        return self.database

    def add(self, da_alert):
        if da_alert is None:
            return
        ips = self.get_ip_prefix(da_alert["ips"])
        source_ips = ips[0]
        target_ips = ips[1]
        ips_to_return = source_ips + target_ips
        # keeps the overall occurrence probability per category up to date
        self.add_to_probability_database(da_alert["category"])
        for i in range(0, 2):
            next_ary = ips[(i + 1) % 2]
            for ip in ips[i]:
                minor_ips_ary = next_ary
                if ip not in self.database:
                    self.database[ip] = {"cnt": 0, "alerts": []}
                self.database[ip]["alerts"].append([
                    da_alert["time"],
                    da_alert["category"],
                    da_alert["node"],
                    minor_ips_ary
                ])
                self.database[ip]["cnt"] += 1
                # bump the per-category counter for this alert's categories
                self.parse_category_to_ip(ip, da_alert["category"])
                self.recalculate_cnt_hour(ip)
        #self.print_database()
        return ips_to_return
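# Illustrative only: the shape of the da_alert record that add() expects,
# inferred from the fields it reads above; the concrete values are made up.
sample_da_alert = {
    "ips": (["1.2.3.4"], ["5.6.7.8"]),           # (source IPs, target IPs)
    "category": ["Recon.Scanning"],              # list of IDEA categories
    "time": datetime.now(pytz.timezone("UTC")),  # compared against the 1h window
    "node": "cz.example.detector",               # detector identifier
}
# db.add(sample_da_alert) would return ["S1.2.3.4", "T5.6.7.8"]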
from os.path import abspath as apat
from os.path import dirname as dirn

from b_aws_testing_framework.credentials import Credentials
from b_aws_testing_framework.tools.cdk_testing.cdk_tool_config import CdkToolConfig
from b_aws_testing_framework.tools.cdk_testing.testing_manager import TestingManager

CDK_PATH = dirn(apat(__file__))
MANAGER = TestingManager(Credentials(), CdkToolConfig(CDK_PATH))


def pytest_configure(*args, **kwargs):
    """
    Called after command line options have been parsed and all plugins
    and initial conftest files been loaded.
    """
    MANAGER.set_global_prefix()
    MANAGER.prepare_infrastructure()


def pytest_unconfigure(*args, **kwargs):
    """
    Called before test process is exited.
    """
    MANAGER.destroy_infrastructure()
#!/usr/bin/env python
import os
from os.path import dirname as dirn
import sys
import re

from setuptools import find_packages
from argparse import SUPPRESS
from typing import Final, Tuple

path = dirn(dirn(os.path.abspath(__file__)))
sys.path.insert(0, path)
from pudb.run import get_argparse_parser

PACKAGES: Final = find_packages(path)
PACKAGE: Final = PACKAGES[0]
BINNAME: Final = "pudb3"
ZSH_COMPLETION_FILE: Final = "_" + BINNAME
ZSH_COMPLETION_TEMPLATE: Final = os.path.join(dirn(os.path.abspath(__file__)), "zsh_completion.in")

pat_class = re.compile("'(.*)'")

flags = []
for action in get_argparse_parser()._actions:
    if len(action.option_strings) > 1:
        optionstr = "{" + ",".join(action.option_strings) + "}"
    elif len(action.option_strings) == 1:
        optionstr = action.option_strings[0]
    else:
        optionstr = ""
    if action.dest in ["help", "version"]:
        prefix = "'(- *)'"