def get_pattern(self):
    """Select ``self._kernel_size`` distinct random indices into the frequent subgraphs.

    Returns:
        A sorted list of indices into ``self._freq``. If more patterns are
        requested than exist, all indices are returned instead.
    """
    freq_size = len(self._freq)
    print_info('Using pattern language "random". Selecting {} random pattern from our freq. subgraphs'.format(self._kernel_size))
    if self._kernel_size >= freq_size:
        # Requesting at least as many patterns as exist: hand back everything.
        # (Message grammar fixed: was "to select to more ... then present".)
        print_info("Warning: Trying to select more freq. subgraphs than present. Returning all subgraphs instead...")
        # list() for a return type consistent with the sampled branch below
        return list(range(freq_size))
    return sorted(random.sample(range(freq_size), self._kernel_size))
def close(self):
    """Handle teardown: answer an incoming FIN segment with FIN+ACK."""
    incoming = receive_segment(3)
    info = self.segment.get_info(incoming)
    helper.print_info(info, 'IN:')
    # info[5] is the FIN flag of the received segment
    if not info[5]:
        return None
    reply = self.segment.gen_packet(self.seq, ackn=self.ackn, fin=1, ack=1)
    self.send_packet(reply)
    return True
def random(self):
    """Evaluate SVM accuracy with 20%, 30% and 50% randomly selected patterns."""
    for fraction in (0.2, 0.3, 0.5):
        print_info('Testing Random Selection with {}%'.format(fraction * 100))
        # Let the "random" pattern language pick the subgraph indices.
        selected = Random(self._freq, int(fraction * len(self._freq))).get_pattern()
        vectors = convert.dataset_to_vectors(self._iso_map, self._ds_clss, selected)
        test_SVM(vectors)
        print_info('\n-----[End Test]-----\n')
def get_pattern(self):
    """Return indices of frequent subgraphs whose vertex count lies in the
    inclusive range [self._min, self._max].
    """
    freq_size = len(self._freq)
    print_info('Using pattern language "graphlet-select". Searching subgraphs with min.inc. {} and max.inc. {} vertices'.format(self._min, self._max))
    # Idiom: comprehension with a chained comparison instead of a manual
    # append loop with two separate bound checks.
    pattern = [idx for idx in range(freq_size)
               if self._min <= self._freq[idx].get_num_vertices() <= self._max]
    print_info('Found {} pattern matching the criterium'.format(len(pattern)))
    return pattern
def send_ack(self): pass # TODO: Schritt 1-3, Aufgabe 4 # this should be used to generate packets # seqn, ackn,syn, fin,ack, payload need to be modified according to the # packet to be sent packet = self.segment.gen_packet(seqn, ackn, syn, fin, ack, payload) # this function should be used to put them on the UDP socket info = self.segment.get_info(packet) helper.print_info(info, 'OUT:') self.send_packet(packet)
def run_training(args, params):
    """Entry point for a training run.

    Prints run info, optionally initializes a Comet experiment tracker,
    then either delegates to the dual_glow trainer or builds a model and
    optimizer and starts training — resuming from a checkpoint when
    ``args.resume_train`` is set.
    """
    # print run info
    helper.print_info(args, params, model=None, which_info='params')
    # setting comet tracker
    tracker = None
    if args.use_comet:
        tracker = init_comet(args, params)
        print("In [run_training]: Comet experiment initialized...")
    if 'dual_glow' in args.model:
        # dual_glow ships its own self-contained training loop
        models.train_dual_glow(args, params, tracker)
    else:
        model = models.init_model(args, params)
        optimizer = optim.Adam(model.parameters())
        reverse_cond = data_handler.retrieve_rev_cond(args, params, run_mode='train')
        train_configs = trainer.init_train_configs(args)
        # resume training
        if args.resume_train:
            optim_step = args.last_optim_step
            checkpoints_path = helper.compute_paths(args, params)['checkpoints_path']
            # restores model/optimizer state; lr may or may not be stored
            model, optimizer, _, lr = load_checkpoint(checkpoints_path, optim_step, model, optimizer)
            if lr is None:  # if not saved in checkpoint
                lr = params['lr']
            trainer.train(args, params, train_configs, model, optimizer, lr, tracker,
                          resume=True, last_optim_step=optim_step, reverse_cond=reverse_cond)
        # train from scratch
        else:
            lr = params['lr']
            trainer.train(args, params, train_configs, model, optimizer, lr, tracker,
                          reverse_cond=reverse_cond)
def send_request(self):
    """Announce the number of segments to be transferred in a request packet."""
    # increment sequence number
    self.seq += 1
    # the payload carries the segment count as a packed 4-byte int
    payload = struct.pack('i', self.num_segments)
    request = tcpo.gen_packet(seqn=self.seq, ackn=self.ackn, payload=payload)
    helper.print_info(self.segment.get_info(request), 'OUT:')
    self.send_packet(request)
    return True
def receive_data(self):
    """Receive one data segment; return True on success, False when nothing arrives.

    Bug fix: the original looped on the undefined name ``goon``, which
    raised NameError on the first call. We now return False when
    receive_segment() comes back empty (presumably a timeout — TODO
    confirm against receive_segment's contract).
    """
    packet = receive_segment(5)
    info = self.segment.get_info(packet)
    helper.print_info(info, 'IN:')
    if packet != []:
        seqn, ackn, syn, ack, payload, fin = self.segment.get_info(packet)
        data = self.segment.unpack(packet)
        # TODO errorhandling
        return True
    return False
def wait_ack(self):
    """Wait for an ACK; update seq/ackn, confirm it, and drop the acked segment."""
    segment_in = receive_segment(3)
    info = self.segment.get_info(segment_in)
    helper.print_info(info, 'IN:')
    # advance our state from the peer's sequence/ack numbers
    self.seq = info[0] + 1
    self.ackn = info[1]
    # info[3] is the ACK flag
    if not info[3]:
        return None
    reply = self.segment.gen_packet(self.seq, ackn=self.ackn, ack=1)
    self.send_packet(reply)
    self.segments_in_flight.pop(0)
    return True
def wait_request(self):
    """Wait for the client's request segment and acknowledge it.

    Stores the announced segment count from the payload in
    ``self.num_segments``.
    """
    raw = receive_segment(3)
    unpacked = self.segment.unpack(raw)
    info = self.segment.get_info(raw)
    helper.print_info(info, 'IN:')
    # payload carries the number of segments as a packed int
    self.num_segments = struct.unpack("i", unpacked.payload)[0]
    # info[4] is the payload field — a truthy payload marks a valid request
    if not info[4]:
        return False
    reply = self.segment.gen_packet(self.seq, ack=1, ackn=self.seq)
    helper.print_info(self.segment.get_info(reply), 'OUT:')
    send_segment(reply, self.segment.get_info(reply))
    return True
def init_model(args, params):
    """Build the model requested via ``args.model`` and place it on ``device``."""
    # one flow-depth entry is required per block
    assert len(params['n_flow']) == params['n_block']

    wants_two_glows = args.model == 'c_flow' or 'improved' in args.model
    if wants_two_glows:
        left_configs, right_configs = init_model_configs(args)
        model = TwoGlows(params, left_configs, right_configs)
    else:
        if 'c_glow' not in args.model:
            raise NotImplementedError
        model = init_c_glow(args, params)

    print(f'In [init_model]: init model done. Model is on: {device}')
    helper.print_info(args, params, model, which_info='model')
    return model.to(device)
def cork(self):
    """Run CORK pattern selection and evaluate the resulting vectors via SVM."""
    selected = CORK(self._freq, self._iso_map, self._ds_clss, self._desc).get_pattern()
    print_info("Selected {} pattern using CORK".format(len(selected)))
    print_info(selected)
    vectors = convert.dataset_to_vectors(self._iso_map, self._ds_clss, selected)
    test_SVM(vectors)
    print_info('\n-----[End Test]-----\n')
def get_pattern(self): print_info('Using CORK to mine discriminative freq. subgraphs.') # init siblings_map freq_count = len(self._freq) siblings_map = [0] * freq_count for i in range(len(siblings_map)): curr_code = self._freq[i] for j in range(i+1, freq_count): if len(self._freq[j]) <= len(curr_code): siblings_map[i] = j break if siblings_map[i] == 0: siblings_map[i] = freq_count idxs = [] con = True while(con): next_idx = -1 i = 0 while i < len(self._freq): if self.calc_cork(i) > self.calc_cork(next_idx, initial=(len(idxs) == 0)): next_idx = i if self.max_cork(i) <= self.calc_cork(next_idx): i = siblings_map[i] else: i += 1 if self.calc_cork(next_idx) > self.calc_cork(): idxs.append(next_idx) self.extend_ds(next_idx, True) print_info("selected {}".format(next_idx)) else: con = False return idxs
def wait_syn(self):
    """Server side of the 3-way handshake: wait for SYN, send SYN+ACK, wait for ACK.

    The compared tuples follow get_info's layout, which per its use
    elsewhere in this file appears to be
    (seqn, ackn, syn, ack, payload, fin) — TODO confirm.
    """
    data = receive_segment(10)
    info = self.segment.get_info(data)
    helper.print_info(info, 'IN:')
    # syn
    if(info == (0, 0, True, False, 0, False)):
        packet = self.segment.unpack(data)
        # build syn, ack
        newpacket = self.segment.gen_packet(seqn=self.seq, ackn=1, syn=1,ack=1)
        info = self.segment.get_info(newpacket)
        helper.print_info(info, 'OUT:')
        # send syn, ack
        send_segment(newpacket, self.segment.get_info(newpacket))
        # wait for the final ACK of the handshake
        data = receive_segment(10)
        info = self.segment.get_info(data)
        helper.print_info(info, 'IN:')
        if(info == (1, 1, False, True, 0, False)):
            return True
        # NOTE(review): True is returned even when the final ACK does NOT
        # match the expected tuple — this looks like it should be False;
        # confirm before changing, callers may rely on it.
        return True
    return False
def connect(self):
    """Client side of the 3-way handshake: SYN -> SYN+ACK -> ACK."""
    self.seq = 0
    self.ackn = 0

    # step 1: send SYN
    syn_packet = tcpo.gen_packet(seqn=0, syn=1)
    helper.print_info(self.segment.get_info(syn_packet), 'syn OUT:')
    self.send_packet(syn_packet)

    # step 2: wait for SYN+ACK (peer seqn = 0, ackn = our seqn + 1)
    response = receive_segment(3)
    response_info = self.segment.get_info(response)
    helper.print_info(response_info, 'syn ack IN:')
    if response_info != (0, 1, True, True, 0, False):
        return False

    # step 3: acknowledge the SYN+ACK
    ack_packet = tcpo.gen_packet(seqn=1, ackn=1, ack=1)
    helper.print_info(self.segment.get_info(ack_packet), 'ack OUT:')
    self.send_packet(ack_packet)
    return True
def main():
    """Parse the command line, resolve the target language, run the requested
    actions on the file, and print a per-action pass/fail summary.

    Returns True when every action succeeded, False otherwise, and None
    when language autodetection fails.
    """
    default_actions = ['create']
    default_editors = ['style_desc', 'vim']
    parser = argparse.ArgumentParser(
        description='Makes your code easy to edit/compile/run/test/check')
    parser.add_argument(
        'filename',
        help=('filename (with or without the extension)'))
    parser.add_argument(
        '--language', '-l',
        help=('programming language to consider (default: %(default)s)'),
        choices=sorted(LANGUAGE_NAMES.keys()) + ['autodetect'],
        default='autodetect')
    parser.add_argument(
        '--style', '-s',
        help=('code style to be used (default is to get the one specific to the language - if any)'),
        choices=sorted(CODESTYLES.keys()))
    parser.add_argument(
        '--action', '-a',
        action='append',
        help=('action(s) to perform (default: %s)' % default_actions),
        choices=[
            'create', 'edit', 'run', 'check', 'compile', 'coverage',
            'debug', 'info', 'upload', 'minify', 'pretty', 'obfuscate',
            'doctest', 'interactive', 'gendoc', 'to_py3', 'uml', 'man',
            'unittest', 'functionlist', 'profile', 'metrics', 'display',
            'shell'],  # this list could be generated
        default=[])
    parser.add_argument(
        '--failure', '-f',
        help=('behavior on failure (default: %(default)s)'),
        choices=['stop', 'continue'],
        default='stop')
    parser.add_argument(
        '--extension_mode', '-e',
        help=('extension mode (default: %(default)s)'),
        choices=['auto', 'never', 'always'],
        default='auto')
    parser.add_argument(
        '--override_file', '-o',
        help=('override already existing file (default: %(default)s)'),
        choices=['n', 'y'],
        default='n')
    parser.add_argument(
        '--modeline', '-m',
        help=('location for modeline (editor settings) (default: %(default)s)'),
        choices=['top', 'bottom', 'both', 'none'],
        default='top')
    parser.add_argument(
        '--text-editors', '-t',
        action='append',
        help=('text editors for modelines (default: %s)' % default_editors),
        choices=MODELINE_SUPPORTED_EDITORS,
        default=[])
    args = parser.parse_args()
    # Workaround issue http://bugs.python.org/issue16399
    # (action='append' with a non-empty default would keep the default
    # entries even when the user supplies values, so default to [] and
    # backfill here)
    if not args.action:
        args.action = default_actions
    if not args.text_editors:
        args.text_editors = default_editors
    language = args.language
    if language == 'autodetect':
        language = detect_language_from_filename(args.filename)
        print_info('Detected language is %s' % language)
        if language is None:
            return
    assert language in LANGUAGE_NAMES
    results = LANGUAGE_NAMES[language].perform_actions(args)
    # ANSI color codes for the summary line
    greentick = '\033[92m✔'
    redcross = '\033[91m✘'
    undocolor = '\033[0m'
    for action, ret in results:
        print_info(
            (greentick if ret else redcross) + undocolor + ' ' + action)
    return all(res for _, res in results)
def main():
    """Interactively enable Amazon Macie for accounts of a selected OU.

    Walks the operator through choosing an organization root, a child OU
    and target accounts, makes the current account the delegated Macie
    administrator, then enrolls each selected account as a Macie member.
    Exits via sys.exit(0) when the operator declines a confirmation.
    """
    toolAckMessage = "Amazon Macie is not a free service. By using this tool you acknowledge you are responsible for all charges and actions!"
    print_headline("Amazon Macie Activation Process Tool")
    print("!!!!!!!!!!")
    print_warning(toolAckMessage)
    print_info(
        "See Amazon Macie pricing: https://aws.amazon.com/macie/pricing/")
    print("!!!!!!!!!!")
    cont = confirmation("Do you wish to continue?")
    if not cont:
        sys.exit(0)
    print_padding(len(toolAckMessage), '-')
    selectedOrg = select("Select organization root:", get_roots(),
                         lambda x: x['Name'])
    selectedOu = select("Select Child OU:", list_ous(selectedOrg['Id']),
                        lambda x: x['Name'])
    allAccounts = confirmation("Do you want to use all accounts in the OU?")
    accounts = list_accounts(selectedOu['Id'])
    selectedAccounts = accounts
    if allAccounts == False:
        selectedAccounts = [
            select("Select Target Account:", accounts, lambda x: x['Name'])
        ]
    rawTags = ask(
        "What tags should be added to the Macie enrolled accounts? (Format: key:value;key:value)"
    )
    tags = {}
    # Robustness fix: the original indexed spl[1] unconditionally, so an
    # empty answer, a trailing ';' or an entry without ':' raised IndexError.
    for t in rawTags.split(';'):
        spl = t.split(':')
        if len(spl) >= 2:
            tags[spl[0]] = spl[1]
    listOfAccountConfirm = '\n'.join(
        sorted(entry['Id'] for entry in selectedAccounts))
    # Bug fix: pluralize when the count is NOT exactly one (the original
    # tested == 0, printing "1 accounts" and "2 account").
    agree = confirmation(
        'Accounts:\n{2}\nDo you wish to enable Macie in {0} account{1}?'.
        format(len(selectedAccounts),
               's' if len(selectedAccounts) != 1 else '',
               listOfAccountConfirm))
    if agree == False:
        sys.exit(0)
    # Create Macie Client and set the parent account as the delegated org account
    parentAccountId = get_account_id()
    rootMacie = boto3.client('macie2')
    print('Making {0} the Macie admin account'.format(parentAccountId))
    try:
        rootMacie.enable_organization_admin_account(
            adminAccountId=parentAccountId)
        print('Enabling auto-enable in {0}'.format(parentAccountId))
        rootMacie.update_organization_configuration(autoEnable=True)
    except Exception as err:
        # Bug fix: a bare 'except: pass' swallowed every failure, including
        # KeyboardInterrupt. Keep the best-effort behavior but report it.
        print('Warning: could not configure the Macie admin account: {0}'.format(err))
    for selectedAccount in selectedAccounts:
        print('Associating {0} with master account {1} and enabling Macie'.
              format(selectedAccount['Id'], parentAccountId))
        rootMacie.create_member(account={
            'accountId': selectedAccount['Id'],
            'email': selectedAccount['Email']
        }, tags=tags)
def graphlet_select(self):
    """Evaluate SVM accuracy using the graphlet-select pattern language
    (subgraphs whose vertex count lies in a fixed range).
    """
    # NOTE(review): Graphlets(self._freq, 5, 3) — if the constructor takes
    # (freq, min, max), a min of 5 with a max of 3 can never match; confirm
    # the intended argument order.
    selected = Graphlets(self._freq, 5, 3).get_pattern()
    vectors = convert.dataset_to_vectors(self._iso_map, self._ds_clss, selected)
    test_SVM(vectors)
    print_info('\n-----[End Test]-----\n')
from weka.core.classes import Random from weka.core.dataset import Instances from weka.filters import Filter import os #jvm.start(class_path=['/root/PycharmProjects/untitled/venv1/lib/python2.7/site-packages/weka/lib/weka.jar', # '/root/PycharmProjects/untitled/venv1/lib/python2.7/site-packages/weka/lib/python-weka-wrapper.jar']) jvm.start() #data_dir = "stuff/iris.arff" #data_file = helper.get_data_dir() + os.sep + "iris.arff" data_file = "/root/PycharmProjects/untitled/stuff/iris.arff" helper.print_info("Loading dataset: " + data_file) loader = Loader("weka.core.converters.ArffLoader") data = loader.load_file(data_file) data.class_is_last() print(data) classifier = Classifier(classname="weka.classifiers.trees.J48") # randomize data folds = 10 seed = 1 rnd = Random(seed) rand_data = Instances.copy_instances(data) rand_data.randomize(rnd) if rand_data.class_attribute.is_nominal:
def test_SVM(data):
    """Run SVM accuracy testing on the vectors/classes stored in ``data``."""
    print_info('Start training of our SVM')
    vectors, classes = data['vector'], data['class']
    accuracies, _predictions = perform_SVM(vectors, classes)
    print_info(accuracies)
def test(ds_name, minSup, params='', random=False, graphlet=False, cork=False):
    """Mine frequent subgraphs of a TU dataset with gSpan, then run the
    selected pattern-language evaluations on the result.

    :param ds_name: TUDataset name (also used to locate the gSpan input file)
    :param minSup: minimum support as a fraction of the dataset size
    :param params: extra command-line parameters forwarded to gSpan
    :param random: evaluate the random selection strategy
    :param graphlet: evaluate the graphlet-select strategy
    :param cork: evaluate the CORK strategy
    """
    # NOTE(review): tests_run is string-concatenated here (producing e.g.
    # "randomgraphlet"); if it is meant to collect names, this should be a
    # list append — confirm against helper.tests_run's definition.
    if len(helper.tests_run) == 0:
        if random:
            helper.tests_run += "random"
        if graphlet:
            helper.tests_run += "graphlet"
        if cork:
            helper.tests_run += "cork"
    print_info('\n-----[BEGIN]-----\n')
    dataset = TUDataset(root='./tmp/{}'.format(ds_name), name=ds_name)
    print_info('Starting Tests with dataset: {}, containing {} Graphs'.format(
        ds_name, len(dataset)))
    is_directed = dataset[0].is_directed()
    # create the run arguments for gSpan-python
    cwd = os.getcwd()
    f_name = '{}.data.txt'.format(ds_name)
    f_path = path.join(cwd, 'graphs', f_name)
    # absolute minimum support = fraction of the dataset size
    args_str = '--min_support {min} --directed {directed} {params} --verbose FALSE --where TRUE {ds}'.format(
        ds=f_path, min=int(len(dataset) * minSup), directed=is_directed,
        params=params)
    print_info('Running with params: {}'.format(args_str))
    FLAGS, _ = parser.parse_known_args(args=args_str.split())
    # mine with gSpan
    print_info(
        "Starting mining with gSpan-Algorithm and minSup sigma = {}%".format(
            minSup * 100))
    gs = main(FLAGS)
    _report = gs._report_df
    print_info("\nFinished mining. Found {} freq. subgraphs.".format(
        len(_report)))
    _freq = []
    sum_edges = 0
    sum_vertices = 0
    # collect DFS codes and tally totals for the averages printed below
    for dfs in _report['dfs']:
        _freq.append(dfs)
        # len(dfs) presumably counts the edges of the DFS code — confirm
        sum_edges += len(dfs)
        sum_vertices += dfs.get_num_vertices()
    # message text is German: "average vertices / average edges"
    print_info("Durchschnitt Knoten: {}, Durchschnitt Kanten: {}\n".format(
        sum_vertices / len(_freq), sum_edges / len(_freq)))
    _desc = [desc for desc in _report['description']]
    # get info needed for testing
    ds_graph_classes = dataset.data.y.tolist()  # graph classes
    isomorph_graphs = [gids for gids in _report['isomorph_graphs']]
    # perform test
    _tests = tests.Tests(_freq, isomorph_graphs, ds_graph_classes, _desc)
    _tests.run(random, graphlet, cork)
    print_info('\n-----[END]-----\n')