def attack(self):
    """Fuzz a GraphQL endpoint by injecting payloads into one key of a JSON schema.

    Loads ``self.SCHEMA`` (a JSON request template), locates ``self.KEY`` inside
    it, then for every payload line found under ``self.PAYLOADS_FOLDER[self.ATTACK]``
    posts a mutated copy of the schema to ``self.GQL_ENDPOINT`` asynchronously and
    prints each response as it completes.

    Returns False on any setup error (missing schema file, key, or payload file);
    otherwise returns None after printing all responses.
    """
    schema = None
    futures = list()
    payloads = list()
    # requests-futures style session; THREADS bounds concurrent requests.
    session = ElapsedFuturesSession(max_workers=self.THREADS)
    if not os.path.isfile(self.SCHEMA):
        print("Schema file {} not found!".format(self.SCHEMA))
        return False
    with open(self.SCHEMA, 'r') as f:
        schema = json.load(f)
    # Bail out early if the injection key is not present in the template.
    key_found = utils.find_key(schema, self.KEY)
    if len(key_found) == 0:
        print("Key {} not found!".format(self.KEY))
        return False
    # Payload source: either a whole folder of .txt files, or a single
    # <folder>/<TYPE>.txt file when a TYPE was given.
    path = self.PAYLOADS_FOLDER[self.ATTACK]
    if self.TYPE:
        path = "{}/{}.txt".format(self.PAYLOADS_FOLDER[self.ATTACK], self.TYPE)
    if os.path.isdir(path):
        # NOTE(review): assumes the directory contains only readable payload
        # files — a subdirectory entry would make open() fail. Confirm layout.
        with os.scandir(path) as files:
            for item in files:
                with open(item, 'r') as sqlfile:
                    payloads.extend(sqlfile.readlines())
    else:
        if not os.path.isfile(path):
            print("File {} not found!".format(path))
            return False
        with open(path, 'r') as sqlfile:
            payloads = sqlfile.readlines()
    # create futures — the schema dict is edited in place for each payload,
    # then serialized by session.post before the next mutation.
    for payload in payloads:
        utils.edit_value(schema, self.KEY, payload.rstrip())
        future = session.post(self.GQL_ENDPOINT,
                              headers=utils.set_request_headers(),
                              json=schema)
        # payloads containing brackets or other chars may be escaped in schema and so
        # this way I can check for real what payload has been sent
        # TODO escape chars so they made it into json
        future.payload = utils.find_key(schema, self.KEY)[0]
        futures.append(future)
    # attack! — print responses in completion order, not submission order.
    for future in as_completed(futures):
        response = future.result()
        print("Tested: {}".format(future.payload))
        print("Status: {}".format(response.status_code))
        print("{}".format(response.text))
        print("Elapsed: {}s".format(response.elapsed))
        print("="*10)
def add_category_to_dict(self, category_name, object_name, category_bin):
    """
    Accumulate *object_name* under the Django class related to *category_name*:
    { related class to the document : list of values }
    e.g. { Region: ['EMEA', 'Latin America'] }
    :category_name string
    :object_name string
    :param category_bin: dict
    :return dict
    """
    # OpenKM uses singular category names; the settings map uses plurals.
    for singular, plural in (('Industry', 'Industries'),
                             ('Role', 'Roles'),
                             ('Task', 'Tasks'),
                             ('Solution', 'Solutions')):
        if category_name == singular:
            category_name = plural
            break

    # get the related class to document
    related_class = utils.find_key(
        settings.OPENKM['categories'], category_name)
    if not related_class:
        logger.debug('%s not found in OPENKM[\'categories\']', category_name)
        return category_bin

    if related_class in category_bin:
        category_bin[related_class].append(object_name)
    else:
        category_bin[related_class] = [object_name]
    return category_bin
def set_attributes(self, property_map, document_properties, document):
    """Copy OpenKM document properties onto a Django document model.

    For each OpenKM property present in *property_map*, either translate the
    selected option through the map's 'choices' table, or fall through to a
    set of attribute-specific special cases ('type', 'languages'), or copy the
    raw property value. (Python 2 syntax: `except Exception, e`.)
    """
    for document_property in document_properties:
        # Skip unnamed properties and the internal okp:gsaProperties group.
        if hasattr(document_property, 'name'
                   ) and 'okp:gsaProperties' not in document_property.name:
            if property_map.get(document_property.name, None):
                meta = property_map.get(document_property.name, None)
                if 'choices' in meta:
                    option = self.get_option(document_property.options)
                    if option and meta['choices']:
                        # Reverse-lookup: map the displayed label back to its key.
                        value = utils.find_key(dict(meta['choices']), option.label)
                        setattr(document, meta['attribute'], value)
                    elif option and not meta['choices']:
                        if meta['attribute'] == 'type':
                            # Ensure the document has a type before setting it.
                            if not hasattr(document, 'type') or not document.type:
                                document.set_default_type()
                            document.set_type(option.label)
                            document.save()
                        else:
                            # sorry this is a horrible special case
                            # will come back and refactor this out soon
                            if meta['attribute'] == 'languages':
                                try:
                                    self.set_language(document, option)
                                except Exception, e:
                                    # NOTE(review): broad catch that only prints;
                                    # failures here are silently dropped.
                                    print e
                            else:
                                setattr(document, meta['attribute'], option.value)
                else:
                    # No choices table: copy the raw property value straight over.
                    setattr(document, meta['attribute'], document_property.value)
def set_attributes(self, property_map, document_properties, document):
    """Copy OpenKM document properties onto a Django document model.

    Variant that writes the type name directly onto ``document.type.name``
    instead of going through set_type(). (Python 2 syntax.)
    """
    for document_property in document_properties:
        # Skip unnamed properties and the internal okp:gsaProperties group.
        if hasattr(document_property, 'name') and 'okp:gsaProperties' not in document_property.name:
            if property_map.get(document_property.name, None):
                meta = property_map.get(document_property.name, None)
                if 'choices' in meta:
                    option = self.get_option(document_property.options)
                    if option and meta['choices']:
                        # Reverse-lookup: map the displayed label back to its key.
                        value = utils.find_key(dict(meta['choices']), option.label)
                        setattr(document, meta['attribute'], value)
                    elif option and not meta['choices']:
                        if meta['attribute'] == 'type':
                            # Write the option value straight onto the related
                            # type object — assumes document.type exists here.
                            setattr(document.type, 'name', option.value)
                        else:
                            # sorry this is a horrible special case
                            # will come back and refactor this out soon
                            if meta['attribute'] == 'languages':
                                try:
                                    self.set_language(document, option)
                                except Exception, e:
                                    # NOTE(review): broad catch that only prints.
                                    print e
                            else:
                                setattr(document, meta['attribute'], option.value)
                else:
                    # No choices table: copy the raw property value straight over.
                    setattr(document, meta['attribute'], document_property.value)
def add_category_to_dict(self, category_name, object_name, category_bin):
    """
    Build up a mapping of { related class to the document : list of values },
    e.g. { Region: ['EMEA', 'Latin America'] }
    :category_name string
    :object_name string
    :param category_bin: dict
    :return dict
    """
    # Translate OpenKM's singular category names to the plural keys used
    # in settings.OPENKM['categories']; unknown names pass through as-is.
    _PLURALS = {
        'Industry': 'Industries',
        'Role': 'Roles',
        'Task': 'Tasks',
        'Solution': 'Solutions',
    }
    category_name = _PLURALS.get(category_name, category_name)

    # get the related class to document
    related_class = utils.find_key(settings.OPENKM['categories'], category_name)
    if not related_class:
        logger.debug('%s not found in OPENKM[\'categories\']', category_name)
        return category_bin

    category_bin.setdefault(related_class, []).append(object_name)
    return category_bin
def categories(self, document, okm_document):
    '''
    Sync OpenKM category assignments onto the Django document's M2M fields.

    :param document: a Django model instance for your document
    :param okm_document: an OpenKM Document instance

    For each OpenKM category path, resolve the Django model class and value,
    then replace the document's current related objects with the new set.
    (Python 2 syntax: `except ValueError, e`, print statements.)
    '''
    category_bin = {}
    # add the categories from OpenKM to the dict
    if hasattr(okm_document, 'categories'):
        for category in okm_document.categories:
            try:
                # find the category
                category_name, object_name = utils.get_category_from_path(
                    category.path)
                # use the map to translate the OKM category name to the Django model name
                sync_categories = SyncCategories()
                model_name = utils.find_key(sync_categories.map, category_name)
                category_bin = self.add_category_to_dict(
                    model_name, object_name, category_bin)
            except ValueError, e:
                # Unparseable category path — log and continue with the rest.
                logger.debug(e)
    print 'Category bin: ', category_bin
    for related_class, values in category_bin.items():
        try:
            # get the related manager for the class
            # get the m2m manager
            _set = getattr(
                document, "%s" % related_class.__name__.lower())
            _set.clear()  # remove the current objects
            # special case for Tasks. this would be better as one to one, but need to maps to the unicode val
            if related_class.__name__ == 'Task':
                values = [
                    self.sanitize_task_description(value) for value in values
                ]
            # get the objects and add them to the model
            # NOTE(review): objects.get(name=value) raises DoesNotExist for
            # unknown values, which falls into the broad Exception handler.
            objects = [
                related_class.objects.get(name=value) for value in values
            ]
            [_set.add(obj) for obj in objects]
        except AttributeError, e:
            # Document has no manager for this class — report and move on.
            print e
            logger.debug(e)
        except Exception, e:
            print e
            logger.debug(e)
def categories(self, document, okm_document):
    '''
    Sync OpenKM category assignments onto the Django document's M2M fields.

    :param document: a Django model instance for your document
    :param okm_document: an OpenKM Document instance

    Variant that matches objects with name__contains and logs full tracebacks
    via logger.exception. (Python 2 syntax.)
    '''
    category_bin = {}
    # add the categories from OpenKM to the dict
    if hasattr(okm_document, 'categories'):
        for category in okm_document.categories:
            try:
                # find the category
                category_name, object_name = utils.get_category_from_path(category.path)
                # use the map to translate the OKM category name to the Django model name
                sync_categories = SyncCategories()
                model_name = utils.find_key(sync_categories.map, category_name)
                category_bin = self.add_category_to_dict(model_name, object_name, category_bin)
            except ValueError, e:
                # Unparseable category path — log with traceback and continue.
                logger.exception(e)
    print 'Category bin: ', category_bin
    for related_class, values in category_bin.items():
        try:
            # get the related manager for the class
            # get the m2m manager
            _set = getattr(document, "%s" % related_class.__name__.lower())
            _set.clear()  # remove the current objects
            # special case for Tasks. this would be better as one to one, but need to maps to the unicode val
            if related_class.__name__ == 'Task':
                values = [self.sanitize_task_description(value) for value in values]
            # get the objects and add them to the model
            # NOTE(review): name__contains is a substring match — it raises
            # MultipleObjectsReturned if more than one object matches.
            objects = [related_class.objects.get(name__contains=value) for value in values]
            print('Adding the following categories: %s', objects)
            [_set.add(object) for object in objects]
        except AttributeError, e:
            # Document has no manager for this class — report and move on.
            print e
            logger.exception(e)
        except Exception, e:
            print e
            logger.exception(e)
import os
import sys

import yaml

from utils import find_key
import subprocess_test as sptest

if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser("Run tests on homework assignment")
    parser.add_argument("-v", "--verbose", action='store_true',
                        help="Be Verbose")
    parser.add_argument("-f", "--file", type=argparse.FileType('r'),
                        help="input file")
    parser.add_argument("-o", "--output-dir", default=".",
                        help="output directory for reference output")
    args = parser.parse_args()

    # NOTE(review): yaml.load on an arbitrary grading file can construct
    # arbitrary objects via custom tags; use yaml.safe_load unless the
    # config genuinely needs them.
    myaml = yaml.load(args.file.read())
    teststruct = find_key('tests', myaml)
    file_path = find_key('file_path', myaml)
    if args.verbose:
        sys.stderr.write("Found grading struct:\n{0}".format(yaml.dump(teststruct)))
        sys.stderr.write("Running tests in {0} to generate reference output\n".format(file_path))
    output_path = os.path.realpath(args.output_dir)
    if not os.path.isdir(output_path):
        # BUG FIX: was `std.stderr.write(...)` (NameError: no `std`) and the
        # "{0}" placeholder was never given an argument; `sys` and `yaml`
        # were also used without being imported.
        sys.stderr.write("{0}: output path must be a valid directory\n".format(output_path))
        sys.exit(1)
    sptest.run_tests(teststruct, base_path=file_path, output_path=output_path)
    #for (pid, project_path) in imap(shlex.split, sys.stdin):
sys.stderr.write("{0}: stupid TypeError: {1}\n".format(file_path, e)) sys.exit(1) return None if __name__ == '__main__': import argparse parser = argparse.ArgumentParser("Run tests on homework assignment") parser.add_argument("-v", "--verbose", action='store_true', help="Be Verbose") parser.add_argument("-f", "--file", type=argparse.FileType('r'), help="input file") parser.add_argument("-o", "--output-dir", default=".", help="output directory for reference output") parser.add_argument("-n", "--session-name", default="grading", help="Session name") args = parser.parse_args() myaml = yaml.load(args.file.read()) reviewstruct = find_key('review', myaml) if args.verbose: sys.stderr.write("Found review struct: {0}\n".format(reviewstruct)) ec = EmacsClient(servername=args.session_name) for (pid, project_path) in imap(shlex.split, sys.stdin): project_path = urllib.unquote(project_path) for review in reviewstruct: subprocess.call(['tmux', 'send-keys', '-t', 'grading:0.1', "cd \"{path}\"".format(path=os.path.realpath(project_path)), 'C-m']) subprocess.call(['tmux', 'send-keys', '-t', 'grading:0.1', "ls *", 'C-m']) if 'src' in review: actual_files = [ first_match(src_file) for src_file in review['src'] if first_match(src_file) ] for review_file in actual_files[:-1]: if args.verbose:
def get_random_config(self):
    """
    Build a mutated version of the user's model that incorporates
    the new hyperparameters settings defined by `hyperparams`.

    Walks the hyperparameter search space in self.net_params, copying
    layers from self.model into a new list while sampling replacement
    activations / dropout layers for numbered keys ("<i>_act",
    "<i>_dropout") and model-wide toggles ("all_act", "all_dropout",
    "all_batchnorm"), then resamples hidden sizes from self.size_params.
    Returns an nn.Sequential with freshly initialized weights and a new
    checkpoint name.

    NOTE(review): structure below reconstructed from whitespace-mangled
    source — indentation of the nested branches should be re-verified
    against the original file.
    """
    self.all_batchnorm = False
    self.all_drop = False
    new_params = {}
    if not self.net_params:
        # Nothing to mutate: reuse the user's model as-is.
        mutated = self.model
    else:
        layers = []
        used_acts = []
        all_act = False
        all_drop = False
        all_batchnorm = False
        num_layers = len(self.model)
        i = 0
        # Record the original activation (assumed at index 1 of the model).
        used_acts.append(self.model[1].__str__())
        for layer_hp in self.net_params.keys():
            # Keys look like "<layer>_<hyperparam>", e.g. "2_act", "all_dropout".
            layer, hp = layer_hp.split('_', 1)
            if layer.isdigit():
                layer_num = int(layer)
                diff = layer_num - i
                if diff > 0:
                    # Copy over the untouched layers up to layer_num first.
                    for j in range(diff + 1):
                        layers.append(self.model[i + j])
                    i += diff
                    if hp == 'act':
                        # Sample a replacement activation for this position.
                        space = find_key(self.net_params, '{}_act'.format(layer_num))
                        hyperp = sample_from(space)
                        new_params["act"] = hyperp
                        new_act = str2act(hyperp)
                        used_acts.append(new_act.__str__())
                        layers.append(new_act)
                        i += 1
                    elif hp == 'dropout':
                        # Keep the current layer and insert a sampled Dropout.
                        layers.append(self.model[i])
                        space = find_key(self.net_params, '{}_drop'.format(layer_num))
                        hyperp = sample_from(space)
                        new_params["drop"] = hyperp
                        layers.append(nn.Dropout(p=hyperp))
                    else:
                        pass
                elif diff == 0:
                    layers.append(self.model[i])
                    if hp == 'act':
                        space = find_key(self.net_params, '{}_act'.format(layer_num))
                        hyperp = sample_from(space)
                        new_params["act"] = hyperp
                        new_act = str2act(hyperp)
                        used_acts.append(new_act.__str__())
                        layers.append(new_act)
                        i += 1
                    elif hp == 'dropout':
                        i += 1
                        layers.append(self.model[i])
                        space = find_key(self.net_params, '{}_drop'.format(layer_num))
                        hyperp = sample_from(space)
                        new_params["drop"] = hyperp
                        layers.append(nn.Dropout(p=hyperp))
                    else:
                        pass
                else:
                    # diff < 0: position already copied; mutate in place.
                    if hp == 'act':
                        space = find_key(self.net_params, '{}_act'.format(layer_num))
                        hyperp = sample_from(space)
                        new_params["act"] = hyperp
                        new_act = str2act(hyperp)
                        used_acts.append(new_act.__str__())
                        layers[i] = new_act
                    elif hp == 'dropout':
                        space = find_key(self.net_params, '{}_drop'.format(layer_num))
                        hyperp = sample_from(space)
                        new_params["drop"] = hyperp
                        layers.append(nn.Dropout(p=hyperp))
                        layers.append(self.model[i])
                    else:
                        pass
                i += 1
            else:
                # Non-numeric key: first flush any remaining original layers.
                if (i < num_layers) and (len(layers) < num_layers):
                    for j in range(num_layers - i):
                        layers.append(self.model[i + j])
                        i += 1
                if layer == "all":
                    # Model-wide toggles; [0] / 0 samples mean "disabled".
                    if hp == "act":
                        space = self.net_params['all_act']
                        hyperp = sample_from(space)
                        all_act = False if hyperp == [0] else True
                    elif hp == "dropout":
                        space = self.net_params['all_dropout']
                        hyperp = sample_from(space)
                        all_drop = False if hyperp == [0] else True
                    elif hp == "batchnorm":
                        space = self.net_params['all_batchnorm']
                        hyperp = sample_from(space)
                        all_batchnorm = True if hyperp == 1 else False
                    else:
                        pass
        # De-duplicate while preserving first-seen order.
        used_acts = sorted(set(used_acts), key=used_acts.index)
        if all_act:
            # Replace every occurrence of the original activation with one
            # sampled from the "all_act" sub-space.
            old_act = used_acts[0]
            space = self.net_params['all_act'][1][1]
            hyperp = sample_from(space)
            new_params["all_act"] = hyperp
            new_act = str2act(hyperp)
            used_acts.append(new_act.__str__())
            for i, l in enumerate(layers):
                if l.__str__() == old_act:
                    layers[i] = new_act
        if all_batchnorm:
            # Insert a BatchNorm after every activation, sized from the
            # preceding Linear/Conv layer.
            self.all_batchnorm = True
            new_params["all_batch"] = True
            target_acts = used_acts if not all_act else used_acts[1:]
            for i, l in enumerate(layers):
                if l.__str__() in target_acts:
                    if 'Linear' in layers[i - 1].__str__():
                        bn = nn.BatchNorm2d(layers[i - 1].out_features)
                    else:
                        bn = nn.BatchNorm2d(layers[i - 1].out_channels)
                    layers.insert(i + 1, bn)
            # NOTE(review): this tail insertion reuses `i` from the loop above
            # (layers[i - 1]) rather than indexing relative to layers[-2] —
            # looks suspicious; confirm against the original intent.
            if 'Linear' in layers[-2].__str__():
                bn = nn.BatchNorm2d(layers[i - 1].out_features)
            else:
                bn = nn.BatchNorm2d(layers[i - 1].out_channels)
            layers.insert(-1, bn)
        if all_drop:
            # Insert a Dropout (shared sampled p) after every activation,
            # offset by 1 more if a BatchNorm was just inserted there.
            self.all_drop = True
            new_params["all_drop"] = True
            target_acts = used_acts if not all_act else used_acts[1:]
            space = self.net_params['all_dropout'][1][1]
            hyperp = sample_from(space)
            for i, l in enumerate(layers):
                if l.__str__() in target_acts:
                    layers.insert(i + 1 + all_batchnorm, nn.Dropout(p=hyperp))
        # Resample hidden sizes; shift indices to account for the layers
        # inserted by the all_batchnorm / all_drop passes above.
        sizes = {}
        for k, v in self.size_params.items():
            layer_num = int(k.split("_", 1)[0])
            layer_num += (layer_num // 2) * (self.all_batchnorm + self.all_drop)
            hyperp = sample_from(v)
            new_params["{}_hidden_size".format(layer_num)] = hyperp
            sizes[layer_num] = hyperp
        for layer, size in sizes.items():
            # Rebuild the resized Linear and patch the following layers so
            # that in/out dimensions stay consistent.
            in_dim = layers[layer].in_features
            layers[layer] = nn.Linear(in_dim, size)
            if self.all_batchnorm:
                layers[layer + 2] = nn.BatchNorm2d(size)
            next_layer = layer + (2 + self.all_batchnorm + self.all_drop)
            out_dim = layers[next_layer].out_features
            layers[next_layer] = nn.Linear(size, out_dim)
        mutated = nn.Sequential(*layers)
    # Fresh weights, a unique checkpoint name, and bookkeeping flags.
    self._init_weights_biases(mutated)
    mutated.ckpt_name = str(uuid.uuid4().hex)
    mutated.new_params = new_params
    mutated.early_stopped = False
    return mutated
# # Make a new player object that is currently in the 'outside' room. dirs = ['n', 's', 'e', 'w'] player = Player('Dom', rooms['outside'], []) run_count = 0 while True: run_count += 1 room_key = find_key(player.current_room) if run_count == 1: print("""\n--------------------------------------------------------------------------------------------\n Welcome to Lambda Quest IV! Will you find the treasure, or perish along the way like the many adventurers that came before you?\n --------------------------------------------------------------------------------------------\n""") player.print_current_room() player.current_room.check_light(player) player.look_around() # # GET PLAYER INPUT #