def expire(self):
    # get reference to group by id
    try:
        group_ref = db.reference("/groups/" + self.group_id)
        group_dict = group_ref.get()
        members_list = group_dict["members"].keys()

        # Prepare message
        message_title = "Group has expired!"
        message_body = "The group " + group_dict['groupname'] + " has expired."
        message = Message(self.group_id, message_title, message_body)

        # Create resource manager
        rm = ResourceManager(self.group_id)

        # Set the current group of everyone who was in this group to null
        for member in members_list:
            user_group_ref = db.reference("/users/" + member + "/current_group")
            user_group_ref.delete()

        # Clean all resources
        group_ref.delete()
        rm.delete_blobs()

        # Notify all users in group
        message.send()
    except Exception as ex:
        print(ex)
    return
def delete_group():
    # get headers
    data = request.headers
    id_token = data['Authorization']
    try:
        decoded_token = auth.verify_id_token(id_token)
    except Exception as ex:
        return make_response('user not found', 401)

    # get user information
    uid = decoded_token["uid"]
    group_id = data["group_id"]
    user = decoded_token["name"]

    try:
        # get reference to group by id
        group_ref = db.reference("/groups/" + group_id)
        group_dict = group_ref.get()
        members_list = group_dict["members"].keys()

        # uid belongs to owner -> delete group
        if uid == group_dict['owner']['id']:
            # Prepare message
            message_title = "Group deleted!"
            message_body = "The group " + group_dict['groupname'] + " has been deleted by the owner"
            message = Message(group_id, message_title, message_body)

            # Create resource manager
            rm = ResourceManager(group_id)

            # Set the current group of everyone who was in this group to null
            for member in members_list:
                user_group_ref = db.reference("/users/" + member + "/current_group")
                user_group_ref.delete()

            # Clean all resources
            group_ref.delete()
            rm.delete_blobs()

            # Notify all users in group
            message.send()
            return make_response('group deleted', 200)

        # uid belongs to member -> leave group
        elif uid in group_dict['members']:
            # Set the current group of the user to null
            group_ref.child('members/' + uid).delete()
            user_group_ref = db.reference("/users/" + uid + "/current_group")
            user_group_ref.delete()

            # Notify all users in group
            message_title = "A member left your group!"
            message_body = user + " has left your group " + group_dict['groupname']
            message = Message(group_id, message_title, message_body)
            message.send()
            return make_response('left group', 200)

        else:
            return make_response('not authorized', 401)
    except Exception as ex:
        return make_response('Leaving group unsuccessful', 500)
def main():
    screensize = (640, 480)
    pygame.init()
    screen = pygame.display.set_mode(screensize)
    pygame.mixer.music.load(os.path.join("data", "music", "theme.mid"))
    pygame.mixer.music.play(-1)

    # initialize resources before objects
    rm = ResourceManager().initialize()
    om = ObjectManager().initialize()
    ship = om.ships.sprites()[0]

    # start the game loop
    clock = pygame.time.Clock()
    done = False
    while not done:
        clock.tick(30)

        # check user events
        keys = pygame.key.get_pressed()
        if keys[K_LEFT]:
            ship.left()
        if keys[K_RIGHT]:
            ship.right()
        if keys[K_UP]:
            ship.accelerate()
        if not keys[K_UP]:
            ship.coast()
        if keys[K_SPACE]:
            ship.fire()

        # update objects
        om.objects.update()

        # draw everything
        screen.blit(ResourceManager().nebula, (0, 0))
        om.objects.draw(screen)
        pygame.display.flip()

        # poll keyboard
        for event in pygame.event.get():
            if event.type == QUIT:
                done = True
            elif event.type == KEYDOWN and event.key == K_ESCAPE:
                done = True
            elif event.type == USEREVENT:
                # game over
                done = True
def __init__(self, position, velocity, counter=100):
    MOB.__init__(self, position, velocity)
    rm = ResourceManager()
    rm.missileSound.play()
    self.image = rm.missile
    self.rect = rm.missile.get_rect()
    self.counter = counter
def update(self):
    om = ObjectManager()
    rm = ResourceManager()
    MOB.update(self)
    self.counter -= 1
    if self.counter < 0:
        self.kill()

    # check for collisions
    asteroid = pygame.sprite.spritecollideany(self, om.asteroids)
    if asteroid:
        self.kill()
        asteroid.kill()
        rm.explosionSound.play()
        if type(asteroid) == BigAsteroid:
            for x in range(2):
                om.addAsteroid(
                    MedAsteroid(asteroid.position,
                                (-2 + 4 * random.random(), -2 + 4 * random.random())))
        elif type(asteroid) == MedAsteroid:
            for x in range(2):
                om.addAsteroid(
                    SmallAsteroid(asteroid.position,
                                  (-3 + 6 * random.random(), -3 + 6 * random.random())))
def __init__(self, id, spawn_x, spawn_y, aov, v_max, maxHealth, r=30, alpha=0,
             texturePath="textures/robot_base.png"):
    super().__init__()
    self.id = id
    self.pos = QVector2D(spawn_x, spawn_y)
    self.spawn = QVector2D(spawn_x, spawn_y)
    self.aov = aov
    self.r = r
    self.alpha = alpha  # unit: degrees
    self.texture = ResourceManager.getTexture(texturePath)

    self.a = 0                      # unit: pixels/second^2
    self.a_max = A_MAX              # unit: pixels/second^2
    self.v = 0                      # unit: pixels/second
    self.v_max = v_max              # unit: pixels/second
    self.a_alpha = 0                # unit: degrees/second^2
    self.a_alpha_max = A_ALPHA_MAX  # unit: degrees/second^2
    self.v_alpha = 0                # unit: degrees/second
    self.v_alpha_max = V_ALPHA_MAX  # unit: degrees/second

    self.guns = []
    self.selected_gun = None
    self.currentGunIndex = 0

    self.maxHealth = maxHealth
    self.health = maxHealth
    self.healthBar = HealthBar(maxHealth)

    self.active = True
    self.protected = False
    self.timeToRespawn = 0
    self.protectionTime = 0

    self.deathSound = ResourceManager.getSoundEffect("sounds/death.wav")
    self.respawnSound = ResourceManager.getSoundEffect("sounds/respawn.wav")
def __init__(self, position):
    rm = ResourceManager()
    pygame.sprite.Sprite.__init__(self)
    rm.explosionSound.play()
    self.images = rm.explosion
    self.rect = self.images[0].get_rect()
    self.rect.center = position
    self.nframes = len(self.images)
    self.frame = 0
    self.image = self.images[0]
    self.aspeed = 0.5
def populate_individuals(database_filename, resource_folder, output_filename,
                         max_iters=None, save_as_owl=False):
    """Populate the ontology with individuals parsed from the database."""
    logging.info("Loading resources...")
    resmgr = ResourceManager(resource_folder)
    resmgr.load()
    ontmgr = OntologyManager(resource_folder)
    ontmgr.load()

    logging.info("Parsing the database...")
    literals = list()
    for article in iter_db_rows(database_filename, max_iters, "Parsing database"):
        literals.append(WikitextLiteral.from_article(resmgr, *article))

    logging.info("Creating individuals...")
    for literal in tqdm.tqdm(literals, desc="Creating individuals",
                             bar_format=TQDM_BAR_FORMAT):
        ontmgr.add_individual(literal)
        for entry in literal.entries:
            ontmgr.add_individual(entry)
            for sense in entry.senses:
                ontmgr.add_individual(sense)

    logging.info("Creating properties...")
    for literal in tqdm.tqdm(literals, desc="Creating properties",
                             bar_format=TQDM_BAR_FORMAT):
        ontmgr.add_properties(literal)
        for entry in literal.entries:
            ontmgr.add_properties(entry)
            for sense in entry.senses:
                ontmgr.add_properties(sense)
    del literals

    logging.info("Saving ontology to %s", os.path.realpath(output_filename))
    ontmgr.save(output_filename, save_as_owl)
    logging.info("Done populating the ontology!")
def __init__(self):
    x, y = pygame.display.get_surface().get_size()
    MOB.__init__(self, (x / 2, y / 2), (0, 0))
    rm = ResourceManager()
    self.thrustSound = rm.thrustSound
    self.friction = 0.99
    self.acceleration = 0.2
    self.images = rm.ship
    self.ship = rm.ship
    self.shipflame = rm.shipflame
    self.rect = self.images[0].get_rect()
    self.heading = 0.0
    self.image = self.images[0]
    self.velocity = (0.0, 0.0)
    self.thrustSoundPlaying = False
    self.fireTimer = 0
def delete_resource(resource_id):
    valid = False
    user_info = None
    if 'jwt' in flask.request.cookies:
        valid, user_info = Jwt().validate_jwt(flask.request.cookies['jwt'])
    if not valid:
        return responses.get_json_error_response("Bad login")
    try:
        user_id = user_info['_id']
    except:
        return responses.get_invalid_request()
    ResourceManager(db).delete(user_id, resource_id)
    return responses.get_created()
def user_cp():
    """Return the user control panel app"""
    if 'jwt' in flask.request.cookies:
        valid, user_info = Jwt().validate_jwt(flask.request.cookies['jwt'])
        user_id = user_info['_id']
        user_name = user_info['username']
    else:
        return flask.redirect(LOGIN_WEB_PATH)
    if valid:
        resources = ResourceManager(db).get_resources(user_id)
        # TODO move cp.html from the login folder
        return flask.render_template('cp.html', resources=resources, user_name=user_name)
    return flask.redirect(LOGIN_WEB_PATH)
def update(self):
    # update timers
    self.fireTimer = max(self.fireTimer - 1, 0)
    om = ObjectManager()

    # update image
    self.image = self.images[int(self.heading) % 36]

    # update velocity
    u, v = self.velocity
    self.velocity = (u * self.friction, v * self.friction)

    # update position
    MOB.update(self)

    # check for collisions
    asteroids = om.asteroids
    if pygame.sprite.spritecollideany(self, asteroids):
        # game over
        self.kill()
        om.addExplosion(Explosion(self.position))
        om.addLoseMessage()
        ResourceManager().loseSound.play()
        pygame.time.set_timer(USEREVENT, 5000)
def load_resource(user_name, path, params):
    body, headers, r_type = ResourceManager(db).get_resource_for_display(user_name, path)
    if body is None:
        return flask.render_template('404.html')

    if r_type == "proxy":
        resp = get_proxy_response(request, body, params)
    elif r_type == "script":
        resp, logs = scr.run(request, body, params)
        scr_logger.save(user_name, path, logs)
    else:
        resp = flask.Response(body)

    # Set pre-configured headers
    for key, value in headers.iteritems():
        resp.headers[key] = value
    return resp
def create_resource():
    valid = False
    user_info = None
    if 'jwt' in flask.request.cookies:
        valid, user_info = Jwt().validate_jwt(flask.request.cookies['jwt'])
    if not valid:
        return responses.get_json_error_response("Bad login")
    try:
        user_id = user_info['_id']
        name = flask.request.form['name']
        path = flask.request.form['path']
        r_type = flask.request.form['type']
        body = flask.request.form['body']
        headers = flask.request.form['headers']
    except:
        return responses.get_invalid_request()
    ResourceManager(db).create(user_id, name, path, r_type, body, headers)
    return responses.get_created()
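The JWT-protected handlers above (delete_resource, user_cp, and create_resource) are plain view functions; the sketch below shows one plausible way they might be registered on a Flask app. The URL rules and the app object are assumptions for illustration, not taken from the original project.

# Hypothetical wiring of the view functions above; routes are illustrative only.
import flask

app = flask.Flask(__name__)

# control panel page
app.add_url_rule('/cp', view_func=user_cp)
# create a resource from form data
app.add_url_rule('/resource', view_func=create_resource, methods=['POST'])
# delete a resource by id
app.add_url_rule('/resource/<resource_id>', view_func=delete_resource, methods=['DELETE'])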
def __init__(self, world, location, ai_name, initial_size=DEFAULT_INIT_SIZE):
    self.__world = world
    self.__ai_name = ai_name
    self.__location = location
    # every time the size increases to 10 times this, the sphere of influence increases
    self.__threshold_size = DEFAULT_INIT_SIZE
    self.__size = initial_size  # this is the current size of the population
    self.__location_hashes = list()
    self.__location_mapping = dict()
    self.__key_location_mapping = dict()
    self.__known_locations = list()
    self.__running_hash = hashlib.md5()
    self.__efficiencies = dict([(r, 0.1) for r in RSC_TYPE_MAP])

    attrs = world.getProperties(location)
    self.resources = ResourceManager(attrs["elevation"], attrs["temperature"],
                                     attrs["wetness"], population=self)

    # sphere of influence -- a radius of tiles based on population size
    self.__sphere = set([location])
    self.__sphere_radius = 0  # number of rings in sphere of influence

    for loc in range(0, self.__world.getNumFaces()):
        key = self.generateUniqueStringKey(self.__location_hashes)
        self.__location_hashes.append(key)
        self.__location_mapping[loc] = key  # map real location index to random string
        self.__key_location_mapping[key] = loc

    # compute the radius of tiles that makes the sphere of influence
    self.computeSphere()
def _train_and_decode(HOME):
    random.seed(4)  # fix the random seed

    vsm = EMBEDDINGS_LEVY_DEPS_300  # vector space model to use
    lexicon = LEXICON_FULL_BRACKETS_FIX  # lexicon to use (mind the all_unknown setting!)
    multiword_averaging = False  # treatment of multiword predicates, false - use head embedding, true - use avg
    all_unknown = False  # makes the lexicon treat all LU as unknown, corresponds to the no-lex setting

    conf = Config(SharingDNNClassifier, SentenceBowMapper, lexicon, vsm,
                  multiword_averaging, all_unknown, None, None, None)

    print("Starting resource manager")
    sources = ResourceManager(HOME)

    print("Running the experiments!")
    g_train = get_graphs(*sources.get_corpus(CORPUS_DAS_TRAIN))

    # go to configuration, check which lexicon is needed, locate the lexicon in FS, load the lexicon
    lexicon = Lexicon()
    lexicon.load_from_list(sources.get_lexicon(conf.get_lexicon()))

    # same for VSM
    vsm = VSM(sources.get_vsm(conf.get_vsm()))

    mapper = conf.get_feat_extractor()(vsm, lexicon)

    # prepare the data
    X_train, y_train, lemmapos_train, gid_train = mapper.get_matrix(g_train)

    # train the model
    clf = conf.get_clf()(lexicon, conf.get_all_unknown(), conf.get_num_components(),
                         conf.get_max_sampled(), conf.get_num_epochs())
    clf.train(X_train, y_train, lemmapos_train)

    # prepare test data
    g_test = get_graphs(*sources.get_corpus(CORPUS_DAS_TEST))
    X_test, y_test, lemmapos_test, gid_test = mapper.get_matrix(g_test)

    # predict and compare
    with open(os.path.join(HOME, 'test.frames.predicted'), 'w') as output_stream:
        for x, y_true, lemmapos, gid, g in zip(X_test, y_test, lemmapos_test, gid_test, g_test):
            y_predicted = clf.predict(x, lemmapos)
            print(lexicon.get_frame(y_predicted), file=output_stream)
def __init__(self, position, velocity):
    MOB.__init__(self, position, velocity)
    rm = ResourceManager()
    self.image = rm.smallAsteroid
    self.rect = rm.smallAsteroid.get_rect()
class Population:

    def __init__(self, world, location, ai_name, initial_size=DEFAULT_INIT_SIZE):
        self.__world = world
        self.__ai_name = ai_name
        self.__location = location
        # every time the size increases to 10 times this, the sphere of influence increases
        self.__threshold_size = DEFAULT_INIT_SIZE
        self.__size = initial_size  # this is the current size of the population
        self.__location_hashes = list()
        self.__location_mapping = dict()
        self.__key_location_mapping = dict()
        self.__known_locations = list()
        self.__running_hash = hashlib.md5()
        self.__efficiencies = dict([(r, 0.1) for r in RSC_TYPE_MAP])

        attrs = world.getProperties(location)
        self.resources = ResourceManager(attrs["elevation"], attrs["temperature"],
                                         attrs["wetness"], population=self)

        # sphere of influence -- a radius of tiles based on population size
        self.__sphere = set([location])
        self.__sphere_radius = 0  # number of rings in sphere of influence

        for loc in range(0, self.__world.getNumFaces()):
            key = self.generateUniqueStringKey(self.__location_hashes)
            self.__location_hashes.append(key)
            self.__location_mapping[loc] = key  # map real location index to random string
            self.__key_location_mapping[key] = loc

        # compute the radius of tiles that makes the sphere of influence
        self.computeSphere()

    def getRunningHash(self):
        return self.__running_hash

    def getAIName(self):
        return self.__ai_name

    def getLocation(self):
        return self.__location  # home tile

    def getThresholdSize(self):
        return self.__threshold_size

    # when a population's home location changes, its sphere of influence must be re-computed
    def setNewLocation(self, new_loc):
        self.__location = new_loc
        self.__sphere = set([self.__location])
        self.__sphere_radius = 0
        self.computeSphere()

    def increaseEfficiency(self, rsc_name, people_in):
        new_eff = equations.increased_efficiency(self.__efficiencies[rsc_name], people_in)
        self.__efficiencies[rsc_name] = new_eff
        return new_eff

    def getEfficiency(self, rsc_name):
        return self.__efficiencies[rsc_name]

    def getSize(self):
        return self.__size

    def calculateFoodConsumed(self):
        return equations.food_consumed(self.__size)

    def resolveNewPopulation(self):
        # Examine the stockpiles, and compute our new population given the surplus or shortage
        # food_cons = self.calculateFoodConsumed()
        # food_growth_factor = reduce(lambda x, key: x + self.resources.food_amts[key], self.resources.food_amts, 0)
        # food_growth_factor = min(0, food_growth_factor - food_cons)
        # pop_change = self.__size * ((.01 + .001) * food_growth_factor / food_cons + 0.001)
        # print('Original pop %f (delta %f)' % (self.__size, pop_change))
        all_foods = self.resources.getStockpilesByType(T_FOOD)
        all_waters = self.resources.getStockpilesByType(T_WATER)
        pop_change = equations.population_change(self.__size, all_foods, all_waters)
        sphere_change = self.changeSize(pop_change)
        return pop_change, sphere_change

    # if the population size increases to 10 times the threshold, another ring of tiles
    # is added to the population's sphere of influence and the threshold increases accordingly;
    # likewise, if a population size decreases to a tenth of the threshold, a ring of tiles
    # is removed from the population's sphere of influence
    def changeSize(self, pop_change):
        self.__size += pop_change
        sphere_change = {}
        pow10size = self.getPow10Size()
        if pow10size > RING_THRESHOLD * self.__threshold_size:
            self.__threshold_size *= RING_THRESHOLD
            sphere_change["tiles"] = self.increaseRadius()
            sphere_change["which"] = "increase"
            if sphere_change["tiles"] == []:
                sphere_change = {}
        elif pow10size <= self.__threshold_size / RING_THRESHOLD:
            self.__threshold_size = max(self.__threshold_size / RING_THRESHOLD, self.__threshold_size)
            sphere_change["tiles"] = self.decreaseRadius()
            sphere_change["which"] = "decrease"
            if sphere_change["tiles"] == []:
                sphere_change = {}
        return sphere_change

    def increaseRadius(self):
        new_neighbors = set()
        for tile in self.__sphere:
            for neighbor in self.__world.getNeighbors(tile):
                if neighbor not in new_neighbors and neighbor not in self.__sphere:
                    new_neighbors.add(neighbor)  # since sphere is a set, no repeats are added
        self.__sphere = self.__sphere.union(new_neighbors)
        self.__sphere_radius += 1
        return list(new_neighbors)

    def decreaseRadius(self):
        old_sphere = self.__sphere
        self.__sphere = set([self.__location])
        self.__sphere_radius = 0
        self.computeSphere()
        tiles_removed = old_sphere.difference(self.__sphere)
        return list(tiles_removed)

    def addCellToKnownLocations(self, location):
        self.__known_locations.append(self.__location_mapping[location])

    def buildKnownLocationDict(self):
        loc_dict = {}
        # so that the AI can know how many people are available to use for an action
        loc_dict["size"] = self.__size
        for location in self.__known_locations:
            real_loc = self.__key_location_mapping[location]
            neighbors = map(lambda loc: self.__location_mapping[loc],
                            self.__world.getNeighbors(real_loc))
            loc_struct = {
                "neighbors": neighbors,
                "resources": self.__world.getResourcesJSON(real_loc),
                "properties": self.__world.getProperties(real_loc),
                "resourcesum": self.__world.getResourcesTotal(real_loc),
            }
            loc_dict[self.__location_mapping[real_loc]] = loc_struct
        loc_dict["home"] = self.__location_mapping[self.__location]
        return loc_dict

    def generateUniqueStringKey(self, thedict):
        newid = random.randint(0x1000, 0xFFFFFFF)
        while hex(newid) in thedict:
            newid = random.randint(0x1000, 0xFFFFFFF)
        return hex(newid)

    def getHashedLocation(self, real_loc):
        return self.__location_mapping[real_loc]

    def getActualLocation(self, thehash):
        return self.__key_location_mapping[thehash]

    def isPower(self, num, base):
        if base == 1 and num != 1:
            return False
        if base == 1 and num == 1:
            return True
        if base == 0 and num != 1:
            return False
        power = int(math.log(num, base) + 0.5)
        return base ** power == num

    def getPow10Size(self):
        if self.isPower(self.__size, 10):
            return self.__size
        i = 1
        while i * 10 < self.__size:
            i *= 10
        return i

    def computeRadius(self):
        pow10size = self.getPow10Size()
        radius = 0
        while pow10size >= self.__threshold_size:
            radius += 1
            pow10size /= RING_THRESHOLD
        return radius

    # keep adding another ring of tiles to the population's sphere of influence
    # according to the initial size; with the default init size, a population gets
    # one ring of tiles
    def computeSphere(self):
        radius_tile_set = set()
        for neighbor in self.__world.getNeighbors(self.__location):
            radius_tile_set.add(neighbor)
        self.__sphere = radius_tile_set
        self.__sphere_radius += 1
        init_radius = self.computeRadius()
        for i in xrange(1, init_radius):
            self.__threshold_size *= RING_THRESHOLD
            self.increaseRadius()

    def getSphere(self):
        return self.__sphere

    def getSphereRadius(self):
        return self.__sphere_radius
from globals import *
from data import get_graphs
from resources import ResourceManager
from reporting import ConllReporter


def check_corpora_read_ok(sources, out):
    print "Checking datasets"
    # set corpora to test here
    for corpus in [CORPUS_YAGS_TEST, CORPUS_DAS_TRAIN, CORPUS_DAS_TEST, CORPUS_YAGS_TEST,
                   CORPUS_MASC_TEST, CORPUS_TW_G_TEST, COPRUS_TW_M_TEST, CORPUS_TW_S_TEST]:
        g = get_graphs(*sources.get_corpus(corpus), verbose=False)
        reporter = ConllReporter(out + corpus + ".conll")
        reporter.report(g)


if __name__ == "__main__":
    src = "your/path/here"
    root = ResourceManager(src)
    check_corpora_read_ok(root, "your/path/here/tmp")
def process_events(cfg_csv_path, cfg_csv_parsing, cfg_open_edx_spec, timestamp_format):
    print('****** Processing events *******')
    events_processing_duration = time.time()

    # MOOCdb storage interface
    moocdb = MOOCdb(cfg_csv_path['moocdb_csv_dir'])

    # Instantiating the piping architecture
    event_formatter = EventFormatter(moocdb, TIMESTAMP_FORMAT=timestamp_format)
    resource_manager = ResourceManager(moocdb, HIERARCHY_ROOT='https://')
    event_manager = EventManager(moocdb)
    submission_manager = SubmissionManager(moocdb)
    curation_helper = CurationHelper(cfg_csv_path['moocdb_csv_dir'])
    clickevents_manager = ClickEventsManager(moocdb)

    print("Processing %s" % cfg_csv_path['edx_track_event_path'])
    extract = extractor.CSVExtractor(cfg_csv_path, cfg_csv_parsing)
    num_rows = int(
        check_output(["wc", "-l", cfg_csv_path['edx_track_event_path']]).split(" ")[0])

    event_count = 0
    for raw_event in extract:
        event_count += 1
        if event_count % 500 == 0:
            progress = 'Progress: %0.4f%%' % (100.0 * float(event_count) / float(num_rows))
            # A print statement is not used here because a newline is automatically
            # appended on each print, whereas we want to use the CR character to move
            # the terminal pointer back to the beginning of the same line.
            print(progress)
            sys.stdout.write("\033[F")

        # Skip events explicitly not handled by qpipe
        if event_formatter.pass_filter(raw_event) is False:
            continue

        event = event_formatter.polish(raw_event)
        resource_id = resource_manager.create_resource(event)
        event.set_data_attr('resource_id', resource_id)
        submission_manager.update_submission_tables(event)
        curation_helper.record_curation_hints(event)
        clickevents_manager.record(event, cfg_open_edx_spec)
        event_manager.store_event(event)

    print('* All events processed')
    print('* Writing CSV output to : %s' % cfg_csv_path['moocdb_csv_dir'])
    event_formatter.serialize()
    event_manager.serialize()
    resource_manager.serialize(pretty_print_to=cfg_csv_path['resource_hierarchy_path'])
    submission_manager.serialize(pretty_print_to=cfg_csv_path['problem_hierarchy_path'])
    curation_helper.serialize()
    print('* Writing resource hierarchy to : %s' % cfg_csv_path['resource_hierarchy_path'])
    print('* Writing problem hierarchy to : %s' % cfg_csv_path['problem_hierarchy_path'])

    metadata_file_path = os.path.join(cfg_csv_path['moocdb_csv_dir'], 'metadata.csv')
    try:
        os.remove(metadata_file_path)
        print('* Removed old metadata file at %s' % metadata_file_path)
    except OSError:
        pass

    print('* Writing metadata row to : %s' % metadata_file_path)
    try:
        with open(metadata_file_path, 'w') as metafile:
            process = Popen(['git', 'describe', '--always'], stdout=PIPE)
            commit_hash, err = process.communicate()
            commit_hash = commit_hash.rstrip() if err is None else ''
            events_processing_duration = (
                int(time.time() - events_processing_duration)) / 60  # minutes
            metafile.write('%s,%s\n' % (commit_hash, events_processing_duration))
    except OSError:
        pass

    moocdb.close()
def _train_all(HOME):
    random.seed(4)  # fix the random seed

    vsms = [EMBEDDINGS_LEVY_DEPS_300]  # vector space model to use
    lexicons = [LEXICON_FULL_BRACKETS_FIX]  # lexicon to use (mind the all_unknown setting!)
    multiword_averaging = [False]  # treatment of multiword predicates, false - use head embedding, true - use avg
    all_unknown = [False, True]  # makes the lexicon treat all LU as unknown, corresponds to the no-lex setting

    # WSABIE params
    num_components = [1500]
    max_sampled = [10]  # maximum number of negative samples used during WARP fitting 'warp'
    num_epochs = [500]

    configs = []

    for lexicon in lexicons:
        for all_unk in all_unknown:
            # DummyMapper doesn't do anything
            configs += [Config(DataMajorityBaseline, DummyMapper, lexicon, None, False, all_unk, None, None, None)]
            configs += [Config(LexiconMajorityBaseline, DummyMapper, lexicon, None, False, all_unk, None, None, None)]

    # Add configurations for NN classifiers
    for lexicon in lexicons:
        for vsm in vsms:
            for mwa in multiword_averaging:
                for all_unk in all_unknown:
                    configs += [Config(SharingDNNClassifier, SentenceBowMapper, lexicon, vsm, mwa, all_unk, None, None, None)]
                    configs += [Config(SharingDNNClassifier, DependentsBowMapper, lexicon, vsm, mwa, all_unk, None, None, None)]

    # Add configurations for WSABIE classifiers
    for lexicon in lexicons:
        for vsm in vsms:
            for mwa in multiword_averaging:
                for all_unk in all_unknown:
                    for num_comp in num_components:
                        for max_sampl in max_sampled:
                            for num_ep in num_epochs:
                                configs += [Config(WsabieClassifier, SentenceBowMapper, lexicon, vsm, mwa, all_unk, num_comp, max_sampl, num_ep)]
                                configs += [Config(WsabieClassifier, DependentsBowMapper, lexicon, vsm, mwa, all_unk, num_comp, max_sampl, num_ep)]

    print("Starting resource manager")
    sources = ResourceManager(HOME)

    print("Initializing reporters")
    reports = ReportManager(sources.out)

    print("Running the experiments!")
    runs = len(configs) * len(CORPORA_TRAIN) * len(CORPORA_TEST)
    print(len(configs), "configurations, ", len(CORPORA_TRAIN) * len(CORPORA_TEST),
          " train-test pairs -> ", runs, " runs")

    current_train = 0
    current_config = 0
    current_test = 0

    for corpus_train in CORPORA_TRAIN:
        current_train += 1
        current_config = 0
        g_train = get_graphs(*sources.get_corpus(corpus_train))
        reports.conll_reporter_train.report(g_train)

        for conf in configs:
            current_config += 1
            start_time = time.time()

            # go to configuration, check which lexicon is needed, locate the lexicon in FS, load the lexicon
            lexicon = Lexicon()
            lexicon.load_from_list(sources.get_lexicon(conf.get_lexicon()))
            reports.lexicon_reporter.report(lexicon)

            # same for VSM
            vsm = VSM(sources.get_vsm(conf.get_vsm()))

            mapper = conf.get_feat_extractor()(vsm, lexicon)

            # prepare the data
            X_train, y_train, lemmapos_train, gid_train = mapper.get_matrix(g_train)

            # train the model
            clf = conf.get_clf()(lexicon, conf.get_all_unknown(), conf.get_num_components(),
                                 conf.get_max_sampled(), conf.get_num_epochs())
            clf.train(X_train, y_train, lemmapos_train)

            current_test = 0
            for corpus_test in CORPORA_TEST:
                score = Score()  # storage for scores
                score_v = Score()  # storage for verb-only scores
                score_known = Score()  # storage for known lemma-only scores
                start_time = time.time()
                reports.set_config(conf, corpus_train, corpus_test)
                current_test += 1

                # prepare test data
                g_test = get_graphs(*sources.get_corpus(corpus_test))
                reports.conll_reporter_test.report(g_test)
                X_test, y_test, lemmapos_test, gid_test = mapper.get_matrix(g_test)

                # predict and compare
                for x, y_true, lemmapos, gid, g in zip(X_test, y_test, lemmapos_test, gid_test, g_test):
                    y_predicted = clf.predict(x, lemmapos)
                    correct = y_true == y_predicted
                    score.consume(correct, lexicon.is_ambiguous(lemmapos), lexicon.is_unknown(lemmapos), y_true)
                    if lemmapos.endswith(".v"):
                        score_v.consume(correct, lexicon.is_ambiguous(lemmapos), lexicon.is_unknown(lemmapos), y_true)
                    if not lexicon.is_unknown(lemmapos):
                        score_known.consume(correct, lexicon.is_ambiguous(lemmapos), lexicon.is_unknown(lemmapos), y_true)
                    reports.result_reporter.report(gid, g, lemmapos, y_predicted, y_true, lexicon)

                reports.summary_reporter.report(corpus_train, corpus_test, conf, score, time.time() - start_time)
                reports.summary_reporter_v.report(corpus_train, corpus_test, conf, score_v, time.time() - start_time)
                reports.summary_reporter_known.report(corpus_train, corpus_test, conf, score_known, time.time() - start_time)

                print("============ STATUS: - train", current_train, "/", len(CORPORA_TRAIN),
                      "conf", current_config, "/", len(configs),
                      "test", current_test, "/", len(CORPORA_TEST))
# singleton interface:
_objectmanager = _ObjectManager()


def ObjectManager():
    return _objectmanager


#### some tests:
if __name__ == "__main__":
    try:
        pygame.init()
        screen = pygame.display.set_mode((640, 480))
        rm = ResourceManager().initialize()
        om = ObjectManager().initialize()
        ship = om.ships.sprites()[0]
        om.addAsteroid(MedAsteroid((200, 200), (1, -2)))
        om.addAsteroid(SmallAsteroid((300, 300), (-2, 2)))
        pygame.time.set_timer(USEREVENT + 3, 2000)
        pygame.time.set_timer(USEREVENT + 4, 3000)
        clock = pygame.time.Clock()
        done = False
        while not (done):
            clock.tick(30)
            keys = pygame.key.get_pressed()
            if keys[K_LEFT]:
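The pygame snippets above call ResourceManager() as a no-argument factory returning a shared object with attributes such as nebula, missile, ship, and the various sounds, and with an initialize() method. A minimal sketch of such a counterpart is shown below, assuming it follows the same module-level singleton pattern used for ObjectManager in the snippet above; the asset file names are placeholders, not taken from the original project.

# Sketch of a matching ResourceManager singleton; asset paths are hypothetical.
import os
import pygame


class _ResourceManager:
    def initialize(self):
        # load shared images and sounds once, after the display is set up
        self.nebula = pygame.image.load(os.path.join("data", "nebula.png")).convert()
        self.missile = pygame.image.load(os.path.join("data", "missile.png")).convert_alpha()
        self.missileSound = pygame.mixer.Sound(os.path.join("data", "missile.wav"))
        self.explosionSound = pygame.mixer.Sound(os.path.join("data", "explosion.wav"))
        return self


# singleton interface, mirroring the ObjectManager pattern above:
_resourcemanager = _ResourceManager()


def ResourceManager():
    return _resourcemanager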
                    for max_sampl in max_sampled:
                        for num_ep in num_epochs:
                            configs += [
                                Config(WsabieClassifier, SentenceBowMapper, lexicon, vsm, mwa,
                                       all_unk, num_comp, max_sampl, num_ep)
                            ]
                            configs += [
                                Config(WsabieClassifier, DependentsBowMapper, lexicon, vsm, mwa,
                                       all_unk, num_comp, max_sampl, num_ep)
                            ]

    print "Starting resource manager"
    sources = ResourceManager(HOME)

    print "Initializing reporters"
    reports = ReportManager(sources.out)

    print "Running the experiments!"
    runs = len(configs) * len(CORPORA_TRAIN) * len(CORPORA_TEST)
    print len(configs), "configurations, ", len(CORPORA_TRAIN) * len(CORPORA_TEST), " train-test pairs -> ", \
        runs, " runs"

    current_train = 0
    current_config = 0
    current_test = 0

    for corpus_train in CORPORA_TRAIN:
        current_train += 1
        current_config = 0