def __init__(self):
    # Leader daemon setup: load (or bootstrap from the manually created
    # samples) the leader-level and base-level JSON configs under /etc/sqd,
    # and prepare the job dispatch table for worker/client add/remove jobs.
    self.configFile = "configL.json"       # leader-specific config file name
    self.configDir = "/etc/sqd"            # directory holding all configs
    self.baseConfigFile = "config.json"    # base config shared by all roles
    self.config = dict()        # parsed contents of the base config
    self.leaderConfig = dict()  # parsed contents of the leader config
    self.workers = dict()  # mapping between ip and msgObject || each message object has inbox
    self.clients = dict()  # mapping between ip and msgObject || each message object has inbox
    # Dispatch table: job category -> action -> bound handler method.
    self.job_functions = {
        "worker": {"add": self.addWorker, "remove": self.removeWorker},
        "client": {"add": self.addClient, "remove": self.removeClient}
    }
    # checking config directory
    if utils.checkCreateDir(self.configDir):
        # Fresh node: neither config exists yet, so seed both from samples.
        if not os.path.exists(os.path.join(self.configDir, self.configFile)) \
           and not os.path.exists(os.path.join(self.configDir, self.baseConfigFile)):
            self.createFromSample("all")
            print "Config created from sample..."
        else:
            # NOTE(review): this branch also fires when BOTH configs exist
            # and are valid -- the warning may be misleading; confirm intent.
            print "Configs are messed up, please check and clean. "
    # Load the leader config, creating it from the sample if it is missing.
    if os.path.exists(os.path.join(self.configDir, self.configFile)):
        self.leaderConfig = utils.readJSON(os.path.join(self.configDir, self.configFile))
        print "Leader config updated..."
    else:
        self.createFromSample("leader")
        self.leaderConfig = utils.readJSON(os.path.join(self.configDir, self.configFile))
        print "Leader config updated..."
    # Load the base config, creating it from the sample if it is missing.
    if os.path.exists(os.path.join(self.configDir, self.baseConfigFile)):
        self.config = utils.readJSON(os.path.join(self.configDir, self.baseConfigFile))
        print "Base config updated..."
    else:
        self.createFromSample("main")
        self.config = utils.readJSON(os.path.join(self.configDir, self.baseConfigFile))
        print "Base config updated..."
    # [TODO] start messengers for each worker and clients available.
    pass
def createFromSample(self, type):
    """Seed the on-disk config(s) from the bundled sample files.

    type -- which config to (re)create: "leader", "main", or "all".
    """
    # [TODO] DO clean cluster start with only self as part of fresh cluster
    # For now a fresh start is initialized from the sample configs,
    # which are created manually.
    if type in ("leader", "all"):
        sample = utils.readJSON("config/configL_sample.json")
        utils.writeJSON(os.path.join(self.configDir, self.configFile), sample)
    if type in ("main", "all"):
        sample = utils.readJSON("config/config_sample.json")
        utils.writeJSON(os.path.join(self.configDir, self.baseConfigFile), sample)
def stage_changes(self, players):
    '''Stage players to the save file.

    Pops every player off *players* (consuming the list) and serializes
    each one into the JSON save file keyed by telegram_id: unknown players
    are appended, known players have their entry overwritten.
    '''
    load_file = utils.readJSON(self.saves_file)
    while players:
        player = players.pop()
        # Serialize once; the same record is used for both branches below.
        record = {
            'username': player.username,
            'achievements': player.achievements,
            'main_title': player.main_title,
            'level': player.level,
            'points': player.points,
            'titles': player.titles,
            'lattest_chat': player.lattest_chat
        }
        # BUG FIX: membership must be tested with the player's telegram_id
        # (the save-file key -- cf. load_player), not the Player object
        # itself; the original check never matched, so existing players
        # were always re-appended instead of updated.
        if not load_file or player.telegram_id not in load_file:
            utils.appendJSON(self.saves_file, {player.telegram_id: record})
        else:
            load_file[player.telegram_id] = record
            # NOTE(review): argument order here is (data, path), the reverse
            # of the writeJSON(path, data) usage elsewhere in this codebase --
            # confirm against the utils module this file actually imports.
            utils.writeJSON(load_file, self.saves_file)
def load_player(self, response, player_id, player_username):
    '''Return the Player for *player_id*, preferring the in-memory cache.

    Falls back to the saves file; if the player is unknown there as well,
    registers a fresh Player and announces it via *response*. Either way
    the resulting instance is appended to the cache before being returned.
    '''
    cached = self.get_cached_player(player_id, player_username)
    if cached:
        return cached
    saved = utils.readJSON(self.saves_file)
    if saved and player_id in saved:
        data = saved[player_id]
        player = Player(username=player_username,
                        telegram_id=player_id,
                        achievements=data["achievements"],
                        level=data["level"],
                        points=data["points"],
                        titles=data["titles"],
                        lattest_chat=data["lattest_chat"],
                        main_title=data["main_title"])
    else:
        response.append(f"@{player_username} registrado com sucesso")
        player = Player(player_username, player_id)
    self.cached_players.append(player)
    return player
def __init__(self):
    # Initializes squirrel daemon with necessary information
    # ---
    self.gameId = -1  # store in config
    self.configFile = "configC.json"       # client-specific config file name
    self.configDir = "/etc/sqd"            # directory holding all configs
    self.baseConfigFile = "config.json"    # base config shared by all roles
    self.config = dict()        # parsed contents of the base config
    self.clientConfig = dict()  # parsed contents of the client config
    self.msgObj = None  # This is msgServer, initialized when we start the worker
    # Dispatch table: job name -> bound handler method.
    self.jobMap = {"iamClient": self.iamClient}
    if utils.checkCreateDir(self.configDir):
        # Fresh node: neither config exists yet, so seed both from samples.
        if not os.path.exists(os.path.join(self.configDir, self.configFile)) \
           and not os.path.exists(os.path.join(self.configDir, self.baseConfigFile)):
            self.createFromSample("all")
            print "Config created from sample..."
        else:
            # NOTE(review): this branch also fires when BOTH configs exist
            # and are valid -- the warning may be misleading; confirm intent.
            print "Configs are messed up, please check and clean. "
    # Load the client config, creating it from the sample if it is missing.
    if os.path.exists(os.path.join(self.configDir, self.configFile)):
        self.clientConfig = utils.readJSON(os.path.join(self.configDir, self.configFile))
        print "Client config updated..."
    else:
        self.createFromSample("client")
        self.clientConfig = utils.readJSON(os.path.join(self.configDir, self.configFile))
        print "Client config updated from sample..."
    # Load the base config, creating it from the sample if it is missing.
    if os.path.exists(os.path.join(self.configDir, self.baseConfigFile)):
        self.config = utils.readJSON(os.path.join(self.configDir, self.baseConfigFile))
        print "Base config updated..."
    else:
        self.createFromSample("main")
        self.config = utils.readJSON(os.path.join(self.configDir, self.baseConfigFile))
        print "Base config updated from sample...."
    pass
def get_ranks():
    '''Build the ranking board: a header line followed by one line per
    player, ordered by points, highest first.'''
    players = utils.readJSON("./static/players.json")
    ordered = sorted(players.values(), key=itemgetter("points"), reverse=True)
    return ["Rank de players:\n"] + [
        f"{pos}) {p['username']}: {p['points']} xp"
        for pos, p in enumerate(ordered, start=1)
    ]
def __init__(self):
    # Worker daemon setup: load (or bootstrap from the manually created
    # samples) the worker-level and base-level JSON configs under /etc/sqd,
    # and prepare the job dispatch table.
    self.configFile = "configW.json"       # worker-specific config file name
    self.configDir = "/etc/sqd"            # directory holding all configs
    self.baseConfigFile = "config.json"    # base config shared by all roles
    self.config = dict()        # parsed contents of the base config
    self.workerConfig = dict()  # parsed contents of the worker config
    self.msgObj = None  # This is msgServer, initialized when we start the worker
    # Dispatch table: job name -> bound handler method.
    self.jobMap = {
        "addToCluster": self.addToCluster,
        "iamAlive": self.iamAlive,
        "addClient": self.addClient
    }
    if utils.checkCreateDir(self.configDir):
        # Fresh node: neither config exists yet, so seed both from samples.
        if not os.path.exists(os.path.join(self.configDir, self.configFile)) \
           and not os.path.exists(os.path.join(self.configDir, self.baseConfigFile)):
            self.createFromSample("all")
            print "Config created from sample..."
        else:
            # NOTE(review): this branch also fires when BOTH configs exist
            # and are valid -- the warning may be misleading; confirm intent.
            print "Configs are messed up, please check and clean. "
    # Load the worker config, creating it from the sample if it is missing.
    if os.path.exists(os.path.join(self.configDir, self.configFile)):
        self.workerConfig = utils.readJSON(os.path.join(self.configDir, self.configFile))
        print "Worker config updated..."
    else:
        self.createFromSample("worker")
        self.workerConfig = utils.readJSON(os.path.join(self.configDir, self.configFile))
        print "Worker config updated from sample..."
    # Load the base config, creating it from the sample if it is missing.
    if os.path.exists(os.path.join(self.configDir, self.baseConfigFile)):
        self.config = utils.readJSON(os.path.join(self.configDir, self.baseConfigFile))
        print "Base config updated..."
    else:
        self.createFromSample("main")
        self.config = utils.readJSON(os.path.join(self.configDir, self.baseConfigFile))
        print "Base config updated from sample...."
    pass
def main():
    '''Analyse Grid Search output files.

    Collects the per-benchmark score of every grid-search config from the
    run logs, averages the scores per config across benchmarks, then hands
    the aggregates to the estimators.
    '''
    ap = argparse.ArgumentParser(description='Analyse Grid Search output files')
    # ap.add_argument('--gsConfDir', default='./gsConfigs/')
    # ap.add_argument('--gsOutDir', default='./gs_out/')
    ap.add_argument('--gsVersion', type=int, default=2)
    args = ap.parse_args()
    globals().update(vars(args))
    gsConfDir = './gsConfigsV{}/'.format(gsVersion)
    gsOutDir = './gsOutV{}/'.format(gsVersion)
    gsParams = utils.Bunch(GridSearchParams.getGSParams(gsVersion))
    scoreLineToken = '#0 score '
    aggregate = []
    nBms = len(gsParams.bmNames)
    # nRuns counts runs over ALL benchmarks; explicit integer division.
    confIdRange = xrange(0, gsParams.nRuns // nBms)
    for configId in confIdRange:
        for bmName in gsParams.bmNames:
            gsConfName = 'gs_{}_{}'.format(configId, bmName)
            gsConfFile = utils.normPath('{}/{}.json'.format(gsConfDir, gsConfName))
            gsOutLogFile = utils.normPath('{}/{}/log'.format(gsOutDir, gsConfName))
            conf = utils.readJSON(gsConfFile)
            with open(gsOutLogFile, 'r') as file:
                # The score sits between the token and the trailing colon,
                # e.g. "... #0 score 1.234:...". Raises IndexError if the
                # log has no score line (fail fast on a broken run).
                scoreLine = [l for l in file.read().split('\n') if scoreLineToken in l]
                tokenClearLine = scoreLine[0][scoreLine[0].find(scoreLineToken) + len(scoreLineToken):]
                colonClearLine = tokenClearLine[:tokenClearLine.rfind(':')]
                score = float(colonClearLine)
            aggregate.append({'gsConfName': gsConfName, 'conf': conf, 'score': score})
    # Average the benchmark scores of each config.
    averaged = []
    for configId in confIdRange:
        gsConfHeader = 'gs_{}_'.format(configId)
        # BUG FIX: prefix-match instead of substring-match (a benchmark name
        # containing "gs_<id>_" would have polluted the group), and group
        # size must equal the number of benchmarks, not a hard-coded 3.
        gsGroup = [a for a in aggregate if a['gsConfName'].startswith(gsConfHeader)]
        assert(len(gsGroup) == nBms)
        avgScore = sum([a['score'] for a in gsGroup]) / float(nBms)
        averaged.append({
            'gsConfName': gsGroup[0]['gsConfName'],
            'conf': gsGroup[0]['conf'],
            'score': avgScore
        })
    scores = [a['score'] for a in averaged]
    chromoLenDevs = [a['conf']['chromoLenDev'] for a in averaged]
    gaSurviveRates = [a['conf']['gaSurviveRate'] for a in averaged]
    gaCrossRates = [a['conf']['gaCrossRate'] for a in averaged]
    estimateIndependentBest(confIdRange, scores, averaged, chromoLenDevs,
                            gaSurviveRates, gaCrossRates,
                            gsParams.chromoLenDevs, gsParams.gaSurviveRates,
                            gsParams.gaCrossRates, gsParams.pmRatios)
    estimateBestMedoid(confIdRange, scores, averaged, chromoLenDevs,
                       gaSurviveRates, gaCrossRates,
                       gsParams.chromoLenDevs, gsParams.gaSurviveRates,
                       gsParams.gaCrossRates, gsParams.pmRatios)
    utils.pauseCode()
def __init__(self, opt, subset):
    '''Load the dialogue dataset for one subset.

    opt    -- options bundle; reads inputJson, inputQues, inputImg, imgNorm.
    subset -- 'train' or 'val'.
    '''
    # read additional info like dictionary, etc
    print('DataLoader loading json file: %s' % opt.inputJson)
    info = utils.readJSON(opt.inputJson)

    # add <START> and <END> to vocabulary
    self.word2ind = info['word2ind']
    self.vocabSize = len(info['word2ind'])
    print('Vocabulary size (with <PAD>, <START> and <END>): %d' % self.vocabSize)

    # construct ind2word (inverse of word2ind)
    ind2word = {}
    for word, ind in info['word2ind'].iteritems():
        ind2word[ind] = word
    self.ind2word = ind2word

    # read questions, answers and options
    print('DataLoader loading h5 file: %s' % opt.inputQues)
    quesFile = h5py.File(opt.inputQues, 'r')

    # read question related information
    questions = np.array(quesFile['ques_' + subset])  # 82783 * 10 * 20
    question_lengths = np.array(quesFile['ques_length_' + subset])  # 82783 * 10
    options = np.array(quesFile['opt_' + subset])  # (82783, 10, 100)
    option_lengths = np.array(quesFile['opt_length_' + subset])  # (252298,)
    option_list = np.array(quesFile['opt_list_' + subset])  # (252298, 20)
    answers = np.array(quesFile['ans_' + subset])  # 82783 * 10 * 20
    answer_lengths = np.array(quesFile['ans_length_' + subset])  # 82783 * 10
    answer_ids = np.array(quesFile['ans_index_' + subset])  # 82783 * 10
    captions = np.array(quesFile['cap_' + subset])  # (82783, 40)
    caption_lengths = np.array(quesFile['cap_length_' + subset])  # (82783,)

    # BUG FIX: the image h5 file was opened twice; the first handle was
    # rebound without being closed and leaked. Open it exactly once.
    print('DataLoader loading h5 file: %s' % opt.inputImg)
    imgFile = h5py.File(opt.inputImg, 'r')
    print('Reading image features..')
    imgFeats = np.array(imgFile['images_' + subset + '_1'])

    # Normalize the image features (if needed)
    if opt.imgNorm:
        print('Normalizing image features..')
        imgFeats = imgFeats / np.expand_dims(
            a=np.linalg.norm(x=imgFeats, axis=1), axis=1)

    # done reading, close files
    quesFile.close()
    imgFile.close()

    # print information for data type
    self.num_dialogues = questions.shape[0]
    self.num_rounds = questions.shape[1]
    self.caption_max_length = captions.shape[1]
    self.question_max_length = questions.shape[2]
    self.answer_max_length = option_list.shape[1]
    print(
        '\n%s:\n\tNo. of dialogues: %d\n\tNo. of rounds: %d\n\tMax length of captions: %d\n\tMax length of questions: %d\n\tMax length of answers: %d\n'
        % (subset, self.num_dialogues, self.num_rounds,
           self.caption_max_length, self.question_max_length,
           self.answer_max_length))
    self.imgFeats = imgFeats
    self.captions = captions
    self.caption_lengths = caption_lengths
    self.questions = questions
    self.question_lengths = question_lengths
    self.answers = answers
    self.answer_lengths = answer_lengths
    self.answer_ids = answer_ids

    # Targets are the answers with <END> inserted right after the last real
    # token (hence max length 21 = 20 + 1).
    # np.int is a deprecated alias of the builtin int; use int directly.
    targets = np.zeros((self.num_dialogues, 10, 21), dtype=int)
    for d in range(self.num_dialogues):
        for r in range(10):
            targets[d][r] = np.insert(answers[d][r], answer_lengths[d][r],
                                      self.word2ind['<END>'])
    self.targets = targets
    self.options = options
    self.option_list = option_list
    self.option_lengths = option_lengths
import utils
from operator import itemgetter

# Static game data, loaded once at import time.
ACHIEVEMENTS = utils.readCSV("./static/achievements.csv")
TITLES = utils.readJSON("./static/titles.json")


def get_max_level(achievement_list):
    '''Return (max_level, max_points) for the given achievements.

    max_points is the sum of all positive achievement point values;
    max_level is how many times max_points can be halved before reaching 0,
    i.e. floor(log2(max_points)) + 1 for max_points >= 1, else 0.
    '''
    max_points = 0
    for achv in achievement_list:
        if int(achv['points']) > 0:
            max_points += int(achv['points'])
    # FIX: use integer floor division instead of the int(x / 2) float
    # round-trip -- float division loses precision for very large totals
    # (> 2**53). `x > 0` is equivalent to the old `x / 2 > 0` test for the
    # non-negative ints produced above, so the returned values are unchanged.
    max_level = 0
    tmp_max_points = max_points
    while tmp_max_points > 0:
        max_level += 1
        tmp_max_points //= 2
    return max_level, max_points


def get_levelup_requirements():
    '''Return the point thresholds for each level, lowest first.

    Thresholds are produced by repeatedly halving the global max_points,
    so the list grows roughly as powers of two up to max_points.
    '''
    max_level, max_points = get_max_level(ACHIEVEMENTS)
    level_info = list()
    # Same integer-halving fix as get_max_level; behavior is unchanged.
    while max_points > 0:
        level_info.append(max_points)
        max_points //= 2
    return list(reversed(level_info))