def process_response():
    global oncall_eta, team_status
    from_number = request.values.get('From', None)
    message_body = str(request.values.get('Body', None)).lower()
    user = get_user_from_number(from_number)
    if user is not None and user.netID in get_oncall_team().values():
        log("sender valid")
        command_string = message_body.split()[0]
        if represents_int(command_string):
            log("sender reporting eta")
            try:
                oncall_eta[user.netID] = int(command_string)
                team_status[user.netID] = -1
                client.messages.create(
                    to=from_number, from_=default_from_phone,
                    body="Thank you for your response. You said your eta is "
                         + str(command_string) + " minutes.")
            except:
                client.messages.create(
                    to=from_number, from_=default_from_phone,
                    body="Your text is invalid. Please type \"X\" where X is a whole number.")
        elif command_string == "status":
            log("sender requesting team status")
            response = "STATUS:"
            for net_id in oncall_eta:
                response += ", " + str(net_id) + " - " + str(oncall_eta[net_id])
            # Drop the ", " that follows "STATUS:" so the list reads cleanly.
            response = "STATUS: " + response[9:]
            client.messages.create(to=from_number, from_=default_from_phone, body=response)
        else:
            log("invalid command")
            client.messages.create(
                to=from_number, from_=default_from_phone,
                body="Your text is invalid. Please type \"X\" or \"status\"")
    else:
        log("sender invalid")
        client.messages.create(to=from_number, from_=default_from_phone, body="")
    return ""
def save_model(self, session, savefile=None):
    if savefile is None:
        savefile = self._model_savefile
    ensure_parent_directories(savefile)

    log("Saving model to: {}".format(savefile))
    saver = tf.train.Saver()
    saver.save(session, savefile)
def add_command(args, func):
    cmd = args['name']
    if cmd is None:
        print('No name found for a command...ignoring...')
        return

    registered_text = "Registered a command with the name '%s'" % cmd
    new_command = {cmd: func}
    commands.update(new_command)
    if 'permission' in args and args['permission'] is not None:
        new_permission = {cmd: args['permission']}
        command_permissions.update(new_permission)
        registered_text += " with the permission %s" % str(new_permission.get(cmd))
    if 'help' in args and args['help'] is not None:
        new_help = {cmd: args['help']}
        command_helps.update(new_help)
        registered_text += " the help text of %s" % str(new_help.get(cmd))
    if 'aliases' in args and args['aliases'] is not None:
        aliases = args['aliases'].split(", ")
        for alias in aliases:
            commands.update({alias: func})
        registered_text += " and the aliases of %s" % str(list(aliases))

    logger.log(registered_text)
    handler = Handler(func)
    func_handlers.update({func: handler})
def test(self, episodes_num=None, deterministic=True):
    if episodes_num is None:
        episodes_num = self.test_episodes_per_epoch

    test_start_time = time.time()
    test_rewards = []
    test_actions = []
    test_frameskips = []
    for _ in trange(episodes_num, desc="Testing", file=sys.stdout,
                    leave=False, disable=not self.enable_progress_bar):
        total_reward, actions, frameskips, _ = self.run_episode(
            deterministic=deterministic, return_stats=True)
        test_rewards.append(total_reward)
        test_actions += actions
        test_frameskips += frameskips
        self.doom_wrapper.reset()
        if self.local_network.has_state():
            self.local_network.reset_state()

    test_end_time = time.time()
    test_duration = test_end_time - test_start_time
    min_score = np.min(test_rewards)
    max_score = np.max(test_rewards)
    mean_score = np.mean(test_rewards)
    score_std = np.std(test_rewards)
    log(
        "TEST: mean: {}, min: {}, max: {}, test time: {}".format(
            green("{:0.3f}±{:0.2f}".format(mean_score, score_std)),
            red("{:0.3f}".format(min_score)),
            blue("{:0.3f}".format(max_score)),
            sec_to_str(test_duration)))
    return test_rewards, test_actions, test_frameskips
def load_native_xls():
    startRow = global_v.XLS_START_ROW
    maxColumns = global_v.XLS_MAX_COL
    symbolClasses = []

    logger.log("Opening file: " + str(global_v.NATIVE_FILE_PATH))
    wb = open_workbook(global_v.NATIVE_FILE_PATH)
    for s in wb.sheets():
        for row in range(startRow, s.nrows):
            characteristics = []
            for col in range(s.ncols):
                if maxColumns > 0 and col == maxColumns + 1:
                    break
                currentValue = float(str(s.cell(row, col).value).replace(',', '.'))
                # create new symbol class when the class id in column 0 changes
                if ((row != startRow and col == 0
                     and currentValue != symbolClasses[len(symbolClasses) - 1].name)
                        or (row == startRow and col == 0)):
                    symbolClass = SymbolClass(int(currentValue), ColorChooser().get_color())
                    symbolClasses.append(symbolClass)
                if col == 0:
                    continue
                characteristics.append(currentValue)
            # Wrap the row's characteristics and assign it randomly to the
            # class's learning or test set.
            distortedClass = SymbolClass(symbolClass.name, symbolClass.color)
            distortedClass.characteristicsValues = characteristics
            random.choice((symbolClass.learning_set, symbolClass.test_set)).append(distortedClass)
    return symbolClasses
def _print_train_log(self, scores, overall_start_time, last_log_time, steps):
    current_time = time.time()
    mean_score = np.mean(scores)
    score_std = np.std(scores)
    min_score = np.min(scores)
    max_score = np.max(scores)
    elapsed_time = time.time() - overall_start_time
    global_steps = self._global_steps_counter.get()
    local_steps_per_sec = steps / (current_time - last_log_time)
    global_steps_per_sec = global_steps / elapsed_time
    global_mil_steps_per_hour = global_steps_per_sec * 3600 / 1000000.0
    log(
        "TRAIN: {}(GlobalSteps), {} episodes, mean: {}, min: {}, max: {}, "
        "\nLocalSpd: {:.0f} STEPS/s GlobalSpd: "
        "{} STEPS/s, {:.2f}M STEPS/hour, total elapsed time: {}".format(
            global_steps,
            len(scores),
            green("{:0.3f}±{:0.2f}".format(mean_score, score_std)),
            red("{:0.3f}".format(min_score)),
            blue("{:0.3f}".format(max_score)),
            local_steps_per_sec,
            blue("{:.0f}".format(global_steps_per_sec)),
            global_mil_steps_per_hour,
            sec_to_str(elapsed_time)))
def _parse_stream(self, bot):
    stream = JSONStream(self.flow_user_api_key)
    gen = stream.fetch([self.channel], active=True)
    for data in gen:
        process_message = type(data) == dict and (
            data['event'] == "message" or data['event'] == "comment")
        if process_message and ("user" in data and self.user != data["user"]):
            self.spoken = False
            bot_input = BotInput()
            if type(data['content']) is dict:
                bot_input.message = data["content"]['text']
            elif "content" in data:
                bot_input.message = data["content"]
            else:
                break
            if "user" in data and int(data["user"]) > 0:
                try:
                    bot_input.nick = self.get_user_by_id(data["user"])["nick"]
                    self.user_id = data["user"]
                    if random.random() < (self.chattiness / 100):
                        logger.log("Randomly sending message to %s" % bot_input.nick)
                        self.private_message(data["user"],
                                             random.choice(self.responses["private_messages"]))
                except Exception as e:
                    logger.error(e)
                    self.say(bot.responses["stranger"])
            elif "external_name" in data:
                bot_input.nick = data["external_name"]
            else:
                bot_input.nick = "anonymous"
            bot_input.bot = bot
            self.user_nick = bot_input.nick
            marvin.process(bot_input, self)
def __min_max(nativeElements, MIN_INDEX=0, MAX_INDEX=1):
    # Get the dimension
    dim = len(nativeElements[0].learning_set[0].characteristicsValues)

    # Set up min/max values of each characteristic
    min_max_values = [[0] * 2 for _ in range(dim)]
    for min_max_value in min_max_values:
        min_max_value[MIN_INDEX] = 99999
        min_max_value[MAX_INDEX] = -99999

    logger.log_header("Normalizing Native", styles=[logger.LogHeaderStyle.SUB_HEADER])

    for nativeClass in nativeElements:
        # Find min and max of each characteristic over both the learning and test sets
        for element in nativeClass.learning_set + nativeClass.test_set:
            # Dimensions must fit
            if len(element.characteristicsValues) != dim:
                logger.log("Incorrect dimensions. Exiting...")
                sys.exit()
            for i in range(dim):
                charValue = element.characteristicsValues[i]
                if min_max_values[i][MIN_INDEX] >= charValue:
                    min_max_values[i][MIN_INDEX] = charValue
                if min_max_values[i][MAX_INDEX] <= charValue:
                    min_max_values[i][MAX_INDEX] = charValue

    """
    for i in range(0, dim):
        logger.log("Value #" + str(i), filename="test.txt")
        logger.log(min_max_values[i][MIN_INDEX], filename="test.txt", styles=[logger.LogStyle.NONE])
        logger.log(min_max_values[i][MAX_INDEX], filename="test.txt", styles=[logger.LogStyle.NONE])
    """
    return min_max_values
def _train_common(settings):
    run_id_string = "{}/{}".format(settings["network_class"],
                                   strftime(settings["date_format"]))
    if settings["run_tag"] is not None:
        run_id_string = str(settings["run_tag"]) + "/" + run_id_string
    if settings["logdir"] is not None:
        logfile = os.path.join(settings["logdir"], settings["scenario_tag"], run_id_string)
        setup_file_logger(logfile)
    settings["run_id_string"] = run_id_string

    log("Settings:")
    print_settings(settings)

    os.environ['TF_CPP_MIN_LOG_LEVEL'] = str(settings["tf_log_level"])

    model_dir = os.path.join(settings["models_path"], settings["scenario_tag"], run_id_string)
    model_file = os.path.join(model_dir, "model")
    settings_output_file = os.path.join(model_dir, SETTINGS_SAVEFILE)
    ensure_parent_directories(settings_output_file)
    log("Saving settings to: {}".format(settings_output_file))
    with open(settings_output_file, "w") as f:
        ruamel.yaml.YAML().dump(settings, f)

    return model_file
def test_dqn():
    args = parse_test_dqn_args()
    settings_file = os.path.join(args.model, SETTINGS_SAVEFILE)
    modelfile = os.path.join(args.model, MODEL_FILE)
    settings = load_settings(DEFAULT_DQN_SETTINGS_FILE, [settings_file])
    _test_common(args, settings)

    from _dqn_algo import DQN
    dqn = DQN(**settings)

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    session = tf.InteractiveSession(config=config)
    session.run(tf.global_variables_initializer())
    dqn.load_model(session, modelfile)

    log("\nScores: ")
    scores = []
    for _ in range(args.episodes_num):
        reward = dqn.run_test_episode(session)
        scores.append(reward)
        print("{0:.3f}".format(reward))
    print()
    log("\nMean score: {:0.3f}".format(np.mean(scores)))
def init(_problem_size, _header=""):
    global problem_size
    problem_size = _problem_size
    global current_size
    current_size = 0
    global init_time
    init_time = time.time()
    global header
    header = _header

    # Get the dimensions of the console
    columns, rows = shutil.get_terminal_size((80, 20))
    column_count = int(columns)

    # Make sure the progress bar does not wrap onto a new line
    global MAX_BAR_COUNT
    if column_count < MAX_BAR_COUNT - 10:
        MAX_BAR_COUNT = column_count - 10

    logger.log("PROGRESS BAR INIT: " + str([header]),
               styles=[logger.LogStyle.SEPARATOR_START], spaces=3)
def validate(self, classify_path=None):
    total_score = 0.0
    K = 3
    if classify_path is None:
        for filename in [f for f in os.listdir(self.validation_path)
                         if os.path.isfile(self.validation_path + f)]:
            logger.log("classifying %s" % filename)
            scores = audio.classify(self.train_set, self.validation_path + filename)
            label = filename.replace('_', ' ').replace('.mp3', '').title()
            top_pick = scores[0]
            second = scores[1]
            third = scores[2]
            fourth = scores[3]
            if top_pick[1] == label:
                confidence_1 = (float(second[0]) - float(top_pick[0])) / float(second[0])
                confidence_2 = (float(third[0]) - float(top_pick[0])) / float(third[0])
                confidence_3 = (float(fourth[0]) - float(top_pick[0])) / float(fourth[0])
                total_score += (confidence_1 + confidence_2 + confidence_3) / float(K)
                print "classify correct, confidence: ", (confidence_1 + confidence_2 + confidence_3) / float(K)
            else:
                print "incorrect classify %s %s" % (top_pick[1], label)
                print " first > ", top_pick
                print " second > ", second
    else:
        logger.log(audio.classify(self.train_set, self.validation_path + classify_path))

    self.train_set.default_validation_score = total_score
    self.train_set.save()
    return total_score
def cluster_evaluation(max_k, symbolClasses):
    start_k = 2
    best_ks = []
    for cl in symbolClasses:
        prediction_str = []
        data = cl.learning_set
        logger.log("Symbol: " + str([cl.name]))
        max_ps = 0
        best_k = 1
        for k in range(start_k, max_k + 1):
            p_bar.init(0, "ps(" + str(k) + ")")
            avg_ps = 0
            for j in range(0, global_v.MAX_ITER_CLUS_EVALUATION):
                ps = prediction_strength(data, k)
                avg_ps += ps
                prediction_str.append(ps)
            avg_ps /= global_v.MAX_ITER_CLUS_EVALUATION
            logger.log("prediction_strength(" + str(k) + ") = " + str(avg_ps))
            p_bar.finish()
            if max_ps <= avg_ps:
                max_ps = avg_ps
                best_k = k
        best_ks.append(best_k)
    return best_ks
def save_model(self):
    savedir = os.path.dirname(self._model_savefile)
    if not os.path.exists(savedir):
        log("Creating directory: {}".format(savedir))
        os.makedirs(savedir)

    log("Saving model to: {}".format(self._model_savefile))
    saver = tf.train.Saver(self.local_network.get_params())
    saver.save(self._session, self._model_savefile)
def work(self, status):
    message = self.messages.startSupplyMessage()
    log(message, Types.verbose)
    if self.stateController.currentState.existsShipNeedSupply():
        if self.stateController.currentState.key != StateKey.combatPreparationQuickSupply:
            self.stateController.transit(Transitions.selectQuickSupply)
        self.stateController.behave(Behaviors.supplyAllShips)
    return status
def get_users(self):
    endpoint = "users"
    url = "https://api.flowdock.com/v1/%s"
    user_endpoint = url % endpoint
    logger.log("hitting endpoint: %s" % user_endpoint)
    self.users = web.get_json(user_endpoint, self.username, self.password)
    return self.users
def _belongs_to_native(point, nativeElements, classify_geometry):
    if classify_geometry == CLASSIFY_CUBOID:
        return _belongs_to_cuboid(point, nativeElements)
    elif classify_geometry == CLASSIFY_ELLIPSOID:
        return _belongs_to_ellipsoid(point, nativeElements)
    else:
        logger.log("No such test, shutting down...")
        sys.exit()
def main(args):
    parse_args(args)
    if sys.version_info < (2, 7):
        sys.exit("Sorry, requires Python 2.7.")
    synced.initialize(True)
    logger.log("End of main", logger.DEBUG)
def workCombat(self, status):
    if self.stateController.currentState.key != StateKey.sailingOffCombat:
        self.stateController.transit(Transitions.selectCombat)
    message = self.messages.startSelectStateMessage(
        self.task.name, self.stateController.currentState.key)
    log(message, Types.verbose)
    self.stateController.transit(Transitions.selectStage)
    return Status.normal
def run(self):
    # TODO this method is ugly, make it nicer ...and it's the same as above.... really TODO!!
    # Basically code copied from base class with unfreezing
    try:
        overall_start_time = time.time()
        last_log_time = overall_start_time
        local_steps_for_log = 0
        next_target_update = self.frozen_global_steps
        while self._epoch <= self._epochs:
            steps = self.make_training_step()
            local_steps_for_log += steps
            global_steps = self._global_steps_counter.inc(steps)

            # Updating target network:
            if self.unfreeze_thread:
                # TODO this check is dangerous
                if global_steps >= next_target_update:
                    next_target_update += self.frozen_global_steps
                    if next_target_update <= global_steps:
                        # TODO use warn from the logger
                        logging.warning(yellow(
                            "Global steps ({}) <= next target update ({}).".format(
                                global_steps, next_target_update)))
                    self._session.run(self.global_network.ops.unfreeze)

            # Logs & tests
            if self.local_steps_per_epoch * self._epoch <= self.local_steps:
                self._epoch += 1

                if self.thread_index == 0:
                    self._print_train_log(self.train_scores, overall_start_time,
                                          last_log_time, local_steps_for_log)
                    if self._run_tests:
                        test_scores, actions, frameskips = self.test(
                            deterministic=self.deterministic_testing)
                    if self.write_summaries:
                        train_summary = self._session.run(
                            self._summaries, {self.scores_placeholder: self.train_scores})
                        self._train_writer.add_summary(train_summary, global_steps)
                        if self._run_tests:
                            test_summary = self._session.run(
                                self._summaries, {self.scores_placeholder: test_scores})
                            self._test_writer.add_summary(test_summary, global_steps)

                    last_log_time = time.time()
                    local_steps_for_log = 0
                    log("Learning rate: {}".format(self._session.run(self.learning_rate)))

                    # Saves model
                    if self._epoch % self.save_interval == 0:
                        self.save_model()
                    log("")

                self.train_scores = []
                self.train_actions = []
                self.train_frameskips = []
    except (SignalException, ViZDoomUnexpectedExitException):
        threadsafe_print(red("Thread #{} aborting(ViZDoom killed).".format(self.thread_index)))
def finish():
    delta_time = time.time() - init_time
    msg = str([header]) + "\n" + "Finished after: {0:.3f} sec".format(delta_time)
    print()
    logger.log("PROGRESS BAR FINISH: " + msg,
               styles=[logger.LogStyle.SEPARATOR_END])
def work(self, status):
    if self.stateController.currentState.key != StateKey.selectFormation:
        return status
    formationIndex = self.task.formation[status]
    message = self.messages.selectFormation(Transitions(formationIndex + 15))
    log(message, Types.verbose)
    self.stateController.transit(Transitions(formationIndex + 15))
    return status
def __norm(x, min, max):
    x_norm = (x - min) / (max - min)
    if x_norm > 1.0 or x_norm < 0.0:
        logger.log("Normalization failed. Exiting...")
        sys.exit()
    return x_norm
def upload_image(self, file_path, caption='', hashtags=None):
    full_caption = self._create_full_caption(caption, hashtags)
    self._instagramApi.uploadPhoto(file_path, caption=full_caption, upload_id=None)
    logger.log('instagramprocessor.py', 'Uploading picture: {:s}'.format(file_path))
    logger.log('instagramprocessor.py',
               'Response: {:s}'.format(self._instagramApi.LastResponse.text))
def save_model(self, session, savefile=None):
    if savefile is None:
        savefile = self._model_savefile

    savedir = os.path.dirname(savefile)
    if not os.path.exists(savedir):
        log("Creating directory: {}".format(savedir))
        os.makedirs(savedir)

    log("Saving model to: {}".format(savefile))
    saver = tf.train.Saver()
    saver.save(session, savefile)
def say(self, msg):
    if not msg or len(msg) < 1:
        return
    if hasattr(self, 'user_nick'):
        msg = msg.format(user_nick='@' + self.user_nick)
    logger.log("sending message %s" % msg[:20])
    url = "%srooms/%s/chatMessages" % (self.api_root, self.room_id)
    data = {"text": msg}
    web.post_json_secure(url, self.token, data)
    self.spoken = True
def on_chat_message(self, msg):
    raw_text = msg.get('text')
    if Rules.url_allowed(raw_text):
        try:
            self._save_potential_meme(url=textutils.extract_url(text=raw_text))
            current_count = len(self._meme_dao.find_by_site(Site.TELEGRAM))
            message = 'Memebot found a potential image. Total images found %s' % current_count
        except DuplicateKeyError:
            message = 'Memebot found a duplicate image. Bad bad bad!!!'
        self.sender.sendMessage(message)
        logger.log('telegramparser.py', message)
def behave(self, key):
    if key not in self.currentState.behavior:
        message = self.messages.invalidTransitionOrBehavior(key)
        log(message, Types.error)
        exit(1)
    mouseInfo = self.currentState.behavior[key]
    if len(mouseInfo) == 3:
        self.mouseController.clickAndNoStageChange(mouseInfo)
    elif len(mouseInfo) == 2:
        self.mouseController.scrollAndNoStageChange(mouseInfo)
    self.handleStateChange()
def __rat_l_evaluation(training_set, start_k, end_k):
    logger.log_header("Ratkowsky-Lance",
                      filename=logger.LOG_CLUSTER_FILE_NAME,
                      styles=[logger.LogHeaderStyle.SUB_HEADER])
    Results = rat_l.compute(training_set, start_k, end_k)
    for i in range(0, len(Results)):
        logger.log("rat_l(" + str(i + start_k) + ") = " + str(Results[i]),
                   filename=logger.LOG_CLUSTER_FILE_NAME,
                   styles=[logger.LogStyle.NONE])
def __pbm_evaluation(training_set, start_k, end_k):
    logger.log_header("PBM",
                      filename=logger.LOG_CLUSTER_FILE_NAME,
                      styles=[logger.LogHeaderStyle.SUB_HEADER])
    Results = pbm.compute(training_set, start_k, end_k)
    for i in range(0, len(Results)):
        logger.log("pbm(" + str(i + start_k) + ") = " + str(Results[i]),
                   filename=logger.LOG_CLUSTER_FILE_NAME,
                   styles=[logger.LogStyle.NONE])
def work(self, status):
    if self.stateController.currentState.key != StateKey.flagShipSeriousDamaged:
        return status
    message = self.messages.retreatFlagShipDamaged()
    log(message, Types.verbose)
    self.stateController.transit(Transitions.retreatAtFlagShipSeriousDamaged)
    self.stateController.transit(Transitions.sailingOff)
    if self.stateController.currentState.key == StateKey.sailingOffExpidition:
        self.stateController.transit(Transitions.selectCombat)
    return Status.normal
def process(bot_input, bot_output):
    input_command = bot_input["message"].lower()
    bot_input.direct_message = bot_output.nick.lower().replace('@', '') in input_command

    if bot_output.master.lower() in bot_input.nick.lower():
        if "take off" in input_command or "go home" in input_command or "go away" in input_command:
            try:
                bot_output.say(random.choice(bot_output.responses["death_messages"]))
            except:
                logger.log("Too stupid to quit.")
            sys.exit("later")
        if input_command == "shut up":
            bot_output.say("SHUTTING UP")
            time.sleep(30)
            return

    if input_command.startswith("."):
        input_command = input_command[1:]
        pieces = input_command.split(' ')
        command = match_command(list(bot_input.bot.commands), pieces[0])
        if isinstance(command, list):  # multiple potential matches
            bot_output.say("did you mean %s or %s?" % (', '.join(command[:-1]), command[-1]))
        elif command in bot_input.bot.commands:
            func, args = bot_input.bot.commands[command]
            try:
                if func.__name__ in bot_input.bot.credentials:
                    bot_input.credentials = bot_input.bot.credentials[func.__name__]
                input_string = " ".join(pieces[1:])
                bot_input.input_string = input_string
                func(bot_input, bot_output)
            except Exception as e:
                logger.log("Almost died from command: %s" % e)
                bot_output.say("Almost died from command: %s" % e)
        else:
            bot_output.say("I don't recognize the command: %s" % input_command)
    else:
        for func, args in bot_input.bot.plugs['regex']:
            m = args['re'].search(bot_input["message"])
            if m and not bot_output.spoken:
                if args['run_always'] or bot_input.direct_message or bot_output.chattiness > random.random():
                    # todo: update groupdict with inp
                    bot_input.groupdict = m.groupdict
                    bot_input.inp = m.groupdict()
                    bot_input.input_string = input_command
                    if func.__name__ in bot_input.bot.credentials:
                        bot_input.credentials = bot_input.bot.credentials[func.__name__]
                    func(bot_input, bot_output)

    if bot_input.direct_message and not bot_output.spoken:
        bot_output.say(random.choice(bot_output.responses["answers"]))
def transit(self, key):
    if key not in self.currentState.transition:
        message = self.messages.invalidTransitionOrBehavior(key)
        log(message, Types.error)
        exit(1)
    resultStates, clickInfo = self.currentState.transition[key]
    self.mouseController.clickAndWaitUntilStateChange(
        clickInfo,
        self.currentState.key,
        resultStates,
    )
def fetch(self):
    for line in self.stream.iter_lines(self.STREAM_CHUNK_SIZE, decode_unicode=True):
        if line and len(line.strip()) and line != ':':
            try:
                result = json.loads(line.decode('utf-8'))
                yield result
            except Exception as e:
                print("error parsing line")
                print(line)
                print(traceback.format_exc())
                for info in sys.exc_info():
                    logger.log("error info: " + str(info))
def say(self, msg):
    if not msg or len(msg) < 1:
        return
    if hasattr(self, 'user_nick'):
        msg = self.filter_words(msg).format(self.user_nick)
    logger.log("sending message %s" % msg[:20])
    channel_pieces = self.channel.split("/")
    url = "https://api.flowdock.com/flows/%s/%s/messages" % (channel_pieces[0], channel_pieces[1])
    data = {"event": "message", "content": msg}
    web.post_json(url, self.username, self.password, **data)
    self.spoken = True
def repairDamagedShips(self, damagedShips):
    if damagedShips:
        message = self.messages.existsDamagedShipsWarning(damagedShips)
        log(message, Types.warning)
        if self.stateController.currentState.key != StateKey.combatPreparationQuickRepair:
            self.stateController.transit(Transitions.selectQuickRepair)
        for shipPos in damagedShips:
            self.stateController.behave(Behaviors(shipPos + 6))
        return Status.normal

    message = self.messages.noDamagedShipsMessage()
    log(message, Types.verbose)
    return Status.normal
def run(self):
    # TODO this method is ugly, make it nicer
    try:
        overall_start_time = time.time()
        last_log_time = overall_start_time
        local_steps_for_log = 0
        while self._epoch <= self._epochs:
            steps = self.make_training_step()
            local_steps_for_log += steps
            global_steps = self._global_steps_counter.inc(steps)

            # Logs & tests
            if self.local_steps_per_epoch * self._epoch <= self.local_steps:
                self._epoch += 1

                if self.thread_index == 0:
                    self._print_train_log(self.train_scores, overall_start_time,
                                          last_log_time, local_steps_for_log)
                    if self._run_tests:
                        test_scores = self.test(deterministic=self.deterministic_testing)
                    if self.write_summaries:
                        train_summary = self._session.run(
                            self._summaries, {self.scores_placeholder: self.train_scores})
                        self._train_writer.add_summary(train_summary, global_steps)
                        if self._run_tests:
                            test_summary = self._session.run(
                                self._summaries, {self.scores_placeholder: test_scores})
                            self._test_writer.add_summary(test_summary, global_steps)

                    last_log_time = time.time()
                    local_steps_for_log = 0
                    log("Learning rate: {}".format(self._session.run(self.learning_rate)))

                    # Saves model
                    if self._epoch % self.save_interval == 0:
                        self.save_model()
                    log("")

                self.train_scores = []
    except (SignalException, ViZDoomUnexpectedExitException):
        threadsafe_print(red("Thread #{} aborting(ViZDoom killed).".format(self.thread_index)))
def handleBattleResult(self, status):
    if self.stateController.currentState.key != StateKey.battleResult:
        return status
    self.stateController.transit(Transitions.nextState)
    if self.stateController.currentState.key == StateKey.slavagedShip:
        self.stateController.transit(Transitions.nextState)
    if self.stateController.currentState.key == StateKey.newShip:
        message = self.messages.lockNewShip()
        log(message, Types.verbose)
        self.stateController.transit(Transitions.confirmAtNewShip)
    if self.stateController.currentState.key == StateKey.sailingOffCombat:
        return Status.normal
    return status
def register_event(args, func):
    name = args['name']
    regex = args['regex']
    if name is None or regex is None:
        print "No %s found for an event...skipping..." % ('regex' if regex is None else 'name')
        return

    registered_text = "Registered an event with the name '%s' and the regex: %s" % (name, regex)
    new_event = {name: func}
    events.update(new_event)
    new_regex = {name: re.compile(regex)}
    event_regex.update(new_regex)
    handler = Handler(func)
    func_handlers.update({func: handler})
    logger.log(registered_text)
def test_async(q_learning, settings, modelfile, eps, deterministic=True):
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    session = tf.InteractiveSession(config=config)

    if q_learning:
        agent = ADQNLearner(thread_index=0, session=session, **settings)
    else:
        agent = A3CLearner(thread_index=0, session=session, **settings)

    log("Initializing variables...")
    session.run(tf.global_variables_initializer())
    log("Initialization finished.\n")
    agent.load_model(session, modelfile)

    log("\nScores: ")
    scores = []
    for _ in range(eps):
        reward = agent.run_episode(deterministic=deterministic)
        scores.append(reward)
        print("{0:.3f}".format(reward))
    print()
    log("\nMean score: {:0.3f}".format(np.mean(scores)))
def load_from_folder(self, resample, use_default_labels):
    os.chdir(self.folder_path)
    training_files = []
    if use_default_labels:
        for filename in [f for f in os.listdir('.') if os.path.isfile(f)]:
            label = filename.replace('_', ' ').replace('.mp3', '').title()
            training_files.append((label, filename))
            logger.log("Using %s with label %s" % (filename, label))
    else:
        resample_prefix = 'resample_'
        for filename in [f for f in os.listdir('.') if os.path.isfile(f)]:
            print " enter label for '" + filename + "' >"
            label = raw_input()
            if len(label) == 0:
                label = filename
            target = filename
            if resample:
                target = resample_prefix + filename
                try:
                    subprocess.call(['/usr/local/bin/lame', '-V5', '--vbr-new',
                                     '--resample', '16', filename, target],
                                    stdout=subprocess.PIPE)
                    generated_name = label.strip().replace(" ", "_").lower() + '.mp3'
                    subprocess.call(['rm', filename])
                    subprocess.call(['mv', target, generated_name])
                    training_files.append((label, generated_name))
                except:
                    logger.warn(" could not resample file '%s'!" % filename)
            else:
                training_files.append((label, target))
        print 'finished resampling!'

    self.train_set.data = audio.train(training_files, self.folder_path,
                                      self.train_set.config, self.train_set.name,
                                      conn_str)
    self.train_set.s3_links = []
    self.train_set.status = "trained"
    self.train_set.save()
    return self.train_set._id
def main():
    logger.log('starting')
    bot = BotOutput(config)
    logger.log('running')
    bot.run()
def train(tracks, training_audio_path, conf, set_name, conn_str):
    logger.log('begin training sequence on %s ' % training_audio_path)

    # set up all model parameters
    num_components = conf['num_components']
    em_epsilon = conf['em_epsilon']
    em_iter = conf['em_iter']
    cv_type = conf['cv_type']
    audio_freq = conf['audio_freq']
    mfcc_step_size = conf['mfcc_step_size']
    mfcc_block_size = conf['mfcc_block_size']
    frames_per_second = audio_freq / mfcc_step_size
    audio_block_size = frames_per_second * conf['sample_length']
    audio_step_size = frames_per_second * conf['sample_step_size']

    # set up Yaafe
    afp, engine = get_engine(audio_freq, mfcc_block_size, mfcc_step_size)

    connection = conn.get()
    connection.register([TrackData])

    result = []
    for label, filename in tracks:
        logger.log('begin processing %s.' % filename)
        afp.processFile(engine, training_audio_path + '/' + filename)
        output = engine.readAllOutputs()['mfcc']
        mfcc = output
        num_samples = mfcc.shape[0]
        track_gmms = []
        track = connection.TrackData()
        track.label = label
        track.set = set_name
        for index in range(0, (num_samples - audio_block_size), audio_step_size):
            with warnings.catch_warnings(record=True) as w:
                mfcc_data = mfcc[index:index + audio_block_size]
                classifier = GMM(n_components=num_components, cvtype=cv_type)
                classifier.fit(mfcc_data, thresh=em_epsilon, n_iter=em_iter)
                means = classifier._get_means().tolist()
                if cv_type == 'diag':
                    covars = [np.diag(diag).tolist() for diag in classifier._get_covars()]
                else:
                    covars = classifier._get_covars().tolist()
                weights = classifier._get_weights().tolist()
                for weight in weights:
                    if math.isnan(weight):
                        logger.warn("Warning: invalid GMM entry, skipping at label: %s, index %s"
                                    % (label, str(index)))
                        break
                else:
                    # for/else: only store the GMM if no weight was NaN
                    track_gmms.append([means, covars, weights])
        track.data = track_gmms
        track.save()
        result.append(track._id)
    return result
def run_bot():
    bot = Bot()
    bot._config_mtime = 0

    parser = argparse.ArgumentParser(description="It's a Bot. Nuff Said")
    parser.add_argument('-a', '--adapter',
                        help='Adapter (console or flowbot). Default is console.')
    args = parser.parse_args()
    adapter_name = args.adapter
    if not adapter_name or not hasattr(adapters, adapter_name.lower()):
        logger.log("Adapter not found. Try console or flowbot. Using console")
        adapter_name = "console"
    adapter_class = getattr(adapters, adapter_name.lower())

    sys.path += ['plugins']  # so 'import hook' works without duplication
    os.chdir(sys.path[0] or '.')  # do stuff relative to the install directory

    print('Loading plugins')
    config.config(bot)
    reload.reload(bot, init=True)

    if not hasattr(bot, 'config'):
        logger.log("no config found for bot", logging.ERROR)
        exit()

    logger.log("Connecting")
    bot.conns = {}
    bot.credentials = {}
    try:
        if adapter_name in bot.config['adapters']:
            for room, conf in bot.config['adapters'][adapter_name]["rooms"].items():
                conf["responses"] = personality.load_personality(conf["personality"].lower())
                bot.conns[room] = adapter_class.BotOutput(conf)
        else:
            error_message = "Adapter not found in config: {0}".format(adapter_name)
            print(error_message)
            logger.error(error_message)
            sys.exit()
        for name, conf in bot.config['credentials'].items():
            bot.credentials[name] = conf
    except Exception as e:
        logger.log("malformed config file %s" % e, logging.ERROR)
        sys.exit()

    bot.persist_dir = os.path.abspath('persist')
    if not os.path.exists(bot.persist_dir):
        os.mkdir(bot.persist_dir)

    logger.log("Running main loop")
    last_error = datetime(2000, 1, 1)
    last_run = datetime.now()
    while (last_error - last_run).seconds > 10:
        reload.reload(bot)   # these functions only do things
        config.config(bot)   # if changes have occurred
        for conn, adapter in bot.conns.items():
            try:
                last_run = datetime.now()
                adapter.run(bot)
            except SystemExit as ex:
                last_error = last_run
            except Exception as e:
                logger.error(traceback.format_exc())
                for info in sys.exc_info():
                    logger.log("error info: " + str(info))
                # logger.log("Unexpected error: %s" % sys.exc_info())
                last_error = datetime.now()
        logger.log("So tired... sleeping for 5 seconds")
        time.sleep(5)
def private_message(self, user, msg): logger.log("sending private message %s" % msg[:20]) # TODO: Private message (listen and send) url = "%srooms/%s/chatMessages" % (self.api_root, self.room_id) data = {"event": "message", "content": msg} web.post_json(url, self.username, self.password, **data)
def get_users(self):
    user_endpoint = "%srooms/%s/users" % (self.api_root, self.room_id)
    logger.log("hitting endpoint: %s" % user_endpoint)
    self.users = web.get_json_secure(user_endpoint, self.token)
    return self.users
def private_message(self, user, msg): logger.log("sending private message %s" % msg[:20]) url = "https://api.flowdock.com/private/{0}/messages".format(user) data = {"event": "message", "content": msg} web.post_json(url, self.username, self.password, **data) self.spoken = True