def get_kafka_config():
    """Read Kafka settings from the ES config file.

    Returns:
        tuple: (topic, bootstrap_server) read from the 'kafka' section.
    """
    config_file = get_config_file(ES_CONFIG_NAME)
    init_section = 'kafka'
    topic = config.read_config(config_file, init_section, 'topic')
    bootstrap_server = config.read_config(config_file, init_section,
                                          'bootstrap_server')
    # removed an unreachable `pass` that followed this return
    return topic, bootstrap_server
def get_redis_config():
    """Read Redis connection settings from the ES config file.

    Returns:
        tuple: (host, password, port, db) — note the order: password comes
        second, matching what existing callers unpack.
    """
    config_file = get_config_file(ES_CONFIG_NAME)
    init_section = 'redis'
    host = config.read_config(config_file, init_section, 'host')
    port = config.read_config(config_file, init_section, 'port')
    db = config.read_config(config_file, init_section, 'db')
    password = config.read_config(config_file, init_section, 'password')
    # removed an unreachable `pass` that followed this return
    return host, password, port, db
def get_mysql_config(biz_type):
    """Read MySQL connection settings for one business type.

    Args:
        biz_type: Suffix selecting the ``mysql_<biz_type>`` config section.

    Returns:
        tuple: (host, port, db, user, passwd).
    """
    config_file = get_config_file(ES_CONFIG_NAME)
    init_section = "mysql_" + biz_type
    host = config.read_config(config_file, init_section, 'host')
    port = config.read_config(config_file, init_section, 'port')
    db = config.read_config(config_file, init_section, 'db')
    user = config.read_config(config_file, init_section, 'user')
    passwd = config.read_config(config_file, init_section, 'passwd')
    # removed an unreachable `pass` that followed this return
    return host, port, db, user, passwd
def send_verification(self, id, email, first_name):
    """Email an account-verification link to a newly registered customer.

    Args:
        id: Customer id embedded in the verification URL.
        email: Recipient address.
        first_name: Used in the greeting line.
    """
    # get config entries
    config = read_config()
    app_host = config.get('app', 'host')
    email_host = config.get('gmail', 'host')
    sender = config.get('gmail', 'sender')
    username = config.get('gmail', 'username')
    password = config.get('gmail', 'password')
    # send email — quit() now runs even if starttls/login/sendmail raises,
    # so the SMTP connection is never leaked
    server = smtplib.SMTP(email_host)
    try:
        server.ehlo()
        server.starttls()
        server.login(username, password)
        message_body = "\r\n".join([
            "From: " + sender,
            "To: " + email,
            "Subject: eKonek Account Verification",
            "",
            "Hi " + first_name,
            "",
            "Thank you for registering at eKonek. " +
            "Please verify your account by clicking this link: " +
            "http://" + app_host + "/customer/" + str(id) + "/verify",
            "",
            "Regards,",
            "",
            "eKonek Team"
        ])
        server.sendmail(sender, email, message_body)
    finally:
        server.quit()
def main(config_path, out_infix, slot):
    """Run slot evaluation on the test split and log the overall MRR."""
    logger.info('loading config file...')
    game_config = read_config(config_path, out_infix)

    # Work around multi-processing bugs in `list(dataloader)`:
    # https://github.com/pytorch/pytorch/issues/973
    torch.multiprocessing.set_sharing_strategy('file_system')

    # make runs reproducible
    set_seed(game_config['global']['random_seed'])

    logger.info('reading dataset...')
    dataset = DocPTReader(game_config)

    # fixed evaluation arguments
    batch_size, num_workers, test_iters = 1, 5, 500

    batch_test_data = dataset.get_dataset_test_slot(
        slot, batch_size, num_workers, test_iters)
    docs_name = dataset.doc_reader.get_all_names()

    logger.info('start testing...')
    with torch.no_grad():
        test_mrr = eval_on_rep(docs_name, batch_test_data)
    logger.info("test_all_mrr=%.2f%%" % test_mrr)
    logger.info('finished.')
def send_password(self, id, email, first_name):
    """Email a password-reset link to a customer who requested one.

    Args:
        id: Customer id embedded in the reset URL.
        email: Recipient address.
        first_name: Used in the greeting line.
    """
    # get config entries
    config = read_config()
    app_host = config.get('app', 'host')
    email_host = config.get('gmail', 'host')
    sender = config.get('gmail', 'sender')
    username = config.get('gmail', 'username')
    password = config.get('gmail', 'password')
    # send email — quit() now runs even if starttls/login/sendmail raises,
    # so the SMTP connection is never leaked
    server = smtplib.SMTP(email_host)
    try:
        server.ehlo()
        server.starttls()
        server.login(username, password)
        message_body = "\r\n".join([
            "From: " + sender,
            "To: " + email,
            "Subject: eKonek Forgot Password",
            "",
            "Hi " + first_name,
            "",
            "You have requested to reset your password. " +
            "To continue to reset your password please click this link: " +
            "http://" + app_host + "/reset/" + str(id),
            "",
            "Regards,",
            "",
            "eKonek Team"
        ])
        server.sendmail(sender, email, message_body)
    finally:
        server.quit()
def get_es_config():
    """Read the Elasticsearch host:port entry from the ES config file.

    Returns:
        The 'host_port' value from the 'es' section.
    """
    config_file = get_config_file(ES_CONFIG_NAME)
    init_section = 'es'
    host_port = config.read_config(config_file, init_section, 'host_port')
    # removed an unreachable `pass` that followed this return
    return host_port
def main(pre_model_path, tar_model_path):
    """Build an LWPT model from the rmsc config and transform a checkpoint.

    Args:
        pre_model_path: Path of the source checkpoint.
        tar_model_path: Path the transformed checkpoint is written to.
    """
    logger.info('loading config file...')
    config = read_config('config/rmsc.yaml')
    logger.info('constructing model...')
    model = LWPT(config)
    # use the module logger consistently (was mixing `logger` and the
    # root-logger `logging.info`, which can format/route differently)
    logger.info("transforming model from '%s' to '%s'..." %
                (pre_model_path, tar_model_path))
    transform(pre_model_path, tar_model_path, model)
    logger.info('finished.')
def send_single_notification(self, registration_id, title, message):
    """Push one FCM notification to a single device registration.

    Args:
        registration_id: FCM device registration token.
        title: Notification title.
        message: Notification body.
    """
    # get config entries
    config = read_config()
    server_key = config.get('fcm', 'server_key')
    push_service = FCMNotification(api_key=server_key)
    result = push_service.notify_single_device(
        registration_id=registration_id,
        message_title=title,
        message_body=message
    )
    # `print result` is Python 2 statement syntax (a SyntaxError on Python 3);
    # the function form below behaves identically on both versions
    print(result)
def run(config_path, is_vocab, is_split):
    """Optionally build the vocab and/or split the GuessMovie dataset.

    Args:
        config_path: Path to the game config file.
        is_vocab: When truthy, run entity pre-processing.
        is_split: When truthy, split the dataset in two.
    """
    game_config = read_config(config_path)
    set_seed(game_config['global']['random_seed'])

    if is_vocab:
        print('Pre-Processing GuessMovie dataset...')
        PreProcess(out_path='data/wo_entity/vocab/').pre_process_entity(
            replace_ent=False)

    if is_split:
        print('Spliting dataset...')
        split_two(
            doc_id_path='data/wo_entity/vocab/guessmovie_doc_id.json',
            out_path_prefix='data/wo_entity/')
def preprocess_meta_data():
    """Prepare the configuration for a specific run.

    Steps:
        1. read command line arguments,
        2. build the config from them,
        3. initialise the GPU,
        4. set up the logger/tracker and save the run scripts.

    Returns:
        The prepared config object.
    """
    args = get_args()
    cfg = read_config(args)
    gpu_init()
    set_logger_and_tracker(cfg)
    save_scripts(cfg)
    return cfg
def main():
    """Interactive prompt: look up baby-name popularity by name.

    Commands: ``:q`` quits, ``:h`` shows help, anything else is treated as a
    name to query.
    """
    config = read_config()
    conn = get_connection(config)
    collection = get_collection(conn, config)
    while True:  # idiomatic: no parentheses needed around the condition
        cmd = input('>>> ')
        if cmd == ":q":
            print("Program finished")
            conn.close()
            exit(0)
        elif cmd == ':h':
            # fixed typo in user-facing text: "promt" -> "prompt"
            print("Type a name after the prompt (>>>)")
            print(":h Help")
            print(":q Quit")
        else:
            name = cmd
            baby_names = get_popularity_by_name(collection, name)
            for baby_name in baby_names:
                # fixed typo in user-facing text: "Ransk" -> "Rank"
                print(("Rank: %s Year: %s") %
                      (baby_name['rank'], baby_name['year']))
    # NOTE(review): this `return` closes a Flask route handler whose `def`
    # sits above this chunk — confirm its indentation against the full file.
    return Response()


@app.route('/run_scaling', methods=['POST'])
@login_required
def run_scaling():
    """Fire 10 asynchronous Hive queries, presumably to exercise cluster
    auto-scaling — confirm intent with the route's author."""
    for _ in range(10):
        run_hive_query_asynchronous(
            cluster_label=config['hadoop_cluster_name'],
            query_filename='top_10_revenue_generating_products.sql',
            qubole_database_name=config['qubole_database_name'])
    return Response()


def parse_command_line_args():
    """Parse the required --config path and optional --extra-config path."""
    parser = argparse.ArgumentParser(description='Quick start App')
    parser.add_argument('--config', required=True, help='Configuration')
    parser.add_argument('--extra-config',
                        help='Configuration of clusters and notebooks')
    return parser.parse_args()


if __name__ == "__main__":
    # Wire stderr logging, load the config, configure Qubole, start Flask.
    logging.basicConfig(stream=sys.stderr, level=logging.INFO)
    args = parse_command_line_args()
    config = read_config(args.config)
    app.secret_key = os.urandom(47)
    app.config.update(config)
    Qubole.configure(api_token=config['qubole_api_token'])
    app.run(host='0.0.0.0', port=int(config['port']), threaded=True)
        # NOTE(review): tail of a parser.add_argument(...) call whose opening
        # line is above this chunk — keep with the rest of get_args().
        type=int,
        help="num_workers for DataLoader")
    args = parser.parse_args()
    return args


if __name__ == "__main__":
    args = get_args()
    # Derive log/result directories from the config file's base name
    # (path stripped, extension dropped).
    model_path = "./logs/" + args.config_file.split('/')[-1].split('.')[0]
    result_path = "./results/" + args.config_file.split('/')[-1].split('.')[0]
    cfg = read_config(args.config_file)
    input_size = cfg["model"]["input_size"]
    batch_size = cfg["hyperparameters"]["batch_size"]
    dataloader_params = {
        'batch_size': 1,  # test loader evaluates one sample at a time
        'shuffle': True,
        'drop_last': False,
        'num_workers': args.num_workers
    }
    test_data = CustomTestDataset(args.data_path, input_size)
    test_gen = DataLoader(test_data, **dataloader_params)
    # Model
    # NOTE(review): this chunk is the tail of a training entry point —
    # `cfg`, `train_loader` and `val_loader` are bound above this view.
    model = scene_transformer(cfg)

    # keep the last checkpoint plus the top 5 by the monitored metric
    checkpoint_callback = ModelCheckpoint(save_last=True, save_top_k=5)
    EarlyStopping_callback = EarlyStopping(
        monitor='val_loss',
        min_delta=0.00,
        patience=50,
    )
    trainer = Trainer(
        gpus=1,
        gradient_clip_val=1.0,
        # fast_dev_run=True,
        max_epochs=cfg["train"]["epochs"],
        checkpoint_callback=checkpoint_callback,
        resume_from_checkpoint=cfg["train"]["resume"],
        early_stop_callback=EarlyStopping_callback)
    # NOTE(review): presumably patches the tensorboard logger before
    # fitting — confirm against monkeypatch_tensorboardlogger's definition
    monkeypatch_tensorboardlogger(trainer.logger)
    trainer.fit(model,
                train_dataloader=train_loader,
                val_dataloaders=val_loader)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("cfg_path", help="Path to config file")
    args = parser.parse_args()
    cfg = read_config(args.cfg_path)
    run_training(cfg)
def do_general_setup(prog_name="do_general_setup()"):
    """Parse CLI args, load the config, and populate the module-level
    measurement / SM / BSM distributions plus the scan parameter grid.

    Returns:
        tuple: (cfg_name, do_show_figs, plots_fname, tag) from the CLI.
    """
    global num_scan_params, meas_dist, SM_model_dist, SM_exp_dist, \
        SM_pred_dist, BSM_input_dists, BSM_scan_dists, param_grid
    #
    # parse arguments
    #
    utils.info(prog_name, "Parsing arguments")
    cfg_name, do_show_figs, plots_fname, tag = parse_command_line_arguments()
    #
    # read config file
    #
    utils.info(prog_name, f"Loading config file {cfg_name}")
    config.read_config(cfg_name)
    #
    # load scan param details
    #
    utils.info(prog_name, "Interpreting scan parameters")
    params = inputs.load_cfg_to_scan_params()
    num_scan_params = len(params)
    utils.info(
        prog_name, "Scan params are: " +
        " / ".join([p for p in config.get_scan_param_names()]))
    # default to 95% CL unless [GENERAL] ConfidenceLevel overrides it
    stats.set_target_coverage(
        config.config.getfloat("GENERAL", "ConfidenceLevel", fallback=0.95))
    utils.info(prog_name, f"Target coverage is {stats.target_coverage}")
    #
    # load input files
    #
    utils.info(prog_name, "Loading input files")
    inputs.load_cfg_to_input_store(look_for_params=True)
    #
    # get input measured / expected / SM / BSM distributions
    #
    meas_dist = inputs.get_meas_dist(name="get_limits.py::meas")
    SM_model_dist = inputs.get_SM_dist(name="get_limits.py::SM::model",
                                       key="theoretical")
    SM_exp_dist = inputs.get_SM_dist(name="get_limits.py::SM::pred",
                                     key="experimental")
    # combined SM prediction: experimental distribution with the theory
    # covariance added on top
    SM_pred_dist = Distribution(SM_exp_dist)
    SM_pred_dist.cov = SM_pred_dist.cov + SM_model_dist.cov
    BSM_input_dists = inputs.get_BSM_distributions(
        prefix="get_limits.py::BSM::")
    utils.info(prog_name, f"Measured distribution is {meas_dist}")
    utils.info(prog_name, f"SM experimental distribution is {SM_exp_dist}")
    utils.info(prog_name, f"SM theoretical distribution is {SM_model_dist}")
    utils.info(prog_name, f"SM combined distribution is {SM_pred_dist}")
    for key, item in BSM_input_dists.items():
        utils.info(prog_name,
                   f"BSM input distribution at point {key} is {item}")
    #
    # generate model distributions across BSM grid
    #
    utils.info(prog_name, "Generating param grid")
    param_grid = create_param_grid(params)
    utils.info(prog_name, "Populating predictions across param grid")
    BSM_scan_dists = inputs.generate_BSM_predictions(BSM_input_dists,
                                                     param_grid,
                                                     SM=SM_model_dist)
    utils.info(prog_name, "Adding SM to BSM")
    # flatten, add SM point-by-point, then restore the original shape
    shape = BSM_scan_dists.values.shape
    BSM_scan_dists.values = BSM_scan_dists.values.flatten()
    for idx in range(len(BSM_scan_dists.values)):
        BSM_scan_dists.values[idx] = BSM_scan_dists.values[
            idx] + SM_model_dist  # not using add_to_values as I want SM error to be included
        utils.info(
            prog_name,
            f"BSM scan distribution at index {idx} is {BSM_scan_dists.values[idx]}"
        )
    BSM_scan_dists.values = BSM_scan_dists.values.reshape(shape)
    #
    # return config options
    #
    return cfg_name, do_show_figs, plots_fname, tag
def main(args):
    """Main Function: save popularity results for the 1880-2017 range."""
    cfg = read_config()
    coll = get_collection(cfg)
    names_in_range = get_baby_names_by_range(1880, 2017)
    save_results(coll, names_in_range)
        # NOTE(review): tail of a simulation routine — the matching `if`
        # branch and the enclosing loop/function header are above this chunk;
        # the reconstructed indentation here needs confirming against the
        # full file.
        action = controller.calculate_action(obs_state, ref_signal)
    else:
        action = 0.0
    data.push_datapoint(ref_signal, obs_state, action)
    # feed control signal into the system
    system.step(action)
    return data


# main
if __name__ == "__main__":
    # read config
    config = conf.read_config(config_file)
    # init entities based on config
    T, dt, system, controller, reference, init_state = conf.init_entities(
        config)
    # init data storage
    data = Data(T, dt, system.get_store_dim(), system.get_action_dim())
    # execute simulation
    simulation_data = simulation(system, controller, reference, init_state,
                                 data)
    # evaluate results
    # save/plot results
def read_last_dump_time(key_name):
    """Return the value stored under *key_name* in its own config file,
    read from the shared INI_SECTION."""
    return config.read_config(get_config_file(key_name), INI_SECTION,
                              key_name)