Example No. 1
0
def log_params(curr_log_dir, index, root_handler, params):
    """
    Log the hyperparameter combination selected by ``index``.

    Creates a per-run logger in ``curr_log_dir``, attaches the shared
    root handler so messages are mirrored to the root log, and records
    which parameter combination this run uses.

    Args:
        curr_log_dir: Directory where this run's log output is created.
        index: Index of the parameter combination within ``params``.
        root_handler: Handler taken from the root logger; added here so
            per-run messages also reach the experiment-level log.
        params: Indexable collection of parameter dicts; only
            ``params[index]`` is read.
    """
    logger = utils.create_logger(curr_log_dir, index=index)
    logger.addHandler(root_handler)

    # Lazy %-style arguments (consistent with the root_logger.info calls
    # elsewhere in this file) defer message formatting until the record
    # is actually emitted.
    logger.info("Current log dir: %s", curr_log_dir)
    logger.info("Training with parameter combination %s", index)
    logger.info("With parameters: %s", pformat(params[index]))
    logger.info("")
Example No. 2
0
            # Fragment of a training loop; the enclosing loop/function is not
            # visible here.  Pulls one batch from the pipeline and tracks loss.
            batch = training_pipeline.request_batch(train_request)
            curr_loss.append(batch.loss)
            # Every `save_every` iterations, append the accumulated losses to
            # the running history and persist it, then reset the accumulator.
            # NOTE(review): presumably `loss` is the full loss history list and
            # `history_path` a .npy file path — confirm against the outer scope.
            if len(curr_loss) % pipeline_params['save_every'] == 0:
                loss = loss + curr_loss
                np.save(history_path, loss, allow_pickle=True)
                curr_loss = []


if __name__ == '__main__':

    # Command-line arguments: dataset name, experiment name, and the index
    # of the hyperparameter combination to run (fourth value unused here).
    dataset, exp, index, _ = script_utils.get_args()
    # NOTE(review): "expirement_dir" is a typo for "experiment_dir";
    # renaming would touch later lines, so it is only flagged here.
    logdir, expirement_dir = script_utils.get_logdir(dataset, exp)

    # Per-combination log directory created under the experiment log root.
    curr_log_dir = script_utils.make_dirs(logdir, index)

    # Root logger for the whole experiment; its first handler is reused so
    # per-run loggers can mirror messages into the same destination.
    root_logger = utils.create_logger(logdir, name='root')
    root_handler = root_logger.handlers[0]

    root_logger.info("Starting experiment %s with dataset %s",
                     expirement_dir.split('-')[0], dataset)
    # Parameter grid previously written out by the experiment setup step.
    # NOTE(review): the file handle is never closed — harmless in a
    # short-lived script, but `with open(...)` would be cleaner.
    params = json.load(open(logdir + "/param_dict.json"))

    contrastive_params, _, model_params, _ = script_utils.get_params(params)

    root_logger.info("Parameter dict: %s", pformat(contrastive_params[index]))
    root_logger.info("")

    # Pipeline class selected for this run; instantiation happens in code
    # past the end of this excerpt.
    pipeline = Contrastive

    model = script_utils.get_model(index, model_params, root_logger)