Example 1
    def print_desc(self, resume_from):
        desc = f"NAME: {self.name}\n"
        desc += f"DATA_DIR: {self.data_dir}\n"
        desc += f"BATCH_SIZE: {self.BATCH_SIZE}\n"
        desc += f"BUFFER_SIZE: {self.BUFFER_SIZE}\n"
        desc += f"PREFETCH_SIZE: {self.PREFETCH_SIZE}\n"
        desc += f"NB_BATCHES: {self.NB_BATCHES}\n"
        desc += f"SAMPLE_SHAPE: {self.sample_shape}\n"
        desc += f"OUTPUT_SHAPE: {self.output_shape}\n"
        desc += f"MIN_WIDTH/HEIGHT: {self.min_wh}\n"
        desc += f"MIN_WEIGHT: {self.min_wei} == {2 ** self.min_wei}\n"
        desc += f"NB_LAYERS: {self.nb_layers}\n"
        desc += f"LEARNING_RATE_D: {self.LEARNING_RATE_D}\n"
        desc += f"LEARNING_RATE_G: {self.LEARNING_RATE_G}\n"
        desc += f"BETA_1: {self.BETA_1}\n"
        desc += f"BETA_2: {self.BETA_2}\n"
        desc += f"TRAINING_RATIO: {self.TRAINING_RATIO}\n"
        desc += f"GRADIENT_PENALTY_WEIGHT: {self.GRADIENT_PENALTY_WEIGHT}\n"
        desc += f"Z_SIZE: {self.Z_SIZE}\n"
        desc += f"LR_ALPHA: {self.LR_ALPHA}\n"
        desc += f"BN_MOMENTUM: {self.BN_MOMENTUM}\n"
        desc += f"KERNEL_SIZE: {self.KERNEL_SIZE}\n"
        print(desc)

        # Log the description to TensorBoard as a text summary
        with self.writer.as_default():
            tf.summary.trace_on()
            tf.summary.text("Hyper-parameters", desc, step=resume_from)
            self.writer.flush()
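
The method above assumes that self.writer is a TensorBoard summary writer created elsewhere in the class. As a minimal sketch of that setup (the Trainer class name and the logs/run-0 directory are hypothetical, not taken from the original code):

import tensorflow as tf

class Trainer:
    def __init__(self, log_dir="logs/run-0"):  # hypothetical log directory
        # The writer used by print_desc() via self.writer.as_default()
        self.writer = tf.summary.create_file_writer(log_dir)

    def log_hparams(self, desc, step=0):
        # Text summaries show up under TensorBoard's "Text" tab,
        # rendered as Markdown
        with self.writer.as_default():
            tf.summary.text("Hyper-parameters", desc, step=step)
            self.writer.flush()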
Example 2
def log_test_results(cfg, model, test_generator, test_metrics, log_dir):
    '''
    Visualize performance of a trained model on the test set and log the results
    (metrics, hyperparameters, ROC curve, confusion matrix) to TensorBoard.
    :param cfg: Project config
    :param model: A trained Keras model
    :param test_generator: A Keras generator for the test set
    :param test_metrics: Dict of test set performance metrics
    :param log_dir: Path to write TensorBoard logs
    '''

    # Visualization of test results
    test_predictions = model.predict(test_generator, verbose=0)
    test_labels = test_generator.labels
    plt = plot_roc(test_labels,
                   test_predictions,
                   list(test_generator.class_indices.keys()),
                   dir_path=cfg['PATHS']['IMAGES'])
    roc_img = plot_to_tensor()
    plt = plot_confusion_matrix(test_labels,
                                test_predictions,
                                list(test_generator.class_indices.keys()),
                                dir_path=cfg['PATHS']['IMAGES'])
    cm_img = plot_to_tensor()

    # Log test set results and plots in TensorBoard
    writer = tf_summary.create_file_writer(logdir=log_dir)

    # Create table of test set metrics
    test_summary_str = [['**Metric**', '**Value**']]
    for metric in test_metrics:
        metric_values = test_metrics[metric]
        test_summary_str.append([metric, str(metric_values)])

    # Create table of model and train hyperparameters used in this experiment
    hparam_summary_str = [['**Variable**', '**Value**']]
    for key in cfg['TRAIN']:
        hparam_summary_str.append([key, str(cfg['TRAIN'][key])])
    for key in cfg['NN'][cfg['TRAIN']['MODEL_DEF'].upper()]:
        hparam_summary_str.append(
            [key, str(cfg['NN'][cfg['TRAIN']['MODEL_DEF'].upper()][key])])

    # Write to TensorBoard logs
    with writer.as_default():
        tf_summary.text(name='Test set metrics',
                        data=tf.convert_to_tensor(test_summary_str),
                        step=0)
        tf_summary.text(name='Run hyperparameters',
                        data=tf.convert_to_tensor(hparam_summary_str),
                        step=0)
        tf_summary.image(name='ROC Curve (Test Set)', data=roc_img, step=0)
        tf_summary.image(name='Confusion Matrix (Test Set)',
                         data=cm_img,
                         step=0)
    return
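
Both of these examples call a plot_to_tensor() helper that is not shown here. A plausible sketch, assuming the helper converts the current matplotlib figure into the 4-D image tensor that tf.summary.image expects (the project's actual implementation may differ):

import io
import matplotlib.pyplot as plt
import tensorflow as tf

def plot_to_tensor():
    # Render the current matplotlib figure to an in-memory PNG buffer
    buf = io.BytesIO()
    plt.savefig(buf, format='png')
    plt.close()  # free the figure once it has been serialized
    buf.seek(0)
    # Decode the PNG and add a batch dimension: shape (1, height, width, 4)
    image = tf.image.decode_png(buf.getvalue(), channels=4)
    image = tf.expand_dims(image, 0)
    return image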
Example 3
def log_test_results(cfg, model, test_generator, test_metrics, log_dir):
    '''
    Visualize performance of a trained model on the test set and log the results
    (metrics, hyperparameters, ROC curve, confusion matrix) to TensorBoard.
    :param cfg: Project config
    :param model: A trained Keras model
    :param test_generator: A Keras generator for the test set
    :param test_metrics: Dict of test set performance metrics
    :param log_dir: Path to write TensorBoard logs
    '''

    # Visualization of test results
    test_predictions = model.predict(test_generator, verbose=0)
    test_labels = test_generator.labels
    covid_idx = test_generator.class_indices['COVID-19']
    plt = plot_roc("Test set", test_labels, test_predictions, class_id=covid_idx)
    roc_img = plot_to_tensor()
    plt = plot_confusion_matrix(test_labels, test_predictions, class_id=covid_idx)
    cm_img = plot_to_tensor()

    # Log test set results and plots in TensorBoard
    writer = tf_summary.create_file_writer(logdir=log_dir)

    # Create table of test set metrics
    test_summary_str = [['**Metric**', '**Value**']]
    thresholds = cfg['TRAIN']['THRESHOLDS']  # Load classification thresholds
    for metric in test_metrics:
        # Precision and recall may be reported as one value per threshold
        if metric in ['precision', 'recall'] and isinstance(test_metrics[metric], list):
            metric_values = dict(zip(thresholds, test_metrics[metric]))
        else:
            metric_values = test_metrics[metric]
        test_summary_str.append([metric, str(metric_values)])

    # Create table of model and train config values
    hparam_summary_str = [['**Variable**', '**Value**']]
    for key in cfg['TRAIN']:
        hparam_summary_str.append([key, str(cfg['TRAIN'][key])])
    # Both class modes log the DCNN_BINARY architecture hyperparameters
    for key in cfg['NN']['DCNN_BINARY']:
        hparam_summary_str.append([key, str(cfg['NN']['DCNN_BINARY'][key])])

    # Write to TensorBoard logs
    with writer.as_default():
        tf_summary.text(name='Test set metrics', data=tf.convert_to_tensor(test_summary_str), step=0)
        tf_summary.text(name='Run hyperparameters', data=tf.convert_to_tensor(hparam_summary_str), step=0)
        tf_summary.image(name='ROC Curve (Test Set)', data=roc_img, step=0)
        tf_summary.image(name='Confusion Matrix (Test Set)', data=cm_img, step=0)
    return
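
The text summaries in both log_test_results variants rely on tf.summary.text rendering a rank-2 string tensor as a table of Markdown cells in TensorBoard's Text tab. A minimal self-contained sketch of that behaviour (the logs/demo directory and the sample values are arbitrary, for illustration only):

import tensorflow as tf

writer = tf.summary.create_file_writer('logs/demo')  # arbitrary log directory
table = [['**Metric**', '**Value**'],
         ['loss', '0.41'],
         ['auc', '0.93']]
with writer.as_default():
    # Each row becomes a table row; each cell is rendered as Markdown
    tf.summary.text(name='Demo metrics',
                    data=tf.convert_to_tensor(table),
                    step=0)
    writer.flush()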