Example #1
import os
import logging

import scipy.io as sio
import torch

# Project-local modules: the exact import paths below are assumptions and may
# need to be adjusted to match the fourier_neural_operator repository layout.
import Experiment
import data_management
import plotting_utils
from data_management import OneStepDataSetComplex

def main():
    ###########################################################################
    ### SPECIFY DATA PATHS
    P = '/local/meliao/projects/fourier_neural_operator/'
    DATA_DIR = os.path.join(P, 'data/')
    MODEL_DIR = os.path.join(P, 'experiments/31_different_activations/models')
    PLOTS_DIR = os.path.join(P, 'experiments/31_different_activations/plots/')
    RESULTS_DIR = os.path.join(P,
                               'experiments/31_different_activations/results')
    if not os.path.isdir(PLOTS_DIR):
        os.mkdir(PLOTS_DIR)

    ###########################################################################
    ### LOAD DATA

    FP_FMT = "2021-09-29_NLS_data_00_{}_test.mat"
    # DSET_KEYS = ['00', '01', '02', '03', '04']
    DSET_NAME_DD = {
        '00': 'Flat DFT Coeffs on [1, ..., 5]',
        '01': 'GRF Original',
        '02': 'GRF on [1, ..., 5]',
        '03': 'GRF high coefficient decay',
        '04': 'GRF low coefficient decay'
    }
    data_fp_dd = {
        i: os.path.join(DATA_DIR, FP_FMT.format(i))
        for i in DSET_NAME_DD.keys()
    }

    raw_data_dd = {k: sio.loadmat(v) for k, v in data_fp_dd.items()}

    data_dd = {
        k: OneStepDataSetComplex(v['output'], v['t'], v['x'])
        for k, v in raw_data_dd.items()
    }

    # data_dd = sio.loadmat(DATA_FP)
    # dset = OneStepDataSetComplex(data_dd['output'], data_dd['t'], data_dd['x'])

    ###########################################################################
    ### SET UP EXPERIMENT

    experiment = Experiment.MultiDataExperiment(name="31")
    experiment.register_data_variable(data_dd)

    ACTIVATIONS = ['tanh', 'sigmoid', 'relu', 'sin']
    # ACTIVATIONS = ['relu', 'sin']
    K_ACTIVATION = 'activation'
    experiment.register_new_variable(K_ACTIVATION, ACTIVATIONS)

    ###########################################################################
    ### LOAD MODELS

    MODEL_FMT = "time_10_dset_{dataset}_activation_{activation}_ep1000"
    experiment.load_models(MODEL_DIR, MODEL_FMT)

    ###########################################################################
    ### MAKE PREDICTIONS

    with torch.no_grad():
        experiment.run_prediction_and_errors(
            prediction_fn=data_management.prediction_fn_composed,
            data_spec_fn=data_management.data_spec_fn_fno,
            error_fn=data_management.error_fn_l2_normalized)

    # Filter used below to select only the models trained with sin and relu.
    sin_relu_dd = {'activation': {'sin', 'relu'}}
    for dset_key, dset_name in DSET_NAME_DD.items():
        logging.info(f"Plotting errors on dset {dset_key}")

        # make plot for all activation types
        plot_fp = os.path.join(PLOTS_DIR,
                               f"dset_{dset_key}_composed_errors_all.png")
        dset_vals = {dset_key}
        filter_dd = {'dataset': dset_vals}
        errors_dd = experiment.get_error_dd("{activation}",
                                            remove_first_row=True,
                                            **filter_dd)
        plotting_utils.plot_time_errors(errors_dd=errors_dd,
                                        title=f"Errors on dataset {dset_name}",
                                        fp=plot_fp)

        # make plot for sin and relu
        plot_fp_i = os.path.join(
            PLOTS_DIR, f"dset_{dset_key}_composed_errors_sin-relu.png")
        sin_relu_dd['dataset'] = dset_vals
        errors_dd_i = experiment.get_error_dd("{activation}",
                                              remove_first_row=True,
                                              **sin_relu_dd)
        plotting_utils.plot_time_errors(errors_dd=errors_dd_i,
                                        title=f"Errors on dataset {dset_name}",
                                        fp=plot_fp_i)

        # make predictions plot for sin and relu
        preds_dd = experiment.get_preds_dd("{activation}", **sin_relu_dd)
        for test_case in range(5):
            plot_fp_i = os.path.join(
                PLOTS_DIR,
                f'test_case_{test_case}_dset_{dset_key}_sin-relu.png')

            preds_dd_for_plt = {k: v[test_case] for k, v in preds_dd.items()}
            solns = experiment.dataset_dd[dset_key].X[test_case]
            plotting_utils.plot_one_testcase_panels(
                preds_dd=preds_dd_for_plt,
                solns=solns,
                show_n_timesteps=5,
                title=f"Testcase {test_case} on dataset {dset_name}",
                fp=plot_fp_i)
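
# A minimal entry point, assuming this script is meant to be run directly; the
# original may configure logging and command-line arguments differently.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    main()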
Example #2
import os
import logging

import scipy.io as sio
import torch

# Project-local modules: the exact import paths below are assumptions and may
# need to be adjusted to match the fourier_neural_operator repository layout.
import Experiment
import data_management
import plotting_utils
from data_management import OneStepDataSetComplex, FreqScalingDataSet

def main():
    ###########################################################################
    ### SPECIFY DATA PATHS
    logging.info("Beginning plotting")
    P = '/local/meliao/projects/fourier_neural_operator/'
    DATA_DIR = os.path.join(P, 'data/')
    MODEL_DIR = os.path.join(P, 'experiments/31_different_activations/models')
    PLOTS_DIR = os.path.join(P, 'experiments/32_scaling_for_freq/plots/')
    if not os.path.isdir(PLOTS_DIR):
        os.mkdir(PLOTS_DIR)

    ###########################################################################
    ### LOAD DATA

    FP_FMT = "2021-09-29_NLS_data_00_{}_test.mat"
    DSET_NAME_DD = {
        # '00': 'Flat DFT Coeffs on [1, ..., 5]',
        '01': 'GRF Original',
        # '02': 'GRF on [1, ..., 5]',
        # '03': 'GRF high coefficient decay',
        '04': 'GRF low coefficient decay'
    }
    data_fp_dd = {
        i: os.path.join(DATA_DIR, FP_FMT.format(i))
        for i in DSET_NAME_DD.keys()
    }

    raw_data_dd = {k: sio.loadmat(v) for k, v in data_fp_dd.items()}

    data_dd = {
        k: OneStepDataSetComplex(v['output'], v['t'], v['x'])
        for k, v in raw_data_dd.items()
    }

    # Frequency-scaled copies of dataset '04', keyed by the scale factor k and
    # wrapping the same raw solutions with scale_param = 1 / k.
    scaled_data_dd = {
        k: FreqScalingDataSet(raw_data_dd['04']['output'],
                              raw_data_dd['04']['t'],
                              raw_data_dd['04']['x'],
                              scale_param=1 / k)
        for k in [2, 3]
    }
    ###########################################################################
    ### SET UP COMPOSED PREDS EXPERIMENT AND MAKE PREDICTIONS

    experiment1 = Experiment.MultiDataExperiment(name="Standard data dd")
    experiment1.register_data_variable(data_dd)

    ACTIVATIONS = ['relu', 'sin']

    K_ACTIVATION = 'activation'
    experiment1.register_new_variable(K_ACTIVATION, ACTIVATIONS)

    MODEL_FMT = "time_10_dset_{dataset}_activation_{activation}_ep1000"
    experiment1.load_models(model_dir=MODEL_DIR, model_fmt=MODEL_FMT)

    with torch.no_grad():
        experiment1.run_prediction_and_errors(
            prediction_fn=data_management.prediction_fn_composed,
            data_spec_fn=data_management.data_spec_fn_fno,
            error_fn=data_management.error_fn_l2_normalized)
    ###########################################################################
    ### SET UP FREQUENCY-SCALING EXPERIMENT (scale_param = 1/2 and 1/3)

    experiment2 = Experiment.MultiDataExperiment(name='Scaling dd')
    experiment2.register_data_variable(scaled_data_dd)
    experiment2.register_new_variable(K_ACTIVATION, ['relu', 'sin'])
    experiment2.register_new_variable('train_dset', ['01', '03', '04'])
    MODEL_FMT = "time_10_dset_{train_dset}_activation_{activation}_ep1000"

    experiment2.load_models(model_dir=MODEL_DIR, model_fmt=MODEL_FMT)

    with torch.no_grad():
        experiment2.run_prediction_and_errors(
            prediction_fn=data_management.prediction_fn_scale_for_freq,
            data_spec_fn=data_management.data_spec_fn_fno,
            error_fn=data_management.error_fn_l2_normalized)
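
    # Sketch of a possible follow-up step, modeled on Example #1; the format
    # string, filename, and plot title here are assumptions rather than the
    # original script's choices.
    errors_fp = os.path.join(PLOTS_DIR, 'scaled_vs_standard_errors.png')
    scaled_errors_dd = experiment2.get_error_dd(
        "scale_{dataset}_{activation}_train_{train_dset}", remove_first_row=True)
    plotting_utils.plot_time_errors(errors_dd=scaled_errors_dd,
                                    title="Errors with frequency scaling",
                                    fp=errors_fp)


# A minimal entry point, assuming this script is run directly (as in Example #1).
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    main()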