Example #1

# The functions below rely on project-level helpers and constants defined
# elsewhere (e.g. get_kernels_from_name, get_kernel_and_dilation_from_long_name,
# get_num_of_predictions, create_multi_graph, input_time_length, home,
# gradient_save_dir, output_dir).
import collections

import numpy as np
import pandas
import scipy.stats
from matplotlib import pyplot as plt
def get_distance_to_performance(df, variable):
    """
    plots the
    :param df: pandas.DataFrame where the columns are performances of models on the different patients
    :param variable: 'vel' if we are plotting velocity, else 'absVel' for absolute velocity
    :return: dictionary where the performances are ordered based on the size of the receptive field of the networks
    """
    distance_performance_dict = {}
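    # Keep only columns for the requested variable, skipping k4_d2 models and
    # pandas' 'Unnamed' index columns.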
    for column in df.columns:
        if 'k4_d2' not in column:
            if ('Unnamed' not in column) and (variable in column):
                print(column)
                kernels, dilations = get_kernels_from_name(column)
                max_k, _ = get_num_of_predictions(kernels, dilations)
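                # Ordering key: half of (input_time_length - max_k + 1) samples.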
                distance = int((input_time_length - max_k + 1) / 2)
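                # Special case: k3_d3 models (excluding sbp1) use a fixed 522-sample length.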
                if ('k3_d3' in column) and ('sbp1' not in column):
                    distance = int(522 / 2)
                performance = df[column].to_numpy()
                print(column, distance, np.median(performance))

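                # Store the median performance and its standard error for this model.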
                distance_performance_dict[distance] = [
                    np.median(performance),
                    scipy.stats.sem(performance)
                ]
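    # Order the entries by increasing distance (receptive-field size).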
    ordered_distance_performance_dict = collections.OrderedDict(
        sorted(distance_performance_dict.items()))

    return ordered_distance_performance_dict
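
# Minimal usage sketch (hypothetical): `results_df` stands in for a DataFrame of
# per-patient performances with model names as columns.
# ordered = get_distance_to_performance(results_df, 'vel')
# for distance, (median_perf, sem) in ordered.items():
#     print(distance, median_perf, sem)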
def get_gradients_for_intermediate_layers_from_np_arrays(
        file, prefix, modules, train_mode, eval_mode, shift_by=None):
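    """
    Load pre-computed amplitude gradients (ALLCH/MCH/NCH .npy files) for every
    module in `modules`, together with zero-filled placeholder inputs matching
    each layer's valid output length. Phase-gradient loading is currently
    commented out, so the phase lists are returned empty.
    """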
    amp_grads, amp_grads_mch, amp_grads_nch, phase_grads, phase_grads_mch, phase_grads_nch = [], [], [], [], [], []
    kernel_size, dilation = get_kernel_and_dilation_from_long_name(file)
    X_reshaped_list = []
    for module_name in modules:
        if shift_by is not None:
            shift_by_str = f'/shift_{shift_by}/'
        else:
            shift_by_str = ''
        max_k, max_l = get_num_of_predictions(kernel_size,
                                              dilation,
                                              layer=module_name)
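        # Placeholder array shaped to this layer's valid output length
        # (input_time_length - max_k + 1 samples).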
        X_reshaped_list.append(
            np.zeros([1, 1, input_time_length - max_k + 1, 1]))
        print(module_name, prefix)
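        # Load the pre-computed amplitude gradients for the ALLCH, MCH and NCH
        # channel groups of this module.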
        amp_grads.append(
            np.load(
                f'{home}/outputs/{gradient_save_dir}/{file}/{shift_by_str}{prefix}/amps/{module_name}/amps_avg_{file}_{train_mode}_{eval_mode}_ALLCH.npy'
            ))
        amp_grads_mch.append(
            np.load(
                f'{home}/outputs/{gradient_save_dir}/{file}/{shift_by_str}{prefix}/amps/{module_name}/amps_avg_{file}_{train_mode}_{eval_mode}_MCH.npy'
            ))
        amp_grads_nch.append(
            np.load(
                f'{home}/outputs/{gradient_save_dir}/{file}/{shift_by_str}{prefix}/amps/{module_name}/amps_avg_{file}_{train_mode}_{eval_mode}_NCH.npy'
            ))
        # phase_grads.append(np.load(
        #     f'{home}/outputs/all_layer_gradients/{file}/{shift_by_str}{prefix}/phase/{module_name}/phase_avg_{file}_{train_mode}_{eval_mode}_ALLCH.npy'))
        # phase_grads_mch.append(np.load(
        #     f'{home}/outputs/all_layer_gradients/{file}/{shift_by_str}{prefix}/phase/{module_name}/phase_avg_{file}_{train_mode}_{eval_mode}_MCH.npy'))
        # phase_grads_nch.append(np.load(
        #     f'{home}/outputs/all_layer_gradients/{file}/{shift_by_str}{prefix}/phase/{module_name}/phase_avg_{file}_{train_mode}_{eval_mode}_NCH.npy'))
    return X_reshaped_list, amp_grads, amp_grads_mch, amp_grads_nch, phase_grads, phase_grads_mch, phase_grads_nch
def plot_multiple_gradients_from_ndarrays(trained_mode, eval_mode, prefixes,
                                          file, titles, grad_type):
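    """
    Plot the conv_classifier gradients for several prefixes in a 2x2 grid of
    subplots (one panel per prefix, each showing ALLCH, MCH and NCH curves)
    and save the figure under `{output_dir}/multigraphs/`.
    """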
    plt.clf()
    fig_s, ax_s = plt.subplots(2, 2, sharey='row', figsize=(20, 10))
    output = f'{output_dir}/multigraphs/{file}/{grad_type}/{grad_type}_avg_{file}_{trained_mode}_{eval_mode}_ALLCH_sw.png'
    indices = [(0, 0), (0, 1), (1, 0), (1, 1)]
    kernels, dilations = get_kernel_and_dilation_from_long_name(file)

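    # One prefix per panel of the 2x2 grid.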
    for i, prefix in enumerate(prefixes):
        gradients = np.load(
            f'{home}/outputs/all_layer_gradients/{file}/{prefix}/{grad_type}/conv_classifier/{grad_type}_avg_{file}_{trained_mode}_{eval_mode}_ALLCH.npy'
        )
        gradients_mch = np.load(
            f'{home}/outputs/all_layer_gradients/{file}/{prefix}/{grad_type}/conv_classifier/{grad_type}_avg_{file}_{trained_mode}_{eval_mode}_MCH.npy'
        )
        gradients_nch = np.load(
            f'{home}/outputs/all_layer_gradients/{file}/{prefix}/{grad_type}/conv_classifier/{grad_type}_avg_{file}_{trained_mode}_{eval_mode}_NCH.npy'
        )
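        # SEM of the absolute gradients along axis 1, then averaged over axis 0.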
        gradients_sem = np.mean(scipy.stats.sem(np.abs(gradients), axis=1),
                                axis=0)
        gradients_mch_sem = np.mean(scipy.stats.sem(np.abs(gradients_mch),
                                                    axis=1),
                                    axis=0)
        gradients_nch_sem = np.mean(scipy.stats.sem(np.abs(gradients_nch),
                                                    axis=1),
                                    axis=0)
        output_shape = get_num_of_predictions(kernels, dilations)
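        # Dummy batch sized to the model's valid output length, passed to
        # create_multi_graph together with the gradients.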
        batch = np.zeros([1, 1, input_time_length - output_shape[0] + 1, 1])
        create_multi_graph(
            [gradients, gradients_mch, gradients_nch],
            [gradients_sem, gradients_mch_sem, gradients_nch_sem], batch,
            ax_s[indices[i]], titles[i])
    plt.legend()
    plt.xlabel('Frequency [Hz]')
    plt.ylabel('Gradient')
    plt.tight_layout()
    print('saving figures:', output)
    fig_s.savefig(output)
    plt.show()
    plt.close(fig_s)
def set_gradient_df_index(gradient_df, layer, file, shifts=False):
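    """
    Re-index `gradient_df` with rFFT frequencies (Hz, assuming a 250 Hz sampling
    rate) matching the network's output length; if `shifts` is True, the shift
    columns are also relabeled from samples to milliseconds.
    """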
    kernel_size, dilations = get_kernels_from_name(file)
    max_k, max_l = get_num_of_predictions(kernel_size, dilations, layer=None)
    print(file, max_k)
    if 'k3_d3' in file:
        max_k = 521
        shape = 521 * 2
    else:
        shape = min((input_time_length - max_k) * 2, 1200)
    # shape = 522
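    # rFFT frequency bins (Hz) for a window of `shape` samples at 250 Hz,
    # rounded to whole Hz.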
    y = np.around(np.fft.rfftfreq(shape, 1 / 250.0), 0)
    # index = np.linspace(0, 125, len(y))
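    # For shift data, relabel the columns from samples (at 250 Hz) to milliseconds.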
    if shifts:
        new_columns = [
            int(1000 * (int(column) / 250)) for column in gradient_df.columns
        ]
        gradient_df.columns = new_columns
    # y = [str(ypsilon).split('.')[0] for ypsilon in y]
    gradient_df = gradient_df.set_index(pandas.Index(y), drop=True)

    return gradient_df