Example #1
def ops_saved_summery(net_name=cfg.NET.__name__,
                      dataset_name=cfg.DATA.name(),
                      mode=Mode.ALL_MODES,
                      ps='*',
                      ones_range=('*', '*'),
                      acc_loss='*',
                      gran_thresh='*',
                      init_acc='*',
                      batch_size=cfg.BATCH_SIZE,
                      max_samples=cfg.TEST_SET_SIZE):
    rec_finder = RecordFinder(net_name, dataset_name, ps, ones_range,
                              gran_thresh, acc_loss, init_acc)
    final_rec_fn = rec_finder.find_rec_filename(mode,
                                                RecordType.FINAL_RESULT_REC)
    if final_rec_fn is None:
        print('No Record found')
        return
    rec = load_from_file(final_rec_fn, '')
    print(rec)

    base_fn = 'ops_summery_' + rec.filename
    summery_fn_pkl = os.path.join(cfg.RESULTS_DIR, base_fn + '.pkl')
    if os.path.exists(summery_fn_pkl):
        arr = load_from_file(summery_fn_pkl, path='')
    else:
        nn = NeuralNet()
        data = Datasets.get(dataset_name, cfg.DATASET_DIR)
        nn.net.initialize_spatial_layers(data.shape(), batch_size,
                                         rec.patch_size)
        test_gen, _ = data.testset(batch_size=batch_size,
                                   max_samples=max_samples)

        arr = [None] * len(rec.mask)
        for idx, layer in enumerate(rec.mask):
            nn.net.reset_spatial()
            print("----------------------------------------------------------------")

            nn.net.strict_mask_update(update_ids=[idx], masks=[layer])
            _, test_acc, _ = nn.test(test_gen)
            ops_saved, ops_total = nn.net.num_ops()

            arr[idx] = (ops_saved, ops_total, test_acc)
            nn.net.print_ops_summary()

        print("----------------------------------------------------------------")
        nn.net.reset_spatial()
        save_to_file(arr, use_default=False, path='', filename=summery_fn_pkl)

    out_path = os.path.join(cfg.RESULTS_DIR, base_fn + ".csv")
    with open(out_path, 'w', newline='') as f:
        writer = csv.writer(f)
        # arr holds (ops_saved, ops_total, test_acc) per layer
        writer.writerow(['layer', 'ops_saved', 'ops_total', 'test_acc'])
        for idx, (ops_saved, ops_total, test_acc) in enumerate(arr):
            writer.writerow([idx, ops_saved, ops_total, test_acc])

    return arr
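
A minimal usage sketch for the summary helper above, assuming the default cfg values and an existing FINAL_RESULT_REC; the patch size and accuracy-loss values are hypothetical placeholders.

# Hypothetical call: per-layer ops-saved summary for a stored uniform-layer result.
# ps and acc_loss are placeholders; any argument can stay '*' to act as a wildcard.
arr = ops_saved_summery(mode=Mode.UNIFORM_LAYER, ps=2, acc_loss=2.0)
if arr is not None:
    for layer_idx, (ops_saved, ops_total, test_acc) in enumerate(arr):
        print(f'layer {layer_idx}: saved {ops_saved}/{ops_total} ops, acc {test_acc}')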
Example #2
def plot_ops_saved_vs_max_acc_loss(net_name,
                                   dataset_name,
                                   ps,
                                   ones_range,
                                   gran_thresh,
                                   acc_loss_opts,
                                   init_acc,
                                   modes=None,
                                   title=None):
    bs_line_rec = get_baseline_rec(net_name, dataset_name, ps, init_acc)
    plt.figure()
    if bs_line_rec is not None:
        plt.plot(acc_loss_opts,
                 [round(bs_line_rec.ops_saved / bs_line_rec.total_ops, 3)] *
                 len(acc_loss_opts),
                 '--',
                 label='baseline ops saved')
        plt.axvline(x=bs_line_rec.init_acc - bs_line_rec.baseline_acc,
                    linestyle='--',
                    label='baseline acc loss')

    modes = get_modes(modes)
    for mode in modes:
        ops_saved = [None] * len(acc_loss_opts)
        for idx, acc_loss in enumerate(acc_loss_opts):
            rec_finder = RecordFinder(net_name, dataset_name, ps, ones_range,
                                      gran_thresh, acc_loss, init_acc)
            fn = rec_finder.find_rec_filename(mode,
                                              RecordType.FINAL_RESULT_REC)
            if fn is not None:
                final_rec = load_from_file(fn, '')
                ops_saved[idx] = round(
                    final_rec.ops_saved / final_rec.total_ops, 3)
        if any(v is not None for v in ops_saved):
            plt.plot(acc_loss_opts, ops_saved, 'o--', label=gran_dict[mode])

    plt.xlabel('max acc loss [%]')
    plt.ylabel('operations saved [%]')

    if title is None:
        title = ''
    plt.title(
        f'Operations Saved vs Maximum Allowed Accuracy Loss {title}\n'
        f'{net_name}, {dataset_name}, INITIAL ACC:{init_acc} \n'
        f'PATCH SIZE:{ps}, ONES:{ones_range[0]}-{ones_range[1]-1}, GRANULARITY:{gran_thresh}\n'
        f'LQ{cfg.LQ_OPTION}, CQ{cfg.CQ_OPTION}r{cfg.CHANNELQ_UPDATE_RATIO}, PQ{cfg.PQ_OPTION}r{cfg.PATCHQ_UPDATE_RATIO}'
    )

    plt.legend()
    # plt.show()
    plt.savefig(
        f'{cfg.RESULTS_DIR}/ops_saved_vs_max_acc_loss_{net_name}_{dataset_name}_acc{init_acc}_'
        +
        f'LQ{cfg.LQ_OPTION}_CQ{cfg.CQ_OPTION}r{cfg.CHANNELQ_UPDATE_RATIO}_PQ{cfg.PQ_OPTION}r{cfg.PATCHQ_UPDATE_RATIO}_'
        + f'ps{ps}_ones{ones_range[0]}x{ones_range[1]}_mg{gran_thresh}.pdf')
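
A sketch of how this plotting helper might be called, assuming final-result records already exist for the listed accuracy-loss options; all concrete argument values are illustrative only.

# Illustrative arguments - records must already exist for these settings.
plot_ops_saved_vs_max_acc_loss(net_name=cfg.NET.__name__,
                               dataset_name=cfg.DATA.name(),
                               ps=2,
                               ones_range=(1, 4),
                               gran_thresh=10,
                               acc_loss_opts=[0.5, 1.0, 2.0, 3.0],
                               init_acc=93.5,
                               modes=[Mode.UNIFORM_LAYER, Mode.UNIFORM_PATCH])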
Example #3
def get_baseline_rec(net_name, dataset_name, ps, init_acc):
    rec_finder = RecordFinder(net_name, dataset_name, ps, ('*', '*'), '*', '*',
                              init_acc)
    bs_line_fn = rec_finder.find_rec_filename(None, RecordType.BASELINE_REC)
    if bs_line_fn is None:
        optim = Optimizer(ps, (None, None), None, None)
        optim.base_line_result()
        bs_line_fn = rec_finder.find_rec_filename(None,
                                                  RecordType.BASELINE_REC)
    if bs_line_fn is None:
        print(
            f' !!! Was not able to get baseline result for initial accuracy of {init_acc} !!!'
        )
        print(f' !!! Adjust TEST_SET_SIZE in Config.py !!!')
        return bs_line_fn
    return load_from_file(bs_line_fn, '')
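
For reference, a small sketch of consuming the returned baseline record; the init_acc value is a placeholder, and the ops_saved / total_ops / baseline_acc fields mirror how the record is used in the plotting helpers above.

# Placeholder arguments; returns None when no baseline record could be produced.
bs_rec = get_baseline_rec(cfg.NET.__name__, cfg.DATA.name(), ps=2, init_acc=93.5)
if bs_rec is not None:
    print(f'baseline ops saved: {round(bs_rec.ops_saved / bs_rec.total_ops, 3)}')
    print(f'baseline acc loss: {bs_rec.init_acc - bs_rec.baseline_acc}')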
Example #4
def plot_ops_saved_vs_ones(net_name,
                           dataset_name,
                           ps,
                           ones_possibilities,
                           gran_thresh,
                           acc_loss,
                           init_acc,
                           modes=None):
    #    bs_line_rec = get_baseline_rec(net_name, dataset_name, ps, init_acc)
    plt.figure()
    #    if bs_line_rec is not None:
    #        plt.plot(ones_possibilities, [bs_line_rec.ops_saved/bs_line_rec.total_ops]*len(ones_possibilities),
    #                                      '--', label=f'baseline, {round(bs_line_rec.init_acc-bs_line_rec.baseline_acc, 2)}% loss')
    modes = get_modes(modes)
    for mode in modes:
        ops_saved = [None] * len(ones_possibilities)
        has_results = False
        for idx, ones in enumerate(ones_possibilities):
            rec_finder = RecordFinder(net_name, dataset_name, ps,
                                      (ones, ones + 1), gran_thresh, acc_loss,
                                      init_acc)
            fn = rec_finder.find_rec_filename(mode,
                                              RecordType.FINAL_RESULT_REC)
            if fn is not None:
                rec = load_from_file(fn, '')
                ops_saved[idx] = round(rec.ops_saved / rec.total_ops, 3)
                has_results = True
        if has_results:
            plt.plot(ones_possibilities,
                     ops_saved,
                     'o--',
                     label=gran_dict[mode])
    plt.xlabel('number of ones')
    plt.ylabel('operations saved [%]')
    plt.title(
        f'Operations Saved vs Number of Ones \n'
        f'{net_name}, {dataset_name}, INITIAL ACC:{init_acc} \n'
        f'PATCH SIZE:{ps}, MAX ACC LOSS:{acc_loss}, GRANULARITY:{gran_thresh}')
    plt.legend()
    plt.savefig(
        f'{cfg.RESULTS_DIR}/ops_saved_vs_number_of_ones_{net_name}_{dataset_name}_'
        + f'acc{init_acc}_ps{ps}_ma{acc_loss}_mg{gran_thresh}.pdf')
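
A possible invocation, assuming final-result records exist for each ones value in the sweep; the numeric arguments are placeholders.

# Hypothetical sweep over 1-3 ones per patch (each run was stored with ones_range=(n, n+1)).
plot_ops_saved_vs_ones(cfg.NET.__name__, cfg.DATA.name(),
                       ps=2,
                       ones_possibilities=range(1, 4),
                       gran_thresh=10,
                       acc_loss=2.0,
                       init_acc=93.5)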
Example #5
    def __init__(self,
                 patch_size,
                 ones_range,
                 gran_thresh,
                 max_acc_loss,
                 init_acc=None,
                 test_size=cfg.TEST_SET_SIZE,
                 patterns_idx=None):
        self.ps = patch_size
        self.max_acc_loss = max_acc_loss
        self.gran_thresh = gran_thresh

        if patterns_idx is None:
            self.ones_range = ones_range
            self.input_patterns = None
        else:
            patterns_rec = load_from_file(
                f'all_patterns_ps{self.ps}_cluster{patterns_idx}.pkl',
                path=cfg.RESULTS_DIR)
            self.ones_range = (patterns_rec[1], patterns_rec[1] + 1)
            self.input_patterns = patterns_rec[2]

        self.full_net_run_time = None
        self.total_ops = None

        self.nn = NeuralNet()
        self.nn.net.initialize_spatial_layers(dat.shape(), cfg.BATCH_SIZE,
                                              self.ps)
        self.test_gen, _ = dat.testset(batch_size=cfg.BATCH_SIZE,
                                       max_samples=test_size)
        self.test_set_size = test_size
        if INNAS_COMP:
            init_acc = DEBUG_INIT_ACC
        if init_acc is None:
            _, test_acc, correct = self.nn.test(self.test_gen)
            print(f'==> Asserted test-acc of: {test_acc} [{correct}]\n ')
            self.init_acc = test_acc  # TODO - Fix initialize bug
        else:
            self.init_acc = init_acc
        self.record_finder = RecordFinder(cfg.NET.__name__, dat.name(),
                                          patch_size, ones_range, gran_thresh,
                                          max_acc_loss, self.init_acc)
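
A constructor-call sketch, assuming this __init__ belongs to the Optimizer class shown later and that dat, cfg and INNAS_COMP are the module-level globals used above; the argument values are illustrative.

# Illustrative construction; init_acc=None makes the constructor measure the initial accuracy.
optim = Optimizer(patch_size=2,
                  ones_range=(1, 4),
                  gran_thresh=10,
                  max_acc_loss=2.0,
                  init_acc=None)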
Example #6
def show_final_mask_grid_resnet18(channel=0,
                                  net_name='*',
                                  dataset_name='*',
                                  mode=Mode.ALL_MODES,
                                  ps='*',
                                  ones_range=('*', '*'),
                                  acc_loss='*',
                                  gran_thresh='*',
                                  init_acc='*',
                                  filename=None,
                                  font_size=22,
                                  show_patch_grid=False):
    rec_finder = RecordFinder(net_name, dataset_name, ps, ones_range,
                              gran_thresh, acc_loss, init_acc)
    final_rec_fn = rec_finder.find_rec_filename(mode,
                                                RecordType.FINAL_RESULT_REC)
    if final_rec_fn is None:
        print('No Record found')
        return
    rec = load_from_file(final_rec_fn, '')

    shift = 4
    grid = (18, 24)
    st = ((0, 0), (0, 8), (0, 16), (8, 0), (8, 8), (8, 16), (8, 20), (12, 16),
          (12, 20), (16, 0 + shift), (16, 2 + shift), (16, 4 + shift),
          (16, 6 + shift), (16, 8 + shift), (16, 10 + shift), (16, 12 + shift),
          (16, 14 + shift))
    span = (8, 8, 8, 8, 8, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2)

    fig = plt.figure()
    plt.tight_layout()
    fig.set_figheight(10)
    fig.set_figwidth(13)
    plt.rcParams.update({'font.size': font_size})
    for l_to_plot_idx, l_to_plot in enumerate(rec.mask):
        l_to_plot = l_to_plot.numpy()

        plt.subplot2grid(grid,
                         st[l_to_plot_idx],
                         colspan=span[l_to_plot_idx],
                         rowspan=span[l_to_plot_idx])
        plt.imshow(l_to_plot[channel], cmap=plt.cm.gray)  #(0:black, 1:white)
        plt.title(f'{l_to_plot_idx}')
        ax = plt.gca()
        # Minor ticks
        ax.set_xticks(np.arange(-.5, l_to_plot[channel].shape[0] - 1,
                                rec.patch_size),
                      minor=True)
        ax.set_yticks(np.arange(-.5, l_to_plot[channel].shape[1] - 1,
                                rec.patch_size),
                      minor=True)
        # Gridlines based on minor ticks
        if show_patch_grid:
            ax.grid(which='minor', color='r', linestyle='-', linewidth=2)
        plt.tick_params(axis='both',
                        which='major',
                        bottom=False,
                        top=False,
                        left=False,
                        right=False,
                        labelbottom=False,
                        labelleft=False)
    plt.tight_layout()
    plt.subplots_adjust(top=1)
    if filename is None:
        plt.show()
    else:
        plt.savefig(filename)

    return rec
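
A usage sketch for the ResNet18 mask-grid plot above; the channel, patch size and output path are hypothetical.

# Saves the 17-subplot mask grid to a PDF instead of showing it interactively.
rec = show_final_mask_grid_resnet18(channel=0,
                                    mode=Mode.UNIFORM_PATCH,
                                    ps=2,
                                    filename=os.path.join(cfg.RESULTS_DIR,
                                                          'final_mask_grid.pdf'),
                                    show_patch_grid=True)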
Example #7
def show_channel_grid(layer=0,
                      net_name='*',
                      dataset_name='*',
                      mode=Mode.ALL_MODES,
                      ps='*',
                      ones_range=('*', '*'),
                      acc_loss='*',
                      gran_thresh='*',
                      init_acc='*',
                      filename=None,
                      font_size=22,
                      show_patch_grid=False):
    rec_finder = RecordFinder(net_name, dataset_name, ps, ones_range,
                              gran_thresh, acc_loss, init_acc)
    final_rec_fn = rec_finder.find_rec_filename(mode,
                                                RecordType.FINAL_RESULT_REC)
    if final_rec_fn is None:
        print('No Record found')
        return
    rec = load_from_file(final_rec_fn, '')

    layer_mask = rec.mask[layer].numpy()

    no_of_channels = layer_mask.shape[0]
    rows = math.ceil(math.sqrt(no_of_channels))
    fig, axs = plt.subplots(nrows=rows, ncols=rows)
    fig.set_figheight(30)
    fig.set_figwidth(30)
    plt.rcParams.update({'font.size': font_size})

    for c in range(layer_mask.shape[0]):
        row_idx = math.floor(c / rows)
        col_idx = c - row_idx * rows
        axs[row_idx][col_idx].imshow(layer_mask[c],
                                     cmap=plt.cm.gray)  #(0:black, 1:white)
        axs[row_idx][col_idx].set_title(f'{c}')

        # Minor ticks
        axs[row_idx][col_idx].set_xticks(np.arange(-.5,
                                                   layer_mask[c].shape[0] - 1,
                                                   rec.patch_size),
                                         minor=True)
        axs[row_idx][col_idx].set_yticks(np.arange(-.5,
                                                   layer_mask[c].shape[1] - 1,
                                                   rec.patch_size),
                                         minor=True)
        # Gridlines based on minor ticks
        if show_patch_grid:
            axs[row_idx][col_idx].grid(which='minor',
                                       color='r',
                                       linestyle='-',
                                       linewidth=2)
        axs[row_idx][col_idx].tick_params(axis='both',
                                          which='major',
                                          bottom=False,
                                          top=False,
                                          left=False,
                                          right=False,
                                          labelbottom=False,
                                          labelleft=False)
        #axs[row_idx][col_idx].colorbar()
    plt.tight_layout()
    if filename is None:
        plt.show()
    else:
        plt.savefig(filename)

    return rec
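
A short usage sketch with a hypothetical layer index and patch size; passing no filename shows the grid interactively.

# Plots every channel mask of layer 3 with the patch grid overlaid.
rec = show_channel_grid(layer=3, mode=Mode.UNIFORM_PATCH, ps=2, show_patch_grid=True)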
Example #8
def show_final_mask(show_all_layers=False,
                    layers_to_show=None,
                    show_all_channels=False,
                    channels_to_show=None,
                    plot_3D=False,
                    net_name='*',
                    dataset_name='*',
                    mode=Mode.ALL_MODES,
                    ps='*',
                    ones_range=('*', '*'),
                    acc_loss='*',
                    gran_thresh='*',
                    init_acc='*'):
    rec_finder = RecordFinder(net_name, dataset_name, ps, ones_range,
                              gran_thresh, acc_loss, init_acc)
    final_rec_fn = rec_finder.find_rec_filename(mode,
                                                RecordType.FINAL_RESULT_REC)
    if final_rec_fn is None:
        print('No Record found')
        return
    rec = load_from_file(final_rec_fn, '')
    print(rec)
    mask_size = [
        rec.layers_layout[l][0] * rec.layers_layout[l][1] *
        rec.layers_layout[l][2] for l in range(len(rec.mask))
    ]
    zeros_in_each_layer = [
        np.count_nonzero(rec.mask[l].numpy() == 0) / mask_size[l]
        for l in range(len(rec.mask))
    ]

    plt.figure()
    tick_label = [str(l) for l in range(len(rec.mask))]
    plt.bar(list(range(len(rec.mask))),
            zeros_in_each_layer,
            tick_label=tick_label)
    plt.xlabel('layer index')
    plt.ylabel('zeros [%]')
    plt.title('[%] of Zeros in each Prediction Layer for the Chosen Mask')
    plt.show()

    if show_all_layers:
        layers_to_show = range(len(rec.mask))
    elif layers_to_show is None:
        layers_to_show = [
            max(range(len(zeros_in_each_layer)),
                key=zeros_in_each_layer.__getitem__)
        ]

    for idx, l_to_plot_idx in enumerate(layers_to_show):
        l_to_plot = rec.mask[l_to_plot_idx].numpy()
        if rec.mode == Mode.UNIFORM_FILTERS or rec.mode == Mode.UNIFORM_LAYER:  # all channels in layer are the same
            show_channel(l_to_plot_idx, 0, rec.layers_layout[l_to_plot_idx],
                         l_to_plot[0], rec.patch_size)
        else:
            if plot_3D:
                show_layer(l_to_plot_idx, rec.layers_layout[l_to_plot_idx],
                           l_to_plot)
            if show_all_channels:
                channels = range(rec.layers_layout[l_to_plot_idx][0])
            elif channels_to_show is None:
                channels = [
                    0,
                    round(rec.layers_layout[l_to_plot_idx][0] / 2),
                    rec.layers_layout[l_to_plot_idx][0] - 1
                ]
            elif type(channels_to_show) is list and type(
                    channels_to_show[0]) is list:
                channels = channels_to_show[idx]
            elif type(channels_to_show) is not list:
                channels = [channels_to_show]
            else:
                channels = channels_to_show
            for channel in channels:
                show_channel(l_to_plot_idx, channel,
                             rec.layers_layout[l_to_plot_idx],
                             l_to_plot[channel], rec.patch_size)

    return rec
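
A usage sketch for the mask-inspection helper above; the layer and channel indices are placeholders chosen for illustration.

# Bar chart of per-layer zero fractions, then per-channel plots for two chosen layers.
rec = show_final_mask(layers_to_show=[0, 5],
                      channels_to_show=[[0, 8], [0, 16]],
                      plot_3D=True,
                      mode=Mode.UNIFORM_PATCH,
                      ps=2)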
Example #9
class Optimizer:
    def __init__(self,
                 patch_size,
                 ones_range,
                 gran_thresh,
                 max_acc_loss,
                 init_acc=None,
                 test_size=cfg.TEST_SET_SIZE,
                 patterns_idx=None):
        self.ps = patch_size
        self.max_acc_loss = max_acc_loss
        self.gran_thresh = gran_thresh

        if patterns_idx is None:
            self.ones_range = ones_range
            self.input_patterns = None
        else:
            patterns_rec = load_from_file(
                f'all_patterns_ps{self.ps}_cluster{patterns_idx}.pkl',
                path=cfg.RESULTS_DIR)
            self.ones_range = (patterns_rec[1], patterns_rec[1] + 1)
            self.input_patterns = patterns_rec[2]

        self.full_net_run_time = None
        self.total_ops = None

        self.nn = NeuralNet()
        self.nn.net.initialize_spatial_layers(dat.shape(), cfg.BATCH_SIZE,
                                              self.ps)
        self.test_gen, _ = dat.testset(batch_size=cfg.BATCH_SIZE,
                                       max_samples=test_size)
        self.test_set_size = test_size
        if INNAS_COMP:
            init_acc = DEBUG_INIT_ACC
        if init_acc is None:
            _, test_acc, correct = self.nn.test(self.test_gen)
            print(f'==> Asserted test-acc of: {test_acc} [{correct}]\n ')
            self.init_acc = test_acc  # TODO - Fix initialize bug
        else:
            self.init_acc = init_acc
        self.record_finder = RecordFinder(cfg.NET.__name__, dat.name(),
                                          patch_size, ones_range, gran_thresh,
                                          max_acc_loss, self.init_acc)

    def plot_ops_saved_accuracy_uniform_network(self):
        layers_layout = self.nn.net.generate_spatial_sizes(dat.shape())
        rcs = Record(layers_layout, self.gran_thresh, True, Mode.UNIFORM_LAYER,
                     self.init_acc, self.ps, self.ones_range)
        no_of_patterns = rcs.all_patterns.shape[2]
        ops_saved_array = [None] * no_of_patterns
        acc_array = [None] * no_of_patterns

        self._init_nn()
        for p_idx in range(no_of_patterns):
            sp_list = [None] * len(layers_layout)
            for layer, layer_mask in enumerate(
                    mf.base_line_mask(layers_layout,
                                      self.ps,
                                      pattern=rcs.all_patterns[:, :, p_idx])):
                sp_list[layer] = torch.from_numpy(layer_mask)
            self.nn.net.strict_mask_update(update_ids=list(
                range(len(layers_layout))),
                                           masks=sp_list)
            _, test_acc, _ = self.nn.test(self.test_gen)
            ops_saved, ops_total = self.nn.net.num_ops()
            self.nn.net.reset_ops()
            ops_saved_array[p_idx] = ops_saved / ops_total
            acc_array[p_idx] = test_acc

        plt.figure()
        plt.subplot(211)
        plt.plot(list(range(no_of_patterns)), ops_saved_array, 'o')
        plt.xlabel('pattern index')
        plt.ylabel('ops_saved [%]')
        plt.title(f'ops saved for uniform network, patch_size:{self.ps}')
        plt.subplot(212)
        plt.plot(list(range(no_of_patterns)), acc_array, 'o')
        plt.xlabel('pattern index')
        plt.ylabel('accuracy [%]')
        plt.title(f'accuracy for uniform network, patch_size:{self.ps}')

        data = [rcs.all_patterns, ops_saved_array, acc_array]
        save_to_file(
            data, False, cfg.RESULTS_DIR,
            f'baseline_all_patterns_{cfg.NET.__name__}_{dat.name()}' +
            f'acc{self.init_acc}_ps{self.ps}_ones{self.ones_range[0]}x{self.ones_range[1]}_mg{self.gran_thresh}.pkl'
        )

        plt.savefig(
            f'{cfg.RESULTS_DIR}/baseline_all_patterns_{cfg.NET.__name__}_{dat.name()}'
            +
            f'acc{self.init_acc}_ps{self.ps}_ones{self.ones_range[0]}x{self.ones_range[1]}_mg{self.gran_thresh}.pdf'
        )

        return data

    def base_line_result(self):
        layers_layout = self.nn.net.generate_spatial_sizes(dat.shape())
        self._init_nn()

        sp_list = [None] * len(layers_layout)
        for layer, layer_mask in enumerate(
                mf.base_line_mask(layers_layout, self.ps)):
            sp_list[layer] = torch.from_numpy(layer_mask)
        self.nn.net.strict_mask_update(update_ids=list(
            range(len(layers_layout))),
                                       masks=sp_list)

        _, test_acc, _ = self.nn.test(self.test_gen)
        ops_saved, ops_total = self.nn.net.num_ops()
        bl_rec = BaselineResultRc(self.init_acc, test_acc, ops_saved,
                                  ops_total, self.ps, cfg.NET.__name__,
                                  dat.name())
        print(bl_rec)
        save_to_file(bl_rec, True, cfg.RESULTS_DIR)

    def _quantizier_main(self, rec_type, in_rec):
        if rec_type == RecordType.lQ_RESUME:
            resume_param_path = self.record_finder.find_rec_filename(
                in_rec.mode, RecordType.lQ_RESUME)
            quantizier = LayerQuantizier(in_rec, self.init_acc,
                                         self.max_acc_loss, self.ps,
                                         self.ones_range, self.get_total_ops(),
                                         resume_param_path)
        else:
            q_rec_fn = self.record_finder.find_rec_filename(
                in_rec.mode, rec_type)
            Quantizier = PatchQuantizier if rec_type == RecordType.pQ_REC else ChannelQuantizier
            if q_rec_fn is None:
                quantizier = Quantizier(in_rec, self.init_acc,
                                        self.max_acc_loss, self.ps)
            else:
                quantizier = Quantizier(in_rec, self.init_acc,
                                        self.max_acc_loss, self.ps,
                                        load_from_file(q_rec_fn, ''))
        if not quantizier.is_finised():
            self._init_nn()
            quantizier.simulate(self.nn, self.test_gen)
        if RecordType.lQ_RESUME == rec_type:
            return
        return quantizier.output_rec

    def create_FR_with_different_acc_loss(self, mode, acc_loss):
        self.record_finder.max_acc_loss = '*'
        best_FR = None
        for lq_rec_fn in self.record_finder.find_all_recs_fns(
                mode, RecordType.lQ_RESUME):
            in_rec_fn = '_'.join(os.path.basename(lq_rec_fn).split('_')[2:])
            lq = LayerQuantizier(
                load_from_file(in_rec_fn, path=cfg.RESULTS_DIR), self.init_acc,
                self.max_acc_loss, self.ps, self.ones_range,
                self.get_total_ops(), lq_rec_fn)
            final_rec = lq.find_final_mask(acc_loss,
                                           nn=self.nn,
                                           test_gen=self.test_gen)
            if best_FR is None:
                best_FR = final_rec
            elif best_FR.ops_saved < final_rec.ops_saved:
                best_FR = final_rec
        print(best_FR)
        save_to_file(best_FR, True, cfg.RESULTS_DIR)
        print('==> result saved to ' + best_FR.filename)
        self.record_finder.max_acc_loss = self.max_acc_loss
        return best_FR

    def retrain_with_mask(self, final_rec, epochs=50, lr=0.01):
        ckp_name_prefix = final_rec.get_retrain_prefix()
        self._init_nn()
        self.nn.net.strict_mask_update(update_ids=list(
            range(len(final_rec.mask))),
                                       masks=final_rec.mask)
        self.nn.net.print_spatial_status()
        self.nn.train(epochs=epochs, lr=lr, ckp_name_prefix=ckp_name_prefix)

    def create_FR_after_retrain(self,
                                mode,
                                acc_loss,
                                retrain=True,
                                epochs=50,
                                lr=0.01):
        final_rec = self.create_FR_with_different_acc_loss(mode, acc_loss)
        if retrain:
            self.retrain_with_mask(final_rec, epochs=epochs, lr=lr)
        retrain_nn = NeuralNet(ckp_name_prefix=final_rec.get_retrain_prefix())
        retrain_nn.net.initialize_spatial_layers(dat.shape(), cfg.BATCH_SIZE,
                                                 self.ps)
        retrain_nn.net.reset_spatial()
        retrain_nn.net.strict_mask_update(update_ids=list(
            range(len(final_rec.mask))),
                                          masks=final_rec.mask)
        if INNAS_COMP:
            test_acc = 100
            ops_saved = 100
            ops_total = 100
        else:
            _, test_acc, _ = retrain_nn.test(self.test_gen)
            ops_saved, ops_total = retrain_nn.net.num_ops()
        final_rec.retrain_update(test_acc, ops_saved, ops_total, epochs, lr)
        print(final_rec)
        save_to_file(final_rec, path=cfg.RESULTS_DIR)

    def run_mode(self, mode=None):
        if Mode.MAX_GRANULARITY == mode:
            self.by_max_granularity()
        elif Mode.UNIFORM_FILTERS == mode:
            self.by_uniform_filters()
        elif Mode.UNIFORM_LAYER == mode:
            self.by_uniform_layers()
        elif Mode.UNIFORM_PATCH == mode:
            self.by_uniform_patches()
        else:
            self.run_all_modes()

    def run_all_modes(self):
        self.by_uniform_layers()
        self.by_uniform_filters()
        self.by_uniform_patches()
        self.by_max_granularity()

    def by_uniform_layers(self):
        in_rec = self.gen_first_lvl_results(Mode.UNIFORM_LAYER)
        self._quantizier_main(RecordType.lQ_RESUME, in_rec)
        self.record_finder.print_result(Mode.UNIFORM_LAYER)

    def by_uniform_patches(self):
        in_rec = self.gen_first_lvl_results(Mode.UNIFORM_PATCH)
        cQ_rec = self._quantizier_main(RecordType.cQ_REC, in_rec)
        self._quantizier_main(RecordType.lQ_RESUME, cQ_rec)
        self.record_finder.print_result(Mode.UNIFORM_PATCH)

    def by_uniform_filters(self):
        in_rec = self.gen_first_lvl_results(Mode.UNIFORM_FILTERS)
        pQ_rec = self._quantizier_main(RecordType.pQ_REC, in_rec)
        self._quantizier_main(RecordType.lQ_RESUME, pQ_rec)
        self.record_finder.print_result(Mode.UNIFORM_FILTERS)

    def by_max_granularity(self):
        in_rec = self.gen_first_lvl_results(Mode.MAX_GRANULARITY)
        pQ_rec = self._quantizier_main(RecordType.pQ_REC, in_rec)
        cQ_rec = self._quantizier_main(RecordType.cQ_REC, pQ_rec)
        self._quantizier_main(RecordType.lQ_RESUME, cQ_rec)
        self.record_finder.print_result(Mode.MAX_GRANULARITY)

    def print_runtime_eval(self):
        print("================================================================")
        print("----------------------------------------------------------------")
        print(f"                      NET: {cfg.NET.__name__}")
        print(f"                  DATASET: {dat.name()}")
        print(f"               PATCH SIZE: {self.ps}")
        print(
            f"                     ONES: {self.ones_range[0]}-{self.ones_range[1]-1}"
        )
        print(f"              GRANULARITY: {self.gran_thresh}")
        print(f"            TEST SET SIZE: {self.test_set_size}")
        print(f"    CHANNELQ UPDATE RATIO: {cfg.CHANNELQ_UPDATE_RATIO}")
        print(f"      PATCHQ UPDATE RATIO: {cfg.PATCHQ_UPDATE_RATIO}")
        print("----------------------------------------------------------------")
        for mode in Modes:
            no_of_runs, run_times = self.eval_run_time(mode)
            total_run_time = (no_of_runs[0] * run_times[0] +
                              no_of_runs[1] * run_times[0] +
                              no_of_runs[2] * run_times[1]) / (60 * 60)
            if total_run_time > 24:
                total_run_time = round(total_run_time / 24, 2)
                total_run_time_units = 'days'
            else:
                total_run_time = round(total_run_time, 2)
                total_run_time_units = 'hours'
            print("    {}    {:>25} [{}]".format(mode, total_run_time,
                                                 total_run_time_units))
            print("----------------------------------------------------------------")
            print("         iters 1st lvl         iters 2nd lvl          iters lQ ")
            print("number {:>15} {:>21} {:>17}".format(no_of_runs[0],
                                                       no_of_runs[1],
                                                       no_of_runs[2]))
            print("time   {:>15} {:>21} {:>17}".format(
                round(no_of_runs[0] * run_times[0]),
                round(no_of_runs[1] * run_times[0]),
                round(no_of_runs[2] * run_times[1])))
            print(f"\nsec per iter    ")
            print(f"        1st/2nd lvl: {run_times[0]}")
            print(f"        lQ: {run_times[1]}")

            print(
                f"----------------------------------------------------------------"
            )
        print("================================================================")

    def eval_run_time(self, mode, no_of_tries=5):
        layers_layout = self.nn.net.generate_spatial_sizes(dat.shape())
        if self.input_patterns is None:
            recs_first_lvl = Record(layers_layout, self.gran_thresh, True,
                                    mode, self.init_acc, self.ps,
                                    self.ones_range)
        else:
            recs_first_lvl = Record(layers_layout, self.gran_thresh, False,
                                    mode, self.init_acc, self.input_patterns,
                                    self.ones_range)
        first_lvl_runs = recs_first_lvl.size

        self.nn.net.reset_spatial()
        run_time_for_iter = 0
        for idx in range(no_of_tries):
            layer = random.randint(0, recs_first_lvl.no_of_layers - 1)
            channel = random.randint(0,
                                     recs_first_lvl.no_of_channels[layer] - 1)
            patch = random.randint(0, recs_first_lvl.no_of_patches[layer] - 1)
            pattern_idx = random.randint(
                0, recs_first_lvl.no_of_patterns[layer] - 1)
            pattern = recs_first_lvl.all_patterns[:, :, pattern_idx]
            mask = mf.get_specific_mask(layers_layout[layer], channel, patch,
                                        pattern,
                                        recs_first_lvl.patch_sizes[layer],
                                        mode)
            st_time = time.time()
            self.nn.net.reset_spatial()
            self.nn.net.strict_mask_update(update_ids=[layer],
                                           masks=[torch.from_numpy(mask)])
            _, test_acc, _ = self.nn.test(self.test_gen)
            end_time = time.time()
            run_time_for_iter += (end_time - st_time)

        run_time_for_iter = run_time_for_iter / no_of_tries
        recs_first_lvl.fill_empty()

        if mode == Mode.UNIFORM_LAYER:
            second_lvl_runs = 0
            lQ = LayerQuantizier(recs_first_lvl, self.init_acc, 0, self.ps,
                                 self.ones_range, self.get_total_ops())
            lQ_runs = lQ.number_of_iters()
        elif mode == Mode.MAX_GRANULARITY:
            pQ = PatchQuantizier(recs_first_lvl, self.init_acc, 0, self.ps)
            pQ.output_rec.fill_empty()
            cQ = ChannelQuantizier(pQ.output_rec, self.init_acc, 0, self.ps)
            cQ.output_rec.fill_empty()
            second_lvl_runs = pQ.number_of_iters() + cQ.number_of_iters()
            lQ = LayerQuantizier(cQ.output_rec, self.init_acc, 0, self.ps,
                                 self.ones_range, self.get_total_ops())
            lQ_runs = lQ.number_of_iters()
        elif mode == Mode.UNIFORM_FILTERS:
            pQ = PatchQuantizier(recs_first_lvl, self.init_acc, 0, self.ps)
            second_lvl_runs = pQ.number_of_iters()
            pQ.output_rec.fill_empty()
            lQ = LayerQuantizier(pQ.output_rec, self.init_acc, 0, self.ps,
                                 self.ones_range, self.get_total_ops())
            lQ_runs = lQ.number_of_iters()
        elif mode == Mode.UNIFORM_PATCH:
            cQ = ChannelQuantizier(recs_first_lvl, self.init_acc, 0, self.ps)
            cQ.output_rec.fill_empty()
            second_lvl_runs = cQ.number_of_iters()
            lQ = LayerQuantizier(cQ.output_rec, self.init_acc, 0, self.ps,
                                 self.ones_range, self.get_total_ops())
            lQ_runs = lQ.number_of_iters()

        no_of_runs = (first_lvl_runs, second_lvl_runs, lQ_runs)
        run_times = (round(run_time_for_iter,
                           3), self.get_full_net_run_time(no_of_tries))
        return no_of_runs, run_times

    def get_total_ops(self):
        if self.total_ops is None:
            self._init_nn()
            self.get_full_net_run_time(1)
        return self.total_ops

    def get_full_net_run_time(self, no_of_tries):
        if self.full_net_run_time is None:
            self.nn.net.reset_spatial()
            self.nn.net.fill_masks_to_val(1)
            self.full_net_run_time = 0
            for idx in range(no_of_tries):
                st_time = time.time()
                _, test_acc, _ = self.nn.test(self.test_gen)
                if self.total_ops is None:
                    _, self.total_ops = self.nn.net.num_ops()
                end_time = time.time()
                self.nn.net.reset_ops()
                assert test_acc == self.init_acc, f'starting accuracy does not match! curr_acc:{test_acc}, prev_acc:{self.init_acc}'
                self.full_net_run_time += (end_time - st_time)
            self.full_net_run_time = round(
                self.full_net_run_time / no_of_tries, 3)
        return self.full_net_run_time

    def gen_first_lvl_results(self, mode):
        rec_filename = self.record_finder.find_rec_filename(
            mode, RecordType.FIRST_LVL_REC)
        if rec_filename is not None:
            rcs = load_from_file(rec_filename, path='')
            st_point = rcs.find_resume_point()
            if st_point is None:
                return rcs

        layers_layout = self.nn.net.generate_spatial_sizes(dat.shape())
        self._init_nn()

        if rec_filename is None:
            if self.input_patterns is None:
                rcs = Record(layers_layout, self.gran_thresh, True, mode,
                             self.init_acc, self.ps, self.ones_range)
            else:
                rcs = Record(layers_layout, self.gran_thresh, False, mode,
                             self.init_acc, self.input_patterns,
                             self.ones_range)
            st_point = [0] * 4

            if INNAS_COMP:
                rcs.filename = 'DEBUG_' + rcs.filename

        print('==> Result will be saved to ' +
              os.path.join(cfg.RESULTS_DIR, rcs.filename))
        save_counter = 0
        for layer, channel, patch, pattern_idx, mask in tqdm(
                mf.gen_masks_with_resume(self.ps,
                                         rcs.all_patterns,
                                         rcs.mode,
                                         rcs.gran_thresh,
                                         layers_layout,
                                         resume_params=st_point)):
            self.nn.net.strict_mask_update(update_ids=[layer],
                                           masks=[torch.from_numpy(mask)])

            if INNAS_COMP:
                test_acc = 100
                ops_saved = 100
                ops_total = 100
            else:
                _, test_acc, _ = self.nn.test(self.test_gen)
                ops_saved, ops_total = self.nn.net.num_ops()
                self.nn.net.reset_spatial()
            rcs.addRecord(ops_saved, ops_total, test_acc, layer, channel,
                          patch, pattern_idx)

            save_counter += 1
            if save_counter > cfg.SAVE_INTERVAL:
                save_to_file(rcs, True, cfg.RESULTS_DIR)
                save_counter = 0

        save_to_file(rcs, True, cfg.RESULTS_DIR)
        print('==> Result saved to ' +
              os.path.join(cfg.RESULTS_DIR, rcs.filename))
        return rcs

    def _init_nn(self):
        self.nn.net.reset_spatial()
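
Finally, a sketch of an end-to-end run with the Optimizer class, assuming the surrounding module provides dat, cfg and the quantizier classes; the constructor arguments and accuracy-loss budgets are placeholders.

# Estimate run time, optimize one mode, then rebuild the final record for a tighter budget.
optim = Optimizer(patch_size=2, ones_range=(1, 4), gran_thresh=10, max_acc_loss=2.0)
optim.print_runtime_eval()
optim.run_mode(Mode.UNIFORM_LAYER)
optim.create_FR_with_different_acc_loss(Mode.UNIFORM_LAYER, acc_loss=1.0)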