Code Example #1
def grib_invdist(gid, target_lats, target_lons, mv):
    num_cells = target_lons.size
    indices = np.indices(target_lons.shape)
    valid_target_coords = (target_lons > -1.0e+10) & (target_lons != mv)
    xs = np.where(valid_target_coords, indices[0], int_fill_value).ravel()
    ys = np.where(valid_target_coords, indices[1], int_fill_value).ravel()
    idxs1 = empty(num_cells, fill_value=int_fill_value, dtype=int)
    idxs2 = empty(num_cells, fill_value=int_fill_value, dtype=int)
    idxs3 = empty(num_cells, fill_value=int_fill_value, dtype=int)
    idxs4 = empty(num_cells, fill_value=int_fill_value, dtype=int)
    invs1 = empty(num_cells)
    invs2 = empty(num_cells)
    invs3 = empty(num_cells)
    invs4 = empty(num_cells)

    format_progress = '{}Inverse distance interpolation: {}/{}  [outs: {}] ({}%)'.format
    i = 0
    outs = 0
    back_char, progress_step = progress_step_and_backchar(num_cells)
    stdout.write('Start interpolation: {}\n'.format(now_string()))
    stdout.write(format_progress(back_char, 0, num_cells, outs, 0))
    stdout.flush()

    for lat, lon in zip(target_lats.flat, target_lons.flat):  # itertools.izip was Python 2 only
        if i % progress_step == 0:
            stdout.write(format_progress(back_char, i, num_cells, outs, i * 100. / num_cells))
            stdout.flush()
        if not (lon <= -1.0e+10 or lon == mv):  # match the validity mask above (target_lons > -1.0e+10)

            try:
                # TODO: check whether the scalar conversion is really needed here
                # (np.asscalar is deprecated; ndarray.item() is the modern equivalent)
                n_nearest = gribapi.grib_find_nearest(gid, lat.item(), lon.item(), npoints=4)
            except gribapi.GribInternalError:
                # typically an "out of grid" error
                outs += 1
                xs[i] = int_fill_value
                ys[i] = int_fill_value
            else:
                invs1[i], invs2[i], invs3[i], invs4[i], idxs1[i], idxs2[i], idxs3[i], idxs4[i] = _compute_coeffs_and_idxs(n_nearest)
        i += 1

    invs1 = invs1[~np.isnan(invs1)]
    invs2 = invs2[~np.isnan(invs2)]
    invs3 = invs3[~np.isnan(invs3)]
    invs4 = invs4[~np.isnan(invs4)]
    sums = ne.evaluate('invs1 + invs2 + invs3 + invs4')
    coeffs1 = ne.evaluate('invs1 / sums')
    coeffs2 = ne.evaluate('invs2 / sums')
    coeffs3 = ne.evaluate('invs3 / sums')
    coeffs4 = ne.evaluate('invs4 / sums')
    stdout.write('{}{:>100}'.format(back_char, ' '))
    stdout.write(format_progress(back_char, i, num_cells, outs, 100))
    stdout.write('End interpolation: {}\n\n'.format(now_string()))
    stdout.flush()
    return xs[xs != int_fill_value], ys[ys != int_fill_value], \
        idxs1[idxs1 != int_fill_value], idxs2[idxs2 != int_fill_value], idxs3[idxs3 != int_fill_value], idxs4[idxs4 != int_fill_value], \
        coeffs1, coeffs2, coeffs3, coeffs4
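
Note: this snippet (and Code Example #3 below) relies on module-level helpers that are not shown: empty, int_fill_value, progress_step_and_backchar and now_string, plus numpy as np, numexpr as ne and sys.stdout. A minimal sketch of plausible definitions follows; the names come from the snippets, but the bodies are guesses, not the project's actual code:

import sys
from datetime import datetime

import numpy as np

stdout = sys.stdout
int_fill_value = -999999  # assumed sentinel marking "no index"


def empty(num_cells, fill_value=np.nan, dtype=float):
    # pre-filled 1-D buffer; np.full supports the fill_value keyword used above
    return np.full(num_cells, fill_value, dtype=dtype)


def now_string():
    # timestamp usable in file names, e.g. '04_Oct_2018__18_11'
    return datetime.now().strftime('%d_%b_%Y__%H_%M')


def progress_step_and_backchar(num_cells):
    # report roughly every 1% and rewind the progress line with '\r'
    return '\r', max(num_cells // 100, 1)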
Code Example #2
    def get_model_out_folder(self):
        if getattr(self, 'model_out_folder', None) is None:
            now = now_string()
            path_model_checkpoint = os.path.join('model', self.save_folder, now)
            os.makedirs(path_model_checkpoint, exist_ok=True)
            self.model_out_folder = path_model_checkpoint
        return self.model_out_folder
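
A quick usage sketch (trainer is a hypothetical instance with save_folder set): the first call mints a timestamped folder under model/, and later calls reuse the cached path, so one run keeps all its checkpoints together:

first = trainer.get_model_out_folder()   # e.g. model/my_net/04_Oct_2018__18_11
second = trainer.get_model_out_folder()  # same folder, no new timestamp
assert first == second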
Code Example #3
def grib_nearest(gid, target_lats, target_lons, mv):
    num_cells = target_lons.size
    indices = np.indices(target_lons.shape)
    valid_target_coords = (target_lons > -1.0e+10) & (target_lons != mv)
    xs = np.where(valid_target_coords, indices[0], int_fill_value).ravel()
    ys = np.where(valid_target_coords, indices[1], int_fill_value).ravel()
    idxs = empty(num_cells, fill_value=int_fill_value, dtype=int)

    back_char, progress_step = progress_step_and_backchar(num_cells)
    format_progress = '{}Nearest neighbour interpolation: {}/{}  [outs: {}] ({}%)'.format
    i = 0
    outs = 0
    stdout.write('Start interpolation: {}\n'.format(now_string()))
    stdout.write(format_progress(back_char, 0, num_cells, outs, 0))
    stdout.flush()

    for lat, lon in zip(target_lats.flat, target_lons.flat):  # itertools.izip was Python 2 only
        if i % progress_step == 0:
            stdout.write(format_progress(back_char, i, num_cells, outs, i * 100. / num_cells))
            stdout.flush()
        if not (lon <= -1.0e+10 or lon == mv):
            try:
                # TODO: check whether the scalar conversion is really needed here
                # (np.asscalar is deprecated; ndarray.item() is the modern equivalent)
                n_nearest = gribapi.grib_find_nearest(gid, lat.item(), lon.item())
            except gribapi.GribInternalError:
                outs += 1
                xs[i] = int_fill_value
                ys[i] = int_fill_value
            else:
                idxs[i] = n_nearest[0]['index']
        i += 1
    stdout.write('{}{:>100}'.format(back_char, ' '))
    stdout.write(format_progress(back_char, i, num_cells, outs, 100))
    stdout.write('End interpolation: {}\n\n'.format(now_string()))
    stdout.flush()
    return xs[xs != int_fill_value], ys[ys != int_fill_value], idxs[idxs != int_fill_value]
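
The returned index arrays are typically applied to a decoded GRIB field; a sketch under that assumption, where values is the flat source field and result is a target-shaped array pre-filled with the missing value mv:

# nearest neighbour (Code Example #3): copy the closest source point
result = np.full(target_lons.shape, mv)
xs, ys, idxs = grib_nearest(gid, target_lats, target_lons, mv)
result[xs, ys] = values[idxs]

# inverse distance (Code Example #1): weight the four nearest source points
xs, ys, idx1, idx2, idx3, idx4, c1, c2, c3, c4 = grib_invdist(gid, target_lats, target_lons, mv)
result[xs, ys] = values[idx1] * c1 + values[idx2] * c2 + values[idx3] * c3 + values[idx4] * c4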
Code Example #4
    def export_selection(self):
        sel_files_folder = os.path.join('config_files', 'select_files')
        os.makedirs(sel_files_folder, exist_ok=True)

        sel_list = self.listbox.curselection()
        index_list = [self.listbox.get(ind) for ind in sel_list]
        print("Exporting list for image generator. List: {0}".format(index_list))
        now_s = now_string()
        out_selection_file = {'index_list': index_list,
                              'train_result_path': self.current_model.current_config_file,
                              'details': '',
                              'mask_file': self.current_model.current_mask_file}
        sel_file_name = "{0}_{1}_{2}_selection.json".format(
            self.current_model.classifier_key, self.current_model.dataset_key, now_s)
        sel_path = os.path.join(sel_files_folder, sel_file_name)
        with open(sel_path, 'w') as f:
            json.dump(out_selection_file, f)
        print("Selection saved to {0}".format(sel_path))
Code Example #5
    def generate(self, dataset_object: Dataset, index_list, mask_file,
                 select_path):

        d_name = str(dataset_object.__class__.__name__)
        gen_name = str(self.__class__.__name__)
        f_name = '{0}_{1}_{2}'.format(d_name, gen_name, now_string())
        out_folder = os.path.join('gen_images', f_name)
        os.makedirs(out_folder, exist_ok=True)

        gen_dict = {}

        # open masks pickle
        with open(mask_file, 'rb') as f:
            mask_dict = pickle.load(f)

        for ind in index_list:
            mask = mask_dict['masks'][ind]
            img = dataset_object.get_train_image_at(ind)[0][0]  # returns (img, label); img = [batch, w, h, c]

            result = self.generate_img_mask(img, mask)

            cv2.imwrite(os.path.join(out_folder, '{0}__mask.png'.format(ind)),
                        mask.astype(np.uint8) * 255)

            gen_dict[ind] = []
            for ind_out, elem in enumerate(result):
                out_path_img = os.path.join(
                    out_folder, '{0}__{1}.png'.format(ind, ind_out))
                cv2.imwrite(out_path_img, elem)
                gen_dict[ind].append(out_path_img)

        exp_json = {
            'date': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
            'dataset': d_name,
            'used_select': select_path,
            'index_map': gen_dict,
            'mask_file': str(mask_file),
            'generator': gen_name
        }
        print("Results in " +
              str(os.path.join(out_folder, 'exp_details.json')))
        with open(os.path.join(out_folder, 'exp_details.json'), 'w') as f:
            json.dump(exp_json, f)
Code Example #6
    def train(self,
              train_file_used=None,
              save_model=True,
              eval=True,
              special_t=1):

        self.current_log = ''
        saver = tf.train.Saver()

        i = 0
        show_batch_dist = True
        best_eval = 0

        with timeit() as t:
            for epoch in range(special_t):  # TODO refactor; 'i' keeps counting global steps across epochs
                self.dataset.initialize_iterator_train(self.sess)
                while True:
                    try:
                        fd = self.prepare_feed(is_train=True, debug=self.debug)

                        if self.use_summary:
                            l, _, acc, tgts, summary = self.sess.run([
                                self.loss, self.train_step, self.accuracy,
                                self.targets, self.all_summaries
                            ], fd)
                            self.summary_train.add_summary(summary)
                        else:
                            l, _, acc, tgts = self.sess.run([
                                self.loss, self.train_step, self.accuracy,
                                self.targets
                            ], fd)

                        if i % 100 == 0:
                            from collections import Counter
                            log = "It: {}, loss_batch: {:.3f}, batch_accuracy: {:.2f}%".format(
                                i, l, acc * 100)
                            self.current_log += '{0} \n'.format(log)
                            print(log)
                            if show_batch_dist:
                                print(Counter(tgts.argmax(axis=1).tolist()))

                        i += 1
                    except tf.errors.OutOfRangeError:
                        log = 'train iterator exhausted at step {0}'.format(i)
                        self.current_log += '{0} \n'.format(log)
                        print(log)
                        break

                # Eval in val set
                if eval:
                    print("Doing eval")
                    out_string, acc_v = self.eval()

                    if save_model and (best_eval < acc_v):
                        best_eval = acc_v
                        self.save_model(saver, prefix='best')
                #     if best_eval > 0.78:
                #         break
                # if best_eval > 0.78:
                #     break

        # Save model
        if save_model:
            path_model_checkpoint = self.save_model(saver)

            # save accuracy in val set
            if eval:
                with open(os.path.join(path_model_checkpoint, "accuracy_val.txt"), 'w') as f:
                    f.write(out_string)

            # create train_result config
            data = {
                'mask_files': [],
                'model_load_path': path_model_checkpoint,
                'train_file_used': train_file_used
            }
            out_folder = os.path.join('config_files', 'train_result')
            os.makedirs(out_folder, exist_ok=True)
            now = now_string()
            json_name = '{0}__{1}__{2}.json'.format(
                self.dataset.__class__.__name__, self.__class__.__name__, now)
            with open(os.path.join(out_folder, json_name), 'w') as f:
                json.dump(data, f)

            return path_model_checkpoint
        else:
            return None
Code Example #7
** For iterator
- compute pred, real
- IF pred != real, skip it
- compute the CAM for the predicted class
- compute the CAM for the real class
- binarize and crop according to the mask. Save both images to the folder



"""

if __name__ == '__main__':

    # dataset = Imagenet_Dataset(20,30)
    # dataset = Cifar10_Dataset(20,40)
    dataset = CWR_Dataset(4, 60)

    with CWR_classifier(dataset, debug=False) as model:
        out_path_folder = os.path.join('vis_results',
                                       model.get_name() + "_" + now_string())
        os.makedirs(out_path_folder, exist_ok=True)

        # model.load('.','model/Imagenet_subset_vgg16_CAM/29_May_2018__15_16')
        # model.load('./model/check.meta','./model/vgg16_classifier/29_May_2018__01_41')
        model.load('./model/CWR_Clasifier/04_Oct_2018__18_11')
        # exp_CAM_eval(dataset, model,out_path_folder)
        visualize_dataset_CAM_predicted(dataset,
                                        model,
                                        out_path_folder,
                                        out_pickle=True,
                                        use_real_label=True)
Code Example #8
def do_train_config(config_path):
    with open(config_path, 'r') as f:
        data = json.load(f)

    t_mode = data["train_mode"]
    t_params = data['train_params']

    if t_mode == 'epochs':

        model_key = data['model_key']
        model_params = data['model_params']
        model_load_path = data['model_load_path']
        dataset_key = data['dataset_key']
        dataset_params = data['dataset_params']

        batch_size = t_params.get('b_size', 20)
        epochs = t_params.get('epochs', 1)

        model_class = model_obj_dict[model_key]
        dataset_class = dataset_obj_dict[dataset_key]
        base_dataset = dataset_class(epochs, batch_size, **dataset_params)

        if t_params.get('just_eval'):
            print("Doing eval for validation set")
            with model_class(base_dataset, **model_params) as model:
                if model_load_path:
                    model.load(model_load_path)

                model.eval(mode='test')
                model.eval(mode='val')
            return

        train_for_epochs(base_dataset, model_class, model_params, model_load_path, config_path)

    elif t_mode == "gen_train":
        gen_file = t_params['gen_file']

        with open(gen_file) as f:
            data_gen=json.load(f)


        used_select = data_gen['used_select']
        with open(used_select) as f:
            data_select=json.load(f)
        train_result_path = data_select['train_result_path']
        with open(train_result_path, 'r') as f:
            data_t_r = json.load(f)
            path_train_file = data_t_r["train_file_used"]

            with open(path_train_file, 'r') as f2:
                data_train_file = json.load(f2)
            m_k = data_train_file['model_key']
            m_p = data_train_file['model_params']
            d_k = data_train_file['dataset_key']
            d_p = data_train_file['dataset_params']

        if data['model_load_path'] is not None:
            model_load_path = data['model_load_path']
            print("Using model load path from TRAIN_FILE {0}".format(model_load_path))
        else:
            model_load_path = data_t_r['model_load_path']
            print("Using model load path from SELECT_FILE {0}".format(model_load_path))


        batch_size = t_params.get('b_size', 20)
        epochs = t_params.get('epochs', 1)

        model_class = model_obj_dict[m_k]
        dataset_class = dataset_obj_dict[d_k]
        base_dataset = dataset_class(epochs, batch_size, **d_p)  # type: Dataset


        if t_params.get('just_eval'):
            print("Doing eval for validation set")
            with model_class(base_dataset, **m_p) as model:
                if model_load_path:
                    model.load(model_load_path)

                model.eval(mode='test')
                model.eval(mode='val')
            return


        # images, labels, index_list = create_lists(base_dataset, data_gen['index_map'])
        # Create a dummy dataset; add all gen_images and random images
        dataset_one_use = placeholder_dataset(base_dataset)

        with model_class(dataset_one_use, **m_p) as model:  # this also saves the train_result
            model.load(model_load_path)
            # dataset_one_use.prepare_dataset(index_list, images, labels)
            # dataset_one_use.show_current()
            # model.train(train_file_used=config_path,save_model=False,eval=True)


            current_ind = None
            current_img = None
            current_label = None
            current_cams = None
            selected_cam = 0
            current_mask = None
            gen_images = None
            backprops = 1
            gens = 0
            use_selective_dropout = True
            add_original = True

            index_list = []
            img_list = []
            label_list = []
            gen_map = {}

            # action: invoke ref_gen
            gen_model = get_generator_from_key("random_crop",dataset=base_dataset)

            act = 'no_exit'
            out_f = os.path.join('out_backprops', now_string())
            action_map = {'0': 'sel_img',
                          '1': 'set_mask',
                          '2': 'gen_image',
                          '3': 'add_gen_to_dataset',
                          '4': 'flush_dataset',
                          '5': 'do_backprop',
                          '6': 'exit',
                          '7': 'sel_cam',
                          '8': 'sel_gen',
                          '9': 'save_mask',
                          '10': 'load_mask',
                          '11': 'add_org_random_insert',
                          '12': 'loop random get X times',
                          '13': 'continue_training',
                          '14': 'exp_1_nov'}

            while act != 'exit':
                plt.close('all')
                try:
                    act = action_map.get(
                        input("Action? {0}".format(sorted(action_map.items()))),
                        '')  # .get, not setdefault: don't register mistyped input as new actions

                    if act == 'sel_img':
                        ind_sel = input("Index ?")
                        img, label, all_cams, scores, r_label = get_img_cam(ind_sel, base_dataset, model)
                        current_ind = ind_sel
                        current_img = img[0].squeeze()
                        current_label = label
                        current_cams = all_cams
                    elif act == 'sel_cam':
                        selected_cam = int(input("Cam index ?"))

                    elif act == 'exp_1_nov':
                        indexs_string = 'n02114548_10505.JPEG,n02120079_9808.JPEG,n02114548_11513.JPEG,n02120079_4409.JPEG,n02114548_5207.JPEG'
                        selected_img_indexs = [x.strip() for x in indexs_string.split(",")]
                        n_iterations = 15
                        iter_till_insert = 3
                        skip_insert = False
                        batch_size_exp = 50
                        gen_mask_map = {}
                        op = ''
                        sel_dropout = True

                        use_default = int(input("Use defaults? (1=yes, 0=no) "))

                        if use_default == 0:
                            selected_img_indexs = [x.strip() for x in input("Selected indexes?").split(",")]
                            n_iterations = int(input("Selected Iterations?"))
                            iter_till_insert = int(input("iter_till_insert?"))
                            batch_size_exp = int(input("batch_size_exp?"))
                            op = input("op?")

                            # select image masks
                            for im_index in selected_img_indexs:

                                img, label, all_cams, scores, r_label = get_img_cam(im_index, base_dataset, model)
                                sel_cam = int(input("CAM index?"))

                                cam_for_mask = np.squeeze(all_cams[sel_cam])
                                temp_img = img[0].squeeze()
                                current_mask = sel_mask(cam_for_mask, temp_img)

                                gen_mask_map[im_index] = (sel_cam, current_mask)
                                with open("sel_mask_exp_3.pkl", 'wb') as f:
                                    pickle.dump(gen_mask_map, f, -1)
                        else:
                            with open("sel_mask_exp_3.pkl", 'rb') as f:
                                gen_mask_map = pickle.load(f)


                        for i in range(n_iterations):

                            # generate a random batch
                            index_list, img_list, label_list, gens = generate_random_for_loop(
                                base_dataset, current_ind, current_img, current_label, gens,
                                current_mask, n_random=batch_size_exp, gen_imgs=0)

                            if i % iter_till_insert == 0 and not skip_insert:
                                # add original images
                                for img_ind in selected_img_indexs:
                                    img, label, all_cams, scores, r_label = get_img_cam(img_ind, base_dataset, model, show_images=False)
                                    index_list.append(img_ind)
                                    img_list.append(img.squeeze())
                                    label_list.append(label)

                                # add generated images if any
                                if op == 'add_gen':
                                    # generate random batch
                                    for img_ind in gen_mask_map:
                                        sel_cam, current_mask = gen_mask_map[img_ind]
                                        img, label, all_cams, scores, r_label = get_img_cam(img_ind, base_dataset, model, show_images=False)
                                        i_list, im_list, l_list, c_gens = generate_random_for_loop(
                                            base_dataset, img_ind, img.squeeze(), label, 0,
                                            current_mask, n_random=0, gen_imgs=1)

                                        index_list += i_list
                                        img_list += im_list
                                        label_list += l_list

                            # flush
                            flush_to_dataset(dataset_one_use, index_list, img_list, label_list,
                                             False, current_ind, current_img, current_label)

                            # do backpropagation
                            backprops = do_backprop(model, base_dataset, dataset_one_use,
                                                    selected_img_indexs, [],
                                                    out_f, gen_mask_map,
                                                    backprops, config_path,
                                                    change_feed_dict=sel_dropout)

                    elif act == 'add_org_random_insert':
                        index_list, img_list, label_list, gens = generate_random_for_loop(
                            base_dataset, current_ind, current_img, current_label, gens, current_mask)

                    elif act == 'continue_training':
                        n_backpropagations = int(input("How many batches?"))

                        for i in range(n_backpropagations):
                            index_list, img_list, label_list, gens = generate_random_for_loop(
                                base_dataset, current_ind, current_img, current_label, gens,
                                current_mask, n_random=20, gen_imgs=0)

                            # flush
                            flush_to_dataset(dataset_one_use, index_list, img_list, label_list,
                                             False, current_ind, current_img, current_label)

                            # do backpropagation
                            index_list_t = [] if current_ind is None else [current_ind]

                            backprops = do_backprop(model, base_dataset,
                                                    dataset_one_use,
                                                    index_list_t, [],
                                                    out_f, {}, backprops, config_path,
                                                    change_feed_dict=False)

                    elif act == 'loop random get X times':

                        n_backpropagations = int(input("How many times?"))

                        for i in range(n_backpropagations):
                            index_list, img_list, label_list, gens = generate_random_for_loop(
                                base_dataset, current_ind, current_img, current_label, gens,
                                current_mask)

                            # flush
                            flush_to_dataset(dataset_one_use, index_list, img_list, label_list,
                                             add_original, current_ind, current_img, current_label)

                            # do backpropagation
                            backprops = do_backprop(model, base_dataset,
                                                    dataset_one_use,
                                                    [current_ind], [],
                                                    out_f, {},
                                                    backprops, config_path,
                                                    change_feed_dict=False)


                    elif act == 'save_mask':

                        out_path = os.path.join('./config_files/mask_files/mask_from_gen_{0}.pkl'.format(now_string()))
                        with open(out_path, 'wb') as f:
                            pickle.dump(current_mask, f)
                        print("Mask saved to {0}".format(out_path))

                    elif act == 'load_mask':
                        path_to_mask = input("Mask path?")
                        with open(path_to_mask, 'rb') as f:
                            current_mask = pickle.load(f)
                        print("Mask loaded from {0}".format(path_to_mask))
                        gen_map[current_ind] = (selected_cam, current_mask)

                    elif act == 'sel_gen':
                        selected_key_gen = input("Gen key?")
                        gen_model = get_generator_from_key(selected_key_gen,
                                                           dataset=base_dataset)

                    elif act == 'set_mask':
                        cam_for_mask = np.squeeze(current_cams[selected_cam])
                        current_img = np.squeeze(current_img)
                        print(cam_for_mask.shape)
                        print(current_img.shape)
                        current_mask = sel_mask(cam_for_mask, current_img)
                        gen_map[current_ind] = (selected_cam, current_mask)

                    elif act == 'gen_image':
                        gen_images = gen_model.generate_img_mask(current_img,
                                                                 current_mask)
                        plt.figure()
                        plt.title('Original')
                        plt.imshow(current_img.squeeze())

                        for index_gen, gen_img in enumerate(gen_images):
                            plt.figure()
                            plt.title('gen_{0}'.format(index_gen))
                            plt.imshow(gen_img.squeeze())
                        plt.show()

                    elif act == 'add_gen_to_dataset':
                        for g_img in gen_images:
                            n_index = "gen_id__{0}__bindex__{1}".format(
                                current_ind, gens)
                            print("Adding {0}".format(n_index))
                            index_list.append(n_index)
                            gens += 1
                            img_list.append(g_img)
                            label_list.append(current_label)

                    elif act == 'flush_dataset':
                        flush_to_dataset(dataset_one_use, index_list, img_list, label_list,
                                         add_original, current_ind, current_img, current_label, show_data=True)
                        index_list = []
                        img_list = []
                        label_list = []

                    elif act == 'do_backprop':
                        backprops = do_backprop(model, base_dataset, dataset_one_use, [current_ind], [],
                                                out_f, gen_map, backprops, config_path,
                                                change_feed_dict=use_selective_dropout)
                except Exception:
                    import traceback
                    print(traceback.format_exc())
                    print("Exception, try again")
                    # do not re-raise: keep the interactive loop alive so the user can retry
Code Example #9
def do_train_config(config_path,
                    indexs_string,
                    n_iterations,
                    iter_till_insert,
                    gens_per_original,
                    skip_insert,
                    batch_size_exp,
                    exp_list,
                    base_name,
                    dropout_k,
                    mask_file_path_map,
                    plot_masks,
                    out_file_path=None,
                    missclass_index_path=None,
                    add_summary=False,
                    lambda_value=0.2 * 0.2 * 1.0 / (14 * 14),
                    param_a=None,
                    param_b=None,
                    tf_config=None):

    """
        # #'n02114548_10505.JPEG'
        #"n02120079_1801.JPEG,n02114548_6413.JPEG,n02120079_4625.JPEG,n02114548_3599.JPEG,n02120079_6223.JPEG,n02114548_10182.JPEG,n02120079_13994.JPEG,n02120079_12464.JPEG,n02120079_14956.JPEG"
        indexs_string = 'n02114548_10505.JPEG,n02120079_9808.JPEG,n02114548_11513.JPEG,n02120079_4409.JPEG,n02114548_5207.JPEG'
        n_iterations = 50 # 15,3, 50, 25
        iter_till_insert = 3
        gens_per_original = 1
        skip_insert = False
        batch_size_exp = 50
        op = ''
        sel_dropout = False
        exp_list = [3]
        base_name = 'exp_cam_loss'
        dropout_k = 16
        mask_file_path_map = "sel_mask_exp_3.pkl" #'mask_map_label_9__2018-Dec-12--00:04.pkl'
        plot_masks = True


    """
    sel_dropout = False

    data_config = parse_config_recur(config_path)
    t_params = data_config['train_params']
    m_k = data_config['model_key']
    m_p = data_config['model_params']
    d_k = data_config['dataset_key']
    d_p = data_config['dataset_params']

    m_p['use_summary'] = add_summary
    if tf_config:
        m_p['tf_config'] = tf_config

    if data_config['model_load_path_at_train_file'] is not None:
        model_load_path = data_config['model_load_path_at_train_file']
        print("Using model load path from TRAIN_FILE {0}".format(model_load_path))
    else:
        model_load_path = data_config['model_load_path_train_result']
        print("Using model load path from SELECT_FILE {0}".format(model_load_path))


    batch_size = t_params.get('b_size', 20)
    epochs = t_params.get('epochs', 1)
    model_class = model_obj_dict[m_k]
    dataset_class = dataset_obj_dict[d_k]



    gen_mask_map = {}

    if mask_file_path_map is not None:
        with open(mask_file_path_map, 'rb') as f:
            gen_mask_map = pickle.load(f)
            if 'masks' in gen_mask_map:
                gen_mask_map = {k: (None, gen_mask_map['masks'][k]) for k in gen_mask_map['masks']}


    special_batch_count = 0

    for id_exp in exp_list:

        plt.close('all')

        optional_backprop_args = {}

        sample_batch_fun = lambda x: generate_random_for_loop(x, None, None, None, 0, None,
                                                              n_random=batch_size_exp, gen_imgs=0)

        if id_exp == 0:
            exp_type = 'add_org'
            op = ''
            sel_dropout = False

        elif id_exp == 1:
            exp_type = 'gen'
            op = 'add_gen'
            sel_dropout = False

        elif id_exp == 2:
            exp_type = 'dropout_org'
            op = ''
            sel_dropout = True

        elif id_exp == 3:
            model_class = imagenet_classifier_cam_loss
            exp_type = 'cam_loss'
            op = ''
            sel_dropout = True

            optional_backprop_args['new_feed_fun'] = lambda f, gm, k=None: get_f_d_cam_loss(f, gm, lambda_value, k=k, use_simple_index=False)

        elif id_exp == 12:
            model_class = imagenet_classifier_CONV_LOSS
            exp_type = 'CONV_loss'
            op = ''
            sel_dropout = True

            optional_backprop_args['new_feed_fun'] = lambda f, gm, k=None: get_f_d_cam_loss(f, gm, lambda_value, k=k, use_simple_index=False)


        elif id_exp == 9:
            model_class = imagenet_classifier_cam_loss_V2
            exp_type = 'cam_V2_loss'
            op = ''
            sel_dropout = True
            m_p['param_b'] = param_b
            m_p['param_a'] = param_a

            optional_backprop_args['new_feed_fun'] = lambda f, gm, k=None: get_f_d_cam_loss(f, gm, lambda_value, k=k, use_simple_index=False)

        elif id_exp == 20:
            exp_type = 'cam_V2__KEEP_MODEL_loss'
            op = ''
            sel_dropout = True

            optional_backprop_args['new_feed_fun'] = lambda f, gm, k=None: get_f_d_cam_loss(f, gm, lambda_value, k=k, use_simple_index=False)



        elif id_exp == 4:
            model_class = imagenet_classifier_focal_loss
            exp_type = 'focal_loss'
            op = ''
            sel_dropout = False

        elif id_exp == 5:
            exp_type = 'just_missclass'

        elif id_exp == 6:
            exp_type = 'cont_training'
            op = ''
            skip_insert = True
            sel_dropout = False

        elif id_exp == 7:
            exp_type = 'cont_training_big_sample_missclass'
            op = ''
            skip_insert = True
            sel_dropout = False
            assert missclass_index_path is not None, 'Give missclass_index_path parameter for exp'

            with open(missclass_index_path, 'r') as f:
                missclass_index_list = f.read().split('\n')
            missclass_index_list = [x for x in missclass_index_list if len(x) > 0]
            n_missclass = int(batch_size_exp * 0.3)
            n_random = int(batch_size_exp * 0.7)
            sample_batch_fun = lambda x: generate_random_from_list(x, missclass_index_list, 0,
                                                                   n_random=n_random, gen_imgs=n_missclass)
        
        elif id_exp == 8:
            exp_type = 'training_all_masks'
            op = ''
            model_class = imagenet_classifier_cam_loss
            skip_insert = True
            sel_dropout = True
            assert missclass_index_path is not None, 'Give missclass_index_path parameter for exp'
            assert mask_file_path_map is not None, 'Give mask file'

            with open(missclass_index_path, 'r') as f:
                missclass_index_list = f.read().split('\n')
            missclass_index_list = [x for x in missclass_index_list if len(x) > 0]

            n_with_mask = int(batch_size_exp * 0.7)
            n_random = int(batch_size_exp * 0.3)
            sample_batch_fun = lambda x: generate_random_from_list(x, missclass_index_list, 0,
                                                                   n_random=n_random, gen_imgs=n_with_mask)

            optional_backprop_args['new_feed_fun'] = lambda f, gm, k=None: get_f_d_cam_loss(f, gm, lambda_value, k=k, use_simple_index=True)

            with open(mask_file_path_map, 'rb') as f:
                t = pickle.load(f)['masks']
                gen_mask_map = {k: (None, t[k]) for k in t}

        elif id_exp == 10:
            exp_type = 'training_all_masks_V2'
            op = ''
            model_class = imagenet_classifier_cam_loss_V2
            sel_dropout = True
            assert missclass_index_path is not None, 'Give missclass_index_path parameter for exp'
            assert mask_file_path_map is not None, 'Give mask file'

            with open(missclass_index_path, 'r') as f:
                missclass_index_list = f.read().split('\n')
            missclass_index_list = [x for x in missclass_index_list if len(x) > 0]

            n_with_mask = int(batch_size_exp * 0.7)
            n_random = int(batch_size_exp * 0.3)
            sample_batch_fun = lambda x: generate_random_from_list(x, missclass_index_list, 0,
                                                                   n_random=n_random, gen_imgs=n_with_mask)

            optional_backprop_args['new_feed_fun'] = lambda f, gm, k=None: get_f_d_cam_loss(f, gm, lambda_value, k=k, use_simple_index=False)

            with open(mask_file_path_map, 'rb') as f:
                t = pickle.load(f)['masks']
                gen_mask_map = {k: (None, t[k]) for k in t}

        elif id_exp == 31:
            exp_type = 'training_all_masks_V2'
            op = ''
            model_class = simple_classifier
            sel_dropout = True
            assert missclass_index_path is not None, 'Give missclass_index_path parameter for exp'
            assert mask_file_path_map is not None, 'Give mask file'

            with open(missclass_index_path, 'r') as f:
                missclass_index_list = f.read().split('\n')
            missclass_index_list = [x for x in missclass_index_list if len(x) > 0]

            n_with_mask = int(batch_size_exp * 0.7)
            n_random = int(batch_size_exp * 0.3)
            sample_batch_fun = lambda x: generate_random_from_list(x, missclass_index_list, 0,
                                                                   n_random=n_random, gen_imgs=n_with_mask)

            optional_backprop_args['new_feed_fun'] = lambda f, gm, k=None: get_f_d_cam_loss(f, gm, lambda_value, k=k, use_simple_index=False)

            with open(mask_file_path_map, 'rb') as f:
                t = pickle.load(f)['masks']
                gen_mask_map = {k: (None, t[k]) for k in t}

        elif id_exp == 30:
            exp_type = 'training_masks_features_espurias_V2'
            op = ''
            model_class = imagenet_classifier_cam_loss_V2
            sel_dropout = True
            assert mask_file_path_map is not None, 'Give mask file'

            with open(mask_file_path_map, 'rb') as f:
                gen_mask_map = pickle.load(f)

            missclass_index_list = list(gen_mask_map.keys())

            n_with_mask = int(batch_size_exp * 0.7)
            n_random = int(batch_size_exp * 0.3)
            sample_batch_fun = lambda x: generate_random_from_list(x, missclass_index_list, 0,
                                                                   n_random=n_random, gen_imgs=n_with_mask)

            optional_backprop_args['new_feed_fun'] = lambda f, gm, k=None: get_f_d_cam_loss(f, gm, lambda_value, k=k, use_simple_index=False)

        elif id_exp == 13:
            exp_type = 'training_all_masks_CONV_LOSS'
            op = ''
            model_class = imagenet_classifier_CONV_LOSS  # simple_classifier
            skip_insert = True
            sel_dropout = True
            assert missclass_index_path is not None, 'Give missclass_index_path parameter for exp'
            assert mask_file_path_map is not None, 'Give mask file'

            with open(missclass_index_path, 'r') as f:
                missclass_index_list = f.read().split('\n')
            missclass_index_list = [x for x in missclass_index_list if len(x) > 0]

            n_with_mask = int(batch_size_exp * 0.7)
            n_random = int(batch_size_exp * 0.3)
            sample_batch_fun = lambda x: generate_random_from_list(x, missclass_index_list, 0,
                                                                   n_random=n_random, gen_imgs=n_with_mask)

            optional_backprop_args['new_feed_fun'] = lambda f, gm, k=None: get_f_d_cam_loss(f, gm, lambda_value, k=k, use_simple_index=True)

            with open(mask_file_path_map, 'rb') as f:
                t = pickle.load(f)['masks']
                gen_mask_map = {k: (None, t[k]) for k in t}

        elif id_exp == 11:
            exp_type = 'auto_training_CAM'
            op = ''
            model_class = imagenet_classifier_cam_loss_AUTO
            sel_dropout = True

        optional_backprop_args['change_feed_dict'] = sel_dropout
        optional_backprop_args['selective_dropout_k'] = dropout_k


        exp_name = '{0}_{1}_{2}'.format(base_name, exp_type, now_string())
        out_f = out_file_path if out_file_path is not None else os.path.join('out_backprops', exp_name)
        m_p['out_folder'] = out_f

        selected_img_indexs = list(
            map(lambda x: x.strip(), indexs_string.split(",")))

        do_exp_fun = lambda md, d_dummy, d_base: for_loop_model_exp(md, sample_batch_fun, d_dummy, d_base,
                                                                    iter_till_insert, skip_insert, selected_img_indexs,
                                                                    gens_per_original, out_f, gen_mask_map, config_path,
                                                                    optional_backprop_args, n_iterations, op)

        if id_exp == 11:
            do_exp_fun = lambda md, d_dummy, d_base: auto_cam_loss(md, sample_batch_fun,
                                                                   d_dummy, d_base, selected_img_indexs,
                                                                   out_f, gen_mask_map, config_path,
                                                                   optional_backprop_args, selected_img_indexs)

        with tf.Graph().as_default():
            base_dataset = dataset_class(epochs, batch_size, **d_p)  # type: Dataset

            st_missclass_folder = os.path.join(out_f, 'start')
            os.makedirs(st_missclass_folder, exist_ok=True)
            st_missclass_list = calc_missclass_and_plot(model_class, m_p, model_load_path, base_dataset, st_missclass_folder)

        if id_exp == 5:
            return

        with tf.Graph().as_default():
            base_dataset = dataset_class(epochs, batch_size, **d_p)  # type: Dataset
            # Create a dummy dataset; add all gen_images and random images
            dataset_one_use = placeholder_dataset(base_dataset)

            if plot_masks:
                out_folder_originals = os.path.join(out_f, 'originals')
                os.makedirs(out_folder_originals, exist_ok=True)
                for k in gen_mask_map:
                    org_img, label = base_dataset.get_train_image_at(k)

                    out_name = os.path.join(out_folder_originals, '{0}_mask.png'.format(k))
                    out_name_or = os.path.join(out_folder_originals, '{0}.png'.format(k))

                    rgb = (len(org_img[0].shape) == 3 and org_img[0].shape[2] == 3)
                    img_out_or = cv2.cvtColor(org_img[0], cv2.COLOR_RGB2BGR) if rgb else org_img[0]

                    img_out = gen_mask_map[k][1].astype(np.uint8) * 255
                    cv2.imwrite(out_name, img_out)
                    cv2.imwrite(out_name_or, img_out_or)

                    # alternative: save via matplotlib
                    # img_out = gen_mask_map[k][1].astype(np.float32)
                    # plt.imshow(img_out)
                    # plt.savefig(out_name)
                    # plt.imshow(img_out_or)
                    # plt.savefig(out_name_or)
                    # plt.close('all')

            with model_class(dataset_one_use, **m_p) as model:  # this also saves the train_result
                model.load(model_load_path)

                do_exp_fun(model, dataset_one_use, base_dataset)

                saver = tf.train.Saver()
                folder_model_temp = 'temp_folder_model'
                os.makedirs(folder_model_temp, exist_ok=True)
                out_model = model.save_model(saver, folder_model_temp)

        with tf.Graph().as_default():
            base_dataset = dataset_class(epochs, batch_size, **d_p)  # type: Dataset

            en_missclass_folder = os.path.join(out_f, 'end')
            os.makedirs(en_missclass_folder, exist_ok=True)
            st_miss_class = calc_missclass_and_plot(model_class, m_p, out_model, base_dataset, en_missclass_folder)

        with tf.Graph().as_default():
            base_dataset = dataset_class(epochs, batch_size, **d_p)  # type: Dataset

            after_missclass_folder = os.path.join(out_f, 'start_indexs_after')
            os.makedirs(after_missclass_folder, exist_ok=True)
            st_miss_class = calc_missclass_and_plot(model_class, m_p, out_model, base_dataset,
                                                    after_missclass_folder, indexs_list=st_missclass_list)

        out_model_path = os.path.join(out_f, 'model_after')

        import shutil
        shutil.move(out_model, out_model_path)

    return out_f
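
A call sketch assembled from the example values in the function's own docstring; the config path is a placeholder, and exp_list=[3] selects the cam_loss experiment:

out_folder = do_train_config(
    'config_files/train_files/example_train.json',  # placeholder path
    indexs_string='n02114548_10505.JPEG,n02120079_9808.JPEG,n02114548_11513.JPEG,n02120079_4409.JPEG,n02114548_5207.JPEG',
    n_iterations=50,
    iter_till_insert=3,
    gens_per_original=1,
    skip_insert=False,
    batch_size_exp=50,
    exp_list=[3],
    base_name='exp_cam_loss',
    dropout_k=16,
    mask_file_path_map='sel_mask_exp_3.pkl',
    plot_masks=True)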