Example #1
    copy_dst_csv = CSV_DIR + FILE_ID + '.csv'
    copy_dst_pkl = PKL_DIR + FILE_ID + '.pkl'

    # remove any previous outputs for this FILE_ID before copying fresh ones
    if os.path.exists(SAVE_DIR): remove_dir(SAVE_DIR)
    if os.path.exists(copy_dst_csv): remove_file(copy_dst_csv)
    if os.path.exists(copy_dst_pkl): remove_file(copy_dst_pkl)

    # collect all result pickles that match the query pattern
    pkl_files = glob.glob(PKL_DIR + QUERY_FILE_ID + '.pkl')
    print(pkl_files)

    if len(pkl_files) == 0:
        print("No such pkl files")
        sys.exit()

    # pick the run with the largest summed test precision@k
    best_file_id = os.path.basename(pkl_files[0])[:-4]  # strip the '.pkl' extension
    best_performance = np.sum(read_pkl(pkl_files[0])['te_te_precision_at_k'])
    for pkl_path in pkl_files:
        file_id = os.path.basename(pkl_path)[:-4]  # strip the '.pkl' extension
        performance = np.sum(read_pkl(pkl_path)['te_te_precision_at_k'])
        print("performance : {} from {}".format(performance, file_id))
        if performance > best_performance:
            best_performance = performance
            best_file_id = file_id
    print("best performance : {} from {}".format(best_performance,
                                                 best_file_id))
    copy_file(CSV_DIR + best_file_id + '.csv', copy_dst_csv)
    copy_file(PKL_DIR + best_file_id + '.pkl', copy_dst_pkl)
    copy_dir(RESULT_DIR + 'metric/save/' + best_file_id, SAVE_DIR)
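# The helpers used above (read_pkl, copy_file, copy_dir, remove_file, remove_dir)
# come from the project's ../utils modules and are not shown in this snippet.
# A minimal sketch with the same call signatures, assuming plain pickle/shutil/os
# semantics (hypothetical, not the project's actual implementation):
import os
import pickle
import shutil


def read_pkl(path, encoding='ASCII'):
    # Load a pickled object; encoding='bytes' is needed for Python-2 pickles.
    with open(path, 'rb') as f:
        return pickle.load(f, encoding=encoding)


def copy_file(src, dst):
    shutil.copy2(src, dst)


def copy_dir(src, dst):
    shutil.copytree(src, dst)


def remove_file(path):
    os.remove(path)


def remove_dir(path):
    shutil.rmtree(path)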

Example #2
import sys

sys.path.append('../configs')
sys.path.append('../utils')  # the reader/writer/dataset_op modules below live here

from sklearn.model_selection import train_test_split
# ../utils
from reader import read_pkl
from writer import write_npy, create_muldir
from dataset_op import label_count

# ../configs
from path import CIFAR100PATH, CIFARPROCESSED
from info import CIFARNCLASS

import numpy as np
import pickle
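
# Preprocess CIFAR-100: scale pixels to [0, 1], subtract the per-pixel mean of
# the training set, and rearrange the flat rows into NHWC image tensors.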

train = read_pkl(CIFAR100PATH + 'train', encoding='bytes')
test = read_pkl(CIFAR100PATH + 'test', encoding='bytes')

train_image = train[b'data'].astype(np.float32) / 255
test_image = test[b'data'].astype(np.float32) / 255

pixel_mean = np.mean(train_image, axis=0)  # per-pixel mean computed from the training set only

train_image -= pixel_mean
test_image -= pixel_mean

train_image = np.transpose(np.reshape(train_image, [-1, 3, 32, 32]),
                           [0, 2, 3, 1])
train_label = np.array(train[b'fine_labels'])
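
# train_test_split and label_count are imported above but not used in the
# portion shown. A minimal sketch of how a held-out validation split could be
# taken from the training arrays (the 10% ratio and stratification are
# assumptions, not taken from the original code):
train_img_tr, train_img_val, train_label_tr, train_label_val = train_test_split(
    train_image, train_label, test_size=0.1, stratify=train_label, random_state=0)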
Example #3
# ../configs
from path import IMAGENET32PATH, IMAGENET32PROCESSED

# ../utils
from reader import read_pkl
from writer import write_npy, create_muldir

from sklearn.model_selection import train_test_split
import numpy as np
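
# Accumulate the ImageNet32 training batches (1-9 as written) together with
# their labels; every batch carries a 'mean' entry, which the loop below
# checks is identical across batches.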

train_img = list()
train_label = list()
image_mean = None

for idx in range(1, 10):
    content = read_pkl(IMAGENET32PATH + 'train_data_batch_%d' % idx)
    if image_mean is None: image_mean = content['mean']
    else:
        assert np.array_equal(image_mean, content['mean']), \
            "the stored pixel mean should be identical across batches"

    nimg = len(content['data'])
    nlabel = len(content['labels'])
    assert nimg == nlabel, "number of images and labels should match"
    train_img.append(content['data'])
    train_label.append(content['labels'])

train_img = np.concatenate(train_img, axis=0)
train_label = np.concatenate(train_label, axis=0) - 1  # shift labels from 1-indexed to 0-indexed
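
# As in the CIFAR-100 example, the flattened rows can be brought into NHWC
# layout and normalized before training. A minimal sketch; how the stored
# 'mean' is applied here is an assumption, not taken from the original code:
train_img = train_img.astype(np.float32)
train_img -= image_mean.astype(np.float32)  # subtract the per-pixel mean stored in the batches
train_img /= 255.0
train_img = np.transpose(np.reshape(train_img, [-1, 3, 32, 32]), [0, 2, 3, 1])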