Example #1
0
def create_dataholder(data_str, imsz):
    """Create the dataset holder that matches *data_str*.

    Args:
        data_str: dataset identifier ('UCSDped1', 'UCSDped2',
            anything starting with 'Avenue', or 'Pako').
        imsz: image size forwarded to the holder constructor.

    Returns:
        The constructed dataset-holder object.

    Raises:
        ValueError: if *data_str* matches no known dataset.
    """
    # UCSDped1 and UCSDped2 share the same holder class.
    if data_str in ('UCSDped1', 'UCSDped2'):
        return anom_UCSDholder(data_str, imsz)
    if data_str.startswith('Avenue'):
        return anom_Avenueholder(data_str, imsz)
    if data_str == 'Pako':
        return anom_Pakoholder(data_str, imsz)
    # Previously an unrecognized data_str fell through every branch and
    # raised UnboundLocalError at the return; fail with a clear message.
    raise ValueError('Unknown dataset: %s' % data_str)
Example #2
0
def feat_extract(data_str, resz, mode):
    """Extract per-video features and cache them under ``<data>/feat``.

    Args:
        data_str: dataset identifier, passed to ``anom_UCSDholder``.
        resz: target (height, width) to resize frames to, or None to
            keep the dataset's native image size.
        mode: 0 to process the videos listed in ``all.lst``,
            1 to process ``test1.lst`` (also creates ``feat/Test1``).

    Raises:
        ValueError: if *mode* is neither 0 nor 1.
    """
    feature = 0  # 0: raw pixel, 1: HOG
    dataholder = anom_UCSDholder(data_str, resz)

    data_folder = dataholder.data_folder

    feat_folder = '%s/feat' % (data_folder)
    if not os.path.exists(feat_folder):
        os.mkdir(feat_folder)

    model_folder = '%s/model' % (data_folder)
    if not os.path.exists(model_folder):
        os.mkdir(model_folder)

    ext = dataholder.ext
    imsz = dataholder.imsz

    if resz is None:
        resz = imsz
    if feature == 1:
        # HOG needs dimensions that are multiples of the 16-pixel cell size.
        resz = (int(math.ceil(resz[0] * 1.0 / 16) * 16),
                int(math.ceil(resz[1] * 1.0 / 16) * 16))

    feat_file_format = '%s/%s_resz%dx%d_%s_v3.npz'

    if feature == 0:
        strfeature = "raw"
    else:
        strfeature = "HOG"

    print('Extracting %s feature from training videos...' % strfeature)
    if mode == 0:
        video_list = read_list_from_file('%s/all.lst' % data_folder)
    elif mode == 1:
        video_list = read_list_from_file('%s/test1.lst' % data_folder)
        test1_folder = '%s/Test1' % (feat_folder)
        if not os.path.exists(test1_folder):
            os.mkdir(test1_folder)
    else:
        # Previously an unknown mode left video_list unbound and crashed
        # with UnboundLocalError below.
        raise ValueError('Unknown mode: %s (expected 0 or 1)' % mode)

    for s in video_list:
        frm_folder = "%s/%s" % (data_folder, s)
        print(frm_folder)
        feat_file = feat_file_format % (
            feat_folder, s, resz[0], resz[1], strfeature)
        # Skip videos whose feature cache already exists.
        if not os.path.isfile(feat_file):
            preprocess(frm_folder, feat_file, imsz, resz, ext, feature, bshow=0)
        else:
            print('File exists.')

    print('Finished.')
Example #3
0
def feat_resize(data_str, imsz, resz, bshow=0):
    """Resize cached raw-pixel feature arrays from *imsz* to *resz*.

    Loads each per-video ``..._resz<H>x<W>_raw_v3.npz`` array saved at the
    original image size, resizes every frame with OpenCV, and saves the
    result under the same naming scheme at the new size.

    Args:
        data_str: dataset identifier, passed to ``anom_UCSDholder``.
        imsz: (height, width) of the existing cached arrays.
        resz: target (height, width).
        bshow: 1 to display original/resized frames side by side.
    """
    dataholder = anom_UCSDholder(data_str, imsz)

    data_folder = dataholder.data_folder

    feat_folder = '%s/feat' % (data_folder)
    if not os.path.exists(feat_folder):
        os.mkdir(feat_folder)

    feat_file_format = '%s/%s_resz%dx%d_raw_v3.npz'

    if bshow == 1:
        fig = plt.figure('Resize')

    test_list = read_list_from_file('%s/all.lst' % (data_folder))

    for s in test_list:
        feat_file = feat_file_format % (feat_folder, s, imsz[0], imsz[1])
        resz_file = feat_file_format % (feat_folder, s, resz[0], resz[1])
        # Silently skip videos without a cached feature file.
        if not os.path.isfile(feat_file):
            continue
        print('Loading %s' % s)
        # NOTE(review): despite the .npz extension these files hold a
        # single array written with np.save (see below) — np.load returns
        # an ndarray here, not an NpzFile.
        data_im = np.load(feat_file)
        num_data, h, w = data_im.shape
        D = np.zeros([num_data, *resz])
        for i in range(num_data):
            im = data_im[i, :, :]
            # cv2.resize takes (width, height).
            im_resz = cv2.resize(im, (resz[1], resz[0]))
            D[i, :, :] = im_resz

            if bshow == 1:
                plt.clf()
                plt.subplot(1, 2, 1)
                plt.imshow(im, cmap='Greys_r')
                plt.title('Original %d' % i)

                plt.subplot(1, 2, 2)
                plt.imshow(im_resz, cmap='Greys_r')
                plt.title('Resize %d' % i)

                plt.show(block=False)
                plt.pause(0.05)
        # Pass an open handle so np.save does not append '.npy' to the
        # '.npz'-named path; the with-block fixes the original's leaked
        # file handle.
        with open(resz_file, 'wb') as fh:
            np.save(fh, D)
        print('saved to %s' % resz_file)
    print('Finished.')
Example #4
0
# Extract Brox optical-flow images for a dataset by invoking the MATLAB
# implementation under eccv2004Matlab. Usage: python <script> <dataset>
import os
import sys
import subprocess
from utils.anom_UCSDholderv1 import anom_UCSDholder

if len(sys.argv) > 1:
    dataset = sys.argv[1]
    dataholder = anom_UCSDholder(dataset, resz=None)

    data_folder = dataholder.data_folder

    # Folder of this script; the MATLAB sources live next to it.
    release_folder = os.path.dirname(os.path.realpath(__file__))

    feat_folder = '%s/feat' % data_folder
    if not os.path.isdir(feat_folder):
        os.mkdir(feat_folder)
    set_name = 'all'
    ext = 'tif'
    # Run MATLAB headless; extract_BroxOF writes the flow files itself.
    subprocess.call([
        "matlab", "-nodisplay", "-r",
        "addpath('%s/eccv2004Matlab'); extract_BroxOF('%s','%s','%s');exit();"
        % (release_folder, data_folder, set_name, ext)
    ])
else:
    # Previously a missing argument made the script exit silently.
    print('Usage: python <script> <dataset>', file=sys.stderr)
Example #5
0
def train_hvad_GANv5(params):
    import pix2pix_so_v1_func_largev1a as pix2pix
    # experiment params
    mode = params.get_value('mode')
    data_str = params.get_value('data_str')
    train_str = params.get_value('train_str')

    layers = params.get_value('layers')
    layers_with_clusters = params.get_value('layers_with_clusters')
    cae_folder_name = params.get_value('cae_folder_name')
    skip_frame = params.get_value('skip_frame')
    bshow = params.get_value('bshow')
    bh5py = params.get_value('bh5py')

    resz = params.get_value('resz')
    direction = params.get_value('direction')

    # learner parameters
    lr_rate = params.get_value('lr_rate')
    w_init = params.get_value('w_init')
    num_epochs = params.get_value('num_epochs')
    train_flag = params.get_value('train_flag')

    # a.batch_size = 1

    OF_scale = 0.3
    dataholder = anom_UCSDholder(data_str, resz)

    data_folder = dataholder.data_folder
    feat_folder = '%s/feat' % (data_folder)
    if not os.path.exists(feat_folder):
        os.mkdir(feat_folder)

    model_folder = '%s/model' % (data_folder)
    MODEL_DIR = model_folder
    if not os.path.exists(model_folder):
        os.mkdir(model_folder)
    res_folder = '%s/result' % (data_folder)
    if not os.path.exists(res_folder):
        os.mkdir(res_folder)
    print('Model folder = %s' % model_folder)

    model_folder_name = 'hvad-gan-layer0-v5-brox'
    hvad_model_folder = '%s/%s' % (model_folder, model_folder_name)
    if os.path.isdir(hvad_model_folder) == False:
        os.mkdir(hvad_model_folder)

    flog = open(
        '%s/log_train_hvad_GANv5_brox_large_v2_reshape.txt' %
        hvad_model_folder, 'wt')
    imsz = dataholder.imsz

    mask_file_org = '%s/data_mask_org.h5' % (feat_folder)
    mask_file = '%s/data_mask.h5' % (feat_folder)
    F_file = '%s/data_F.h5' % (feat_folder)
    M_file = '%s/data_M.h5' % (feat_folder)

    if resz is None:
        resz = imsz

    # load cluster result
    if np.array(layers_with_clusters).sum() > 0:
        cluster_file = '%s/cluster_scene_8x16/cluster.npz' % model_folder
        npz_data = np.load(cluster_file)
        C = npz_data['C_compact_im']

    if bshow == 1:
        fig = plt.figure()

    train_time_start = time.time()
    train_list = read_list_from_file('%s/%s.lst' % (data_folder, train_str))
    for l in range(len(layers)):
        print('[%d] Training GAN on layer %d using with/without mask %d' %
              (l, layers[l], layers_with_clusters[l]))
        if layers[l] == 0:

            # load features
            frame_file_format = '%s/%s_resz%dx%d_raw_v3.npz'
            OF_file_format = '%s/%s_sz256x256_BroxOF.mat'

            # load all data for training
            F = None
            data_O = None
            for s in train_list:
                # frm_folder = "%s/%s" % (data_folder, s)
                # feat_file = feat_file_format  % (
                # feat_folder, s, resz[0], resz[1], h, w, h_step, w_step, strfeature)
                frame_file = frame_file_format % (feat_folder, s, resz[0],
                                                  resz[1])
                if os.path.isfile(frame_file):
                    dualprint('Loading %s' % s, flog)
                    F_s = np.load(frame_file)
                    print('data shape:', F_s.shape)
                    if skip_frame > 1:
                        F_s = F_s[::skip_frame, :, :]
                        print('Skipping frame:', F_s.shape)
                    if F is None:
                        F = F_s
                    else:
                        F = np.concatenate([F, F_s], axis=0)
                else:
                    dualprint('File %s doesn' 't exists' % frame_file, flog)
                    raise ValueError('File %s doesn' 't exists' % frame_file)

            dualprint('Convert frame and optical flow into [-1.0, 1.0]')
            # convert frame data in [0.0, 1.0] to [-1.0, 1.0]
            F = pix2pix.preprocess(F)
            data_F = np.stack((F, F, F), axis=3)
            del F, F_s
            print('data_F min %f max %f' % (data_F.min(), data_F.max()))
            print('data_F shape', data_F.shape)
            data_shape = data_F.shape

            idx = np.arange(data_shape[0])
            seed = random.randint(0, 2**31 - 1)
            np.random.seed(seed)
            random.seed(seed)
            rng_state = np.random.get_state()
            print('before shuffling')
            print('corresponding idx', idx[:10])

            np.random.shuffle(idx)
            print('data_F[:10].mean(): %f' % (data_F[:10, :, :, :].mean()))

            np.random.set_state(rng_state)
            np.random.shuffle(data_F)
            print('after shuffling')
            print('corresponding idx', idx[:10])
            print('data_F[:10].mean(): %f' % (data_F[:10, :, :, :].mean()))

            # store features
            #F_file_format = '%s/data_F.npy'
            #M_file_format = '%s/%s_data_M.npz'
            F_file = '%s/data_F.h5' % (feat_folder)
            #np.save(F_file, data_F)
            f = h5py.File(F_file, 'w')
            f.create_dataset('data', data=data_F, compression='gzip')
            f.close()
            print('saved to %s' % F_file)
            del data_F

            gc.collect()

            times = 0
            datasize = 0
            M_file = '%s/data_M.h5' % (feat_folder)
            for s in train_list:
                OF_file = OF_file_format % (feat_folder, s)
                if os.path.isfile(OF_file):

                    if bh5py == 1:
                        f_h5py = h5py.File(OF_file, 'r')
                        OF = f_h5py['O']
                        OF = np.array(OF).T
                        print(OF.shape)
                    else:
                        mat_data = sio.loadmat(OF_file)
                        OF = mat_data['O']
                    OF = norm_OF_01(OF, scale=OF_scale)
                    OF = pix2pix.preprocess(OF)
                    last_OF = OF[-1, :, :, :]
                    last_OF = np.reshape(last_OF, [1, *last_OF.shape])
                    OF = np.concatenate([OF, last_OF], axis=0)

                    # print(OF.shape)
                    if skip_frame > 1:
                        OF = OF[::skip_frame, :, :, :]
                        print('after skipping frames')
                        print(OF.shape)
                    '''
                    if data_O is None:
                        data_O = OF
                    else:
                        data_O = np.concatenate([data_O, OF], axis=0)
                    '''
                    #data_O_resz = np.zeros([OF.shape[0], resz[1], resz[0], 3])
                    data_O_resz = OF

                    #for i in range(OF.shape[0]):
                    #    data_O_resz[i, :, :, :] = cv2.resize(OF[i, :, :, :], (resz[1], resz[0]))
                    if times == 0:
                        f = h5py.File(M_file, 'w')
                        f.create_dataset('data',
                                         data=data_O_resz,
                                         maxshape=(None, data_O_resz.shape[1],
                                                   data_O_resz.shape[2],
                                                   data_O_resz.shape[3]),
                                         compression='gzip')
                        f.close()
                        print('saved to %s' % M_file)
                    else:
                        f = h5py.File(M_file, 'a')
                        dataset = f['data']
                        dataset.resize([
                            datasize + data_O_resz.shape[0],
                            data_O_resz.shape[1], data_O_resz.shape[2],
                            data_O_resz.shape[3]
                        ])
                        dataset[datasize:datasize +
                                data_O_resz.shape[0]] = data_O_resz
                        f.close()
                        print('saved to %s' % M_file)
                    datasize = datasize + data_O_resz.shape[0]
                    times = times + 1
                else:
                    dualprint('File %s doesn' 't exists' % OF_file, flog)

            del OF, last_OF
            '''
            data_O_resz = np.zeros([data_O.shape[0], resz[1], resz[0], 3])

            for i in range(data_O.shape[0]):
                data_O_resz[i, :, :, :] = cv2.resize(data_O[i, :, :, :], (resz[1], resz[0]))

            del data_O
            '''
            #gc.collect()
            #data_O_resz = norm_OF_01(data_O_resz, scale=OF_scale)
            #data_O_resz = pix2pix.preprocess(data_O_resz)

            f = h5py.File(M_file, 'r')
            #data_M = data_O_resz
            data_M = f['data'][:]
            print('Loading %s' % M_file)
            f.close()
            del data_O_resz
            gc.collect()

            idx = np.arange(data_shape[0])
            print('before shuffling')
            print('corresponding idx', idx[:10])

            np.random.set_state(rng_state)
            np.random.shuffle(idx)
            print('data_M[:10].mean(): %f' % (data_M[:10, :, :, :].mean()))

            np.random.set_state(rng_state)
            np.random.shuffle(data_M)
            print('after shuffling')
            print('corresponding idx', idx[:10])
            print('data_M[:10].mean(): %f' % (data_M[:10, :, :, :].mean()))

            print('data_M min %f max %f' % (data_M.min(), data_M.max()))
            print('data_M shape', data_M.shape)

            #np.save(M_file, data_M)
            f = h5py.File(M_file, 'w')
            f.create_dataset('data', data=data_M, compression='gzip')
            f.close()
            print('saved to %s' % M_file)
            del data_M
            gc.collect()

            if train_flag > 0:
                f = h5py.File(mask_file_org, 'r')
                print('Loading %s' % mask_file_org)
                data_mask = f['data'][:]
                f.close()

                idx = np.arange(data_shape[0])
                print('before shuffling')
                print('corresponding idx', idx[:10])

                np.random.set_state(rng_state)
                np.random.shuffle(idx)
                print('data_mask[:10].mean(): %f' %
                      (data_mask[:10, :, :, :].mean()))

                np.random.set_state(rng_state)
                np.random.shuffle(data_mask)
                print('after shuffling')
                print('corresponding idx', idx[:10])
                print('data_mask[:10].mean(): %f' %
                      (data_mask[:10, :, :, :].mean()))

                print('data_mask min %f max %f' %
                      (data_mask.min(), data_mask.max()))
                print('data_mask shape', data_mask.shape)

                f = h5py.File(mask_file, 'w')
                f.create_dataset('data', data=data_mask, compression='gzip')
                f.close()
                print('saved to %s' % mask_file)
                del data_mask
                gc.collect()

        else:
            # load extracted high level features.
            #cae_folder = '%s/%s' % (model_folder, cae_folder_name)
            #cae_folder_name = None
            if cae_folder_name is not None:
                cae_folder = '%s/%s' % (model_folder, cae_folder_name)
                # loading data
                HF = None
                HM = None
                times = 0
                datasize = 0
                F_file = '%s/data_F.h5' % (feat_folder)
                for s in train_list:
                    feat_file1 = '%s/%s_resz%dx%d_cae1_layer%d.npz' % (
                        cae_folder, s, resz[0], resz[1], layers[l])

                    if os.path.isfile(feat_file1):
                        dualprint('Loading %s of cae1' % s, flog)
                        npz_data = np.load(feat_file1)
                        HF_s = npz_data['feat']
                        if skip_frame > 1:
                            HF_s = HF_s[::skip_frame, :, :, :]
                            print('Skipping frame:', HF_s.shape)
                        '''
                        if HF is None:
                            HF = HF_s
                        else:
                            HF = np.concatenate([HF, HF_s], axis=0)
                        '''
                        if times == 0:
                            f = h5py.File(F_file, 'w')
                            f.create_dataset('data',
                                             data=HF_s,
                                             maxshape=(None, HF_s.shape[1],
                                                       HF_s.shape[2],
                                                       HF_s.shape[3]),
                                             compression='gzip')
                            f.close()
                            print('saved to %s' % F_file)
                        else:
                            f = h5py.File(F_file, 'a')
                            dataset = f['data']
                            dataset.resize([
                                datasize + HF_s.shape[0], HF_s.shape[1],
                                HF_s.shape[2], HF_s.shape[3]
                            ])
                            dataset[datasize:datasize + HF_s.shape[0]] = HF_s
                            f.close()
                            print('saved to %s' % F_file)
                        datasize = datasize + HF_s.shape[0]
                        times = times + 1
                    else:
                        dualprint('File %s does not exists' % feat_file1, flog)
                        raise ValueError('File %s does not exists' %
                                         feat_file1)

                del HF_s
                dualprint('Convert frame and optical flow into [-1.0, 1.0]')
                # convert frame data in [0.0, 1.0] to [-1.0, 1.0]

                f = h5py.File(F_file, 'r')
                HF = f['data'][:]
                print('Loading %s' % F_file)
                f.close()
                print('Before converting to [-1, 1]')
                print('HF min %f max %f' % (HF.min(), HF.max()))
                print('HF shape', HF.shape)

                scale = 0.3

                HF, HF_mean, HF_std = norm_data(HF, shift=0.0, scale=scale)

                HF = np.minimum(np.maximum(HF, -1.0), 1.0)
                print('After converting to [-1, 1]')
                print('HF min %f max %f' % (HF.min(), HF.max()))
                print('HF shape', HF.shape)
                data_F = HF
                del HF
                num_data, height, width, num_c = data_F.shape

                height1, width1, num_c1 = compute_reshape(height, width, num_c)
                # store features
                #F_file_format = '%s/data_F.h5'
                #M_file_format = '%s/%s_data_M.npz'

                #np.save(F_file, data_F)
                f = h5py.File(F_file, 'w')
                f.create_dataset('data', data=data_F, compression='gzip')
                print('saved to %s' % F_file)
                f.close()
                del data_F
                gc.collect()

                times = 0
                datasize = 0
                M_file = '%s/data_M.h5' % (feat_folder)
                for s in train_list:
                    feat_file2 = '%s/%s_resz%dx%d_cae2_layer%d.npz' % (
                        cae_folder, s, resz[0], resz[1], layers[l])

                    if os.path.isfile(feat_file2):
                        dualprint('Loading %s of cae2' % s, flog)
                        npz_data = np.load(feat_file2)
                        HM_s = npz_data['feat']
                        if skip_frame > 1:
                            HM_s = HM_s[::skip_frame, :, :, :]
                            print('Skipping frame:', HM_s.shape)
                        '''
                        if HM is None:
                            HM = HM_s
                        else:
                            HM = np.concatenate([HM, HM_s], axis=0)
                        '''
                        if times == 0:
                            f = h5py.File(M_file, 'w')
                            f.create_dataset('data',
                                             data=HM_s,
                                             maxshape=(None, HM_s.shape[1],
                                                       HM_s.shape[2],
                                                       HM_s.shape[3]),
                                             compression='gzip')
                            f.close()
                            print('saved to %s' % M_file)
                        else:
                            f = h5py.File(M_file, 'a')
                            dataset = f['data']
                            dataset.resize([
                                datasize + HM_s.shape[0], HM_s.shape[1],
                                HM_s.shape[2], HM_s.shape[3]
                            ])
                            dataset[datasize:datasize + HM_s.shape[0]] = HM_s
                            f.close()
                            print('saved to %s' % M_file)
                        datasize = datasize + HM_s.shape[0]
                        times = times + 1
                    else:
                        dualprint('File %s does not exists' % feat_file2, flog)
                        raise ValueError('File %s does not exists' %
                                         feat_file2)

                del HM_s
                f = h5py.File(M_file, 'r')
                HM = f['data'][:]
                print('Loading %s' % M_file)
                f.close()
                print('HM min %f max %f' % (HM.min(), HM.max()))
                print('HM shape', HM.shape)
                print('After converting to [-1, 1]')

                HM, HM_mean, HM_std = norm_data(HM, shift=0.0, scale=scale)

                HM = np.minimum(np.maximum(HM, -1.0), 1.0)

                print('HM min %f max %f' % (HM.min(), HM.max()))
                print('HM shape', HM.shape)
                data_M = HM
                del HM
                gc.collect()
                mean_std_file = '%s/mean_std_layer%d_large_v2.dill' % (
                    cae_folder, layers[l])

                dill.dump(
                    {
                        'cae1_mean': HF_mean,
                        'cae1_std': HF_std,
                        'cae1_scale': scale,
                        'cae2_mean': HM_mean,
                        'cae2_std': HM_std,
                        'cae2_scale': scale
                    }, open(mean_std_file, 'wb'))
                dualprint(
                    'Saving the mean and std of the layer %d feature as %s' %
                    (layers[l], mean_std_file))

            else:
                raise ValueError(
                    'Please provide the reference to the trained cae folder to train pix2pix using high level feature.'
                )

        if layers[l] == 0:
            output_folder = hvad_model_folder
        else:
            output_folder = cae_folder

        if direction == 'AtoB':

            output_folder_FM = '%s/layer%d-usecluster%d-FtoM-large-v2-reshape' % (
                output_folder, layers[l], layers_with_clusters[l])
            if os.path.isdir(output_folder_FM) == False:
                os.mkdir(output_folder_FM)

            flog2 = open('%s/log.txt' % output_folder_FM, 'wt')
            dualprint('[Layer %d] F->M training...' % layers[l], flog)
        elif direction == 'BtoA':
            output_folder_MF = '%s/layer%d-usecluster%d-MtoF-large-v2-reshape' % (
                output_folder, layers[l], layers_with_clusters[l])
            if os.path.isdir(output_folder_MF) == False:
                os.mkdir(output_folder_MF)
            flog2 = open('%s/log.txt' % output_folder_MF, 'wt')
            dualprint('[Layer %d] M->F training...' % layers[l], flog)

        if layers_with_clusters[l] == False:  # no clustering results

            if layers[l] > 0:
                #'''
                # reshape feature maps into 256 x 256 images

                #num_data, height, width, num_c = data_F.shape

                #height1, width1, num_c1 = compute_reshape(height, width, num_c)
                print(height1, width1, num_c1, height1 * width1 * num_c1)

                data_M_resz = reshape_feat(data_M, height1, width1, num_c1)
                del data_M
                gc.collect()
                M_file = '%s/data_M.h5' % (feat_folder)
                f = h5py.File(M_file, 'w')
                f.create_dataset('data',
                                 shape=(data_M_resz.shape[0], resz[0], resz[1],
                                        data_M_resz.shape[3]),
                                 compression='gzip')
                size1 = int(data_M_resz.shape[0] / 2)
                size2 = data_M_resz.shape[0] - size1
                data_M = np.zeros(
                    [size1, resz[0], resz[1], data_M_resz.shape[3]])
                for i in range(size1):
                    data_M[i, :, :, :] = cv2.resize(data_M_resz[i, :, :, :],
                                                    (resz[1], resz[0]))
                    #f['data'][i] = cv2.resize(data_M_resz[i, :, :, :], (resz[1], resz[0]))
                f['data'][0:size1] = data_M
                del data_M
                data_M = np.zeros(
                    [size2, resz[0], resz[1], data_M_resz.shape[3]])
                for i in range(size2):
                    data_M[i, :, :, :] = cv2.resize(
                        data_M_resz[size1 + i, :, :, :], (resz[1], resz[0]))
                f['data'][size1:data_M_resz.shape[0]] = data_M
                del data_M_resz, data_M
                data_M = f['data'][:]
                f.close()
                data_shape = data_M.shape
                idx = np.arange(data_shape[0])
                seed = random.randint(0, 2**31 - 1)
                np.random.seed(seed)
                random.seed(seed)
                rng_state = np.random.get_state()

                print('before shuffling')
                print('corresponding idx', idx[:10])

                np.random.shuffle(idx)
                print('data_M[:10].mean(): %f' % (data_M[:10, :, :, :].mean()))

                np.random.set_state(rng_state)
                np.random.shuffle(data_M)
                print('after shuffling')
                print('corresponding idx', idx[:10])
                print('data_M[:10].mean(): %f' % (data_M[:10, :, :, :].mean()))
                #M_file_format = '%s/data_M.npy'

                #np.save(M_file, data_M)
                f = h5py.File(M_file, 'w')
                f.create_dataset('data', data=data_M, compression='gzip')
                f.close()
                print('saved to %s' % M_file)
                del data_M
                gc.collect()

                F_file = '%s/data_F.h5' % (feat_folder)
                if os.path.isfile(F_file):
                    print('Loading %s' % F_file)
                    #data_F = np.load(F_file)
                    f = h5py.File(F_file, 'r')
                    data_F = f['data'][:]
                    f.close()
                    print('data_F shape:', data_F.shape)
                else:
                    print('File %s doesn' 't exists' % F_file)

                data_F_resz = reshape_feat(data_F, height1, width1, num_c1)

                dualprint(
                    'Reshape features from (%d, %d, %d) to (%d, %d, %d)' %
                    (data_F.shape[1], data_F.shape[2], data_F.shape[3],
                     data_F_resz.shape[1], data_F_resz.shape[2],
                     data_F_resz.shape[3]), flog2)
                reshape_info_file = '%s/reshape_info_layer%d_large_v2.dill' % (
                    output_folder, layers[l])
                shape_info = {
                    'org_shape':
                    [data_F.shape[1], data_F.shape[2], data_F.shape[3]],
                    'reshape': [
                        data_F_resz.shape[1], data_F_resz.shape[2],
                        data_F_resz.shape[3]
                    ],
                    'resize': [resz[0], resz[1], data_F_resz.shape[3]]
                }
                print(shape_info)
                dill.dump(shape_info, open(reshape_info_file, 'wb'))
                dualprint('Saving shape info as: %s' % reshape_info_file,
                          flog2)
                f = h5py.File(F_file, 'w')
                f.create_dataset('data',
                                 shape=(data_F_resz.shape[0], resz[0], resz[1],
                                        data_F_resz.shape[3]),
                                 compression='gzip')
                data_F = np.zeros(
                    [size1, resz[0], resz[1], data_F_resz.shape[3]])
                for i in range(size1):
                    data_F[i, :, :, :] = cv2.resize(data_F_resz[i, :, :, :],
                                                    (resz[1], resz[0]))
                    #f['data'][i] = cv2.resize(data_F_resz[i, :, :, :], (resz[1], resz[0]))
                f['data'][0:size1] = data_F
                del data_F
                data_F = np.zeros(
                    [size2, resz[0], resz[1], data_F_resz.shape[3]])
                for i in range(size2):
                    data_F[i, :, :, :] = cv2.resize(
                        data_F_resz[size1 + i, :, :, :], (resz[1], resz[0]))
                f['data'][size1:data_F_resz.shape[0]] = data_F
                del data_F_resz, data_F
                data_F = f['data'][:]
                f.close()
                dualprint(
                    'Resizing to: (%d, %d, %d)' %
                    (data_F.shape[1], data_F.shape[2], data_F.shape[3]), flog2)
                data_shape = data_F.shape
                idx = np.arange(data_shape[0])
                print('before shuffling')
                print('corresponding idx', idx[:10])
                np.random.set_state(rng_state)
                np.random.shuffle(idx)
                print('data_F[:10].mean(): %f' % (data_F[:10, :, :, :].mean()))

                np.random.set_state(rng_state)
                np.random.shuffle(data_F)
                print('after shuffling')
                print('corresponding idx', idx[:10])
                print('data_F[:10].mean(): %f' % (data_F[:10, :, :, :].mean()))
                F_file = '%s/data_F.h5' % (feat_folder)
                #np.save(M_file, data_M)
                f = h5py.File(F_file, 'w')
                f.create_dataset('data', data=data_F, compression='gzip')
                f.close()
                print('saved to %s' % F_file)
                del data_F
                gc.collect()

                if train_flag > 0:
                    f = h5py.File(mask_file_org, 'r')
                    print('Loading %s' % mask_file_org)
                    data_mask = f['data'][:]
                    f.close()

                    idx = np.arange(data_shape[0])
                    print('before shuffling')
                    print('corresponding idx', idx[:10])

                    np.random.set_state(rng_state)
                    np.random.shuffle(idx)
                    print('data_mask[:10].mean(): %f' %
                          (data_mask[:10, :, :, :].mean()))

                    np.random.set_state(rng_state)
                    np.random.shuffle(data_mask)
                    print('after shuffling')
                    print('corresponding idx', idx[:10])
                    print('data_mask[:10].mean(): %f' %
                          (data_mask[:10, :, :, :].mean()))

                    print('data_mask min %f max %f' %
                          (data_mask.min(), data_mask.max()))
                    print('data_mask shape', data_mask.shape)

                    f = h5py.File(mask_file, 'w')
                    f.create_dataset('data',
                                     data=data_mask,
                                     compression='gzip')
                    f.close()
                    print('saved to %s' % mask_file)
                    del data_mask
                    gc.collect()
                #'''
                #data_shape = (3604, 256, 256, 8)
                '''
                #M_file = M_file_format % (feat_folder)
                if os.path.isfile(M_file):
                    print('Loading %s' % M_file)
                    #data_M = np.load(M_file)
                    f = h5py.File(M_file, 'r')
                    data_M = f['M_data'][:]
                    f.close()
                    print('data_M shape:', data_M.shape)
                else:
                    print('File %s doesn''t exists' % F_file)
                '''
                #F_file = F_file_format % (feat_folder)
                #np.save(F_file, data_F)
                #print('saved to %s' % F_file)
                #del data_F, data_F_resz
                #gc.collect()
            '''    
            if layers[l] == 0:
                F_file = '%s/data_F.h5' % (feat_folder)
                if os.path.isfile(F_file):
                    print('Loading %s' % F_file)
                    #data_F = np.load(F_file)
                    f = h5py.File(F_file, 'r')
                    data_F = f['F_data'][:]
                    f.close()
                    print('data_F shape:', data_F.shape)
                else:
                    print('File %s doesn''t exists' % F_file)
            '''
            gen_layers1 = [(256, 64), (128, 128), (64, 256), (32, 512),
                           (16, 512), (8, 512), (4, 512), (2, 512)]

            gen_layers2 = [(4, 512, 0.5), (8, 512, 0.5), (16, 512, 0.5),
                           (32, 512, 0.0), (64, 256, 0.0), (128, 128, 0.0),
                           (256, 64, 0.0)]

            gen_layers_specs1 = []
            gen_layers_specs2 = []

            for i in range(len(gen_layers1)):
                if gen_layers1[i][0] == data_shape[1]:
                    num_gen_feats = gen_layers1[i][1]
                    num_dis_feats = num_gen_feats
                    gen_layers_specs1 = [
                        out_dim for _, out_dim in gen_layers1[i + 1:]
                    ]
                    break
            print('num_gen_feats = %d' % num_gen_feats)
            print('num_dis_feats = %d' % num_dis_feats)
            print('gen_layers_specs1', gen_layers_specs1)
            for i in range(len(gen_layers2)):
                if gen_layers2[i][0] == data_shape[1]:
                    gen_layers_specs2 = gen_layers2[:i + 1]
                    gen_layers_specs2 = [
                        (out_dim, drop)
                        for _, out_dim, drop in gen_layers_specs2
                    ]
                    break
            dis_layers = [
                [256, 64, 2],  # gdf, feat_dims, stride
                [128, 128, 2],
                [64, 256, 2],
                [32, 512, 1],
                [31, 1, 1]
            ]
            if data_shape[1] >= 256:
                strides = [2, 2, 2, 1, 1]
            elif data_shape[1] >= 128:
                strides = [2, 2, 1, 1, 1]
            elif data_shape[1] >= 64:
                strides = [2, 1, 1, 1, 1]
            else:
                strides = [1, 1, 1, 1, 1]

            dis_layers_specs = copy.copy(dis_layers)
            for i in range(len(dis_layers)):
                dis_layers_specs[i][2] = strides[i]
            if data_shape[1] <= 32:
                dis_layers_specs = [[32, 512, 1]]
            else:
                for i in range(len(dis_layers)):
                    if dis_layers[i][0] == data_shape[1]:
                        dis_layers_specs = dis_layers[i:-1]

            if direction == 'AtoB':

                pix2pix.pix2pix_func(mode,
                                     F_file,
                                     M_file,
                                     mask_file,
                                     data_shape,
                                     data_folder,
                                     output_folder_FM,
                                     num_epochs,
                                     'AtoB',
                                     num_gen_feats=num_gen_feats,
                                     num_dis_feats=num_dis_feats,
                                     gen_layers_specs1=gen_layers_specs1,
                                     gen_layers_specs2=gen_layers_specs2,
                                     dis_layers_specs=dis_layers_specs,
                                     layers=layers,
                                     train_flag=train_flag)
                flog2.write('Finished.\n')
                flog2.close()

            elif direction == 'BtoA':
                pix2pix.pix2pix_func(mode,
                                     M_file,
                                     F_file,
                                     mask_file,
                                     data_shape,
                                     data_folder,
                                     output_folder_MF,
                                     num_epochs,
                                     'BtoA',
                                     num_gen_feats=num_gen_feats,
                                     num_dis_feats=num_dis_feats,
                                     gen_layers_specs1=gen_layers_specs1,
                                     gen_layers_specs2=gen_layers_specs2,
                                     dis_layers_specs=dis_layers_specs,
                                     layers=layers,
                                     train_flag=train_flag)
                flog2.write('Finished.\n')
                flog2.close()
        elif layers_with_clusters[l] == True:  # using clusters
            print('[Error] Not being implemented yet.')
            pass

        else:
            raise ValueError('Invalid value of layers_with_clusters')

    train_time_end = time.time()
    dualprint(
        'Training time: %f (seconds)' % (train_time_end - train_time_start),
        flog)
    flog.close()
    print('Finished.')
Пример #6
0
def evalv1(data_str, imsz, vis_folder, beta, filename=None, test_str = 'test', anom_map_list = None):
    """Evaluate anomaly maps at frame, pixel and dual-pixel level.

    Loads per-video enhanced energy maps (``<video>_final.npz``) from
    ``vis_folder`` unless ``anom_map_list`` is supplied, scores them with
    ``dataholder.evaluate``, then writes the results as a .pkl and a .txt
    file and saves the ROC curves as a PDF in ``vis_folder``.

    Args:
        data_str: dataset identifier (e.g. 'UCSDped2').
        imsz: image size passed to the data holder.
        vis_folder: folder holding the anomaly maps; also receives outputs.
        beta: evaluation parameter forwarded to ``dataholder.evaluate``.
        filename: optional basename for the output .pkl/.txt files; when
            None a name derived from the holder version and beta is used.
        test_str: basename of the .lst file listing videos to evaluate.
        anom_map_list: pre-loaded anomaly maps; loaded from disk if None.
    """
    dataholder = anom_UCSDholder(data_str, imsz)
    data_folder = dataholder.data_folder

    if filename is None:
        res_file = '%s/result_%s_beta%0.5f_v1.pkl' % (vis_folder, dataholder.version, beta)
    else:
        res_file = '%s/%s.pkl' % (vis_folder, filename)

    if anom_map_list is None:
        # Load the enhanced energy map of every test video from disk.
        test_list = read_list_from_file('%s/%s.lst' % (data_folder, test_str))
        anom_map_list = []
        for s in test_list:
            print('Loading %s' % s)
            npzfiles = np.load('%s/%s_final.npz' % (vis_folder, s))
            anom_map_list.append(npzfiles['Emap_enh'])

    res = dataholder.evaluate(anom_map_list, beta, test_str)
    # Context manager so the pickle file handle is not leaked
    # (was: pkl.dump(res, open(res_file, 'wb'))).
    with open(res_file, 'wb') as fpkl:
        pkl.dump(res, fpkl)

    if filename is None:
        txt_file = '%s/result_%s_beta%0.5f_v1.txt' % (vis_folder, dataholder.version, beta)
    else:
        txt_file = '%s/%s.txt' % (vis_folder, filename)
    with open(txt_file, 'wt') as f:
        f.write('Frame Level: \n')
        f.write('AUC\t%f \n' % res['AUC_frame'])
        f.write('EER\t%f \n' % res['MCR_frame_ERR'])

        f.write('Pixel Level: \n')
        f.write('AUC\t%f \n' % res['AUC_pxl'])
        f.write('EER\t%f \n' % res['MCR_pxl_ERR'])

        f.write('Dual Pixel Level: \n')
        f.write('AUC\t%f \n' % res['AUC_dpxl'])
        # EER is not reported at dual-pixel level.

    # --- ROC curves --------------------------------------------------------
    plt.figure()
    plt.subplot(2, 2, 1)
    plt.plot(res['FPR_frame'], res['TPR_frame'], 'x-r',
             label='Ours (AUC=%0.2f, EER=%0.2f)'
                   % (res['AUC_frame'], 100 * res['MCR_frame_ERR']))
    plt.title('Frame level')
    plt.legend()
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
    plt.axis('equal')

    plt.subplot(2, 2, 2)
    plt.plot(res['FPR_pxl'], res['TPR_pxl'], 'x-r',
             label='Ours (AUC=%0.2f, EER=%0.2f)'
                   % (res['AUC_pxl'], 100 * res['MCR_pxl_ERR']))
    plt.title('Pixel level')
    plt.legend()
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
    plt.axis('equal')
    plt.suptitle('ROC curve')

    plt.subplot(2, 2, 3)
    plt.plot(res['FPR_dpxl'], res['TPR_dpxl'], 'x-r',
             label='Ours (AUC=%0.2f)' % (res['AUC_dpxl']))
    plt.title('Dual pixel level')
    plt.legend()
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
    plt.axis('equal')
    plt.suptitle('ROC curve')

    plt.savefig('%s/roc_%s_beta%0.5f_v1.pdf' % (vis_folder, dataholder.version, beta))
    plt.show(block=False)

    print('Output to %s' % vis_folder)
    print('Finished.')
Пример #7
0
    plt.savefig('%s/roc_%s_beta%0.5f_v1.pdf' % (vis_folder, dataholder.version, beta))
    plt.show(block=False)

    print('Output to %s' % vis_folder)
    print('Finished.')

if __name__ == "__main__":
    # Stand-alone evaluation entry point: score every listed result folder
    # of the chosen dataset at each beta value.
    data_str = 'UCSDped2'
    imsz = [240, 360]
    vis_folder_names = ['hvad-msz50-t1.200']

    # Avenue uses its own holder class; all UCSD variants share one.
    holder_cls = anom_Avenueholder if data_str == 'Avenue' else anom_UCSDholder
    dataholder = holder_cls(data_str, imsz)
    data_folder = dataholder.data_folder
    exp_folder = '%s/result' % data_folder

    vbeta = [0.05]
    for vis_folder_name in vis_folder_names:
        vis_folder = '%s/%s' % (exp_folder, vis_folder_name)
        print('Vis folder name: %s' % vis_folder_name)
        for beta in vbeta:
            print('beta = %0.5f ' % beta)
            evalv1(data_str, imsz, vis_folder, beta)
def train_hvad(params):
    """Train the two denoising convolutional autoencoders of the HVAD model.

    mode 0 trains cae1 on (replicated-grayscale) frames, mode 1 trains cae2
    on Brox optical flow, mode 2 trains both.  Training data is cached as
    HDF5 files under ``<data_folder>/feat`` and checkpoints/logs are written
    under ``<data_folder>/model/hvad-<dims>-release``.

    Args:
        params: parameter container exposing ``get_value``; keys read here:
            mode, data_str, train_str, bshow, bh5py, batch_size,
            encoder_dims, imsz, frame_step, w_init, data_range.

    NOTE(review): relies on module-level globals that are not parameters:
    num_epochs, disp_freq, save_freq, device, ConvAEv4, make_batches,
    preprocess, norm_OF_01, dualprint, read_list_from_file -- confirm they
    are defined before calling.
    """
    # experiment params
    mode = params.get_value('mode')
    data_str = params.get_value('data_str')
    train_str = params.get_value('train_str')
    bshow = params.get_value('bshow')
    bh5py = params.get_value('bh5py')  # 1: read .mat via h5py (MATLAB v7.3), else scipy.io
    batch_size = params.get_value('batch_size')
    encoder_dims = params.get_value('encoder_dims')
    imsz = params.get_value('imsz')
    frame_step = params.get_value('frame_step')

    # learner parameters
    w_init = params.get_value('w_init')
    data_range = params.get_value('data_range')  # (min, max) clip range after noising

    OF_scale = 0.3
    h_resz, w_resz = 256, 256

    noise_sigma = 0.2  # std of the corrupting Gaussian noise (denoising AE)
    gamma = 0.0
    denoising = True
    k_h = 5
    k_w = 5
    d_h = 2
    d_w = 2
    use_bn = True
    optimizer_name = 'Adagrad'
    cae_lr_rate = 0.1

    dataholder = anom_UCSDholder(data_str, imsz)
    data_folder = dataholder.data_folder

    # Avenue and Ped1 clips are longer, so subsample every other frame.
    if data_str in ['avenue', 'avenue_sz240x360fr1', 'UCSDped1']:
        skip_frame = 2
    else:
        skip_frame = 1

    feat_folder = '%s/feat' % (data_folder)
    if not os.path.exists(feat_folder):
        os.mkdir(feat_folder)

    model_folder = '%s/model' % (data_folder)
    if not os.path.exists(model_folder):
        os.mkdir(model_folder)
    res_folder = '%s/result' % (data_folder)
    if not os.path.exists(res_folder):
        os.mkdir(res_folder)

    # The decoder mirrors the encoder; its final dim (None placeholder) is
    # set to the data channel count right before each CAE is built.
    encoder_act = ['lrelu'] * len(encoder_dims)
    decoder_dims = encoder_dims[:(len(encoder_dims) - 1)]
    decoder_dims.reverse()
    decoder_dims = decoder_dims + [None]
    print('decoder_dims', decoder_dims)
    decoder_act = ['lrelu'] * len(decoder_dims)

    layer_str = '-'.join(str(s) for s in encoder_dims)
    model_folder_name = 'hvad-%s-release' % (layer_str)
    hvad_model_folder = '%s/%s' % (model_folder, model_folder_name)
    if os.path.isdir(hvad_model_folder) == False:
        os.mkdir(hvad_model_folder)
    flog = open('%s/log.txt' % hvad_model_folder, 'wt')

    # cached feature file name patterns
    frame_file_format = '%s/%s_resz256x256_raw_v3.npz'
    OF_file_format = '%s/%s_sz256x256_BroxOF.mat'
    train_list = read_list_from_file('%s/%s.lst' % (data_folder, train_str))

    train_time_start = time.time()
    if mode in [0, 2]:
        print('Training cae 1:')

        # concatenate all training frames into one array
        data_F = None
        for s in train_list:
            frame_file = frame_file_format % (feat_folder, s)
            if os.path.isfile(frame_file):
                dualprint('Loading %s' % s, flog)
                F = np.load(frame_file)
                print('F shape', F.shape)
                if skip_frame > 1:
                    F = F[::skip_frame, :, :]
                    print('Skipping frame:', F.shape)

                if data_F is None:
                    data_F = F
                else:
                    data_F = np.concatenate([data_F, F], axis=0)
            else:
                dualprint("File %s doesn't exist" % frame_file, flog)

        dualprint('Convert frame and optical flow into [-1.0, 1.0]')
        # convert frame data in [0.0, 1.0] to [-1.0, 1.0]
        data_F = preprocess(data_F)

        # Frames are already at the training resolution, so no cv2 resize.
        F_resz = data_F
        del data_F
        gc.collect()
        print('F shape', F_resz.shape)
        print('F min %f max %f' % (F_resz.min(), F_resz.max()))

        # replicate the grayscale frame into 3 channels
        data1 = np.stack((F_resz, F_resz, F_resz), axis=3)

        del F_resz
        gc.collect()

        decoder_dims[-1] = data1.shape[3]

        cae_trainer1_folder = '%s/cae1' % hvad_model_folder
        if os.path.isdir(cae_trainer1_folder) == False:
            os.mkdir(cae_trainer1_folder)
        cae1 = ConvAEv4(input_height=data1.shape[1],
                        input_width=data1.shape[2],
                        input_channels=data1.shape[3],
                        batch_size=batch_size,
                        num_epochs=num_epochs,
                        lr_rate=cae_lr_rate,
                        w_init=w_init,
                        gamma=gamma,
                        saved_folder=cae_trainer1_folder,
                        k_h=k_h,
                        k_w=k_w,
                        d_h=d_h,
                        d_w=d_w,
                        use_bn=use_bn,
                        denoising=denoising,
                        encoder_dims=encoder_dims,
                        encoder_act=encoder_act,
                        decoder_dims=decoder_dims,
                        decoder_act=decoder_act,
                        optimizer_name=optimizer_name,
                        debug=True,
                        disp_freq=disp_freq,
                        save_freq=save_freq,
                        device=device)
        cae1.build_model()

        # shuffle, then keep a clean copy and corrupt the working copy
        idx = np.random.permutation(data1.shape[0])
        data1 = data1[idx, :, :, :]
        datasize = data1.shape[0]

        data1_org = data1.copy()
        if denoising == True:
            # corrupt the data with Gaussian noise, batch-wise to bound memory
            batches = make_batches(data1.shape[0], 100)
            for batch_idx, (batch_start, batch_end) in enumerate(batches):
                print('batch_idx, batch_start, batch_end', batch_idx, batch_start, batch_end)
                data1[batch_start:batch_end, :, :, :] = data1_org[batch_start:batch_end, :, :, :] + np.random.normal(0.0, noise_sigma, size=data1_org[batch_start:batch_end, :, :, :].shape)

                data1[batch_start:batch_end, :, :, :] = np.maximum(data1[batch_start:batch_end, :, :, :], data_range[0])
                data1[batch_start:batch_end, :, :, :] = np.minimum(data1[batch_start:batch_end, :, :, :], data_range[1])
        data1_org_file = '%s/data1_org.h5' % (feat_folder)
        f = h5py.File(data1_org_file, 'w')
        f.create_dataset('data', data=data1_org, compression='gzip')
        f.close()
        print('saved to %s' % data1_org_file)
        del data1_org
        data1_file = '%s/data1.h5' % (feat_folder)
        f = h5py.File(data1_file, 'w')
        f.create_dataset('data', data=data1, compression='gzip')
        f.close()
        print('saved to %s' % data1_file)
        del data1
        gc.collect()
        # cae fits from the HDF5 files rather than in-memory arrays
        cae1.fit(data1_file, data1_org_file, datasize)

    if mode in [1, 2]:
        print('Training cae 2')
        times = 0  # number of videos successfully appended so far
        datasize = 0
        data2_file = '%s/data2.h5' % (feat_folder)
        data2_org_file = '%s/data2_org.h5' % (feat_folder)
        for s in train_list:
            dualprint('Loading %s' % s, flog)
            OF_file = OF_file_format % (feat_folder, s)
            if os.path.isfile(OF_file):

                if bh5py == 1:
                    f_h5py = h5py.File(OF_file, 'r')
                    OF = f_h5py['O']
                    OF = np.array(OF).T
                    print(OF.shape)
                    f_h5py.close()
                else:
                    mat_data = sio.loadmat(OF_file)
                    OF = mat_data['O']
                    # BUGFIX: sio.loadmat returns a plain dict, which has no
                    # close(); the original mat_data.close() raised
                    # AttributeError on this path.
                OF = norm_OF_01(OF, scale=OF_scale)
                OF = preprocess(OF)
                # duplicate the last flow so the flow count matches frames
                last_OF = OF[-1, :, :, :]
                last_OF = np.reshape(last_OF, [1, *last_OF.shape])
                OF = np.concatenate([OF, last_OF], axis=0)

                if skip_frame > 1:
                    OF = OF[::skip_frame, :, :, :]
                    print('after skipping frames')
                    print(OF.shape)
                print('O shape', OF.shape)
                print('O min %f max %f' % (OF.min(), OF.max()))
                data2 = OF
                if times == 0:
                    # first successful video: create a resizable dataset
                    f = h5py.File(data2_file, 'w')
                    f.create_dataset('data', data=data2, maxshape=(None, data2.shape[1], data2.shape[2], data2.shape[3]), compression='gzip')
                    f.close()
                    print('saved to %s' % data2_file)
                else:
                    # grow the dataset and append this video's flow
                    f = h5py.File(data2_file, 'a')
                    dataset = f['data']
                    dataset.resize([datasize + data2.shape[0], data2.shape[1], data2.shape[2], data2.shape[3]])
                    dataset[datasize:datasize + data2.shape[0]] = data2
                    f.close()
                    print('saved to %s' % data2_file)
                datasize = datasize + data2.shape[0]
                # BUGFIX: count only videos actually written, so a missing
                # first file no longer makes the next video try to append to
                # a dataset that was never created.
                times = times + 1
                # BUGFIX: free per-video arrays inside this branch; the
                # original del ran unconditionally and raised NameError when
                # the very first OF file was missing.
                del OF
                del last_OF
            else:
                dualprint("File %s doesn't exist" % OF_file, flog)
            gc.collect()

        # shuffle and noise the cached flow in N chunks to bound peak memory
        N = 3
        splitsize = int(datasize / N)
        for i in range(N):
            f = h5py.File(data2_file, 'r')
            if i < N - 1:
                data2 = f['data'][i * splitsize:(i + 1) * splitsize, :, :, :]
            else:
                # last chunk also absorbs the remainder of the division
                data2 = f['data'][i * splitsize:datasize, :, :, :]
            f.close()
            idx = np.random.permutation(data2.shape[0])
            data2 = data2[idx, :, :, :]
            data2_org = data2.copy()
            if denoising == True:
                data2 = data2_org + np.random.normal(0.0, noise_sigma, size=data2.shape)
                data2 = np.maximum(data2, data_range[0])
                data2 = np.minimum(data2, data_range[1])
            if i == 0:
                f = h5py.File(data2_org_file, 'w')
                f.create_dataset('data', data=data2_org, maxshape=(None, data2_org.shape[1], data2_org.shape[2], data2_org.shape[3]), compression='gzip')
                print('saved to %s' % data2_org_file)
                f.close()
            else:
                f = h5py.File(data2_org_file, 'a')
                dataset = f['data']
                dataset.resize([i * splitsize + data2_org.shape[0], data2_org.shape[1], data2_org.shape[2], data2_org.shape[3]])
                dataset[i * splitsize:i * splitsize + data2_org.shape[0]] = data2_org
                f.close()
                print('saved to %s' % data2_org_file)
            # overwrite the corresponding slice of data2.h5 with the noisy copy
            f = h5py.File(data2_file, 'a')
            dataset = f['data']
            dataset[i * splitsize:i * splitsize + data2.shape[0]] = data2
            f.close()
            print('saved to %s' % data2_file)

        del data2_org

        # data2 still holds the last chunk; use it for the channel count
        decoder_dims[-1] = data2.shape[3]

        cae2_folder = '%s/cae2' % hvad_model_folder
        if os.path.isdir(cae2_folder) == False:
            os.mkdir(cae2_folder)
        cae2 = ConvAEv4(input_height=data2.shape[1],
                        input_width=data2.shape[2],
                        input_channels=data2.shape[3],
                        batch_size=batch_size,
                        num_epochs=num_epochs,
                        lr_rate=cae_lr_rate,
                        w_init=w_init,
                        gamma=gamma,
                        saved_folder=cae2_folder,
                        k_h=k_h,
                        k_w=k_w,
                        d_h=d_h,
                        d_w=d_w,
                        use_bn=use_bn,
                        denoising=denoising,
                        encoder_dims=encoder_dims,
                        encoder_act=encoder_act,
                        decoder_dims=decoder_dims,
                        decoder_act=decoder_act,
                        optimizer_name=optimizer_name,
                        debug=True,
                        disp_freq=disp_freq,
                        save_freq=save_freq,
                        device=device)
        cae2.build_model()
        del data2
        gc.collect()
        cae2.fit(data2_file, data2_org_file, datasize)

    train_time_end = time.time()
    dualprint('Training time: %f (seconds)' % (train_time_end - train_time_start), flog)
    flog.close()
    print('Finished.')
    # BUGFIX: the original function ended with dead assignments to
    # model_folder_name, all_str and device here.  The `device` assignment
    # made `device` local to the whole function, so the earlier
    # `device=device` arguments raised UnboundLocalError; the dead
    # assignments were removed to restore the module-level `device` lookup.

# ---------------------------------------------------------------------------
# Script-level setup for the HVAD training run.
# NOTE(review): this fragment reads names that are not defined at module
# scope in the visible file (data_str, cae_list, batch_size,
# model_folder_name) -- it appears to come from a concatenated example and
# would raise NameError at import time; confirm where those globals are set.
# ---------------------------------------------------------------------------
print('Data set: %s' % data_str)
print('cae_list:', cae_list)
print('batch_size:', batch_size)
print('model_folder_name: %s' % model_folder_name)

bshow = 0
bh5py = 1  # 1: read .mat files with h5py (MATLAB v7.3 format), else scipy.io
OF_scale = 0.3  # scale used when normalising optical flow to [0, 1]
resz = [256, 256]  # training resolution (h, w)


dataholder = anom_UCSDholder(data_str, resz)
data_folder = dataholder.data_folder

# ensure the feature cache folder exists
feat_folder = '%s/feat' % (data_folder)
if not os.path.exists(feat_folder):
    os.mkdir(feat_folder)

model_folder = '%s/model' % (data_folder)
MODEL_DIR = model_folder
res_folder = '%s/result' % (data_folder)

# folder of the trained HVAD model checkpoints
hvad_model_folder = '%s/%s' % (model_folder, model_folder_name)

ext = dataholder.ext  # frame-image file extension of the dataset
imsz = dataholder.imsz  # native image size of the dataset
Пример #10
0
def test_compute_recon(params):
    # experiment params
    mode = params.get_value('mode')
    mode_explain = params.get_value('mode_explain')

    layers = params.get_value('layers')
    layers_with_cluster = params.get_value('layers_with_cluster')
    data_str = params.get_value('data_str')
    test_str = params.get_value('test_str')
    gan0_folder_name = params.get_value('gan0_folder_name')
    cae_folder_name = params.get_value('cae_folder_name')

    bh5py = params.get_value('bh5py')

    resz = params.get_value('resz')
    data_range = params.get_value('data_range')

    OF_scale = 0.3
    alpha = 2.0
    dataholder = anom_UCSDholder(data_str, resz)
    data_folder = dataholder.data_folder

    feat_folder = '%s/feat' % (data_folder)
    if not os.path.exists(feat_folder):
        os.mkdir(feat_folder)

    model_folder = '%s/model' % (data_folder)

    if not os.path.exists(model_folder):
        os.mkdir(model_folder)
    res_folder = '%s/result' % (data_folder)
    if not os.path.exists(res_folder):
        os.mkdir(res_folder)

    time_recon_start = time.time()
    cae_id = mode + 1
    for l in range(len(layers)):

        if layers[l] == 0:
            gan_model_folder = '%s/%s/layer%d-usecluster%d-%s-large-v2-reshape' % (
                model_folder, gan0_folder_name, layers[l],
                layers_with_cluster[l], mode_explain[mode])

        else:
            gan_model_folder = '%s/%s/layer%d-usecluster%d-%s-large-v2-reshape' % (
                model_folder, cae_folder_name, layers[l],
                layers_with_cluster[l], mode_explain[mode])

        cae_folder = '%s/%s' % (model_folder, cae_folder_name)
        if layers[l] == 0:
            vis_folder = '%s/%s/recon' % (model_folder, gan0_folder_name)
        else:
            vis_folder = '%s/recon' % (cae_folder)
        recon_Test_folder = '%s/Test' % (vis_folder)
        if os.path.isdir(vis_folder) == False:
            os.makedirs(vis_folder)
        if os.path.isdir(recon_Test_folder) == False:
            os.makedirs(recon_Test_folder)
        flog = open(
            '%s/test_compute_recon_log_mode%d_largev2_reshape.txt' %
            (vis_folder, mode), 'wt')
        dualprint('mode = %d' % mode, flog)

        pix2pix.a.checkpoint = gan_model_folder
        # load a file Test001 to get the num_channel
        if layers[0] == 0:
            num_channel = 3
            feat_sz = resz
        else:

            reshape_info_file = '%s/reshape_info_layer%d_large_v2.dill' % (
                cae_folder, layers[l])
            shape_info = dill.load(open(reshape_info_file, 'rb'))
            print(shape_info)
            feat_sz = [shape_info['resize'][0], shape_info['resize'][1]]
            num_channel = shape_info['resize'][2]

        gan = build_model(gan_model_folder, feat_sz[0], feat_sz[1],
                          num_channel)

        saver = tf.train.Saver(max_to_keep=1)

        sv = tf.train.Supervisor(logdir=None,
                                 save_summaries_secs=0,
                                 saver=None)
        with sv.managed_session() as sess:
            dualprint("loading model from checkpoint %s" % gan_model_folder,
                      flog)
            checkpoint = tf.train.latest_checkpoint(gan_model_folder)

            saver.restore(sess, checkpoint)

            test_list = read_list_from_file('%s/%s.lst' %
                                            (data_folder, test_str))
            data_mean_F, data_std_F, data_scale_F = None, None, None
            data_mean_M, data_std_M, data_scale_M = None, None, None

            for i_s in range(len(test_list)):
                s = test_list[i_s]
                dualprint('[%s]' % s, flog)
                if layers[l] == 0:

                    # load raw 3 contiguous frame data
                    feat_file_format = '%s/%s_resz%sx%s_raw_v3.npz' % (
                        '%s', '%s', resz[0], resz[1])
                    frame_file = feat_file_format % (feat_folder, s)
                    if os.path.isfile(frame_file):
                        dualprint('Loading %s' % s, flog)
                        F = np.load(frame_file)
                        print('F shape', F.shape)

                    else:
                        dualprint('File %s doesn'
                                  't exists' % frame_file, flog)

                    dualprint(
                        'Convert frame and optical flow into [-1.0, 1.0]')
                    # convert frame data in [0.0, 1.0] to [-1.0, 1.0]
                    F = convert01tom1p1(F)
                    F_resz = np.zeros([F.shape[0], resz[0], resz[1]])
                    for i in range(F.shape[0]):
                        F_resz[i, :, :] = cv2.resize(F[i, :, :],
                                                     (resz[1], resz[0]))

                    data_F_s_resz = np.stack((F_resz, F_resz, F_resz), axis=3)

                    OF_file_format = '%s/%s_sz256x256_BroxOF.mat'
                    OF_file = OF_file_format % (feat_folder, s)
                    if os.path.isfile(OF_file):

                        if bh5py == 1:
                            f_h5py = h5py.File(OF_file, 'r')
                            OF = f_h5py['O']
                            OF = np.array(OF).T
                            print(OF.shape)
                        else:
                            mat_data = sio.loadmat(OF_file)
                            OF = mat_data['O']
                        last_OF = OF[-1, :, :, :]
                        last_OF = np.reshape(last_OF, [1, *last_OF.shape])
                        OF = np.concatenate([OF, last_OF], axis=0)

                    else:
                        dualprint('File %s doesn' 't exists' % OF_file, flog)

                    OF = norm_OF_01(OF, scale=OF_scale)
                    OF = convert01tom1p1(OF)

                    print('O shape', OF.shape)
                    print('O min %f max %f' % (OF.min(), OF.max()))
                    #OF_resz = np.zeros([OF.shape[0], resz[0], resz[1], 3])
                    #for i in range(OF.shape[0]):
                    #    OF_resz[i, :, :, :] = cv2.resize(OF[i, :, :, :], (resz[1], resz[0]))

                    data_M_s_resz = OF
                else:
                    """
                    feat_F_file_format = '%s/%s_data_F.npz'
                    F_file = feat_F_file_format % (feat_folder, s)
                    if os.path.isfile(F_file):
                        print('Loading %s' % F_file)
                        data_F_s_resz = np.load(F_file)
                    else:
                        print('File %s doesn''t exists' % F_file)
                        raise ValueError('File %s doesn''t exists' % F_file)

                    feat_M_file_format = '%s/%s_data_M.npz'
                    M_file = feat_M_file_format % (feat_folder, s)
                    if os.path.isfile(M_file):
                        print('Loading %s' % M_file)
                        data_M_s_resz = np.load(M_file)
                    else:
                        print('File %s doesn''t exists' % M_file)
                        raise ValueError('File %s doesn''t exists' % M_file)
                    """
                    feat_F_file_format = '%s/%s_resz%sx%s_cae1_layer%d.npz' % (
                        '%s', '%s', resz[0], resz[1], layers[l])
                    npz_data = load_feat([s], cae_folder, feat_F_file_format)
                    data_F_s = npz_data['feat']

                    feat_M_file_format = '%s/%s_resz%sx%s_cae2_layer%d.npz' % (
                        '%s', '%s', resz[0], resz[1], layers[l])
                    npz_data = load_feat([s], cae_folder, feat_M_file_format)
                    data_M_s = npz_data['feat']

                    if data_mean_F is None:
                        mean_std_file = '%s/mean_std_layer%d_large_v2.dill' % (
                            cae_folder, layers[l])
                        dill_data = dill.load(open(mean_std_file, 'rb'))
                        dualprint('Loading mean-std file: %s' % mean_std_file)
                        data_mean_F = dill_data['cae1_mean']
                        data_std_F = dill_data['cae1_std']
                        data_scale_F = dill_data['cae1_scale']
                        data_mean_M = dill_data['cae2_mean']
                        data_std_M = dill_data['cae2_std']
                        data_scale_M = dill_data['cae2_scale']

                    dualprint('Original F shape: %d x %d' %
                              (data_F_s.shape[1], data_F_s.shape[2]))
                    dualprint('Original M shape: %d x %d' %
                              (data_M_s.shape[1], data_M_s.shape[2]))

                    dualprint(
                        'Normalizing to [%d, %d] using trained mean and standard deviation'
                        % (data_range[0], data_range[1]))

                    data_F_s = np.divide(data_F_s - data_mean_F,
                                         data_std_F + epsilon) * data_scale_F
                    data_F_s = np.minimum(np.maximum(data_F_s, data_range[0]),
                                          data_range[1])

                    data_M_s = np.divide(data_M_s - data_mean_M,
                                         data_std_M + epsilon) * data_scale_M
                    data_M_s = np.minimum(np.maximum(data_M_s, data_range[0]),
                                          data_range[1])
                    # resize to [256, 256]
                    print('Resizing to [%d, %d]' % (resz[0], resz[1]))
                    data_F_s_resz = data_F_s
                    data_M_s_resz = data_M_s
                    #"""

                print('data F shape', data_F_s_resz.shape)
                print('data F min and max [%f, %f]' %
                      (data_F_s_resz.min(), data_F_s_resz.max()))

                print('data M shape', data_M_s_resz.shape)
                print('data M min and max [%f, %f]' %
                      (data_M_s_resz.min(), data_M_s_resz.max()))
                #"""
                if layers[l] > 0:
                    print('reshaping and resizing')
                    data_F_s_resz = process_feat_reshape_resize(
                        data_F_s_resz, resz)
                    data_M_s_resz = process_feat_reshape_resize(
                        data_M_s_resz, resz)
                    print('After reshaping and resizing')
                    print('data F shape', data_F_s_resz.shape)
                    print('data F min and max [%f, %f]' %
                          (data_F_s_resz.min(), data_F_s_resz.max()))

                    print('data M shape', data_M_s_resz.shape)
                    print('data M min and max [%f, %f]' %
                          (data_M_s_resz.min(), data_M_s_resz.max()))
                #"""
                if mode == 0:
                    M_s_recon = reconstruct(data_F_s_resz, data_M_s_resz, gan,
                                            sess)
                    time_recon_end = time.time()
                    M_s_recon_file = '%s/%s_M_recon_layer%d_usecluster%d_large_v2_reshape.npz' % (
                        vis_folder, s, layers[l], layers_with_cluster[l])
                    np.savez(M_s_recon_file, M_recon=M_s_recon)
                    print('Saving %s' % M_s_recon_file)
                elif mode == 1:
                    F_s_recon = reconstruct(data_M_s_resz, data_F_s_resz, gan,
                                            sess)
                    time_recon_end = time.time()
                    F_s_recon_file = '%s/%s_F_recon_layer%d_usecluster%d_large_v2_reshape.npz' % (
                        vis_folder, s, layers[l], layers_with_cluster[l])
                    np.savez(F_s_recon_file, F_recon=F_s_recon)
                    print('Saving %s' % F_s_recon_file)

    #time_recon_end = time.time()
    dualprint('Test time %f (seconds):' % (time_recon_end - time_recon_start),
              flog)

    print('Finished.')
    flog.close()
# Example #11
def test_hvad(params):
    """Build a binary "normal-region" mask volume from enhanced anomaly maps
    and save it as an HDF5 dataset.

    For every test video listed in ``<data_folder>/test1.lst`` this loads the
    enhanced energy map ``Emap_enh`` from
    ``<vis_folder>/Test/<video>_final.npz``, marks pixels *below* ``thresh``
    as 1 (i.e. normal), re-indexes the maps by the frame numbers actually
    present on disk, resizes each frame mask to ``resz`` and appends it to a
    pre-allocated all-ones buffer.  The result is written to
    ``<feat_folder>/data_mask_org.h5`` under dataset name ``'data'``.

    Args:
        params: parameter container with a ``get_value(key)`` accessor; keys
            read here: 'data_str', 'test_str', 'resz', 'thresh',
            'folder_name'.  NOTE(review): 'test_str' and 'thresh' are
            overridden below with hard-coded values ('test1' and 0.80), so
            the corresponding params have no effect.
    """
    # experiment params
    data_str = params.get_value('data_str')
    test_str = params.get_value('test_str')

    resz = params.get_value('resz')
    thresh = params.get_value('thresh')
    folder_name = params.get_value('folder_name')

    # frame_feat = 'conv5' # 'raw'

    dataholder = anom_UCSDholder(data_str, resz)

    data_folder = dataholder.data_folder

    # Ensure the standard output folder layout exists under the data folder.
    feat_folder = '%s/feat' % (data_folder)
    if not os.path.exists(feat_folder):
        os.mkdir(feat_folder)

    model_folder = '%s/model' % (data_folder)
    if not os.path.exists(model_folder):
        os.mkdir(model_folder)
    res_folder = '%s/result' % (data_folder)
    if not os.path.exists(res_folder):
        os.mkdir(res_folder)
    vis_folder = '%s/%s' % (res_folder, folder_name)
    if os.path.isdir(vis_folder) == False:
        os.mkdir(vis_folder)


    imsz = dataholder.imsz
    # Hard-coded overrides of the params values read above.
    test_str='test1'
    test_list = read_list_from_file('%s/%s.lst' % (data_folder, test_str))
    thresh = 0.80
    mask_file = '%s/data_mask_org.h5' % (feat_folder)
    #data_mask = None
    # Number of training frames per dataset; the mask buffer starts as all
    # ones (everything "normal") for those frames, and test-video masks are
    # concatenated after it.
    data_num = 2550
    if data_str == 'UCSDped1':
        data_num = 6800
    elif data_str == 'avenue':
        data_num = 7670
    data_mask = np.ones((data_num, resz[1], resz[0], 1)) #ped1 6800 ped2 2550 avenue 7670
    skip_frame = 1
    for s in test_list:
        # Collect the integer frame indices that actually exist on disk for
        # this video (file names are assumed to be numeric, e.g. 001.tif).
        frm_list = []
        frm_folder = '%s/%s' % (data_folder, s)
        _, frm_ext = os.path.splitext(dataholder.img_format)
        frm_files = glob.glob(frm_folder + '/*' + frm_ext)
        frm_files.sort()
        print(frm_files[0])
        for i in range(len(frm_files)):
            filepath,tempfilename = os.path.split(frm_files[i])
            filename,extension = os.path.splitext(tempfilename)
            frm_list.append(int(filename))
        print('frm_list:', frm_list)
        # List entries look like 'Test/Test001'; keep only the video name.
        _, testname = s.split('/')
        print('Loading %s' % testname)

        npzfiles = np.load('%s/Test/%s_final.npz' % (vis_folder, testname))
        E_map_final  = npzfiles['Emap_enh']
        npzfiles.close()
        # Pixels below the threshold are considered normal (mask value 1).
        mask1 = (E_map_final < thresh).astype(int)
        # Re-order / subset the thresholded maps by the on-disk frame index.
        mask = np.zeros([len(frm_list), mask1.shape[1], mask1.shape[2]])
        for i in range(len(frm_list)):
            mask[i] = mask1[frm_list[i]]
        print('Before skipping frame:', mask.shape)
        if skip_frame > 1:
            mask = mask[::skip_frame, :, :]
            print('Skipping frame:', mask.shape)
        # Resize each frame mask to the target resolution; cv2.resize needs
        # float input, so cast before and re-binarize (int) after.
        mask_resz = np.zeros([mask.shape[0], resz[1], resz[0]])
        mask = mask.astype('float32')
        for i in range(mask.shape[0]):
            mask_resz[i, :, :] = cv2.resize(mask[i, :, :], (resz[1], resz[0]))
        mask = mask_resz.astype(int)

        # Add a trailing channel axis: (frames, H, W) -> (frames, H, W, 1).
        mask = np.expand_dims(mask, axis=3)
        print("mask shape:", mask.shape)
        # data_mask is pre-allocated above, so the concatenate branch runs;
        # the None branch only applies if the '#data_mask = None' line is
        # re-enabled.
        if data_mask is None:
            data_mask = mask
        else:
            data_mask = np.concatenate([data_mask, mask], axis=0)

    # Free the large per-video intermediates before writing the HDF5 file.
    del mask_resz, mask, mask1
    print('data_mask shape', data_mask.shape)

    f = h5py.File(mask_file, 'w')
    f.create_dataset('data', data=data_mask, compression='gzip')
    f.close()
    print('saved to %s' % mask_file)

    del E_map_final
    gc.collect()

    print('Finished.')
# Example #12
def test_hvad(params):
    """Evaluate detected anomalous intervals against ground truth and report
    frame-level / pixel-level true-positive rates.

    For each test video this loads the enhanced energy map ``Emap_enh`` from
    ``<vis_folder>/<video>_final.npz``, derives a per-frame anomaly score
    (mean map value over above-threshold pixels, normalized by the video
    maximum), segments contiguous runs of high-score frames into anomalous
    intervals (runs shorter than ``minframeNum`` are discarded), and compares
    them with the per-frame ground-truth masks.  Results are appended to
    ``<vis_folder>/resfile.txt``; the frames of each detected interval are
    also copied into ``<data_folder>/newdata/<video>/``.

    Args:
        params: parameter container with a ``get_value(key)`` accessor; keys
            read here: 'data_str', 'test_str', 'resz', 'thresh',
            'folder_name'.  NOTE(review): 'resz' and 'thresh' are overridden
            below with hard-coded values ([256, 256] and 0.80).
    """
    # experiment params

    data_str = params.get_value('data_str')
    test_str = params.get_value('test_str')

    resz = params.get_value('resz')
    thresh = params.get_value('thresh')
    folder_name = params.get_value('folder_name')

    # Hard-coded override of the 'resz' param read above.
    resz = [256, 256]

    dataholder = anom_UCSDholder(data_str, resz)

    data_folder = dataholder.data_folder

    # Ensure the standard output folder layout exists under the data folder.
    feat_folder = '%s/feat' % (data_folder)
    if not os.path.exists(feat_folder):
        os.mkdir(feat_folder)

    model_folder = '%s/model' % (data_folder)
    if not os.path.exists(model_folder):
        os.mkdir(model_folder)
    res_folder = '%s/result' % (data_folder)
    if not os.path.exists(res_folder):
        os.mkdir(res_folder)


    imsz = dataholder.imsz

    test_list = read_list_from_file('%s/%s.lst' % (data_folder, test_str))


    vis_folder = '%s/%s' % (res_folder, folder_name)

    if os.path.isdir(vis_folder) == False:
        os.mkdir(vis_folder)

    # NOTE(review): resfile is never closed on an exception path; a `with`
    # block would be safer (left unchanged to keep behavior identical).
    res_filename = '%s/resfile.txt' %(vis_folder)
    resfile= open(res_filename, 'w')

    # Hard-coded evaluation hyper-parameters; 'thresh' from params is
    # overridden here.
    thresh = 0.80
    score_thresh = 0.85
    minframeNum = 10
    tpr_thresh = 0.49
    tpr_mean =[]
    for s in test_list:
        print('Loading %s' % s)
        npzfiles = np.load('%s/%s_final.npz' % (vis_folder, s))
        E_map_final  = npzfiles['Emap_enh']
        npzfiles.close()
        # Binary anomaly mask: pixels at or above the threshold.
        mask = (E_map_final >= thresh).astype(int)

        print('%s: Loading ground-truth' % s)
        frm_folder = '%s/%s_gt' % (data_folder, s)
        _, gt_ext = os.path.splitext(dataholder.gt_format)
        gt_files = glob.glob(frm_folder + '/*' + gt_ext)
        gt_files.sort()
        gt_listi = []
        gt_list = []
        for (j, file) in enumerate(gt_files):
            # Load GT frame as grayscale and scale to {0.0, 1.0}.
            img = cv2.imread(file, 0)
            im_resz = cv2.resize(img, (imsz[1], imsz[0]),
                                    interpolation=cv2.INTER_NEAREST)
            im_resz = im_resz / 255.0
            gt_listi.append(im_resz)
        gt_list.append(gt_listi)
        print('-->%d frames' % len(gt_listi))
        GT = np.concatenate(gt_list, axis=0)
        print("GT shape: ",GT.shape)

        # Per-frame GT statistics: positive pixel count and anomaly presence.
        gt_frame = GT.sum(axis=(1,2))
        gt_frame_bool = gt_frame > 0
        # Intersection of prediction and GT: cells where mask + GT >= 2.
        I = (mask + GT)
        I_intersect = I >= 2
        I_frame = I_intersect.sum(axis=(1, 2))
        mask_frame = mask.sum(axis=(1,2))
        # Per-frame score: energy summed over predicted-anomalous pixels.
        score = E_map_final*mask
        score_frame = score.sum(axis=(1,2))
        flag = 0
        anomaly_start = []
        anomaly_end = []
        E_map_score = []
        # Mean energy over predicted pixels (0 when nothing is predicted).
        for i in range(GT.shape[0]):
            if mask_frame[i] == 0:
                E_map_score.append(0)
            else:
                E_map_score.append(score_frame[i]/mask_frame[i])

        # Normalize scores to [0, 1] by the per-video maximum.
        E_map_score1 = np.array(E_map_score)
        E_map_score1 = E_map_score1 /E_map_score1.max()
        E_map_score = E_map_score1.tolist()
        # Segment contiguous runs of frames with score >= score_thresh into
        # [anomaly_start, anomaly_end] intervals; runs shorter than
        # minframeNum frames are discarded.
        flag = 0
        pxlTPR = 0
        for i in range(GT.shape[0]):
            if E_map_score[i] >= score_thresh and flag == 0:
                anomaly_start.append(i)
                flag = 1
            if E_map_score[i] < score_thresh and flag == 1:
                if i - anomaly_start[-1] < minframeNum:
                    del anomaly_start[-1]
                else:
                    anomaly_end.append(i-1)
                flag = 0

        # Close an interval that is still open at the end of the video.
        if flag == 1:
            if i - anomaly_start[-1] < minframeNum:
                del anomaly_start[-1]
            else:
                anomaly_end.append(i-1)
        if len(anomaly_start) > 0:
            resfile.write(s)
            resfile.write(':\n')
        # For each detected interval, compute the frame-level TPR (fraction
        # of its frames that are truly anomalous) and the per-frame
        # pixel-level TPR (intersection / GT pixel count).
        for i in range(len(anomaly_start)):
            pos = 0
            tpr_pxl = []
            total = anomaly_end[i] - anomaly_start[i] + 1
            for j in range(anomaly_start[i], anomaly_end[i]+1):
                if gt_frame_bool[j] == True:
                    pos += 1
                if I_frame[j]== 0:
                    tpr_pxl.append(0.0)
                else:
                    tpr_pxl.append(float('%.2f' % (I_frame[j] * 1.0 / gt_frame[j])))
            tpr_frame = float('%.2f' % (pos / total))
            tpr_pxl_array = np.array(tpr_pxl)
            tpr_mean.append(tpr_pxl_array.mean())
            resfile.write('frame:'+str(anomaly_start[i])+'-'+str(anomaly_end[i])+'(tpr_pxl_mean:'+str(float('%.2f' %tpr_pxl_array.mean()))+')')
            resfile.write('\n')
            resfile.write('tpr_frame:'+str(tpr_frame))
            resfile.write('\n')
            resfile.write('tpr_pxl:'+str(tpr_pxl))
            resfile.write('\n')

        # Optionally copy the frames of every detected interval into
        # <data_folder>/newdata/<video>/ for later inspection or retraining.
        copy = 1
        if copy == 1:
            if len(anomaly_start) > 0:
                print('%s: copy files' % s)
                frm_folder = '%s/%s' % (data_folder, s)
                _, frm_ext = os.path.splitext(dataholder.img_format)
                frm_files = glob.glob(frm_folder + '/*' + frm_ext)
                frm_files.sort()
                target_frmfolder = '%s/newdata/%s' % (data_folder, s)
                if not os.path.exists(target_frmfolder):
                    os.makedirs(target_frmfolder)
                for i in range(len(anomaly_start)):
                    for j in range(anomaly_start[i], anomaly_end[i]+1):
                        target_frmfile = '%s/%03d%s' % (target_frmfolder, j, frm_ext) #avenue 04d  uscd 03d
                        copyfile(frm_files[j], target_frmfile)


    # Overall mean pixel-level TPR across all detected intervals.
    tpr_mean_array = np.array(tpr_mean)
    resfile.write('tpr_mean:'+str(float('%.4f' %tpr_mean_array.mean())))
    print(float('%.4f' %tpr_mean_array.mean()))
    del E_map_final
    gc.collect()
    resfile.close()

    print('Finished.')
# Example #13
def test_hvad(params):
    """Plot per-frame anomaly-score curves against ground-truth intervals and
    save one figure per test video.

    For each video this loads the enhanced energy map ``Emap_enh`` from
    ``<vis_folder>/<video>_final.npz``, derives a per-frame anomaly score
    (mean energy over above-threshold pixels, normalized by the video
    maximum), finds the ground-truth anomalous intervals, and saves a plot of
    score vs. frame (with GT intervals as shaded bars) to
    ``<vis_folder>/graph/<video>.jpg``.  When ``use_thresh`` is 1, an
    adaptive pixel threshold and score threshold are re-estimated from the
    low-score ("normal") segments of the same video before plotting.

    Args:
        params: parameter container with a ``get_value(key)`` accessor; many
            keys are read but only 'data_str', 'test_str', 'use_thresh',
            'folder_name' and a few others are used below.  NOTE(review):
            'resz' and 'thresh' are overridden with hard-coded values
            ([256, 256] and 0.8).
    """
    # experiment params

    cae_folder_name = params.get_value('cae_folder_name')
    gan_layer0_folder_name = params.get_value('gan_layer0_folder_name')
    data_str = params.get_value('data_str')
    test_str = params.get_value('test_str')
    bshow = params.get_value('bshow')
    bsave = params.get_value('bsave')
    use_thresh = params.get_value('use_thresh')
    bh5py = params.get_value('bh5py')

    resz = params.get_value('resz')
    thresh = params.get_value('thresh')
    fr_rate_2obj = params.get_value('fr_rate_2obj')
    frame_step = params.get_value('frame_step')
    min_size = params.get_value('min_size')
    data_range = params.get_value('data_range')
    pause_time = params.get_value('pause_time')
    longevity = params.get_value('longevity')
    folder_name = params.get_value('folder_name')
    layer_ids = params.get_value('layer_ids')

    scale = 0.3
    alpha = 2.0
    # alpha = 1.0
    # Hard-coded override of the 'resz' param read above.
    resz = [256, 256]

    # frame_feat = 'conv5' # 'raw'

    dataholder = anom_UCSDholder(data_str, resz)

    data_folder = dataholder.data_folder

    # Ensure the standard output folder layout exists under the data folder.
    feat_folder = '%s/feat' % (data_folder)
    if not os.path.exists(feat_folder):
        os.mkdir(feat_folder)

    model_folder = '%s/model' % (data_folder)
    if not os.path.exists(model_folder):
        os.mkdir(model_folder)
    res_folder = '%s/result' % (data_folder)
    if not os.path.exists(res_folder):
        os.mkdir(res_folder)

    imsz = dataholder.imsz

    test_list = read_list_from_file('%s/%s.lst' % (data_folder, test_str))

    vis_folder = '%s/%s' % (res_folder, folder_name)

    if os.path.isdir(vis_folder) == False:
        os.mkdir(vis_folder)

    # Hard-coded evaluation hyper-parameters; 'thresh' from params is
    # overridden here.
    thresh = 0.8
    score_thresh = 0.8
    minframeNum = 5
    for s in test_list:
        print('Loading %s' % s)
        npzfiles = np.load('%s/%s_final.npz' % (vis_folder, s))
        E_map_final = npzfiles['Emap_enh']
        npzfiles.close()
        # Binary anomaly mask: pixels at or above the threshold.
        mask = (E_map_final >= thresh).astype(int)

        print('%s: Loading ground-truth' % s)
        frm_folder = '%s/%s_gt' % (data_folder, s)
        _, gt_ext = os.path.splitext(dataholder.gt_format)
        gt_files = glob.glob(frm_folder + '/*' + gt_ext)
        gt_files.sort()
        gt_listi = []
        gt_list = []
        for (j, file) in enumerate(gt_files):
            # Load GT frame as grayscale and scale to {0.0, 1.0}.
            img = cv2.imread(file, 0)
            im_resz = cv2.resize(img, (imsz[1], imsz[0]),
                                 interpolation=cv2.INTER_NEAREST)
            im_resz = im_resz / 255.0
            gt_listi.append(im_resz)
        gt_list.append(gt_listi)
        print('-->%d frames' % len(gt_listi))
        GT = np.concatenate(gt_list, axis=0)
        print("GT shape: ", GT.shape)

        # Per-frame GT statistics and per-frame prediction score sums.
        gt_frame = GT.sum(axis=(1, 2))
        gt_frame_bool = gt_frame > 0
        score = E_map_final * mask
        mask_frame = mask.sum(axis=(1, 2))
        score_frame = score.sum(axis=(1, 2))
        E_map_score = []
        if use_thresh == 1:
            # Adaptive mode: find low-score ("normal") segments and use their
            # statistics to re-estimate both thresholds for this video.
            normal_start = []
            normal_end = []
            for i in range(GT.shape[0]):
                #score = (E_map_final[i] - E_map_final[i].min()) / E_map_final[i].max()
                if mask_frame[i] == 0:
                    E_map_score.append(0)
                else:
                    E_map_score.append(score_frame[i] / mask_frame[i])

            E_map_score1 = np.array(E_map_score)
            #E_map_score1 = (E_map_score1 - E_map_score1.min())/E_map_score1.max()
            E_map_score1 = E_map_score1 / E_map_score1.max()
            E_map_score = E_map_score1.tolist()

            # Segment contiguous runs of frames with score <= score_thresh
            # into normal intervals; runs shorter than minframeNum frames
            # are discarded.
            flag = 0
            for i in range(GT.shape[0]):
                if E_map_score[i] <= score_thresh and flag == 0:
                    normal_start.append(i)
                    flag = 1
                if E_map_score[i] > score_thresh and flag == 1:
                    if i - normal_start[-1] < minframeNum:
                        del normal_start[-1]
                    else:
                        normal_end.append(i - 1)
                    flag = 0

            # Close an interval that is still open at the end of the video.
            if flag == 1:
                if i - normal_start[-1] < minframeNum:
                    del normal_start[-1]
                else:
                    normal_end.append(i - 1)
            # Stack the energy maps of all normal intervals.
            normal_data = None
            for i in range(len(normal_start)):
                data = E_map_final[normal_start[i]:normal_end[i] + 1]
                if normal_data is None:
                    normal_data = data
                else:
                    normal_data = np.concatenate([normal_data, data], axis=0)
            if normal_data is not None:
                # Re-estimate: new pixel threshold = minimum per-frame mean
                # energy over the normal segments; new score threshold =
                # mean of those (max-normalized) per-frame scores.
                mask1 = (normal_data >= 0.02).astype(int)
                score1 = normal_data * mask1
                mask1_frame = mask1.sum(axis=(1, 2))
                score1_frame = score1.sum(axis=(1, 2))
                #normal_data = normal_data/ normal_data.max()
                #new_thresh = normal_data.mean()
                normal_score = []
                for i in range(normal_data.shape[0]):
                    if mask1_frame[i] == 0:
                        normal_score.append(0)
                    else:
                        normal_score.append(score1_frame[i] / mask1_frame[i])
                normal_score1 = np.array(normal_score)
                new_thresh = normal_score1.min()
                normal_score1 = normal_score1 / normal_score1.max()
                score_thresh1 = normal_score1.mean()
            else:
                new_thresh = 0
                score_thresh1 = 0
            new_thresh = float('%.2f' % new_thresh)
            score_thresh1 = float('%.2f' % score_thresh1)
            # Re-threshold the energy maps with the adaptive threshold.
            mask = (E_map_final >= new_thresh).astype(int)
            score = E_map_final * mask
            mask_frame = mask.sum(axis=(1, 2))
            score_frame = score.sum(axis=(1, 2))

        # Compute the final per-frame score and, in the same pass, the
        # ground-truth anomalous intervals [anomaly_start, anomaly_end].
        flag = 0
        anomaly_start = []
        anomaly_end = []
        E_map_score = []
        for i in range(GT.shape[0]):
            #score = (E_map_final[i] - E_map_final[i].min()) / E_map_final[i].max()
            #b = np.nonzero(mask[i])
            #num = np.array(b).ndim
            #E_map_score.append(score.mean())
            if mask_frame[i] == 0:
                E_map_score.append(0)
            else:
                E_map_score.append(score_frame[i] / mask_frame[i])
            if gt_frame_bool[i] == True and flag == 0:
                anomaly_start.append(i)
                flag = 1
            if gt_frame_bool[i] == False and flag == 1:
                anomaly_end.append(i)
                flag = 0
        if flag == 1:
            anomaly_end.append(i)
        print("anomaly_start: ", anomaly_start)
        print("anomaly_end: ", anomaly_end)
        # Convert GT intervals into bar centers/widths for plt.bar.
        bar_pos = []
        bar_width = []
        for i in range(len(anomaly_start)):
            bar_pos.append((anomaly_start[i] + anomaly_end[i]) / 2)
            bar_width.append(anomaly_end[i] - anomaly_start[i])
        print("bar_pos: ", bar_pos)
        print("bar_width: ", bar_width)

        # Normalize scores to [0, 1] by the per-video maximum.
        E_map_score1 = np.array(E_map_score)
        #E_map_score1 = (E_map_score1 - E_map_score1.min())/E_map_score1.max()
        E_map_score1 = E_map_score1 / E_map_score1.max()
        E_map_score = E_map_score1.tolist()
        x = np.arange(GT.shape[0])
        plt.ion()
        if use_thresh == 1:
            #score_thresh1 = score_thresh1*2.5*(1-score_thresh1)  #ped1 2.5  ped2 5.5
            # Empirical rescaling of the adaptive score threshold.
            score_thresh1 = 2 * (1 - score_thresh1) * score_thresh1
            plt.xlabel("Frame   thresh=%.2f   score_thresh=%.2f" %
                       (new_thresh, score_thresh1))
        else:
            plt.xlabel("Frame   thresh=%.2f" % thresh)
        plt.ylabel("Anomaly score")
        plt.plot(x, E_map_score)
        if use_thresh == 1:
            plt.axhline(score_thresh1,
                        0,
                        GT.shape[0],
                        color="red",
                        linestyle='--')
        # Shaded bars mark the ground-truth anomalous intervals.
        plt.bar(bar_pos, 1, bar_width, facecolor='#9999ff', edgecolor='white')
        plt.ylim((0, 1))
        #video_vis_folder = '%s/%s' % (vis_folder, s)
        #if os.path.isdir(video_vis_folder) == False:
        #    os.mkdir(video_vis_folder)
        #fig_file = '%s/anomalyOutput.jpg' % (video_vis_folder)
        # NOTE(review): assumes '<vis_folder>/graph' already exists; savefig
        # will fail otherwise — confirm against the calling pipeline.
        fig_file = '%s/graph/%s.jpg' % (vis_folder, s)
        plt.savefig(fig_file)
        #plt.show()
        plt.pause(3)
        plt.close()

    del E_map_final
    gc.collect()

    print('Finished.')