def main(pathfeats1,
         pathfeats2,
         dataset,
         output=None,
         mean=False,
         different=False):
    """
    Merge two feature files into a single file.

    Parameters:
    -----------
    pathfeats1: string
        path to the first file of features
    pathfeats2: string
        path to the second file of features
    dataset: string
        name of the dataset (validated against the configuration)
    output: string
        folder or file to save the merged features; when not passed,
        `merged_feats.txt` is created next to `pathfeats1`
    mean: bool
        forwarded to the merge functions
    different: bool
        merge files that contain different paths instead of equal ones
    """
    pathfeats1 = fh.is_file(pathfeats1)
    pathfeats2 = fh.is_file(pathfeats2)
    dataset = fc.Configuration().has_dataset(dataset)
    if output:
        if fh.is_folder(output, boolean=True):
            fileout = join(output, 'merged_feats.txt')
        else:
            # BUG FIX: original used the undefined name `fileoutput`,
            # raising NameError whenever `output` was an existing file.
            fileout = fh.is_file(output)
    else:
        dirin = dirname(pathfeats1)
        fileout = join(dirin, 'merged_feats.txt')
    if different:
        fh.merge_features_different_files(pathfeats1,
                                          pathfeats2,
                                          fileout,
                                          dataset,
                                          mean=mean)
    else:
        fh.merge_features_equal_files(pathfeats1,
                                      pathfeats2,
                                      fileout,
                                      mean=mean)
def grid_svm(trainfile,
             valfile,
             outputdir,
             kernel='rbf',
             gamma_min=2e-15,
             gamma_max=2e3,
             gamma_step=1e2,
             c_min=2e-5,
             c_max=2e15,
             c_step=1e2):
    """
    Perform a grid search of parameters in SVM, saving one prediction
    file per (C, gamma) pair in `outputdir`.

    Parameters:
    -----------
    trainfile: string
        path to the file containing training features
    valfile: string
        path to the file containing validation features
    outputdir: string
        path to the folder to save tests
    kernel: string
        type of kernel (scikit names)
    gamma_min: float
        minimum value of gamma
    gamma_max: float
        maximum value of gamma
    gamma_step: float
        step of increasing gamma
    c_min: float
        minimum value of C
    c_max: float
        maximum value of C
    c_step: float
        step of increasing value of C
    """
    trainfile = fh.is_file(trainfile)
    _, X_train, y_train = fh.load_features(trainfile)
    valfile = fh.is_file(valfile)
    vpaths, X_val, y_val = fh.load_features(valfile)
    outputdir = fh.is_folder(outputdir)

    vgamma = create_range(gamma_min, gamma_max, gamma_step)
    vc = create_range(c_min, c_max, c_step)
    # NOTE: the original also built an unused `svm.SVC(kernel=kernel,
    # verbose=True)` here; removed since each (C, gamma) pair builds
    # its own classifier below.
    for c in vc:
        for g in vgamma:
            logger.info('Running C: %E :: Gamma: %E' % (c, g))
            clf = svm.SVC(kernel=kernel, C=c, gamma=g)
            clf.fit(X_train, y_train)
            pred = clf.predict(X_val)

            fileout = join(outputdir,
                           str(kernel) + '_' + str(c) + '_' + str(g) + '.txt')
            logger.info('saving output file in: %s' % fileout)
            # one line per validation sample: <path> <true label> <prediction>
            with open(fileout, 'w') as fout:
                for path, y, p in zip(vpaths, y_val, pred):
                    fout.write('%s %d %d\n' % (path, y, p))
    logger.info('Finished!')
def main(fileground, filefeatures, output):
    """
    Rewrite the paths of a features file using a ground-truth file.

    When `output` is empty, the features file is updated in place via a
    temporary file that is renamed over the original.
    """
    fileground = fh.is_file(fileground)
    filefeatures = fh.is_file(filefeatures)
    fileout = output if output else join(dirname(filefeatures), 'tmp.txt')
    fh.change_paths(fileground, filefeatures, fileout)

    if not output:
        # no explicit destination: replace the original features file
        os.remove(filefeatures)
        os.rename(fileout, filefeatures)
def resize_pathfile(inputfile, outputfolder, dataset, size):
    """
    Receives the path of a file and resizes all images listed in this
    file to size=`size`, writing a new path file with the resized paths.

    Parameters:
    -----------
    inputfile : string
        path to the input file containing multiple image paths
    outputfolder : string
        path to the output folder
    dataset : string
        name of the dataset (forwarded to `filehandler.ImagePaths`)
    size : int
        new size of the image (images become `size` x `size`)
    """
    inputfile = filehandler.is_file(inputfile)
    outputfolder = filehandler.is_folder(outputfolder)
    fname = filehandler.add_text2path(inputfile, size, withfolder=False)

    logger.info('resizing images to: %dx%d' % (size, size))
    logger.info('saving output file at: %s' % join(outputfolder, fname))

    pf = filehandler.ImagePaths(inputfile, dataset)
    # BUG FIX: the output file handle was never closed in the original;
    # a context manager guarantees it is flushed and closed.
    with open(join(outputfolder, fname), 'w') as fout:
        for impath, label in pf:
            _, fimg = pf.extract_root()
            outpath = join(outputfolder, fimg)

            # mirror the dataset's folder structure under `outputfolder`
            imfolder = dirname(outpath)
            if not exists(imfolder):
                os.makedirs(imfolder)

            resize_file(impath, outpath, size)
            fout.write('%s %s\n' % (outpath, str(label)))
# Esempio n. 5
# 0
def main(frame1, frame2, output=None, channels=False):
    """
    Compute the optical flow between two frames and save it as image(s).

    Parameters:
    -----------
    frame1 : string
        path to the first frame
    frame2 : string
        path to the second frame
    output : string
        folder to save the result; defaults to the folder of `frame1`
    channels : bool
        when True, save the x and y flow components as separate images
    """
    # validate that both frames exist (is_file raises/handles otherwise)
    fh.is_file(frame1)
    fh.is_file(frame2)
    # BUG FIX: the original assigned `dirout` in the `output` branch but
    # later used `dirin`, raising NameError whenever `output` was given.
    if output:
        dirin = fh.is_folder(output)
    else:
        dirin = dirname(frame1)

    flow = of.optical_flow(frame1, frame2, channels=channels)
    if channels:
        outflowX = join(dirin, 'optflow_x.jpg')
        outflowY = join(dirin, 'optflow_y.jpg')
        cv2.imwrite(outflowX, flow[0])
        cv2.imwrite(outflowY, flow[1])
    else:
        output = join(dirin, 'optflow.jpg')
        cv2.imwrite(output, flow)
def main(fileinput, output=None, dataset="PENN"):
    fileinput = fh.is_file(fileinput)
    if output:
        dirout = fh.is_folder(output)
    else:
        dirin = dirname(fileinput)
        dirout = join(dirin, 'JPG')
        if not isdir(dirout):
            os.makedirs(dirout)
    cvr.convert_files(fileinput, dirout, dataset, to='jpg')
def main(fileinput, dirout, output=None, window=2, dataset="PENN"):
    fileinput = fh.is_file(fileinput)
    dirout = fh.is_folder(dirout)
    if output:
        fileout = output
    else:
        dirin = dirname(fileinput)
        fileout = join(dirout, 'bronx_paths.txt')

    utils.bronx_file(fileinput, dirout, fileout, dataset, window=window)
def calculate_from_file(inputfile, by_pixel=True, channels='RGB', output=None):
    """
    Calculate the mean of the images listed in `inputfile`.

    Parameters:
    -----------
    inputfile : string
        path to the file containing image paths
    by_pixel : bool
        when True, compute the mean per pixel; otherwise per channel
    channels : string
        channel mode forwarded to `mean_channel` (e.g. 'RGB')
    output : string
        output folder; defaults to the folder of `inputfile`
    """
    inputfile = fh.is_file(inputfile)
    if output:
        outfolder = fh.is_folder(output)
    else:
        outfolder = dirname(inputfile)
    # NOTE: original computed `join(outfolder, 'mean')` into an unused
    # local; removed as dead code. `mean_channel` receives no output
    # folder in the original — presumably it writes elsewhere; verify.
    if by_pixel:
        mean_pixel(inputfile, outfolder)
    else:
        mean_channel(inputfile, mode=channels)
def main(filetrain, fileval, output, kernel, gamma_min, gamma_max, gamma_step,
         c_min, c_max, c_step):
    """
    Run an SVM grid search over (C, gamma) on training/validation files.

    When `output` is empty, results go to a `GridSVM` folder created
    next to `filetrain`.
    """
    filetrain = fh.is_file(filetrain)
    fileval = fh.is_file(fileval)
    if output:
        dirout = fh.is_folder(output)
    else:
        dirout = join(dirname(filetrain), 'GridSVM')
        if not isdir(dirout):
            os.makedirs(dirout)
    clr.grid_svm(filetrain, fileval, dirout,
                 kernel=kernel,
                 gamma_min=gamma_min, gamma_max=gamma_max,
                 gamma_step=gamma_step,
                 c_min=c_min, c_max=c_max, c_step=c_step)
# Esempio n. 10
# 0
def main(inputfile, by_pixel=False, channels='RGB', output=None):
    """
    Compute the mean of the images listed in `inputfile`.

    When `output` is empty, a `mean` folder is created next to
    `inputfile` and used as destination.
    """
    inputfile = filehandler.is_file(inputfile)
    if output:
        dirout = filehandler.is_folder(output)
    else:
        dirout = join(dirname(inputfile), 'mean')
        if not exists(dirout):
            os.makedirs(dirout)
    rgbmean.calculate_from_file(inputfile,
                                by_pixel=by_pixel,
                                channels=channels,
                                output=dirout)
def main(input_1,
         dataset_1,
         input_2,
         dataset_2,
         input_3,
         dataset_3,
         output=None,
         values=False):
    """
    Plot the confusion matrices of three result files side by side.

    When `output` is empty, the plot is saved as `cm.eps` next to
    `input_1`; otherwise the extension of `output` selects the format.
    """
    input_1 = filehandler.is_file(input_1)
    input_2 = filehandler.is_file(input_2)
    input_3 = filehandler.is_file(input_3)

    if output:
        fname = output
        _, ext = filehandler.filename(output, extension=True)
        ext = ext.replace('.', '')
    else:
        fname = join(dirname(input_1), 'cm.eps')
        ext = 'eps'

    mats = []
    vec_labels = []
    # build one confusion matrix per (dataset, file) pair
    for ds, path in ((dataset_1, input_1),
                     (dataset_2, input_2),
                     (dataset_3, input_3)):
        cm = plots.ConfusionMatrix(ds, inputfile=path)
        mats.append(cm._genConfusionMatrix())
        vec_labels.append(cm.labels)

    plots.save_multiple_plots(fname,
                              mats,
                              vec_labels,
                              title=[dataset_1, dataset_2, dataset_3],
                              cmap=plt.cm.Blues,
                              type=ext,
                              show_values=values)
# Esempio n. 12
# 0
def create_pathfile(inputfolder):
    """
    Create 3 files: `paths.txt`, `train.txt` and `test.txt`.
    `paths.txt` contains the paths and true label of all images in the
    dataset; `train.txt` and `test.txt` contain paths and true labels
    separately.

    Parameters:
    -----------
    inputfolder: string
        path to the root folder of the dataset
    """
    # load configuration of the dataset
    conf = fc.Configuration()
    dlabels = conf.id_label("PENN")

    imgdir = join(inputfolder, 'frames')
    lbldir = join(inputfolder, 'labels')
    paths = join(inputfolder, 'paths.txt')
    train = join(inputfolder, 'train.txt')
    test = join(inputfolder, 'test.txt')

    with open(paths, 'w') as fpaths, \
         open(train, 'w') as ftrain, \
         open(test,  'w') as ftest:

        for root, dirs, files in sorted(os.walk(imgdir, topdown=False)):
            current = basename(root)
            if current and current != 'frames':
                labels = join(lbldir, current + '.mat')
                labels = fh.is_file(labels)
                mat = loadmat(labels)
                # BUG FIX: the original reused `train` (the train.txt
                # path) for this flag, clobbering the outer variable.
                is_train = mat['train'][0][0]
                action = mat['action'][0]
                idact = dlabels[action]
                for fname in sorted(files):
                    path = join(root, fname)
                    if is_train == 1:
                        ftrain.write('%s %d\n' % (path, idact))
                    else:
                        ftest.write('%s %d\n' % (path, idact))
                    fpaths.write('%s %d\n' % (path, idact))
# Esempio n. 13
# 0
def main(inputfile, output=None, window=1, channels=False):
    """
    Compute the optical flow between pairs of frames listed in
    `inputfile` and save the results as JPG images.

    Parameters:
    -----------
    inputfile : string
        file containing paths to the frames
    output : string
        folder to save the flow images; defaults to the folder of
        `inputfile`
    window : int
        distance between the frames of each pair
    channels : bool
        when True, save the x and y flow components separately
    """
    inputfile = fh.is_file(inputfile)
    # BUG FIX: the original assigned `dirout` in the `output` branch but
    # used `dirin` below, raising NameError whenever `output` was given.
    if output:
        dirin = fh.is_folder(output)
    else:
        dirin = dirname(inputfile)

    # generate pairs of images to the optical flow
    dic = fh.imgpath2dic(inputfile)
    seqs = fh.pairs_of_paths(sorted(dic.keys()), window)

    # create optical flow for each pair
    for id1, id2 in seqs:
        flow = of.optical_flow(dic[id1], dic[id2], channels=channels)
        if channels:
            outflowX = join(dirin, str(id1)+'-'+str(id2)+'_x.jpg')
            outflowY = join(dirin, str(id1)+'-'+str(id2)+'_y.jpg')
            cv2.imwrite(outflowX, flow[0])
            cv2.imwrite(outflowY, flow[1])
        else:
            output = join(dirin, str(id1)+'-'+str(id2)+'.jpg')
            cv2.imwrite(output, flow)
# Esempio n. 14
# 0
def main(datainput, imsize, output=None):
    """
    Prepare a resized copy of the dataset.

    Parameters:
    -----------
    datainput : string
        path to the root folder or file containing the dataset
    imsize : int
        size of the new images
    output : string
        path to the folder where the new dataset will be saved
    """
    # renamed from `input`, which shadowed the builtin of the same name
    rootfolder = fh.is_folder(datainput, boolean=True)
    if rootfolder:
        # create a pathfile to the dataset
        pennaction.create_pathfile(rootfolder)
    else:
        inputfile = fh.is_file(datainput, boolean=True)
        if output:
            dirout = fh.is_folder(output)
        else:
            dirout = join(dirname(inputfile), str(imsize))
            if not exists(dirout):
                os.makedirs(dirout)
        # NOTE(review): `dirout` is prepared but never consumed in the
        # visible code — the snippet looks truncated; confirm upstream.
# Esempio n. 15
# 0
def extract(inputfile, dataset, output=None):
    """
    Extract frames corresponding to videos in ``inputfile``

    Parameters
    ----------
    inputfile : string
        file containing paths and true labels
    dataset : string (dogs|kitchen|ucf11)
        name of the dataset
    output : string
        folder to save output files

    Output
    -------
        save files containing the name of the action (from the path) and its
        respective frames. Create also a file named ``videos.txt`` containing
        a list of all generated files.
    """
    inputfile = filehandler.is_file(inputfile)
    if output:
        dirout = filehandler.is_folder(output)
    else:
        dirout = dirname(inputfile)

    fh = filehandler.Videos(inputfile, dataset)
    dvideos = fh.extract_videos()
    # BUG FIX: `videos.txt` was opened and closed manually; the handle
    # leaked if any per-video write raised. A context manager guarantees
    # it is closed on every path.
    with open(join(dirout, 'videos.txt'), 'w') as fvideos:
        for video in dvideos:
            fname = video + '.txt'
            vname = join(dirout, fname)
            fvideos.write('%s\n' % vname)
            logger.info('Creating file: %s' % fname)
            with open(vname, 'w') as fout:
                for path, y in sorted(dvideos[video]):
                    fout.write('%s %s\n' % (path, y))