Example No. 1
def main(argv):

    # Parse arguments
    args = parse_args(argv)
    set_verbose_level(args.verbose)

    print_verbose("Args: %s" % str(args), 1)

    # Adapted from
    # https://nbviewer.jupyter.org/github/BVLC/caffe/blob/master/examples/00-classification.ipynb
    # Disable array truncation when printing; newer NumPy versions require
    # threshold=sys.maxsize here instead of threshold=np.nan
    np.set_printoptions(threshold=np.nan)

    caffe.set_mode_cpu()
    net = caffe.Net(args.proto, args.model, caffe.TEST)

    # input preprocessing: 'data' is the name of the input blob == net.inputs[0]
    transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
    transformer.set_transpose('data', (2, 0, 1))
    transformer.set_mean('data', VGG_MEAN_PIXEL)  # mean pixel
    # the reference model operates on images in the [0, 255] range instead of [0, 1]
    transformer.set_raw_scale('data', 255)
    # the reference model has channels in BGR order instead of RGB
    transformer.set_channel_swap('data', (2, 1, 0))

    # Read image names
    with open(args.list) as f:
        allnames = f.read().splitlines()

    for sub in xrange(0, len(allnames), CAFFE_BATCH_SIZE):
        fnames = allnames[sub:sub + CAFFE_BATCH_SIZE]

        # Reshape input data
        # Reshape the input blob to the size of the current batch
        print net.blobs['data'].data.shape
        net.blobs['data'].reshape(len(fnames), *net.blobs['data'].data.shape[1:])
        print net.blobs['data'].data.shape

        # Preprocess images
        for idx, fname in enumerate(fnames):
            fpath = os.path.join(args.input, fname)
            print "Processing image %s ..." % fpath
            img = transformer.preprocess('data', caffe.io.load_image(fpath))
            net.blobs['data'].data[idx] = img

        # Extract features
        print "Extracting features ..."
        out = net.forward()

        # Write extracted features
        for idx, fname in enumerate(fnames):
            path = os.path.join(args.output, os.path.dirname(fname))
            if not os.path.exists(path):
                os.makedirs(path)
            fpath = os.path.join(args.output, fname + ".feat")
            print "Writing features to %s ..." % fpath
            np.savetxt(fpath, net.blobs['fc7'].data[idx])

    print "Done!"
Example No. 2
def main(argv):

    # Parse arguments
    args = parse_args(argv)
    set_verbose_level(args.verbose)
    set_n_cores(args.cores)

    print_verbose("Args: %s" % str(args), 1)

    # Prepare data
    data, labels, classes = gen_data(args.dir)
    print_verbose('Data: %s' % str(data), 5)
    print_verbose('Labels: %s' % str(labels), 4)
    print_verbose('Classes: %s' % str(classes), 4)

    print_verbose('Data shape: %s' % str(data.shape), 2)
    print_verbose('Labels shape: %s' % str(labels.shape), 2)
    print_verbose('Classes shape: %s' % str(classes.shape), 2)

    print_verbose('Data bytes: %s' % str(data.nbytes), 2)

    # Calculate distances
    dist = calc_dist(args.model, data)

    # Generate score model
    model = generate_model(dist, classes, args)
    print_verbose('Model: %s' % str(model), 0)

    # Export
    print_verbose('Saving model to %s' % args.score, 0)
    with open(args.score, "wb") as f:
        pickle.dump(model, f)

    print_verbose('Done!', 0)
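The score model written by this example can be read back later with the matching pickle call. A minimal sketch, assuming the same file that was passed as args.score (the path below is illustrative):

import pickle

# Illustrative path: reuse whatever file was written as args.score above.
with open('model.score', 'rb') as f:
    model = pickle.load(f)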
Example No. 3
def main(argv):

    # Parse arguments
    args = parse_args(argv)
    set_verbose_level(args.verbose)

    print_verbose("Args: %s" % str(args), 1)

    # Resize images
    resize_from_csv(args.csv, args.original, args.resized, args.density)
Example No. 4
def main(argv):

    # Parse arguments
    args = parse_args(argv)
    set_verbose_level(args.verbose)

    print_verbose("Args: %s" % str(args), 1)

    # Download images
    download_from_csv(args.csv, args.directory)
Example No. 5
def main(argv):

    # Parse arguments
    args = parse_args(argv)
    set_verbose_level(args.verbose)

    print_verbose("Args: %s" % str(args), 1)

    # Crawl URL
    crawl(args.url)
Example No. 6
def main(argv):

    # Parse arguments
    args = parse_args(argv)
    set_verbose_level(args.verbose)
    set_n_cores(args.cores)

    print_verbose("Args: %s" % str(args), 1)

    # Extract patches
    patch_extract(args.image, args.window, args.step, args.dir)
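patch_extract is defined elsewhere in the project. As a rough illustration of the sliding-window idea behind it, the sketch below yields square patches of side window every step pixels from a NumPy image array; the real helper presumably also writes the patches to args.dir and may differ in details.

import numpy as np

def sliding_window_patches(image, window, step):
    # Yield square patches of side `window`, sliding `step` pixels at a time.
    height, width = image.shape[:2]
    for y in range(0, height - window + 1, step):
        for x in range(0, width - window + 1, step):
            yield image[y:y + window, x:x + window]

# Example: 96x96 patches every 32 pixels from a dummy grayscale image.
patches = list(sliding_window_patches(np.zeros((256, 256)), 96, 32))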
Example No. 7
def main(argv):

    # Parse arguments
    args = parse_args(argv)
    set_verbose_level(args.verbose)
    set_n_cores(args.cores)

    print_verbose("Args: %s" % str(args), 1)

    data, labels, classes = gen_data(args.dir)

    print_verbose('Data: %s' % str(data), 5)
    print_verbose('Labels: %s' % str(labels), 4)
    print_verbose('Classes: %s' % str(classes), 4)

    print_verbose('Data shape: %s' % str(data.shape), 2)
    print_verbose('Labels shape: %s' % str(labels.shape), 2)
    print_verbose('Classes shape: %s' % str(classes.shape), 2)
Example No. 8
def main(argv):

    # Parse arguments
    args = parse_args(argv)
    set_verbose_level(args.verbose)

    print_verbose("Args: %s" % str(args), 1)

    # Crawl URL
    result = crawl(args.url)

    # Extract data
    raw_data = extract_data(result)

    # Sort (optional)
    raw_data.sort()

    # Save to CSV
    gen_csv(args.csv, raw_data)
Example No. 9
def main(argv):

    # Parse arguments
    args = parse_args(argv)
    set_verbose_level(args.verbose)
    set_n_cores(args.cores)

    print_verbose("Args: %s" % str(args), 1)

    # Some tests
    data, labels = gen_data(args.dir, False)

    print_verbose('Data: %s' % str(data), 5)
    print_verbose('Labels: %s' % str(labels), 4)

    print_verbose('Data shape: %s' % str(data.shape), 2)
    print_verbose('Labels shape: %s' % str(labels.shape), 2)

    classification = classify(data, labels, args)
    print_verbose('Final classification: %s' % str(classification), 0)

    # Evaluate performance
    if args.gtruth:
        eval_perf(classification)