Example #1
    def _read_dicom_image(self, filename):
        # Read the DICOM file and pull out its pixel data as floats
        d = dicom.read_file(filename)
        img = d.pixel_array.astype('float')
        # Crop/resize to 64x64, then scale the intensities down by 255
        img = crop_resize(img, newsize=(64, 64))  # PH Added preprocessing
        img = np.true_divide(img, 255)

        return img
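Both examples rely on a `crop_resize` helper defined elsewhere in the project. The snippet below is only an illustrative stand-in for what such a helper might do (centre-crop to a square, then resize to the target shape), assuming `scikit-image` is available; the name `crop_resize_sketch` and the use of `skimage.transform.resize` are assumptions, not the project's actual implementation.

from skimage.transform import resize

def crop_resize_sketch(img, newsize=(64, 64)):
    # Hypothetical stand-in for the project's crop_resize helper.
    # Centre-crop the 2D image to a square, then resize to `newsize`.
    side = min(img.shape[:2])
    r0 = (img.shape[0] - side) // 2
    c0 = (img.shape[1] - side) // 2
    cropped = img[r0:r0 + side, c0:c0 + side]
    return resize(cropped, newsize, preserve_range=True)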
Example #2
def predict(inputimages, nkerns=100, batch_size=260, fine_tuned_params_path=None):

    ######################
    #   INITIALIZATIONS  #
    ######################
    dim = len(inputimages)
    new_images = numpy.zeros((dim, 64, 64))

    # Preprocess every input image: crop/resize to 64x64, then scale by 255
    for i in xrange(dim):
        new_images[i, :, :] = crop_resize(inputimages[i], newsize=(64, 64))
        new_images[i, :, :] = numpy.true_divide(new_images[i], 255)

    if fine_tuned_params_path is None:
        # No pre-trained parameters: the layers below initialise randomly
        b_CNN_input = None
        W_CNN_input = None
        W_logistic = None
        b_logistic = None
    else:
        # Load pre-trained parameters from a pickle (binary mode)
        with open(fine_tuned_params_path, 'rb') as f:
            params = pickle.load(f)

        W_logistic, b_logistic, W_CNN_input, b_CNN_input = params[0]
        W_logistic = numpy.asarray(W_logistic, dtype='float32')
        b_logistic = numpy.asarray(b_logistic, dtype='float32')
        W_CNN_input = numpy.asarray(W_CNN_input, dtype='float32')
        b_CNN_input = numpy.asarray(b_CNN_input, dtype='float32')

        # Wrap the parameters in Theano shared variables
        W_logistic = theano.shared(W_logistic.astype(fx), borrow=True)
        b_logistic = theano.shared(b_logistic.astype(fx), borrow=True)
        W_CNN_input = theano.shared(W_CNN_input.astype(fx), borrow=True)
        b_CNN_input = theano.shared(b_CNN_input.astype(fx), borrow=True)

    rng = numpy.random.RandomState(23455)

    # Flatten each 64x64 image into a 4096-vector and move it into a shared variable

    train_set_x = numpy.asarray(new_images, dtype='float32')
    dim = train_set_x.shape
    train_set_x = numpy.reshape(train_set_x, (dim[0], dim[1] * dim[2]))
    train_set_x = theano.shared(train_set_x.astype(fx), borrow=True)                      # convert to 260 x 4096

    # number of complete minibatches (floor division keeps it an integer)
    n_batches = train_set_x.get_value(borrow=True).shape[0]
    n_batches //= batch_size

    ###############
    # BUILD MODEL #
    ###############

    # symbolic minibatch index and input matrix
    index = T.lscalar()
    x = T.matrix('x', dtype=fx)

    # Convolution + Pooling Layer
    # 64x64 input, 11x11 filters -> 54x54 feature maps; 6x6 pooling -> 9x9,
    # so the flattened output has nkerns * 9 * 9 = 8100 features per image
    layer0_input = x.reshape((batch_size, 1, 64, 64))
    layer0 = LeNetConvPoolLayer(
        rng=rng,
        input=layer0_input,
        filter_shape=(nkerns, 1, 11, 11),
        image_shape=(batch_size, 1, 64, 64),
        poolsize=(6, 6),
        W=W_CNN_input,
        b=b_CNN_input
    )
    layer0_output = layer0.output.flatten(2)

    # Logistic Regression Layer: maps the 8100 conv features to 1024 (= 32x32) outputs
    layer3 = LogisticRegression(
        input=layer0_output,
        n_in=8100,
        n_out=1024,
        W=W_logistic,
        b=b_logistic
    )
    predict_model = theano.function(
        inputs=[index],
        outputs=layer3.thresh,
        givens={
            x: train_set_x[index * batch_size: (index + 1) * batch_size]
        }
    )

    # Evaluate the model on every minibatch and reshape the predictions to 32x32 images
    preds = [predict_model(minibatch_index) for minibatch_index in xrange(n_batches)]
    images = [numpy.reshape(preds[i], (32, 32)) for i in xrange(n_batches)]

    '''
    with open('/Users/Peadar/Documents/KagglePythonProjects/AML/DataScienceBowl/data/CNN_output.pickle', 'wb') as f:
        pickle.dump(images, f)
    '''

    return images
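A rough sketch of how predict might be driven end to end, using the same legacy dicom import as the examples above. The file glob and the parameter-pickle path are placeholders, not values from the original project, and passing batch_size=len(raw_images) is just one way to make the minibatch count come out to a whole number.

import glob
import dicom  # legacy pydicom import, as in the examples above

# Placeholder paths -- adjust to the actual data layout
dicom_files = sorted(glob.glob('data/study1/*.dcm'))
raw_images = [dicom.read_file(f).pixel_array.astype('float') for f in dicom_files]

# predict() crops and normalises internally, so raw pixel arrays are passed in;
# batch_size should divide the number of images evenly
masks = predict(raw_images,
                nkerns=100,
                batch_size=len(raw_images),
                fine_tuned_params_path='fine_tuned_params.pickle')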