Code Example #1
File: Torch_DataLoader.py Project: laltman2/CATCH
def scale_int(s, config, num_overlaps):
    shape = config['shape']
    ext = feature_extent(s, config)
    #introduce noise to ext
    ext_noise = config['ext_noise']
    ext = np.random.normal(ext, ext_noise*ext)
    extsize = ext*2
    shapesize = shape[0]
    if extsize <= shapesize:
        scale = 1
    else:
        scale = int(np.floor(extsize/shapesize) + 1)
    newshape = [i * scale for i in shape]
    holo = LMHologram(coordinates=coordinates(newshape))
    holo.instrument.properties = config['instrument']
    # ... calculate hologram
    frame = np.random.normal(0, config['noise'], newshape)
    holo.particle = s
    holo.particle.x_p += (scale-1)*100
    holo.particle.y_p += (scale-1)*100
    # build the full particle list: overlapping spheres plus the (shifted) primary particle
    spheres = add_overlaps(ext, num_overlaps, config)
    spheres.append(holo.particle)
    holo.particle = spheres
    frame += holo.hologram().reshape(newshape)
    frame = np.clip(100 * frame, 0, 255).astype(np.uint8)
    #decimate
    frame = frame[::scale, ::scale]
    return frame, scale
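A hypothetical usage sketch (not from the CATCH source): scale_int() returns the decimated frame together with the integer decimation factor, so a position measured on the low-resolution crop can be mapped back to full-resolution pixels by multiplying by scale. The variables sphere and config are assumed to be defined elsewhere in Torch_DataLoader.py.

frame, scale = scale_int(sphere, config, num_overlaps=0)
x_lowres, y_lowres = 100.5, 98.2                       # illustrative position found on the decimated frame
x_full, y_full = x_lowres * scale, y_lowres * scale    # map back to the original resolution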
Code Example #2
def mie_loss(self, params, image, dim):
    '''Returns the residual between the image and our Mie model.'''
    p = params.valuesdict()
    h = LMHologram(coordinates=coordinates(dim))
    h.particle.r_p = [p['x'] + dim[0] // 2, p['y'] + dim[1] // 2, p['z']]
    h.particle.a_p = p['a_p']
    h.particle.n_p = p['n_p']
    h.instrument.wavelength = p['lamb']
    h.instrument.magnification = p['mpp']
    h.instrument.n_m = p['n_m']
    hologram = h.hologram().reshape(dim)
    return (hologram - image) / self.noise
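The params argument follows the lmfit Parameters interface (valuesdict()). Below is a minimal sketch, under the assumption that mie_loss() belongs to a fitter object such as the Mie_Fitter used in Code Example #9 and that its noise attribute is set, of how such a residual typically drives an lmfit least-squares fit; the names fitter, noisy_hologram and dim are placeholders.

from lmfit import Parameters, minimize

params = Parameters()
for name, value in (('x', 0.), ('y', 0.), ('z', 100.), ('a_p', 0.5),
                    ('n_p', 1.5), ('n_m', 1.339), ('mpp', 0.048), ('lamb', 0.447)):
    params.add(name, value=value)
for name in ('n_m', 'mpp', 'lamb'):   # hold instrument parameters fixed
    params[name].vary = False

# fitter: instance of the class that defines mie_loss()
result = minimize(lambda p, img, d: fitter.mie_loss(p, img, d).ravel(),
                  params, args=(noisy_hologram, dim))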
Code Example #3
def makedata(config={}):
    '''Make Training Data'''
    # set up pipeline for hologram calculation
    shape = config['shape']
    holo = LMHologram(coordinates=coordinates(shape))
    holo.instrument.properties = config['instrument']

    # create directories and filenames
    directory = os.path.expanduser(config['directory'])
    imgtype = config['imgtype']

    nframes = config['nframes']
    start = 0
    tempnum = nframes
    for dir in ('images', 'labels', 'params'):
        path = os.path.join(directory, dir)
        if not os.path.exists(path):
            os.makedirs(path)
        already_files = len(os.listdir(path))
        if already_files < tempnum:  #if there are fewer than the number of files desired
            tempnum = already_files
    if not config['overwrite']:
        start = tempnum
        if start >= nframes:
            return
    with open(os.path.join(directory, 'config.json'), 'w') as f:
        json.dump(config, f)
    filetxtname = os.path.join(directory, 'filenames.txt')
    imgname = os.path.join(directory, 'images', 'image{:05d}.' + imgtype)
    jsonname = os.path.join(directory, 'params', 'image{:05d}.json')
    yoloname = os.path.join(directory, 'labels' , 'image{:05d}.txt')

    filetxt = open(filetxtname, 'w')
    for n in range(start, nframes):  # for each frame ...
        print(imgname.format(n))
        sample = make_sample(config)   # ... get params for particles
        # ... calculate hologram
        frame = np.random.normal(1., config['noise'], shape)
        if len(sample) > 0:
            holo.particle = sample
            frame += holo.hologram().reshape(shape) - 1.
        frame = np.clip(100 * frame, 0, 255).astype(np.uint8)
        # ... and save the results
        cv2.imwrite(imgname.format(n), frame)
        with open(jsonname.format(n), 'w') as fp:
            fp.write(format_json(sample, config))
        with open(yoloname.format(n), 'w') as fp:
            fp.write(format_yolo(sample, config))
        filetxt.write(imgname.format(n) + '\n')
        #print('finished image {}'.format(n+1))
    filetxt.close()
    return
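For reference, a hypothetical minimal config covering the keys that makedata() itself reads above; the values are illustrative, and make_sample(), format_json() and format_yolo() read additional keys not shown here.

config = {
    'shape': [201, 201],                    # image dimensions [pixels]
    'instrument': {'wavelength': 0.447,     # [um]
                   'magnification': 0.048,  # [um/pixel]
                   'n_m': 1.34},            # refractive index of the medium
    'directory': '~/datasets/train',
    'imgtype': 'png',
    'nframes': 10,
    'noise': 0.05,                          # std of the Gaussian background noise
    'overwrite': False,
}
makedata(config)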
Code Example #4
File: mtd.py Project: davidgrier/pylorenzmie
def mtd(configfile='mtd.json'):
    '''Make Training Data'''
    # read configuration
    with open(configfile, 'r') as f:
        config = json.load(f)

    # set up pipeline for hologram calculation
    shape = config['shape']
    holo = LMHologram(coordinates=coordinates(shape))
    holo.instrument.properties = config['instrument']

    # create directories and filenames
    directory = os.path.expanduser(config['directory'])
    imgtype = config['imgtype']
    for dir in ('images_labels', 'params'):
        if not os.path.exists(os.path.join(directory, dir)):
            os.makedirs(os.path.join(directory, dir))
    shutil.copy2(configfile, directory)
    filetxtname = os.path.join(directory, 'filenames.txt')
    imgname = os.path.join(directory, 'images_labels',
                           'image{:04d}.' + imgtype)
    jsonname = os.path.join(directory, 'params', 'image{:04d}.json')
    yoloname = os.path.join(directory, 'images_labels', 'image{:04d}.txt')

    filetxt = open(filetxtname, 'w')
    for n in range(config['nframes']):  # for each frame ...
        print(imgname.format(n))
        sample = make_sample(config)  # ... get params for particles
        # ... calculate hologram
        frame = np.random.normal(0, config['noise'], shape)
        if len(sample) > 0:
            holo.particle = sample
            frame += holo.hologram().reshape(shape)
        else:
            frame += 1.
        frame = np.clip(100 * frame, 0, 255).astype(np.uint8)
        # ... and save the results
        cv2.imwrite(imgname.format(n), frame)
        with open(jsonname.format(n), 'w') as fp:
            fp.write(format_json(sample, config))
        with open(yoloname.format(n), 'w') as fp:
            fp.write(format_yolo(sample, config))
        filetxt.write(imgname.format(n) + '\n')
    filetxt.close()
Code Example #5
File: Torch_DataLoader.py Project: laltman2/CATCH
def scale_float(s, config, num_overlaps):
    shape = config['shape']
    ext = feature_extent(s, config)
    #introduce noise to ext
    ext_noise = config['ext_noise']
    ext = np.random.normal(ext, ext_noise*ext)
    extsize = ext*2
    shapesize = shape[0]
    scale = float(extsize)/float(shapesize)
    newshape = [int(extsize)]*2
    holo = LMHologram(coordinates=coordinates(newshape))
    holo.instrument.properties = config['instrument']
    # ... calculate hologram
    frame = np.random.normal(0, config['noise'], newshape)
    s.x_p += (scale-1)*100.
    s.y_p += (scale-1)*100.
    totalspheres = add_overlaps(ext, num_overlaps, config)
    totalspheres.append(s)
    holo.lorenzmie.particle = totalspheres
    frame += holo.hologram().reshape(newshape)
    frame = np.clip(100 * frame, 0, 255).astype(np.uint8)
    #reshape
    #frame = cv2.resize(frame, tuple(shape))
    return frame, scale
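A possible downstream step (the resize is commented out in the source above): bring the variable-size crop back to the configured training shape while keeping scale, so fitted quantities can later be mapped back to physical pixels. sphere and config are assumed to be defined elsewhere.

import cv2

frame, scale = scale_float(sphere, config, num_overlaps=0)
frame = cv2.resize(frame, tuple(config['shape']))   # resample to the fixed network input size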
Code Example #6
    THIS_DIR = os.path.dirname(os.path.abspath(__file__))
    path = (THIS_DIR, '..', 'docs', 'tutorials', 'crop.png')
    TEST_IMAGE = os.path.join(*path)

    # Feature with instrumental properties and mask properties
    a = Feature(wavelength=0.447, magnification=0.048, n_m=1.34,
                distribution='radial', percentpix=0.1)

    # Normalized image data
    data = cv2.imread(TEST_IMAGE)
    data = cv2.cvtColor(data, cv2.COLOR_BGR2GRAY).astype(float)
    data /= np.mean(data)
    a.data = data

    # Pixel coordinates
    a.coordinates = coordinates(data.shape)
    
    # Initial estimates for particle properties
    p = a.model.particle
    p.r_p = [data.shape[0]//2, data.shape[1]//2, 330.]
    p.a_p = 1.1
    p.n_p = 1.4
    print('Initial estimates:\n{}'.format(p))

    # warm up the model so the timing below measures only the fit
    a.model.hologram()
    start = time()
    result = a.optimize()
    delta = time() - start
    print('Refined estimates:\n{}'.format(p))
    print('Time to fit: {:.3f} s'.format(delta))
Code Example #7
def crop_feature(img_list=[], xy_preds=[], new_shape=(201, 201)):
    '''
    img_list: list of images (np.ndarray) with shape: old_shape
    xy_preds: output of a YOLO prediction; list of lists of dicts,
              where xy_preds[i] corresponds to img_list[i]

    output:
    frame_list: list of lists of Feature objects, one list per image
    est_input_img: downsampled crops to feed the estimator
    est_input_scale: decimation factor used for each crop
    '''

    numfiles = len(img_list)
    numpreds = len(xy_preds)
    if numfiles != numpreds:
        raise Exception(
            'Number of images: {} does not match number of predictions: {}'.
            format(numfiles, numpreds))

    frame_list = []
    est_input_img = []
    est_input_scale = []
    for num in range(numfiles):
        feature_list = []
        img_local = img_list[num]
        preds_local = xy_preds[num]
        for pred in preds_local:
            f = Feature(model=LMHologram())
            conf = pred["conf"] * 100
            (x, y, w, h) = pred["bbox"]
            xc = int(np.round(x))
            yc = int(np.round(y))
            ext = np.amax([int(w), int(h)])
            if ext <= new_shape[0]:
                crop_shape = new_shape
                scale = 1
            else:
                scale = int(np.floor(ext / new_shape[0]) + 1)
                crop_shape = np.multiply(new_shape, scale)
            cropped, corner1 = crop_center(img_local, (xc, yc), crop_shape)
            cropped = cropped[:, :, 0]
            est_img = cropped[::scale, ::scale]
            est_input_img.append(est_img)
            est_input_scale.append(scale)
            newcenter = [int(x) for x in np.divide(crop_shape, 2)]
            ext_shape = (ext, ext)
            data, corner2 = crop_center(cropped, newcenter, ext_shape)
            corner = np.add(corner1, corner2)
            data = np.array(data) / 100.
            f.data = data
            coords = coordinates(shape=ext_shape, corner=corner)
            f.coordinates = coords
            f.model.particle.x_p = x
            f.model.particle.y_p = y
            feature_list.append(f)
        feature_list = np.array(feature_list)
        frame_list.append(feature_list)
    frame_list = np.array(frame_list)
    frlistsize = 0
    for frame in frame_list:
        frlistsize += len(frame)
    est_input_img = np.array(est_input_img)
    est_input_scale = np.array(est_input_scale)
    if frlistsize != len(est_input_img):
        print('error in output sizes')
        print('Frame list size:', frlistsize)
        print('Estimator input size:', len(est_input_img))
    return frame_list, est_input_img, est_input_scale
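A hypothetical consumer of the three return values (not from the source): the downsampled crops feed a parameter estimator, while each Feature keeps the full-resolution data and pixel coordinates for a subsequent fit.

frame_list, est_imgs, est_scales = crop_feature(img_list=[img], xy_preds=[preds])  # img, preds from a YOLO pass
for features in frame_list:
    for f in features:
        result = f.optimize()        # refine each detected particle against its crop
        print(f.model.particle)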
Code Example #8
File: CATCH_refine.py Project: laltman2/CATCH
    # Instrument configuration
    ins = f.model.instrument
    ins.wavelength = 0.447     # [um]
    ins.magnification = 0.048  # [um/pixel]
    ins.n_m = 1.34

    #provide initial parameter estimates from ML
    p = f.particle
    p.properties = row

    #crop experimental data and give it to feature
    frame = cv2.imread(row.framepath, cv2.IMREAD_GRAYSCALE)
    crop = crop_frame(frame, [row])[0]
    
    f.data = crop/np.mean(crop)
    f.coordinates = coordinates(crop.shape, corner=row.bbox[0])

    #mask settings
    f.mask.percentpix = 0.2
    f.mask.distribution = 'radial'
    
    #fit
    result = f.optimize()
    report(result)

    #replace refined values in new df
    refrow = row.copy()
    newprops = pd.Series(f.particle.properties)
    refrow.update(newprops)
    refrow['redchi'] = result.redchi
    refrow = refrow.to_frame().T
Code Example #9
def example():
    '''
    Make a "noisy" hologram. Then fit the noisy hologram. Plot the results.
    '''
    ## Make Noisy Hologram.
    # Create hologram to be fitted.
    from time import time
    x, y, z = 0., 0., 100.
    a_p = 0.5
    n_p = 1.5
    n_m = 1.339
    dim = [201, 201]
    lamb = 0.447
    mpp = 0.048
    h = LMHologram(coordinates=coordinates(dim))
    h.particle.r_p = [x + dim[0] // 2, y + dim[1] // 2, z]
    h.particle.a_p = a_p
    h.particle.n_p = n_p
    h.instrument.wavelength = lamb
    h.instrument.magnification = mpp
    h.instrument.n_m = n_m
    hologram = h.hologram().reshape(dim)

    # Add noise.
    std = 0.05
    noise = np.random.normal(size=hologram.shape) * std
    noisy_hologram = hologram + noise

    # Fit the noisy hologram.
    init_params = {
        'x': x,
        'y': y,
        'z': z + 8,
        'a_p': a_p - .3,
        'n_p': n_p + .03,
        'n_m': n_m,
        'mpp': mpp,
        'lamb': lamb
    }
    mie_fit = Mie_Fitter(init_params)
    t = time()
    result = mie_fit.fit(noisy_hologram)
    print("Time to fit: {:.05f}".format(time() - t))

    # Calculate the resulting image.
    residual = result.residual.reshape(*dim)
    final = hologram

    # Write error report.
    report_fit(result)

    ## Make plots.
    # Plot images.
    sns.set(style='white', font_scale=1.4)
    plt.imshow(np.hstack([noisy_hologram, final, residual + 1]))
    plt.title('Image, Fit, Residual')
    plt.gray()
    plt.show()

    # Plot Covariance.
    f, ax = plt.subplots()
    #cmap = sns.diverging_palette(220, 10, as_cmap=True)

    sns.set(font_scale=1.5)
    plt.title('Log Covariance Matrix')
    sns.heatmap(np.log(result.covar),
                cmap='PuBu',
                square=True,
                cbar_kws={},
                ax=ax)
    ax.set_xticklabels(['x', 'y', 'z', r'a$_p$', r'n$_p$'])
    ax.set_yticklabels([r'n$_p$', r'a$_p$', 'z', 'y', 'x'])
    plt.show()
Code Example #10
    def _update(self):
        if self._coordinates is None:
            self._selected = None
            return
        npts = self._distance.size
        if self.percentpix >= 1.:
            index = np.delete(np.arange(npts), self.exclude)
        else:
            nchosen = int(npts * self.percentpix)
            rho = self._get_distribution()
            if rho is not None:
                rho[self.exclude] = 0.
                rho /= np.sum(rho)
            index = np.random.choice(npts, nchosen, p=rho, replace=False)
        self._selected = np.full(npts, False)
        self._selected[index] = True


if __name__ == '__main__':  # pragma: no cover
    from pylorenzmie.utilities import coordinates

    shape = (201, 201)
    corner = (350, 300)
    m = Mask(coordinates(shape, corner=corner))
    m.settings['percentpix'] = 0.4
    m.settings['distribution'] = 'radial_gaussian'
    m.exclude = np.arange(10000, 12000)
    m.initialize_sample()
    m.draw_mask()
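A standalone numpy sketch (an illustration, not part of the Mask class) of the sampling rule in _update(): when percentpix is below 1, a fixed fraction of pixels is drawn at random without replacement, and excluded pixels are given zero probability so they can never be selected.

import numpy as np

npts, percentpix = 201 * 201, 0.4
exclude = np.arange(10000, 12000)
rho = np.ones(npts)                 # uniform weights; the 'radial' distributions reweight this
rho[exclude] = 0.
rho /= rho.sum()
index = np.random.choice(npts, int(npts * percentpix), p=rho, replace=False)
selected = np.full(npts, False)
selected[index] = True
print(selected.sum(), selected[exclude].any())   # ~40% of pixels kept; excluded pixels stay False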
Code Example #11
File: pylm_fit.py Project: laltman2/CATCH
feature = Feature(model=LMHologram(double_precision=False))

# Instrument configuration
ins = feature.model.instrument
ins.wavelength = 0.447     # [um]
ins.magnification = 0.048  # [um/pixel]
ins.n_m = 1.34

# The normalized image constitutes the data for the Feature()
data = img[:,:,0]
data = data / np.mean(data)
feature.data = data

# Specify the coordinates for the pixels in the image data
feature.coordinates = coordinates(data.shape)

p = feature.particle
p.r_p = [data.shape[0]//2, data.shape[1]//2, results['z_p']]
p.a_p = results['a_p']
p.n_p = results['n_p']

holo = feature.hologram()
resid = feature.residuals() + 1.

display = np.hstack([data, holo, resid])

matplotlib.use('Qt5Agg')
plt.imshow(display, cmap='gray')
plt.title('Image, Predicted Holo, Residual')
plt.show()