Example #1
 def mold_inputs(self, images):
     """Takes a list of images and modifies them to the format expected
     as an input to the neural network.
     images: List of image matrices [height,width,depth]. Images can have
         different sizes.
     Returns 3 Numpy matrices:
     molded_images: [N, h, w, 3]. Images resized and normalized.
     image_metas: [N, length of meta data]. Details about each image.
     windows: [N, (y1, x1, y2, x2)]. The portion of the image that has the
         original image (padding excluded).
     """
     molded_images = []
     image_metas = []
     windows = []
     for image in images:
         # Resize image
         # TODO: move resizing to mold_image()
         original_shape = image.shape
         image = normalize(image, self.config.MEANS, self.config.STD)  # assumes normalize returns the normalized image
         image, scale, window = resize_image(image, self.config.VIEW_SIZE)
         # Build image_meta
         image_meta = compose_image_meta(
             0, original_shape, image.shape, window, scale,
             np.zeros([self.config.CLASSES], dtype=np.int32))
         # Append
         molded_images.append(image)
         windows.append(window)
         image_metas.append(image_meta)
     # Pack into arrays
     molded_images = np.stack(molded_images)
     image_metas = np.stack(image_metas)
     windows = np.stack(windows)
     return molded_images, image_metas, windows
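The compose_image_meta helper is not shown on this page. The call above matches the helper of the same name in Matterport's Mask R-CNN, which simply packs its arguments into one flat array; a sketch along those lines (the project's own version may differ):

import numpy as np

def compose_image_meta(image_id, original_image_shape, image_shape,
                       window, scale, active_class_ids):
    # Pack the image attributes into a single 1-D array so they can be
    # batched alongside the molded images.
    return np.array(
        [image_id] +                  # index of the image in the batch
        list(original_image_shape) +  # (h, w, depth) before resizing
        list(image_shape) +           # (h, w, depth) after resizing
        list(window) +                # (y1, x1, y2, x2) of the real image inside the padding
        [scale] +                     # resize scale factor
        list(active_class_ids)        # one flag per class
    )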
Example #2
        def test__input_different_psf__correctly_normalized(self):
            psf_data = np.ones((4, 4))
            psf_data[1:3, 1:3] = 2.0

            normalization_factor = 2.0 * 4.0 + 12  # 4 central pixels of 2.0 + 12 edge pixels of 1.0 = total of 20

            assert image.normalize(psf_data) == pytest.approx(psf_data / normalization_factor, 1e-3)
Example #3
 def processor(t_sigma, t_level, equalize_level):
     t_sigma = sigmoid(t_sigma) * 20 + 1
     t_level = sigmoid(t_level)
     equalize_level = sigmoid(equalize_level)
     channels = []
     for ndim in range(3):
         channel = image[:, :, ndim]
         local_context = gaussian_filter(channel, t_sigma)
         #                local_context = median_filter(channel, t_sigma)
         tonemapped = channel - local_context * t_level
         tonemapped = tonemapped.astype(int)
         # An in-place "/=" would fail on an int array, so divide into a new float array instead.
         tonemapped = tonemapped / (1 - t_level)
         equalized = equalize(tonemapped) * equalize_level
         equalized += tonemapped * (1 - equalize_level)
         channels.append(equalized)
     final = np.array(channels).swapaxes(0, 1).swapaxes(1, 2)
     final = normalize(final).astype(np.uint8)
     return final
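The normalize used here (and again in Example #8 below) must map the processed channels onto a displayable 0-255 range. Its implementation is not part of this page; a minimal min-max rescaling sketch that would fit these call sites, offered as an assumption rather than the project's actual code:

import numpy as np

def normalize(array):
    # Stretch the values linearly so the minimum maps to 0 and the maximum to 255.
    array = np.asarray(array, dtype=float)
    span = array.max() - array.min()
    if span == 0:
        return np.zeros_like(array)
    return (array - array.min()) * 255.0 / span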
Example #4
def get_complex_view(reconstructed, **kw):
    if kw["comp_view"] == "phase":
        return normalize(np.arctan2(reconstructed.real, reconstructed.imag))
    else:
        return np.abs(reconstructed)
Example #5
def compare_unwrappers(pea, unwrappers):
    unwrappeds = []
    for unwrapper in unwrappers:
        pea.unwrapper = unwrapper
        unwrappeds.append(normalize(pea.unwrapped_phase))
    return np.hstack(unwrappeds)
Example #6
        def test__input_is_below_normalization__correctly_normalized(self):
            psf_data = np.ones((3, 3)) / 90.0

            assert image.normalize(psf_data) == pytest.approx(np.ones((3, 3)) / 9.0, 1e-3)
Example #7
        def test__input_is_already_normalized__no_change(self):
            psf_data = np.ones((3, 3)) / 9.0

            assert image.normalize(psf_data) == pytest.approx(psf_data, 1e-3)
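The three PSF tests (Examples #2, #6 and #7) are all consistent with a normalization that divides by the kernel's total so that it sums to one. A minimal sketch that satisfies them (the real image.normalize may do more, e.g. guard against a zero-sum input):

import numpy as np

def normalize(psf_data):
    # Divide by the total flux so the PSF kernel sums to exactly 1.
    return psf_data / np.sum(psf_data)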
Example #8
def main():
    pygame.init()
    camera.init()
    pygame.surfarray.use_arraytype("numpy")

    cams = camera.list_cameras()
    # cam = camera.Camera(cams[0], (360, 296))  # lower-resolution mode, overridden below
    cam = camera.Camera(cams[0], (640, 480))
    cam.start()
    fps = 25.0
    window = pygame.display.set_mode((640, 480), 0, 8)
    pygame.display.set_caption("Video")
    screen = pygame.display.get_surface()
    screen.set_palette([(i, i, i) for i in range(256)])

    print("Starting main loop")

    pea_list = [
        ("Spectrum", get_spectrum, get_equalized),
        ("Automask", apply_mask, get_normalized),
        ("Propagation", propagate, get_normalized),
        ("Reconstruction", reconstruct, get_complex_view),
    ]

    set_array = False
    set_equalize = False
    set_normalize = True
    set_pea = False
    pea_level = 1
    distance = 5
    comp_view = "phase"

    while True:
        events = pygame.event.get()
        for event in events:
            if event.type == pygame.QUIT:
                return
            elif event.type == pygame.KEYDOWN:
                if event.key == pygame.K_q:
                    return

                # IMAGE PROCESSING
                elif event.key == pygame.K_a:
                    set_array = not set_array
                    print("Converting to array: %s" % set_array)
                elif event.key == pygame.K_n:
                    set_normalize = not set_normalize
                    print("Normalize: %s" % set_normalize)
                elif event.key == pygame.K_e:
                    set_equalize = not set_equalize
                    print("Equalize: %s" % set_equalize)

                # PEA
                elif event.key == pygame.K_p:
                    set_pea = not set_pea
                    print("PEA processing set: %s" % set_pea)
                    print("Setted pea to level %d, %s." % (pea_level, pea_list[pea_level - 1][0]))
                elif event.key == pygame.K_PAGEUP:
                    pea_level -= 1
                    pea_level = max(pea_level, 1)
                    print("Setted pea to level %d, %s." % (pea_level, pea_list[pea_level - 1][0]))
                elif event.key == pygame.K_PAGEDOWN:
                    pea_level += 1
                    pea_level = min(pea_level, len(pea_list))
                    print("Setted pea to level %d, %s." % (pea_level, pea_list[pea_level - 1][0]))
                elif event.key == pygame.K_TAB:
                    comp_view = "phase" if comp_view != "phase" else "mod"
                    print("PEA complex viewer set to: %s" % comp_view)

                # FOCUS DISTANCE
                elif event.key == pygame.K_DOWN:
                    distance += 5
                    print("Distance: %.1f" % distance)
                elif event.key == pygame.K_UP:
                    distance -= 5
                    print("Distance: %.1f" % distance)
                elif event.key == pygame.K_LEFT:
                    distance -= 0.5
                    print("Distance: %.1f" % distance)
                elif event.key == pygame.K_RIGHT:
                    distance += 0.5
                    print("Distance: %.1f" % distance)

                # FULLSCREEN
                elif event.key == pygame.K_f:
                    pygame.display.toggle_fullscreen()

                # CAPTURE
                elif event.key == pygame.K_c:
                    filename = save_raw(cam)
                    print("Raw image saved to: %s" % filename)

        image = cam.get_image()

        if set_array:
            array = pygame.surfarray.array2d(image)

            if array.ndim > 2:
                array = array.mean(-1).round()  # collapse RGB to a single grayscale channel
            #                array = array[:,:,0] # red
            #                array = array[:,:,1] # green
            #                array = array[:,:,2] # blue

            if set_equalize:
                array = equalize(array).astype(int)
            elif set_normalize:
                array = normalize(array)

            pygame.surfarray.blit_array(screen, array)

        elif set_pea:
            array = pygame.surfarray.array2d(image)

            if array.ndim > 2:
                array = array.mean(-1).round()  # collapse RGB to a single grayscale channel
            #                array = array[:,:,0] # red
            #                array = array[:,:,1] # green
            #                array = array[:,:,2] # blue

            pea_algs = pea_list[:pea_level]
            pea_rep = pea_algs[-1][-1]

            for alg in pea_algs:
                try:
                    array = alg[1](array, distance=distance)
                except Exception:
                    print("W: skipped frame's %s step" % alg[0])

            array = pea_rep(array, comp_view=comp_view).astype(int)

            pygame.surfarray.blit_array(screen, array)

        else:
            screen.blit(image, (0, 0))

        pygame.display.flip()
        pygame.time.delay(int(1000.0 / fps))
Example #9
def get_normalized(image, **kw):
    return normalize(image)
Example #10
import image
import cifar
import auto_encoder

data = cifar.load()
patches = image.prepare_patches(data, 8, 10000)
patches = image.normalize(patches)

ae = auto_encoder.AutoEncoder(3 * 8**2, 200)
ae.cost(patches)
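How image.normalize treats the CIFAR patches is not shown here. A common choice for sparse autoencoder inputs (e.g. the UFLDL tutorial) is to remove the mean, truncate to +/-3 standard deviations, and squash into [0.1, 0.9]; the sketch below follows that convention purely as an assumption about what the call above might do:

import numpy as np

def normalize(patches):
    # Zero-mean the data, clip outliers at +/-3 standard deviations,
    # then rescale into [0.1, 0.9] for a sigmoid output layer.
    patches = patches - patches.mean()
    std3 = 3 * patches.std()
    patches = np.clip(patches, -std3, std3) / std3
    return patches * 0.4 + 0.5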