Example 1
    def update_image(self):
        """Reapply every checked filter from the list model to the image"""
        self.image = self.original

        # Run the filters of every checked item, in list order
        for i in range(self.list_model.rowCount()):
            if self.list_model.item(i).checkState():
                filtros = self.list_model.item(i).data()
                self.image = apply_filters(self.image, filtros['filter_type'],
                                           filtros['params'])

        self.mostrar_imagem()
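
For reference, a minimal sketch of how the checkable items this method iterates over could be populated. PyQt5 is assumed, and the filter name and parameters below are hypothetical; the only contract update_image relies on is a checkable item whose data() payload carries 'filter_type' and 'params' keys:

from PyQt5.QtCore import Qt
from PyQt5.QtGui import QStandardItem, QStandardItemModel

list_model = QStandardItemModel()
item = QStandardItem("Gaussian blur")  # display name (hypothetical)
item.setCheckable(True)
item.setCheckState(Qt.Checked)
# Payload read back by update_image() via item.data()
item.setData({'filter_type': 'gaussian_blur', 'params': {'ksize': 5}})
list_model.appendRow(item)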
Example 2
    def mostrar_imagem(self):
        """Preview the image with the current filter applied"""
        self.image = apply_filters(self.original, self.filtros['filter_type'],
                                   self.filtros['params'])
        size = self.image.shape
        # Bytes per line must be an int: use integer division
        step = self.image.size // size[0]

        qformat = (QImage.Format_RGBA8888
                   if size[2] == 4 else QImage.Format_RGB888)
        img = QImage(self.image, size[1], size[0], step, qformat).rgbSwapped()
        self.lblImage.setPixmap(QPixmap.fromImage(img))
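
The ndarray-to-QImage conversion above is easy to get wrong; here is a self-contained sketch of the same idea, assuming PyQt5 and a 3- or 4-channel BGR uint8 frame such as OpenCV produces (the helper name is mine):

import numpy as np
from PyQt5.QtGui import QImage, QPixmap

def ndarray_to_pixmap(image):
    """Convert a BGR or BGRA uint8 ndarray to a QPixmap (sketch)."""
    height, width = image.shape[:2]
    bytes_per_line = image.strides[0]  # row stride in bytes
    qformat = (QImage.Format_RGBA8888
               if image.shape[2] == 4 else QImage.Format_RGB888)
    img = QImage(image.data, width, height, bytes_per_line, qformat)
    # rgbSwapped() flips BGR to RGB and copies, detaching from the buffer
    return QPixmap.fromImage(img.rgbSwapped())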
Example 3

def mainloop():
    global config, masks, layers, config_mtime

    config, config_mtime_new = load_config(config_mtime, config)
    if config_mtime != config_mtime_new:
        config['width'] = width
        config['height'] = height
        layers = []  # Allow filters to run their destructors
        layers = reload_layers(config)
        config_mtime = config_mtime_new

    if static_image is not None:
        success, frame = True, static_image
    else:
        success, frame = cap.read()
    if not success:
        print("Error getting a webcam image!")
        sys.exit(1)
    # BGR to RGB
    frame = frame[..., ::-1]
    # np.float was removed in NumPy 1.24; the builtin float is equivalent
    frame = frame.astype(float)

    input_height, input_width = frame.shape[:2]
    internal_resolution = config.get("internal_resolution", 0.5)

    target_height, target_width = to_input_resolution_height_and_width(
        internal_resolution, output_stride, input_height, input_width)

    padT, padB, padL, padR = calc_padding(frame, target_height, target_width)
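    # Letterbox-resize the frame to the model input size; the pad amounts
    # computed above are used later to crop the outputs back to the
    # original frame geometry.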
    resized_frame = tf.image.resize_with_pad(
        frame,
        target_height,
        target_width,
        method=tf.image.ResizeMethod.BILINEAR)

    resized_height, resized_width = resized_frame.shape[:2]

    # Preprocessing: each model expects a different input normalization
    if model_type == "mobilenet":
        # Scale pixel values to [-1, 1]
        resized_frame = np.divide(resized_frame, 127.5)
        resized_frame = np.subtract(resized_frame, 1.0)
    elif model_type == "resnet50":
        # Subtract the per-channel ImageNet mean
        m = np.array([-123.15, -115.90, -103.06])
        resized_frame = np.add(resized_frame, m)
    else:
        raise ValueError("Unknown model type: {}".format(model_type))

    sample_image = resized_frame[tf.newaxis, ...]

    results = sess.run(output_tensor_names,
                       feed_dict={input_tensor: sample_image})

    if model_type == "mobilenet":
        segment_logits = results[1]
        part_heatmaps = results[2]
        heatmaps = results[4]
    else:
        segment_logits = results[6]
        part_heatmaps = results[5]
        heatmaps = results[2]

    scaled_segment_scores = scale_and_crop_to_input_tensor_shape(
        segment_logits, input_height, input_width, padT, padB, padL, padR,
        True)

    scaled_part_heatmap_scores = scale_and_crop_to_input_tensor_shape(
        part_heatmaps, input_height, input_width, padT, padB, padL, padR, True)

    scaled_heatmap_scores = scale_and_crop_to_input_tensor_shape(
        heatmaps, input_height, input_width, padT, padB, padL, padR, True)

    mask = to_mask_tensor(scaled_segment_scores,
                          config.get("segmentation_threshold", 0.75))
    mask = np.reshape(mask, mask.shape[:2])

    # Per-part and keypoint-heatmap masks use much stricter thresholds
    part_masks = to_mask_tensor(scaled_part_heatmap_scores, 0.999)
    part_masks = np.array(part_masks)
    heatmap_masks = to_mask_tensor(scaled_heatmap_scores, 0.99)
    heatmap_masks = np.array(heatmap_masks)

    # Average over the last N masks to reduce flickering
    # (at the cost of seeing afterimages)
    num_average_masks = max(1, config.get("average_masks", 3))
    masks.insert(0, mask)
    masks = masks[:num_average_masks]

    mask = np.mean(masks, axis=0)
    mask = (mask * 255).astype(np.uint8)
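    # mask is now an 8-bit (0-255) alpha map ready for post-processing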

    dilate_value = config.get("dilate", 0)
    erode_value = config.get("erode", 0)
    blur_value = config.get("blur", 0)

    if dilate_value:
        mask = cv2.dilate(mask,
                          np.ones((dilate_value, dilate_value), np.uint8),
                          iterations=1)
    if erode_value:
        mask = cv2.erode(mask,
                         np.ones((erode_value, erode_value), np.uint8),
                         iterations=1)
    if blur_value:
        mask = cv2.blur(mask, (blur_value, blur_value))

    # Attach the mask as an alpha channel: frame is now RGBA
    frame = np.append(frame, np.expand_dims(mask, axis=2), axis=2)

    input_frame = frame.copy()
    frame = np.zeros(input_frame.shape)
    for layer_type, layer_filters in layers:
        # Initialize the layer frame
        layer_frame = np.zeros(frame.shape)  # transparent black
        if layer_type == "foreground":
            # Camera image with the segmentation mask as alpha
            layer_frame = input_frame.copy()
        elif layer_type == "input":
            # Raw camera image, made fully opaque
            layer_frame = input_frame.copy()
            layer_frame[:, :, 3] = 255
        elif layer_type == "previous":
            # Layers composited so far, made fully opaque
            layer_frame = frame.copy()
            layer_frame[:, :, 3] = 255
        elif layer_type == "empty":
            pass

        layer_frame = filters.apply_filters(layer_frame, mask, part_masks,
                                            heatmap_masks, layer_filters)
        if layer_frame.shape[2] == 4:
            # Alpha-over compositing of the layer onto the frame so far
            transparency = layer_frame[:, :, 3] / 255.0
            transparency = np.expand_dims(transparency, axis=2)
            frame[:, :, :3] = (frame[:, :, :3] * (1.0 - transparency) +
                               layer_frame[:, :, :3] * transparency)
        else:
            frame[:, :, :3] = layer_frame[:, :, :3].copy()

    # Remove alpha channel
    frame = frame[:, :, :3]

    if config.get("debug_show_mask") is not None:
        mask_id = int(config.get("debug_show_mask", None))
        if mask_id > -1 and mask_id < 24:
            mask = part_masks[:, :, mask_id] * 255.0
        frame[:, :, 0] = mask
        frame[:, :, 1] = mask
        frame[:, :, 2] = mask
    elif config.get("debug_show_heatmap") is not None:
        heatmap_id = int(config.get("debug_show_heatmap", None))
        if heatmap_id > -1 and heatmap_id < 17:
            mask = heatmap_masks[:, :, heatmap_id] * 255.0
        frame[:, :, 0] = mask
        frame[:, :, 1] = mask
        frame[:, :, 2] = mask

    frame = frame.astype(np.uint8)
    fakewebcam.schedule_frame(frame)
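
The per-layer blend in the loop above is plain alpha-over compositing. A minimal standalone sketch of the formula (the function name is mine, not from the source):

import numpy as np

def alpha_over(dst_rgb, src_rgba):
    """Composite an RGBA layer over an RGB background (sketch)."""
    alpha = src_rgba[:, :, 3:4] / 255.0  # shape (H, W, 1), broadcastable
    return dst_rgb * (1.0 - alpha) + src_rgba[:, :, :3] * alpha

background = np.zeros((4, 4, 3))       # opaque black
layer = np.full((4, 4, 4), 255.0)      # white
layer[:, :, 3] = 128                   # ~50% transparent
print(alpha_over(background, layer)[0, 0])  # -> [128. 128. 128.]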
Example 4
def url_partition(base_url, proxies, max_levels=6, LOGGER=None):
    """Partition the listings for a given URL into multiple sub-URLs,
    such that each URL contains at most 20 properties.
    """
    log = LOGGER.info if LOGGER else print
    urls = [base_url]
    num_levels = 0
    partitioned_urls = []
    while urls and (num_levels < max_levels):
        # Pair every url with the full proxy list for the worker processes
        partition_inputs = [(url, proxies) for url in urls]

        with ProcessPoolExecutor(
                max_workers=min(50, len(partition_inputs))) as executor:
            # executor.map hands each (url, proxies) tuple to get_page_info
            # as its single argument
            scraper_results = list(
                executor.map(get_page_info, partition_inputs))

        log('Getting {} results'.format(len(scraper_results)))
        log('Results: {}'.format(scraper_results))

        values = []
        for result in scraper_results:
            # Render each row as a SQL value tuple, substituting NULL
            # for missing fields
            to_nulls = [x if x else 'NULL' for x in result]
            values.append("('{}', {}, {}, {})".format(*to_nulls))

        print("Values from search criteria, Step {}:\n {}".format(
            num_levels + 1, values))

        # (Disabled) Persist the values to the URLS table in SQLite:
        # with sqlite3.connect(SQLITE_DB_PATH) as db:
        #     cursor = db.cursor()
        #     cursor.execute("""
        #         INSERT INTO URLS (URL, NUM_PROPERTIES, NUM_PAGES, PER_PAGE_PROPERTIES)
        #         VALUES {};
        #     """.format(','.join(values)))

        log("Writing to value list {} results".format(len(scraper_results)))
        new_urls = []
        for result in scraper_results:
            # result = (url, num_properties, num_pages, per_page_properties)
            has_counts = result[1] and result[2] and result[3]
            overflows = has_counts and result[1] > result[2] * result[3]
            if overflows or num_levels == 0:
                expanded_urls = apply_filters(result[0], base_url)
                if len(expanded_urls) == 1 and expanded_urls[0] == result[0]:
                    log('Cannot further split {}'.format(result[0]))
                else:
                    new_urls.extend(expanded_urls)
            else:
                partitioned_urls.append(result)
        log("stage {}: running for {} urls. We already captured {} urls".format(
            num_levels, len(new_urls), len(partitioned_urls)))
        urls = new_urls
        num_levels += 1
        # Random pause between rounds to be gentle on the server
        time.sleep(random.randint(2, 5))
    return partitioned_urls
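
The loop above relies on get_page_info accepting a single (url, proxies) tuple and returning a 4-tuple that mirrors the URLS table columns. A stub illustrating that contract, with placeholder values rather than real scraping:

def get_page_info(partition_input):
    """Stub for the worker mapped over the executor (sketch).

    Returns (url, num_properties, num_pages, per_page_properties),
    with a falsy value for any count that could not be scraped.
    """
    url, proxies = partition_input
    # ... fetch the page through one of the proxies and parse the counts ...
    return url, 100, 5, 20  # placeholder counts

A URL is split further only while num_properties exceeds num_pages * per_page_properties, i.e. while its listings overflow what pagination can show.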