Example No. 1
# Assumed imports: `percentile` is taken to be numpy.percentile here; the original
# module may instead use a project-local helper with the same 0-100 convention.
from numpy import percentile
import SimpleITK as sitk


def region_growing(image, seeds, lower=None, upper=None):
    """
    Wrapper around SimpleITK's ConnectedThreshold region-growing segmentation, created for ease of use because
    ConnectedThreshold requires a proprietary SimpleITK image format. This function converts the image (in the
    conventional numpy.ndarray format) to SimpleITK's format and back, and performs the actual segmentation in between.
    :param image: An image (numpy.ndarray).
    :param seeds: The seeds from which the region growing will start (list of tuples).
    :param lower: The lower boundary, under which the 'growing' process stops.
    :param upper: The upper boundary, over which the 'growing' process stops.
    :return: The segmented image (numpy.ndarray).
    """

    # Use `is None` so that explicit boundaries of 0 are respected (a plain
    # truthiness check would silently replace lower=0 with the 25th percentile).
    if lower is None:
        lower = percentile(image, 25)

    if upper is None:
        upper = percentile(image, 75)

    # Allow a single seed tuple to be passed instead of a list of tuples.
    if all(isinstance(x, int) for x in seeds):
        seeds = [seeds]

    image = sitk.GetImageFromArray(image)

    segm = sitk.ConnectedThreshold(image1=image,
                                   seedList=seeds,
                                   lower=lower,
                                   upper=upper,
                                   replaceValue=1)

    return sitk.GetArrayFromImage(segm)
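
A minimal usage sketch for the wrapper above, assuming SimpleITK and NumPy are available; the synthetic image, the seed coordinate and the thresholds are invented for illustration only:

import numpy as np

img = np.zeros((64, 64), dtype=np.float32)
img[20:40, 20:40] = 1.0                           # a bright square to segment
seeds = [(30, 30)]                                # one seed inside the bright region
mask = region_growing(img, seeds, lower=0.5, upper=1.5)
print(mask.sum())                                 # pixel count of the grown region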
Example No. 2
    def _calculate_values(self):
        """
        Calculates various values
        :return:
        """
        for tool in self._retracts:
            if tool in self.extruders:
                self.extruders[tool].retract = abs(
                    utils.percentile(self._retracts[tool],
                                     0.99,
                                     key=lambda x: x[0]))
                self.extruders[tool].retract_speed = utils.percentile(
                    self._retracts[tool], 0.99, key=lambda x: x[1])
                if (self.extruders[tool].retract > 15
                        or self.extruders[tool].retract_speed > 10000):
                    raise ValueError(
                        "Deducing retract values returned bad values: {} length, {} speed"
                        .format(
                            self.extruders[tool].retract,
                            self.extruders[tool].retract_speed,
                        ))

        for tool in self._temperatures:
            if tool in self.extruders:
                self.extruders[
                    tool].temperature_setpoints = self._temperatures[tool]

        if self._z_speeds:
            self.settings.travel_z_speed = abs(
                utils.percentile(self._z_speeds, 0.99))

        if self._travel_speeds:
            self.settings.travel_xy_speed = abs(
                utils.percentile(self._travel_speeds, 0.99))
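
The utils.percentile helper is not shown in this example. A minimal sketch consistent with how it is called here (a fraction p in [0, 1] and an optional key for ranking tuples) could look like the following; the project's real implementation may differ:

def percentile(values, p, key=lambda x: x):
    # Return the key of the item ranked at fraction p of the sorted values.
    ordered = sorted(values, key=key)
    index = int(round(p * (len(ordered) - 1)))
    return key(ordered[index])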
Example No. 3
def experiment1():
    """Does downsampling preserve percentile statistics?"""
    motif = prok_motifs[11]
    downsamples = [sample(int(len(motif) / 10), motif, replace=False) for i in range(100)]
    maxent_spoofs = spoof_maxent_motifs(motif, 1000, verbose=True)
    down_spoofs = [spoof_maxent_motifs(dm, 100) for dm in tqdm(downsamples)]
    true_mi, spoof_mis = motif_mi(motif), map(motif_mi, tqdm(maxent_spoofs))
    down_mis, down_spoof_mis = map(motif_mi, downsamples), [map(motif_mi, spoofs) for spoofs in tqdm(down_spoofs)]
    true_percentile = percentile(true_mi, spoof_mis)
    down_percentiles = [percentile(down_mi, ds_mis) for (down_mi, ds_mis) in zip(down_mis, down_spoof_mis)]
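
In this example percentile(x, xs) acts as a percentile rank: it places a single observed statistic within a null sample. A minimal sketch under that assumption (the motif code's own helper may break ties or scale differently):

def percentile(x, xs):
    # Fraction of the null sample xs lying at or below the observed value x.
    xs = list(xs)
    return sum(1.0 for y in xs if y <= x) / len(xs)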
Example No. 4
    def snapshot(self):
        """
        Return a snapshot of current metrics

        [(app, metric, val, type, timestamp), ...]
        """
        metrics = []
        ss = copy.deepcopy(self.metrics)

        # Reset metrics
        self.reset_metrics()

        for app in ss:
            for m, val in ss[app]['COUNTER'].iteritems():
                metrics.append((app, m, val, 'COUNTER', time.time()))

            for m, vals in ss[app]['GAUGE'].iteritems():
                if vals:
                    metrics.append((app, m, sum(vals) / float(len(vals)), 'GAUGE', time.time()))

            for m, vals in ss[app]['PERCENTILE'].iteritems():
                # Emit 50%, 75%, 95%, 99% as GAUGE
                for p in self.percentiles:
                    # Assume the metric name has a trailing separator to append the percentile to
                    metrics.append((app, '{0}{1}_percentile'.format(m, int(p*100)), utils.percentile(vals, p), 'GAUGE', time.time()))
                metrics.append((app, '{0}average'.format(m), sum(vals) / float(len(vals)), 'GAUGE', time.time()))

        return metrics
Example No. 5
def self_score_percentile(motif):
    spoofs = spoof_maxent_motifs(motif, 1000)
    spoof_jk_sds = [
        sd(jackknife_distribution(spoof)) for spoof in tqdm(spoofs)
    ]
    motif_jk_sd = sd(jackknife_distribution(motif))
    return percentile(motif_jk_sd, spoof_jk_sds)
Example No. 6
    def snapshot(self):
        """
        Return a snapshot of current metrics

        [(app, metric, val, type, timestamp), ...]
        """
        metrics = []
        ss = copy.deepcopy(self.metrics)

        # Reset metrics
        self.reset_metrics()

        for app in ss:
            for m, val in ss[app]['COUNTER'].iteritems():
                metrics.append((app, m, val, 'COUNTER', time.time()))

            for m, vals in ss[app]['GAUGE'].iteritems():
                if vals:
                    metrics.append(
                        (app, m, sum(vals) / float(len(vals)), 'GAUGE',
                         time.time()))

            for m, vals in ss[app]['PERCENTILE'].iteritems():
                # Emit 50%, 75%, 95%, 99% as GAUGE
                for p in self.percentiles:
                    # Assume the metric name has a trailing separator to append the percentile to
                    metrics.append(
                        (app, '{0}{1}_percentile'.format(m, int(p * 100)),
                         utils.percentile(vals, p), 'GAUGE', time.time()))
                metrics.append(
                    (app, '{0}average'.format(m), sum(vals) / float(len(vals)),
                     'GAUGE', time.time()))

        return metrics
Example No. 7
    def calc_percentile(self, member, outdict, inprop, outvalprop, outpercentileprop):
        # store in instance var if needed, no need to access cache for each
        # call.
        #
        # If not found in the instance, than try to get from cache (and set if
        # not found), plus setting it as an instance var. Also removes default
        # ordering by name (we don't need it)
        all_members = getattr(self, '_all_members', None)

        if not all_members:
            all_members = cache.get('all_members', None)
            if not all_members:
                self._all_members = all_members = list(
                    Member.objects.filter(is_current=True).order_by().values())
                cache.set('all_members', all_members, settings.LONG_CACHE_TIME)

        member_count = float(len(all_members))
        member_val = getattr(member, inprop) or 0

        avg = sum(x[inprop] or 0 for x in all_members) / member_count

        var = sum(((x[inprop] or 0) - avg) ** 2 for x in all_members) / member_count

        outdict[outvalprop] = member_val
        outdict[outpercentileprop] = percentile(avg, var, member_val) if var != 0 else 0
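
Here percentile(avg, var, member_val) receives a mean and a variance rather than a sample, which suggests a normal-approximation percentile. A hedged sketch of a compatible helper (the actual project function is not shown and may be defined differently):

import math

def percentile(avg, var, val):
    # Percentile rank of val under a normal distribution with mean avg and variance var.
    z = (val - avg) / math.sqrt(var)
    return 0.5 * (1.0 + math.erf(z / math.sqrt(2.0))) * 100.0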
Example No. 8
    def calc_percentile(self, member, outdict, inprop, outvalprop,
                        outpercentileprop):
        # store in instance var if needed, no need to access cache for each
        # call.
        #
        # If not found in the instance, then try to get from cache (and set if
        # not found), plus setting it as an instance var. Also removes default
        # ordering by name (we don't need it)
        all_members = getattr(self, '_all_members', None)

        if not all_members:
            all_members = cache.get('all_members', None)
            if not all_members:
                self._all_members = all_members = list(
                    Member.objects.filter(is_current=True).order_by().values())
                cache.set('all_members', all_members, settings.LONG_CACHE_TIME)

        member_count = float(len(all_members))
        member_val = getattr(member, inprop) or 0

        avg = sum(x[inprop] or 0 for x in all_members) / member_count

        var = sum(
            ((x[inprop] or 0) - avg)**2 for x in all_members) / member_count

        outdict[outvalprop] = member_val
        outdict[outpercentileprop] = percentile(avg, var,
                                                member_val) if var != 0 else 0
Example No. 9
def experiment1():
    """Does downsampling preserve percentile statistics?"""
    motif = (prok_motifs[11])
    downsamples = [
        sample(int(len(motif) / 10), motif, replace=False) for i in range(100)
    ]
    maxent_spoofs = spoof_maxent_motifs(motif, 1000, verbose=True)
    down_spoofs = [spoof_maxent_motifs(dm, 100) for dm in tqdm(downsamples)]
    true_mi, spoof_mis = motif_mi(motif), map(motif_mi, tqdm(maxent_spoofs))
    down_mis, down_spoof_mis = map(motif_mi, downsamples), [
        map(motif_mi, spoofs) for spoofs in tqdm(down_spoofs)
    ]
    true_percentile = percentile(true_mi, spoof_mis)
    down_percentiles = [
        percentile(down_mi, ds_mis)
        for (down_mi, ds_mis) in zip(down_mis, down_spoof_mis)
    ]
Example No. 10
def gen_level_agg_features(data, last_da, win_das, col):
    agg_cols = list(filter(lambda x: not x.startswith(col[:4]), level_cols))
    data = data.copy()
    indexing = (data.da < last_da) & (data.da >= last_da - win_das)
    gp = data.loc[indexing, [col] + agg_cols].groupby(col)[agg_cols]

    aggs = gp.agg([
        'mean', 'std', 'sem', pd.DataFrame.kurt, pd.DataFrame.skew,
        pd.DataFrame.mad, freq,
        percentile(.3),
        percentile(.9)
    ])
    aggs.columns = [
        'agg_level_{}_{}_{}_wd_{}'.format(col, c[0], c[1], win_das)
        for c in aggs.columns
    ]
    aggs = aggs.reset_index()
    data = data.loc[data.da == last_da].merge(aggs, how='left', on=col)
    data.drop(level_cols, inplace=True, axis=1)
    data.drop_duplicates([col, 'da'], inplace=True)
    data.fillna(0, inplace=True)
    data, _ = reduce_mem_usage(data)
    return data
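
In this example percentile(.3) is called with only a quantile, so it has to be a factory returning an aggregation callable that pandas' .agg([...]) can apply per group. A minimal sketch under that assumption (the freq aggregator and the real helper are not shown here):

def percentile(q):
    # Build a named aggregator so pandas can label the resulting column.
    def percentile_(series):
        return series.quantile(q)
    percentile_.__name__ = 'percentile_{}'.format(int(q * 100))
    return percentile_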
Example No. 11
def analyze_motif(motif, trials=1000):
    cols = transpose(motif)
    L = len(cols)
    ps = []
    for col1, col2 in (choose2(cols)):
        actual_mi = dna_mi(col1, col2)
        perm_mis = [dna_mi(col1, permute(col2)) for i in xrange(trials)]
        p = percentile(actual_mi, perm_mis)
        #print p
        ps.append(p)
    q = fdr(ps)
    correlated_pairs = [(i, j) for (i, j), p in zip(choose2(range(L)), ps) if p < q]
    num_correlated = len(correlated_pairs)
    print "correlated column pairs:", num_correlated, "%1.2f" % ((num_correlated)/choose(L,2))
    return correlated_pairs
Example No. 12
def analyze_motif(motif, trials=1000):
    cols = transpose(motif)
    L = len(cols)
    ps = []
    for col1, col2 in (choose2(cols)):
        actual_mi = dna_mi(col1, col2)
        perm_mis = [dna_mi(col1, permute(col2)) for i in xrange(trials)]
        p = percentile(actual_mi, perm_mis)
        #print p
        ps.append(p)
    q = fdr(ps)
    correlated_pairs = [(i, j) for (i, j), p in zip(choose2(range(L)), ps)
                        if p < q]
    num_correlated = len(correlated_pairs)
    print "correlated column pairs:", num_correlated, "%1.2f" % (
        (num_correlated) / choose(L, 2))
    return correlated_pairs
Example No. 13
    def process_metric(self, metric, legacy=False):
        """ Process metrics and store and publish """
        logger.debug("Received metric: {0}".format(metric))
        if legacy:
            # Legacy format for metrics is slightly different...
            # Index them under same "app name"
            app_name = '__LEGACY__'
            metric_name, value, metric_type = metric.split('|')
        else:
            app_name, metric_name, metric_type, value = metric

        try:
            value = float(value)
        except ValueError:
            logger.warn(
                "Failed to cast metric value to float - {0}".format(metric))
            return

        if app_name not in self.metrics:
            self.init_app_metrics(app_name)

        pub_metrics = []
        if metric_type == 'GAUGE':
            self.metrics[app_name][metric_type][metric_name].append(value)
            # Publish the current running average
            pub_val = sum(
                self.metrics[app_name][metric_type][metric_name]) / len(
                    self.metrics[app_name][metric_type][metric_name])
            pub_metrics.append(
                (self.hostname, app_name, metric_name, metric_type, pub_val,
                 time.time()))

        elif metric_type == 'PERCENTILE' or metric_type == 'HISTOGRAM':
            # Kill off the HISTOGRAM type!!
            metric_type = 'PERCENTILE'
            self.metrics[app_name][metric_type][metric_name].append(value)
            # Publish the current running percentiles
            for p in self.percentiles:
                pub_metrics.append(
                    (self.hostname, app_name,
                     '{0}{1}_percentile'.format(metric_name,
                                                int(p * 100)), 'GAUGE',
                     utils.percentile(
                         self.metrics[app_name][metric_type][metric_name],
                         p), time.time()))
            avg = sum(self.metrics[app_name][metric_type][metric_name]) / len(
                self.metrics[app_name][metric_type][metric_name])
            pub_metrics.append(
                (self.hostname, app_name, '{0}average'.format(metric_name),
                 metric_type, avg, time.time()))

        elif metric_type == 'COUNTER':
            self.metrics[app_name][metric_type][metric_name] += value
            pub_val = self.metrics[app_name][metric_type][metric_name]
            # Publish the running count
            pub_metrics.append(
                (self.hostname, app_name, metric_name, metric_type, pub_val,
                 time.time()))

        else:
            logger.warn("Unrecognized metric type - {0}".format(metric))
            return

        msg = ujson.dumps(pub_metrics)
        self.pub_socket.send(msg)
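
A hypothetical call sketch for the method above; collector stands in for an instance of the publishing class and the metric values are invented. The legacy form is a pipe-delimited 'name|value|type' string:

collector.process_metric(('billing', 'request_time_ms', 'PERCENTILE', 42.0))
collector.process_metric('request_time_ms|42.0|PERCENTILE', legacy=True)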
Example No. 14
def pipeline(img,
             low,
             high,
             roi_percentile=85,
             focal_scope='global',
             maxima_areas='small',
             merge_type='blend',
             merge_alpha=0.5,
             filter_type='percentage',
             filter_percentage=15,
             filter_threshold=0.6):
    """
    Visualization of the whole workflow. Requires the original image and the high and low res CAMs to work. Performs
    the following steps:

    1. Applies a filter to blur the high-res map.
    2. Extracts the ROI from the low-res map through a percentile.
    3. Identifies the focal points of the low-res map by locating its local maxima.
    4. Computes the gradient of the high-res map through a sobel filter.
    5. Draws a histogram of the gradient. Only considers areas corresponding to the ROI extracted from the low-res map.
    6. Calculates a 'lower' and 'upper' bound on the 25th and 75th percentile, respectively.
    7. Performs a region-growing segmentation algorithm on the gradient. The boundaries are the previous percentiles,
       while the focal points are set as the initial seeds (from where to start growing).
    8. Merges the result of the segmentation with the low-res map.
    9. Segments the original image according to the result of the previous merger.

    Note: it would be more efficient and elegant if I went for 'axes fraction' instead of 'data' for the coordinates
          of the ConnectionPatches, but it's too much of a hassle to change.

    :param img: Original RGB image, default shape=(224, 224, 3).
    :param low: Low-resolution CAM, default shape=(14, 14).
    :param high: High-resolution CAM, default shape=(224, 224).
    :param roi_percentile: Percentile based on which the ROI will be extracted. The default percentile=85 means that
                           the ROI will include the 15% highest-intensity pixels from the low-res map.
    :param focal_scope: The scope in which the focal points will be identified. 'global' looks for global maxima, while
                        'local' looks for local maxima. Accepted values: ['global', 'local']
    :param maxima_areas: Specifies the size of the focal points. Two options available: 'small' and 'large'.
    :param merge_type: Specifies the method of merging the high-res segment map with the low-res map.
                       Two methods available: 'blend' and 'multiply'. The first is a possibly weighted linear
                       combination of the two, while the second simply multiplies them.
    :param merge_alpha: If merge_type='blend', alpha regulates the importance of each of the two images (i.e. the low
                        and the high-res maps). Should be a float in [0, 1]. High values result in more influence from
                        the high-res map.
    :param filter_type: Specifies the method of segmenting the original image based on the combined CAM. Two methods are
                        available: 'percentage' and 'threshold'. The first keeps a percentage of the original image's
                        pixels while the second relies solely on the values of the combined CAM exceeding a threshold.
    :param filter_percentage: Selects the percentage of pixels to be included in the final segment. Only relevant if
                              filter_type='percentage'. Should be a number between 0 and 100.
    :param filter_threshold: Selects the threshold based on which the final segmentation will be performed. Only pixels
                             of the combined CAM that have an intensity greater than this threshold will be included.
                             Based on this mask, the original image will be segmented. Should be a float in [0, 1].
    """

    # Value checks

    # Categorical arguments
    if maxima_areas not in ('small', 'large'):
        raise ValueError(
            "available options for maxima_areas are: 'small' and 'large'.")

    if merge_type not in ('blend', 'multiply'):
        raise ValueError(
            "available options for merge_type are: 'blend' and 'multiply'.")

    if filter_type not in ('percentage', 'threshold'):
        raise ValueError(
            "vailable options for filter_type are: 'percentage' and 'threshold'."
        )

    # Percentage arguments
    if roi_percentile <= 0 or roi_percentile >= 100:
        raise ValueError('roi_percentile should be a percentage in (0, 100)')
    elif roi_percentile < 1:
        warnings.warn(
            'roi_percentile value in [0, 1). Should be defined as a percentage in (0, 100), '
            'e.g. if the desired percentage is 33%, pass 33 instead of 0.33!')

    if filter_percentage <= 0 or filter_percentage >= 100:
        raise ValueError(
            'filter_percentage should be a percentage in (0, 100)')
    elif filter_percentage < 1:
        warnings.warn(
            'filter_percentage value in [0, 1). Should be defined as a percentage in (0, 100), '
            'e.g. if the desired percentage is 33%, pass 33 instead of 0.33!')

    # Value arguments
    if merge_alpha < 0 or merge_alpha > 1:
        raise ValueError('merge_alpha should be a float in [0, 1]')

    if filter_threshold < 0 or filter_threshold > 1:
        raise ValueError('filter_threshold should be a float in [0, 1]')

    # Coordinates of the top/bottom/left/right/middle of the input image
    left = (0, img.shape[1] / 2)
    right = (img.shape[1], img.shape[1] / 2)
    bottom = (img.shape[1] / 2, img.shape[1])
    top = (img.shape[1] / 2, 0)
    midpoint = (img.shape[1] / 2, img.shape[1] / 2)

    # Create two 'blank' images for filling empty positions
    blank = np.ones(img[0].shape, dtype=np.uint8)
    half_blank = blank[::2]

    # Initialize 5x7 grid
    fig, ax = plt.subplots(5, 7, figsize=(16, 16))

    ##############################
    ######## First column ########
    ##############################

    # Fill first, second, fourth and fifth rows with blank images
    ax[0, 0].imshow(blank, alpha=0)
    ax[0, 0].axis('off')
    ax[1, 0].imshow(blank, alpha=0)
    ax[1, 0].axis('off')
    ax[3, 0].imshow(blank, alpha=0)
    ax[3, 0].axis('off')
    ax[4, 0].imshow(half_blank, alpha=0)
    ax[4, 0].axis('off')

    # Add original image to the third row
    ax[2, 0].imshow(img[0], zorder=3)
    ax[2, 0].axis('off')
    ax[2, 0].set_title('Original image', backgroundcolor='white', zorder=2)

    # Three crooked lines starting from the first row, represented by thirteen (!) connection patches
    # Connection of 'original image' to 'high-res map'
    con1a = ConnectionPatch(xyA=top,
                            xyB=midpoint,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[2, 0],
                            axesB=ax[1, 0],
                            color='black',
                            lw=2,
                            zorder=1)
    con1b = ConnectionPatch(xyA=midpoint,
                            xyB=left,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[1, 0],
                            axesB=ax[1, 1],
                            color='black',
                            lw=2,
                            arrowstyle='->')

    # Connection of 'original image' to 'low-res map'
    con2a = ConnectionPatch(xyA=bottom,
                            xyB=midpoint,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[2, 0],
                            axesB=ax[3, 0],
                            color='black',
                            lw=2)
    con2b = ConnectionPatch(xyA=midpoint,
                            xyB=left,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[3, 0],
                            axesB=ax[3, 1],
                            color='black',
                            lw=2,
                            arrowstyle='->')

    # Connection of 'original image' to 'result'
    con3b = ConnectionPatch(xyA=midpoint,
                            xyB=bottom,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[1, 0],
                            axesB=ax[0, 0],
                            color='black',
                            lw=2)
    con3c = ConnectionPatch(xyA=bottom,
                            xyB=bottom,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[0, 0],
                            axesB=ax[0, 1],
                            color='black',
                            lw=2)
    con3d = ConnectionPatch(xyA=bottom,
                            xyB=bottom,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[0, 1],
                            axesB=ax[0, 2],
                            color='black',
                            lw=2)
    con3e = ConnectionPatch(xyA=bottom,
                            xyB=bottom,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[0, 2],
                            axesB=ax[0, 3],
                            color='black',
                            lw=2)
    con3f = ConnectionPatch(xyA=bottom,
                            xyB=bottom,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[0, 3],
                            axesB=ax[0, 4],
                            color='black',
                            lw=2)
    con3g = ConnectionPatch(xyA=bottom,
                            xyB=bottom,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[0, 4],
                            axesB=ax[0, 5],
                            color='black',
                            lw=2)
    con3h = ConnectionPatch(xyA=bottom,
                            xyB=bottom,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[0, 5],
                            axesB=ax[0, 6],
                            color='black',
                            lw=2)
    con3i = ConnectionPatch(xyA=bottom,
                            xyB=midpoint,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[0, 6],
                            axesB=ax[1, 6],
                            color='black',
                            lw=2)
    con3k = ConnectionPatch(xyA=midpoint,
                            xyB=midpoint,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[1, 6],
                            axesB=ax[2, 6],
                            color='black',
                            lw=2)
    con3l = ConnectionPatch(xyA=midpoint,
                            xyB=top,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[2, 6],
                            axesB=ax[3, 6],
                            color='black',
                            lw=2,
                            arrowstyle='->',
                            zorder=1)

    # Add each patch to its respective axis
    ax[2, 0].add_artist(con1a)
    ax[1, 0].add_artist(con1b)

    ax[2, 0].add_artist(con2a)
    ax[3, 0].add_artist(con2b)

    ax[1, 0].add_artist(con3b)
    ax[0, 0].add_artist(con3c)
    ax[0, 1].add_artist(con3d)
    ax[0, 2].add_artist(con3e)
    ax[0, 3].add_artist(con3f)
    ax[0, 4].add_artist(con3g)
    ax[0, 5].add_artist(con3h)
    ax[0, 6].add_artist(con3i)
    ax[1, 6].add_artist(con3k)
    ax[2, 6].add_artist(con3l)

    ###############################
    ######## Second column ########
    ###############################

    # High-res map on the second line
    ax[1, 1].imshow(high)
    ax[1, 1].axis('off')
    ax[1, 1].set_title('High-res CAM')

    # Low-res map on the fourth line
    ax[3, 1].imshow(utils.resize(low), zorder=3)
    ax[3, 1].axis('off')
    ax[3, 1].set_title('Low-res CAM', backgroundcolor='white', zorder=2)

    # Fill the first, third and fifth lines with blank images
    ax[0, 1].imshow(blank, alpha=0)
    ax[0, 1].axis('off')
    ax[2, 1].imshow(blank, alpha=0)
    ax[2, 1].axis('off')
    ax[4, 1].imshow(half_blank, alpha=0)
    ax[4, 1].axis('off')

    # Four lines represented by eleven (!) connection patches
    # Connection of 'high-res map' to 'gradient'
    con4 = ConnectionPatch(xyA=right,
                           xyB=left,
                           coordsA='data',
                           coordsB='data',
                           axesA=ax[1, 1],
                           axesB=ax[1, 2],
                           color='black',
                           lw=2,
                           arrowstyle='->')

    # Connection of 'low-res map' to 'roi'
    con5a = ConnectionPatch(xyA=top,
                            xyB=midpoint,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[3, 1],
                            axesB=ax[2, 1],
                            color='black',
                            lw=2,
                            zorder=1)
    con5b = ConnectionPatch(xyA=midpoint,
                            xyB=left,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[2, 1],
                            axesB=ax[2, 2],
                            color='black',
                            lw=2,
                            arrowstyle='->')

    # Connection of 'low-res map' to 'focal points'
    con6 = ConnectionPatch(xyA=right,
                           xyB=left,
                           coordsA='data',
                           coordsB='data',
                           axesA=ax[3, 1],
                           axesB=ax[3, 2],
                           color='black',
                           lw=2,
                           arrowstyle='->')

    # Connection of 'low-res map' to 'merger'
    con7a = ConnectionPatch(xyA=bottom,
                            xyB=top,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[3, 1],
                            axesB=ax[4, 1],
                            color='black',
                            lw=2,
                            zorder=1)
    con7b = ConnectionPatch(xyA=top,
                            xyB=top,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[4, 1],
                            axesB=ax[4, 2],
                            color='black',
                            lw=2,
                            zorder=1)
    con7c = ConnectionPatch(xyA=top,
                            xyB=top,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[4, 2],
                            axesB=ax[4, 3],
                            color='black',
                            lw=2,
                            zorder=1)
    con7d = ConnectionPatch(xyA=top,
                            xyB=top,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[4, 3],
                            axesB=ax[4, 4],
                            color='black',
                            lw=2,
                            zorder=1)
    con7e = ConnectionPatch(xyA=top,
                            xyB=top,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[4, 4],
                            axesB=ax[4, 5],
                            color='black',
                            lw=2,
                            zorder=1)
    con7f = ConnectionPatch(xyA=top,
                            xyB=bottom,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[4, 5],
                            axesB=ax[3, 5],
                            color='black',
                            lw=2,
                            zorder=1,
                            arrowstyle='->')

    # Add the patches to their respective axes
    ax[1, 1].add_artist(con4)
    ax[3, 1].add_artist(con5a)
    ax[2, 1].add_artist(con5b)
    ax[3, 1].add_artist(con6)
    ax[3, 1].add_artist(con7a)
    ax[4, 1].add_artist(con7b)
    ax[4, 2].add_artist(con7c)
    ax[4, 3].add_artist(con7d)
    ax[4, 4].add_artist(con7e)
    ax[4, 5].add_artist(con7f)

    ##############################
    ######## Third column ########
    ##############################

    # High-res blur
    blurred = filters.blur(high)
    ax[1, 2].imshow(blurred)
    ax[1, 2].axis('off')
    ax[1, 2].set_title('Blurred')

    # Region of Interest
    roi = utils.resize(low) > utils.percentile(utils.resize(low),
                                               roi_percentile)
    a = ax[2, 2].imshow(roi)
    ax[2, 2].axis('off')
    ax[2, 2].set_title('Region of Interest')

    # Focal Points
    focal_points = maxima.find_focal_points(low,
                                            scope=focal_scope,
                                            maxima_areas=maxima_areas)
    bg, dots = a.get_cmap().colors[0], a.get_cmap().colors[-1]
    ax[3, 2].imshow(
        (blank.reshape(-1, 3) * bg).reshape(img.shape[1], img.shape[1], 3))
    ax[3, 2].scatter([x[0] for x in focal_points],
                     [x[1] for x in focal_points],
                     marker='x',
                     s=30,
                     c=dots)
    ax[3, 2].axis('off')
    ax[3, 2].set_title('Focal Points')

    # Fill first and fifth rows with blank images
    ax[0, 2].imshow(blank, alpha=0)
    ax[0, 2].axis('off')
    ax[4, 2].imshow(half_blank, alpha=0)
    ax[4, 2].axis('off')

    # Three lines represented by five connection patches
    con8 = ConnectionPatch(xyA=right,
                           xyB=left,
                           coordsA='data',
                           coordsB='data',
                           axesA=ax[1, 2],
                           axesB=ax[1, 3],
                           color='black',
                           lw=2,
                           arrowstyle='->')
    con9 = ConnectionPatch(xyA=right,
                           xyB=(0, 0.5),
                           coordsA='data',
                           coordsB='axes fraction',
                           axesA=ax[2, 2],
                           axesB=ax[2, 3],
                           color='black',
                           lw=2,
                           arrowstyle='->')
    con10a = ConnectionPatch(xyA=right,
                             xyB=midpoint,
                             coordsA='data',
                             coordsB='data',
                             axesA=ax[3, 2],
                             axesB=ax[3, 3],
                             color='black',
                             lw=2)
    con10b = ConnectionPatch(xyA=midpoint,
                             xyB=midpoint,
                             coordsA='data',
                             coordsB='data',
                             axesA=ax[3, 3],
                             axesB=ax[3, 4],
                             color='black',
                             lw=2)
    con10c = ConnectionPatch(xyA=midpoint,
                             xyB=left,
                             coordsA='data',
                             coordsB='data',
                             axesA=ax[3, 4],
                             axesB=ax[3, 5],
                             color='black',
                             lw=2,
                             arrowstyle='->')

    # Add the patches to their respective axes
    ax[1, 2].add_artist(con8)
    ax[2, 2].add_artist(con9)
    ax[3, 2].add_artist(con10a)
    ax[3, 3].add_artist(con10b)
    ax[3, 4].add_artist(con10c)

    ###############################
    ######## Fourth column ########
    ###############################

    # High-res edge detection
    grad = utils.normalize_image(filters.sobel(blurred))
    ax[1, 3].imshow(grad)
    ax[1, 3].axis('off')
    ax[1, 3].set_title('Edge detection')

    # Gradient percentiles
    roi_grad = grad[roi]
    lower = utils.percentile(roi_grad, 25)
    upper = utils.percentile(roi_grad, 75)
    ax[2, 3] = sns.distplot(roi_grad.ravel(), ax=ax[2, 3])
    ax[2, 3].plot([lower, lower], [0, 4], c='C1')
    ax[2, 3].plot([upper, upper], [0, 4], c='C1')
    ax[2, 3].text(lower,
                  -0.5,
                  'lower',
                  color='C1',
                  horizontalalignment='center')
    ax[2, 3].text(upper,
                  4.5,
                  'upper',
                  color='C1',
                  horizontalalignment='center')
    ax[2, 3].axis('off')
    ttl = ax[2, 3].set_title('Edge Histogram')
    ttl.set_bbox(dict(color='white', alpha=0.5, zorder=2))
    square_axes(ax[2, 3])  # custom function that shrinks the axis object to a square box

    # Fill first, fourth and fifth rows
    ax[0, 3].imshow(blank, alpha=0)
    ax[0, 3].axis('off')
    ax[3, 3].imshow(blank, alpha=0)
    ax[3, 3].axis('off')
    ax[4, 3].imshow(half_blank, alpha=0)
    ax[4, 3].axis('off')

    # Three lines represented by four connection patches
    con11 = ConnectionPatch(xyA=bottom,
                            xyB=(0.5, 1),
                            coordsA='data',
                            coordsB='axes fraction',
                            axesA=ax[1, 3],
                            axesB=ax[2, 3],
                            color='black',
                            lw=2,
                            arrowstyle='->')
    con12a = ConnectionPatch(xyA=right,
                             xyB=midpoint,
                             coordsA='data',
                             coordsB='data',
                             axesA=ax[1, 3],
                             axesB=ax[1, 4],
                             color='black',
                             lw=2)
    con12b = ConnectionPatch(xyA=midpoint,
                             xyB=top,
                             coordsA='data',
                             coordsB='data',
                             axesA=ax[1, 4],
                             axesB=ax[2, 4],
                             color='black',
                             lw=2,
                             arrowstyle='->',
                             zorder=1)

    con13 = ConnectionPatch(xyA=(1, 0.5),
                            xyB=left,
                            coordsA='axes fraction',
                            coordsB='data',
                            axesA=ax[2, 3],
                            axesB=ax[2, 4],
                            color='black',
                            lw=2,
                            arrowstyle='->')

    # Add the patches to their respective axes
    ax[1, 3].add_artist(con11)
    ax[1, 3].add_artist(con12a)
    ax[1, 4].add_artist(con12b)
    ax[2, 3].add_artist(con13)

    ##############################
    ######## Fifth column ########
    ##############################

    # Region Growing Segmentation
    segm = segment.region_growing(grad,
                                  seeds=focal_points,
                                  lower=lower,
                                  upper=upper)
    ax[2, 4].imshow(segm, zorder=3)
    ax[2, 4].axis('off')
    ttl = ax[2, 4].set_title('Region Growing\nSegmentation')
    ttl.set_bbox(dict(color='white', alpha=0.5, zorder=2))

    # Fill first, second, fourth and fifth rows
    ax[0, 4].imshow(blank, alpha=0)
    ax[0, 4].axis('off')
    ax[1, 4].imshow(blank, alpha=0)
    ax[1, 4].axis('off')
    ax[3, 4].imshow(blank, alpha=0)
    ax[3, 4].axis('off')
    ax[4, 4].imshow(half_blank, alpha=0)
    ax[4, 4].axis('off')

    # Just one connection! :)
    con14 = ConnectionPatch(xyA=right,
                            xyB=left,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[2, 4],
                            axesB=ax[2, 5],
                            color='black',
                            lw=2,
                            arrowstyle='->')

    ax[2, 4].add_artist(con14)

    ##############################
    ######## Sixth column ########
    ##############################

    # Add edges and fill small holes
    edges = (grad >= upper).astype(float)
    roi_edges = edges * roi
    segm_with_edges = segm + roi_edges
    filled = maxima.remove_small_holes(segm_with_edges)
    ax[2, 5].imshow(filled)
    ax[2, 5].axis('off')
    ax[2, 5].set_title('Remove small holes')

    # High-Low merger
    merged = merge.merge_images(filled,
                                low,
                                method=merge_type,
                                alpha=merge_alpha)
    ax[3, 5].imshow(merged)
    ax[3, 5].axis('off')
    ttl = ax[3, 5].set_title('High-Low Merger')
    ttl.set_bbox(dict(color='white', alpha=0.5, zorder=2))

    # Fill remaining rows
    ax[0, 5].imshow(blank, alpha=0)
    ax[0, 5].axis('off')
    ax[1, 5].imshow(blank, alpha=0)
    ax[1, 5].axis('off')
    ax[3, 5].imshow(blank, alpha=0)
    ax[3, 5].axis('off')
    ax[4, 5].imshow(half_blank, alpha=0)
    ax[4, 5].axis('off')

    # Last connection patches...
    con15 = ConnectionPatch(xyA=bottom,
                            xyB=top,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[2, 5],
                            axesB=ax[3, 5],
                            color='black',
                            lw=2,
                            zorder=-1,
                            arrowstyle='->')
    con16 = ConnectionPatch(xyA=right,
                            xyB=left,
                            coordsA='data',
                            coordsB='data',
                            axesA=ax[3, 5],
                            axesB=ax[3, 6],
                            color='black',
                            lw=2,
                            zorder=-1,
                            arrowstyle='->')

    ax[2, 5].add_artist(con15)
    ax[3, 5].add_artist(con16)

    ################################
    ######## Seventh column ########
    ################################

    # Result
    if filter_type == 'percentage':
        result = merge.keep_percentage(img,
                                       merged,
                                       percentage=filter_percentage / 100)
    else:
        result = merge.filter_image(img, merged, threshold=filter_threshold)
    ax[3, 6].imshow(result, zorder=3)
    ax[3, 6].axis('off')
    ttl = ax[3, 6].set_title('Result')
    ttl.set_bbox(dict(color='white', alpha=0.5, zorder=2))

    # Fill remaining rows
    ax[0, 6].imshow(blank, alpha=0)
    ax[0, 6].axis('off')
    ax[1, 6].imshow(blank, alpha=0)
    ax[1, 6].axis('off')
    ax[2, 6].imshow(blank, alpha=0)
    ax[2, 6].axis('off')
    ax[4, 6].imshow(half_blank, alpha=0)
    ax[4, 6].axis('off')
Example No. 15
def pipeline(img,
             low,
             high,
             roi_percentile=85,
             focal_scope='global',
             maxima_areas='small',
             merge_type='blend',
             merge_alpha=0.5,
             filter_type='percentage',
             filter_percentage=15,
             filter_threshold=0.6):
    """
    The whole postprocessing pipeline, returning step-by-step results.

    In detail the postprocessing pipeline involves the following steps:

    1. Applies a filter to blur the high-res map.
    2. Extracts the ROI from the low-res map through a percentile.
    3. Identifies the focal points of the low-res map by locating its local maxima.
    4. Computes the gradient of the high-res map through a sobel filter.
    5. Draws a histogram of the gradient. Only considers areas corresponding to the ROI extracted from the low-res map.
    6. Calculates a 'lower' and 'upper' bound on the 25th and 75th percentile, respectively.
    7. Performs a region-growing segmentation algorithm on the gradient. The boundaries are the previous percentiles,
       while the focal points are set as the initial seeds (from where to start growing).
    8. Merges the result of the segmentation with the low-res map.
    9. Segments the original image according to the result of the previous merger.

    :param img: The original image (numpy.ndarray).
    :param low: The low-resolution Class Activation Map (numpy.ndarray).
    :param high: The high-resolution Class Activation Map (numpy.ndarray).
    :param roi_percentile: The percentile above which the ROI will be estimated. roi_percentile=85 means that the 15%
                           highest intensity pixels of the low-res map will constitute the ROI (int in (0, 100)).
    :param focal_scope: The scope in which the focal points will be identified. 'global' looks for global maxima, while
                        'local' looks for local maxima. Accepted values: ['global', 'local']
    :param maxima_areas: Can either be 'small' or 'large', depending on whether we want smaller or larger areas.
                         Only relevant for the 'local' scope. Accepted values: ['small', 'large']
    :param merge_type: Selects whether to multiply or blend the high-res with the low-res CAM after processing.
                       Accepted values: ['blend', 'multiply']
    :param merge_alpha: Parameter for the blend merge method. Higher values result in more influence from the high-res
                        map. Should be a float in [0, 1].
    :param filter_type: Selects how to crop the original image according to the refined CAM. Two options are available:
                        - 'percentage', which keeps a percentage of the highest-intensity values of the refined CAM
                        - 'threshold', which keeps the intensities above a certain threshold
    :param filter_percentage: The percentage of pixels to be kept (a number in (0, 100), e.g. 15 for 15%). Only
                              relevant when filter_type='percentage'
    :param filter_threshold: A float in [0, 1] over which the intensities of the refined CAM will be kept. Only relevant
                             when filter_type='threshold'
    :return: A dictionary with all intermediate results from the postprocessing pipeline. In detail:
             - 'blurred': The blurred high-res CAM.
             - 'low': The original low-res CAM.
             - 'low_resized': The resized low-res CAM (through bilinear interpolation).
             - 'edges': The result of the sobel filter on the blurred high-res map.
             - 'roi': The Region Of Interest extracted from the low-res map.
             - 'bounds': The lower and upper bounds for the region-growing segmentation.
             - 'focal_points': The focal_points extracted from the low-res map.
             - 'segmented': The output of the region-growing segmentation.
             - 'full_segment': The filled segmentation.
             - 'merged': The merger of the segmentation with the low-res map, i.e. the refined CAM.
             - 'result': The part of the original image that has been cropped according to the refined CAM.
    """

    # High-res processing
    blurred = filters.blur(high)
    grad = utils.normalize_image(filters.sobel(blurred))

    # Low-res processing
    roi = utils.resize(low) > utils.percentile(utils.resize(low),
                                               roi_percentile)
    upper = utils.percentile(grad[roi], 75)
    lower = utils.percentile(grad[roi], 25)
    focal_points = maxima.find_focal_points(low,
                                            scope=focal_scope,
                                            maxima_areas=maxima_areas)

    # Region growing segmentation
    segm = segment.region_growing(grad,
                                  seeds=focal_points,
                                  lower=lower,
                                  upper=upper)

    # Segment processing
    edges = (grad >= upper).astype(float)
    roi_edges = edges * roi
    segm_with_edges = segm + roi_edges
    filled = maxima.remove_small_holes(segm_with_edges)

    # Merger
    merged = merge.merge_images(filled,
                                low,
                                method=merge_type,
                                alpha=merge_alpha)

    if filter_type == 'percentage':
        result = merge.keep_percentage(img,
                                       merged,
                                       percentage=filter_percentage / 100)
    elif filter_type == 'threshold':
        result = merge.filter_image(img, merged, threshold=filter_threshold)
    else:
        raise ValueError(
            "available options for filter_type are: 'percentage' and 'threshold'.")

    return {
        'blurred': blurred,
        'low': low,
        'low_resized': utils.resize(low),
        'edges': grad,
        'roi': roi,
        'bounds': (lower, upper),
        'focal_points': focal_points,
        'segmented': segm,
        'full_segment': filled,
        'merged': merged,
        'result': result
    }
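
A hypothetical usage sketch for the dictionary-returning pipeline; img, low and high are placeholders for the original image and the two CAMs produced upstream, not values defined in this example:

outputs = pipeline(img, low, high,
                   roi_percentile=85,
                   filter_type='percentage',
                   filter_percentage=15)
refined_cam = outputs['merged']    # the low-res CAM refined with high-res structure
cropped = outputs['result']        # the original image cropped by the refined CAM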
Example No. 16
    def __init__(self):

        self.contact = random.randint(1, 99)
        self.power = random.randint(1, 99)
        self.discipline = random.randint(1, 99)
        self.control = random.randint(1, 99)
        self.stuff = random.randint(1, 99)
        self.composure = random.randint(1, 99)

        self.batting = {}
        self.pitching = {}

        self.batting['single'] = percentile(self.contact, BAT_DIST['single'])
        self.batting['strikeout'] = percentile((100 - self.contact),
                                               BAT_DIST['strikeout'])
        self.batting['double'] = percentile(self.power, BAT_DIST['double'])
        self.batting['triple'] = percentile(self.power, BAT_DIST['triple'])
        self.batting['HR'] = percentile(self.power, BAT_DIST['HR'])
        self.batting['inPlayOut'] = percentile((100 - self.power),
                                               BAT_DIST['inPlayOut'])
        self.batting['BB'] = percentile(self.discipline, BAT_DIST['BB'])
        self.batting['HBP'] = percentile(self.discipline, BAT_DIST['HBP'])
        self.batting['sacrifice'] = percentile(random.randint(1, 99),
                                               BAT_DIST['sacrifice'])
        self.batting['GDP'] = percentile(random.randint(1, 99),
                                         BAT_DIST['GDP'])
        self.batting['error'] = percentile(random.randint(1, 99),
                                           BAT_DIST['error'])

        self.pitching['single'] = percentile((100 - self.stuff),
                                             PITCH_DIST['single'])
        self.pitching['strikeout'] = percentile(self.stuff,
                                                PITCH_DIST['strikeout'])
        self.pitching['double'] = percentile((100 - self.control),
                                             PITCH_DIST['double'])
        self.pitching['triple'] = percentile((100 - self.control),
                                             PITCH_DIST['triple'])
        self.pitching['HR'] = percentile((100 - self.control),
                                         PITCH_DIST['HR'])
        self.pitching['inPlayOut'] = percentile(self.stuff,
                                                PITCH_DIST['inPlayOut'])
        self.pitching['BB'] = percentile((100 - self.control),
                                         PITCH_DIST['BB'])
        self.pitching['HBP'] = percentile((100 - self.control),
                                          PITCH_DIST['HBP'])
        self.pitching['sacrifice'] = percentile(random.randint(1, 99),
                                                PITCH_DIST['sacrifice'])
        self.pitching['GDP'] = percentile(self.composure, PITCH_DIST['GDP'])
        self.pitching['error'] = percentile(random.randint(1, 99),
                                            PITCH_DIST['error'])
Example No. 17
    def process_metric(self, metric, legacy=False):
        """ Process metrics and store and publish """
        logger.debug("Received metric: {0}".format(metric))
        if legacy:
            # Legacy format for metrics is slightly different...
            # Index them under same "app name"
            app_name = '__LEGACY__'
            metric_name, value, metric_type = metric.split('|')
        else:
            app_name, metric_name, metric_type, value = metric

        try:
            value = float(value)
        except ValueError:
            logger.warn("Failed to cast metric value to float - {0}".format(metric))
            return

        if app_name not in self.metrics:
            self.init_app_metrics(app_name)

        pub_metrics = []
        if metric_type == 'GAUGE':
            self.metrics[app_name][metric_type][metric_name].append(value)
            # Publish the current running average
            pub_val = sum(self.metrics[app_name][metric_type][metric_name])/len(self.metrics[app_name][metric_type][metric_name])
            pub_metrics.append((self.hostname, app_name, metric_name, metric_type, pub_val, time.time()))

        elif metric_type == 'PERCENTILE' or metric_type == 'HISTOGRAM':
            # Kill off the HISTOGRAM type!!
            metric_type = 'PERCENTILE'
            self.metrics[app_name][metric_type][metric_name].append(value)
            # Publish the current running percentiles
            for p in self.percentiles:
                pub_metrics.append((self.hostname, app_name, '{0}{1}_percentile'.format(metric_name, int(p*100)), 'GAUGE', utils.percentile(self.metrics[app_name][metric_type][metric_name], p), time.time()))
            avg = sum(self.metrics[app_name][metric_type][metric_name])/len(self.metrics[app_name][metric_type][metric_name])
            pub_metrics.append((self.hostname, app_name, '{0}average'.format(metric_name), metric_type, avg, time.time()))

        elif metric_type == 'COUNTER':
            self.metrics[app_name][metric_type][metric_name] += value
            pub_val = self.metrics[app_name][metric_type][metric_name]
            # Publish the running count
            pub_metrics.append((self.hostname, app_name, metric_name, metric_type, pub_val, time.time()))

        else:
            logger.warn("Unrecognized metric type - {0}".format(metric))
            return

        msg = ujson.dumps(pub_metrics)
        self.pub_socket.send(msg)