Code example #1
def score_map(map_filename, atlas_data, labels):
    map_data = nb.load(map_filename).get_data()
    parcel_means = labeled_comprehension(map_data, atlas_data, list(labels),
                                         np.mean, float, np.nan)
    parcel_variance = labeled_comprehension(map_data, atlas_data, list(labels),
                                            np.var, float, np.nan)
    within_variance = parcel_variance.mean()
    between_variance = parcel_means.var()
    return within_variance, between_variance
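As a point of reference, here is a minimal, self-contained sketch of the same within/between-parcel computation on toy arrays (the imports and toy data are assumptions; score_map above additionally loads the map from a NIfTI file with nibabel):

# Minimal sketch with toy data: per-parcel statistics via labeled_comprehension.
import numpy as np
from scipy.ndimage import labeled_comprehension

map_data = np.random.rand(10, 10)                  # stand-in for a statistical map
atlas_data = np.arange(100).reshape(10, 10) % 4    # stand-in parcellation, labels 0-3 (0 = background)
labels = [1, 2, 3]

parcel_means = labeled_comprehension(map_data, atlas_data, labels, np.mean, float, np.nan)
parcel_variance = labeled_comprehension(map_data, atlas_data, labels, np.var, float, np.nan)
print(parcel_variance.mean(), parcel_means.var())  # within- vs. between-parcel variance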
Code example #2
def score_map(map_filename, atlas_data, labels):
    map_data = nb.load(map_filename).get_data()
    parcel_means = labeled_comprehension(map_data,
                                         atlas_data,
                                         list(labels),
                                         np.mean,
                                         float,
                                         np.nan)
    parcel_variance = labeled_comprehension(map_data,
                                            atlas_data,
                                            list(labels),
                                            np.var,
                                            float,
                                            np.nan)
    within_variance = parcel_variance.mean()
    between_variance = parcel_means.var()
    return within_variance, between_variance
Code example #3
def _hacky_make_image(labeled_img, u_labels, measures, m_key, dtype=np.int16):
    # Paint each label's measure value (measures[label - 1][m_key]) onto that
    # label's pixels, producing an image shaped like labeled_img.
    out = np.zeros(labeled_img.shape, dtype=dtype).ravel()
    if m_key is None:
        measures = [{"key": m} for m in measures]
        m_key = "key"

    def value_grab(a, b):
        out[b] = measures[a[0] - 1][m_key]
        return None

    sci_meas.labeled_comprehension(labeled_img,
                                   labeled_img,
                                   u_labels,
                                   value_grab,
                                   float,
                                   0,
                                   pass_positions=True)

    return out.reshape(labeled_img.shape)
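A hypothetical call of the helper above on a tiny labeled image (toy data; sci_meas is assumed to be scipy.ndimage.measurements, as the snippet suggests):

# Hypothetical usage of _hacky_make_image defined above (toy data).
import numpy as np

labeled_img = np.array([[0, 1, 1],
                        [2, 2, 0]])
measures = [10, 20]                # one value per label, in label order
out = _hacky_make_image(labeled_img, [1, 2], measures, m_key=None)
# out -> [[ 0 10 10]
#         [20 20  0]]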
Code example #4
File: RasterAT.py Project: Python3pkg/OpticalRS
def band_label_properties(labels,
                          band,
                          ind=None,
                          func=np.mean,
                          outdtype=float,
                          default=0.0):
    if ind is None:
        ind = np.unique(labels.compressed())
    proparr = measurements.labeled_comprehension(band, labels, ind, func,
                                                 outdtype, default)
    return pd.Series(proparr, index=ind)
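A hypothetical call of band_label_properties above on a toy masked label raster and band (imports assumed as in the snippet: measurements from scipy.ndimage, pd for pandas):

# Hypothetical usage of band_label_properties defined above (toy data).
import numpy as np
import numpy.ma as ma

label_raster = ma.masked_equal(np.array([[1, 1, 2],
                                         [2, 2, 0]]), 0)   # 0 = no-data
band = np.array([[1.0, 3.0, 5.0],
                 [7.0, 9.0, 0.0]])
print(band_label_properties(label_raster, band))
# 1    2.0
# 2    7.0
# dtype: float64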
Code example #5
def module_main(ctx):
    label_pairs = a3.inputs['label pair list']
    intensity_image = a3.MultiDimImageFloat_to_ndarray(
        a3.inputs['intensity image'])
    labeled_image = a3.MultiDimImageUInt32_to_ndarray(a3.inputs['labeled'])

    ids_1 = label_pairs[:, 0]
    cnt_1 = labeled_comprehension(intensity_image, labeled_image, ids_1, len,
                                  int, -1)

    print(np.c_[ids_1, cnt_1])
    print('voxel counting done 🍀')
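The counting step above, restated as a standalone toy sketch without the a3 framework (imports and the toy arrays are assumptions):

# Per-label voxel counts via labeled_comprehension with len as the reducer (toy data).
import numpy as np
from scipy.ndimage import labeled_comprehension

labeled_image = np.array([1, 1, 2, 2, 2, 0, 3])
intensity_image = np.random.rand(labeled_image.size)
ids_1 = np.array([1, 2, 3])
cnt_1 = labeled_comprehension(intensity_image, labeled_image, ids_1, len, int, -1)
print(np.c_[ids_1, cnt_1])   # [[1 2], [2 3], [3 1]]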
Code example #6
File: regionprops.py Project: fmi-basel/improc
    def _extract_features(self, labels, intensity):

        unique_l = np.unique(labels)
        unique_l = unique_l[unique_l != 0]

        props = {
            feature_name:
            labeled_comprehension(intensity,
                                  labels,
                                  unique_l,
                                  self._features_functions[feature_name],
                                  out_dtype=float,
                                  default=np.nan)
            for feature_name in self.features
        }
        props['label'] = unique_l

        return props
Code example #7
def spin_parcels(*,
                 lhannot,
                 rhannot,
                 version='fsaverage',
                 n_rotate=1000,
                 spins=None,
                 drop=None,
                 verbose=False,
                 **kwargs):
    """
    Rotates parcels in `{lh,rh}annot` and re-assigns based on maximum overlap

    Vertex labels are rotated with :func:`netneurotools.stats.gen_spinsamples`
    and a new label is assigned to each *parcel* based on the region maximally
    overlapping with its boundaries.

    Parameters
    ----------
    {lh,rh}annot : str
        Path to .annot file containing labels to parcels on the {left,right}
        hemisphere
    version : str, optional
        Specifies which version of `fsaverage` provided annotation files
        correspond to. Must be one of {'fsaverage', 'fsaverage3', 'fsaverage4',
        'fsaverage5', 'fsaverage6'}. Default: 'fsaverage'
    n_rotate : int, optional
        Number of rotations to generate. Default: 1000
    spins : array_like, optional
        Pre-computed spins to use instead of generating them on the fly. If not
        provided will use other provided parameters to create them. Default:
        None
    drop : list, optional
        Specifies regions in {lh,rh}annot that are not present in `data`. NaNs
        will be inserted in place of these regions in the returned data. If
        not specified, parcels defined in `netneurotools.freesurfer.FSIGNORE`
        are assumed to not be present. Default: None
    seed : {int, np.random.RandomState instance, None}, optional
        Seed for random number generation. Default: None
    verbose : bool, optional
        Whether to print occasional status messages. Default: False
    return_cost : bool, optional
        Whether to return cost array (specified as Euclidean distance) for each
        coordinate for each rotation. Default: False
    kwargs : key-value pairs
        Keyword arguments passed to `netneurotools.stats.gen_spinsamples`

    Returns
    -------
    spinsamples : (N, `n_rotate`) numpy.ndarray
        Resampling matrix to use in permuting data parcellated with labels from
        {lh,rh}annot, where `N` is the number of parcels. Indices of -1
        indicate that the parcel was completely encompassed by regions in
        `drop` and should be ignored.
    cost : (N, `n_rotate`,) numpy.ndarray
        Cost (specified as Euclidean distance) of re-assigning each coordinate
        for every rotation in `spinsamples`. Only provided if `return_cost` is
        True.
    """
    def overlap(vals):
        """ Returns most common non-negative value in `vals`; -1 if all neg
        """
        vals = np.asarray(vals)
        vals, counts = np.unique(vals[vals > 0], return_counts=True)
        try:
            return vals[counts.argmax()]
        except ValueError:
            return -1

    if drop is None:
        drop = FSIGNORE
    drop = _decode_list(drop)

    # get vertex-level labels (set drop labels to - values)
    vertices, end = [], 0
    for n, annot in enumerate([lhannot, rhannot]):
        labels, ctab, names = read_annot(annot)
        names = _decode_list(names)
        todrop = set(names) & set(drop)
        inds = [names.index(f) - n for n, f in enumerate(todrop)]
        labs = np.arange(len(names) - len(inds)) + (end - (len(inds) * n))
        insert = np.arange(-1, -(len(inds) + 1), -1)
        vertices.append(np.insert(labs, inds, insert)[labels])
        end += len(names)
    vertices = np.hstack(vertices)
    labels = np.unique(vertices)
    mask = labels > -1

    # get spins + cost (if requested)
    spins, cost = _get_fsaverage_spins(version=version,
                                       spins=spins,
                                       n_rotate=n_rotate,
                                       verbose=verbose,
                                       **kwargs)
    if len(vertices) != len(spins):
        raise ValueError('Provided annotation files have a different '
                         'number of vertices than the specified fsaverage '
                         'surface.\n    ANNOTATION: {} vertices\n     '
                         'FSAVERAGE:  {} vertices'.format(
                             len(vertices), len(spins)))

    # spin and assign regions based on max overlap
    regions = np.zeros((len(labels[mask]), n_rotate), dtype='int32')
    for n in range(n_rotate):
        if verbose:
            msg = f'Calculating parcel overlap: {n:>5}/{n_rotate}'
            print(msg, end='\b' * len(msg), flush=True)
        regions[:, n] = labeled_comprehension(vertices[spins[:, n]], vertices,
                                              labels, overlap, int, -1)[mask]

    if kwargs.get('return_cost'):
        return regions, cost

    return regions
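The per-rotation reassignment in the loop above boils down to one labeled_comprehension call; a toy sketch of that step with 1-D stand-ins for the vertex arrays (imports and data are assumptions):

# Toy sketch of the max-overlap reassignment step (assumed 1-D stand-in data).
import numpy as np
from scipy.ndimage import labeled_comprehension

vertices = np.array([0, 0, 1, 1, 1, 2, 2, 2])   # original parcel label per vertex
rotated = np.array([1, 1, 0, 0, 2, 2, 2, 1])    # vertices[spins[:, n]] for one rotation
labels = np.unique(vertices)

def overlap(vals):
    vals, counts = np.unique(vals[vals > 0], return_counts=True)
    return vals[counts.argmax()] if counts.size else -1

print(labeled_comprehension(rotated, vertices, labels, overlap, int, -1))
# -> [1 2 2]: the rotated label maximally overlapping each original parcel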
Code example #8
def erode(input_image, erosion_levels, min_size, manual_expected_size):
    ######
    #- calculate erosion maps
    #- for all nuclei calculate area and determine unique ID
    #- now link all nuclei in trees for all largest nuclei
    #- find node with lowest cost for each tree
    #- create map with all lowest costs

    #print(list(erosion_levels))
    binary = input_image / 255 < 0.5
    lvl_labels = []  #image with labels
    lvl_ids = []
    lvl_areas = []
    lvl_contours = []
    lvl_nodes = []
    lvl_children = []
    max_id = 0

    binary = input_image / 255 < 0.5
    binary = binary_fill_holes(binary)
    mask = binary
    for lvl in erosion_levels:
        if lvl == 0:
            er = binary
        else:
            er = (binary_erosion(binary, iterations=lvl))
        remove_small_objects(er, min_size=min_size, in_place=True)

        lb, distance, mask = watershed_erosion_edt(mask, er)
        lb_zero = lb == 0
        lb_unique = lb + max_id
        lb_unique[lb_zero] = 0
        lvl_labels.append(lb_unique)
        ids = np.unique(lb_unique)
        ids = ids[1:]  #remove 0 label
        nb_labels = len(ids)
        if (nb_labels == 0):
            lvl_areas.append([])
        else:
            lvl_areas.append(
                labeled_comprehension(binary,
                                      lb,
                                      range(1, nb_labels + 1),
                                      func=np.sum,
                                      out_dtype=float,
                                      default=0))

        lvl_ids.append(ids)
        max_id = max_id + nb_labels

    #calculate contour for all detected cells
    for lvl_i in range(len(erosion_levels)):
        labels_i = lvl_labels[lvl_i]
        contours = []
        for j, id_j in enumerate(lvl_ids[lvl_i]):
            mask_j = labels_i == id_j
            cnt = cv2.findContours(np.uint8(mask_j), cv2.RETR_EXTERNAL,
                                   cv2.CHAIN_APPROX_NONE)[-2]
            arclen = cv2.arcLength(cnt[0], True)
            contours.append(arclen)
        lvl_contours.append(contours)

    #find children for all (future) nodes
    for lvl_i in range(len(erosion_levels) - 1):
        if (len(lvl_ids[lvl_i]) == 0):
            lvl_children.append([])
        else:
            c_lbls = labeled_comprehension(lvl_labels[lvl_i + 1],
                                           lvl_labels[lvl_i],
                                           lvl_ids[lvl_i],
                                           out_dtype=list,
                                           func=my_unique,
                                           default=0)
            lvl_children.append(c_lbls)

    #create actual nodes
    for lvl_i in range(len(erosion_levels)):
        nodes = []
        for j, id_j in enumerate(lvl_ids[lvl_i]):
            area = lvl_areas[lvl_i][j]
            contour = lvl_contours[lvl_i][j]
            ff = form_factor(area, contour)
            node = Node(id_j, area=area, contour=contour, formfactor=ff)
            nodes.append(node)
        lvl_nodes.append(nodes)

    #link nodes
    for lvl_i in range(len(erosion_levels) - 1):
        for j, id_j in enumerate(lvl_ids[lvl_i]):
            children_ids = lvl_children[lvl_i][j]
            parent_node = lvl_nodes[lvl_i][j]
            children_node_indexes = np.where(
                np.isin(lvl_ids[lvl_i + 1], children_ids))
            children_node_indexes = children_node_indexes[0].astype('int')
            children_nodes = np.take(lvl_nodes[lvl_i + 1],
                                     children_node_indexes)
            parent_node.children = children_nodes
        global expected_size
        if manual_expected_size == 0:
            expected_size = np.median(lvl_areas[0])
        else:
            expected_size = manual_expected_size

    ids_final_img = []
    final_nodes = []
    for node in lvl_nodes[0]:
        ids = traverse_tree(node)
        if isinstance(ids, list):
            for traversed_node in ids:
                final_nodes.append(traversed_node)
        else:
            final_nodes.append(node)
    ids_final_img = final_nodes
    ids_final_img = [node.name for node in ids_final_img]
    final_labels = np.zeros((1024, 1024))  # hardcoded output shape; assumes 1024x1024 inputs
    for labels in lvl_labels:
        indices_to_take = np.where(np.isin(labels, ids_final_img))
        final_labels[indices_to_take] = labels[indices_to_take]

    labels, _ = label(final_labels)  #,compactness = 100)
    labels = watershed(image=binary,
                       markers=labels,
                       mask=binary,
                       watershed_line=False)  #,compactness = 100)
    return labels
Code example #9
    def addDQ(self, adinputs=None, **params):
        """
        This primitive is used to add a DQ extension to the input AstroData
        object. The value of a pixel in the DQ extension will be the sum of the
        following: (0=good, 1=bad pixel (found in bad pixel mask), 2=pixel is
        in the non-linear regime, 4=pixel is saturated). This primitive will
        trim the BPM to match the input AstroData object(s).

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        static_bpm: str
            Name of bad pixel mask ("default" -> use default from look-up table)
            If set to None, no static_bpm will be added.
        user_bpm: str
            Name of the bad pixel mask created by the user from flats and
            darks.  It is an optional BPM that can be added to the static one.
        illum_mask: bool
            add illumination mask?
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        timestamp_key = self.timestamp_keys["addDQ"]
        sfx = params["suffix"]

        # Getting all the filenames first prevents reopening the same file
        # for each science AD
        static_bpm_list = params['static_bpm']
        user_bpm_list = params['user_bpm']

        if static_bpm_list == "default":
            static_bpm_list = [self._get_bpm_filename(ad) for ad in adinputs]

        for ad, static, user in zip(*gt.make_lists(
                adinputs, static_bpm_list, user_bpm_list, force_ad=True)):
            if ad.phu.get(timestamp_key):
                log.warning('No changes will be made to {}, since it has '
                            'already been processed by addDQ'.format(
                                ad.filename))
                continue

            if static is None:
                # So it can be zipped with the AD
                final_static = [None] * len(ad)
            else:
                log.fullinfo("Using {} as static BPM".format(static.filename))
                final_static = gt.clip_auxiliary_data(ad,
                                                      aux=static,
                                                      aux_type='bpm',
                                                      return_dtype=DQ.datatype)

            if user is None:
                final_user = [None] * len(ad)
            else:
                log.fullinfo("Using {} as user BPM".format(user.filename))
                final_user = gt.clip_auxiliary_data(ad,
                                                    aux=user,
                                                    aux_type='bpm',
                                                    return_dtype=DQ.datatype)

            for ext, static_ext, user_ext in zip(ad, final_static, final_user):
                extver = ext.hdr['EXTVER']
                if ext.mask is not None:
                    log.warning(
                        'A mask already exists in extver {}'.format(extver))
                    continue

                non_linear_level = ext.non_linear_level()
                saturation_level = ext.saturation_level()

                # Need to create the array first for 3D raw F2 data, with 2D BPM
                ext.mask = np.zeros_like(ext.data, dtype=DQ.datatype)
                if static_ext is not None:
                    ext.mask |= static_ext.data
                if user_ext is not None:
                    ext.mask |= user_ext.data

                if saturation_level:
                    log.fullinfo('Flagging saturated pixels in {}:{} '
                                 'above level {:.2f}'.format(
                                     ad.filename, extver, saturation_level))
                    ext.mask |= np.where(ext.data >= saturation_level,
                                         DQ.saturated, 0).astype(DQ.datatype)

                if non_linear_level:
                    if saturation_level:
                        if saturation_level > non_linear_level:
                            log.fullinfo('Flagging non-linear pixels in {}:{} '
                                         'above level {:.2f}'.format(
                                             ad.filename, extver,
                                             non_linear_level))
                            ext.mask |= np.where(
                                (ext.data >= non_linear_level) &
                                (ext.data < saturation_level), DQ.non_linear,
                                0).astype(DQ.datatype)
                            # Readout modes of IR detectors can result in
                            # saturated pixels having values below the
                            # saturation level. Flag those. Assume we have an
                            # IR detector here because both non-linear and
                            # saturation levels are defined and nonlin<sat
                            regions, nregions = measurements.label(
                                ext.data < non_linear_level)
                            # In all my tests, region 1 has been the majority
                            # of the image; however, I cannot guarantee that
                            # this is always the case and therefore we should
                            # check the size of each region
                            region_sizes = measurements.labeled_comprehension(
                                ext.data, regions, np.arange(1, nregions + 1),
                                len, int, 0)
                            # First, assume all regions are saturated, and
                            # remove any very large ones. This is much faster
                            # than progressively adding each region to DQ
                            hidden_saturation_array = np.where(
                                regions > 0, 4, 0).astype(DQ.datatype)
                            for region in range(1, nregions + 1):
                                # Limit of 10000 pixels for a hole is a bit arbitrary
                                if region_sizes[region - 1] > 10000:
                                    hidden_saturation_array[regions ==
                                                            region] = 0
                            ext.mask |= hidden_saturation_array

                        elif saturation_level < non_linear_level:
                            log.warning('{}:{} has saturation level less than '
                                        'non-linear level'.format(
                                            ad.filename, extver))
                        else:
                            log.fullinfo('Saturation and non-linear levels '
                                         'are the same for {}:{}. Only '
                                         'flagging saturated pixels'.format(
                                             ad.filename, extver))
                    else:
                        log.fullinfo('Flagging non-linear pixels in {}:{} '
                                     'above level {:.2f}'.format(
                                         ad.filename, extver,
                                         non_linear_level))
                        ext.mask |= np.where(ext.data >= non_linear_level,
                                             DQ.non_linear,
                                             0).astype(DQ.datatype)

        # Handle latency if requested
        if params.get("latency", False):
            try:
                adinputs = self.addLatencyToDQ(adinputs,
                                               time=params["time"],
                                               non_linear=params["non_linear"])
            except AttributeError:
                log.warning(
                    "addLatencyToDQ() not defined in primitivesClass " +
                    self.__class__.__name__)

        # Add the illumination mask if requested
        if params['add_illum_mask']:
            adinputs = self.addIllumMaskToDQ(adinputs,
                                             illum_mask=params["illum_mask"])

        # Timestamp and update filenames
        for ad in adinputs:
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
            ad.update_filename(suffix=sfx, strip=True)

        return adinputs
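For orientation, a toy illustration of the bit-plane flagging described in the docstring; the bit values 2 and 4 stand in for DQ.non_linear and DQ.saturated, and the levels are made up:

# Toy illustration of the DQ flagging logic (hypothetical levels; 2/4 stand in
# for DQ.non_linear / DQ.saturated).
import numpy as np

data = np.array([100.0, 40000.0, 70000.0])
non_linear_level, saturation_level = 30000, 60000
mask = np.zeros_like(data, dtype=np.uint16)
mask |= np.where(data >= saturation_level, 4, 0).astype(np.uint16)
mask |= np.where((data >= non_linear_level) & (data < saturation_level),
                 2, 0).astype(np.uint16)
print(mask)   # [0 2 4]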
Code example #10
    def addDQ(self, adinputs=None, **params):
        """
        This primitive is used to add a DQ extension to the input AstroData
        object. The value of a pixel in the DQ extension will be the sum of the
        following: (0=good, 1=bad pixel (found in bad pixel mask), 2=pixel is
        in the non-linear regime, 4=pixel is saturated). This primitive will
        trim the BPM to match the input AstroData object(s).

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        static_bpm: str
            Name of bad pixel mask ("default" -> use default from look-up table)
            If set to None, no static_bpm will be added.
        user_bpm: str
            Name of the bad pixel mask created by the user from flats and
            darks.  It is an optional BPM that can be added to the static one.
        illum_mask: bool
            add illumination mask?
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        timestamp_key = self.timestamp_keys["addDQ"]
        sfx = params["suffix"]

        # Getting all the filenames first prevents reopening the same file
        # for each science AD
        static_bpm_list = params['static_bpm']
        user_bpm_list = params['user_bpm']

        if static_bpm_list == "default":
            static_bpm_list = [self._get_bpm_filename(ad) for ad in adinputs]

        for ad, static, user in zip(*gt.make_lists(adinputs, static_bpm_list,
                                                   user_bpm_list, force_ad=True)):
            if ad.phu.get(timestamp_key):
                log.warning('No changes will be made to {}, since it has '
                    'already been processed by addDQ'.format(ad.filename))
                continue

            if static is None:
                # So it can be zipped with the AD
                final_static = [None] * len(ad)
            else:
                log.fullinfo("Using {} as static BPM".format(static.filename))
                final_static = gt.clip_auxiliary_data(ad, aux=static,
                                        aux_type='bpm', return_dtype=DQ.datatype)

            if user is None:
                final_user = [None] * len(ad)
            else:
                log.fullinfo("Using {} as user BPM".format(user.filename))
                final_user = gt.clip_auxiliary_data(ad, aux=user,
                                        aux_type='bpm', return_dtype=DQ.datatype)

            for ext, static_ext, user_ext in zip(ad, final_static, final_user):
                extver = ext.hdr['EXTVER']
                if ext.mask is not None:
                    log.warning('A mask already exists in extver {}'.
                                format(extver))
                    continue

                non_linear_level = ext.non_linear_level()
                saturation_level = ext.saturation_level()

                # Need to create the array first for 3D raw F2 data, with 2D BPM
                ext.mask = np.zeros_like(ext.data, dtype=DQ.datatype)
                if static_ext is not None:
                    ext.mask |= static_ext.data
                if user_ext is not None:
                    ext.mask |= user_ext.data

                if saturation_level:
                    log.fullinfo('Flagging saturated pixels in {}:{} '
                                 'above level {:.2f}'.
                                 format(ad.filename, extver, saturation_level))
                    ext.mask |= np.where(ext.data >= saturation_level,
                                         DQ.saturated, 0).astype(DQ.datatype)

                if non_linear_level:
                    if saturation_level:
                        if saturation_level > non_linear_level:
                            log.fullinfo('Flagging non-linear pixels in {}:{} '
                                         'above level {:.2f}'.
                                         format(ad.filename, extver,
                                                non_linear_level))
                            ext.mask |= np.where((ext.data >= non_linear_level) &
                                                 (ext.data < saturation_level),
                                                 DQ.non_linear, 0).astype(DQ.datatype)
                            # Readout modes of IR detectors can result in
                            # saturated pixels having values below the
                            # saturation level. Flag those. Assume we have an
                            # IR detector here because both non-linear and
                            # saturation levels are defined and nonlin<sat
                            regions, nregions = measurements.label(
                                                ext.data < non_linear_level)
                            # In all my tests, region 1 has been the majority
                            # of the image; however, I cannot guarantee that
                            # this is always the case and therefore we should
                            # check the size of each region
                            region_sizes = measurements.labeled_comprehension(
                                ext.data, regions, np.arange(1, nregions+1),
                                len, int, 0)
                            # First, assume all regions are saturated, and
                            # remove any very large ones. This is much faster
                            # than progressively adding each region to DQ
                            hidden_saturation_array = np.where(regions > 0,
                                                    4, 0).astype(DQ.datatype)
                            for region in range(1, nregions+1):
                                # Limit of 10000 pixels for a hole is a bit arbitrary
                                if region_sizes[region-1] > 10000:
                                    hidden_saturation_array[regions==region] = 0
                            ext.mask |= hidden_saturation_array

                        elif saturation_level < non_linear_level:
                            log.warning('{}:{} has saturation level less than '
                                'non-linear level'.format(ad.filename, extver))
                        else:
                            log.fullinfo('Saturation and non-linear levels '
                                         'are the same for {}:{}. Only '
                                         'flagging saturated pixels'.
                                format(ad.filename, extver))
                    else:
                        log.fullinfo('Flagging non-linear pixels in {}:{} '
                                     'above level {:.2f}'.
                                     format(ad.filename, extver,
                                            non_linear_level))
                        ext.mask |= np.where(ext.data >= non_linear_level,
                                             DQ.non_linear, 0).astype(DQ.datatype)


        # Handle latency if requested
        if params.get("latency", False):
            try:
                adinputs = self.addLatencyToDQ(adinputs, time=params["time"],
                                               non_linear=params["non_linear"])
            except AttributeError:
                log.warning("addLatencyToDQ() not defined in primitivesClass "
                            + self.__class__.__name__)

        # Add the illumination mask if requested
        if params['add_illum_mask']:
            adinputs = self.addIllumMaskToDQ(adinputs, illum_mask=params["illum_mask"])

        # Timestamp and update filenames
        for ad in adinputs:
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
            ad.update_filename(suffix=sfx, strip=True)

        return adinputs
Code example #11
File: preprocessing.py Project: GFleishman/regiprep
def _largest_connected_component(mask):
    lbls, nlbls = ndims.label(mask)
    vols = ndims.labeled_comprehension(mask, lbls, range(1, nlbls + 1), np.sum,
                                       float, 0)
    mask[lbls != np.argmax(vols) + 1] = 0
    return mask
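A hypothetical call of the helper above on a toy mask (ndims is assumed to be scipy.ndimage, as the snippet's usage suggests; note the function modifies its argument in place):

# Hypothetical usage of _largest_connected_component defined above (toy mask).
import numpy as np

mask = np.array([[1, 1, 0, 0, 0],
                 [0, 0, 0, 1, 1],
                 [0, 0, 0, 1, 1]], dtype=float)
print(_largest_connected_component(mask.copy()))
# only the 2x2 component on the right survives; the smaller component is zeroed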
Code example #12
def apply_feature_extraction(instance_stack, tiff_stack, p_map, channels):

    labels_ring_stack = []
    results = pd.DataFrame()
    for frame_nb in tqdm.tqdm(range(np.shape(instance_stack)[0])):

        p_map_frame = p_map[frame_nb, :, :]
        tiff_frame = tiff_stack[frame_nb, :, :, :]
        instance_frame = instance_stack[frame_nb, :, :]
        nb_centers = np.max(instance_frame)

        to_zip = []  #all the features get appended here
        to_col = []  #all the names    get appended here

        ## EXTRACT FEATURES ##

        ## Store frame number
        frame_number_rep = np.repeat([frame_nb], nb_centers)

        to_zip.append(frame_number_rep)
        to_col.append("frame")

        ## Nuclear centers
        p_map_frame_binary = p_map_frame < (255 / 2)
        centers = center_of_mass(p_map_frame_binary,
                                 labels=instance_frame,
                                 index=range(1, nb_centers + 1))
        centers_x = np.array(centers)[:, 1]  #X/Y are inverted, take care!
        centers_y = np.array(centers)[:, 0]  #     ... are they?? TODO

        to_zip.append(centers_x)
        to_zip.append(centers_y)
        to_col.append("x")
        to_col.append("y")

        ## Mean certainty of nucleus segmentation
        mean_p_nuc = labeled_comprehension(p_map_frame,
                                           labels=instance_frame,
                                           index=range(1, nb_centers + 1),
                                           func=np.mean,
                                           out_dtype='float32',
                                           default=float("nan"))

        to_zip.append(mean_p_nuc)
        to_col.append("mean_p_nuc")

        ## Create cytosolic ring
        labels_ring = extract_ring(instance_frame)

        ## Calculate areas of nucleus and ring
        size_nuc = labeled_comprehension(p_map_frame_binary,
                                         labels=instance_frame,
                                         index=range(1, nb_centers + 1),
                                         func=np.sum,
                                         out_dtype='float32',
                                         default=float("nan"))

        ring_binary = np.zeros_like(labels_ring)
        ring_binary[labels_ring > 0] = 1
        size_ring = labeled_comprehension(ring_binary,
                                          labels=labels_ring,
                                          index=range(1, nb_centers + 1),
                                          func=np.sum,
                                          out_dtype='float32',
                                          default=float("nan"))

        to_zip.append(size_nuc)
        to_zip.append(size_ring)
        to_col.append("size_nuc")
        to_col.append("size_ring")

        ## Extract features from additional channels:
        channels_other = {k: v
                          for (k, v) in channels.items()
                          if k != "H2B"}  #for all channels that are not H2B

        for k, v in channels_other.items():
            #mean intensity of nucleus
            mean_nuc = labeled_comprehension(tiff_frame[:, :, v],
                                             labels=instance_frame,
                                             index=range(1, nb_centers + 1),
                                             func=np.mean,
                                             out_dtype='float32',
                                             default=float("nan"))
            #mean intensity of cytosolic ring
            mean_ring = labeled_comprehension(tiff_frame[:, :, v],
                                              labels=labels_ring,
                                              index=range(1, nb_centers + 1),
                                              func=np.mean,
                                              out_dtype='float32',
                                              default=float("nan"))
            #their ratio
            ratio = (mean_ring / mean_nuc).astype('float32')

            to_zip.append(mean_nuc)
            to_zip.append(mean_ring)
            to_zip.append(ratio)
            to_col.append("mean_nuc_" + k)
            to_col.append("mean_ring_" + k)
            to_col.append("ratio_" + k)

        ## STORE FEATURES TO PANDAS DF, ONE ROW / NUCLEUS
        #features = list(zip(centers_x,centers_y,nuclear_area,ring_area,frame_number_rep, range(1, nb_centers+1),
        #   mean_nucleus_c1, mean_ring_c1, ratio_c1,
        #  mean_p_nucleus))

        to_zip.append(range(1, nb_centers + 1))
        to_col.append("label_frame")

        features = list(zip(*to_zip))

        results = results.append(features)  #assign return value as it is a PD
        labels_ring_stack.append(
            labels_ring)  #dont assign return value as it is a list

    #columns=['y','x','size_nuc','size_ring','frame','label_frame',
    #       'mean_nuc_c1','mean_ring_c1','ratio_c1',
    #       'mean_p_nuc']

    results.columns = to_col
    #results = results.convert_dtypes() #convert to "best" dtype

    # downsample precision from 64bit to 32bit
    #    results = results.astype({'y':'float32','x':'float32','size_nuc':'float32','size_ring':'float32','frame':'float32', 'label_frame':'float32',
    #            'mean_nuc_c1':'float32','mean_ring_c1':'float32','ratio_c1':'float32',
    #            'mean_p_nuc':'float32'})

    results = results.astype('float32')

    labels_ring_stack = np.asarray(labels_ring_stack)

    return results, labels_ring_stack
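The per-channel nucleus-versus-ring intensity ratio computed above, as a standalone toy sketch (made-up label images and intensities; imports assumed):

# Toy sketch of the per-label nucleus/ring mean-intensity ratio (assumed data).
import numpy as np
from scipy.ndimage import labeled_comprehension

instance_frame = np.array([[1, 1, 0],
                           [0, 0, 2]])        # nucleus labels
labels_ring = np.array([[0, 0, 1],
                        [2, 2, 0]])           # hypothetical matching ring labels
channel = np.arange(6, dtype=float).reshape(2, 3)
idx = range(1, 3)
mean_nuc = labeled_comprehension(channel, instance_frame, idx, np.mean, 'float32', float('nan'))
mean_ring = labeled_comprehension(channel, labels_ring, idx, np.mean, 'float32', float('nan'))
print(mean_ring / mean_nuc)   # -> [4.  0.7]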
Code example #13
def spin_parcels(*,
                 lhannot,
                 rhannot,
                 version='fsaverage',
                 n_rotate=1000,
                 drop=None,
                 seed=None,
                 return_cost=False,
                 **kwargs):
    """
    Rotates parcels in `{lh,rh}annot` and re-assigns based on maximum overlap

    Vertex labels are rotated with :func:`netneurotools.stats.gen_spinsamples`
    and a new label is assigned to each *parcel* based on the region maximally
    overlapping with its boundaries.

    Parameters
    ----------
    {lh,rh}annot : str
        Path to .annot file containing labels to parcels on the {left,right}
        hemisphere
    version : str, optional
        Specifies which version of `fsaverage` provided annotation files
        correspond to. Must be one of {'fsaverage', 'fsaverage3', 'fsaverage4',
        'fsaverage5', 'fsaverage6'}. Default: 'fsaverage'
    n_rotate : int, optional
        Number of rotations to generate. Default: 1000
    drop : list, optional
        Specifies regions in {lh,rh}annot that are not present in `data`. NaNs
        will be inserted in place of these regions in the returned data. If
        not specified, 'unknown' and 'corpuscallosum' are assumed to not be
        present. Default: None
    seed : {int, np.random.RandomState instance, None}, optional
        Seed for random number generation. Default: None
    return_cost : bool, optional
        Whether to return cost array (specified as Euclidean distance) for each
        coordinate for each rotation. Default: False
    kwargs : key-value, optional
        Key-value pairs passed to :func:`netneurotools.stats.gen_spinsamples`

    Returns
    -------
    spinsamples : (N, `n_rotate`) numpy.ndarray
        Resampling matrix to use in permuting data parcellated with labels from
        {lh,rh}annot, where `N` is the number of parcels. Indices of -1
        indicate that the parcel was completely encompassed by regions in
        `drop` and should be ignored.
    cost : (N, `n_rotate`,) numpy.ndarray
        Cost (specified as Euclidean distance) of re-assigning each coordinate
        for every rotation in `spinsamples`. Only provided if `return_cost` is
        True.
    """
    def overlap(vals):
        """ Returns most common non-negative value in `vals`; -1 if all neg
        """
        vals = np.asarray(vals)
        vals, counts = np.unique(vals[vals > 0], return_counts=True)
        try:
            return vals[counts.argmax()]
        except ValueError:
            return -1

    if drop is None:
        drop = [
            'unknown',
            'corpuscallosum',  # default FreeSurfer
            'Background+FreeSurfer_Defined_Medial_Wall'  # common alternative
        ]
    drop = _decode_list(drop)

    # get vertex-level labels (set drop labels to - values)
    vertices, end = [], 0
    for n, annot in enumerate([lhannot, rhannot]):
        labels, ctab, names = read_annot(annot)
        names = _decode_list(names)
        todrop = set(names) & set(drop)
        inds = [names.index(f) - n for n, f in enumerate(todrop)]
        labs = np.arange(len(names) - len(inds)) + (end - (len(inds) * n))
        insert = np.arange(-1, -(len(inds) + 1), -1)
        vertices.append(np.insert(labs, inds, insert)[labels])
        end += len(names)
    vertices = np.hstack(vertices)
    labels = np.unique(vertices)
    mask = labels > -1

    # get coordinates and hemisphere designation for spin generation
    coords, hemiid = _get_fsaverage_coords(version, 'sphere')
    if len(vertices) != len(coords):
        raise ValueError('Provided annotation files have a different number '
                         'of vertices than the specified fsaverage surface.\n'
                         '    ANNOTATION: {} vertices\n'
                         '    FSAVERAGE:  {} vertices'.format(
                             len(vertices), len(coords)))

    # spin and assign regions based on max overlap
    spins, cost = gen_spinsamples(coords, hemiid, n_rotate=n_rotate, **kwargs)
    regions = np.zeros((len(labels[mask]), n_rotate), dtype='int32')
    for n in range(n_rotate):
        regions[:, n] = labeled_comprehension(vertices[spins[:, n]], vertices,
                                              labels, overlap, int, -1)[mask]

    if return_cost:
        return regions, cost

    return regions
Code example #14
File: RasterAT.py Project: jkibele/OpticalRS
def band_label_properties(labels, band, ind=None, func=np.mean,
                          outdtype=float, default=0.0):
    if ind is None:
        ind = np.unique(labels.compressed())
    proparr = measurements.labeled_comprehension(band, labels, ind, func, outdtype, default)
    return pd.Series(proparr, index=ind)