Example #1
    def test_get_annotations(self):
        # get_annotations should return a dict mapping skeleton ID -> list of annotations
        self.assertIsInstance(
            pymaid.get_annotations(config_test.test_skids,
                                   remote_instance=self.rm), dict)
        # get_annotation_details should return a DataFrame with one row per annotation
        self.assertIsInstance(
            pymaid.get_annotation_details(config_test.test_skids,
                                          remote_instance=self.rm),
            pd.DataFrame)
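
For reference, a minimal standalone sketch of the two calls this test exercises. It assumes a recent pymaid where CatmaidInstance takes a server URL and API token; the URL, token, and skeleton IDs below are placeholders, not values from the original test suite.

import pandas as pd
import pymaid

# Hypothetical connection details -- substitute your own CATMAID server and token.
rm = pymaid.CatmaidInstance('https://example.org/catmaid', api_token='YOUR_TOKEN')

skids = [16, 27]  # placeholder skeleton IDs

annots = pymaid.get_annotations(skids, remote_instance=rm)
print(type(annots))  # dict: {skeleton_id: [annotation, ...]}

details = pymaid.get_annotation_details(skids, remote_instance=rm)
print(isinstance(details, pd.DataFrame))  # True: one row per annotation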
Example #2
def pull_all_updates_by_skid(skids, **kwargs):
    annots = pymaid.get_annotations(skids, remote_instance=target_project)
    # Map each linking-relation phrase to the function that performs that kind of update
    link_types = {
        'copy of': lambda skids: copy_neurons_by_skid(skids, **kwargs),
        'translation of': lambda skids: translate_neurons_by_skid(skids, **kwargs),
        'elastic transformation of':
            lambda skids: elastictransform_neurons_by_skid(skids, **kwargs),
        'elastic transformation and flipped of':
            lambda skids: elastictransform_neurons_by_skid(skids, left_right_flip=True, **kwargs),
        r'pruned \(first entry, last exit\) by vol 109 of':  # Note that the \ MUST be included
            lambda skids: volume_prune_neurons_by_skid(skids, 109, **kwargs),
        'radius pruned of':
            lambda skids: radius_prune_neurons_by_skid(skids, **kwargs)
    }
    for skid in annots:
        # Take the first 'LINKED NEURON' annotation that is not an
        # 'UPDATED FROM LINKED NEURON' note
        link_annots = [annot for annot in annots[skid]
                       if 'LINKED NEURON' in annot
                       and 'UPDATED FROM LINKED NEURON' not in annot][0]
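
The original example is truncated here, before the linking annotation is acted on. For orientation only, a sketch of the dispatch step that the link_types table supports, mirroring the substring match used in push_all_updates_by_skid below; find_update_function is a hypothetical helper, not part of the original code.

def find_update_function(link_annot, link_types):
    # Return the first handler whose linking-relation phrase occurs verbatim
    # in the 'LINKED NEURON ...' annotation, or None if nothing matches.
    for relation, handler in link_types.items():
        if relation in link_annot:
            return handler
    return None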
Example #3
def push_all_updates_by_skid(skids, recurse=False, fake=True, **kwargs):
    """
    For each neuron in the source project with one of the given skids,
    search in the target project for neurons that are linked to it, and
    update the target neuron(s) using the appropriate
    manipulate_and_reupload_catmaid_neuron function as specified by the
    linking relation in the "LINKED NEURON" annotation.

    If recurse=True and this function succeeds in performing an update, it
    will then call push_all_updates_by_skid on the updated neuron to try to
    propagate changes through any chains of linked neurons. This recursion
    only happens within the target project. If you need to push the updated
    neuron to a different project, do that manually.
    """
    kwargs['fake'] = fake
    kwargs['refuse_to_update'] = False  # Since this function only does
                                        # updates, refusing to update is
                                        # redundant with 'fake'
    link_types = {
        'copy of': lambda skids: copy_neurons_by_skid(skids, **kwargs),
        'translation of': lambda skids: translate_neurons_by_skid(skids, **kwargs),
        'elastic transformation of':
            lambda skids: elastictransform_neurons_by_skid(skids, **kwargs),
        'elastic transformation and flipped of':
            lambda skids: elastictransform_neurons_by_skid(skids, left_right_flip=True, **kwargs),
        r'pruned \(first entry, last exit\) by vol 109 of':  # Note that the \ MUST be included
            lambda skids: volume_prune_neurons_by_skid(skids, 109, **kwargs),
        'radius pruned of':
            lambda skids: radius_prune_neurons_by_skid(skids, **kwargs)
    }

    # Make sure skids is iterable (wrap a single skeleton ID in a list)
    try:
        iter(skids)
    except TypeError:
        skids = [skids]

    if 'skip_dates' in kwargs:
        skip_dates = kwargs.pop('skip_dates')
    else:
        skip_dates = []

    all_target_annots = pymaid.get_annotation_list(remote_instance=target_project)

    original_source_project_id = source_project.project_id
    server_responses = []
    new_skids = skids
    while len(new_skids) > 0:
        new_skids = []
        for source_skid in skids:  # For each skeleton that needs to be pushed
            target_annots = [add_escapes(annot) for annot in all_target_annots.name
                             if 'skeleton id ' + str(source_skid) + ' ' in annot
                             and 'project id ' + str(source_project.project_id) + ' ' in annot]
            #print(target_annots)
            # For each annotation that indicates a link to the source skid
            for target_annot in target_annots:
                target_skids = get_skids_by_annotation(target_annot, remote_instance='target')
                if len(target_skids) == 0:
                    continue
                elif len(target_skids) != 1:
                    input('WARNING: Multiple neurons in the target project'
                          ' with the same linking annotation??? Skipping this'
                          f' push: {target_annot}')
                    continue
                if len(skip_dates) > 0:
                    this_target_skid_annots = pymaid.get_annotations(
                            target_skids, remote_instance=target_project)
                # Check what type of link is indicated by this linking annotation
                for linking_relation in link_types:
                    if linking_relation in target_annot:
                        resp = [f'Skipped: {target_annot}']
                        print('Found in project id '
                              f"{target_project.project_id}: '{target_annot}'")
                        if (len(skip_dates) == 0
                                or not any(any(date in annot for date in skip_dates)
                                           for annot in list(this_target_skid_annots.values())[0])):
                            resp = link_types[linking_relation](source_skid)
                        else:
                            print('Skipping upload because this neuron was'
                                  ' already updated recently')
                        if recurse and not fake:
                            #new_skids.append(resp[0]['skeleton_id']) # old
                            new_skids.append(target_skids[0])
                        server_responses.extend(resp)
        if recurse and not fake:
            source_project.project_id = target_project.project_id
            skids = new_skids
            print(f'Recursing - now pushing updates to skids {new_skids}')
    if recurse and not fake:
        source_project.project_id = original_source_project_id

    return server_responses
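
A hedged usage sketch, assuming source_project and target_project are the module-level CatmaidInstance objects this function already relies on; the skeleton IDs are placeholders.

# Placeholder skeleton IDs in the source project.
skids_to_push = [123456, 234567]

# Dry run: fake=True (the default) reports what would be updated without
# writing anything to the target project.
push_all_updates_by_skid(skids_to_push)

# Perform the updates and follow chains of linked neurons.
responses = push_all_updates_by_skid(skids_to_push, recurse=True, fake=False)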
Example #4
def segments_to_skids(seg_ids,
                      autoseg_instance,
                      name_pattern="Google: {id}",
                      merge_annotation_pattern="Merged: {name}",
                      verbose=True):
    """Retrieve skeleton IDs of neurons corresponding to given segmentation ID(s).

    If a given segmentation ID has been merged into another fragment, will try
    retrieving by annotation.

    Parameters
    ----------
    seg_ids :                   int | list of int
                                Segmentation ID(s) of autoseg skeletons to retrieve.
    autoseg_instance :          pymaid.CatmaidInstance
                                Instance with autoseg skeletons.
    name_pattern :              str, optional
                                Segmentation IDs are encoded in the name. Use
                                this parameter to define that pattern.
    merge_annotation_pattern :  str, optional
                                When neurons are merged, a reference to the
                                losing skeleton's name is kept as an annotation.
                                Use this parameter to define that pattern.
    verbose :                   bool, optional
                                If True, report segmentation IDs for which no
                                skeleton could be found.

    Returns
    -------
    Dict
                        Dictionary mapping segmentation ID to skeleton ID.
                        Values are ``None`` where no skeleton was found.

    """
    assert isinstance(autoseg_instance, pymaid.CatmaidInstance)

    assert isinstance(seg_ids,
                      (list, np.ndarray, set, tuple, pd.Index, int, str))

    seg_ids = navis.utils.make_iterable(seg_ids)

    # Prepare map seg ID -> skeleton ID
    seg2skid = {int(i): None for i in seg_ids}

    # First find neurons by name
    # Do NOT change the order of "names"!
    names = [name_pattern.format(id=i) for i in seg_ids]
    by_name = pymaid.get_skids_by_name(names,
                                       allow_partial=False,
                                       raise_not_found=False,
                                       remote_instance=autoseg_instance)

    by_name['skeleton_id'] = by_name.skeleton_id.astype(int)

    # Update map by those that could be found by name
    name2skid = by_name.set_index('name').skeleton_id.to_dict()
    seg2skid.update({
        int(i): int(name2skid[n])
        for i, n in zip(seg_ids, names) if n in by_name.name.values
    })

    # Look for missing IDs
    not_found = [s for s in seg_ids if not seg2skid[int(s)]]

    # Try finding by annotation (temporarily raise logger level)
    if not_found:
        pattern = merge_annotation_pattern
        an = [pattern.format(name=name_pattern.format(id=n)) for n in not_found]
        old_lvl = pymaid.logger.level
        pymaid.set_loggers('ERROR')
        by_annotation = pymaid.get_skids_by_annotation(
            an,
            raise_not_found=False,
            allow_partial=False,
            intersect=False,
            remote_instance=autoseg_instance)
        pymaid.set_loggers(old_lvl)

        if by_annotation:
            annotations = pymaid.get_annotations(
                by_annotation, remote_instance=autoseg_instance)

            for seg, a in zip(not_found, an):
                for skid in annotations:
                    if a in annotations[skid]:
                        seg2skid[int(seg)] = int(skid)
                        break

    # Figure out if we are still missing skeletons for any of the seg IDs
    if verbose:
        missing = [str(k) for k, v in seg2skid.items() if not v]
        if missing:
            # Check if skeleton ID has ever existed
            hist = pymaid.get_skeleton_change(missing,
                                              remote_instance=autoseg_instance)
            # Flatten the list of links (and convert to string)
            existed = set([str(e) for l in hist for e in l[0]])

            still_missing = set(missing) & existed

            if still_missing:
                msg = "{} out of {} segmentation IDs could not be found: {}"
                msg = msg.format(len(still_missing), len(seg_ids),
                                 ", ".join(still_missing))
                print(msg)

    return seg2skid
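
A hedged usage sketch; the server URL, API token, and segmentation IDs below are placeholders.

import pymaid

# Hypothetical CATMAID instance hosting the autoseg skeletons.
autoseg = pymaid.CatmaidInstance('https://example.org/catmaid/autoseg',
                                 api_token='YOUR_TOKEN')

seg2skid = segments_to_skids([123456789, 987654321],
                             autoseg_instance=autoseg)
# e.g. {123456789: 42, 987654321: None} -- None means no skeleton was found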