Example #1
def _parse_ljson_v1(lms_dict):
    all_points = []
    labels = []  # label per group
    labels_slices = []  # slices into the full pointcloud per label
    offset = 0
    connectivity = []
    for group in lms_dict['groups']:
        lms = group['landmarks']
        labels.append(group['label'])
        labels_slices.append(slice(offset, len(lms) + offset))
        # Create the connectivity if it exists
        conn = group.get('connectivity', [])
        if conn:
            # Offset relative connectivity according to the current index
            conn = offset + np.asarray(conn)
            connectivity += conn.tolist()
        for p in lms:
            all_points.append(p['point'])
        offset += len(lms)

    # Don't create a PointUndirectedGraph with no connectivity
    points = _ljson_parse_null_values(all_points)
    n_points = points.shape[0]

    labels_to_masks = OrderedDict()
    # go through each label and build the appropriate boolean array
    for label, l_slice in zip(labels, labels_slices):
        mask = np.zeros(n_points, dtype=bool)
        mask[l_slice] = True
        labels_to_masks[label] = mask

    lmarks = LabelledPointUndirectedGraph.init_from_edges(points, connectivity,
                                                          labels_to_masks)
    return {'LJSON': lmarks}
Example #2
def _parse_ljson_v1(lms_dict):
    all_points = []
    labels = []  # label per group
    labels_slices = []  # slices into the full pointcloud per label
    offset = 0
    connectivity = []
    for group in lms_dict["groups"]:
        lms = group["landmarks"]
        labels.append(group["label"])
        labels_slices.append(slice(offset, len(lms) + offset))
        # Create the connectivity if it exists
        conn = group.get("connectivity", [])
        if conn:
            # Offset relative connectivity according to the current index
            conn = offset + np.asarray(conn)
            connectivity += conn.tolist()
        for p in lms:
            all_points.append(p["point"])
        offset += len(lms)

    # Don't create a PointUndirectedGraph with no connectivity
    points = _ljson_parse_null_values(all_points)
    n_points = points.shape[0]

    labels_to_masks = OrderedDict()
    # go through each label and build the appropriate boolean array
    for label, l_slice in zip(labels, labels_slices):
        mask = np.zeros(n_points, dtype=bool)
        mask[l_slice] = True
        labels_to_masks[label] = mask

    lmarks = LabelledPointUndirectedGraph.init_from_edges(
        points, connectivity, labels_to_masks
    )
    return {"LJSON": lmarks}
Example #3
def _parse_ljson_v2(lms_dict):
    points = _ljson_parse_null_values(lms_dict['landmarks']['points'])
    connectivity = lms_dict['landmarks'].get('connectivity')

    if connectivity is None and len(lms_dict['labels']) == 0:
        return PointCloud(points)
    else:
        labels_to_mask = OrderedDict()  # masks into the pointcloud per label
        n_points = points.shape[0]
        for label in lms_dict['labels']:
            mask = np.zeros(n_points, dtype=bool)
            mask[label['mask']] = True
            labels_to_mask[label['label']] = mask
        # Note that we can pass connectivity as None here and the edges will be
        # empty.
        return LabelledPointUndirectedGraph.init_from_edges(
            points, connectivity, labels_to_mask)
Example #4
def _parse_ljson_v2(lms_dict):
    points = _ljson_parse_null_values(lms_dict['landmarks']['points'])
    connectivity = lms_dict['landmarks'].get('connectivity')

    if connectivity is None and len(lms_dict['labels']) == 0:
        return PointCloud(points)
    else:
        labels_to_mask = OrderedDict() # masks into the pointcloud per label
        n_points = points.shape[0]
        for label in lms_dict['labels']:
            mask = np.zeros(n_points, dtype=bool)
            mask[label['mask']] = True
            labels_to_mask[label['label']] = mask
        # Note that we can pass connectivity as None here and the edges will be
        # empty.
        return LabelledPointUndirectedGraph.init_from_edges(
            points, connectivity, labels_to_mask)
Example #5
def _parse_ljson_v2(lms_dict):
    points = _ljson_parse_null_values(lms_dict["landmarks"]["points"])
    connectivity = lms_dict["landmarks"].get("connectivity")

    if connectivity is None and len(lms_dict["labels"]) == 0:
        lmarks = PointCloud(points)
    else:
        labels_to_mask = OrderedDict()  # masks into the pointcloud per label
        n_points = points.shape[0]
        for label in lms_dict["labels"]:
            mask = np.zeros(n_points, dtype=bool)
            mask[label["mask"]] = True
            labels_to_mask[label["label"]] = mask
        # Note that we can pass connectivity as None here and the edges will be
        # empty.
        lmarks = LabelledPointUndirectedGraph.init_from_edges(
            points, connectivity, labels_to_mask)

    return {"LJSON": lmarks}
Example #6
def _parse_ljson_v1(lms_dict):
    from menpo.base import MenpoDeprecationWarning
    warnings.warn(
        'LJSON v1 is deprecated. export_landmark_file{s}() will '
        'only save out LJSON v2 files. Please convert all LJSON '
        'files to v2 by importing into Menpo and re-exporting to '
        'overwrite the files.', MenpoDeprecationWarning)
    all_points = []
    labels = []  # label per group
    labels_slices = []  # slices into the full pointcloud per label
    offset = 0
    connectivity = []
    for group in lms_dict['groups']:
        lms = group['landmarks']
        labels.append(group['label'])
        labels_slices.append(slice(offset, len(lms) + offset))
        # Create the connectivity if it exists
        conn = group.get('connectivity', [])
        if conn:
            # Offset relative connectivity according to the current index
            conn = offset + np.asarray(conn)
            connectivity += conn.tolist()
        for p in lms:
            all_points.append(p['point'])
        offset += len(lms)

    # Don't create a PointUndirectedGraph with no connectivity
    points = _ljson_parse_null_values(all_points)
    n_points = points.shape[0]

    labels_to_masks = OrderedDict()
    # go through each label and build the appropriate boolean array
    for label, l_slice in zip(labels, labels_slices):
        mask = np.zeros(n_points, dtype=bool)
        mask[l_slice] = True
        labels_to_masks[label] = mask

    return LabelledPointUndirectedGraph.init_from_edges(
        points, connectivity, labels_to_masks)
Example #7
def _parse_ljson_v3(lms_dict):
    all_lms = {}
    for key, lms_dict_group in lms_dict['groups'].items():
        points = _ljson_parse_null_values(lms_dict_group['landmarks']['points'])
        connectivity = lms_dict_group['landmarks'].get('connectivity')
        # TODO: create the metadata label!

        if connectivity is None and len(lms_dict_group['labels']) == 0:
            all_lms[key] = PointCloud(points)
        else:
            # masks into the pointcloud per label
            labels_to_mask = OrderedDict()
            n_points = points.shape[0]
            for label in lms_dict_group['labels']:
                mask = np.zeros(n_points, dtype=bool)
                mask[label['mask']] = True
                labels_to_mask[label['label']] = mask

            # Note that we can pass connectivity as None here and the edges
            # will be empty.
            all_lms[key] = LabelledPointUndirectedGraph.init_from_edges(
                    points, connectivity, labels_to_mask)
    return all_lms
Example #8
def _parse_ljson_v1(lms_dict):
    from menpo.base import MenpoDeprecationWarning
    warnings.warn('LJSON v1 is deprecated. export_landmark_file{s}() will '
                  'only save out LJSON v2 files. Please convert all LJSON '
                  'files to v2 by importing into Menpo and re-exporting to '
                  'overwrite the files.', MenpoDeprecationWarning)
    all_points = []
    labels = []  # label per group
    labels_slices = []  # slices into the full pointcloud per label
    offset = 0
    connectivity = []
    for group in lms_dict['groups']:
        lms = group['landmarks']
        labels.append(group['label'])
        labels_slices.append(slice(offset, len(lms) + offset))
        # Create the connectivity if it exists
        conn = group.get('connectivity', [])
        if conn:
            # Offset relative connectivity according to the current index
            conn = offset + np.asarray(conn)
            connectivity += conn.tolist()
        for p in lms:
            all_points.append(p['point'])
        offset += len(lms)

    # Don't create a PointUndirectedGraph with no connectivity
    points = _ljson_parse_null_values(all_points)
    n_points = points.shape[0]

    labels_to_masks = OrderedDict()
    # go through each label and build the appropriate boolean array
    for label, l_slice in zip(labels, labels_slices):
        mask = np.zeros(n_points, dtype=bool)
        mask[l_slice] = True
        labels_to_masks[label] = mask

    return LabelledPointUndirectedGraph.init_from_edges(points, connectivity, labels_to_masks)
Example #9
def _parse_ljson_v3(lms_dict):
    all_lms = {}
    for key, lms_dict_group in lms_dict["groups"].items():
        points = _ljson_parse_null_values(
            lms_dict_group["landmarks"]["points"])
        connectivity = lms_dict_group["landmarks"].get("connectivity")
        # TODO: create the metadata label!

        if connectivity is None and len(lms_dict_group["labels"]) == 0:
            all_lms[key] = PointCloud(points)
        else:
            # masks into the pointcloud per label
            labels_to_mask = OrderedDict()
            n_points = points.shape[0]
            for label in lms_dict_group["labels"]:
                mask = np.zeros(n_points, dtype=bool)
                mask[label["mask"]] = True
                labels_to_mask[label["label"]] = mask

            # Note that we can pass connectivity as None here and the edges
            # will be empty.
            all_lms[key] = LabelledPointUndirectedGraph.init_from_edges(
                points, connectivity, labels_to_mask)
    return all_lms
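
The v3 layout nests one v2-style landmarks/labels block per named group, so the parser returns a plain dict of shapes. A hand-built sketch of the expected input, inferred from the two _parse_ljson_v3 variants above (illustrative values only):

# Minimal LJSON v3-style input, inferred from _parse_ljson_v3 above.
# 'groups' maps each group name to a v2-style landmarks/labels block.
ljson_v3 = {
    'groups': {
        'face': {
            'landmarks': {
                'points': [[10.0, 20.0], [12.0, 22.0]],
                'connectivity': [[0, 1]],
            },
            'labels': [{'label': 'all', 'mask': [0, 1]}],
        },
    },
}
all_lms = _parse_ljson_v3(ljson_v3)
# {'face': <LabelledPointUndirectedGraph with a single 'all' label>}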
Example #10
def asf_importer(filepath, asset=None, **kwargs):
    r"""
    Importer for the ASF file format.

    For images, the `x` and `y` axes are flipped such that the first axis is
    `y` (height in the image domain).

    Currently only open and closed path types are supported.

    Landmark labels:

    +---------+
    | label   |
    +=========+
    | all     |
    +---------+

    Parameters
    ----------
    filepath : `Path`
        Absolute filepath of the file.
    asset : `object`, optional
        An optional asset that may help with loading. This is unused for this
        implementation.
    \**kwargs : `dict`, optional
        Any other keyword arguments.

    Returns
    -------
    landmarks : `dict` {`str`: :map:`PointCloud`}
        Dictionary mapping landmark groups to menpo shapes

    References
    ----------
    .. [1] http://www2.imm.dtu.dk/~aam/datasets/datasets.html
    """
    with filepath.open("r") as f:
        landmarks = f.read()

    # Remove comments and blank lines
    landmarks = [l for l in landmarks.splitlines() if (l.rstrip() and not "#" in l)]

    # Pop the front of the list for the number of landmarks
    count = int(landmarks.pop(0))
    # Pop the last element of the list for the image_name
    image_name = landmarks.pop()

    xs = np.empty([count, 1])
    ys = np.empty([count, 1])
    connectivity = []

    # Only unpack the first 7 (the last 3 are always 0)
    split_landmarks = [ASFPath(*landmarks[i].split()[:7]) for i in range(count)]
    paths = [list(g) for k, g in itertools.groupby(split_landmarks, lambda x: x[0])]
    vert_index = 0
    for path in paths:
        if path:
            path_type = path[0].path_type
        for vertex in path:
            # Relative coordinates, will be scaled by the image size
            xs[vert_index, ...] = float(vertex.xpos)
            ys[vert_index, ...] = float(vertex.ypos)
            vert_index += 1
            # If True, isolated point
            if not (
                vertex.connects_from == vertex.connects_to
                and vertex.connects_to == vertex.point_num
            ):
                # Connectivity is defined by connects_from and connects_to
                # as well as the path_type:
                #   Bit 1: Outer edge point/Inside point
                #   Bit 2: Original annotated point/Artificial point
                #   Bit 3: Closed path point/Open path point
                #   Bit 4: Non-hole/Hole point
                # For now we only parse cases 0 and 4 (closed or open)
                connectivity.append((int(vertex.point_num), int(vertex.connects_to)))
        if path and path_type == "0":
            connectivity.append((int(path[-1].point_num), int(path[0].point_num)))

    connectivity = np.vstack(connectivity)
    points = np.hstack([ys, xs])
    if asset is not None:
        # we've been given an asset. As ASF files are normalized,
        # fix that here
        points = Scale(np.array(asset.shape)).apply(points)

    labels_to_masks = OrderedDict([("all", np.ones(points.shape[0], dtype=np.bool))])
    return {
        "ASF": LabelledPointUndirectedGraph.init_from_edges(
            points, connectivity, labels_to_masks
        )
    }
Example #11
def asf_importer(filepath, asset=None, **kwargs):
    r"""
    Importer for the ASF file format.

    For images, the `x` and `y` axes are flipped such that the first axis is
    `y` (height in the image domain).

    Currently only open and closed path types are supported.

    Landmark labels:

    +---------+
    | label   |
    +=========+
    | all     |
    +---------+

    Parameters
    ----------
    filepath : `Path`
        Absolute filepath of the file.
    asset : `object`, optional
        An optional asset that may help with loading. This is unused for this
        implementation.
    \**kwargs : `dict`, optional
        Any other keyword arguments.

    Returns
    -------
    landmarks : :map:`LabelledPointUndirectedGraph`
        The landmarks including appropriate labels if available.

    References
    ----------
    .. [1] http://www2.imm.dtu.dk/~aam/datasets/datasets.html
    """
    with open(str(filepath), 'r') as f:
        landmarks = f.read()

    # Remove comments and blank lines
    landmarks = [l for l in landmarks.splitlines()
                 if (l.rstrip() and '#' not in l)]

    # Pop the front of the list for the number of landmarks
    count = int(landmarks.pop(0))
    # Pop the last element of the list for the image_name
    image_name = landmarks.pop()

    xs = np.empty([count, 1])
    ys = np.empty([count, 1])
    connectivity = []

    # Only unpack the first 7 (the last 3 are always 0)
    split_landmarks = [ASFPath(*landmarks[i].split()[:7])
                       for i in range(count)]
    paths = [list(g)
             for k, g in itertools.groupby(split_landmarks, lambda x: x[0])]
    vert_index = 0
    for path in paths:
        if path:
            path_type = path[0].path_type
        for vertex in path:
            # Relative coordinates, will be scaled by the image size
            xs[vert_index, ...] = float(vertex.xpos)
            ys[vert_index, ...] = float(vertex.ypos)
            vert_index += 1
            # If True, isolated point
            if not (vertex.connects_from == vertex.connects_to and
                    vertex.connects_to == vertex.point_num):
                # Connectivity is defined by connects_from and connects_to
                # as well as the path_type:
                #   Bit 1: Outer edge point/Inside point
                #   Bit 2: Original annotated point/Artificial point
                #   Bit 3: Closed path point/Open path point
                #   Bit 4: Non-hole/Hole point
                # For now we only parse cases 0 and 4 (closed or open)
                connectivity.append((int(vertex.point_num),
                                     int(vertex.connects_to)))
        if path_type == '0':
            connectivity.append((int(path[-1].point_num),
                                 int(path[0].point_num)))

    connectivity = np.vstack(connectivity)
    points = np.hstack([ys, xs])
    if asset is not None:
        # we've been given an asset. As ASF files are normalized,
        # fix that here
        points = Scale(np.array(asset.shape)).apply(points)

    labels_to_masks = OrderedDict(
        [('all', np.ones(points.shape[0], dtype=bool))])
    return LabelledPointUndirectedGraph.init_from_edges(points, connectivity,
                                                        labels_to_masks)
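
A hedged usage sketch for asf_importer, assuming a hypothetical .asf annotation file on disk (the path below is made up; passing the corresponding image as asset rescales the normalized ASF coordinates to pixel coordinates, as the code above shows):

from pathlib import Path

# Hypothetical annotation file in the IMM/AAM ASF format.
lms = asf_importer(Path('/data/annotations/face_01.asf'))
# Example #10 returns {'ASF': LabelledPointUndirectedGraph}; Example #11
# returns the LabelledPointUndirectedGraph directly. Pass asset=image to
# scale the relative x/y positions by the image's shape.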