# Note: these example snippets assume ``numpy`` and the following
# ``numpy.lib.recfunctions`` aliases are imported at module level:
#   import numpy as np
#   from numpy.lib.recfunctions import (unstructured_to_structured as uts,
#                                       structured_to_unstructured as stu,
#                                       repack_fields)
def dist_angle_sort(a, sort_point=None, close_poly=True):
    """Return a radial and distance sort of points relative to point.

    Parameters
    ----------
    a : array-like
        The array to sort.
    sort_point : list
        The [x, y] value of the sort origin.  If `None`, then the minimum
        x,y value from the inputs is used.

    Useful for polygons.  First and last point equality is checked.
    """
    def _e_2d_(a, p):
        """Array points to point distance."""
        diff = a - p[None, :]
        return np.sqrt(np.einsum('ij,ij->i', diff, diff))

    a = np.array(a)
    if sort_point is None:
        sort_point = np.array([np.min(a[:, 0]), np.min(a[:, 1])])
    else:
        sort_point = np.asarray(sort_point, dtype='float')
    dxdy = np.subtract(a, np.atleast_2d(sort_point))
    ang = np.degrees(np.arctan2(dxdy[:, 1], dxdy[:, 0]))
    dist = _e_2d_(a, sort_point)
    ang_dist = np.vstack((ang, dist)).T
    keys = np.argsort(uts(ang_dist))
    rev_keys = keys[::-1]   # largest angle/distance first
    arr = a[rev_keys]
    if close_poly and not np.all(arr[0] == arr[-1]):
        arr = np.concatenate((arr, arr[0][None, :]), axis=0)
    return arr
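A minimal usage sketch with hypothetical points, assuming the imports noted above; with ``close_poly=True`` the first point is repeated at the end of the result:

pnts = [[2., 2.], [0., 0.], [2., 0.], [0., 2.]]
srted = dist_angle_sort(pnts)   # radial/distance sort about the minimum x,y
# srted[0] and srted[-1] are equal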
Example #2
def _polys_to_segments_(a, as_2d=True, as_structured=False):
    """Segment poly* structures into o-d pairs from start to finish

    Parameters
    ----------
    a : array
        A 2D array of x,y coordinates representing polyline or polygons.
    as_2d : boolean
        Returns a 2D array of from-to point pairs, [xf, yf, xt, yt] if True.
        If False, they are returned as a 3D array in the form
        [[xf, yf], [xt, yt]]
    as_structured : boolean
        Optional structured/recarray output.  Field names are currently fixed.

    Notes
    -----
    Any row containing np.nan is removed since this would indicate that the
    shape contains the null_pnt separator.
    Use ``prn_tbl`` if you want to see a well formatted output.
    """
    s0, s1 = a.shape
    fr_to = np.zeros((s0 - 1, s1 * 2), dtype=a.dtype)
    fr_to[:, :2] = a[:-1]
    fr_to[:, 2:] = a[1:]
    fr_to = fr_to[~np.any(np.isnan(fr_to), axis=1)]
    if as_structured:
        dt = np.dtype([('X_orig', 'f8'), ('Y_orig', 'f8'), ('X_dest', 'f8'),
                       ('Y_dest', 'f8')])
        return uts(fr_to, dtype=dt)
    if not as_2d:
        s0, s1 = fr_to.shape
        return fr_to.reshape(s0, s1 // 2, s1 // 2)
    return fr_to
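A short sketch of the from-to pairing on a hypothetical closed square (numpy only):

import numpy as np

sq = np.array([[0., 0.], [0., 1.], [1., 1.], [1., 0.], [0., 0.]])
segs = _polys_to_segments_(sq)                    # 4 rows of [xf, yf, xt, yt]
segs_3d = _polys_to_segments_(sq, as_2d=False)    # shape (4, 2, 2)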
Example #3
def fc_composition(in_fc, SR=None, prn=True, start=0, end=50):
    """Featureclass geometry composition in terms of shapes, shape parts, and
    point counts for each part.
    """
    if SR is None:
        SR = getSR(in_fc)
    with arcpy.da.SearchCursor(in_fc, ['OID@', 'SHAPE@'],
                               spatial_reference=SR) as cur:
        len_lst = []
        for _, row in enumerate(cur):
            p_id = row[0]
            p = row[1]
            parts = p.partCount
            num_pnts = np.asarray([p[i].count for i in range(parts)])
            IDs = np.repeat(p_id, parts)
            part_count = np.arange(parts)
            too = np.cumsum(num_pnts)
            result = np.stack((IDs, part_count, num_pnts, too), axis=-1)
            len_lst.append(result)
    tmp = np.concatenate(len_lst, axis=0)  # np.vstack(len_lst)
    too = np.cumsum(tmp[:, 2])
    frum = np.concatenate(([0], too))
    frum_too = np.array(list(zip(frum, too)))
    fc_comp = np.hstack((tmp[:, :3], frum_too))  # axis=0)
    dt = np.dtype({
        'names': ['IDs', 'Part', 'Points', 'From_pnt', 'To_pnt'],
        'formats': ['i4', 'i4', 'i4', 'i4', 'i4']
    })
    fc = uts(fc_comp, dtype=dt)
    frmt = "\nFeatureclass...  {}" + \
        "\nShapes :{:>5.0f}\nParts  :{:>5.0f}\n  max  :{:>5.0f}" + \
        "\nPoints :{:>5.0f}\n  min  :{:>5.0f}\n  med  :{:>5.0f}" + \
        "\n  max  :{:>5.0f}"
    if prn:
        uni, cnts = np.unique(fc['IDs'], return_counts=True)
        a0, a1 = [fc['Part'] + 1, fc['Points']]
        args = [
            in_fc,
            len(uni),
            np.sum(cnts),
            np.max(a0),
            np.sum(a1),
            np.min(a1),
            int(np.median(a1)),
            np.max(a1)
        ]
        msg = dedent(frmt).format(*args)
        print(msg)
        # ---- to structured and print
        frmt = "{:>8} " * 5
        start, end = sorted([
            abs(int(i)) if isinstance(i, (int, float)) else 0
            for i in [start, end]
        ])
        end = min([fc.shape[0], end])
        print(frmt.format(*fc.dtype.names))
        for i in range(start, end):
            print(frmt.format(*fc[i]))
        return None
    return fc
Example #4
    def info(self, prn=True, start=0, end=50):
        """Convert an IFT array to full information.

        Parameters
        ----------
        prn : boolean
            If True, rows from ``start`` to ``end`` will be printed.
            If False, the information will be returned and one can use
            ``prn_tbl`` for more control over the tabular output.
        start, end : integers
            The start to end locations within the geo-array to print or view.

        Notes
        -----
        Point count will include any null_pnts used to separate inner and
        outer rings.

        To see the data structure, use ``prn_geo``.
        """
        ift = self.IFT
        ids = ift[:, 0]
        uni, cnts = np.unique(ids, return_counts=True)
        part_count = np.concatenate([np.arange(i) for i in cnts])
        pnts = np.array([len(p) for p in self.parts])
        too = ift[:, 2]
        frum = ift[:, 1]
        id_len2 = np.stack((ids, part_count, pnts, frum, too), axis=-1)
        dt = np.dtype({
            'names': ['IDs', 'Part', 'Points', 'From_pnt', 'To_pnt'],
            'formats': ['i4', 'i4', 'i4', 'i4', 'i4']
        })
        IFT_2 = uts(id_len2, dtype=dt)
        frmt = "-"*14 + \
            "\nShapes :{:>6.0f}\nParts  :{:>6.0f}" + \
            "\nPoints :{:>6.0f}\n  min  :{:>6.0f}\n  med  :{:>6.0f}" + \
            "\n  max  :{:>6.0f}"
        shps = len(uni)  # ---- number of unique shape ids
        _, cnts = np.unique(IFT_2['Part'], return_counts=True)
        p0 = np.sum(cnts)
        p3 = np.sum(IFT_2['Points'])
        p4 = np.min(IFT_2['Points'])
        p5 = np.median(IFT_2['Points'])
        p6 = np.max(IFT_2['Points'])
        msg = dedent(frmt).format(shps, p0, p3, p4, p5, p6)
        if prn:
            frmt = "{:>8} " * 5
            start, end = sorted([
                abs(int(i)) if isinstance(i, (int, float)) else 0
                for i in [start, end]
            ])
            print(msg)
            print(frmt.format(*IFT_2.dtype.names))
            N = IFT_2.shape[0]
            for i in range(start, min(N, end)):
                print(frmt.format(*IFT_2[i]))
            # prn_tbl(IFT_2, rows)
        else:
            return IFT_2
def polys_to_segments(self, as_basic=True, to_orig=False, as_3d=False):
    """Segment poly* structures into o-d pairs from start to finish.

    Parameters
    ----------
    as_basic : boolean
        True, returns an Nx4 array (x0, y0, x1, y1) of from-to coordinates.
        False, returns a structured array.
        If `as_3d` is True, then `as_basic` is set to False.
    to_orig : boolean
        True, moves the coordinates back to their original position
        defined by the `LL` property of the Geo array.
    as_3d : boolean
        True, the point pairs are returned as a 3D array in the form
        [[X_orig, Y_orig], [X_dest, Y_dest]], without the distances.

    Notes
    -----
    Use `prn_tbl` if you want to see a well formatted output.
    """
    if self.K not in (1, 2):
        print("Poly* features required.")
        return None
    # -- basic return as ndarray used by common_segments
    if as_3d:  # The array cannot be basic if it is 3d
        as_basic = False
    if to_orig:
        tmp = self.XY + self.LL
        b_vals = [tmp[ft[0]:ft[1]] for ft in self.FT]  # shift to orig extent
    else:
        b_vals = self.bits
    # -- Do the concatenation
    fr_to = np.concatenate(
        [np.concatenate((b[:-1], b[1:]), axis=1) for b in b_vals], axis=0)
    # -- return if simple and not 3d representation
    if as_basic:
        return fr_to
    # -- return 3d from-to representation
    if as_3d:
        fr_to = fr_to[:, :4]
        s0, s1 = fr_to.shape
        return fr_to.reshape(s0, s1 // 2, s1 // 2)
    # -- structured array section
    # add bit ids and lengths to the output array
    b_ids = self.IFT
    segs = np.asarray([[[b_ids[i][0], *(b_ids[i][-2:])],
                        len(b) - 1] for i, b in enumerate(b_vals)],
                      dtype='O')
    s_ids = np.concatenate([np.tile(i[0], i[1]).reshape(-1, 3) for i in segs],
                           axis=0)
    dist = (np.sqrt(np.sum((fr_to[:, :2] - fr_to[:, 2:4])**2, axis=1)))
    fr_to = np.hstack((fr_to, s_ids, dist.reshape(-1, 1)))
    dt = np.dtype([('X_fr', 'f8'), ('Y_fr', 'f8'), ('X_to', 'f8'),
                   ('Y_to', 'f8'), ('Orig_id', 'i4'), ('Part', 'i4'),
                   ('Seq_ID', 'i4'), ('Length', 'f8')])
    fr_to = uts(fr_to, dtype=dt)
    return repack_fields(fr_to)
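The ``Length`` field above is a straightforward vectorized distance between the from and to columns; a small illustration with hypothetical rows:

import numpy as np

fr_to = np.array([[0., 0., 3., 4.],    # a 3-4-5 segment
                  [3., 4., 3., 0.]])
dist = np.sqrt(np.sum((fr_to[:, :2] - fr_to[:, 2:4])**2, axis=1))  # [5., 4.]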
Example #6
def flatten_to_points(iterable):
    """Iteratively flattens an iterable containing potentially nested points
    down to X,Y pairs with feature ID, part, subpart/ring and point number.

    Requires
    --------
    iterable : list/array
        See notes

    Returns
    -------
    A structured array of coordinate geometry information as described by the
    array dtype.

    Notes
    -----
    `load_geojson`'s `coords` output is suitable for input or any ndarray or
    object array representing geometry coordinates.

    An ``x[0]`` check is used to prevent flattening beyond the final
    coordinate pair.

    References
    ----------
    `Stefan Pochmann on flatten nested lists with indices
    <https://stackoverflow.com/questions/48996063/python-flatten-nested-
    lists-with-indices>`_.
    """
    def gen(iterable):
        """Generator function to acquire the values."""
        stack = [None, enumerate(iterable)]
        pad = -1
        N = 6
        while stack:
            for stack[-2], x in stack[-1]:
                if isinstance(x[0], list):  # added [0] to check for pair
                    stack += None, enumerate(x)
                else:
                    z = [*x, *stack[::2]]
                    if len(z) < N:
                        z.extend([pad] * (N - len(z)))
                    yield z
                break
            else:
                del stack[-2:]

    # ----
    z = gen(iterable)
    dt = np.dtype({
        'names': ['Xs', 'Ys', 'a', 'b', 'c', 'd'],
        'formats': ['<f8', '<f8', '<i4', '<i4', '<i4', '<i4']
    })
    z0 = np.vstack(list(z))
    return uts(z0, dtype=dt)
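A small sketch with a hypothetical nested coordinate list, assuming the imports noted in the first example; unused index columns are padded with -1:

coords = [[[0.0, 0.0], [1.0, 0.0], [1.0, 1.0]],   # feature 0
          [[2.0, 2.0], [3.0, 2.0]]]               # feature 1
tbl = flatten_to_points(coords)
# tbl['Xs'], tbl['Ys'] hold the coordinates, 'a' the feature index and
# 'b' the point index within it; 'c' and 'd' stay at the -1 pad value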
Example #7
 def common_segments(self):
     """Return the common segments in poly features.  Result is an array of
     from-to pairs of points.
     """
     h = self.polys_to_segments()
     h_0 = uts(h)
     names = h_0.dtype.names
     h_1 = h_0[list(names[-2:] + names[:2])]
     idx = np.isin(h_0, h_1)
     common = h_0[idx]
     return stu(common)
Example #8
 def is_multipart(self, as_structured=False):
     """For each shape, returns whether it has multiple parts.  A ndarray
     is returned with the first column being the shape number and the second
     is coded as 1 for True and 0 for False
     """
     partcnt = self.part_cnt
     w = np.where(partcnt[:, 1] > 1, 1, 0)
     arr = np.array(list(zip(np.arange(len(w)), w)))
     if as_structured:
         dt = np.dtype([('IDs', '<i4'), ('Parts', '<i4')])
         return uts(arr, dtype=dt)
     return arr
Example #9
 def unique_segments(self):
     """Return the unique segments in poly features.   Result is an array of
     from-to pairs of points
     """
     h = self.polys_to_segments()
     h_0 = uts(h)
     names = h_0.dtype.names
     h_1 = h_0[list(names[-2:] + names[:2])]
     idx0 = ~np.isin(h_0, h_1)
     uniq0 = h_0[idx0]
     uniq1 = h_0[~idx0]
     uniq01 = np.hstack((uniq0, uniq1))
     return stu(uniq01)
Example #10
    def min_area_rect(self, as_structured=False):
        """Determines the minimum area rectangle for a shape represented
        by a list of points.  If the shape is a polygon, then only the outer
        ring is used.  This is the MABR... minimum area bounding rectangle.
       """
        def _extent_area_(a):
            """Area of an extent polygon"""
            LBRT = np.concatenate((np.nanmin(a, axis=0), np.nanmax(a, axis=0)))
            dx, dy = np.diff(LBRT.reshape(2, 2), axis=0).squeeze()
            return dx * dy, LBRT

        def _extents_(a):
            """Extents are returned as L(eft), B(ottom), R(ight), T(op)"""
            def _sub_(i):
                """Extent of a sub-array in an object array"""
                return np.concatenate(
                    (np.nanmin(i, axis=0), np.nanmax(i, axis=0)))

            p_ext = [_sub_(i) for i in a]
            return np.asarray(p_ext)

        # ----
        chs = self.convex_hulls(False, 50)
        ang_ = [hlp._angles_(i) for i in chs]
        xt = _extents_(chs)
        cent_ = np.c_[np.mean(xt[:, 0::2], axis=1),
                      np.mean(xt[:, 1::2], axis=1)]
        rects = []
        for i, p in enumerate(chs):
            # ---- np.radians(np.unique(np.round(ang_[i], 2))) # --- round
            uni_ = np.radians(np.unique(ang_[i]))
            area_old, LBRT = _extent_area_(p)
            Xmin, Ymin, Xmax, Ymax = LBRT
            vals = [area_old, Xmin, Ymin, Xmax, Ymax]
            for angle in uni_:
                c, s = np.cos(angle), np.sin(angle)
                R = np.array(((c, s), (-s, c)))
                ch = np.einsum('ij,jk->ik', p - cent_[i], R) + cent_[i]
                area_, LBRT = _extent_area_(ch)
                if area_ < area_old:    # keep only the smallest extent found
                    area_old = area_
                    Xmin, Ymin, Xmax, Ymax = LBRT
                    vals = [area_, Xmin, Ymin, Xmax, Ymax]
            rects.append(vals)
        rects = np.asarray(rects)
        if as_structured:
            dt = np.dtype([('Rect_area', '<f8'), ('Xmin', '<f8'),
                           ('Ymin', '<f8'), ('Xmax', '<f8'), ('Ymax', '<f8')])
            return uts(rects, dtype=dt)
        return rects
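A numpy-only sketch of the rotate-and-measure step used above, with a hypothetical hull and a single candidate angle:

import numpy as np

p = np.array([[0., 0.], [4., 1.], [3., 5.], [-1., 4.]])   # hull points
cent = p.mean(axis=0)
angle = np.radians(30.)                       # one candidate edge angle
c, s = np.cos(angle), np.sin(angle)
R = np.array(((c, s), (-s, c)))
ch = np.einsum('ij,jk->ik', p - cent, R) + cent   # rotate about the centroid
LBRT = np.concatenate((ch.min(axis=0), ch.max(axis=0)))
dx, dy = np.diff(LBRT.reshape(2, 2), axis=0).squeeze()
area = dx * dy                # axis-aligned extent area at this rotation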
Example #11
def mst(arr, calc_dist=True):
    """Determine the minimum spanning tree for a set of points represented
    by their inter-point distances. ie their `W`eights

    Parameters
    ----------
    W : array, normally an interpoint distance array
        Edge weights for example, distance, time, for a set of points.
        W needs to be a square array or a np.triu perhaps

    calc_dist : boolean
        True, if W is a points array, calculate W as the interpoint distance.
        False means that W is not a points array, but some other `weight`
        representing the interpoint relationship

    Returns
    -------
    pairs - the pair of nodes that form the edges
    """
    arr = np.unique(arr, axis=0)   # remove duplicate points
    W = arr[~np.isnan(arr[:, 0])]
    a_copy = np.copy(W)
    if calc_dist:
        W = _e_dist_(W)
    if W.shape[0] != W.shape[1]:
        raise ValueError("W needs to be square matrix of edge weights")
    Np = W.shape[0]
    pairs = []
    pnts_seen = [0]  # Add the first point
    n_seen = 1
    # exclude self connections by assigning inf to the diagonal
    diag = np.arange(Np)
    W[diag, diag] = np.inf
    #
    while n_seen != Np:
        new_edge = np.argmin(W[pnts_seen], axis=None)
        new_edge = divmod(new_edge, Np)
        new_edge = [pnts_seen[new_edge[0]], new_edge[1]]
        pairs.append(new_edge)
        pnts_seen.append(new_edge[1])
        W[pnts_seen, new_edge[1]] = np.inf
        W[new_edge[1], pnts_seen] = np.inf
        n_seen += 1
    pairs = np.array(pairs)
    frum = a_copy[pairs[:, 0]]
    too = a_copy[pairs[:, 1]]
    fr_to = np.concatenate((frum, too), axis=1)  # np.vstack(pairs)
    fr_to = uts(fr_to, names=['X_orig', 'Y_orig', 'X_dest', 'Y_dest'])
    return repack_fields(fr_to)
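A usage sketch.  ``_e_dist_`` is not shown in this snippet, so an illustrative square distance-matrix version is included here, along with hypothetical points; the recfunctions helpers noted in the first example are assumed:

import numpy as np

def _e_dist_(a):
    """Square inter-point (euclidean) distance matrix."""
    diff = a[:, None, :] - a[None, :, :]
    return np.sqrt(np.einsum('ijk,ijk->ij', diff, diff))

pnts = np.array([[0., 0.], [0., 3.], [4., 0.], [4., 3.]])
edges = mst(pnts)    # structured from-to pairs forming the spanning tree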
Example #12
def f2pnts(in_fc):
    """Features to points.
    `getSR`, `shape_K` and `fc_geometry` from `npGeo_io`
    """
    SR = getSR(in_fc)
    kind, k = shape_K(in_fc)
    tmp, ift = fc_geometry(in_fc, SR=SR, IFT_rec=False)
    m = np.nanmin(tmp, axis=0)                   # shift to LB of whole extent
    info = "feature to points"
    a = tmp - m
    g = Geo(a, IFT=ift, Kind=k, Info=info)    # create the geo array
    cent = g.centroids + m                       # create the centroids
    dt = np.dtype([('Xs', '<f8'), ('Ys', '<f8')])
    cent = uts(cent, dtype=dt)
    return cent, SR
Example #13
 def is_clockwise(self, is_closed_polyline=False):
     """Utilize `shoelace` area calculation to determine whether polygon
     rings are clockwise or not.  If the geometry represents a closed-loop
     polyline, then set the `is_closed_polyline` to True.  Validity of the
     geometry is not checked.
     """
     msg = "Polygons or closed-loop polylines are required."
     if self.K not in (1, 2):
         print(msg)
         return None
     if self.K == 1:
         if not is_closed_polyline:
             print(msg)
             return None
     ids = self.bit_ids
     cw = np.asarray(
         [1 if hlp._area_part_(i) > 0. else 0 for i in self.bits])
     return uts(np.asarray(list(zip(ids, cw))), names=['IDs', 'Clockwise'])
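For reference, a minimal shoelace-style signed area on a single ring; ``hlp._area_part_`` is not shown in this snippet, and whether clockwise rings come out positive depends on that helper's sign convention:

import numpy as np

def _signed_area_(ring):
    """Shoelace formula; positive for counter-clockwise vertex order."""
    x, y = ring[:, 0], ring[:, 1]
    return 0.5 * np.sum(x[:-1] * y[1:] - x[1:] * y[:-1])

sq_cw = np.array([[0., 0.], [0., 1.], [1., 1.], [1., 0.], [0., 0.]])
print(_signed_area_(sq_cw))    # -1.0 for this clockwise ring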
Example #14
def f2pnts(in_fc, out_fc):
    """Features to points."""
    result = check_path(out_fc)
    if result[0] is None:
        print(result[1])
        return result[1]
    gdb, name = result
    SR = getSR(in_fc)  # getSR, shape_to_K  and fc_geometry from
    kind = shape_to_K(in_fc)  # npGeo_io
    tmp, IFT, IFT_2 = fc_geometry(in_fc, SR)
    m = np.nanmin(tmp, axis=0)  # shift to bottom left of extent
    info = "feature to points"
    a = tmp - m
    g = Geo(a, IFT=IFT, Kind=kind, Info=info)  # create the geo array
    cent = g.centroids  # create the centroids
    cent = cent + m
    dt = np.dtype([('Xs', '<f8'), ('Ys', '<f8')])
    cent = uts(cent, dtype=dt)
    return cent, SR
Example #15
def split_at_vertices(in_fc, out_fc):
    """Unique segments retained when poly geometry is split at vertices.
    """
    result = check_path(out_fc)
    if result[0] is None:
        print(result[1])
        return result[1]
    gdb, name = result
    SR = getSR(in_fc)
    a, IFT, IFT_2 = fc_geometry(in_fc, SR)
    ag = Geo(a, IFT)
    #    fr_to = ag.unique_segments()  # geo method
    fr_to = ag.polys_to_segments()
    dt = np.dtype([('X_orig', 'f8'), ('Y_orig', 'f8'), ('X_dest', 'f8'),
                   ('Y_dest', 'f8')])
    od = uts(fr_to, dtype=dt)  # ---- unstructured to structured
    tmp = "memory/tmp"
    if arcpy.Exists(tmp):
        arcpy.Delete_management(tmp)
    arcpy.da.NumPyArrayToTable(od, tmp)
    args = [tmp, out_fc] + list(od.dtype.names) + ["GEODESIC", "", SR]
    arcpy.XYToLine_management(*args)
    return
Example #16
def pkg_info_json(folder=None):
    r"""Access package info from `*.json` files in a `folder`.

    Parameters
    ----------
    folder : text
        File path to the folder containing the `*.json` files.  By default,
        this is derived from ``sys.prefix``

    >>> sys.prefix
    ... r"C:\arc_pro\bin\Python\envs\arcgispro-py3"
    >>> # ---- `conda-meta` is appended to it to yield `folder`, see `Example`

    Notes
    -----
    The keyword to search on is **depends**.
    Other options in json files include::

        arch, auth, build, build_number, channel, depends, files, fn,
        has_prefix, license, link, md5, name, noarch, platform, preferred_env,
        priority, requires, schannel, size, subdir, timestamp, url, version,
        with_features_depends

    Example
    -------
    folder = "C:/...install path/bin/Python/envs/arcgispro-py3/conda-meta" ::

        folder = sys.prefix + "/conda-meta"
        packages, dep_counts, required_by = pkg_info_json(folder)
        f0 = r"C:\Git_Dan\npgeom\Project_npg\npgeom.gdb\dep_pkg_info"
        f1 = r"C:\Git_Dan\npgeom\Project_npg\npgeom.gdb\dep_counts"
        f2 = r"C:\Git_Dan\npgeom\Project_npg\npgeom.gdb\dep_required_by"
        arcpy.da.NumPyArrayToTable(packages, f0)
        arcpy.da.NumPyArrayToTable(dep_counts, f1)
        arcpy.da.NumPyArrayToTable(required_by, f2)

    """
    # ---- Checks
    if not folder:
        folder = sys.prefix + "\\conda-meta"
    folder = Path(folder)
    if not folder.is_dir():
        print("\nInvalid path... {}".format(folder))
        return
    files = list(folder.glob("*.json"))
    if not files:
        print("{} doesn't have any json files".format(folder))
        return
    #
    # --- Package, Filename, Dependencies
    packages = []
    m0 = m1 = m2 = 0
    for f in files:
        ret = parse_json(f, key="depends")  # ---- look at dependencies only
        nme = str(f.name).rsplit("-", 2)[0]  # ---- split off the last two
        if len(ret) == 1:
            ret = ret[0]
        elif len(ret) > 1:
            srted = sorted(ret)
            ret = "; ".join([i for i in srted if "py" not in i])  # `; ` used
        else:
            ret = "None"
        m0 = max(m0, len(nme))
        m1 = max(m1, len(str(f.name)))
        m2 = max(m2, len(ret))
        packages.append((nme, f.name, ret))
    dt1 = [("Package", "<U{}".format(m0)), ("Filename", "<U{}".format(m1)),
           ("Dependencies", "<U{}".format(m2))]
    packages = np.asarray(packages, dtype=dt1)
    #
    # ---- Dependency, Counts
    z = []
    for dep in packages['Dependencies']:
        if dep not in ("", " "):
            z += dep.split("; ")  # split on `; ` delimiter
    z = np.asarray(z)
    uniq, idx, cnts = np.unique(z, return_index=True, return_counts=True)
    uniq2 = [u.split(" ")[0] if " " in u else u for u in uniq if u != ""]
    m0 = np.max(np.char.str_len(uniq))       # width for the full names
    m1 = np.max(np.char.str_len(uniq2)) + 5  # width for the simple names
    dt2 = [("Full_name", "<U{}".format(m0)), ("Counts", "i8"),
           ("Simple_name", "<U{}".format(m1))]
    dep_counts = np.asarray(list(zip(uniq, cnts, uniq2)), dtype=dt2)
    #
    # ---- Package, Required_by
    required_by = []
    names = packages['Package']
    depends = packages['Dependencies']
    max_len = 0
    for nme in names:
        if nme in ('py', 'python'):
            required_by.append([nme, "many"])
            continue
        w = names[[nme in i for i in depends]]
        if np.size(w) > 0:
            v = w.tolist()
            v0 = "; ".join([i.split("; ")[0] for i in v])
            max_len = max(max_len, len(v0))
            required_by.append([nme, v0])
        else:
            required_by.append([nme, "None"])
    r_dt = "<U{}".format(max_len)
    dt = np.dtype([('Package', '<U30'), ('Required_by', r_dt)])
    required_by = uts(np.asarray(required_by), dtype=dt)
    return packages, dep_counts, required_by
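A sketch of the dynamic field-width pattern used above, with hypothetical rows:

import numpy as np

rows = [("numpy", "numpy-1.20.1-py3.json", "python >=3.6"),
        ("scipy", "scipy-1.6.2-py3.json", "numpy >=1.16.5; python >=3.6")]
m0, m1, m2 = (max(len(r[i]) for r in rows) for i in range(3))
dt1 = [("Package", "<U{}".format(m0)), ("Filename", "<U{}".format(m1)),
       ("Dependencies", "<U{}".format(m2))]
packages = np.asarray(rows, dtype=dt1)   # structured, no field truncation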
Example #17
def pkg_info_conda(folder=None):
    r"""Access package info.

    Parameters
    ----------
    folder : text
        Path to the `user` conda folder installed with ArcGIS Pro.

    Requires
    --------
    `os` and `json` modules.

    Example
    -------
    ::

        folder = r'C:\Users\dan_p\AppData\Local\ESRI\conda\pkgs'
        sub_folder="info"
        file_name = "index.json"
        text = "python"
        np.isin(out['Depend'], 'python')

    """
    out = []
    if folder is None:
        arc_pth = r"\AppData\Local\ESRI\conda\pkgs"
        user = os.path.expandvars("%userprofile%")
        folder = "{}{}".format(user, arc_pth)
        if not os.path.isdir(folder):
            print("{} doesn't exist".format(folder))
            return None
    dir_lst = os.listdir(folder)
    max_len = 0
    for d in dir_lst:  # [:20]:
        if d not in ("cache", ".trash"):
            fname = folder + os.sep + d + os.sep + r"info\index.json"
            if os.path.isfile(fname):
                with open(fname, 'r') as f:
                    meta = json.loads(f.read())
                depends = " ".join([i.split(" ")[0] for i in meta["depends"]])
                depends = depends.replace('python', '')
                max_len = max(max_len, len(depends))
                out.append([meta["name"], meta["version"], meta["build"],
                            depends])
    r_dt = "<U{}".format(max_len)
    dt = np.dtype([('Package', '<U30'), ('Version', '<U15'), ('Build', '<U15'),
                   ('Requires', r_dt)])
    packages = uts(np.asarray(out), dtype=dt)
    out = []
    names = packages['Package']
    depends = packages['Requires']
    max_len = 0
    for nme in names:
        f = np.char.find(depends, nme.split("-")[0])
        w = np.where(f != -1)[0]
        if np.size(w) > 0:
            v = names[w].tolist()
            v0 = " ".join([i.split(" ")[0] for i in v])
            max_len = max(max_len, len(v0))
            out.append([nme, v0])
        else:
            out.append([nme, "None"])
    r_dt = "<U{}".format(max_len)
    dt = np.dtype([('Package', '<U30'), ('Required_by', r_dt)])
    out = uts(np.asarray(out), dtype=dt)
    return packages, out
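The reverse-dependency lookup boils down to ``np.char.find`` over the dependency strings; a small sketch with hypothetical data:

import numpy as np

names = np.array(['numpy', 'scipy', 'pandas'])
requires = np.array(['', 'numpy', 'numpy python-dateutil pytz'])
hits = np.where(np.char.find(requires, 'numpy') != -1)[0]
required_by = " ".join(names[hits].tolist())   # 'scipy pandas'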
Example #18
def fc_geometry(in_fc, SR=None, IFT_rec=False, true_curves=False, deg=5):
    """Derive, arcpy geometry objects from a FeatureClass searchcursor.

    Parameters
    ----------
    in_fc : text
        Path to the input featureclass.  Points not supported.
    SR : spatial reference
        Spatial reference object, name or id.
    IFT_rec : boolean
        Return the ``IFT`` as a structured array as well.
    true_curves : boolean
        If True, small single-part shapes containing true curves (circles,
        ellipses) are densified at ``deg`` increments before the points are
        extracted.
    deg : integer
        Used to densify curves found for circles and ellipses.  Values of
        1, 2, 5 and 10 deg(rees) are appropriate.  No error checking is done.

    Returns
    -------
    ``a_2d, IFT`` (ids_from_to), where ``a_2d`` are the points as a 2D array
    and ``IFT`` represents the id numbers (which are repeated for multipart
    shapes) and the from-to pairs of the feature parts.

    See Also
    --------
    Use ``array_ift`` to produce ``Geo`` objects directly from pre-existing
    arrays, or arrays derived from existing arcpy poly objects which
    originated from esri featureclasses.

    Notes
    -----
    Multipoint, polyline and polygon featureclasses and their variants are
    supported.

    **Point and Multipoint featureclasses**

    >>> cent = arcpy.da.FeatureClassToNumPyArray(
    ...     pnt_fc, ['OID@', 'SHAPE@X', 'SHAPE@Y'])

    For multipoints, use

    >>> allpnts = arcpy.da.FeatureClassToNumPyArray(
    ...     multipnt_fc, ['OID@', 'SHAPE@X', 'SHAPE@Y'],
    ...     explode_to_points=True)

    **IFT array structure**

    To see the ``IFT`` output as a structured array, use the following.

    >>> dt = np.dtype({'names': ['ID', 'From', 'To'], 'formats': ['<i4']*3})
    >>> z = IFT.view(dtype=dt).squeeze()
    >>> prn_tbl(z)  # to see the output in tabular form

    **Flatten geometry tests**

    >>> %timeit fc_geometry(in_fc2, SR)
    105 ms ± 1.04 ms per loop (mean ± std. dev. of 7 runs, 10 loops each)
    ...
    >>> cur = arcpy.da.SearchCursor(in_fc, 'SHAPE@', None, SR)
    >>> polys = [row[0] for row in cur]
    >>> pts = [[(i.X, i.Y) if i else (np.nan, np.nan)
                for i in itertools.chain.from_iterable(shp)]
                for shp in polys]
    7.28 ms ± 105 µs per loop (mean ± std. dev. of 7 runs, 100 loops each)
    """
    msg = """
    Use arcpy.FeatureClassToNumPyArray for Point files.
    MultiPoint, Polyline and Polygon featureclasses and their variants are
    supported.
    """

    def _multipnt_(in_fc, SR):
        """Convert multipoint geometry to array"""
        pnts = arcpy.da.FeatureClassToNumPyArray(
            in_fc, ['OID@', 'SHAPE@X', 'SHAPE@Y'],
            spatial_reference=SR,
            explode_to_points=True)
        id_len = np.vstack(np.unique(pnts['OID@'], return_counts=True)).T
        a_2d = stu(pnts[['SHAPE@X', 'SHAPE@Y']])  # ---- use ``stu`` to convert
        return id_len, a_2d

    def _polytypes_(in_fc, SR, true_curves, deg):
        """Convert polylines/polygons geometry to array.

        >>> cur = arcpy.da.SearchCursor( in_fc, ('OID@', 'SHAPE@'), None, SR)
        >>> ids = [r[0] for r in cur]
        >>> arrs = [[j for j in r[1]] for r in cur]
        """
        def _densify_curves_(geom, deg=deg):
            """Densify geometry for circle and ellipse (geom) at ``deg`` degree
            increments. deg, angle = (1, 361), (2, 181), (5, 73)
            """
            if 'curve' in geom.JSON:
                return geom.densify('ANGLE', 1, np.deg2rad(deg))
            return geom

        # ----
        null_pnt = (np.nan, np.nan)
        id_len = []
        a_2d = []
        with arcpy.da.SearchCursor(in_fc, ('OID@', 'SHAPE@'), None,
                                   SR) as cursor:
            for row in cursor:
                sub = []
                IDs = []
                num_pnts = []
                p_id = row[0]
                geom = row[1]
                prt_cnt = geom.partCount
                if true_curves:
                    p_num = geom.pointCount  # ---- added
                    if (prt_cnt == 1) and (p_num <= 4):
                        geom = _densify_curves_(geom, deg=deg)
                for arr in geom:
                    pnts = [[pt.X, pt.Y] if pt else null_pnt for pt in arr]
                    sub.append(np.asarray(pnts))
                    IDs.append(p_id)
                    num_pnts.append(len(pnts))
                part_count = np.arange(prt_cnt)
                result = np.stack((IDs, part_count, num_pnts), axis=-1)
                id_len.append(result)
                a_2d.extend([j for i in sub for j in i])
        # ----
        id_len = np.concatenate(id_len, axis=0)
        a_2d = np.asarray(a_2d)
        return id_len, a_2d

    #
    # ---- Check and process section ----------------------------------------
    desc = arcpy.da.Describe(in_fc)
    fc_kind = desc['shapeType']
    SR = desc['spatialReference']
    if fc_kind == "Point":
        print(dedent(msg))
        return None, None
    if fc_kind == "Multipoint":
        id_len, a_2d = _multipnt_(in_fc, SR)
    else:
        id_len, a_2d = _polytypes_(in_fc, SR, true_curves, deg)
    # ---- Return and send out
    ids = id_len[:, 0]
    too = np.cumsum(id_len[:, 2])
    frum = np.concatenate(([0], too))
    from_to = np.concatenate((frum[:-1, None], too[:, None]), axis=1)
    IFT = np.concatenate((ids[:, None], from_to), axis=1)
    if IFT_rec:
        id_len2 = np.concatenate((id_len, IFT[:, 1:]), axis=1)
        dt = np.dtype({
            'names': ['IDs', 'Part', 'Points', 'From_pnt', 'To_pnt'],
            'formats': ['i4', 'i4', 'i4', 'i4', 'i4']
        })
        IFT_2 = uts(id_len2, dtype=dt)
        return a_2d, IFT, IFT_2
    return a_2d, IFT
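The ``IFT`` bookkeeping at the end is plain numpy and can be sketched with hypothetical id/part/point counts:

import numpy as np

id_len = np.array([[1, 0, 5],      # shape 1, part 0, 5 points
                   [1, 1, 4],      # shape 1, part 1, 4 points
                   [2, 0, 7]])     # shape 2, part 0, 7 points
too = np.cumsum(id_len[:, 2])                        # [ 5,  9, 16]
frum = np.concatenate(([0], too))                    # [ 0,  5,  9, 16]
from_to = np.concatenate((frum[:-1, None], too[:, None]), axis=1)
IFT = np.concatenate((id_len[:, :1], from_to), axis=1)
# IFT -> [[1, 0, 5], [1, 5, 9], [2, 9, 16]]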