def _get_panas(data_dir=None, resume=True, verbose=1):
    """
    Gets PANAS subscales from MyConnectome behavioral data.

    Parameters
    ----------
    data_dir, resume, verbose
        Accepted for API consistency with sibling fetchers; not used by the
        visible body (the data are read straight from the ``BEHAVIOR`` URL).

    Returns
    -------
    panas : dict
        Where keys are PANAS subscale names and values are session-level
        composite measures (sum of the subscale's item scores)

    Raises
    ------
    HTTPError
        If the behavioral data cannot be fetched.
    """
    from numpy.lib.recfunctions import structured_to_unstructured as stu

    # download behavioral data
    out = urlopen(BEHAVIOR)
    if out.status == 200:
        data = out.readlines()
    else:
        # BUG FIX: urllib.error.HTTPError requires (url, code, msg, hdrs, fp);
        # the original single-argument call raised TypeError instead of the
        # intended HTTPError.  (Assumes HTTPError is urllib.error.HTTPError —
        # the import is outside this view.)
        raise HTTPError(BEHAVIOR, out.status, 'Cannot fetch behavioral data',
                        out.headers, None)

    # drop sessions with missing PANAS items; column 0 holds 'ses-XXX' labels
    sessions = np.genfromtxt(data, delimiter='\t', usecols=0, dtype=object,
                             names=True, converters={0: lambda s: s.decode()})
    keeprows = np.isin(sessions, ['ses-{}'.format(f) for f in SESSIONS])
    # PANAS item columns occupy usecols 28-90 of the TSV
    panas = np.genfromtxt(data, delimiter='\t', names=True, dtype=float,
                          usecols=range(28, 91))[keeprows]

    # create subscales from individual item scores: each subscale is the
    # session-wise sum of its member 'panasN' columns
    measures = {}
    for subscale, items in PANAS.items():
        measure = stu(panas[['panas{}'.format(f) for f in items]])
        measures[subscale] = measure.sum(axis=-1)
    return measures
def fc2na(in_fc):
    """Return FeatureClassToNumPyArray.  Shorthand interface.

    Pull the geometry out of a featureclass and clean it up: the
    coordinates are shifted to a 0,0 origin and rounded.

    Returns
    -------
    oids : array
        The object id values as derived from the featureclass.
    a : structured array
        The coordinates with named fields ('X', 'Y').  These are useful
        for sorting and/or finding duplicates.
    xy : ndarray
        The coordinates in ``a`` as an ndarray.

    Notes
    -----
    Projected/planar coordinates are assumed and they are rounded to the
    nearest millimeter, change if you like.
    """
    flds = ['OID@', 'SHAPE@X', 'SHAPE@Y']
    arr = FeatureClassToNumPyArray(in_fc, flds, explode_to_points=True)
    oids = arr['OID@']
    xs = arr['SHAPE@X']
    ys = arr['SHAPE@Y']
    x_min = np.min(xs)
    y_min = np.min(ys)
    dt = np.dtype([('X', 'f8'), ('Y', 'f8')])
    a = np.empty((len(xs), ), dtype=dt)
    a['X'] = np.round(xs - x_min, 3)  # origin-shift, then round `X` and `Y`
    a['Y'] = np.round(ys - y_min, 3)
    return oids, a, stu(a)
def _multipnt_(in_fc, SR):
    """Convert multipoint geometry to array.

    Returns the (id, point-count) pairs per feature and the 2D coordinates.
    """
    flds = ['OID@', 'SHAPE@X', 'SHAPE@Y']
    pnts = arcpy.da.FeatureClassToNumPyArray(
        in_fc, flds, spatial_reference=SR, explode_to_points=True)
    ids, counts = np.unique(pnts['OID@'], return_counts=True)
    id_len = np.vstack((ids, counts)).T
    # ``stu`` converts the structured coordinate fields to a plain ndarray
    a_2d = stu(pnts[['SHAPE@X', 'SHAPE@Y']])
    return id_len, a_2d
def common_segments(self):
    """Return the common segments in poly features.

    Result is an array of from-to pairs of points.

    Notes
    -----
    Segments are viewed as structured records (via ``uts``) so that whole
    from-to rows can be compared at once.  A segment shared between two
    features is presumably traversed in the opposite direction by the second
    feature, hence the comparison against the field-reversed view.
    """
    h = self.polys_to_segments()  # presumably (N, 4) from-to pairs — confirm
    h_0 = uts(h)  # unstructured -> structured; one record per segment
    names = h_0.dtype.names
    # reversed field order: the same segment traversed the other way
    h_1 = h_0[list(names[-2:] + names[:2])]
    # NOTE(review): h_1 keeps the original field *names* in the new order,
    # so h_0 and h_1 have different dtypes; np.isin on mismatched structured
    # dtypes is fragile — confirm this compares whole records as intended.
    idx = np.isin(h_0, h_1)
    common = h_0[idx]
    return stu(common)  # back to a plain ndarray of from-to pairs
def _view_(a): """Return a view of the array using the dtype and length Notes ----- The is a quick function. The expectation is that the array contains a uniform dtype (e.g 'f8'). For example, coordinate values in the form ``dtype([('X', '<f8'), ('Y', '<f8')])`` maybe with a Z See ``structured_to_unstructured`` in np.lib.recfunctions and the imports. """ return stu(a)
def unique_segments(self):
    """Return the unique segments in poly features.

    Result is an array of from-to pairs of points.
    """
    h = self.polys_to_segments()  # presumably (N, 4) from-to pairs — confirm
    h_0 = uts(h)  # unstructured -> structured; one record per segment
    names = h_0.dtype.names
    # field-reversed view: the same segment traversed the other way
    h_1 = h_0[list(names[-2:] + names[:2])]
    # NOTE(review): h_1 keeps the original field *names* in the new order,
    # so np.isin compares structured arrays of differing dtype — fragile.
    idx0 = ~np.isin(h_0, h_1)  # True where a segment has no reversed twin
    uniq0 = h_0[idx0]          # segments without a reversed twin
    uniq1 = h_0[~idx0]         # segments WITH a reversed twin
    # NOTE(review): stacking both subsets yields *all* segments, merely
    # reordered — confirm this is the intended meaning of "unique" here.
    uniq01 = np.hstack((uniq0, uniq1))
    return stu(uniq01)
def fc_arc_array(in_fc, SR=None):
    """fc to arcpy Array.

    Derive the object ids, their cumulative bin edges and the coordinates
    (raw and shifted towards the minimum) from a featureclass.
    """
    if SR is None:
        SR = getSR(in_fc)
    geom_flds = ['OID@', 'SHAPE@X', 'SHAPE@Y']
    z = arcpy.da.FeatureClassToNumPyArray(in_fc, geom_flds, "", SR,
                                          explode_to_points=True)
    idz = z['OID@']
    # cumulative count of points per id: usable as slice boundaries
    idbin = np.cumsum(np.bincount(idz))
    xy = stu(z[['SHAPE@X', 'SHAPE@Y']])
    mins = np.nanmin(xy, axis=0)
    shifted = xy - mins
    return idz, idbin, xy, shifted
def _view_(a): """Return a view of the array using the dtype and length Notes ----- The is a quick function. The expectation is that the array contains a uniform dtype (e.g 'f8'). For example, coordinate values in the form ``dtype([('X', '<f8'), ('Y', '<f8')])`` maybe with a Z. References ---------- ``structured_to_unstructured`` in np.lib.recfunctions and its imports. `<https://github.com/numpy/numpy/blob/master/numpy/lib/recfunctions.py>`_. """ return stu(a) # ---- structured to unstructured
def polys_to_unique_pnts(a, as_structured=True):
    """Based on `polys_to_points`.

    Allows for recreation of original point order and unique points.
    Structured arrays are used for sorting.
    """
    struct = _view_as_struct_(a)  # abbreviated stand-in for `uts`
    uniq_pnts, first_idx, counts = np.unique(
        struct, return_index=True, return_counts=True, axis=0)
    uniq_pnts = stu(uniq_pnts)
    if not as_structured:
        # unique points, in their original order of first appearance
        return struct[np.sort(first_idx)]
    num = uniq_pnts.shape[0]
    dt = [('New_ID', '<i4'), ('Xs', '<f8'), ('Ys', '<f8'), ('Num', '<i4')]
    out = np.zeros((num, ), dtype=dt)
    out['New_ID'] = first_idx   # index of first occurrence in the original
    out['Xs'] = uniq_pnts[:, 0]
    out['Ys'] = uniq_pnts[:, 1]
    out['Num'] = counts         # how many times each point appears
    return out[np.argsort(out, order='New_ID')]
def prn_arrays(a, edgeitems=2):
    """Print a different representation of object or ndarrays.

    The expectation is that the array has nested objects or ndim is > 3:

    edgeitems, threshold : integer
        This is on a per sub array basis.
    """
    def _ht_(a, _e):
        """Print 2d array."""
        # head keeps the opening bracket, tail the closing one, so the two
        # can be joined around an ellipsis
        head = repr(a[:_e].tolist())[:-1]
        tail = repr(a[-_e:].tolist())[1:]
        return head, tail
    _e = edgeitems
    s = n_h.shape_finder(a)  # project helper — presumably returns a
    #                          structured array with 'shape'/'part' fields
    # NOTE(review): `cnts` is assigned but never used below
    u, cnts = np.unique(s[['shape', 'part']], return_counts=True)
    s0 = stu(u)
    N = np.arange(len(s0))
    tb = " ... "
    for cnt in N:
        i, j = s0[cnt]  # presumably (outer index, part index) — confirm
        sub = a[i]
        if sub.ndim == 2:
            # 2d sub-array: print head ... tail on one line
            head, tail = _ht_(sub, _e)
            print("\n({},{},0) {}{}{}".format(i, j, head, tb, tail))
        else:
            sub = sub[j]  # descend one level into the object array
            if sub.ndim == 2:
                head, tail = _ht_(sub, _e)
                print("\n({},{},0) {}{}{}".format(i, j, head, tb, tail))
            else:
                # deeper nesting still: print each sub-sub-array indented
                print("\n({},{},.)".format(i, j))
                for k, val in enumerate(sub):
                    head, tail = _ht_(val, _e)
                    ht = head + " ... " + tail
                    print(" {} - {}".format(k, ht))  # val.tolist()))
    return
# # (1) ---- get the points out_flds = ['OID@', 'SHAPE@X', 'SHAPE@Y'] + [group_by] a = arcpy.da.FeatureClassToNumPyArray(in_fc, out_flds, "", SR, True) # # (2) ---- determine the unique groupings of the points uniq, idx, rev = np.unique(a[group_by], True, True) groups = [a[np.where(a[group_by] == i)[0]] for i in uniq] # # (3) ---- for each group, perform the concave hull hulls = [] for i in range(0, len(groups)): p = groups[i] p = p[['SHAPE@X', 'SHAPE@Y']] n = len(p) p = stu(p) # # ---- point preparation section ------------------------------------ p = np.array(list(set([tuple(i) for i in p]))) # Remove duplicates idx_cr = np.lexsort((p[:, 0], p[:, 1])) # indices of sorted array in_pnts = np.asarray([p[i] for i in idx_cr]) # p[idx_cr] # in_pnts = in_pnts.tolist() in_pnts = [tuple(i) for i in in_pnts] if hull_type == 'concave': cx = np.array(concave(in_pnts, k_factor)) # requires a list of tuples else: cx = np.array(convex(in_pnts)) hulls.append(cx.tolist()) # ---- # if out_type == 'Polyline':
def fc_to_Geo(in_fc, geom_kind=2, minX=0, minY=0, sp_ref=None, info=""):
    """Convert a FeatureClassToNumPyArray to a Geo array.

    This works with the geometry only.  Skip the attributes for later.  The
    processing requirements are listed below.  Just copy and paste.

    Parameters
    ----------
    in_fc : featureclass
        Featureclass in a file geodatabase.
    geom_kind : integer
        Points (0), Polylines (1) and Polygons (2)
    minX, minY : numbers
        If these values are 0, then the minimum values will be determined
        and used to shift the data towards the origin.
    sp_ref : text
        Spatial reference name. eg `'NAD_1983_CSRS_MTM_9'`

    Notes
    -----
    The `arcpy.da.Describe` method takes a substantial amount of time.

    >>> %timeit Describe(fc2)
    ... 355 ms ± 17.4 ms per loop (mean ± std. dev. of 7 runs, 1 loop each)
    """
    def _area_part_(a):
        """Mini e_area, used by areas and centroids."""
        x0, y1 = (a.T)[:, 1:]
        x1, y0 = (a.T)[:, :-1]
        e0 = np.einsum('...i,...i->...i', x0, y0)
        e1 = np.einsum('...i,...i->...i', x1, y1)
        return np.sum((e0 - e1) * 0.5)

    def _cw_(a):
        """Clockwise check: 1 for clockwise (outer) rings, 0 otherwise."""
        return 1 if _area_part_(a) > 0. else 0

    # -- (1) Foundational steps
    # Create the array, extract the object id values.  To avoid floating
    # point issues, extract the coordinates, round them to a finite
    # precision and shift them to the x-y origin.
    kind = geom_kind
    if sp_ref is None:  # a get_SR(in_fc) lookup could be used here instead
        sp_ref = "undefined"
    a = FeatureClassToNumPyArray(
        in_fc, ['OID@', 'SHAPE@X', 'SHAPE@Y'],
        explode_to_points=True)  # spatial_reference=sp_ref
    oids = a['OID@']
    xy = a[['SHAPE@X', 'SHAPE@Y']]
    mn = [np.min(xy['SHAPE@X']), np.min(xy['SHAPE@Y'])]
    mx = [np.max(xy['SHAPE@X']), np.max(xy['SHAPE@Y'])]
    extent = np.array([mn, mx])
    # -- shift towards the origin (or towards explicit minX/minY) if needed
    dx, dy = mn
    if minX != 0.:
        dx = minX
    if minY != 0.:
        dy = minY
    xy['SHAPE@X'] = np.round(xy['SHAPE@X'] - dx, 3)
    xy['SHAPE@Y'] = np.round(xy['SHAPE@Y'] - dy, 3)
    xy.dtype.names = ['X', 'Y']
    xy = repack_fields(xy)
    #
    # -- (2) Prepare the oid data for use in identifying from-to points.
    _, indx = np.unique(oids, return_index=True)
    id_vals = oids[indx]
    #
    # -- (3) Construct the IFT data using `id_fr_to` to carry the load.
    IFT_ = np.asarray(id_fr_to(xy, oids))
    cols = IFT_.shape[0]
    IFT = np.full((cols, 6), -1, dtype=np.int32)
    IFT[:, :3] = IFT_
    #
    # -- (4) clockwise check for polygon parts to identify outer/inner rings
    # BUG FIX: xy_arr was only created inside the `kind == 2` branch in the
    # original, yet it is needed unconditionally for the Geo constructor
    # below (NameError for polylines/points).  Hoist it out of the branch.
    xy_arr = stu(xy)  # view the data as an unstructured array
    if kind == 2:  # polygons
        cl_wise = np.array([_cw_(xy_arr[i[1]:i[2]]) for i in IFT_])
    else:  # not relevant for polylines or points
        # NOTE(review): full_like(oids, ...) has one entry per *point* while
        # IFT has one row per *part* — confirm these lengths agree upstream.
        cl_wise = np.full_like(oids, -1)
    IFT[:, 3] = cl_wise
    #
    # -- (5) construct part_ids and pnt_nums
    if kind == 2:
        parts = [np.cumsum(IFT[:, 3][IFT[:, 0] == i]) for i in id_vals]
        part_ids = np.concatenate(parts)
        ar = np.where(IFT[:, 3] == 1)[0]
        ar0 = np.stack((ar[:-1], ar[1:])).T
        pnt_nums = np.zeros(IFT.shape[0], dtype=np.int32)
        for (i, j) in ar0:  # now provide the point numbers per part per shape
            pnt_nums[i:j] = np.arange((j - i))  # smooth!!!
    else:
        part_ids = np.ones_like(oids)
        pnt_nums = np.ones_like(oids)
    IFT[:, 4] = part_ids
    IFT[:, 5] = pnt_nums
    #
    # -- (6) Create the output array... as easy as ``a`` to ``z``
    z = Geo(xy_arr, IFT, kind, Extent=extent, Info="test", SR=sp_ref)
    out = copy.deepcopy(z)
    return out