Code Example #1
    def _update_alternate_representations(self):
        # TODO ADD CHECK THAT ALL POINTS FROM self.points_set ARE INDEED WITHIN THE MAP !!!
        # IF NOT, SEND EXCEPTION (WILL HAVE TO CATCH THIS EXCEPTION ANYWHERE AN OBSTACLE IS CREATED OR UPDATED)

        # Shapely multipoint for determining envelope, convex hull, centroid
        self._shapely_multipoint = MultiPoint(list(self.points_set))

        # Envelope/Bounding box of the obstacle and convex hull
        self.envelope = self._shapely_multipoint.envelope
        self.convex_hull = self._shapely_multipoint.convex_hull

        # Polygonal representation of the obstacle
        self.polygon = self.envelope if self.axis_rect_hypothesis else self.convex_hull

        # Points in integer map coordinates
        self.map_points_set = set()
        for point in self.points_set:
            self.map_points_set.add(
                (int(point[0] / self.map_metadata.resolution),
                 int(point[1] / self.map_metadata.resolution)))

        # Envelope corners coordinates
        # The envelope degenerates to a Point or a LineString when the
        # obstacle has a single point or only collinear points
        if isinstance(self.envelope, (Point, LineString)):
            x, y = self.envelope.xy
        else:
            x, y = self.envelope.exterior.xy
        self.bb_top_left_corner = (
            int(min(x) / self.map_metadata.resolution),
            int(min(y) / self.map_metadata.resolution))
        self.bb_bottom_right_corner = (
            int(max(x) / self.map_metadata.resolution),
            int(max(y) / self.map_metadata.resolution))

        # Discretized polygon as points in integer map coordinates
        self.discretized_polygon = Obstacle._make_discretized_polygon(
            self.convex_hull, self.polygon, self.map_metadata.resolution)

        ## Dependencies on previously set attributes are passed explicitly as function call parameters

        # Points as ROS point cloud
        self.ros_point_cloud = self._make_ros_point_cloud()

        # Create pose from centroid of shapely polygon
        self.ros_pose = self._make_ros_pose(self.polygon)

        # Compute push poses, which are perpendicularly positioned at a
        # robot_radius distance from the middle point of each side of the
        # obstacle polygon representation, and directed toward the same middle
        # point.
        self.push_poses = self._make_push_poses(self.convex_hull, self.polygon,
                                                self.map_metadata.resolution)

        # Compute obstacle matrix and inflated obstacle
        self.obstacle_matrix = self._make_inflated_obstacle_grid(
            self.bb_top_left_corner,
            self.bb_bottom_right_corner,
            self.discretized_polygon,
            self.robot_metadata.footprint_2X,
            removeObstaclePoints=True)
        self.robot_inflated_obstacle = self._make_inflated_obstacle_grid(
            self.bb_top_left_corner,
            self.bb_bottom_right_corner,
            self.discretized_polygon,
            self.robot_metadata.footprint,
            removeObstaclePoints=False)
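
A minimal, self-contained sketch of the same world-to-grid conversion and envelope computation, assuming a hypothetical resolution of 0.05 m per cell:

from shapely.geometry import MultiPoint

resolution = 0.05  # meters per grid cell (assumed value)
points_set = {(1.00, 2.30), (1.12, 2.41), (0.95, 2.35)}
map_points_set = {(int(px / resolution), int(py / resolution))
                  for px, py in points_set}
envelope = MultiPoint(list(points_set)).envelope  # bounding-box Polygon
print(sorted(map_points_set))
print(envelope.bounds)  # (minx, miny, maxx, maxy) in world coordinates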
Code Example #2
File: dbscan.py Project: chaserchen/location-cluster
from shapely.geometry import MultiPoint
from geopy.distance import great_circle


def get_centermost_point(cluster):
    # Return the cluster member closest (by great-circle distance) to the
    # cluster centroid; compute the centroid once instead of twice
    centroid_point = MultiPoint(cluster).centroid
    centroid = (centroid_point.x, centroid_point.y)
    centermost_point = min(
        cluster, key=lambda point: great_circle(point, centroid).miles)
    return tuple(centermost_point)
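
A quick usage sketch with a hypothetical cluster of (lat, lon) pairs:

cluster = [(52.52, 13.40), (52.51, 13.42), (52.53, 13.41)]
print(get_centermost_point(cluster))  # the member nearest the cluster centroid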
Code Example #3
 def add_polygon(self, points, idx=1):
     super(ShapelyShape, self).add_polygon(points, idx=idx)
     points = MultiPoint(points)
     # convex_hull of a MultiPoint is already a shapely Polygon (given three
     # or more non-collinear points), so it can be stored directly
     self.polygon = points.convex_hull
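
For context, a minimal sketch of what the convex-hull step produces, using made-up input points:

from shapely.geometry import MultiPoint

pts = MultiPoint([(0, 0), (2, 0), (2, 2), (0, 2), (1, 1)])
hull = pts.convex_hull  # the interior point (1, 1) is dropped
print(hull.geom_type, hull.area)  # Polygon 4.0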
Code Example #4
File: TrackingFunctions.py Project: ccg-esb-lab/uJ
    def get_cell_axis(poly):
        line = LineString()
        pre_cloud = []
        angle, center = get_max_x_angle(poly)
        poly = affinity.rotate(poly, angle, center)
        poly = fix_poly(poly)
        ext = poly.exterior.coords

        decs = 0
        for seed_point in ext:
            pre_cloud_i = []
            seed_point = Point(seed_point)
            radius = .5
            sphere = seed_point.buffer(radius)
            intersection = poly.exterior.intersection(sphere.exterior)
            # Grow a circle around the seed point until it stops
            # intersecting the polygon exterior
            while not intersection.is_empty:
                sphere = seed_point.buffer(radius)
                intersection = poly.exterior.intersection(sphere.exterior)
                radius += 3
                if intersection.is_empty:
                    continue

                arc = poly.intersection(sphere.exterior)

                if arc.geom_type == "MultiLineString":
                    for this_geom in arc.geoms:
                        for pi in this_geom.coords:
                            pre_cloud_i.append(
                                (round(pi[0], decs), round(pi[1], decs)))

                elif arc.geom_type == "LineString":
                    for pi in arc.coords:
                        pre_cloud_i.append(
                            (round(pi[0], decs), round(pi[1], decs)))

            pre_cloud.append(pre_cloud_i)

        cloud = MultiPoint(pre_cloud[0])

        for line in pre_cloud:

            line = MultiPoint(line)
            cloud = cloud.union(line)

        ### Fit a polynomial to the point cloud
        xs = []
        ys = []
        for pi in cloud.geoms:
            xs.append(pi.x)
            ys.append(pi.y)

        pf = np.poly1d(np.polyfit(xs, ys, 25))
        xr = np.linspace(np.min(xs), np.max(xs), 100)

        line = []
        for x in xr:
            pi = Point(x, pf(x))
            line.append(pi)

        line = LineString(line)

        line = poly.intersection(line)
        liner = affinity.rotate(line, -angle, center)
        if (liner.geom_type == "MultiLineString"):
            line = LineString()
            for this_geom in liner.geoms:
                if (this_geom.length > line.length):
                    line = this_geom
            liner = line

        linexy = np.array(liner.coords)
        linex = linexy[:, 0]
        liney = linexy[:, 1]

        x = (np.max(xs) - np.min(xs)) / 2 + np.min(xs)
        pi = Point(x, pf(x))

        center = affinity.rotate(pi, -angle, center)

        return liner, center
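
get_cell_axis above boils down to sampling a point cloud along the cell, fitting a high-degree polynomial through it, and clipping the fitted curve to the polygon. A reduced sketch of the fit-and-clip step with assumed sample data:

import numpy as np
from shapely.geometry import LineString, Polygon

cell = Polygon([(0, 0), (10, 0), (10, 2), (0, 2)])
xs = np.linspace(0.5, 9.5, 20)
ys = 1.0 + 0.1 * np.sin(xs)            # assumed noisy midline samples
pf = np.poly1d(np.polyfit(xs, ys, 5))  # degree 5 instead of the original 25
axis = LineString([(x, pf(x)) for x in np.linspace(xs.min(), xs.max(), 100)])
axis = cell.intersection(axis)         # clip the fitted curve to the cell
print(axis.geom_type, round(axis.length, 2))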
Code Example #5
df_map = df_map.sort_values('district')
df_map.set_index('id', inplace=True)

#It contains current data for every sensor
df_map = df_map.join(sensors, how='inner',
                     lsuffix='sens').join(currentData,
                                          how='inner',
                                          rsuffix='current')
df_map = df_map.reset_index()

#Create Point objects in map coordinates from dataframe lon and lat values
map_points = pd.Series([
    Point(m(mapped_x, mapped_y))
    for mapped_x, mapped_y in zip(sensors['longitude'], sensors['latitude'])
])
plaque_points = MultiPoint(list(map_points.values))
wards_polygon = prep(MultiPolygon(list(df_map['poly'].values)))
ldn_points = list(filter(wards_polygon.contains, plaque_points.geoms))

df_map['count'] = df_map['poly'].map(
    lambda x: len(list(filter(prep(x).contains, ldn_points))))

other_cities = currentData[['PM10']].query(
    'id not in @sensors.index').reset_index()
# Map known sensor ids to city names
city_names = {1127: "Wrocław", 820: "Kraków", 337: "Warszawa", 3432: "Gdańsk"}
other_cities['Miasto'] = other_cities.id.apply(
    lambda x: city_names.get(x, ""))
other_cities.rename(index=str, columns={"PM10": "PM 10"}, inplace=True)
other_cities = other_cities.round(1).sort_values(by="PM 10", ascending=False)
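
The prepared-geometry filter used above is the standard Shapely pattern for running many containment tests against the same polygon; a small self-contained sketch:

from shapely.geometry import MultiPoint, Polygon
from shapely.prepared import prep

ward = prep(Polygon([(0, 0), (4, 0), (4, 4), (0, 4)]))
pts = MultiPoint([(1, 1), (5, 5), (2, 3)])
inside = [p for p in pts.geoms if ward.contains(p)]
print(len(inside))  # 2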
Code Example #6
        points = angle_normalized + (np.diff(angle_plus) / 2)
        points[-1] = points[-1] - 1
        return list(points)

    angles = list(map(calc_angulos, df_pt_pts.itertuples()))

    df_bounds = gpd.GeoDataFrame(geometry=df_cruzamentos.buffer(10).boundary)
    df_bounds['angles'] = angles

    from shapely.geometry import MultiPoint

    inter_points = [
        MultiPoint([
            row.geometry.interpolate(a - 0.75, normalized=True)
            for a in row.angles
        ]) for row in df_bounds.itertuples()
    ]

    df_inter_points = gpd.GeoDataFrame(geometry=inter_points)

    # ## Creating lines to 'cut' the polygons
    #
    # Now that we have to cut the polygons, we need lines to perform the slicing.
    # First, we create an intersection of the node buffers with the street layout

    from shapely.geometry import MultiLineString, LineString
    from shapely.affinity import scale

    cut_lines = list(
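
For reference, a standalone sketch of the interpolate(..., normalized=True) call used above, with assumed fractions along the ring:

from shapely.geometry import MultiPoint, Point

boundary = Point(0, 0).buffer(10).boundary  # circle of radius 10
fractions = [0.0, 0.25, 0.5, 0.75]          # assumed positions along the ring
inter = MultiPoint(
    [boundary.interpolate(f, normalized=True) for f in fractions])
print(len(inter.geoms))  # 4 evenly spaced points on the circle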
Code Example #7
def main():
    #-- start MPI communicator
    comm = MPI.COMM_WORLD

    #-- Read the system arguments listed after the program
    parser = argparse.ArgumentParser(
        description="""Create masks for reducing ICESat-2 data into floating
            ice shelf regions
            """,
        fromfile_prefix_chars="@")
    parser.convert_arg_line_to_args = \
        icesat2_toolkit.utilities.convert_arg_line_to_args
    #-- command line parameters
    parser.add_argument('file',
                        type=lambda p: os.path.abspath(os.path.expanduser(p)),
                        help='ICESat-2 ATL06 file to run')
    #-- working data directory for ice shelf shapefiles
    parser.add_argument('--directory',
                        '-D',
                        type=lambda p: os.path.abspath(os.path.expanduser(p)),
                        default=os.getcwd(),
                        help='Working data directory')
    #-- buffer in kilometers for extracting ice shelves (0.0 = exact)
    parser.add_argument(
        '--buffer',
        '-B',
        type=float,
        default=0.0,
        help='Distance in kilometers to buffer ice shelves mask')
    #-- verbosity settings
    #-- verbose will output information about each output file
    parser.add_argument('--verbose',
                        '-V',
                        default=False,
                        action='store_true',
                        help='Verbose output of run')
    #-- permissions mode of the local files (number in octal)
    parser.add_argument('--mode',
                        '-M',
                        type=lambda x: int(x, base=8),
                        default=0o775,
                        help='permissions mode of output files')
    args, _ = parser.parse_known_args()

    #-- create logger
    loglevel = logging.INFO if args.verbose else logging.CRITICAL
    logging.basicConfig(level=loglevel)

    #-- output module information for process
    info(comm.rank, comm.size)
    if (comm.rank == 0):
        logging.info('{0} -->'.format(args.file))

    #-- Open the HDF5 file for reading
    fileID = h5py.File(args.file, 'r', driver='mpio', comm=comm)
    DIRECTORY = os.path.dirname(args.file)
    #-- extract parameters from ICESat-2 ATLAS HDF5 file name
    rx = re.compile(
        r'(processed_)?(ATL\d{2})_(\d{4})(\d{2})(\d{2})(\d{2})'
        r'(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$')
    SUB, PRD, YY, MM, DD, HH, MN, SS, TRK, CYC, GRN, RL, VRS, AUX = rx.findall(
        args.file).pop()
    #-- set the hemisphere flag based on ICESat-2 granule
    HEM = set_hemisphere(GRN)
    #-- pyproj transformer for converting lat/lon to polar stereographic
    EPSG = dict(N=3413, S=3031)
    crs1 = pyproj.CRS.from_string("epsg:{0:d}".format(4326))
    crs2 = pyproj.CRS.from_string("epsg:{0:d}".format(EPSG[HEM]))
    transformer = pyproj.Transformer.from_crs(crs1, crs2, always_xy=True)

    #-- read data on rank 0
    if (comm.rank == 0):
        #-- read shapefile and create shapely polygon objects
        poly_dict, _ = load_ice_shelves(args.directory, args.buffer, HEM)
    else:
        #-- create empty object for dictionary of shapely objects
        poly_dict = None

    #-- Broadcast Shapely polygon objects
    poly_dict = comm.bcast(poly_dict, root=0)
    #-- combined validity check for all beams
    valid_check = False

    #-- read each input beam within the file
    IS2_atl06_beams = []
    for gtx in [k for k in fileID.keys() if bool(re.match(r'gt\d[lr]', k))]:
        #-- check if subsetted beam contains land ice data
        try:
            fileID[gtx]['land_ice_segments']['segment_id']
        except KeyError:
            pass
        else:
            IS2_atl06_beams.append(gtx)

    #-- copy variables for outputting to HDF5 file
    IS2_atl06_mask = {}
    IS2_atl06_fill = {}
    IS2_atl06_dims = {}
    IS2_atl06_mask_attrs = {}
    #-- number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC)
    #-- and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC)
    #-- Add this value to delta time parameters to compute full gps_seconds
    IS2_atl06_mask['ancillary_data'] = {}
    IS2_atl06_mask_attrs['ancillary_data'] = {}
    for key in ['atlas_sdp_gps_epoch']:
        #-- get each HDF5 variable
        IS2_atl06_mask['ancillary_data'][key] = fileID['ancillary_data'][
            key][:]
        #-- Getting attributes of group and included variables
        IS2_atl06_mask_attrs['ancillary_data'][key] = {}
        for att_name, att_val in fileID['ancillary_data'][key].attrs.items():
            IS2_atl06_mask_attrs['ancillary_data'][key][att_name] = att_val

    #-- for each input beam within the file
    for gtx in sorted(IS2_atl06_beams):
        #-- output data dictionaries for beam
        IS2_atl06_mask[gtx] = dict(land_ice_segments={})
        IS2_atl06_fill[gtx] = dict(land_ice_segments={})
        IS2_atl06_dims[gtx] = dict(land_ice_segments={})
        IS2_atl06_mask_attrs[gtx] = dict(land_ice_segments={})

        #-- number of segments
        segment_id = fileID[gtx]['land_ice_segments']['segment_id'][:]
        n_seg, = fileID[gtx]['land_ice_segments']['segment_id'].shape
        #-- invalid value for beam
        fv = fileID[gtx]['land_ice_segments']['h_li'].fillvalue
        #-- check if there are less segments than processes
        if (n_seg < comm.Get_size()):
            continue

        #-- define indices to run for specific process
        ind = np.arange(comm.Get_rank(), n_seg, comm.Get_size(), dtype=int)

        #-- extract delta time
        delta_time = fileID[gtx]['land_ice_segments']['delta_time'][:].copy()
        #-- extract lat/lon
        longitude = fileID[gtx]['land_ice_segments']['longitude'][:].copy()
        latitude = fileID[gtx]['land_ice_segments']['latitude'][:].copy()
        #-- convert lat/lon to polar stereographic
        X, Y = transformer.transform(longitude[ind], latitude[ind])
        #-- convert reduced x and y to shapely multipoint object
        xy_point = MultiPoint(np.c_[X, Y])

        #-- calculate mask for each ice shelf in the dictionary
        associated_map = {}
        for key, poly_obj in poly_dict.items():
            #-- create distributed intersection map for calculation
            distributed_map = np.zeros((n_seg), dtype=bool)
            #-- create empty intersection map array for receiving
            associated_map[key] = np.zeros((n_seg), dtype=bool)
            #-- finds if points are encapsulated (within ice shelf)
            int_test = poly_obj.intersects(xy_point)
            if int_test:
                #-- extract intersected points
                int_map = list(map(poly_obj.intersects, xy_point.geoms))
                int_indices, = np.nonzero(int_map)
                #-- set distributed_map indices to True for intersected points
                distributed_map[ind[int_indices]] = True
            #-- communicate output MPI matrices between ranks
            #-- operation is a logical "or" across the elements.
            comm.Allreduce(sendbuf=[distributed_map, MPI.BOOL], \
                recvbuf=[associated_map[key], MPI.BOOL], op=MPI.LOR)
            distributed_map = None
        #-- wait for all processes to finish calculation
        comm.Barrier()

        #-- group attributes for beam
        IS2_atl06_mask_attrs[gtx]['Description'] = fileID[gtx].attrs[
            'Description']
        IS2_atl06_mask_attrs[gtx]['atlas_pce'] = fileID[gtx].attrs['atlas_pce']
        IS2_atl06_mask_attrs[gtx]['atlas_beam_type'] = fileID[gtx].attrs[
            'atlas_beam_type']
        IS2_atl06_mask_attrs[gtx]['groundtrack_id'] = fileID[gtx].attrs[
            'groundtrack_id']
        IS2_atl06_mask_attrs[gtx]['atmosphere_profile'] = fileID[gtx].attrs[
            'atmosphere_profile']
        IS2_atl06_mask_attrs[gtx]['atlas_spot_number'] = fileID[gtx].attrs[
            'atlas_spot_number']
        IS2_atl06_mask_attrs[gtx]['sc_orientation'] = fileID[gtx].attrs[
            'sc_orientation']
        #-- group attributes for land_ice_segments
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['Description'] = (
            "The land_ice_segments group "
            "contains the primary set of derived products. This includes geolocation, height, and "
            "standard error and quality measures for each segment. This group is sparse, meaning "
            "that parameters are provided only for pairs of segments for which at least one beam "
            "has a valid surface-height measurement.")
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['data_rate'] = (
            "Data within this group are "
            "sparse.  Data values are provided only for those ICESat-2 20m segments where at "
            "least one beam has a valid land ice height measurement.")

        #-- geolocation, time and segment ID
        #-- delta time
        IS2_atl06_mask[gtx]['land_ice_segments']['delta_time'] = delta_time
        IS2_atl06_fill[gtx]['land_ice_segments']['delta_time'] = None
        IS2_atl06_dims[gtx]['land_ice_segments']['delta_time'] = None
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['delta_time'] = {}
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['delta_time'][
            'units'] = "seconds since 2018-01-01"
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['delta_time'][
            'long_name'] = "Elapsed GPS seconds"
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['delta_time'][
            'standard_name'] = "time"
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['delta_time'][
            'calendar'] = "standard"
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['delta_time'][
            'description'] = (
                "Number of GPS "
                "seconds since the ATLAS SDP epoch. The ATLAS Standard Data Products (SDP) epoch offset "
                "is defined within /ancillary_data/atlas_sdp_gps_epoch as the number of GPS seconds "
                "between the GPS epoch (1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP epoch. By "
                "adding the offset contained within atlas_sdp_gps_epoch to delta time parameters, the "
                "time in gps_seconds relative to the GPS epoch can be computed."
            )
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['delta_time']['coordinates'] = \
            "segment_id latitude longitude"
        #-- latitude
        IS2_atl06_mask[gtx]['land_ice_segments']['latitude'] = latitude
        IS2_atl06_fill[gtx]['land_ice_segments']['latitude'] = None
        IS2_atl06_dims[gtx]['land_ice_segments']['latitude'] = ['delta_time']
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['latitude'] = {}
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['latitude'][
            'units'] = "degrees_north"
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['latitude'][
            'contentType'] = "physicalMeasurement"
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['latitude'][
            'long_name'] = "Latitude"
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['latitude'][
            'standard_name'] = "latitude"
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['latitude'][
            'description'] = ("Latitude of "
                              "segment center")
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['latitude'][
            'valid_min'] = -90.0
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['latitude'][
            'valid_max'] = 90.0
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['latitude']['coordinates'] = \
            "segment_id delta_time longitude"
        #-- longitude
        IS2_atl06_mask[gtx]['land_ice_segments']['longitude'] = longitude
        IS2_atl06_fill[gtx]['land_ice_segments']['longitude'] = None
        IS2_atl06_dims[gtx]['land_ice_segments']['longitude'] = ['delta_time']
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['longitude'] = {}
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['longitude'][
            'units'] = "degrees_east"
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['longitude'][
            'contentType'] = "physicalMeasurement"
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['longitude'][
            'long_name'] = "Longitude"
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['longitude'][
            'standard_name'] = "longitude"
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['longitude'][
            'description'] = ("Longitude of "
                              "segment center")
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['longitude'][
            'valid_min'] = -180.0
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['longitude'][
            'valid_max'] = 180.0
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['longitude']['coordinates'] = \
            "segment_id delta_time latitude"
        #-- segment ID
        IS2_atl06_mask[gtx]['land_ice_segments']['segment_id'] = segment_id
        IS2_atl06_fill[gtx]['land_ice_segments']['segment_id'] = None
        IS2_atl06_dims[gtx]['land_ice_segments']['segment_id'] = ['delta_time']
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['segment_id'] = {}
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['segment_id'][
            'units'] = "1"
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['segment_id'][
            'contentType'] = "referenceInformation"
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['segment_id'][
            'long_name'] = "Along-track segment ID number"
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['segment_id'][
            'description'] = (
                "A 7 digit number "
                "identifying the along-track geolocation segment number.  These are sequential, starting with "
                "1 for the first segment after an ascending equatorial crossing node. Equal to the segment_id for "
                "the second of the two 20m ATL03 segments included in the 40m ATL06 segment"
            )
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['segment_id']['coordinates'] = \
            "delta_time latitude longitude"

        #-- subsetting variables
        IS2_atl06_mask[gtx]['land_ice_segments']['subsetting'] = {}
        IS2_atl06_fill[gtx]['land_ice_segments']['subsetting'] = {}
        IS2_atl06_dims[gtx]['land_ice_segments']['subsetting'] = {}
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting'] = {}
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting'][
            'Description'] = (
                "The subsetting group "
                "contains parameters used to reduce land ice segments to specific regions of interest."
            )
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting'][
            'data_rate'] = (
                "Data within this group "
                "are stored at the land_ice_segments segment rate.")

        #-- for each valid ice shelf
        combined_map = np.zeros((n_seg), dtype=bool)
        valid_keys = np.array(
            [k for k, v in associated_map.items() if v.any()])
        valid_check |= (np.size(valid_keys) > 0)
        for key in valid_keys:
            #-- add to combined map for output of total ice shelf mask
            combined_map |= associated_map[key]
            #-- output mask for ice shelf to HDF5
            IS2_atl06_mask[gtx]['land_ice_segments']['subsetting'][
                key] = associated_map[key]
            IS2_atl06_fill[gtx]['land_ice_segments']['subsetting'][key] = None
            IS2_atl06_dims[gtx]['land_ice_segments']['subsetting'][key] = [
                'delta_time'
            ]
            IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting'][
                key] = {}
            IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting'][key][
                'contentType'] = "referenceInformation"
            IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting'][key][
                'long_name'] = '{0} Mask'.format(key)
            IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting'][key][
                'description'] = ('Mask calculated '
                                  'using delineations from the {0}.'.format(
                                      ice_shelf_description[HEM]))
            IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting'][key][
                'reference'] = ice_shelf_reference[HEM]
            IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting'][key][
                'source'] = args.buffer
            IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting'][key]['coordinates'] = \
                "../segment_id ../delta_time ../latitude ../longitude"

        #-- combined ice shelf mask
        IS2_atl06_mask[gtx]['land_ice_segments']['subsetting'][
            'ice_shelf'] = combined_map
        IS2_atl06_fill[gtx]['land_ice_segments']['subsetting'][
            'ice_shelf'] = None
        IS2_atl06_dims[gtx]['land_ice_segments']['subsetting']['ice_shelf'] = [
            'delta_time'
        ]
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting'][
            'ice_shelf'] = {}
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting'][
            'ice_shelf']['contentType'] = "referenceInformation"
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting'][
            'ice_shelf']['long_name'] = 'Ice Shelf Mask'
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting'][
            'ice_shelf']['description'] = (
                'Mask calculated '
                'using delineations from the {0}.'.format(
                    ice_shelf_description[HEM]))
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting'][
            'ice_shelf']['reference'] = ice_shelf_reference[HEM]
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting'][
            'ice_shelf']['source'] = args.buffer
        IS2_atl06_mask_attrs[gtx]['land_ice_segments']['subsetting']['ice_shelf']['coordinates'] = \
            "../segment_id ../delta_time ../latitude ../longitude"

        #-- wait for all processes to finish calculation
        comm.Barrier()

    #-- parallel h5py I/O does not support compression filters at this time
    if (comm.rank == 0) and valid_check:
        #-- output HDF5 files with ice shelf masks
        fargs = (PRD, 'ICE_SHELF_MASK', YY, MM, DD, HH, MN, SS, TRK, CYC, GRN,
                 RL, VRS, AUX)
        file_format = '{0}_{1}_{2}{3}{4}{5}{6}{7}_{8}{9}{10}_{11}_{12}{13}.h5'
        output_file = os.path.join(DIRECTORY, file_format.format(*fargs))
        #-- print file information
        logging.info('\t{0}'.format(output_file))
        #-- write to output HDF5 file
        HDF5_ATL06_mask_write(IS2_atl06_mask,
                              IS2_atl06_mask_attrs,
                              CLOBBER=True,
                              INPUT=os.path.basename(args.file),
                              FILL_VALUE=IS2_atl06_fill,
                              FILENAME=output_file)
        #-- change the permissions mode
        os.chmod(output_file, args.mode)
    #-- close the input file
    fileID.close()
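
The per-shelf masking above runs one cheap intersects test on the whole MultiPoint before testing point by point. A serial sketch of that pattern with made-up geometries (the MPI reduction is omitted):

import numpy as np
from shapely.geometry import MultiPoint, Polygon

shelf = Polygon([(0, 0), (10, 0), (10, 10), (0, 10)])
xy_point = MultiPoint([(1, 1), (20, 20), (5, 5)])
mask = np.zeros(len(xy_point.geoms), dtype=bool)
if shelf.intersects(xy_point):  # cheap whole-set rejection test first
    mask[:] = list(map(shelf.intersects, xy_point.geoms))
print(mask)  # [ True False  True]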
Code Example #8
File: backend.py Project: omrakn/satrap
 def get_nearest_POI(row, gdf2):
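     # Build a single MultiPoint from every POI geometry, then return the
     # POI nearest to this row's geometry (nearest_points is from shapely.ops)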
     geom = MultiPoint(list(gdf2.geometry))
     nearest_ = nearest_points(row["geometry"], geom)
     return nearest_[1]
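
A short usage sketch of the same nearest_points pattern outside pandas, with hypothetical coordinates:

from shapely.geometry import MultiPoint, Point
from shapely.ops import nearest_points

pois = MultiPoint([(0, 0), (5, 5), (9, 1)])
origin = Point(4, 4)
_, nearest = nearest_points(origin, pois)
print(nearest.wkt)  # POINT (5 5)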
Code Example #9
def synthesize_gps(dfEdges,
                   shapeCoords,
                   localEpsg,
                   distribution="normal",
                   noise=0,
                   sampleRate=1,
                   uuid="999999",
                   shapeMatch="map_snap",
                   mode="auto",
                   turnPenaltyFactor=0,
                   breakageDist=2000,
                   beta=3,
                   sigmaZ=4.07,
                   searchRadius=50):

    accuracy = round(min(100, norm.ppf(0.95, loc=0, scale=max(1, noise))), 2)
    mProj = Proj(init='epsg:{0}'.format(localEpsg))
    llProj = Proj(init='epsg:4326')
    jsonDict = {
        "uuid": uuid,
        "trace": [],
        "shape_match": shapeMatch,
        "match_options": {
            "mode": mode,
            "turn_penalty_factor": turnPenaltyFactor,
            "breakage_distance": breakageDist,
            "beta": beta,
            "sigma_z": sigmaZ,
            "search_radius": searchRadius,
            "gps_accuracy": accuracy
        }
    }
    trueRouteCoords = []
    resampledCoords = []
    gpsRouteCoords = []
    displacementLines = []
    lonAdjs = []
    latAdjs = []
    noiseLookback = int(np.ceil(30 / (sampleRate + 2)))
    sttm = int(t.time()) - 86400  # yesterday
    seconds = 0
    shapeIndexCounter = 0
    for i, edge in dfEdges.iterrows():
        if i == 0:
            trueCoords = shapeCoords[edge['begin_shape_index']]
            trueRouteCoords.append(trueCoords)
        trueCoords = shapeCoords[edge['end_shape_index']]
        trueRouteCoords.append(trueCoords)
        edgeShapeIndices = []
        for j, coordPair in enumerate(edge['oneSecCoords']):
            if (not seconds % sampleRate) | (
                (i + 1 == len(dfEdges)) &
                (j + 1 == len(edge['oneSecCoords']))):
                lon, lat = coordPair
                resampledCoords.append([lon, lat])
                if noise > 0:
                    projLon, projLat = transform(llProj, mProj, lon, lat)
                    while True:
                        lonAdj = np.random.normal(scale=noise)
                        latAdj = np.random.normal(scale=noise)
                        if shapeIndexCounter == 0:
                            noiseQuad = [np.sign(lonAdj), np.sign(latAdj)]
                            break
                        elif [np.sign(lonAdj), np.sign(latAdj)] == noiseQuad:
                            break
                    lonAdjs.append(lonAdj)
                    latAdjs.append(latAdj)
                    newProjLon = projLon + np.mean(lonAdjs[-noiseLookback:])
                    newProjLat = projLat + np.mean(latAdjs[-noiseLookback:])
                    projLon, projLat = newProjLon, newProjLat
                    lon, lat = transform(mProj, llProj, projLon, projLat)
                time = sttm + seconds
                lat = round(lat, 6)
                lon = round(lon, 6)
                jsonDict["trace"].append({
                    "lat": lat,
                    "lon": lon,
                    "time": time
                })
                gpsRouteCoords.append([lon, lat])
                displacementLines.append([coordPair, [lon, lat]])
                edgeShapeIndices.append(shapeIndexCounter)
                shapeIndexCounter += 1
            seconds += 1
        if len(edgeShapeIndices) > 0:
            dfEdges.loc[i, 'begin_resampled_shape_index'] = \
                min(edgeShapeIndices)
            dfEdges.loc[i, 'end_resampled_shape_index'] = \
                max(edgeShapeIndices)

    gpsShape = [{"lat": d["lat"], "lon": d["lon"]} for d in jsonDict['trace']]
    gpsMatchEdges, gpsMatchCoords, _ = get_trace_attrs(
        gpsShape,
        encoded=False,
        gpsAccuracy=accuracy,
        mode=mode,
        turnPenaltyFactor=turnPenaltyFactor,
        breakageDist=breakageDist,
        beta=beta,
        sigmaZ=sigmaZ,
        searchRadius=searchRadius)

    geojson = FeatureCollection([
        Feature(geometry=LineString(trueRouteCoords),
                properties={
                    "style": {
                        "color": "#ff0000",
                        "weight": "3px"
                    },
                    "name": "true_route_coords"
                }),
        Feature(geometry=MultiPoint(resampledCoords),
                properties={
                    "style": {
                        "color": "#ff0000",
                        "weight": "3px"
                    },
                    "name": "resampled_coords"
                }),
        Feature(geometry=MultiPoint(gpsRouteCoords),
                properties={
                    "style": {
                        "color": "#0000ff",
                        "weight": "3px"
                    },
                    "name": "gps_coords"
                }),
        Feature(geometry=MultiLineString(displacementLines),
                properties={
                    "style": {
                        "color": "#000000",
                        "weight": "1px"
                    },
                    "name": "displacement_lines"
                }),
        Feature(geometry=LineString(gpsMatchCoords),
                properties={
                    "style": {
                        "fillcolor": "#0000ff",
                        "weight": "3px"
                    },
                    "name": "matched_gps_route"
                })
    ])

    return dfEdges, jsonDict, geojson, gpsMatchEdges
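
The noise model above perturbs projected coordinates with a rolling mean of Gaussian offsets. A reduced sketch of the projection round-trip using the modern pyproj Transformer API (EPSG:3857 is an assumed metric CRS):

import numpy as np
from pyproj import Transformer

to_m = Transformer.from_crs("EPSG:4326", "EPSG:3857", always_xy=True)
to_ll = Transformer.from_crs("EPSG:3857", "EPSG:4326", always_xy=True)

lon, lat = 13.40, 52.52
x, y = to_m.transform(lon, lat)
x += np.random.normal(scale=10)  # roughly 10 m of Gaussian noise
y += np.random.normal(scale=10)
lon_n, lat_n = to_ll.transform(x, y)
print(round(lon_n, 6), round(lat_n, 6))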
Code Example #10
    def test_transform_interpolated_euclidean(self):
        # A pretend projection function that is easy to understand.
        def faux_transformation(x, y):
            return x * x * x, y * y * y

        transformed_point = transform_interpolated_euclidean(
            faux_transformation,
            Point(1, 2)
        )
        assert list(transformed_point.coords) == [(1, 8)]

        transformed_multi_point = transform_interpolated_euclidean(
            faux_transformation,
            MultiPoint([(1, 2), (2, 3), (3, 4)])
        )
        assert transformed_multi_point.bounds == (1, 8, 27, 64)

        transformed_line_string = transform_interpolated_euclidean(
            faux_transformation,
            LineString([(1, 2), (2, 3), (3, 4)])
        )
        assert transformed_line_string.bounds == (1, 8, 27, 64)
        assert len(transformed_line_string.coords) == 12

        transformed_multi_line_string = transform_interpolated_euclidean(
            faux_transformation,
            MultiLineString([
                [(1, 2), (2, 3), (3, 4)],
                [(-1, -2), (-2, -3), (-3, -4)]
            ])
        )
        assert transformed_multi_line_string.bounds == (-27, -64, 27, 64)
        assert len(transformed_multi_line_string.geoms[0].coords) == 12
        assert len(transformed_multi_line_string.geoms[1].coords) == 12

        # Empty objects are a thing in shapely, so we need to test that we can
        # still handle them!
        transformed_empty_line_string = transform_interpolated_euclidean(
            faux_transformation,
            LineString()
        )
        assert transformed_empty_line_string.is_empty

        # Polygon with exterior and interiors.
        transformed_polygon = transform_interpolated_euclidean(
            faux_transformation,
            Polygon(
                [(1, 1), (1, 4), (4, 4), (4, 1), (1, 1)],
                [[(2, 2), (2, 3), (3, 3), (3, 2), (2, 2)]]
            )
        )
        assert transformed_polygon.bounds == (1, 1, 64, 64)
        assert transformed_polygon.exterior.bounds == (1, 1, 64, 64)
        assert len(transformed_polygon.interiors) == 1
        assert transformed_polygon.interiors[0].bounds == (8, 8, 27, 27)

        # Finally, MultiPolygons!
        transformed_multi_polygon = transform_interpolated_euclidean(
            faux_transformation,
            MultiPolygon(
                [
                    (
                        [(1, 1), (1, 4), (4, 4), (4, 1), (1, 1)],
                        [[(2, 2), (2, 3), (3, 3), (3, 2), (2, 2)]]
                    ),
                    (
                        [(-1, -1), (-1, -4), (-4, -4), (-4, -1), (-1, -1)],
                        [[(-2, -2), (-2, -3), (-3, -3), (-3, -2), (-2, -2)]]
                    )
                ]
            )
        )
        assert transformed_multi_polygon.bounds == (-64, -64, 64, 64)
        assert len(transformed_multi_polygon.geoms) == 2
        assert len(transformed_multi_polygon.geoms[0].exterior.coords) == \
               5
        assert len(transformed_multi_polygon.geoms[1].exterior.coords) == \
               5
        assert transformed_multi_polygon.geoms[0].exterior.bounds == \
               (1, 1, 64, 64)
        assert transformed_multi_polygon.geoms[1].exterior.bounds == \
               (-64, -64, -1, -1)
        assert len(transformed_multi_polygon.geoms[0].interiors) == 1
        assert len(transformed_multi_polygon.geoms[1].interiors) == 1
        assert transformed_multi_polygon.geoms[0].interiors[0].bounds == \
               (8, 8, 27, 27)
        assert transformed_multi_polygon.geoms[1].interiors[0].bounds == \
               (-27, -27, -8, -8)
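
For comparison, Shapely's built-in shapely.ops.transform applies a function to the existing coordinates without adding interpolated vertices; a minimal sketch with the same faux projection:

from shapely.geometry import LineString
from shapely.ops import transform

def faux_transformation(x, y):
    return x ** 3, y ** 3

line = LineString([(1, 2), (2, 3), (3, 4)])
out = transform(faux_transformation, line)
print(out.bounds)       # (1.0, 8.0, 27.0, 64.0)
print(len(out.coords))  # still 3: no densification happens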