Example 1
def get_masked_multipoint(coords,
                          masking_array,
                          plate_partitioner,
                          valid_time=None):
    # Inputs:
    # a list of coordinates (typically a regular grid of Lat/Long points),
    # an array of indices into that list of coordinates,
    # a set of polygons to use for cookie-cutting
    # a valid time to assign to the output features
    # Returns:
    # a list of multipoint features cookie-cut by the polygons (unpartitioned points are discarded)

    multipoint_feature = pygplates.Feature()
    multipoint_feature.set_geometry(
        pygplates.MultiPointOnSphere(
            zip(
                np.array(coords[0])[masking_array],
                np.array(coords[1])[masking_array])))
    (pg_points_masked, dummy) = plate_partitioner.partition_features(
        multipoint_feature,
        partition_return=pygplates.PartitionReturn.
        separate_partitioned_and_unpartitioned)

    if valid_time is not None:
        for feature in pg_points_masked:
            feature.set_valid_time(valid_time[0], valid_time[1])

    return pg_points_masked
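
A minimal usage sketch (the grid, the mask, and the 'static_polygons.gpmlz' / 'rotations.rot' file names below are illustrative placeholders, not part of the example above):

import numpy as np
import pygplates

# Regular 1-degree grid of Lat/Long points, flattened to 1-D arrays.
lats, lons = np.meshgrid(np.arange(-90., 91., 1.), np.arange(-180., 180., 1.), indexing='ij')
coords = (lats.ravel(), lons.ravel())

# Boolean mask selecting a subset of the grid points (here, the tropics).
masking_array = np.abs(coords[0]) < 23.5

plate_partitioner = pygplates.PlatePartitioner(
    pygplates.FeatureCollection('static_polygons.gpmlz'),
    pygplates.RotationModel('rotations.rot'))

masked_features = get_masked_multipoint(coords, masking_array, plate_partitioner,
                                        valid_time=[10., 0.])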
Example 2
def create_mesh_node_feature_from_points(points):
    """Create a GPlates 'gpml:MeshNode' feature from a sequence of points (pygplates.PointOnSphere objects)."""

    # Create the new multipoint feature.
    # This type of feature 'MeshNode' will cause GPlates to automatically create a velocity layer upon loading the points.
    mesh_node_feature = pygplates.Feature(
        pygplates.FeatureType.create_gpml('MeshNode'))

    multipoint = pygplates.MultiPointOnSphere(points)
    mesh_node_feature.add(pygplates.PropertyName.create_gpml('meshPoints'),
                          pygplates.GmlMultiPoint(multipoint))

    # Add time period property.
    mesh_node_feature.add(
        pygplates.PropertyName.create_gml('validTime'),
        pygplates.GmlTimePeriod(
            pygplates.GeoTimeInstant.create_distant_past(),
            pygplates.GeoTimeInstant.create_distant_future()))

    # Add reconstruction plate id property (use plate id zero).
    mesh_node_feature.add(
        pygplates.PropertyName.create_gpml('reconstructionPlateId'),
        pygplates.GpmlConstantValue(pygplates.GpmlPlateId(0)))

    return mesh_node_feature
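
A usage sketch (the 10-degree spacing and the output file name are illustrative):

import numpy as np
import pygplates

# Points on a coarse global grid.
points = [pygplates.PointOnSphere(float(lat), float(lon))
          for lat in np.arange(-90., 91., 10.)
          for lon in np.arange(-180., 180., 10.)]

mesh_node_feature = create_mesh_node_feature_from_points(points)

# Writing the feature to a file lets GPlates load it and create a velocity layer.
pygplates.FeatureCollection(mesh_node_feature).write('velocity_mesh_nodes.gpml')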
Example 3
    def __make_GPML_velocity_feature(self, coords):
        """ function to make a velocity mesh nodes at an arbitrary set of points defined in
             coords[# of points, 3] = x, y, z"""

        # Add points to a multipoint geometry
        multi_point = pygplates.MultiPointOnSphere([
            pygplates.PointOnSphere(x=coords[i, 0],
                                    y=coords[i, 1],
                                    z=coords[i, 2],
                                    normalise=True)
            for i in range(numpy.shape(coords)[0])
        ])

        # Create a feature containing the multipoint feature, and defined as MeshNode type
        meshnode_feature = pygplates.Feature(
            pygplates.FeatureType.create_from_qualified_string(
                'gpml:MeshNode'))
        meshnode_feature.set_geometry(multi_point)
        meshnode_feature.set_name('Velocity Mesh Nodes from pygplates')

        output_feature_collection = pygplates.FeatureCollection(
            meshnode_feature)

        # NB: at this point, the feature could be written to a file using
        # output_feature_collection.write('myfilename.gpmlz')

        # for use within the notebook, the velocity domain feature is returned from the function
        return output_feature_collection
Example 4
def create_gpml_velocity_feature(longitude_array,
                                 latitude_array,
                                 filename=None,
                                 feature_type=None):
    # Function to make velocity mesh nodes at an arbitrary set of points defined in Long/Lat.
    # Long and Lat are assumed to be 1-D arrays.

    multi_point = pygplates.MultiPointOnSphere(
        zip(latitude_array, longitude_array))

    # Create a feature containing the multipoint feature.
    # optionally, define as 'MeshNode' type, so that GPlates will recognise it as a velocity layer
    if feature_type == 'MeshNode':
        meshnode_feature = pygplates.Feature(
            pygplates.FeatureType.create_from_qualified_string(
                'gpml:MeshNode'))
        meshnode_feature.set_name('Velocity Mesh Nodes')
    else:
        meshnode_feature = pygplates.Feature()
        meshnode_feature.set_name('Multipoint Feature')

    meshnode_feature.set_geometry(multi_point)

    output_feature_collection = pygplates.FeatureCollection(meshnode_feature)

    if filename is not None:
        output_feature_collection.write(filename)
    else:
        return output_feature_collection
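
A usage sketch for the function above (the grid spacing and the commented-out file name are illustrative):

import numpy as np

# 2-degree global grid, flattened to the 1-D arrays the function expects.
lons, lats = np.meshgrid(np.arange(-180., 180., 2.), np.arange(-90., 91., 2.))

velocity_domain = create_gpml_velocity_feature(lons.ravel(), lats.ravel(),
                                               feature_type='MeshNode')

# Alternatively, write directly to disk by supplying a filename:
# create_gpml_velocity_feature(lons.ravel(), lats.ravel(),
#                              filename='velocity_mesh_nodes.gpmlz',
#                              feature_type='MeshNode')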
def create_coverage_feature_from_convergence_data(subduction_convergence_data,
                                                  time):
    """Create a feature with a coverage geometry containing the calculated convergence and absolute velocity data.
    
    Parameters
    ----------
    subduction_convergence_data : list of tuples
        The subduction convergence data calculated by :func:`subduction_convergence`.
        Each tuple in the list contains the calculated data for a single sample point on a subduction zone line.
    time : float
        The reconstruction time associated with the subduction convergence data.
    
    Returns
    -------
    pygplates.Feature
        The feature with a coverage geometry containing the calculated convergence and absolute velocity data.
    """

    # Convert the list of tuples (one tuple per sample point) into a tuple of lists (one list per data parameter).
    (all_lon, all_lat, all_convergence_velocity_magnitude_cm_per_yr,
     all_convergence_obliquity_degrees,
     all_absolute_velocity_magnitude_cm_per_yr, all_absolute_obliquity_degrees,
     all_subducting_length_degrees, all_subducting_arc_normal_azimuth_degrees,
     all_subducting_plate_id,
     all_overriding_plate_id) = zip(*subduction_convergence_data)

    # Put all convergence data for the current reconstruction time into a single feature.
    coverage_feature = pygplates.Feature()

    # Make it only appear at 'time'.
    coverage_feature.set_valid_time(time + 0.5, time - 0.5)

    # Add each data parameter as a separate scalar coverage.
    coverage_geometry = pygplates.MultiPointOnSphere(zip(all_lat, all_lon))
    coverage_scalars = {
        pygplates.ScalarType.create_gpml('ConvergenceVelocityMagnitude'):
        all_convergence_velocity_magnitude_cm_per_yr,
        pygplates.ScalarType.create_gpml('ConvergenceObliquityDegrees'):
        all_convergence_obliquity_degrees,
        pygplates.ScalarType.create_gpml('AbsoluteVelocityMagnitude'):
        all_absolute_velocity_magnitude_cm_per_yr,
        pygplates.ScalarType.create_gpml('AbsoluteObliquityDegrees'):
        all_absolute_obliquity_degrees,
        pygplates.ScalarType.create_gpml('SubductingLengthDegrees'):
        all_subducting_length_degrees,
        pygplates.ScalarType.create_gpml('SubductingArcNormalAzimuthDegrees'):
        all_subducting_arc_normal_azimuth_degrees,
        pygplates.ScalarType.create_gpml('SubductingPlateId'):
        all_subducting_plate_id,
        pygplates.ScalarType.create_gpml('OverridingPlateId'):
        all_overriding_plate_id,
    }
    coverage_feature.set_geometry((coverage_geometry, coverage_scalars))

    return coverage_feature
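
A usage sketch (it assumes 'convergence_data_by_time', mapping each reconstruction time to the tuples returned by subduction_convergence, has already been computed elsewhere; the output file name is a placeholder):

import pygplates

coverage_features = []
for time, subduction_convergence_data in convergence_data_by_time.items():
    coverage_features.append(
        create_coverage_feature_from_convergence_data(subduction_convergence_data, time))

pygplates.FeatureCollection(coverage_features).write('subduction_convergence_coverages.gpmlz')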
def get_mid_ocean_ridges(shared_boundary_sections,
                         rotation_model,
                         reconstruction_time,
                         time_step,
                         sampling=2.0):
    """ Get tessellated points along a mid ocean ridge"""

    shifted_mor_points = []

    for shared_boundary_section in shared_boundary_sections:
        # The shared sub-segments contribute either to the ridges or to the subduction zones.
        if (shared_boundary_section.get_feature().get_feature_type() ==
                pygplates.FeatureType.create_gpml('MidOceanRidge')):
            # Ignore zero length segments - they don't have a direction.
            spreading_feature = shared_boundary_section.get_feature()

            # Find the stage rotation of the spreading feature in the frame of reference of its
            # geometry at the current reconstruction time (the MOR is currently actively spreading).
            # The stage pole can then be directly geometrically compared to the *reconstructed* spreading geometry.
            stage_rotation = separate_ridge_transform_segments.get_stage_rotation_for_reconstructed_geometry(
                spreading_feature, rotation_model, reconstruction_time)
            if not stage_rotation:
                # Skip current feature - it's not a spreading feature.
                continue

            # Get the stage pole of the stage rotation.
            # Note that the stage rotation is already in frame of reference of the *reconstructed* geometry at the spreading time.
            stage_pole, _ = stage_rotation.get_euler_pole_and_angle()

            # One way rotates left and the other right, but don't know which - doesn't matter in our example though.
            rotate_slightly_off_mor_one_way = pygplates.FiniteRotation(
                stage_pole, np.radians(0.01))
            rotate_slightly_off_mor_opposite_way = rotate_slightly_off_mor_one_way.get_inverse()

            # Iterate over the shared sub-segments.
            for shared_sub_segment in shared_boundary_section.get_shared_sub_segments():

                # Tessellate MOR section.
                mor_points = pygplates.MultiPointOnSphere(
                    shared_sub_segment.get_resolved_geometry().to_tessellated(
                        np.radians(sampling)))

                # NOTE temporary hack to avoid seed points at ridge trench intersections
                for point in mor_points.get_points()[1:-1]:
                    # Append shifted geometries (one with points rotated one way and the other rotated the opposite way).
                    shifted_mor_points.append(rotate_slightly_off_mor_one_way *
                                              point)
                    shifted_mor_points.append(
                        rotate_slightly_off_mor_opposite_way * point)

    #print shifted_mor_points
    return shifted_mor_points
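
A usage sketch (the topology and rotation file names are placeholders; the function also relies on the separate_ridge_transform_segments module from PlateTectonicTools being importable):

import pygplates

rotation_model = pygplates.RotationModel('rotations.rot')
topology_features = pygplates.FeatureCollection('plate_topologies.gpmlz')

reconstruction_time = 50.

# Resolve the topological plate boundaries; the fifth argument collects the
# boundary sections shared between plates.
resolved_topologies = []
shared_boundary_sections = []
pygplates.resolve_topologies(topology_features, rotation_model, resolved_topologies,
                             reconstruction_time, shared_boundary_sections)

mor_points = get_mid_ocean_ridges(shared_boundary_sections, rotation_model,
                                  reconstruction_time, time_step=1.)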
Example 7
def make_GPML_velocity_feature(Long,Lat):
    # Add points to a multipoint geometry
    multi_point = pygplates.MultiPointOnSphere([(float(lat),float(lon)) for lat, lon in zip(Lat,Long)])

    # Create a feature containing the multipoint feature, and defined as MeshNode type
    meshnode_feature = pygplates.Feature(pygplates.FeatureType.create_from_qualified_string('gpml:MeshNode'))
    meshnode_feature.set_geometry(multi_point)
    meshnode_feature.set_name('Velocity Mesh Nodes from pygplates')

    output_feature_collection = pygplates.FeatureCollection(meshnode_feature)
    
    # NB: at this point, the feature could be written to a file using
    # output_feature_collection.write('myfilename.gpmlz')
    
    # for use within the notebook, the velocity domain feature is returned from the function
    return output_feature_collection
Example 8
def create_gpml_velocity_feature(longitude_array, latitude_array):
    # Function to make velocity mesh nodes at an arbitrary set of points defined in Long/Lat.
    # Long and Lat are assumed to be 1-D arrays.

    multi_point = pygplates.MultiPointOnSphere(
        zip(latitude_array, longitude_array))

    # Create a feature containing the multipoint geometry.
    # (Unlike the 'MeshNode' variant, this is a plain feature, so GPlates will not recognise it as a velocity layer.)
    meshnode_feature = pygplates.Feature()
    meshnode_feature.set_name('Multipoint Feature')

    meshnode_feature.set_geometry(multi_point)

    output_feature_collection = pygplates.FeatureCollection(meshnode_feature)

    return output_feature_collection
Example 9
def create_gpml_crustal_thickness(longitude_array,latitude_array,thickness,filename=None):

    multi_point = pygplates.MultiPointOnSphere(zip(latitude_array,longitude_array))
    
    scalar_coverages = {
        pygplates.ScalarType.create_gpml('CrustalThickness'): thickness}
    
    ct_feature = pygplates.Feature()
    ct_feature.set_geometry((multi_point,scalar_coverages))
    ct_feature.set_name('Crustal Thickness')
    
    output_feature_collection = pygplates.FeatureCollection(ct_feature)

    if filename is not None:
        output_feature_collection.write(filename)
    else:
        return output_feature_collection
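
A usage sketch (the grid spacing, the constant 35 km thickness, and the output file name are illustrative):

import numpy as np

lons, lats = np.meshgrid(np.arange(-180., 180., 5.), np.arange(-90., 91., 5.))
thickness = np.full(lons.size, 35.)

create_gpml_crustal_thickness(lons.ravel(), lats.ravel(), thickness,
                              filename='crustal_thickness.gpmlz')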
Example 10
def random_points_feature(N,filename=None):
    # Function to call Marsaglia's method and return a
    # feature collection, or save it to a file.

    points = marsaglias_method(N)

    #multipoint = pygplates.MultiPointOnSphere((points.T))
    multipoint_feature = pygplates.Feature()
    multipoint_feature.set_geometry(pygplates.MultiPointOnSphere((points.T)))
    multipoint_feature.set_name("Random Points from Marsaglia's method")

    multipoint_feature_collection = pygplates.FeatureCollection(multipoint_feature)

    if filename is not None:
        multipoint_feature_collection.write(filename)
    else:
        return multipoint_feature_collection
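
The marsaglias_method helper is not shown here; a minimal sketch of one possible implementation (Marsaglia, 1972), returning a 3-by-N array of unit (x, y, z) vectors so that points.T can be passed to MultiPointOnSphere, might look like this:

import numpy as np

def marsaglias_method(N):
    # Rejection-sample pairs (x1, x2) uniformly inside the unit disc.
    x1 = np.empty(0)
    x2 = np.empty(0)
    while x1.size < N:
        a = np.random.uniform(-1., 1., N)
        b = np.random.uniform(-1., 1., N)
        keep = a * a + b * b < 1.
        x1 = np.concatenate((x1, a[keep]))
        x2 = np.concatenate((x2, b[keep]))
    x1, x2 = x1[:N], x2[:N]
    s = x1 * x1 + x2 * x2
    # Map the disc samples to uniformly distributed points on the unit sphere.
    return np.vstack((2. * x1 * np.sqrt(1. - s),
                      2. * x2 * np.sqrt(1. - s),
                      1. - 2. * s))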
Example 11
def get_vertical_change_multipoints(pg_features,t1,t2,psl_t1,psl_t2,
                                    points,spatial_tree_of_uniform_recon_points,
                                    rotation_model,plot=False):
    # NOT WORKING DUE TO LACK OF SUPPORT FOR SCALAR COVERAGES
    
    print('Working on interpolation from %0.2f Ma to %0.2f Ma .....' % (t1, t2))
        
    plate_partitioner = pygplates.PlatePartitioner(pg_features, rotation_model, reconstruction_time=t1)
    
    (distance_to_land_t1,
     distance_to_psl_t1,
     distance_to_land_t2,
     distance_to_psl_t2,
     regression_msk,
     transgression_msk,
     always_land_msk) = get_change_masks(t1,points,spatial_tree_of_uniform_recon_points,
                                         psl_t1,psl_t2,rotation_model)
    
    coords = list(zip(*[point.to_lat_lon() for point in points]))
    
    pg_points_regression = get_masked_multipoint(coords,regression_msk,plate_partitioner,
                                                 valid_time=[t2,t1+0.01])
    pg_points_transgression = get_masked_multipoint(coords,transgression_msk,plate_partitioner,
                                                    valid_time=[t2,t1+0.01])
    pg_points_always_land = get_masked_multipoint(coords,always_land_msk,plate_partitioner,
                                                  valid_time=[t2,t1])

    
    # make a scalar coverage
    multi_point = pygplates.MultiPointOnSphere(points)
    scalar_coverages = {
        pygplates.ScalarType.create_gpml('distance_to_land_t1'): distance_to_land_t1,
        pygplates.ScalarType.create_gpml('distance_to_psl_t1'): distance_to_psl_t1,
        pygplates.ScalarType.create_gpml('distance_to_land_t2'): distance_to_land_t2,
        pygplates.ScalarType.create_gpml('distance_to_psl_t2'): distance_to_psl_t2}

    sc_feature = pygplates.Feature()
    sc_feature.set_geometry((multi_point,scalar_coverages))
    sc_feature.set_name('Paleotopography Test Points')
    
    (cc_sc_features,
     dummy) = plate_partitioner.partition_features(sc_feature,
                                                  partition_return = pygplates.PartitionReturn.separate_partitioned_and_unpartitioned)
    
    pygplates.FeatureCollection(cc_sc_features).write('./tween_feature_collections/mountain_scalar_coverages_%0.2fMa_%0.2fMa.gpmlz' % (t1,t2))
Example 12
def make_GPML_velocity_feature(Long, Lat):
    # Function to make velocity mesh nodes at an arbitrary set of points defined in Long/Lat.
    # Long and Lat are assumed to be 1-D arrays.

    # Add points to a multipoint geometry.
    points = list(zip(Lat, Long))
    multi_point = pygplates.MultiPointOnSphere(points)

    # Create a feature containing the multipoint feature, and defined as MeshNode type
    meshnode_feature = pygplates.Feature(pygplates.FeatureType.create_from_qualified_string('gpml:MeshNode'))
    meshnode_feature.set_geometry(multi_point)
    meshnode_feature.set_name('Velocity Mesh Nodes from pygplates')

    output_feature_collection = pygplates.FeatureCollection(meshnode_feature)
    
    # NB: at this point, the feature could be written to a file using
    # output_feature_collection.write('myfilename.gpmlz')
    
    # for use within the notebook, the velocity domain feature is returned from the function
    return output_feature_collection
        points_in_polygon = polygon_feature_to_points_mapping.setdefault(
            polygon_feature, [])

        points_in_polygon.append(points[point_index])

    # Create multi-point features.

    multi_point_features = []

    for polygon_feature, points_in_polygon in polygon_feature_to_points_mapping.items():

        multi_point_feature = pygplates.Feature()

        multi_point_feature.set_geometry(
            pygplates.MultiPointOnSphere(points_in_polygon))

        # If points contained by any polygon then assign its plate ID, otherwise no plate ID assigned.

        if polygon_feature is not None:

            begin_time, end_time = polygon_feature.get_valid_time()

            multi_point_feature.set_valid_time(begin_time, end_time)

            multi_point_feature.set_reconstruction_plate_id(
                polygon_feature.get_reconstruction_plate_id())

        else:

            multi_point_feature.set_valid_time(
Example 14
def reconstruct_raster_stage(static_polygon_features,
                             rotation_model,
                             time_from,
                             time_to,
                             uniform_recon_points,
                             spatial_tree_of_uniform_recon_points,
                             anchor_plate_id=0):

    print('Reconstruct static polygons...')

    # Reconstruct the multipoint feature.
    recon_static_polygon_features = []
    pygplates.reconstruct(static_polygon_features,
                          rotation_model,
                          recon_static_polygon_features,
                          time_to,
                          anchor_plate_id=anchor_plate_id)

    # Extract the polygons and plate IDs from the reconstructed static polygons.
    recon_static_polygons = []
    recon_static_polygon_plate_ids = []
    for recon_static_polygon_feature in recon_static_polygon_features:
        recon_plate_id = recon_static_polygon_feature.get_feature().get_reconstruction_plate_id()
        recon_polygon = recon_static_polygon_feature.get_reconstructed_geometry()

        recon_static_polygon_plate_ids.append(recon_plate_id)
        recon_static_polygons.append(recon_polygon)

    print('Find static polygons...')

    # Find the reconstructed static polygon (plate IDs) containing the uniform (reconstructed) points.
    #
    # The order (and length) of 'recon_point_plate_ids' matches the order (and length) of 'uniform_recon_points'.
    # Points outside all static polygons return a value of None.
    recon_point_plate_ids = points_in_polygons.find_polygons_using_points_spatial_tree(
        uniform_recon_points, spatial_tree_of_uniform_recon_points,
        recon_static_polygons, recon_static_polygon_plate_ids)

    print('Group by polygons...')

    # Group recon points with plate IDs so we can later create one multipoint per plate.
    recon_points_grouped_by_plate_id = {}
    for point_index, point_plate_id in enumerate(recon_point_plate_ids):
        # Reject any points outside all reconstructed static polygons.
        if point_plate_id is None:
            continue

        # Add empty list to dict if first time encountering plate ID.
        if point_plate_id not in recon_points_grouped_by_plate_id:
            recon_points_grouped_by_plate_id[point_plate_id] = []

        # Add to list of points associated with plate ID.
        recon_point = uniform_recon_points[point_index]
        recon_points_grouped_by_plate_id[point_plate_id].append(recon_point)

    print('Reverse reconstruct points...')

    # Reconstructed points.
    recon_point_lons = []
    recon_point_lats = []

    # Present day points associated with reconstructed points.
    point_lons = []
    point_lats = []

    # Create a multipoint feature for each plate ID and reverse-reconstruct it to get present-day points.
    #
    # Iterate over key/value pairs in dictionary.
    for plate_id, recon_points_in_plate in recon_points_grouped_by_plate_id.items():
        # Reverse reconstructing a multipoint is much faster than individually reverse-reconstructing points.
        multipoint_feature = pygplates.Feature()
        multipoint_feature.set_geometry(
            pygplates.MultiPointOnSphere(recon_points_in_plate))
        multipoint_feature.set_reconstruction_plate_id(plate_id)

        # Reverse reconstruct the multipoint feature.
        pygplates.reverse_reconstruct(multipoint_feature,
                                      rotation_model,
                                      time_to,
                                      anchor_plate_id=anchor_plate_id)

        # Forward reconstruct the multipoint to 'time_from'.
        multipoint_at_from_time = []
        pygplates.reconstruct(multipoint_feature,
                              rotation_model,
                              multipoint_at_from_time,
                              time_from,
                              anchor_plate_id=anchor_plate_id)

        # Extract reverse-reconstructed geometry.
        multipoint = multipoint_at_from_time[0].get_reconstructed_geometry()

        # Collect present day and associated reconstructed points.
        for point_index, point in enumerate(multipoint):
            lat, lon = point.to_lat_lon()
            point_lons.append(lon)
            point_lats.append(lat)

            recon_point = recon_points_in_plate[point_index]
            recon_lat, recon_lon = recon_point.to_lat_lon()
            recon_point_lons.append(recon_lon)
            recon_point_lats.append(recon_lat)

    print('Sample present-day grid...')

    # Query present-day grid using present-day points.
    #
    # TODO: Not sure what happens in regions where there's no data in the grid (need to ignore those points).
    #data = data_grid.ev(point_lons, point_lats)
    #data = [1.0] * len(recon_point_lons)
    #data = sample_grid_using_scipy(point_lons,point_lats,grdfile)

    return recon_point_lons, recon_point_lats, point_lons, point_lats
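
A usage sketch (the file names are placeholders; it assumes the points_in_polygons and points_spatial_tree helper modules shipped with the pygplates sample scripts are importable alongside this function):

import numpy as np
import pygplates
import points_spatial_tree  # assumed helper module from the pygplates sample scripts

rotation_model = pygplates.RotationModel('rotations.rot')
static_polygon_features = pygplates.FeatureCollection('static_polygons.gpmlz')

# Uniform 1-degree lat/lon domain, reused for every reconstruction time.
uniform_recon_points = [pygplates.PointOnSphere(float(lat), float(lon))
                        for lat in np.arange(-90., 91., 1.)
                        for lon in np.arange(-180., 180., 1.)]
spatial_tree_of_uniform_recon_points = points_spatial_tree.PointsSpatialTree(uniform_recon_points)

recon_lons, recon_lats, lons, lats = reconstruct_raster_stage(
    static_polygon_features, rotation_model,
    time_from=10., time_to=11.,
    uniform_recon_points=uniform_recon_points,
    spatial_tree_of_uniform_recon_points=spatial_tree_of_uniform_recon_points)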
Example 15
def reconstruct_feature_collection(request):
    DATA_DIR = Model_Root+'caltech/'

    if request.method == 'POST':
        return HttpResponse('POST method is not accepted for now.')

    geologicage = request.GET.get('geologicage', 140)
    output_format = request.GET.get('output', 'geojson')
    fc_str = request.GET.get('feature_collection')
    fc = json.loads(fc_str)
 
    features=[]
    for f in fc['features']:
        geom = f['geometry']
        feature = pygplates.Feature()
        if geom['type'] == 'Point':
            feature.set_geometry(pygplates.PointOnSphere(
                float(geom['coordinates'][1]),
                float(geom['coordinates'][0])))
        if geom['type'] == 'LineString':
            feature.set_geometry(
                pygplates.PolylineOnSphere([(point[1],point[0]) for point in geom['coordinates']]))
        if geom['type'] == 'Polygon':
            feature.set_geometry(
                pygplates.PolygonOnSphere([(point[1],point[0]) for point in geom['coordinates'][0]]))
        if geom['type'] == 'MultiPoint':
            feature.set_geometry(
                pygplates.MultiPointOnSphere([(point[1],point[0]) for point in geom['coordinates']]))

        features.append(feature)


    if float(geologicage) < 250:
        rotation_files = [DATA_DIR+'/Seton_etal_ESR2012_2012.1.rot']
    else :
        rotation_files = [DATA_DIR+'/Global_EB_410-250Ma_GK07_Matthews_etal.rot']

    rotation_model = pygplates.RotationModel(rotation_files)

    assigned_features = pygplates.partition_into_plates(
        DATA_DIR+'Seton_etal_ESR2012_StaticPolygons_2012.1.gpmlz',
        rotation_model,
        features,
        properties_to_copy = [
            pygplates.PartitionProperty.reconstruction_plate_id,
            pygplates.PartitionProperty.valid_time_period],
        partition_method = pygplates.PartitionMethod.most_overlapping_plate
    )


    reconstructed_geometries = []
    pygplates.reconstruct(assigned_features, rotation_model, reconstructed_geometries, float(geologicage), 0)
    
    
    data = {"type": "FeatureCollection"}
    data["features"] = []
    for g in reconstructed_geometries:
        geom =  g.get_reconstructed_geometry()
        feature = {"type": "Feature"}
        feature["geometry"] = {}
        if isinstance(geom, pygplates.PointOnSphere):
            feature["geometry"]["type"] = "Point"
            p = geom.to_lat_lon_list()[0]
            feature["geometry"]["coordinates"] = [p[1], p[0]]
        elif isinstance(geom, pygplates.MultiPointOnSphere):
            feature["geometry"]["type"] = 'MultiPoint'
            feature["geometry"]["coordinates"] = [[lon,lat] for lat, lon in geom.to_lat_lon_list()]
        elif isinstance(geom, pygplates.PolylineOnSphere):
            feature["geometry"]["type"] = 'LineString'
            feature["geometry"]["coordinates"] = [[lon,lat] for lat, lon in geom.to_lat_lon_list()]
        elif isinstance(geom, pygplates.PolygonOnSphere):
            feature["geometry"]["type"] = 'Polygon'
            feature["geometry"]["coordinates"] = [[[lon,lat] for lat, lon in geom.to_lat_lon_list()]]
        else:
            raise ValueError('Unrecognized geometry type.')
        
        feature["properties"]={}    
        
        data["features"].append(feature)

    ret = json.dumps(pretty_floats(data))

    return HttpResponse(ret, content_type='application/json')
Example 16
    subducting_points = []
    subducting_sed_thicknesses = []
    subducting_sediment_volumes_metres_3_per_year_per_metre = []
    convergence_normal_velocities_cms_per_year = []
    for (
        lon,
        lat,
        sed_thickness,
        subducting_sediment_volume_metres_3_per_year_per_metre,
        convergence_normal_velocity_cms_per_year) in subducting_lon_lat_thickness_velocity_volume_list:

        subducting_points.append(pygplates.PointOnSphere(lat, lon))
        subducting_sed_thicknesses.append(sed_thickness)
        subducting_sediment_volumes_metres_3_per_year_per_metre.append(subducting_sediment_volume_metres_3_per_year_per_metre)
        convergence_normal_velocities_cms_per_year.append(convergence_normal_velocity_cms_per_year)

    # Create a scalar coverage feature to display sediment thicknesses in GPlates.
    subducting_thickness_feature = pygplates.Feature()
    subducting_thickness_feature.set_geometry((
            pygplates.MultiPointOnSphere(subducting_points),
            {pygplates.ScalarType.create_gpml('subducting_sed_thick') : subducting_sed_thicknesses,
            pygplates.ScalarType.create_gpml('sed_volume_m_3_per_year_per_m') : subducting_sediment_volumes_metres_3_per_year_per_metre,
            pygplates.ScalarType.create_gpml('conv_normal_vel_cms_year') : convergence_normal_velocities_cms_per_year}))
    # Only want to display this feature at 'time' Ma.
    subducting_thickness_feature.set_valid_time(time + 0.5, time - 0.5)

    subducting_thickness_features.append(subducting_thickness_feature)

pygplates.FeatureCollection(subducting_thickness_features).write('subducting_thicknesses.gpmlz')

sys.exit(0)
Example 17
def reconstruct_feature_collection(request):

    if request.method == 'POST':
        params = request.POST
    elif request.method == 'GET':
        params = request.GET
    else:
        return HttpResponseBadRequest('Unrecognized request type')

    anchor_plate_id = params.get('pid', 0)

    if 'time' in params:
        time = params['time']
    elif 'geologicage' in params:
        time = params['geologicage']
    else:
        time = 140 #default reconstruction age

    output_format = params.get('output', 'geojson')
    fc_str = params.get('feature_collection')
    model = str(params.get('model',settings.MODEL_DEFAULT))
    
    if 'keep_properties' in params:
        keep_properties = True
    else:
        keep_properties = False

    try:
        timef = float(time)
    except (TypeError, ValueError):
        return HttpResponseBadRequest('The "time" parameter is invalid ({0}).'.format(time))

    try:
        anchor_plate_id = int(anchor_plate_id)
    except (TypeError, ValueError):
        return HttpResponseBadRequest('The "pid" parameter is invalid ({0}).'.format(anchor_plate_id))
 
    # Convert geojson input to gplates feature collection
    features=[]
    try:
        fc = json.loads(fc_str)#load the input feature collection
        for f in fc['features']:
            geom = f['geometry']
            feature = pygplates.Feature()
            if geom['type'] == 'Point':
                feature.set_geometry(pygplates.PointOnSphere(
                    float(geom['coordinates'][1]),
                    float(geom['coordinates'][0])))
            if geom['type'] == 'LineString':
                feature.set_geometry(
                    pygplates.PolylineOnSphere([(point[1],point[0]) for point in geom['coordinates']]))
            if geom['type'] == 'Polygon':
                feature.set_geometry(
                    pygplates.PolygonOnSphere([(point[1],point[0]) for point in geom['coordinates'][0]]))
            if geom['type'] == 'MultiPoint':
                feature.set_geometry(
                    pygplates.MultiPointOnSphere([(point[1],point[0]) for point in geom['coordinates']]))
            
            if keep_properties and 'properties' in f:
                for pk in f['properties']:           
                    p = f['properties'][pk] 
                    if isinstance(p, str):
                        p=str(p) 
                    feature.set_shapefile_attribute(str(pk),p)
            
            features.append(feature)
    except Exception as e:
        #print e
        return HttpResponseBadRequest('Invalid input feature collection')

    model_dict = get_reconstruction_model_dict(model)
    if not model_dict:
        return HttpResponseBadRequest('The "model" ({0}) cannot be recognized.'.format(model))

    rotation_model = pygplates.RotationModel([str('%s/%s/%s' %
        (settings.MODEL_STORE_DIR,model,rot_file)) for rot_file in model_dict['RotationFile']])

    assigned_features = pygplates.partition_into_plates(
        settings.MODEL_STORE_DIR+model+'/'+model_dict['StaticPolygons'],
        rotation_model,
        features,
        properties_to_copy = [
            pygplates.PartitionProperty.reconstruction_plate_id,
            pygplates.PartitionProperty.valid_time_period],
        partition_method = pygplates.PartitionMethod.most_overlapping_plate
    )

    reconstructed_geometries = []
    pygplates.reconstruct(assigned_features, 
        rotation_model, 
        reconstructed_geometries, 
        timef, 
        anchor_plate_id=anchor_plate_id)

    # convert feature collection back to geojson
    data = {"type": "FeatureCollection"}
    data["features"] = []
    for g in reconstructed_geometries:
        geom =  g.get_reconstructed_geometry()
        feature = {"type": "Feature"}
        feature["geometry"] = {}
        if isinstance(geom, pygplates.PointOnSphere):
            feature["geometry"]["type"] = "Point"
            p = geom.to_lat_lon_list()[0]
            feature["geometry"]["coordinates"] = [p[1], p[0]]
        elif isinstance(geom, pygplates.MultiPointOnSphere):
            feature["geometry"]["type"] = 'MultiPoint'
            feature["geometry"]["coordinates"] = [[lon,lat] for lat, lon in geom.to_lat_lon_list()]
        elif isinstance(geom, pygplates.PolylineOnSphere):
            feature["geometry"]["type"] = 'LineString'
            feature["geometry"]["coordinates"] = [[lon,lat] for lat, lon in geom.to_lat_lon_list()]
        elif isinstance(geom, pygplates.PolygonOnSphere):
            feature["geometry"]["type"] = 'Polygon'
            feature["geometry"]["coordinates"] = [[[lon,lat] for lat, lon in geom.to_lat_lon_list()]]
        else:
            return HttpResponseServerError('Unsupported Geometry Type.')

        feature["properties"] = {}
        if keep_properties:
            for pk in g.get_feature().get_shapefile_attributes():
                feature["properties"][pk] = g.get_feature().get_shapefile_attribute(pk)
        #print feature["properties"]
        data["features"].append(feature)

    ret = json.dumps(pretty_floats(data))
    
    #add header for CORS
    #http://www.html5rocks.com/en/tutorials/cors/
    response = HttpResponse(ret, content_type='application/json')
    #TODO:
    response['Access-Control-Allow-Origin'] = '*'
    return response
Example 18
def motion_path(request):
    """
    HTTP GET request to retrieve a reconstructed motion path

    **usage**
    
    <http-address-to-gws>/reconstruct/motion_path/seedpoints=\ *points*\&timespec=\ *time_list*\&fixplate=\ *fixed_plate_id*\&movplate=\ *moving_plate_id*\&time=\ *reconstruction_time*\&model=\ *reconstruction_model*
    
    :param seedpoints: comma-separated seed point coordinates, ordered as 'lon1,lat1,lon2,lat2,...' [required]

    :param timespec: specification for times for motion path construction, in format 'mintime,maxtime,increment' [defaults to '0,100,10']

    :param time: time for reconstruction [default=0]

    :param fixplate: integer plate id for fixed plate [default=0]

    :param movplate: integer plate id for moving plate [required]

    :param model: name for reconstruction model [defaults to default model from web service settings]

    :returns:  json containing reconstructed motion path features
    """
    seedpoints = request.GET.get('seedpoints', None)
    times = request.GET.get('timespec', '0,100,10')
    reconstruction_time = request.GET.get('time', 0)
    RelativePlate = request.GET.get('fixplate', 0)
    MovingPlate = request.GET.get('movplate', None)
    model = request.GET.get('model',settings.MODEL_DEFAULT)

    points = []
    if seedpoints:
        ps = seedpoints.split(',')
        if len(ps)%2==0:
            for lat,lon in zip(ps[1::2], ps[0::2]):
                points.append((float(lat),float(lon)))

    seed_points_at_digitisation_time = pygplates.MultiPointOnSphere(points)

    if times:
        ts = times.split(',')
        if len(ts)==3:
            times = np.arange(float(ts[0]),float(ts[1])+0.1,float(ts[2]))

    model_dict = get_reconstruction_model_dict(model)

    rotation_model = pygplates.RotationModel([str('%s/%s/%s' %
        (settings.MODEL_STORE_DIR,model,rot_file)) for rot_file in model_dict['RotationFile']])

    # Create the motion path feature
    digitisation_time = 0
    #seed_points_at_digitisation_time = pygplates.MultiPointOnSphere([SeedPoint])
    motion_path_feature = pygplates.Feature.create_motion_path(
            seed_points_at_digitisation_time,
            times = times,
            valid_time=(2000, 0),
            relative_plate=int(RelativePlate),
            reconstruction_plate_id = int(MovingPlate))

    # Create the shape of the motion path
    #reconstruction_time = 0
    reconstructed_motion_paths = []
    pygplates.reconstruct(
            motion_path_feature, rotation_model, reconstructed_motion_paths, float(reconstruction_time),
            reconstruct_type=pygplates.ReconstructType.motion_path)

    data = {"type": "FeatureCollection"}
    data["features"] = [] 
    for reconstructed_motion_path in reconstructed_motion_paths:
        Dist = []
        for segment in reconstructed_motion_path.get_motion_path().get_segments():
            Dist.append(segment.get_arc_length()*pygplates.Earth.mean_radius_in_kms)
        feature = {"type": "Feature"}
        feature["geometry"] = {}
        feature["geometry"]["type"] = "LineString"
        #### NOTE: coordinates are flipped to (lon, lat) order for GeoJSON output
        feature["geometry"]["coordinates"] = [(lon,lat) for lat,lon in reconstructed_motion_path.get_motion_path().to_lat_lon_list()]
        feature["geometry"]["distance"] = Dist
        feature["properties"] = {}
        data["features"].append(feature)

    ret = json.dumps(pretty_floats(data))
    
    #add header for CORS
    #http://www.html5rocks.com/en/tutorials/cors/
    response = HttpResponse(ret, content_type='application/json')
    #TODO:
    response['Access-Control-Allow-Origin'] = '*'
    return response
Example 19
def plot_groups(equal_area_points, bin_values, fig=None, filename=None, grid_resolution=0.2,
                color_range=None, cmap='hot', reverse=True, pen='0.1p,gray50', transparency=0, **kwargs):

    """
    Generate a visual representation of spatially binned data generated by 'groupby_healpix'.
    The result can either be added to a pygmt figure or saved to a GIS file (with the file
    type taken from the filename extension of the optional 'filename' parameter, e.g. shp, gmt, geojson) 
    """

    points = pygplates.MultiPointOnSphere(zip(equal_area_points.latitude,equal_area_points.longitude)).to_xyz_array() 

    radius = 1
    center = np.array([0, 0, 0])
    sv = spatial.SphericalVoronoi(points, radius, center)
    sv.sort_vertices_of_regions()

    polygon_features = []
    for region,zval in zip(sv.regions,bin_values):
        polygon = np.vstack((sv.vertices[region],sv.vertices[region][0,:]))
        polygon_feature = pygplates.Feature()
        polygon_feature.set_geometry(pygplates.PolygonOnSphere(polygon))
        polygon_feature.set_shapefile_attribute('zval', zval)
        polygon_features.append(polygon_feature)

    if filename:
        return_file = True
        pygplates.FeatureCollection(polygon_features).write(filename)

    else:
        return_file = False
        plot_file = tempfile.NamedTemporaryFile(delete=False, suffix='.gmt')
        plot_file.close()
        filename = plot_file.name
        pygplates.FeatureCollection(polygon_features).write(filename)

    if fig:
        grid_lon, grid_lat = np.meshgrid(np.arange(-180.,180.,grid_resolution),np.arange(-90.,90.,grid_resolution))
    
        d,l = sampleOnSphere(np.radians(equal_area_points.longitude),
                            np.radians(equal_area_points.latitude),
                            np.array(bin_values),
                            np.radians(grid_lon).ravel(),
                            np.radians(grid_lat).ravel(),
                            k=1)
        grid_z = np.array(bin_values)[l].reshape(grid_lon.shape)
        
        #spherical_triangulation = stripy.sTriangulation(lons=np.radians(equal_area_points.longitude), lats=np.radians(equal_area_points.latitude))
        #grid_z,_ = spherical_triangulation.interpolate_nearest(np.radians(grid_lon).ravel(), np.radians(grid_lat).ravel(), np.array(bin_values))

        ds = xr.DataArray(grid_z.reshape(grid_lon.shape), coords=[('lat',grid_lat[:,0]), ('lon',grid_lon[0,:])], name='z')

        #pygmt.config(COLOR_FOREGROUND='white', COLOR_BACKGROUND='black')
        if not color_range:
            color_range = (np.nanmin(bin_values), np.nanmax(bin_values))
            reverse = True
        pygmt.makecpt(cmap=cmap, series='{:f}/{:f}'.format(color_range[0],color_range[1]), 
                      reverse=reverse, background='o')

        # This line would allow the polygons to be plotted directly with a colormap, but tends to crash when 
        # healpix of N=32 or greater is input
        #fig.plot(data=filename, pen=pen, color='+z', cmap=True, a='Z=zval', close=True, **kwargs)
        fig.grdimage(ds, transparency=transparency, cmap=True, nan_transparent=True)
        fig.plot(data=filename, pen=pen, transparency=transparency, close=True, **kwargs)


    if not return_file:
        os.unlink(plot_file.name)