Example #1
def rasterise_paleogeography(pg_features, rotation_model, time,
                             sampling=0.5, env_list=None, meshtype='LongLatGrid',
                             masking=None):
    # Takes paleogeography polygons (like those from Cao et al. 2017) and converts
    # them into a raster.
    # If meshtype is set to 'healpix', sampling should be an integer defining nSide.

    #pg_features = load_paleogeography(pg_dir,env_list)
    if meshtype == 'healpix':
        raster_domain = create_gpml_healpix_mesh(sampling, filename=None, feature_type='MeshNode')
    else:
        raster_domain = create_gpml_regular_long_lat_mesh(sampling, filename=None, feature_type='MeshNode')

    plate_partitioner = pygplates.PlatePartitioner(pg_features, rotation_model, reconstruction_time=time)

    if masking is not None:
        pg_points = plate_partitioner.partition_features(raster_domain,
                                                         partition_return=pygplates.PartitionReturn.separate_partitioned_and_unpartitioned,
                                                         properties_to_copy=[pygplates.PropertyName.gpml_shapefile_attributes])
        # 'Outside' masks out points not covered by any polygon (keeping the partitioned
        # points); 'Inside' masks out the covered points (keeping the unpartitioned ones).
        if masking == 'Outside':
            pg_points = pg_points[0]
        elif masking == 'Inside':
            pg_points = pg_points[1]

    else:
        pg_points = plate_partitioner.partition_features(raster_domain,
                                                         properties_to_copy=[pygplates.PropertyName.gpml_shapefile_attributes])

    return pg_points
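
A minimal usage sketch of the function above; the input filenames are hypothetical:

import pygplates

pg_features = pygplates.FeatureCollection('paleogeography_polygons.gpmlz')  # hypothetical file
rotation_model = pygplates.RotationModel('rotations.rot')                   # hypothetical file

# 'Outside' keeps only the mesh points covered by a paleogeography polygon
pg_points = rasterise_paleogeography(pg_features, rotation_model, time=60.,
                                     sampling=1.0, masking='Outside')
pygplates.FeatureCollection(pg_points).write('pg_raster_60Ma.gpmlz')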
Example #2
def rasterise_polygons(polygon_features, rotation_model, reconstruction_time, raster_domain_points=None,
                       sampling=0.5, meshtype='LongLatGrid', masking=None):
    # Takes a set of polygons and converts them into a raster (or other regular point
    # distribution), with the polygon shapefile attributes mapped to the points.
    # If meshtype is set to 'healpix', sampling should be an integer defining nSide.

    if raster_domain_points is None:
        if meshtype == 'healpix':
            raster_domain_points = create_gpml_healpix_mesh(sampling, filename=None, feature_type='MeshNode')
        else:
            raster_domain_points = create_gpml_regular_long_lat_mesh(sampling, filename=None, feature_type='MeshNode')

    plate_partitioner = pygplates.PlatePartitioner(polygon_features, rotation_model, reconstruction_time=reconstruction_time)

    if masking is not None:
        pg_points = plate_partitioner.partition_features(raster_domain_points,
                                                         partition_return=pygplates.PartitionReturn.separate_partitioned_and_unpartitioned,
                                                         properties_to_copy=[pygplates.PropertyName.gpml_shapefile_attributes])
        if masking == 'outside':
            pg_points = pg_points[0]
        elif masking == 'inside':
            pg_points = pg_points[1]

    else:
        pg_points = plate_partitioner.partition_features(raster_domain_points,
                                                         properties_to_copy=[pygplates.PropertyName.gpml_shapefile_attributes])

    return pg_points
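
Because the domain can be passed in, one mesh can be reused across reconstruction times. A sketch, assuming polygon_features and rotation_model are already loaded (hypothetical names):

raster_domain = create_gpml_regular_long_lat_mesh(0.5, filename=None, feature_type='MeshNode')
for t in (0., 10., 20.):
    pg_points = rasterise_polygons(polygon_features, rotation_model, t,
                                   raster_domain_points=raster_domain, masking='outside')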
Example #3

def get_plate_velocities(velocity_domain_features,
                         topology_features,
                         rotation_model,
                         time,
                         delta_time,
                         rep):

    # All domain points and associated (magnitude, azimuth, inclination) velocities for the current time.
    all_domain_points = []
    all_velocities = []

    # Partition our velocity domain features into our topological plate polygons at the current 'time'.
    plate_partitioner = pygplates.PlatePartitioner(topology_features, rotation_model, float(time))

    for velocity_domain_feature in velocity_domain_features:

        # A velocity domain feature usually has a single geometry but we'll assume it can be any number.
        # Iterate over them all.
        for velocity_domain_geometry in velocity_domain_feature.get_geometries():

            for velocity_domain_point in velocity_domain_geometry.get_points():

                all_domain_points.append(velocity_domain_point)

                partitioning_plate = plate_partitioner.partition_point(velocity_domain_point)
                if partitioning_plate:

                    # We need the newly assigned plate ID to get the equivalent stage rotation of that tectonic plate.
                    partitioning_plate_id = partitioning_plate.get_feature().get_reconstruction_plate_id()

                    # Get the stage rotation of partitioning plate from 'time + delta_time' to 'time'.
                    equivalent_stage_rotation = rotation_model.get_rotation(float(time), partitioning_plate_id, time + float(delta_time))

                    # Calculate velocity at the velocity domain point.
                    # This is from 'time + delta_time' to 'time' on the partitioning plate.
                    velocity_vectors = pygplates.calculate_velocities(
                        [velocity_domain_point],
                        equivalent_stage_rotation,
                        delta_time)
                    
                    if rep == 'mag_azim':
                        # Convert global 3D velocity vectors to local (magnitude, azimuth, inclination) tuples (one tuple per point).
                        velocities = pygplates.LocalCartesian.convert_from_geocentric_to_magnitude_azimuth_inclination(
                            [velocity_domain_point],
                            velocity_vectors)
                        all_velocities.append(velocities[0])

                    elif rep == 'vector_comp':
                        # Convert global 3D velocity vectors to local (north, east, down) vectors (one per point).
                        velocities = pygplates.LocalCartesian.convert_from_geocentric_to_north_east_down(
                                [velocity_domain_point],
                                velocity_vectors)
                        all_velocities.append(velocities[0])

                else:
                    all_velocities.append((0,0,0))

    return all_velocities
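
A usage sketch with a single-point velocity domain; the filenames are hypothetical:

import pygplates

rotation_model = pygplates.RotationModel('rotations.rot')            # hypothetical file
topology_features = pygplates.FeatureCollection('topologies.gpmlz')  # hypothetical file

domain_feature = pygplates.Feature()
domain_feature.set_geometry(pygplates.MultiPointOnSphere([(-30., 110.)]))  # one (lat, lon) point

velocities = get_plate_velocities([domain_feature], topology_features, rotation_model,
                                  time=10., delta_time=1., rep='mag_azim')
print(velocities[0])  # (magnitude, azimuth, inclination) for the domain point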
Example #4
    def __init__(self, grid_list_filename, static_polygon_filename, rotation_filenames, longitude, latitude, age=None):
        """
        Load dynamic topography grid filenames and associated ages from grid list file 'grid_list_filename'.
        
        The present day location ('longitude' / 'latitude' in degrees) is also assigned a plate ID using the static polygons.
        """
        
        self.location = pygplates.PointOnSphere((latitude, longitude))
        self.age = age
        
        self.grids = TimeDependentGrid(grid_list_filename)
        self.rotation_model = pygplates.RotationModel(rotation_filenames)
        
        # Find the plate ID of the static polygon containing the location (or zero if not in any plate).
        plate_partitioner = pygplates.PlatePartitioner(static_polygon_filename, self.rotation_model)
        partitioning_plate = plate_partitioner.partition_point(self.location)
        if partitioning_plate:
            self.reconstruction_plate_id = partitioning_plate.get_feature().get_reconstruction_plate_id()
        else:
            self.reconstruction_plate_id = 0
        
        # Use the appearance age of the containing static polygon if 'age' is None (ie, outside the age grid).
        if self.age is None:
            if partitioning_plate:
                self.age, _ = partitioning_plate.get_feature().get_valid_time()
            else:
                self.age = 0.0
Example #5
def get_change_mask_multipoints(pg_features,t1,t2,psl_t1,psl_t2,
                                points,spatial_tree_of_uniform_recon_points,
                                rotation_model,plot=False):
    
    print('Working on interpolation from %0.2f Ma to %0.2f Ma .....' % (t1, t2))
        
    plate_partitioner = pygplates.PlatePartitioner(pg_features, rotation_model, reconstruction_time=t1)
    
    (distance_to_land_t1,
     distance_to_psl_t1,
     distance_to_land_t2,
     distance_to_psl_t2,
     regression_msk,
     transgression_msk,
     always_land_msk) = get_change_masks(t1,points,spatial_tree_of_uniform_recon_points,
                                         psl_t1,psl_t2,rotation_model)
    
    # list() so coords can be iterated more than once below (zip() is a one-shot iterator in Python 3)
    coords = list(zip(*[point.to_lat_lon() for point in points]))
    
    pg_points_regression = get_masked_multipoint(coords,regression_msk,plate_partitioner,
                                                 valid_time=[t2,t1+0.01])
    pg_points_transgression = get_masked_multipoint(coords,transgression_msk,plate_partitioner,
                                                    valid_time=[t2,t1+0.01])
    pg_points_always_land = get_masked_multipoint(coords,always_land_msk,plate_partitioner,
                                                  valid_time=[t2,t1])

    pygplates.FeatureCollection(pg_points_regression).write('./tween_feature_collections/mountain_regression_%0.2fMa_%0.2fMa.gpmlz' % (t1,t2))
    pygplates.FeatureCollection(pg_points_transgression).write('./tween_feature_collections/mountain_transgression_%0.2fMa_%0.2fMa.gpmlz' % (t1,t2))
    pygplates.FeatureCollection(pg_points_always_land).write('./tween_feature_collections/mountain_stable_%0.2fMa_%0.2fMa.gpmlz' % (t1,t2))
Example #6
    def __init__(self,
                 grid_list_filename,
                 static_polygon_filename,
                 rotation_filenames,
                 longitude,
                 latitude,
                 age=None):
        """
        Load dynamic topography grid filenames and associated ages from grid list file 'grid_list_filename'.
        
        Parameters
        ----------
        grid_list_filename : str
            The filename of the grid list file.
        static_polygon_filename : str
            The filename of the static polygons file.
        rotation_filenames : list of str
            The list of rotation filenames.
        longitude : float
            Longitude of the ocean point location.
        latitude : float
            Latitude of the ocean point location.
        age : float, optional
            The age of the crust that the point location is on.
            If not specified then the appearance age of the static polygon containing the point is used.
        
        Notes
        -----
        Each row in the grid list file should contain two columns: the first column is the
        filename (relative to the directory of the list file) of a dynamic topography grid
        at a particular time, and the second column is the associated time (in Ma).
        
        The present day location ('longitude' / 'latitude' in degrees) is also assigned a plate ID using the static polygons,
        and the rotations are used to reconstruct the location when sampling the grids at a reconstructed time.
        """

        self.latitude = latitude
        self.longitude = longitude
        self.location = pygplates.PointOnSphere((latitude, longitude))
        self.age = age

        self.grids = TimeDependentGrid(grid_list_filename)
        self.rotation_model = pygplates.RotationModel(rotation_filenames)

        # Find the plate ID of the static polygon containing the location (or zero if not in any plates).
        plate_partitioner = pygplates.PlatePartitioner(static_polygon_filename,
                                                       self.rotation_model)
        partitioning_plate = plate_partitioner.partition_point(self.location)
        if partitioning_plate:
            self.reconstruction_plate_id = partitioning_plate.get_feature().get_reconstruction_plate_id()
        else:
            self.reconstruction_plate_id = 0

        # Use the appearance age of the containing static polygon if 'age' is None (ie, outside the age grid).
        if self.age is None:
            if partitioning_plate:
                self.age, _ = partitioning_plate.get_feature().get_valid_time()
            else:
                self.age = 0.0
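
A construction sketch; the enclosing class name is not shown in the snippet, so DynamicTopography below is an assumption, as are the file paths:

dyn_topo = DynamicTopography(            # hypothetical class name
    'dynamic_topography/grid_list.txt',  # hypothetical grid list file
    'static_polygons.gpmlz',             # hypothetical static polygons file
    ['rotations.rot'],                   # hypothetical rotation file(s)
    longitude=-60.0,
    latitude=-40.0)                      # age=None: taken from the containing static polygon
print(dyn_topo.reconstruction_plate_id, dyn_topo.age)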
Example #7
def profile_plate_ids(resolved_topologies,rotation_model,GreatCirclePoints):

    partitioner = pygplates.PlatePartitioner(resolved_topologies, rotation_model)

    plate_ids = []
    for point in GreatCirclePoints:
        partitioned_point = partitioner.partition_point(pygplates.PointOnSphere(point))
        # partition_point() returns None when the point falls outside all polygons
        if partitioned_point is not None:
            plate_ids.append(partitioned_point.get_feature().get_reconstruction_plate_id())
        else:
            plate_ids.append(None)

    return plate_ids
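
A sketch of calling profile_plate_ids at 50 Ma; the filenames are hypothetical, and the profile points are (lat, lon) tuples, which pygplates.PointOnSphere accepts:

import pygplates

rotation_model = pygplates.RotationModel('rotations.rot')            # hypothetical file
topology_features = pygplates.FeatureCollection('topologies.gpmlz')  # hypothetical file

resolved_topologies = []
pygplates.resolve_topologies(topology_features, rotation_model, resolved_topologies, 50.)

great_circle_points = [(0., 120.), (5., 125.), (10., 130.)]  # (lat, lon) samples along a profile
plate_ids = profile_plate_ids(resolved_topologies, rotation_model, great_circle_points)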
Example #8
    def __calc_velocities(self, velocity_domain_features, topology_features,
                          rotation_model, time, delta_time):
        # All domain points and associated (magnitude, azimuth, inclination) velocities for the current time.
        all_domain_points = []
        all_velocities = []

        # Partition our velocity domain features into our topological plate polygons at the current 'time'.
        plate_partitioner = pygplates.PlatePartitioner(topology_features,
                                                       rotation_model, time)

        for velocity_domain_feature in velocity_domain_features:

            # A velocity domain feature usually has a single geometry but we'll assume it can be any number.
            # Iterate over them all.
            for velocity_domain_geometry in velocity_domain_feature.get_geometries():

                for velocity_domain_point in velocity_domain_geometry.get_points():

                    all_domain_points.append(velocity_domain_point)

                    partitioning_plate = plate_partitioner.partition_point(
                        velocity_domain_point)
                    if partitioning_plate:

                        # We need the newly assigned plate ID to get the equivalent stage rotation of that tectonic plate.
                        partitioning_plate_id = partitioning_plate.get_feature().get_reconstruction_plate_id()

                        # Get the stage rotation of partitioning plate from 'time + delta_time' to 'time'.
                        equivalent_stage_rotation = rotation_model.get_rotation(
                            time, partitioning_plate_id, time + delta_time)

                        # Calculate velocity at the velocity domain point.
                        # This is from 'time + delta_time' to 'time' on the partitioning plate.
                        # NB: velocity unit is fixed to cm/yr, but we convert it to m/yr and further on non-dimensionalise it later.
                        velocity_vectors = pygplates.calculate_velocities(
                            [velocity_domain_point],
                            equivalent_stage_rotation,
                            delta_time,
                            velocity_units=pygplates.VelocityUnits.cms_per_yr)

                        # add it to the list
                        all_velocities.extend(velocity_vectors)
                    else:
                        # NaN velocity marks points that fall outside all plate polygons
                        all_velocities.append(pygplates.Vector3D(numpy.nan, numpy.nan, numpy.nan))

        return all_velocities
Example #9
def get_vertical_change_multipoints(pg_features,t1,t2,psl_t1,psl_t2,
                                    points,spatial_tree_of_uniform_recon_points,
                                    rotation_model,plot=False):
    # NOT WORKING DUE TO LACK OF SUPPORT FOR SCALAR COVERAGES
    
    print('Working on interpolation from %0.2f Ma to %0.2f Ma .....' % (t1, t2))
        
    plate_partitioner = pygplates.PlatePartitioner(pg_features, rotation_model, reconstruction_time=t1)
    
    (distance_to_land_t1,
     distance_to_psl_t1,
     distance_to_land_t2,
     distance_to_psl_t2,
     regression_msk,
     transgression_msk,
     always_land_msk) = get_change_masks(t1,points,spatial_tree_of_uniform_recon_points,
                                         psl_t1,psl_t2,rotation_model)
    
    # list() so coords can be reused below (zip() is a one-shot iterator in Python 3)
    coords = list(zip(*[point.to_lat_lon() for point in points]))
    
    pg_points_regression = get_masked_multipoint(coords,regression_msk,plate_partitioner,
                                                 valid_time=[t2,t1+0.01])
    pg_points_transgression = get_masked_multipoint(coords,transgression_msk,plate_partitioner,
                                                    valid_time=[t2,t1+0.01])
    pg_points_always_land = get_masked_multipoint(coords,always_land_msk,plate_partitioner,
                                                  valid_time=[t2,t1])

    
    # make a scalar coverage
    multi_point = pygplates.MultiPointOnSphere(points)
    scalar_coverages = {
        pygplates.ScalarType.create_gpml('distance_to_land_t1'): distance_to_land_t1,
        pygplates.ScalarType.create_gpml('distance_to_psl_t1'): distance_to_psl_t1,
        pygplates.ScalarType.create_gpml('distance_to_land_t2'): distance_to_land_t2,
        pygplates.ScalarType.create_gpml('distance_to_psl_t2'): distance_to_psl_t2}

    sc_feature = pygplates.Feature()
    sc_feature.set_geometry((multi_point,scalar_coverages))
    sc_feature.set_name('Paleotopography Test Points')
    
    (cc_sc_features,
     dummy) = plate_partitioner.partition_features(sc_feature,
                                                   partition_return=pygplates.PartitionReturn.separate_partitioned_and_unpartitioned)
    
    pygplates.FeatureCollection(cc_sc_features).write('./tween_feature_collections/mountain_scalar_coverages_%0.2fMa_%0.2fMa.gpmlz' % (t1,t2))
Example #10
def assign_plate_ids(vgps, reconstruction_model):
    '''
    Assign plate IDs to Virtual Geomagnetic Poles (VGPs). This is a special case of
    plate partitioning where we must use the 'AverageSampleSitePosition' rather than
    the feature geometry.
    The input can be a (geopandas) GeoDataFrame or a pygplates FeatureCollection; the
    output type will match the input.
    '''

    plate_partitioner = pygplates.PlatePartitioner(reconstruction_model.static_polygons, 
                                                   reconstruction_model.rotation_model)

    if isinstance(vgps, _gpd.GeoDataFrame):
        partition_plate_ids = []
        for _, row in vgps.iterrows():
            partition_polygon = plate_partitioner.partition_point(pygplates.PointOnSphere(row.geometry.y,
                                                                                          row.geometry.x))
            partition_plate_ids.append(partition_polygon.get_feature().get_reconstruction_plate_id())

        vgps['PlateID'] = partition_plate_ids

        return vgps

    elif isinstance(vgps, (pygplates.FeatureCollection, list)):
        if isinstance(vgps, list):
            vgps = pygplates.FeatureCollection(vgps)
        partitioned_vgps = []
        for vgp in vgps:
            partition_polygon = plate_partitioner.partition_point(
                vgp.get(pygplates.PropertyName.gpml_average_sample_site_position).get_value().get_geometry())
            vgp.set_reconstruction_plate_id(partition_polygon.get_feature().get_reconstruction_plate_id())
            partitioned_vgps.append(vgp)

        return pygplates.FeatureCollection(partitioned_vgps)

    else:
        raise TypeError('Unexpected type {} for vgp input'.format(type(vgps)))
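
A GeoDataFrame round-trip sketch; 'vgp_sites.shp' is a hypothetical point file, and reconstruction_model is assumed to expose the static_polygons and rotation_model attributes the function expects:

import geopandas as _gpd

vgps = _gpd.read_file('vgp_sites.shp')  # hypothetical file of VGP site locations
vgps = assign_plate_ids(vgps, reconstruction_model)
print(vgps['PlateID'].head())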
Example #11
def get_velocities(rotation_model,
                   topology_features,
                   time,
                   velocity_domain_features=None,
                   delta_time=1,
                   velocity_type='MagAzim'):

    if velocity_domain_features is None:
        velocity_domain_features = create_gpml_healpix_mesh(
            32, feature_type='MeshNode')

    # All domain points and associated (magnitude, azimuth, inclination) velocities for the current time.
    all_domain_points = []
    all_velocities = []
    plate_ids = []

    # Partition our velocity domain features into our topological plate polygons at the current 'time'.
    plate_partitioner = pygplates.PlatePartitioner(topology_features,
                                                   rotation_model, time)

    for velocity_domain_feature in velocity_domain_features:

        # A velocity domain feature usually has a single geometry but we'll assume it can be any number.
        # Iterate over them all.
        for velocity_domain_geometry in velocity_domain_feature.get_geometries():

            for velocity_domain_point in velocity_domain_geometry.get_points():

                all_domain_points.append(velocity_domain_point)

                partitioning_plate = plate_partitioner.partition_point(
                    velocity_domain_point)
                if partitioning_plate:

                    # We need the newly assigned plate ID to get the equivalent stage rotation of that tectonic plate.
                    partitioning_plate_id = partitioning_plate.get_feature().get_reconstruction_plate_id()

                    # Get the stage rotation of partitioning plate from 'time + delta_time' to 'time'.
                    equivalent_stage_rotation = rotation_model.get_rotation(
                        time, partitioning_plate_id, time + delta_time)

                    # Calculate velocity at the velocity domain point.
                    # This is from 'time + delta_time' to 'time' on the partitioning plate.
                    velocity_vectors = pygplates.calculate_velocities(
                        [velocity_domain_point], equivalent_stage_rotation,
                        delta_time)

                    if velocity_type == 'east_north':
                        # Convert global 3D velocity vectors to local (north, east, down) vectors (one per point).
                        velocities = pygplates.LocalCartesian.convert_from_geocentric_to_north_east_down(
                            [velocity_domain_point], velocity_vectors)
                        all_velocities.append(velocities[0])

                    else:
                        # Convert global 3D velocity vectors to local (magnitude, azimuth, inclination) tuples (one tuple per point).
                        velocities = pygplates.LocalCartesian.convert_from_geocentric_to_magnitude_azimuth_inclination(
                            [velocity_domain_point], velocity_vectors)
                        all_velocities.append(velocities[0])

                    plate_ids.append(partitioning_plate_id)

                else:
                    # If point is not within a polygon, set velocity and plate_id to zero
                    all_velocities.append((0, 0, 0))
                    plate_ids.append(0)

    pt_vel1 = []
    pt_vel2 = []
    if velocity_type == 'east_north':
        for velocity_vector in all_velocities:
            # Vector3D results expose get_x/get_y; unpartitioned points were appended as (0, 0, 0) tuples.
            if getattr(velocity_vector, 'get_x', None) is not None:
                pt_vel1.append(velocity_vector.get_y())  # east component
                pt_vel2.append(velocity_vector.get_x())  # north component
            else:
                pt_vel1.append(0.)
                pt_vel2.append(0.)
    else:
        for velocity_vector in all_velocities:
            pt_vel1.append(velocity_vector[0])
            pt_vel2.append(velocity_vector[1])

    pt_lon = []
    pt_lat = []
    for pt in all_domain_points:
        pt_lon.append(pt.to_lat_lon()[1])
        pt_lat.append(pt.to_lat_lon()[0])

    return pt_lat, pt_lon, pt_vel1, pt_vel2, plate_ids
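
A sketch of the default call, which builds a healpix domain internally; the filenames are hypothetical. With velocity_type='east_north' the third and fourth return values are the east and north velocity components:

import pygplates

rotation_model = pygplates.RotationModel('rotations.rot')            # hypothetical file
topology_features = pygplates.FeatureCollection('topologies.gpmlz')  # hypothetical file

pt_lat, pt_lon, vel_east, vel_north, plate_ids = get_velocities(
    rotation_model, topology_features, time=30., velocity_type='east_north')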
Example #12
def interpolate_paleoshoreline_for_stage(pg_features,
                                         t1,
                                         t2,
                                         psl_t1,
                                         psl_t2,
                                         time_step,
                                         points,
                                         spatial_tree_of_uniform_recon_points,
                                         rotation_model,
                                         plot=False):

    print('Working on interpolation from %0.2f Ma to %0.2f Ma .....' % (t1, t2))

    plate_partitioner = pygplates.PlatePartitioner(pg_features,
                                                   rotation_model,
                                                   reconstruction_time=t1)

    (distance_to_land_t1, distance_to_psl_t1, distance_to_land_t2,
     distance_to_psl_t2, regression_msk, transgression_msk,
     always_land_msk) = get_change_masks(t1, points,
                                         spatial_tree_of_uniform_recon_points,
                                         psl_t1, psl_t2, rotation_model)

    # normalised distance derivation
    # for each point, divide the distance to shoreline at t0 by the total distance to both shorelines
    # --> if the point is halfway between the shorelines, value will be 0.5
    #     if the point is closer to the t1 shoreline, the value will be less than 0.5
    #     all values will be between 0 and 1
    psl_dist_norm = np.divide(distance_to_psl_t1,
                              (distance_to_psl_t1 + distance_to_psl_t2))

    t_diff = (t2 - t1)

    # before looping over the time steps, create an empty list to put the features in
    pg_points_land_list = []
    pg_points_marine_list = []

    # don't need to do t2 itself, since this will be first step in next iteration
    for reconstruction_time in np.arange(t1, t2, time_step):

        if reconstruction_time == t1:
            land_points = np.where(distance_to_land_t1 == 0)[0]

        else:
            # normalised time, in range 0 to 1 between start and end of stage
            t_norm = (reconstruction_time - t1) / t_diff

            is_transgressing_land_msk = np.less_equal(psl_dist_norm, t_norm)

            is_regressing_land_msk = np.greater_equal(psl_dist_norm, t_norm)

            land_points = np.where(
                np.logical_or(
                    np.logical_or(
                        np.logical_and(is_regressing_land_msk, regression_msk),
                        np.logical_and(is_transgressing_land_msk,
                                       transgression_msk)),
                    always_land_msk))[0]

        marine_mask = np.ones(distance_to_land_t1.shape, dtype=bool)
        marine_mask[land_points] = False
        marine_points = np.where(marine_mask)[0]

        # list() so coords can be reused for both calls below (zip() is a one-shot iterator in Python 3)
        coords = list(zip(*[point.to_lat_lon() for point in points]))

        pg_points_land = get_masked_multipoint(
            coords,
            land_points,
            plate_partitioner,
            valid_time=[
                reconstruction_time + (time_step / 2.),
                reconstruction_time - (time_step / 2.) + 0.01
            ])

        pg_points_marine = get_masked_multipoint(
            coords,
            marine_points,
            plate_partitioner,
            valid_time=[
                reconstruction_time + (time_step / 2.),
                reconstruction_time - (time_step / 2.) + 0.01
            ])

        # append the point features for this time to the overall list
        pg_points_land_list += pg_points_land
        pg_points_marine_list += pg_points_marine

    pygplates.FeatureCollection(pg_points_land_list).write(
        './tween_feature_collections/tweentest_land_%0.2fMa_%0.2fMa.gpmlz' %
        (t1, t2))
    pygplates.FeatureCollection(pg_points_marine_list).write(
        './tween_feature_collections/tweentest_ocean_%0.2fMa_%0.2fMa.gpmlz' %
        (t1, t2))
Example #13

static_polygons_file = os.path.join(
    DYNAMIC_TOPOGRAPHY_RECONSTRUCTIONS_PATH,
    'Global_EarthByte_GPlates_PresentDay_StaticPlatePolygons_2014.1.shp')
rotation_files = [
    os.path.join(
        DYNAMIC_TOPOGRAPHY_RECONSTRUCTIONS_PATH,
        'Global_EarthByte_TPW_GeeK07_2014.1_VanDerMeer_CrossoverFix.rot')
]
# DYNAMIC_TOPOGRAPHY_RECONSTRUCTIONS_PATH = r'D:\Users\john\Development\Usyd\gplates\source-code\pygplates_scripts\Other\Backstrip\backstrip\bundle_data\dynamic_topography\reconstructions\Global_Model_WD_Internal_Release_2015_v2'
#
# multipoints_file = os.path.join(DYNAMIC_TOPOGRAPHY_RECONSTRUCTIONS_PATH, 'lat_lon_velocity_domain_720_1440.shp')
# static_polygons_file = os.path.join(DYNAMIC_TOPOGRAPHY_RECONSTRUCTIONS_PATH, 'Global_EarthByte_GPlates_PresentDay_StaticPlatePolygons_2015_v2.gpmlz')
# rotation_files = [
#     os.path.join(DYNAMIC_TOPOGRAPHY_RECONSTRUCTIONS_PATH, 'Global_EB_250-0Ma_GK07_2015_v2.rot'),
#     os.path.join(DYNAMIC_TOPOGRAPHY_RECONSTRUCTIONS_PATH, 'Global_EB_410-250Ma_GK07_2015_v2.rot')
# ]

plate_partitioner = pygplates.PlatePartitioner(static_polygons_file,
                                               rotation_files)

failed_multipoints = []
for multipoint in pygplates.FeatureCollection(multipoints_file):
    partitioned_multipoints = plate_partitioner.partition_features(
        multipoint,
        properties_to_copy=[
            pygplates.PartitionProperty.reconstruction_plate_id,
            pygplates.PartitionProperty.valid_time_period
        ])

    for partitioned_multipoint in partitioned_multipoints:
        if (partitioned_multipoint.get_reconstruction_plate_id() !=
                multipoint.get_reconstruction_plate_id()):
            failed_multipoints.append(partitioned_multipoint)
            print('Failed ({0} points): original plate {1}, new plate {2}'.format(
                len(partitioned_multipoint.get_geometry()),
                multipoint.get_reconstruction_plate_id(),
                partitioned_multipoint.get_reconstruction_plate_id()))
Example #14
    def _find_resolved_topologies_containing_points(self):

        current_time = self.get_current_time()

        # Resolve the plate polygons for the current time.
        resolved_topologies = []
        pygplates.resolve_topologies(self.topology_features,
                                     self.rotation_model, resolved_topologies,
                                     current_time)

        if ReconstructByTopologies.use_plate_partitioner:
            # Create a plate partitioner from the resolved polygons.
            plate_partitioner = pygplates.PlatePartitioner(
                resolved_topologies, self.rotation_model)
        else:
            # Some of 'curr_points' will be None so 'curr_valid_points' contains only the valid (not None)
            # points, and 'curr_valid_points_indices' is the same length as 'curr_points' but indexes into
            # 'curr_valid_points' so we can quickly find which point (and hence which resolved topology)
            # in 'curr_valid_points' is associated with a particular point in 'curr_points'.
            curr_valid_points = []
            curr_valid_points_indices = [None] * self.num_points
            for point_index, curr_point in enumerate(self.curr_points):
                if curr_point is not None:
                    curr_valid_points_indices[point_index] = len(curr_valid_points)
                    curr_valid_points.append(curr_point)
            # For each valid current point find the resolved topology containing it.
            resolved_topologies_containing_curr_valid_points = points_in_polygons.find_polygons(
                curr_valid_points,
                [resolved_topology.get_resolved_boundary()
                 for resolved_topology in resolved_topologies],
                resolved_topologies)

        # Iterate over all points.
        for point_index, curr_point in enumerate(self.curr_points):

            if curr_point is None:
                # Current point is not currently active - so skip it.
                self.curr_topology_plate_ids[point_index] = None
                self.curr_resolved_plate_boundaries[point_index] = None
                continue

            # Find the plate id of the polygon that contains 'curr_point'.
            if ReconstructByTopologies.use_plate_partitioner:
                curr_polygon = plate_partitioner.partition_point(curr_point)
            else:
                curr_polygon = resolved_topologies_containing_curr_valid_points[
                    # Index back into 'curr_valid_points' and hence also into
                    # 'resolved_topologies_containing_curr_valid_points'.
                    curr_valid_points_indices[point_index]]
            self.curr_resolved_plate_boundaries[point_index] = curr_polygon

            # If the polygon is None, that means (presumably) that it fell into a crack between
            # topologies. So it will be skipped and thrown away from future iterations.
            if curr_polygon is None:
                self.curr_topology_plate_ids[point_index] = None
                continue

            # Set the plate ID of resolved topology containing current point.
            self.curr_topology_plate_ids[point_index] = curr_polygon.get_feature().get_reconstruction_plate_id()