Example #1
	def __init__(self, srs, bbox, width=None, height=None, format=None, resource_id=None):
		super(WmsQuery, self).__init__()
		self.query_type = 'WMS'
		self.srs = srs
		self.bbox = bbox
		self.width = width
		self.height = height
		self.format = format
		self.resource_id = resource_id
		if width is not None and height is not None:
			# calculate resolution... this should slow things down, yay... :-(
			p = Proj(init=srs.lower())
			if not p.is_latlong():
				min_lon, min_lat = p(bbox.min_x,bbox.min_y, inverse=True)
				max_lon, max_lat = p(bbox.max_x,bbox.max_y, inverse=True)
			else:
				min_lon, min_lat = bbox.min_x, bbox.min_y
				max_lon, max_lat = bbox.max_x, bbox.max_y
			g = Geod(ellps='clrk66') # Use Clarke 1966 ellipsoid. 
			_,_,diagonal = g.inv(min_lon, min_lat, max_lon, max_lat)
			# distance calculated geodesic
			dist_x = sqrt(diagonal**2 / (1 + float(height)/float(width)) )
			dist_y = dist_x * (float(height)/float(width))
			self.x_res = dist_x / float(width)
			self.y_res = dist_y / float(height)
		else:
			self.x_res = None
			self.y_res = None
Example #2
 def __init__(self,
              srs,
              bbox,
              width=None,
              height=None,
              format=None,
              resource_id=None):
     super(WmsQuery, self).__init__()
     self.query_type = 'WMS'
     self.srs = srs
     self.bbox = bbox
     self.width = width
     self.height = height
     self.format = format
     self.resource_id = resource_id
     if width is not None and height is not None:
         # calculate resolution... this should slow things down, yay... :-(
         p = Proj(init=srs.lower())
         if not p.is_latlong():
             min_lon, min_lat = p(bbox.min_x, bbox.min_y, inverse=True)
             max_lon, max_lat = p(bbox.max_x, bbox.max_y, inverse=True)
         else:
             min_lon, min_lat = bbox.min_x, bbox.min_y
             max_lon, max_lat = bbox.max_x, bbox.max_y
         g = Geod(ellps='clrk66')  # Use Clarke 1966 ellipsoid.
         _, _, diagonal = g.inv(min_lon, min_lat, max_lon, max_lat)
         # distance calculated geodesic
         dist_x = sqrt(diagonal**2 / (1 + float(height) / float(width)))
         dist_y = dist_x * (float(height) / float(width))
         self.x_res = dist_x / float(width)
         self.y_res = dist_y / float(height)
     else:
         self.x_res = None
         self.y_res = None
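
The two snippets above estimate the per-pixel ground resolution of a WMS request by measuring the geodesic length of the bounding-box diagonal and splitting it between the x and y axes according to the image aspect ratio. A minimal, self-contained sketch of that idea (the bounding box and image size are made-up values, and the split uses the squared aspect ratio from the Pythagorean identity):

from math import sqrt
from pyproj import Geod

# made-up WGS84 bounding box (lon/lat) and output image size
min_lon, min_lat, max_lon, max_lat = 5.0, 45.0, 6.0, 46.0
width, height = 800, 600

g = Geod(ellps='clrk66')  # same ellipsoid as in the examples above
_, _, diagonal = g.inv(min_lon, min_lat, max_lon, max_lat)  # geodesic diagonal in metres

ratio = float(height) / float(width)
dist_x = sqrt(diagonal ** 2 / (1 + ratio ** 2))  # diagonal^2 = dist_x^2 + dist_y^2
dist_y = dist_x * ratio

print(dist_x / width, dist_y / height)  # metres per pixel in x and y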
Example #3
    def handle(self, **options):
        input_file = options['input_file']
        year = options['year']
        scheme = options['scheme']
        field = options['field']
        name = options['name']
        # Create ValidClassification objects list
        # Push it to database

        # Read file and Optionally reproject the features to longlat
        with fiona.open(input_file) as src:
            p = Proj(src.crs)
            # Here we assume that geographic coordinates are automatically 4326 (not quite true)
            if p.is_latlong():
                fc = list(src)
            else:
                crs_str = to_string(src.crs)
                fc = [
                    feature_transform(x,
                                      crs_out='+proj=longlat',
                                      crs_in=crs_str) for x in src
                ]

        # Write features to ValidObject table
        def valid_obj_builder(x):
            """Build individual ValidObjects
            """
            geom = GEOSGeometry(json.dumps(x['geometry']))
            obj = ValidObject(the_geom=geom)
            return obj

        obj_list = [valid_obj_builder(x) for x in fc]
        ValidObject.objects.bulk_create(obj_list)

        # Get list of unique tags
        unique_numeric_codes = list(set([x['properties'][field] for x in fc]))

        # Update Tag table using get or create
        def make_tag_tuple(x):
            obj, _ = Tag.objects.get_or_create(numeric_code=x, scheme=scheme)
            return (x, obj)

        tag_dict = dict([make_tag_tuple(x) for x in unique_numeric_codes])

        # Build validClassification object list (valid_tag, valid_object, valid_set)
        def valid_class_obj_builder(x):
            """x is a tuple (ValidObject, feature)"""
            tag = tag_dict[x[1]['properties'][field]]
            obj = ValidClassification(valid_tag=tag,
                                      valid_object=x[0],
                                      valid_set=name,
                                      interpretation_year=year)
            return obj

        valid_class_obj_list = [
            valid_class_obj_builder(x) for x in zip(obj_list, fc)
        ]

        ValidClassification.objects.bulk_create(valid_class_obj_list)
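
The reprojection step above goes through a project-specific feature_transform helper. A rough equivalent using fiona's own transform_geom is sketched below (the function is hypothetical, not from the project, and copies properties unchanged):

import fiona
from fiona.transform import transform_geom

def reproject_to_lonlat(path):
    """Read a vector file and return its features with lon/lat geometries."""
    with fiona.open(path) as src:
        return [
            {'type': 'Feature',
             'geometry': transform_geom(src.crs, 'EPSG:4326', feat['geometry']),
             'properties': dict(feat['properties'])}
            for feat in src
        ]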
Example #4
def extentToLatLon(extent, proj):
    if proj == '' or proj is None:
        return None
    p1 = Proj(proj, preserve_units=True) 
    if p1.is_latlong():
        return extent
    x1,y1 = p1(extent[0], extent[1], inverse=True)
    x2,y2 = p1(extent[2], extent[3], inverse=True)
    return x1, y1, x2, y2
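
A short usage sketch for the helper above (the UTM proj string and extent are illustrative values; extentToLatLon expects Proj from pyproj to be in scope):

from pyproj import Proj  # required by extentToLatLon

# projected extent (xmin, ymin, xmax, ymax) in UTM zone 33N metres
utm_extent = (500000.0, 4649776.0, 510000.0, 4659776.0)
print(extentToLatLon(utm_extent, '+proj=utm +zone=33 +ellps=WGS84'))
# -> lon/lat degrees, roughly around 15.0 E / 42.0 N for this extent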
Example #5
def mosaic_and_clip(raster_tiles, xmin, ymin, xmax, ymax, output_path):
    from pyproj import Proj
    import rasterio
    import subprocess

    print 'Mosaic and clip to bounding box extents'
    output_vrt = os.path.splitext(output_path)[0] + '.vrt'
    print subprocess.check_output(['gdalbuildvrt', '-overwrite', output_vrt] + raster_tiles)
    # check crs
    with rasterio.drivers():
        with rasterio.open(output_vrt) as src:
            p = Proj(src.crs)

    if not p.is_latlong():
        [xmax, xmin],[ymax, ymin] = p([xmax, xmin],[ymax,ymin])

    print subprocess.check_output(['gdalwarp', '-overwrite', '-te', repr(xmin), repr(ymin), repr(xmax), repr(ymax), output_vrt, output_path])
    print 'Output raster saved at %s' % output_path
Example #6
File: raster.py  Project: yosukefk/ulmo
def mosaic_and_clip(raster_tiles, xmin, ymin, xmax, ymax, output_path):
    from pyproj import Proj
    import rasterio
    import subprocess

    print('Mosaic and clip to bounding box extents')
    output_vrt = os.path.splitext(output_path)[0] + '.vrt'
    print(subprocess.check_output(['gdalbuildvrt', '-overwrite', output_vrt] + raster_tiles))
    # check crs
    with rasterio.drivers():
        with rasterio.open(output_vrt) as src:
            p = Proj(src.crs)

    if not p.is_latlong():
        [xmax, xmin],[ymax, ymin] = p([xmax, xmin],[ymax,ymin])

    print(subprocess.check_output(['gdalwarp', '-overwrite', '-te', repr(xmin), repr(ymin), repr(xmax), repr(ymax), output_vrt, output_path]))
    print('Output raster saved at %s' % output_path)
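
Both mosaic_and_clip variants assume the bounding box is given in lon/lat and convert it to the raster's coordinate system only when that CRS is projected. In pyproj 2+, where Proj.is_latlong() no longer exists, the same check and conversion could be written roughly as follows (the sample bounds and the UTM CRS are illustrative stand-ins for the raster's CRS):

from pyproj import CRS, Transformer

xmin, ymin, xmax, ymax = -105.0, 39.0, -104.0, 40.0  # sample lon/lat bounds
crs = CRS.from_user_input("EPSG:32613")              # UTM zone 13N, stands in for src.crs

if not crs.is_geographic:  # pyproj 2+ replacement for Proj.is_latlong()
    to_raster = Transformer.from_crs("EPSG:4326", crs, always_xy=True)
    (xmin, xmax), (ymin, ymax) = to_raster.transform([xmin, xmax], [ymin, ymax])

print(xmin, ymin, xmax, ymax)  # bounds in the raster's projected coordinates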
Example #7
class TrajectoryPredictionEvaluator:
    def __init__(self, groundtruth_sample, predicted_location, crs, input_crs='epsg:4326'):
        self.truth = groundtruth_sample.future_pos
        self.true_traj = groundtruth_sample.future_traj
        self.prediction = predicted_location
        self.evaluation_crs = Proj(init=crs)
        self.input_crs = Proj(init=input_crs)
        self.linestring = self.create_linestring()
        self.projected_prediction = self.project_prediction_onto_linestring()

    def create_linestring(self):
        linestring = self.true_traj.to_linestring().coords
        return LineString([Point(self.project_point(Point(p))) for p in linestring])

    def get_errors(self):
        return {'distance': self.get_distance_error(),
                'cross_track': self.get_cross_track_error(),
                'along_track': self.get_along_track_error()}
    
    def get_distance_error(self):
        if self.input_crs.is_latlong():
            return measure_distance_spherical(self.truth, self.prediction)
        else:
            return measure_distance_euclidean(self.truth, self.prediction)

    def project_point(self, pt):
        x, y = transform(self.input_crs, self.evaluation_crs, pt.x, pt.y)
        return Point(x, y)
        
    def project_back(self, pt):
        lon, lat = transform(self.evaluation_crs, self.input_crs, pt.x, pt.y)
        return Point(lon, lat)   

    def project_prediction_onto_linestring(self):
        predicted_point = self.project_point(self.prediction)
        return self.project_back(self.linestring.interpolate(self.linestring.project(predicted_point)))
        
    def get_cross_track_error(self):
        return measure_distance_spherical(self.prediction, self.projected_prediction)
        
    def get_along_track_error(self):
        truth_dist_along_line = self.linestring.project(self.truth)
        predicted_dist_along_line = self.linestring.project(self.projected_prediction)
        return abs(predicted_dist_along_line - truth_dist_along_line)
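
The cross-track and along-track errors above rely on shapely's project/interpolate pair: project gives the distance along the line to the point closest to another geometry, and interpolate turns such a distance back into a point on the line. A tiny standalone illustration:

from shapely.geometry import LineString, Point

line = LineString([(0, 0), (10, 0)])  # a straight reference trajectory
pred = Point(4, 3)                    # a predicted position off the line

d_along = line.project(pred)          # along-track distance to the nearest point -> 4.0
on_line = line.interpolate(d_along)   # that nearest point on the line -> POINT (4 0)
print(d_along, pred.distance(on_line))  # 4.0 and the cross-track offset 3.0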
Example #8
    def geoGrid(extent,dims,nativeProj,wgsBounds=False):

        west, south, east, north = extent

        gcsProj = Proj(init='epsg:4326')
        native = Proj(nativeProj)

        gcs = native.is_latlong()

        if wgsBounds and not gcs:  # note: '~gcs' would be a bitwise NOT on a bool and is always truthy
            llx,lly = transform(gcsProj,native,west,south)
            urx,ury = transform(gcsProj,native,east,north)
        else:
            llx,lly = west,south
            urx,ury = east,north

        yCoords = np.linspace(lly,ury,dims[0],endpoint=False)[::-1]
        xCoords = np.linspace(llx,urx,dims[1],endpoint=False)

        xx,yy = np.meshgrid(xCoords,yCoords)

        return xx,yy
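
geoGrid spaces dims[1] x-values and dims[0] y-values across the extent (endpoint=False leaves out the upper bound, so the values behave like cell corners) and meshes them into coordinate matrices. A minimal illustration with a small made-up grid:

import numpy as np

west, south, east, north = -10.0, 40.0, 0.0, 50.0  # illustrative extent
rows, cols = 5, 10

y = np.linspace(south, north, rows, endpoint=False)[::-1]  # north-to-south row coordinates
x = np.linspace(west, east, cols, endpoint=False)
xx, yy = np.meshgrid(x, y)
print(xx.shape, yy.shape)  # both (5, 10)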
Example #9
def main(raster, outputdir, verbose):
    """ Convert a raster file (e.g. GeoTIFF) into an HDF5 file.

        RASTER: Path to raster file to convert to hdf5.

        The HDF5 file has the following datasets:

            - Raster: (original image data)

            - Latitude: (vector or matrix of pixel latitudes)

            - Longitude: (vector or matrix of pixel longitudes)

        And the following attributes:

            - affine: The affine transformation of the raster

            - Various projection information.
    """

    if verbose:
        print("Opening raster ...")

    # Read raster bands directly to Numpy arrays.
    # Much of this is from:
    #   http://gis.stackexchange.com/questions/129847/
    #   obtain-coordinates-and-corresponding-pixel-values-from-geotiff-using
    #   -python-gdal
    with rasterio.open(os.path.expanduser(raster)) as f:
        T0 = f.affine  # upper-left pixel corner affine transform
        crs = f.crs
        p1 = Proj(crs)
        I = f.read()
        nanvals = f.get_nodatavals()

    # Make sure rasterio is always giving us a 3D array
    assert(I.ndim == 3)

    # This only works on lat-lon projections for now
    if not p1.is_latlong():
        print("Error: This only works on spherical projections for now (YAGNI"
              " you know)...")
        exit(1)

    if verbose:
        print("Extracting coordinate sytem ...")

    # Get affine transform for pixel centres
    T1 = T0 * Affine.translation(0.5, 0.5)

    # Just find lat/lons of axis if there is no rotation/shearing
    # https://en.wikipedia.org/wiki/Transformation_matrix#Affine_transformations
    if (T1[1] == 0) and (T1[3] == 0):
        lons = T1[2] + np.arange(I.shape[2]) * T1[0]
        lats = T1[5] + np.arange(I.shape[1]) * T1[4]

    # Else, find lat/lons of every pixel!
    else:
        print("Error: Not yet tested... or even implemented properly!")
        exit(1)

        # Need to apply affine transformation to all pixel coords
        cls, rws = np.meshgrid(np.arange(I.shape[2]), np.arange(I.shape[1]))

        # Convert pixel row/column index (from 0) to lat/lon at centre
        rc2ll = lambda r, c: (c, r) * T1

        # All eastings and northings (there's probably a better way to do this)
        lons, lats = np.vectorize(rc2ll, otypes=[np.float, np.float])(rws, cls)

    # Permute layers to be more like a standard image, i.e. (band, lon, lat) ->
    #   (lon, lat, band)
    I = (I.transpose([2, 1, 0]))[:, ::-1]
    lats = lats[::-1]

    # Mask out NaN vals if they exist
    if nanvals is not None:
        for v in nanvals:
            if v is not None:
                if verbose:
                    print("Writing missing values")
                I[I == v] = np.nan

    # Now write the hdf5
    if verbose:
        print("Writing HDF5 file ...")

    file_stump = os.path.basename(raster).split('.')[-2]
    hdf5name = os.path.join(outputdir, file_stump + ".hdf5")
    with h5py.File(hdf5name, 'w') as f:
        drast = f.create_dataset("Raster", I.shape, dtype=I.dtype, data=I)
        drast.attrs['affine'] = T1
        for k, v in crs.items():
            drast.attrs[k] = v
        f.create_dataset("Latitude", lats.shape, dtype=float, data=lats)
        f.create_dataset("Longitude", lons.shape, dtype=float, data=lons)

    if verbose:
        print("Done!")
Example #10
################################################################################
import math
# p is a Proj instance created in an earlier cell (for example, the UTM projection defined below)
x,y=p(math.radians(105),math.radians(36),radians=True)
print( '%.3f,%.3f' %(x,y) )

################################################################################
lons=(105,106,104)
lats=(36,35,34)
x,y=p(lons,lats)

################################################################################
print('%.3f,%.3f,%.3f' %x)
print('%.3f,%.3f,%.3f' %y)
type(x)

################################################################################
utm=Proj(proj='utm',zone=48,ellps='WGS84')

################################################################################
x,y=utm(105,36)
x,y

################################################################################
utm.is_geocent()
utm.is_latlong()
latlong=Proj('+proj=latlong')
latlong.is_latlong()
latlong.is_geocent()
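
The session above assumes a Proj instance p created in an earlier cell, and is_latlong()/is_geocent() belong to the pyproj 1.x Proj API. In pyproj 2+ the equivalent checks live on the CRS object; roughly:

from pyproj import CRS

print(CRS("+proj=utm +zone=48 +ellps=WGS84").is_geographic)  # False
print(CRS("+proj=latlong +datum=WGS84").is_geographic)       # True
print(CRS("+proj=geocent +datum=WGS84").is_geocentric)       # True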
Example #11
def _load(filename,
          table,
          column,
          work_dir,
          server_url,
          capacity,
          usewith,
          srid=0):
    '''load pointclouds data using pdal and add metadata needed by lopocs'''
    # initialize flask application
    app = create_app()

    filename = Path(filename)
    work_dir = Path(work_dir)
    extension = filename.suffix[1:].lower()
    # laz uses las reader in PDAL
    extension = extension if extension != 'laz' else 'las'
    basename = filename.stem
    basedir = filename.parent

    pending('Creating metadata table')
    Session.create_pointcloud_lopocs_table()
    ok()

    pending('Reading summary with PDAL')
    json_path = os.path.join(
        str(work_dir.resolve()),
        '{basename}_{table}_pipeline.json'.format(**locals()))

    # tablename should be always prefixed
    if '.' not in table:
        table = 'public.{}'.format(table)

    cmd = "pdal info --summary {}".format(filename)
    try:
        output = check_output(shlex.split(cmd))
    except CalledProcessError as e:
        fatal(e)

    summary = json.loads(output.decode())['summary']
    ok()

    if 'srs' not in summary and not srid:
        fatal(
            'Unable to find the spatial reference system, please provide a SRID with option --srid'
        )

    if not srid:
        # find authority code in wkt string
        srid = re.findall(r'EPSG","(\d+)"', summary['srs']['wkt'])[-1]

    p = Proj(init='epsg:{}'.format(srid))

    if p.is_latlong():
        # geographic
        scale_x, scale_y, scale_z = (1e-6, 1e-6, 1e-2)
    else:
        # projection or geocentric
        scale_x, scale_y, scale_z = (0.01, 0.01, 0.01)
    offset_x = summary['bounds']['X']['min'] + (
        summary['bounds']['X']['max'] - summary['bounds']['X']['min']) / 2
    offset_y = summary['bounds']['Y']['min'] + (
        summary['bounds']['Y']['max'] - summary['bounds']['Y']['min']) / 2
    offset_z = summary['bounds']['Z']['min'] + (
        summary['bounds']['Z']['max'] - summary['bounds']['Z']['min']) / 2

    reproject = ""

    if usewith == 'cesium':
        from_srid = srid
        # cesium only uses epsg:4978, so we must reproject before loading into pg
        srid = 4978

        reproject = """
        {{
           "type":"filters.reprojection",
           "in_srs":"EPSG:{from_srid}",
           "out_srs":"EPSG:{srid}"
        }},""".format(**locals())
        # transform bounds in new coordinate system
        pini = Proj(init='epsg:{}'.format(from_srid))
        pout = Proj(init='epsg:{}'.format(srid))
        # recompute offset in new space and start at 0
        pending('Reprojected bounds', nl=True)
        # xmin, ymin, zmin = transform(pini, pout, offset_x, offset_y, offset_z)
        xmin, ymin, zmin = transform(pini, pout, summary['bounds']['X']['min'],
                                     summary['bounds']['Y']['min'],
                                     summary['bounds']['Z']['min'])
        xmax, ymax, zmax = transform(pini, pout, summary['bounds']['X']['max'],
                                     summary['bounds']['Y']['max'],
                                     summary['bounds']['Z']['max'])
        offset_x, offset_y, offset_z = xmin, ymin, zmin
        click.echo('{} < x < {}'.format(xmin, xmax))
        click.echo('{} < y < {}'.format(ymin, ymax))
        click.echo('{} < z < {}  '.format(zmin, zmax), nl=False)
        ok()
        pending('Computing best scales for cesium')
        # override scales for cesium: if possible we try to use quantized positions
        scale_x = min(compute_scale_for_cesium(xmin, xmax), 1)
        scale_y = min(compute_scale_for_cesium(ymin, ymax), 1)
        scale_z = min(compute_scale_for_cesium(zmin, zmax), 1)
        ok('[{}, {}, {}]'.format(scale_x, scale_y, scale_z))

    pg_host = app.config['PG_HOST']
    pg_name = app.config['PG_NAME']
    pg_port = app.config['PG_PORT']
    pg_user = app.config['PG_USER']
    pg_password = app.config['PG_PASSWORD']
    realfilename = str(filename.resolve())
    schema, tab = table.split('.')

    pending('Loading point clouds into database')

    with io.open(json_path, 'w') as json_file:
        json_file.write(PDAL_PIPELINE.format(**locals()))

    cmd = "pdal pipeline {}".format(json_path)

    try:
        check_call(shlex.split(cmd), stderr=DEVNULL, stdout=DEVNULL)
    except CalledProcessError as e:
        fatal(e)
    ok()

    pending("Creating indexes")
    Session.execute("""
        create index on {table} using gist(pc_envelopegeometry(points));
        alter table {table} add column morton bigint;
        select Morton_Update('{table}', 'points', 'morton', 128, TRUE);
        create index on {table}(morton);
    """.format(**locals()))
    ok()

    pending("Adding metadata for lopocs")
    Session.update_metadata(table, column, srid, scale_x, scale_y, scale_z,
                            offset_x, offset_y, offset_z)
    lpsession = Session(table, column)
    ok()

    # retrieve boundingbox
    fullbbox = lpsession.boundingbox
    bbox = [
        fullbbox['xmin'], fullbbox['ymin'], fullbbox['zmin'], fullbbox['xmax'],
        fullbbox['ymax'], fullbbox['zmax']
    ]

    if usewith == 'potree':
        lod_min = 0
        lod_max = 5
        # add schema currently used by potree (version 1.5RC)
        Session.add_output_schema(table, column, 0.01, 0.01, 0.01, offset_x,
                                  offset_y, offset_z, srid, potree_schema)
        cache_file = ("{0}_{1}_{2}_{3}_{4}.hcy".format(
            lpsession.table, lpsession.column, lod_min, lod_max,
            '_'.join(str(e) for e in bbox)))
        pending("Building greyhound hierarchy")
        new_hcy = greyhound.build_hierarchy_from_pg(lpsession, lod_min,
                                                    lod_max, bbox)
        greyhound.write_in_cache(new_hcy, cache_file)
        ok()
        create_potree_page(str(work_dir.resolve()), server_url, table, column)

    if usewith == 'cesium':
        pending("Building 3Dtiles tileset")
        hcy = threedtiles.build_hierarchy_from_pg(lpsession, server_url, bbox)

        tileset = os.path.join(str(work_dir.resolve()),
                               'tileset-{}.{}.json'.format(table, column))

        with io.open(tileset, 'wb') as out:
            out.write(hcy.encode())
        ok()
        create_cesium_page(str(work_dir.resolve()), table, column)
Example #12
File: cli.py  Project: LI3DS/lopocs
def _load(filename, table, column, work_dir, server_url, capacity, usewith, srid=0):
    '''load pointclouds data using pdal and add metadata needed by lopocs'''
    # initialize flask application
    app = create_app()

    filename = Path(filename)
    work_dir = Path(work_dir)
    extension = filename.suffix[1:].lower()
    # laz uses las reader in PDAL
    extension = extension if extension != 'laz' else 'las'
    basename = filename.stem
    basedir = filename.parent

    pending('Creating metadata table')
    Session.create_pointcloud_lopocs_table()
    ok()

    pending('Reading summary with PDAL')
    json_path = os.path.join(
        str(work_dir.resolve()),
        '{basename}_{table}_pipeline.json'.format(**locals()))

    # tablename should be always prefixed
    if '.' not in table:
        table = 'public.{}'.format(table)

    cmd = "pdal info --summary {}".format(filename)
    try:
        output = check_output(shlex.split(cmd))
    except CalledProcessError as e:
        fatal(e)

    summary = json.loads(output.decode())['summary']
    ok()

    if 'srs' not in summary and not srid:
        fatal('Unable to find the spatial reference system, please provide a SRID with option --srid')

    if not srid:
        # find authority code in wkt string
        srid = re.findall(r'EPSG","(\d+)"', summary['srs']['wkt'])[-1]

    p = Proj(init='epsg:{}'.format(srid))

    if p.is_latlong():
        # geographic
        scale_x, scale_y, scale_z = (1e-6, 1e-6, 1e-2)
    else:
        # projection or geocentric
        scale_x, scale_y, scale_z = (0.01, 0.01, 0.01)

    offset_x = summary['bounds']['X']['min'] + (summary['bounds']['X']['max'] - summary['bounds']['X']['min']) / 2
    offset_y = summary['bounds']['Y']['min'] + (summary['bounds']['Y']['max'] - summary['bounds']['Y']['min']) / 2
    offset_z = summary['bounds']['Z']['min'] + (summary['bounds']['Z']['max'] - summary['bounds']['Z']['min']) / 2

    reproject = ""

    if usewith == 'cesium':
        from_srid = srid
        # cesium only uses epsg:4978, so we must reproject before loading into pg
        srid = 4978

        reproject = """
        {{
           "type":"filters.reprojection",
           "in_srs":"EPSG:{from_srid}",
           "out_srs":"EPSG:{srid}"
        }},""".format(**locals())
        # transform bounds in new coordinate system
        pini = Proj(init='epsg:{}'.format(from_srid))
        pout = Proj(init='epsg:{}'.format(srid))
        # recompute offset in new space and start at 0
        pending('Reprojected bounds', nl=True)
        # xmin, ymin, zmin = transform(pini, pout, offset_x, offset_y, offset_z)
        xmin, ymin, zmin = transform(pini, pout, summary['bounds']['X']['min'], summary['bounds']['Y']['min'], summary['bounds']['Z']['min'])
        xmax, ymax, zmax = transform(pini, pout, summary['bounds']['X']['max'], summary['bounds']['Y']['max'], summary['bounds']['Z']['max'])
        offset_x, offset_y, offset_z = xmin, ymin, zmin
        click.echo('{} < x < {}'.format(xmin, xmax))
        click.echo('{} < y < {}'.format(ymin, ymax))
        click.echo('{} < z < {}  '.format(zmin, zmax), nl=False)
        ok()
        pending('Computing best scales for cesium')
        # override scales for cesium: if possible we try to use quantized positions
        scale_x = min(compute_scale_for_cesium(xmin, xmax), 1)
        scale_y = min(compute_scale_for_cesium(ymin, ymax), 1)
        scale_z = min(compute_scale_for_cesium(zmin, zmax), 1)
        ok('[{}, {}, {}]'.format(scale_x, scale_y, scale_z))

    pg_host = app.config['PG_HOST']
    pg_name = app.config['PG_NAME']
    pg_port = app.config['PG_PORT']
    pg_user = app.config['PG_USER']
    pg_password = app.config['PG_PASSWORD']
    realfilename = str(filename.resolve())
    schema, tab = table.split('.')

    pending('Loading point clouds into database')

    with io.open(json_path, 'w') as json_file:
        json_file.write(PDAL_PIPELINE.format(**locals()))

    cmd = "pdal pipeline {}".format(json_path)

    try:
        check_call(shlex.split(cmd), stderr=DEVNULL, stdout=DEVNULL)
    except CalledProcessError as e:
        fatal(e)
    ok()

    pending("Creating indexes")
    Session.execute("""
        create index on {table} using gist(pc_envelopegeometry(points));
        alter table {table} add column morton bigint;
        select Morton_Update('{table}', 'points', 'morton', 128, TRUE);
        create index on {table}(morton);
    """.format(**locals()))
    ok()

    pending("Adding metadata for lopocs")
    Session.update_metadata(
        table, column, srid, scale_x, scale_y, scale_z,
        offset_x, offset_y, offset_z
    )
    lpsession = Session(table, column)
    ok()

    # retrieve boundingbox
    fullbbox = lpsession.boundingbox
    bbox = [
        fullbbox['xmin'], fullbbox['ymin'], fullbbox['zmin'],
        fullbbox['xmax'], fullbbox['ymax'], fullbbox['zmax']
    ]

    if usewith == 'potree':
        lod_min = 0
        lod_max = 5
        # add schema currently used by potree (version 1.5RC)
        Session.add_output_schema(
            table, column, 0.01, 0.01, 0.01,
            offset_x, offset_y, offset_z, srid, potree_schema
        )
        cache_file = (
            "{0}_{1}_{2}_{3}_{4}.hcy".format(
                lpsession.table,
                lpsession.column,
                lod_min,
                lod_max,
                '_'.join(str(e) for e in bbox)
            )
        )
        pending("Building greyhound hierarchy")
        new_hcy = greyhound.build_hierarchy_from_pg(
            lpsession, lod_min, lod_max, bbox
        )
        greyhound.write_in_cache(new_hcy, cache_file)
        ok()
        create_potree_page(str(work_dir.resolve()), server_url, table, column)

    if usewith == 'cesium':
        pending("Building 3Dtiles tileset")
        hcy = threedtiles.build_hierarchy_from_pg(
            lpsession, server_url, bbox
        )

        tileset = os.path.join(str(work_dir.resolve()), 'tileset-{}.{}.json'.format(table, column))

        with io.open(tileset, 'wb') as out:
            out.write(hcy.encode())
        ok()
        create_cesium_page(str(work_dir.resolve()), table, column)
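
Both _load variants choose PATCH scales from the CRS type and take the offsets as the midpoint of each axis of the PDAL summary bounds. A condensed sketch of that selection with an illustrative summary dict (same layout as the pdal info --summary output used above):

# illustrative summary bounds, in projected metres
summary = {'bounds': {'X': {'min': 643000.0, 'max': 645000.0},
                      'Y': {'min': 6860000.0, 'max': 6862000.0},
                      'Z': {'min': 50.0, 'max': 120.0}}}
is_geographic = False  # result of the is_latlong()/is_geographic check

scales = (1e-6, 1e-6, 1e-2) if is_geographic else (0.01, 0.01, 0.01)
offsets = tuple(
    summary['bounds'][axis]['min'] +
    (summary['bounds'][axis]['max'] - summary['bounds'][axis]['min']) / 2
    for axis in ('X', 'Y', 'Z'))
print(scales, offsets)  # offsets are the bounding-box mid-points per axis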
Example #13
def create_area_def(area_id,
                    projection,
                    width=None,
                    height=None,
                    area_extent=None,
                    shape=None,
                    upper_left_extent=None,
                    center=None,
                    resolution=None,
                    radius=None,
                    units=None,
                    **kwargs):
    """Takes data the user knows and tries to make an area definition from what can be found.

    Parameters
    ----------
    area_id : str
        ID of area
    projection : dict or str
        Projection parameters as a proj4_dict or proj4_string
    description : str, optional
        Description/name of area. Defaults to area_id
    proj_id : str, optional
        ID of projection (deprecated)
    units : str, optional
        Units that provided arguments should be interpreted as. This can be
        one of 'deg', 'degrees', 'rad', 'radians', 'meters', 'metres', and any
        parameter supported by the
        `cs2cs -lu <https://proj4.org/apps/cs2cs.html#cmdoption-cs2cs-lu>`_
        command. Units are determined in the following priority:

        1. units expressed with each variable through a DataArray's attrs attribute.
        2. units passed to ``units``
        3. units used in ``projection``
        4. meters

    width : str, optional
        Number of pixels in the x direction
    height : str, optional
        Number of pixels in the y direction
    area_extent : list, optional
        Area extent as a list (lower_left_x, lower_left_y, upper_right_x, upper_right_y)
    shape : list, optional
        Number of pixels in the y and x direction (height, width)
    upper_left_extent : list, optional
        Upper left corner of upper left pixel (x, y)
    center : list, optional
        Center of projection (x, y)
    resolution : list or float, optional
        Size of pixels: (dx, dy)
    radius : list or float, optional
        Length from the center to the edges of the projection (dx, dy)
    rotation: float, optional
        rotation in degrees or radians (negative is cw)
    nprocs : int, optional
        Number of processor cores to be used
    lons : numpy array, optional
        Grid lons
    lats : numpy array, optional
        Grid lats
    optimize_projection:
        Whether the projection parameters have to be optimized for a DynamicAreaDefinition.

    Returns
    -------
    AreaDefinition or DynamicAreaDefinition : AreaDefinition or DynamicAreaDefinition
        If shape and area_extent are found, an AreaDefinition object is returned.
        If only shape or area_extent can be found, a DynamicAreaDefinition object is returned

    Raises
    ------
    ValueError:
        If neither shape nor area_extent could be found

    Notes
    -----
    * ``resolution`` and ``radius`` can be specified with one value if dx == dy
    * If ``resolution`` and ``radius`` are provided as angles, center must be given or findable. In such a case,
      they represent [projection x distance from center[0] to center[0]+dx, projection y distance from center[1] to
      center[1]+dy]
    """
    from pyproj import Proj
    description = kwargs.pop('description', area_id)
    proj_id = kwargs.pop('proj_id', None)

    # Get a proj4_dict from either a proj4_dict or a proj4_string.
    proj_dict = _get_proj_data(projection)
    try:
        p = Proj(proj_dict, preserve_units=True)
    except RuntimeError:
        return _make_area(area_id, description, proj_id, proj_dict, shape,
                          area_extent, **kwargs)

    # If no units are provided, try to get units used in proj_dict. If still none are provided, use meters.
    if units is None:
        units = proj_dict.get('units',
                              'm' if not p.is_latlong() else 'degrees')

    # Allow height and width to be provided for more consistency across functions in pyresample.
    if height is not None or width is not None:
        shape = _validate_variable(shape, (height, width), 'shape',
                                   ['height', 'width'])

    # Makes sure list-like objects are list-like, have the right shape, and contain only numbers.
    center = _verify_list('center', center, 2)
    radius = _verify_list('radius', radius, 2)
    upper_left_extent = _verify_list('upper_left_extent', upper_left_extent, 2)
    resolution = _verify_list('resolution', resolution, 2)
    shape = _verify_list('shape', shape, 2)
    area_extent = _verify_list('area_extent', area_extent, 4)

    # Converts from lat/lon to projection coordinates (x,y) if not in projection coordinates. Returns tuples.
    center = _convert_units(center, 'center', units, p, proj_dict)
    upper_left_extent = _convert_units(upper_left_extent, 'upper_left_extent',
                                       units, p, proj_dict)
    if area_extent is not None:
        # convert area extent, pass as (X, Y)
        area_extent_ll = area_extent[:2]
        area_extent_ur = area_extent[2:]
        area_extent_ll = _convert_units(area_extent_ll, 'area_extent', units,
                                        p, proj_dict)
        area_extent_ur = _convert_units(area_extent_ur, 'area_extent', units,
                                        p, proj_dict)
        area_extent = area_extent_ll + area_extent_ur
        kwargs['rotation'] = _convert_rotation(kwargs.get('rotation'), units)

    # Fills in missing information to attempt to create an area definition.
    if area_extent is None or shape is None:
        area_extent, shape = _extrapolate_information(area_extent, shape,
                                                      center, radius,
                                                      resolution,
                                                      upper_left_extent, units,
                                                      p, proj_dict)
    return _make_area(area_id, description, proj_id, proj_dict, shape,
                      area_extent, **kwargs)
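
A small usage sketch for create_area_def based on the docstring above (the area id, projection dict and grid numbers are illustrative, not taken from any real configuration):

# a 1000 m Lambert azimuthal grid centred on 10E, 60N
area = create_area_def(
    'laea_example',
    {'proj': 'laea', 'lat_0': 60, 'lon_0': 10, 'units': 'm'},
    center=(0, 0),      # projection coordinates of the grid centre
    resolution=1000,    # pixel size (dx == dy, so a single value is enough)
    shape=(425, 425))   # (height, width)
print(area)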
Example #14
def main(raster, outputdir, verbose):
    """ Convert a raster file (e.g. GeoTIFF) into an HDF5 file.

        RASTER: Path to raster file to convert to hdf5.

        The HDF5 file has the following datasets:

            - Raster: (original image data)

            - Latitude: (vector or matrix of pixel latitudes)

            - Longitude: (vector or matrix of pixel longitudes)

        And the following attributes:

            - affine: The affine transformation of the raster

            - Various projection information.
    """

    if verbose:
        print("Opening raster ...")

    # Read raster bands directly to Numpy arrays.
    # Much of this is from:
    #   http://gis.stackexchange.com/questions/129847/
    #   obtain-coordinates-and-corresponding-pixel-values-from-geotiff-using
    #   -python-gdal
    with rasterio.open(os.path.expanduser(raster)) as f:
        T0 = f.affine  # upper-left pixel corner affine transform
        crs = f.crs
        p1 = Proj(crs)
        I = f.read()
        nanvals = f.get_nodatavals()

    # Make sure rasterio is always giving us a 3D array
    assert (I.ndim == 3)

    # This only works on lat-lon projections for now
    if not p1.is_latlong():
        print("Error: This only works on spherical projections for now (YAGNI"
              " you know)...")
        exit(1)

    if verbose:
        print("Extracting coordinate sytem ...")

    # Get affine transform for pixel centres
    T1 = T0 * Affine.translation(0.5, 0.5)

    # Just find lat/lons of axis if there is no rotation/shearing
    # https://en.wikipedia.org/wiki/Transformation_matrix#Affine_transformations
    if (T1[1] == 0) and (T1[3] == 0):
        lons = T1[2] + np.arange(I.shape[2]) * T1[0]
        lats = T1[5] + np.arange(I.shape[1]) * T1[4]

    # Else, find lat/lons of every pixel!
    else:
        print("Error: Not yet tested... or even implemented properly!")
        exit(1)

        # Need to apply affine transformation to all pixel coords
        cls, rws = np.meshgrid(np.arange(I.shape[2]), np.arange(I.shape[1]))

        # Convert pixel row/column index (from 0) to lat/lon at centre
        rc2ll = lambda r, c: (c, r) * T1

        # All eastings and northings (there's probably a better way to do this)
        lons, lats = np.vectorize(rc2ll, otypes=[np.float, np.float])(rws, cls)

    # Permute layers to be more like a standard image, i.e. (band, lon, lat) ->
    #   (lon, lat, band)
    I = (I.transpose([2, 1, 0]))[:, ::-1]
    lats = lats[::-1]

    # Mask out NaN vals if they exist
    if nanvals is not None:
        for v in nanvals:
            if v is not None:
                if verbose:
                    print("Writing missing values")
                I[I == v] = np.nan

    # Now write the hdf5
    if verbose:
        print("Writing HDF5 file ...")

    file_stump = os.path.basename(raster).split('.')[-2]
    hdf5name = os.path.join(outputdir, file_stump + ".hdf5")
    with h5py.File(hdf5name, 'w') as f:
        drast = f.create_dataset("Raster", I.shape, dtype=I.dtype, data=I)
        drast.attrs['affine'] = T1
        for k, v in crs.items():
            drast.attrs[k] = v
        f.create_dataset("Latitude", lats.shape, dtype=float, data=lats)
        f.create_dataset("Longitude", lons.shape, dtype=float, data=lons)

    if verbose:
        print("Done!")
Example #15
    def read(cls,
             infile,
             yxzAxes=[0, 1, None],
             bandNames=None,
             time=None,
             preprocessing=None,
             preprocessingArgs=None,
             preprocessingKwargs=None,
             attrs=None,
             sensor='raster',
             crs='4326'):
        cls.crs = {'init': 'epsg:{}'.format(crs)}
        cls.sensor = sensor

        ds = gdal.Open(infile, GA_ReadOnly)

        if ds.GetDriver().ShortName == 'GTiff':
            nBands = ds.RasterCount
            yDim = ds.RasterYSize
            xDim = ds.RasterXSize
            dims = (yDim, xDim)

            srs = osr.SpatialReference()
            srs.ImportFromWkt(ds.GetProjection())

            projStr = srs.ExportToProj4()
            proj = Proj(projStr)

            mask = np.ones([yDim, xDim])

            if type(bandNames) != list:
                bandNames = []

            if len(bandNames) < nBands:
                bandNames = [
                    bandNames[b] if b < len(bandNames) else 'b{}'.format(b + 1)
                    for b in range(nBands)
                ]

            elif len(bandNames) > nBands:
                bandNames = bandNames[:nBands]

            else:
                pass

            args = [[ds, band] for band in range(1, nBands + 1)]

            dataarr = list(
                map(lambda x: cls._readBand(x, preprocessing, preprocessingArgs),
                    args))
            dataarr.append(mask)
            bandNames.append('mask')

            dataarr = utils.formatDataarr(dataarr)

            west, xres, xskew, north, yskew, yres = ds.GetGeoTransform()
            east = west + (xDim * xres)
            south = north + (yDim * yres)

            extent = (west, south, east, north)

            lons, lats = cls.geoGrid(extent,
                                     dims,
                                     projStr,
                                     wgsBounds=proj.is_latlong())

            if time:
                if type(time) == str:
                    dt = datetime.datetime.strptime(time, '%Y-%m-%d')
                elif type(time) == datetime.datetime:
                    dt = time
                else:
                    raise ValueError(
                        'Time either needs to be a datetime object or string in format of YYYY-MM-DD'
                    )
            else:
                dt = datetime.datetime(1970, 1, 1, 0, 0, 0, 0)

        # elif ds.GetDriver().ShortName == 'HDF5':
        #     subdata = ds.GetSubDatasets()

        else:
            raise NotImplementedError(
                'Input dataset was not able to be read in')

        coords = {
            'z': range(dataarr.shape[2]),
            'lat': lats[:, 0],
            'lon': lons[0, :],
            'band': (bandNames),
            'time': ([np.datetime64(dt)])
        }

        dims = ('lat', 'lon', 'z', 'band', 'time')

        attrs = {
            'projStr': projStr,
            'bandNames': tuple(bandNames),
            'extent': extent,
            'date': dt,
            'resolution': (yres, xres)
        }

        ds = xr.DataArray(dataarr,
                          coords=coords,
                          dims=dims,
                          attrs=attrs,
                          name=cls.sensor)

        return ds
Example #16
class PROJ4Transform(BaseTransform):
    glsl_map = None

    glsl_imap = None

    # Flags used to describe the transformation. Subclasses should define each
    # as True or False.
    # (usually used for making optimization decisions)

    # If True, then for any 3 colinear points, the
    # transformed points will also be colinear.
    Linear = False

    # The transformation's effect on one axis is independent
    # of the input position along any other axis.
    Orthogonal = False

    # If True, then the distance between two points is the
    # same as the distance between the transformed points.
    NonScaling = False

    # Scale factors are applied equally to all axes.
    Isometric = False

    def __init__(self, proj4_str, inverse=False):
        self.proj4_str = proj4_str
        self.proj = Proj(proj4_str)
        self._proj4_inverse = inverse
        proj_dict = self.create_proj_dict(proj4_str)

        # Get the specific functions for this projection
        proj_funcs = PROJECTIONS[proj_dict['proj']]
        # set default function parameters
        proj_init = proj_funcs[0]
        proj_args = proj_init(proj_dict)

        if proj_args.get('over'):
            proj_args['over'] = ''
        else:
            proj_args['over'] = 'lambda = adjlon(lambda);'

        if proj_dict['a'] == proj_dict['b']:
            # spheroid
            self.glsl_map = proj_funcs[2]
            self.glsl_imap = proj_funcs[4]
            if self.glsl_map is None or self.glsl_imap is None:
                raise ValueError(
                    "Spheroid transform for {} not implemented yet".format(
                        proj_dict['proj']))
        else:
            # ellipsoid
            self.glsl_map = proj_funcs[1]
            self.glsl_imap = proj_funcs[3]
            if self.glsl_map is None or self.glsl_imap is None:
                raise ValueError(
                    "Ellipsoid transform for {} not implemented yet".format(
                        proj_dict['proj']))

        self.glsl_map = self.glsl_map.format(**proj_args)
        self.glsl_imap = self.glsl_imap.format(**proj_args)

        if self._proj4_inverse:
            self.glsl_map, self.glsl_imap = self.glsl_imap, self.glsl_map

        super(PROJ4Transform, self).__init__()

        # Add common definitions and functions
        for d in COMMON_DEFINITIONS + (pj_tsfn, pj_phi2, hypot):
            self._shader_map._add_dep(d)
            self._shader_imap._add_dep(d)

        if proj_args['over']:
            self._shader_map._add_dep(adjlon_func)
            self._shader_imap._add_dep(adjlon_func)

        # Add special handling of possible infinity lon/lat values
        self._shader_map['pre'] = """
    if (abs(pos.x) > 1e30 || abs(pos.y) > 1e30)
        return vec4(1. / 0., 1. / 0., pos.z, pos.w);
        """

        # print(self._shader_map.compile())

    def create_proj_dict(self, proj_str):
        d = tuple(x.replace("+", "").split("=") for x in proj_str.split(" "))
        d = dict((x[0], x[1] if len(x) > 1 else 'true') for x in d)

        # convert numerical parameters to floats
        for k in d.keys():
            try:
                d[k] = float(d[k])
            except ValueError:
                pass

        d['proj4_str'] = proj_str

        # if they haven't provided a radius then they must have provided a datum or ellps
        if 'a' not in d:
            if 'datum' not in d:
                d.setdefault('ellps', d.setdefault('datum', 'WGS84'))
            else:
                d.setdefault('ellps', d.get('datum'))

        # if they provided an ellps/datum fill in information we know about it
        if d.get('ellps') is not None:
            # get information on the ellps being used
            ellps_info = pj_ellps[d['ellps']]
            for k in ['a', 'b', 'rf']:
                if k in ellps_info:
                    d.setdefault(k, ellps_info[k])

        # derive b, es, f, e
        if 'rf' not in d:
            if 'f' in d:
                d['rf'] = 1. / d['f']
            elif d['a'] == d['b']:
                d['rf'] = 0.
            else:
                d['rf'] = d['a'] / (d['a'] - d['b'])
        if 'f' not in d:
            if d['rf']:
                d['f'] = 1. / d['rf']
            else:
                d['f'] = 0.
        if 'b' not in d:
            # a and rf must be in the dict
            d['b'] = d['a'] * (1. - d['f'])
        if 'es' not in d:
            if 'e' in d:
                d['es'] = d['e']**2
            else:
                d['es'] = 2 * d['f'] - d['f']**2
        if 'e' not in d:
            d['e'] = d['es']**0.5

        return d

    @arg_to_vec4
    def map(self, coords):
        """Map coordinates

        Parameters
        ----------
        coords : array-like
            Coordinates to map.
        """
        m = np.empty(coords.shape)
        if self.proj.is_latlong():
            m[:, 0] = coords[:, 0]
            m[:, 1] = coords[:, 1]
        else:
            m[:, 0], m[:, 1] = self.proj(coords[:, 0],
                                         coords[:, 1],
                                         inverse=self._proj4_inverse)
        m[:, 2:] = coords[:, 2:]
        return m

    @arg_to_vec4
    def imap(self, coords):
        """Inverse map coordinates

        Parameters
        ----------
        coords : array-like
            Coordinates to inverse map.
        """
        m = np.empty(coords.shape)
        if self.proj.is_latlong():
            m[:, 0] = coords[:, 0]
            m[:, 1] = coords[:, 1]
        else:
            m[:, 0], m[:, 1] = self.proj(coords[:, 0],
                                         coords[:, 1],
                                         inverse=not self._proj4_inverse)
        m[:, 2:] = coords[:, 2:]
        return m

    def __repr__(self):
        return "<%s:%s at 0x%x>" % (self.__class__.__name__, self.proj4_str,
                                    id(self))
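
create_proj_dict derives the remaining ellipsoid parameters from whichever of a, b, rf and f are present. A quick numeric check of those relations with WGS84 constants (a and rf as in the PROJ ellipsoid table):

a, rf = 6378137.0, 298.257223563
f = 1.0 / rf         # flattening
b = a * (1.0 - f)    # semi-minor axis -> ~6356752.314
es = 2 * f - f ** 2  # first eccentricity squared -> ~0.00669438
e = es ** 0.5        # first eccentricity -> ~0.0818192
print(b, es, e)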
Example #17
def main():
    parser = get_parser()
    args = parser.parse_args()

    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    logging.basicConfig(level=levels[min(3, args.verbosity)])

    if args.output_filename is None:
        args.output_filename = [x[:-3] + "png" for x in args.input_tiff]
    else:
        assert len(args.output_filename) == len(
            args.input_tiff
        ), "Output filenames must be equal to number of input tiffs"

    if not (args.add_borders or args.add_coastlines or args.add_grid
            or args.add_rivers):
        LOG.error(
            "Please specify one of the '--add-X' options to modify the image")
        return -1

    for input_tiff, output_filename in zip(args.input_tiff,
                                           args.output_filename):
        LOG.info("Creating {} from {}".format(output_filename, input_tiff))
        gtiff = gdal.Open(input_tiff)
        proj4_str = osr.SpatialReference(gtiff.GetProjection()).ExportToProj4()
        ul_x, res_x, _, ul_y, _, res_y = gtiff.GetGeoTransform()
        half_pixel_x = res_x / 2.
        half_pixel_y = res_y / 2.
        area_extent = (
            ul_x - half_pixel_x,  # lower-left X
            ul_y + res_y * gtiff.RasterYSize - half_pixel_y,  # lower-left Y
            ul_x + res_x * gtiff.RasterXSize + half_pixel_x,  # upper-right X
            ul_y + half_pixel_y,  # upper-right Y
        )
        p = Proj(proj4_str)
        if p.is_latlong():
            # convert lat/lons to radians
            area_extent = p(area_extent[0], area_extent[1]) + p(
                area_extent[2], area_extent[3])
        img = Image.open(input_tiff).convert('RGB')
        area_def = (proj4_str, area_extent)

        cw = ContourWriter(args.shapes_dir)

        if args.add_coastlines:
            outline = args.coastlines_outline[0] if len(
                args.coastlines_outline) == 1 else tuple(
                    int(x) for x in args.coastlines_outline)
            if args.coastlines_fill:
                fill = args.coastlines_fill[0] if len(
                    args.coastlines_fill) == 1 else tuple(
                        int(x) for x in args.coastlines_fill)
            else:
                fill = None
            cw.add_coastlines(img,
                              area_def,
                              resolution=args.coastlines_resolution,
                              level=args.coastlines_level,
                              outline=outline,
                              fill=fill)

        if args.add_rivers:
            outline = args.rivers_outline[0] if len(
                args.rivers_outline) == 1 else tuple(
                    int(x) for x in args.rivers_outline)
            cw.add_rivers(img,
                          area_def,
                          resolution=args.rivers_resolution,
                          level=args.rivers_level,
                          outline=outline)

        if args.add_borders:
            outline = args.borders_outline[0] if len(
                args.borders_outline) == 1 else tuple(
                    int(x) for x in args.borders_outline)
            cw.add_borders(img,
                           area_def,
                           resolution=args.borders_resolution,
                           level=args.borders_level,
                           outline=outline)

        if args.add_grid:
            try:
                font = ImageFont.truetype(args.grid_font, args.grid_text_size)
            except IOError:
                font_path = get_resource_filename('polar2grid.fonts',
                                                  args.grid_font)
                if not os.path.exists(font_path):
                    raise ValueError(
                        "Font path does not exist: {}".format(font_path))
                font = ImageFont.truetype(font_path, args.grid_text_size)

            outline = args.grid_outline[0] if len(
                args.grid_outline) == 1 else tuple(
                    int(x) for x in args.grid_outline)
            minor_outline = args.grid_minor_outline[0] if len(
                args.grid_minor_outline) == 1 else tuple(
                    int(x) for x in args.grid_minor_outline)
            fill = args.grid_fill[0] if len(args.grid_fill) == 1 else tuple(
                int(x) for x in args.grid_fill)
            cw.add_grid(img,
                        area_def,
                        args.grid_D,
                        args.grid_d,
                        font,
                        fill=fill,
                        outline=outline,
                        minor_outline=minor_outline,
                        lon_placement=args.grid_lon_placement,
                        lat_placement=args.grid_lat_placement)

        img.save(output_filename)