Example No. 1
from shapely import speedups


def init():
    # enable Shapely speedups, if possible
    if speedups.available:
        speedups.enable()
    print('IN;')
Example No. 2
def adjust_channel_depth(grd,shpfile,lcmax=500.):
    """
    Adjusts the depths of a suntans grid object using a line shapefile.

    The shapefile must have an attribute called "contour"
    """
    import numpy as np
    from shapely import geometry, speedups
    from maptools import readShpPointLine

    if speedups.available:
        speedups.enable()

    print('Adjusting depths in channel regions with a shapefile...')
        
    # Load the shapefile
    xyline,contour = readShpPointLine(shpfile,FIELDNAME='contour')
    
    # Load all of the points into shapely type geometry
    
    # Distance method won't work with numpy array
    #P = geometry.asPoint(xy)
    
    P = [geometry.Point(grd.xv[i],grd.yv[i]) for i in range(grd.Nc)]
    
    L=[]
    for ll in xyline:
        L.append(geometry.asLineString(ll))
     
    nlines = len(L)
    weight_all = np.zeros((grd.Nc,nlines))
    for n in range(nlines):
        print('Calculating distance from line %d...' % n)
        
        dist = [L[n].distance(P[i]) for i in range(grd.Nc)]
        dist = np.array(dist)

        # Calculate the weight from the distance
        weight = -dist/lcmax+1.
        weight[dist>=lcmax]=0.
        
        weight_all[:,n] = weight

    # Now go through and re-calculate the depths
    dv = grd.dv*(1 - weight_all.sum(axis=-1))
    for n in range(nlines):
        dv += weight_all[:,n]*contour[n]

    grd.dv = dv
    return grd
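A minimal numeric sketch of the linear distance weighting used above (the distances are made up): the weight falls from 1 on the line to 0 at lcmax, so depths blend smoothly from the contour value back to the original grid depth.

import numpy as np

lcmax = 500.
dist = np.array([0., 125., 250., 500., 800.])
weight = -dist/lcmax + 1.     # linear ramp: 1 at the line, 0 at lcmax
weight[dist >= lcmax] = 0.    # clip everything beyond lcmax
print(weight)                 # [1.   0.75 0.5  0.   0.  ]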
Example No. 3
 def __init__(self, in_dir):
     # path to CSV directory
     self.in_dir = in_dir
     self.file_structure = {
         'AreaCode': {'class_id': 101, 'files': ['AreaCode.csv', 'LocalDataAreaCode.csv']},
         'County': {'class_id': 2, 'files': ['CountySynonyms.csv', 'LocalDataCounty.csv', 'County.csv']},
         'ZipCode': {'class_id': 100, 'files': ['ZipCode.csv', 'LocalDataZipCode.csv']},
         'CMSA': {'class_id': 102, 'files': ['CMSA.csv', 'CMSASynonyms.csv', 'LocalDataCMSA.csv']},
         'State': {'class_id': 1, 'files': ['State.csv', 'StateSynonyms.csv', 'LocalDataState.csv']},
         'City': {'class_id': 10, 'files': ['City.csv', 'CitySynonyms.csv', 'LocalDataCity.csv']},
         'Congress': {'class_id': 103, 'files': ['Congress.csv', 'CongressSynonyms.csv', 'LocalDataCongress.csv']},
         'Country': {'class_id': 0, 'files': ['Country.csv', 'CountrySynonyms.csv', 'LocalDataCountry.csv']},
     }
     self._cursor = self._create_cursor(in_dir)
     speedups.enable()
Example No. 4
def append_nuts3_region(dfin, shapefile_path):
    """ Take a pandas dataframe with lat and long columns and a shapefile.
    Convert coordinates to Shapely points to do a point-in-polygon check
    for each point. Append name of nuts3 region corresponding to
    point-in-polygon to the dataframe.
    This is extremely slow. Needs to be optimized with Shapely boundary box.
    """
    df = dfin.copy()
    df['nuts3name'] = np.nan
    df['nuts3id'] = np.nan
    fc = fiona.open(shapefile_path)
    speedups.enable()
    for feature in fc:
        prepared_shape = prep(asShape(feature['geometry']))
        for index, row in df.iterrows():
            point = Point(row.loc['long'], row.loc['lat'])
            if prepared_shape.contains(point):
                df.loc[index, 'nuts3name'] = feature['properties']['NUTS315NM']
                df.loc[index, 'nuts3id'] = feature['properties']['NUTS315CD']
    return df
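The docstring's suggested bounding-box optimisation can be sketched as follows; append_nuts3_region_bbox is a hypothetical variant, assuming the same df ('long'/'lat' columns) and fiona collection. Points are built once, and a cheap bounds comparison weeds out most rows before the expensive contains() test.

from shapely.geometry import Point, shape
from shapely.prepared import prep

def append_nuts3_region_bbox(df, fc):
    pts = [Point(x, y) for x, y in zip(df['long'], df['lat'])]
    for feature in fc:
        geom = shape(feature['geometry'])
        minx, miny, maxx, maxy = geom.bounds
        prepared_shape = prep(geom)
        for i, pt in enumerate(pts):
            # bbox pre-filter, then the full point-in-polygon test
            if minx <= pt.x <= maxx and miny <= pt.y <= maxy \
                    and prepared_shape.contains(pt):
                df.loc[df.index[i], 'nuts3name'] = feature['properties']['NUTS315NM']
                df.loc[df.index[i], 'nuts3id'] = feature['properties']['NUTS315CD']
    return df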
Example No. 5
    def __init__(self, info_queue: Queue, output_queue: Queue, counter: Counter, exit_event):
        """

        :param info_queue:
        :param multiprocessing.queue.Queue output_queue:
        :param Counter counter: Ze Counter class for counting counts
        """
        super().__init__()
        self.queue = info_queue
        self.output = output_queue
        self.counter = counter
        self.exit_event = exit_event

        self.logging = logging.getLogger(self.name)
        self.total = info_queue.qsize()
        self.t = 2

        if speedups.available:
            speedups.enable()
Example No. 6
import uuid

from collections import OrderedDict

import pandas as pd
import six

from shapely.geometry import (
    box,
    Point,
)

from shapely import speedups as shapely_speedups

if shapely_speedups.available:
    shapely_speedups.enable()

from rtree.core import RTreeError

from ..utils.data import get_dataframe
from ..utils.exceptions import OasisException
from ..utils.log import oasis_log
from ..utils.peril import (
    DEFAULT_RTREE_INDEX_PROPS,
    PerilAreasIndex,
)
from ..utils.status import (
    KEYS_STATUS_FAIL,
    KEYS_STATUS_NOMATCH,
Example No. 7
def subset_CS2(cs2file,ccfile,cc=0,version='V001'):
    '''
    subset_cs2
        Takes the CryoSat-2 L2 output from IDL, and the corner coordinate file
        produced by pyoval. The CC file is cut to the size of the CryoSat-2 
        file. Furthermore, if you give it both an AWI and an ESA CS2 file, it
        makes sure that both have the same points and same extent (where
        differences exist, the ESA CS2 file is used as the standard, since
        the CryoVal project is designed to examine the ESA CS2 data).
        module: subset_cs2
            parameters:
                cs2file:    the .csv file with the CS2 L2 Baseline C data.
                            this can be an ESA file or AWI CS2 file, or both.
                ccfile:     the .dat file of corner coordinates
                cc:         flag should be set to 1 if you wish to output the 
                            CS2ORB corner coordinate file that is now sized to
                            the extent of the CS2 file.
                version:    processing version code string.
    Author: Justin Beckers
    Author Contact: [email protected]
    
    Version: 1.0
    Version Notes:
        1.0: Initial release.
        
    Release Date: 2016/03/17
    
    Usage Notes: 
        Case 1: Using Main 
            From commandline/terminal, can simply run:
                >>>python subset_CS2.py
            This executes the program with the parameters set on 
            lines 327 - 349. Modify this section in the script file text to 
            meet your requirements
                if __name__=='__main__':
    '''


    #Import python modules.
    import os
    import numpy as np
    from shapely.geometry import Polygon,Point
    from rtree import index
    from shapely import speedups
    speedups.enable()
    
    #A quick converter to convert longitude from -180/180 to 0/360.
    cv = lambda x: float(x)+360.0 if float(x) < 0.0 else float(x)
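    #deconvert_lon is used below but not defined in this snippet; a minimal
    #sketch of the assumed inverse (0/360 back to -180/180), written with
    #np.where so it handles scalars and numpy arrays alike:
    def deconvert_lon(lon):
        return np.where(lon > 180.0, lon - 360.0, lon)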
    
    #Load in the Cryosat-2 Corner Coordinates file. Convert the Longitude to 
    #0-360
    ccdata = np.genfromtxt(ccfile,converters={6:cv,8:cv,10:cv,12:cv,14:cv})
    
    #Load the Cryosat-2 L2 Data from AWI or ESA. Convert the Coordinates to
    #0-360
    if os.path.basename(cs2file).startswith('CS2AWI'): #If it is an AWI file
        cs2data=np.genfromtxt(cs2file,delimiter=',',skip_header=1,
        missing_values='nan',filling_values=np.nan,converters={1:cv})
    
    else: # It is assumed to be an ESA CS L2 file
        cs2data=np.genfromtxt(cs2file,delimiter=',',skip_header=1,
        missing_values='nan',filling_values=np.nan,converters={10:cv})

    #The AWI CS L2 data contains NaNs in the Lon,Lat, and Time fields that 
    #result from the L1B missing waveforms. These NaN values cause problems for
    #the polygon generation later on, and we don't really need them as they are
    #not in the ESA CS2 output from IDL (already cut out) so they are cut.
    foo=[] #A temporary variable
    #If any of the longitude, latitude, or time values are NaN, keep the row index.
    for j in np.arange(len(cs2data)):
        if (np.isnan(cs2data[j,1]) or np.isnan(cs2data[j,2]) or 
            np.isnan(cs2data[j,0])): 
            foo.append(j)
    cs2data=np.delete(cs2data,foo,axis=0) #Cut the bad rows.
   
    #Calculating a polygon over the Prime Meridian or the International Date 
    #Line can be troublesome (polygons sometimes wrap around the earth in the
    #wrong direction, depending on your coordinate system). Here we check
    #if we cross either the Prime Meridian or the I.D.L., and either convert to
    #0/360 coordinate system (already done), or back to -180/180
    converted=1 #Flag to indicate if we needed to convert the longitudes.
    if os.path.basename(cs2file).startswith('CS2AWI'): #If AWI CS2 file:
        if (np.any(cs2data[:,1] >=359.)==1 and np.any(cs2data[:,1] <=1.)==1 and
            np.any(np.logical_and(179.<cs2data[:,10],cs2data[:,10]<181.))==0):
            print "Crosses Prime Meridian but not IDL. Coordinates will be " 
            print "processed in -180/180 system."
            for i in np.arange(len(cs2data)):
                cs2data[i,1]=deconvert_lon(cs2data[i,1])
            for i in np.arange(len(ccdata)): 
                ccdata[i][6]=deconvert_lon(ccdata[i][6])
                ccdata[i][8]=deconvert_lon(ccdata[i][8])
                ccdata[i][10]=deconvert_lon(ccdata[i][10])
                ccdata[i][12]=deconvert_lon(ccdata[i][12])
                ccdata[i][14]=deconvert_lon(ccdata[i][14])
            converted=0
        elif (np.any(cs2data[:,1] >=359.)==0 and np.any(cs2data[:,1] <=1.)==0 
                and np.any(np.logical_and(
                179.<cs2data[:,10],cs2data[:,10]<181.))==1):
            print "Does not cross prime meridian but crosses IDL. Coordinates " 
            print "will be processed in 0/360 system."
            converted=1
        elif (np.any(cs2data[:,1] >=359.)==1 and np.any(cs2data[:,1] <=1.)==1 
                and np.any(np.logical_and(179.<cs2data[:,10],
                cs2data[:,10]<181.))==1):
            print "Crosses both the Prime Meridian and the IDL. Coordinates " 
            print "will be processed in 0/360 system."
            converted=1
        elif (np.any(cs2data[:,1] >=359.)==0 and np.any(cs2data[:,1] <=1.)==0 
                and np.any(np.logical_and(179.<cs2data[:,10],
                    cs2data[:,10]<181.))==0):
            print "Does not cross the IDL or the Prime Meridian. Coordinates "
            print "will be processed in 0/360 system."
            converted=1
    else: #If ESA CS2 file
        if (np.any(cs2data[:,10] >=359.) and np.any(cs2data[:,10] <=1.) and
                np.any(np.logical_and(179.<cs2data[:,10],
                cs2data[:,10]<181.))==0):
            print "Crosses Prime Meridian but not IDL. Coordinates will be " 
            print "processed in -180/180 system."
            for i in np.arange(len(cs2data)):
                cs2data[i,10]=deconvert_lon(cs2data[i,10]) 
            for i in np.arange(len(ccdata)):
                ccdata[i][6]=deconvert_lon(ccdata[i][6])
                ccdata[i][8]=deconvert_lon(ccdata[i][8])
                ccdata[i][10]=deconvert_lon(ccdata[i][10])
                ccdata[i][12]=deconvert_lon(ccdata[i][12])
                ccdata[i][14]=deconvert_lon(ccdata[i][14])
            converted=0
            pass
        elif (np.any(cs2data[:,10] >=359.)==0 and np.any(cs2data[:,10] <=1.)==0
                and np.any(np.logical_and(179.<cs2data[:,10],
                cs2data[:,10]<181.))==1):
            print "Does not cross prime meridian but crosses IDL. Coordinates "
            print "will be processed in 0/360 system."
            converted=1
        elif (np.any(cs2data[:,10] >=359.)==1 and np.any(cs2data[:,10] <=1.)==1
                and np.any(np.logical_and(179.<cs2data[:,10],
                cs2data[:,10]<181.))==1):
            print "Crosses both the Prime Meridian and the IDL. Coordinates "
            print "will be processed in 0/360 system."
            converted=1
        elif (np.any(cs2data[:,10] >=359.)==0 and np.any(cs2data[:,10] <=1.)==0 
                and np.any(np.logical_and(179.<cs2data[:,10],
                cs2data[:,10]<181.))==0):
            print "Does not cross the IDL or the Prime Meridian. Coordinates "
            print "will be processed in 0/360 system."
            converted=1
    
    #Setup some variables for later       
    n_records=len(ccdata)#Number of polygons
    idx=index.Index() #Setup a spatial index
    p=np.ndarray((n_records),dtype=object) #array to hold the polygons
    newpolys=[] #holds the index positions of the cc polygons
    newcs2=[] #holds the index positions of the cs2 data
    outpoly=[] #output polygon holder
    t=np.ndarray(0) # a temporary variable 
    nanline = np.empty(len(cs2data[0]))*np.nan
    
    #Fill the spatial index with the CS2 data
    if os.path.basename(cs2file).startswith('CS2AWI'): #If AWI CS2
        for i in np.arange(len(cs2data)):
            idx.insert(i,Point(cs2data[i,1],cs2data[i,2]).bounds)
    else: #If ESA CS2
        for i in np.arange(len(cs2data)):
            idx.insert(i,Point(cs2data[i,10],cs2data[i,9]).bounds)
    
    #Calculate the polygons
    n_polygon_points=5
    polygon_points = np.ndarray(shape=(n_polygon_points, 2), dtype=np.float64)        
    for i in np.arange(n_records):
        #self.cs2fp.lon_ur[i], self.cs2fp.lon_ul[i], self.cs2fp.lon_ll[i], self.cs2fp.lon_lr[i]]
        fp_x = [ccdata[i,8],ccdata[i,10],ccdata[i,14],ccdata[i,12],ccdata[i,8]]
        fp_y = [ccdata[i,9],ccdata[i,11],ccdata[i,15],ccdata[i,13],ccdata[i,9]]
        polygon_points[:,0]=fp_x[:] #Polygon X coordinates
        polygon_points[:,1]=fp_y[:] #Polygon Y coordinates
        p[i]=Polygon(polygon_points) #Builds the polygons

    #The work: go through each polygon, find the CS2 points that belong 
    #There should only be 1. #Intersection does not imply containment so need 
    #to test for intersection (grabs only the possible data), then test for 
    #containment.
    for i,poly in enumerate(p):
        for j in idx.intersection(poly.bounds): #Test for intersection
            if poly.area>0.01: #because polygon is crossing prime meridian but 
                #in the wrong direction. Let's transform the polygon 
                #coordinates back to -180/180 system, transform the points to 
                #-180/180 and test for containment of points.
                points=list(poly.exterior.coords)
                points_x,points_y=zip(*points)
                newx=[] #holds the retransformed points
                for k in np.arange(len(points_x)):
                    if points_x[k]>180.0:
                        newx.append(points_x[k]-360.0)
                    else:
                        newx.append(points_x[k])
                points_x=newx
                polygon_points[:,0]=points_x[:]
                polygon_points[:,1]=points_y[:]
                newpoly=Polygon(polygon_points)
                #Do the actual test for containment.
                if os.path.basename(cs2file).startswith('CS2AWI'):
                    if (newpoly.contains(Point(deconvert_lon(cs2data[j,1]),
                            cs2data[j,2]))):
                        newpolys.append(ccdata[i])
                        newcs2.append(cs2data[j])
                        outpoly.append(poly)
                        t=np.append(t,cs2data[j,0])
                    else:
                        newcs2.append(nanline)
                        newpolys.append(ccdata[i])
                        outpoly.append(poly)
                        pass
                else:
                    if (newpoly.contains(Point(deconvert_lon(cs2data[j,10]),
                            cs2data[j,9]))==1):
                        newpolys.append(ccdata[i])
                        newcs2.append(cs2data[j])
                        outpoly.append(poly)    
                    else:
                        pass
            else:
                if os.path.basename(cs2file).startswith('CS2AWI'):
                    if poly.contains(Point(cs2data[j,1],cs2data[j,2]))==1:
                        newpolys.append(ccdata[i])
                        newcs2.append(cs2data[j])
                        outpoly.append(poly)
                        t=np.append(t,cs2data[j,0])  
                    else:
                        pass #So if no points are found in the polygon, then we
                        #do not write out the polygon. This limits the output
                        #to the CS2 file extent. So if you wanted to keep each
                        #polygon, despite no CS2 data, you can do so here.
                else: #ESA CS2 file
                    if poly.contains(Point(cs2data[j,10],cs2data[j,9]))==1:
                        newpolys.append(ccdata[i])
                        newcs2.append(cs2data[j])
                        outpoly.append(poly)
                    else:
                        pass

    #Now we do some back conversion of lat/lon to -180/180 system
    if os.path.basename(cs2file).startswith('CS2AWI'):
        if converted==1:
            for i in np.arange(len(newcs2)):
                newcs2[i][1]=deconvert_lon(newcs2[i][1])
                newpolys[i][6]=deconvert_lon(newpolys[i][6])
                newpolys[i][8]=deconvert_lon(newpolys[i][8])
                newpolys[i][10]=deconvert_lon(newpolys[i][10])
                newpolys[i][12]=deconvert_lon(newpolys[i][12])
                newpolys[i][14]=deconvert_lon(newpolys[i][14])  
        else: #Was not converted out of -180/180 so we can leave it
            pass
    else:
        if converted ==1:
            for i in np.arange(len(newcs2)) :
                newcs2[i][10]=deconvert_lon(newcs2[i][10]) 
                newpolys[i][6]=deconvert_lon(newpolys[i][6])
                newpolys[i][8]=deconvert_lon(newpolys[i][8])
                newpolys[i][10]=deconvert_lon(newpolys[i][10])
                newpolys[i][12]=deconvert_lon(newpolys[i][12])
                newpolys[i][14]=deconvert_lon(newpolys[i][14]) 
        else:#Was not converted out of -180/180 so we can leave it
            pass
   
    #Print some information to the user. This is a useful check that the file
    #was correctly sized.
    print "Length of CCFile to start: ", len(ccdata)
    print "Length of CS2data: ",len(newcs2)," Length of CCs: ",len(newpolys)    
    print "Length of CCs == Length of CS2data: ",len(newcs2)==len(newpolys)
    print ""
    
    #Let's write the new corner coordinate file to CS2ORB_*.dat?
    if cc==1:#Yes let's write it
        if os.path.basename(cs2file).startswith('CS2AWI'):
            ccoutfile=os.path.join(os.path.dirname(ccfile),
                'CS2ORB_'+
                os.path.basename(ccfile).split('_')[2].zfill(6)+'_'+
                os.path.basename(cs2file).split('_')[2]+'_'+
                os.path.basename(cs2file).split('_')[3]+'_'+
                os.path.basename(cs2file).split('_')[4][:-4]+
                '_'+version+os.path.basename(ccfile)[-4:])    
        else:
            ccoutfile=os.path.join(os.path.dirname(ccfile),
                'CS2ORB_'+
                os.path.basename(ccfile).split('_')[2].zfill(6)+'_'+
                os.path.basename(cs2file).split('_')[2]+'_'+
                os.path.basename(cs2file).split('_')[3]+'_'+
                os.path.basename(cs2file).split('_')[4][:-4]+
                '_'+version+os.path.basename(ccfile)[-4:])
        with open(ccoutfile,'w') as ccout:
            np.savetxt(ccout,newpolys,delimiter=' ',fmt='%14.8f', newline='\n')
    
    #Setup the output CS2 file names.
    if os.path.basename(cs2file).startswith('CS2AWI'):
        csoutfile=os.path.join(os.path.dirname(ccfile),
            os.path.basename(cs2file)[:-14]+'_'+version+
            os.path.basename(cs2file)[-4:])
    else:          
        csoutfile=os.path.join(os.path.dirname(ccfile),
            os.path.basename(cs2file).split('_')[0]+'_'+
            os.path.basename(cs2file).split('_')[1].zfill(6)+'_'+
            os.path.basename(cs2file).split('_')[2]+'_'+
            os.path.basename(cs2file).split('_')[3]+'_'+
            os.path.basename(cs2file).split('_')[4][:-4]+
            '_'+version+os.path.basename(cs2file)[-4:])
    #Write the cs2 output data
    #Open the file, read in the header line   
    with open(cs2file) as r:
        header=r.readline()
    #Now write the output file, first writing in the header line.
    with open(csoutfile,'w') as csout:
        csout.write(header)
        np.savetxt(csout,newcs2,delimiter=',',fmt='%14.8f', newline='\n')
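A minimal usage sketch of subset_CS2 following the docstring's usage notes; both file names below are hypothetical placeholders.

if __name__ == '__main__':
    # cc=1 also writes the trimmed CS2ORB_*.dat corner coordinate file
    subset_CS2('CS2_ESA_L2_file.csv', 'corner_coordinates.dat',
               cc=1, version='V001')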
Example No. 8
def rank():
    import math
    import fiona
    import geojson
    import rtree
    import shapely.geometry as sg
    import shapely.ops as so
    from shapely import speedups as su

    su.enable()
    
    index = rtree.index.Index()
    house_properties = []
    with open("geo_value_clp.json", "r") as house_file:
        houses = geojson.loads(house_file.read())
        for ix, feature in enumerate(houses['features']):
            point = sg.shape(feature['geometry'])
            index.insert(id=ix, 
                         coordinates=(point.bounds))
            props = feature['properties']
            props['geom'] = point
            props['trees'] = 0              
            house_properties.append(props)
            if ix % 10000 == 0:
                print(ix)
    del houses
            
    with fiona.collection("sfutc_m.shp", "r") as tree_file:  
        tree_polys = [sg.shape(feature['geometry']) for feature in tree_file]
    
    for ix, poly in enumerate(tree_polys):
        if not poly.is_valid:            
            # try to fix invalidities
            poly = poly.buffer(0)
            poly = so.unary_union(poly)
            if not poly.is_valid:
                continue
        # let canopy 'radiate' on surrounding houses
        size_factor = math.sqrt(poly.area) / 3.14         
        #print size_factor
        bpoly = poly.buffer(50 + math.sqrt(size_factor), resolution=8)
        # first get rough estimate of houses
        for hit in index.intersection(bpoly.bounds):
            point = house_properties[hit]['geom']
            # check more thoroughly
            if bpoly.contains(point):
                house_properties[hit]['trees'] += size_factor 
        if ix % 500 == 0:
            print(ix)
    del index
    del tree_polys
            
    geo_features = []
    print('houses to convert: ' + str(len(house_properties)))
    for idx, house in enumerate(house_properties):
        if idx % 10000 == 0:
            print(idx)
        point = house['geom']
        geometry = geojson.Point((point.x, point.y))
        properties = {'addr': house['addr'], 
                      're': house['re'],
                      'rei': house['rei'],
                      'trs': house['trees']}
        gj_house = geojson.Feature(geometry=geometry, properties=properties)
        geo_features.append(gj_house)
    print('writing geojson')
    gj = geojson.FeatureCollection(geo_features)
    dump = geojson.dumps(gj)
    with open('geo_value_trees.json', 'w') as f:
        f.write(dump)
    print('done')
    del gj
    del geo_features
    del dump    
    return
Example No. 9
def create_pos_file(posfile,scalefile, xlims,ylims,dx,\
    geofile=None,ndmin=5, lcmax=2000.,r=1.05, scalefac=1.0):
    """
    Generates a gmsh background scale file (*.pos)
    
    If a geofile is specified the mesh is embedded
    """
    import numpy as np
    from shapely import geometry, speedups
    from maptools import readShpPointLine

    if speedups.available:
        speedups.enable()
        
    X,Y = np.meshgrid(np.arange(xlims[0],xlims[1],dx),np.arange(ylims[0],ylims[1],dx))
    xy = np.vstack((X.ravel(),Y.ravel())).T
    Np = xy.shape[0]
    nj,ni=X.shape
    # Load the scalefile
    xyscale,gridscale = readShpPointLine(scalefile,FIELDNAME='scale')
    
    # Load all of the points into shapely type geometry
    
    # Distance method won't work with numpy array
    #P = geometry.asPoint(xy)
    
    P = [geometry.Point(xy[i,0],xy[i,1]) for i in range(Np)]
    
    L=[]
    for ll in xyscale:
        L.append(geometry.asLineString(ll))
     
    nlines = len(L)
    scale_all = np.zeros((nj,ni,nlines))
    for n in range(nlines):
        print('Calculating distance from line %d...' % n)
        ss = gridscale[n] * scalefac
        lmin = ndmin * ss
        
        # Find the maximum distance
        Nk =  np.log(lcmax/ss)/np.log(r)
        print(ss, Nk)
        lmax = lmin + Nk * ss
        
        dist = [L[n].distance(P[i]) for i in range(Np)]
        dist = np.array(dist).reshape((nj,ni))
        
        # Calculate the scale
        N = (dist-lmin)/ss
        scale = ss*r**N
        
        ind = dist<=lmin
        if ind.any():
            scale[ind] = ss
            
        ind = scale>lcmax
        if ind.any():
            scale[ind] = lcmax
        
        scale_all[:,:,n] = scale
        
    scale_min = scale_all.min(axis=-1)
    
    write_pos_file(posfile,X,Y,scale_min)  
    
    if geofile is not None:
        fgeo = open(geofile,'a')
        fgeo.write("// Merge a post-processing view containing the target mesh sizes\n")
        fgeo.write('Merge "%s";\n' % posfile)
        fgeo.write("// Apply the view as the current background mesh\n")
        fgeo.write("Background Mesh View[0];\n")
        fgeo.close()
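The background scale above grows geometrically away from each line; a minimal numeric sketch under assumed values (ss=100, r=1.05, ndmin=5, lcmax=2000):

import numpy as np

ss, r, ndmin, lcmax = 100.0, 1.05, 5, 2000.0
lmin = ndmin * ss                      # 500: scale is held at ss inside lmin
Nk = np.log(lcmax / ss) / np.log(r)    # ~61.4 growth steps until lcmax is hit
dist = np.array([0., 500., 1000., 10000.])
scale = ss * r ** ((dist - lmin) / ss)
scale[dist <= lmin] = ss               # floor at ss near the line
scale[scale > lcmax] = lcmax           # cap at lcmax far away
print(scale)                           # [100., 100., ~127.6, 2000.]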
Example No. 10
 def setUp(self):
     self.assertFalse(speedups._orig)
     speedups.enable()
     self.assertTrue(speedups._orig)
Example No. 11
 def use_speedups():
     if speedups.available:
         speedups.enable()
Example No. 12
 def setUp(self):
     self.assertFalse(speedups._orig)
     if speedups.available:
         speedups.enable()
         self.assertTrue(speedups._orig)
Example No. 13
 def setUp(self):
     self.assertFalse(speedups._orig)
     if speedups.available:
         speedups.enable()
         self.assertTrue(speedups._orig)
Example No. 14
from typing import NoReturn, Tuple

import geopandas as gpd
import numpy as np
import shapely.geometry as sg
from holoviews.element import Geometry
from shapely import speedups

from seedpod_ground_risk.layers.osm_tag_layer import OSMTagLayer

gpd.options.use_pygeos = True  # Use GEOS optimised C++ routines
speedups.enable()  # Enable shapely speedups


class ResidentialLayer(OSMTagLayer):
    _census_wards: gpd.GeoDataFrame

    def __init__(self, key, **kwargs):
        super(ResidentialLayer, self).__init__(key, 'landuse=residential',
                                               **kwargs)
        delattr(self, '_colour')
        self._census_wards = gpd.GeoDataFrame()

    def preload_data(self):
        print("Preloading Residential Layer")
        self.ingest_census_data()

    def generate(self, bounds_polygon: sg.Polygon, raster_shape: Tuple[int, int], from_cache: bool = False, **kwargs) -> \
            Tuple[Geometry, np.ndarray, gpd.GeoDataFrame]:
        import colorcet
        import datashader as ds
Example No. 15
# DeNSE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with DeNSE. If not, see <http://www.gnu.org/licenses/>.
""" Python/cython interface to the growth C++ code """

import sys as _sys
import atexit as _atexit

# enable shapely speedups
from shapely import speedups as _spdups
if _spdups.available:
    _spdups.enable()

__version__ = "0.1.dev"

# declare default units

_units = {}

from . import _pygrowth
from . import elements
from . import environment
from . import io
from . import morphology
from . import tools
from . import units
Example No. 16
import geopandas as gpd
import pandas as pd
import shapely.geometry as geom
import shapely.speedups as fast  # Really?
import matplotlib.pyplot as plt
fast.enable()
# Load CSB points
#df = pd.read_csv(r"../data/reprocessed/xyz/uuid_20190626_8bfee6d7ec345d3b503a4ed3adc0288b_pointData.xyz")
df = pd.read_csv(r"../data/reprocessed/xyz/subset.xyz")
df.drop_duplicates(['UUID', 'LON', 'LAT', 'DEPTH'], inplace=True)
geometry = [geom.Point(xy) for xy in zip(df.LON, df.LAT)]
crs = "epsg:4326"  #http://www.spatialreference.org/ref/epsg/4326/
pointsGdf = gpd.GeoDataFrame(df, crs=crs, geometry=geometry)
print(pointsGdf)
# Read in the polygons used to filter CSB points
fp = r"../data/gis/TERRITORIAL_SEAS.shp"
dataGdf = gpd.read_file(fp)
print("CRS: " + str(dataGdf.crs))

# Create a subset with just the country name, exclude flag, and geometry columns
subGdf = dataGdf[['SOVEREIGN1', 'EXCLUDE', 'geometry']].head(10)
# Create a subset of polygons for just those areas we want to filter out
polyFilterGf = subGdf[subGdf['EXCLUDE'] == "Y"]
# Reset indexes
polyFilterGf.reset_index(drop=True, inplace=True)
# Find all points NOT within exclusion areas
pipMask = pointsGdf.within(polyFilterGf.loc[0, 'geometry'])
pnipMask = ~pipMask
#print(pnipMask)
pnipGdf = pointsGdf.loc[pnipMask]
pipGdf = pointsGdf.loc[pipMask]
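The same point-in-polygon filter can be expressed as a geopandas spatial join, which also covers more than one exclusion polygon at a time; a sketch assuming the same pointsGdf and polyFilterGf frames (predicate= requires geopandas >= 0.10, older releases use op=):

joined = gpd.sjoin(pointsGdf, polyFilterGf[['geometry']],
                   how='left', predicate='within')
# rows with no matching polygon are outside every exclusion area
pnipGdf2 = joined[joined['index_right'].isna()].drop(columns='index_right')
pipGdf2 = joined[joined['index_right'].notna()].drop(columns='index_right')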
Example No. 17
def get_grid_layer_pt(input_file, height, field_name,
                      grid_shape="square", mask_layer=None,
                      polygon_layer=None, func='density'):
    if speedups.available and not speedups.enabled:
        speedups.enable()
    proj_robinson = (
            "+proj=robin +lon_0=0 +x_0=0 +y_0=0 "
            "+ellps=WGS84 +datum=WGS84 +units=m +no_defs")

    gdf, replaced_id_field = try_open_geojson(input_file)
    if replaced_id_field and field_name == 'id':
        field_name = '_id'

    if field_name:
        if not gdf[field_name].dtype in (int, float):
            # gdf.loc[:, field_name] = gdf[field_name].replace('', np.NaN)
            # gdf.loc[:, field_name] = gdf[field_name].astype(float)
            gdf.loc[:, field_name] = gdf[field_name].apply(to_float)
        gdf = gdf[gdf[field_name].notnull()]
        gdf = gdf[gdf.geometry.notnull()]
        gdf = gdf[~gdf.geometry.is_empty]
        gdf.index = range(len(gdf))

    if polygon_layer:
        polygon_layer = GeoDataFrame.from_file(
            polygon_layer).to_crs(crs=proj_robinson)
        gdf.to_crs(crs=proj_robinson, inplace=True)
        result_values = get_dens_from_pt(gdf, field_name, polygon_layer, func)
        polygon_layer[func] = [i[0] for i in result_values]
        polygon_layer['count'] = [i[1] for i in result_values]
        return polygon_layer.to_crs({"init": "epsg:4326"}).to_json()

    else:
        if mask_layer:
            _mask = GeoDataFrame.from_file(mask_layer)

            mask = GeoSeries(
                cascaded_union(_mask.geometry.buffer(0)),
                crs=_mask.crs,
                ).to_crs(crs=proj_robinson).values[0]

            try:
                mask = mask.buffer(1).buffer(-1)
            except TopologicalError:
                mask = mask.buffer(0)
        else:
            mask = None

        gdf.to_crs(crs=proj_robinson, inplace=True)

        cell_generator = {
            "square": square_grid_gen,
            "diamond": diams_grid_gen,
            "hexagon": hex_grid_gen,
            }[grid_shape]

        res_geoms = get_dens_grid_pt(
            gdf, height, field_name, mask, func, cell_generator)
        result = GeoDataFrame(
            index=range(len(res_geoms)),
            data={'id': [i for i in range(len(res_geoms))],
                  func: [i[1] for i in res_geoms],
                  'count': [i[2] for i in res_geoms]},
            geometry=[i[0] for i in res_geoms],
            crs=gdf.crs
            ).to_crs({"init": "epsg:4326"})

        total_bounds = result.total_bounds
        if total_bounds[0] < -179.9999 or total_bounds[1] < -89.9999 \
                or total_bounds[2] > 179.9999 or total_bounds[3] > 89.9999:
            result = json.loads(result.to_json())
            repairCoordsPole(result)
            return json.dumps(result)
        else:
            return result.to_json()
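A minimal call sketch for get_grid_layer_pt; 'points.geojson' is a hypothetical placeholder, and height is assumed to be the grid cell size in projected metres:

result_json = get_grid_layer_pt('points.geojson', 100000, 'population',
                                grid_shape='hexagon', func='density')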
Example No. 18
    def write_file(filename, wkt, **attr):

        from xml.etree import ElementTree as et

        # Create an SVG XML element
        # @ToDo: Allow customisation of height/width
        iheight = 74
        height = str(iheight)
        iwidth = 74
        width = str(iwidth)
        doc = et.Element("svg",
                         width=width,
                         height=height,
                         version="1.1",
                         xmlns="http://www.w3.org/2000/svg")

        # Convert WKT
        from shapely.wkt import loads as wkt_loads
        try:
            # Enable C-based speedups available from 1.2.10+
            from shapely import speedups
            speedups.enable()
        except ImportError:
            current.log.info("S3GIS",
                             "Upgrade Shapely for Performance enhancements")

        shape = wkt_loads(wkt)

        geom_type = shape.geom_type
        if geom_type not in ("MultiPolygon", "Polygon"):
            current.log.error("Unsupported Geometry", geom_type)
            return

        # Scale Points & invert Y axis
        from shapely import affinity
        bounds = shape.bounds  # (minx, miny, maxx, maxy)
        swidth = abs(bounds[2] - bounds[0])
        sheight = abs(bounds[3] - bounds[1])
        width_multiplier = iwidth / swidth
        height_multiplier = iheight / sheight
        multiplier = min(width_multiplier, height_multiplier) * 0.9  # Padding
        shape = affinity.scale(shape,
                               xfact=multiplier,
                               yfact=-multiplier,
                               origin="centroid")

        # Center Shape
        centroid = shape.centroid
        xoff = (iwidth / 2) - centroid.x
        yoff = (iheight / 2) - centroid.y
        shape = affinity.translate(shape, xoff=xoff, yoff=yoff)

        if geom_type == "MultiPolygon":
            polygons = shape.geoms
        elif geom_type == "Polygon":
            polygons = [shape]
        # @ToDo:
        #elif geom_type == "LineString":
        #    _points = shape
        #elif geom_type == "Point":
        #    _points = [shape]

        points = []
        pappend = points.append
        for polygon in polygons:
            _points = polygon.exterior.coords
            for point in _points:
                pappend("%s,%s" % (point[0], point[1]))

        points = " ".join(points)

        # Wrap in Square for Icon
        # @ToDo: Anti-Aliased Rounded Corners
        # @ToDo: Make optional
        fill = "rgb(167, 192, 210)"
        stroke = "rgb(114, 129, 145)"
        et.SubElement(doc,
                      "rect",
                      width=width,
                      height=height,
                      fill=fill,
                      stroke=stroke)

        # @ToDo: Allow customisation of options
        fill = "rgb(225, 225, 225)"
        stroke = "rgb(165, 165, 165)"
        et.SubElement(doc, "polygon", points=points, fill=fill, stroke=stroke)

        # @ToDo: Add Attributes from list_fields

        # Write out File
        path = os.path.join(current.request.folder, "static", "cache", "svg")
        if not os.path.exists(path):
            os.makedirs(path)
        filepath = os.path.join(path, filename)
        with open(filepath, "w") as f:
            # ElementTree 1.2 doesn't write the SVG file header, so do that manually
            f.write("<?xml version=\"1.0\" standalone=\"no\"?>\n")
            f.write("<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\"\n")
            f.write("\"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\n")
            f.write(et.tostring(doc))

        return filepath
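The scale-and-centre step above is plain shapely.affinity; a self-contained sketch on a unit square (numbers chosen to mirror the 74x74 icon, otherwise illustrative):

from shapely import affinity
from shapely.geometry import Polygon

square = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
# scale about the centroid and flip Y, as write_file does for SVG coordinates
scaled = affinity.scale(square, xfact=30, yfact=-30, origin="centroid")
# translate so the centroid lands at the centre of a 74x74 icon
c = scaled.centroid
icon = affinity.translate(scaled, xoff=37 - c.x, yoff=37 - c.y)
print(icon.bounds)  # (22.0, 22.0, 52.0, 52.0)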
Example No. 19
def resample_OIB(oibdata,ccdata,ql=1):
    '''
    Does the resampling of the OIB IDCSI files.
    Uses a spatial index to search points fast. Does not use prepared polygons
    '''
    from shapely.geometry import Point, Polygon
    from shapely import speedups
    from rtree import index
    import datetime
    import traceback
    import numpy as np
    import pandas as pd
    import pytz
    #Setup which columns we want to calculate which statistics for.
    meancols = [0,1,2,3,4,5,6,7,8,11,12,13]
    stdevcols = [2,3,4,5,6,7,8,11,12]
    mediancols = [2,3,4,5,6,7,8,11,12]
    maxmincols = [2,3,4,5,6,7,8,11,12,13] 
    speedups.enable()
    converted = 1  # default: process coordinates in the 0/360 system
    #Check for crossings of IDL/prime meridian
    if ((oibdata['lon'] >=359.).any(skipna=True) and 
        (oibdata['lon']<1.).any(skipna=True) and 
        not np.logical_and(179.<oibdata['lon'],
        oibdata['lon']<181.).any(skipna=True)):
        print("Crosses Prime Meridian but not IDL. Coordinates will be")
        print("processed in -180/180 system.")
        oibdata['lon']=deconvert_lon(oibdata['lon'])
        for i in np.arange(len(ccdata)): 
            ccdata[i][6]=deconvert_lon(ccdata[i][6])
            ccdata[i][8]=deconvert_lon(ccdata[i][8])
            ccdata[i][10]=deconvert_lon(ccdata[i][10])
            ccdata[i][12]=deconvert_lon(ccdata[i][12])
            ccdata[i][14]=deconvert_lon(ccdata[i][14])
        converted=0
    elif (not (oibdata['lon'] >=359.).any(skipna=True) and
        not (oibdata['lon']<1.).any(skipna=True) and 
        np.logical_and(179.<oibdata['lon'],
        oibdata['lon']<181.).any(skipna=True)):
        print("Does not cross prime meridian but crosses IDL. Coordinates will")
        print("be processed in 0/360 system.")
        converted=1
    elif ((oibdata['lon'] >=359.).any(skipna=True) and
        (oibdata['lon']<1.).any(skipna=True) and 
        np.logical_and(179.<oibdata['lon'],
        oibdata['lon']<181.).any(skipna=True)):
        print("Crosses both the Prime Meridian and the IDL. Coordinates will")
        print("be processed in -180/180 system.")
        oibdata['lon']=deconvert_lon(oibdata['lon'])
        for i in np.arange(len(ccdata)): 
            ccdata[i][6]=deconvert_lon(ccdata[i][6])
            ccdata[i][8]=deconvert_lon(ccdata[i][8])
            ccdata[i][10]=deconvert_lon(ccdata[i][10])
            ccdata[i][12]=deconvert_lon(ccdata[i][12])
            ccdata[i][14]=deconvert_lon(ccdata[i][14])
        converted=0
    elif (not (oibdata['lon'] >=359.).any(skipna=True) and 
        not (oibdata['lon']<1.).any(skipna=True) and 
        not np.logical_and(179.<oibdata['lon'],
        oibdata['lon']<181.).any(skipna=True)):
        print("Does not cross the IDL or the Prime Meridian. Coordinates will")
        print("be processed in 0/360 system.")
        converted=1

    #Try calculating polygons, filling the index.
    try:
        p=[]
        idx=index.Index() #Setup a spatial index
        n_records=len(ccdata)
        n_polygon_points=5
        polygon_points =np.ndarray(shape=(n_polygon_points,2),dtype=np.float64)
        p=np.ndarray(shape=(n_records),dtype=object)
        #Calculate the polygons for the CryoSat-2 (cc) footprints
        for i in np.arange(n_records):
            fp_x = [ccdata[i,8],ccdata[i,10],ccdata[i,14],ccdata[i,12],
                ccdata[i,8]]
            fp_y = [ccdata[i,9],ccdata[i,11],ccdata[i,15],ccdata[i,13],
                ccdata[i,9]]
            n_polygon_points=len(fp_x)
            polygon_points[:,0]=fp_x[:]
            polygon_points[:,1]=fp_y[:]
            p[i]=Polygon(polygon_points)
        #Fill the spatial index with the icebridge data points
        for j in np.arange(len(oibdata['lon'])):
            idx.insert(j,Point(oibdata.iloc[j]['lon'],
                oibdata.iloc[j]['lat']).bounds)
        #Setup our variables to hold the resampled data. Preallocation is fast.
        newpolys = []
        polys = []
        navg=np.empty((n_records,1))*np.nan
        foobar = np.empty((n_records))*np.nan
        myiflag = np.empty((n_records,1))*np.nan #mode of my ice flag (0 or 1)
        u=np.empty((n_records,len(meancols)))*np.nan #mean
        s=np.empty((n_records,len(stdevcols)))*np.nan #stdev
        m=np.empty((n_records,len(mediancols)))*np.nan #median
        h=np.empty((n_records,len(maxmincols)))*np.nan #max
        l=np.empty((n_records,len(maxmincols)))*np.nan #min
        out = np.empty((n_records,(len(stdevcols)+len(mediancols)+(2*len(maxmincols))+len(meancols)+1)))
        #Let's try the resampling.
        try:
            for i,poly in enumerate(p):   
                newpts=[]
                n_pts = 0
                #Use spatial index to find points that intersect the polygon.
                #Just because a point intersects the polygon, does not mean 
                #that it is inside the polygon, so we test that further on.
                #j is an iterative point in the list of points (in the spatial)
                #index that does intersect with the polygon bounds.
                for j in idx.intersection(poly.bounds): 
                    
                    #We discovered an issue with the polygons near the IDL/PM
                    #where they wrapped the wrong way around the Earth. We test
                    #for them here and fix them. 
                    if poly.area>0.1: 
                    #because polygon is crossing prime meridian area is large
                    #Let's transform the polygon coordinates back to -180/180 
                    #Transform the points to -180/180 and test
                        print(poly)
                        #left in so that we notice when these cases happen.
                        points=list(poly.exterior.coords)
                        points_x,points_y=zip(*points)
                        newx=[]
                        for k in np.arange(len(points_x)):
                            if points_x[k]>180.0:
                                newx.append(points_x[k]-360.0)
                            else:
                                newx.append(points_x[k])
                        points_x=newx
                        n_polygon_points=len(points_x)
                        polygon_points = np.ndarray(shape=(n_polygon_points, 2), dtype=np.float64)
                        for k in np.arange(n_polygon_points):
                            polygon_points[k,0]=points_x[k]
                            polygon_points[k,1]=points_y[k]
                        poly=Polygon(polygon_points)
                        #Test that the point is in the polygon
                        if poly.contains(
                                Point(deconvert_lon(oibdata.iloc[j]['lon']),
                                oibdata.iloc[j]['lat'])):
                            newpts.append(j)
                            n_pts+=1
                    #If the area is okay, let's move on and check if the point
                    #is in the polygon
                    else:    
                        if poly.contains(Point(oibdata.iloc[j]['lon'],
                            oibdata.iloc[j]['lat'])):
                            newpts.append(j)
                            n_pts+=1
                #Append the polygon number (this keeps all polygons).
                #Relies on having already calculated orbit extent for CS2 file.
                newpolys.append(i) 
                #Based on the number of points, calculate the statistics
                #If no points, values are nan or 0 (for navg)
                if n_pts == 0:
                    u[i] = np.empty(len(meancols))*np.nan
                    s[i] = np.empty(len(stdevcols))*np.nan
                    m[i] = np.empty(len(mediancols))*np.nan
                    h[i] = np.empty(len(maxmincols))*np.nan
                    l[i] =np.empty(len(maxmincols))*np.nan
                    myiflag[i] = np.empty(1)*np.nan
                    #foobar will hold the median point of the points in a poly
                    foobar[i]= 0.0 #
                    navg[i]=0
                #If 1 point, calculate the statistics, but std will be nan,
                #others will just be the value of the point
                elif n_pts == 1: 
                    u[i] = oibdata.iloc[newpts,meancols].mean()
                    s[i] = oibdata.iloc[newpts,stdevcols].std()
                    m[i] = oibdata.iloc[newpts,mediancols].median()
                    h[i] = oibdata.iloc[newpts,maxmincols].max()
                    l[i] = oibdata.iloc[newpts,maxmincols].min()
                    myiflag[i] = oibdata.iloc[newpts,14].mean()
                    navg[i] = n_pts
                    foobar[i]=np.floor(np.median(newpts))
                #If more than one point (what we expect), calculate the 
                #statistics. Note that the mode (most common value) is 
                #calculated for all fields, but only the value for the 
                #MY_ICE_FLAG is kept.
                elif n_pts > 1:
                    foo = oibdata.iloc[newpts].mode()
                    u[i] = oibdata.iloc[newpts,meancols].mean()
                    s[i] = oibdata.iloc[newpts,stdevcols].std()
                    m[i] = oibdata.iloc[newpts,mediancols].median()
                    h[i] = oibdata.iloc[newpts,maxmincols].max()
                    l[i] = oibdata.iloc[newpts,maxmincols].min()
                    myiflag[i]=foo['my_ice_flag'].iloc[0]
                    navg[i] = n_pts
                    foobar[i]=np.floor(np.median(newpts))
        except Exception:
            print(traceback.format_exc())
        #Newpolys should be unique anyway, but double-check and return only the
        #unique polygon records (no doubles)
        polys=ccdata[np.unique(newpolys),:]
        #Concatenate the variables holding the resampled data.
        out=np.concatenate((u,s,m,h,l,myiflag),axis=1)
        #Out should have length of newpolys so again check for uniqueness
        out=out[np.unique(newpolys),:]
        #Same with navg.
        navg=navg[np.unique(newpolys),:]
        #Rewrite the header here to separate out IDCSI and IDCSI quicklooks
        if ql==1:
            outhead=['OIBql_lat_mean','OIBql_lon_mean','OIBql_thickness_mean','OIBql_thickness_unc_mean','OIBql_mean_fb_mean',
            'OIBql_ATM_fb_mean','OIBql_fb_unc_mean','OIBql_snow_depth_mean','OIBql_snow_depth_unc_mean',
            'OIBql_ssh_mean','OIBql_ssh_sd_mean','OIBql_ssh_tp_dist_mean',
            'OIBql_thickness_stdev','OIBql_thickness_unc_stdev','OIBql_mean_fb_stdev',
            'OIBql_ATM_fb_stdev','OIBql_fb_unc_stdev','OIBql_snow_depth_stdev','OIBql_snow_depth_unc_stdev',
            'OIBql_ssh_stdev','OIBql_ssh_sd_stdev', 
            'OIBql_thickness_median','OIBql_thickness_unc_median','OIBql_mean_fb_median','OIBql_ATM_fb_median',
            'OIBql_fb_unc_median','OIBql_snow_depth_median','OIBql_snow_depth_unc_median',
            'OIBql_ssh_median','OIBql_ssh_sd_median',
            'OIBql_thickness_max','OIBql_thickness_unc_max','OIBql_mean_fb_max','OIBql_ATM_fb_max','OIBql_fb_unc_max',
            'OIBql_snow_depth_max','OIBql_snow_depth_unc_max','OIBql_ssh_max','OIBql_ssh_sd_max','OIBql_ssh_tp_dist_max',
            'OIBql_thickness_min','OIBql_thickness_unc_min',
            'OIBql_mean_fb_min','OIBql_ATM_fb_min','OIBql_fb_unc_min','OIBql_snow_depth_min','OIBql_snow_depth_unc_min',
            'OIBql_ssh_min','OIBql_ssh_sd_min','OIBql_ssh_tp_dist_min','OIBql_MYIflag_mode']
        else:
            outhead=['OIB_lat_mean','OIB_lon_mean','OIB_thickness_mean','OIB_thickness_unc_mean','OIB_mean_fb_mean',
            'OIB_ATM_fb_mean','OIB_fb_unc_mean','OIB_snow_depth_mean','OIB_snow_depth_unc_mean',
            'OIB_ssh_mean','OIB_ssh_sd_mean','OIB_ssh_tp_dist_mean',
            'OIB_thickness_stdev','OIB_thickness_unc_stdev','OIB_mean_fb_stdev',
            'OIB_ATM_fb_stdev','OIB_fb_unc_stdev','OIB_snow_depth_stdev','OIB_snow_depth_unc_stdev',
            'OIB_ssh_stdev','OIB_ssh_sd_stdev',
            'OIB_thickness_median','OIB_thickness_unc_median','OIB_mean_fb_median','OIB_ATM_fb_median',
            'OIB_fb_unc_median','OIB_snow_depth_median','OIB_snow_depth_unc_median',
            'OIB_ssh_median','OIB_ssh_sd_median',
            'OIB_thickness_max','OIB_thickness_unc_max','OIB_mean_fb_max','OIB_ATM_fb_max','OIB_fb_unc_max',
            'OIB_snow_depth_max','OIB_snow_depth_unc_max','OIB_ssh_max','OIB_ssh_sd_max','OIB_ssh_tp_dist_max',
            'OIB_thickness_min','OIB_thickness_unc_min',
            'OIB_mean_fb_min','OIB_ATM_fb_min','OIB_fb_unc_min','OIB_snow_depth_min','OIB_snow_depth_unc_min',
            'OIB_ssh_min','OIB_ssh_sd_min','OIB_ssh_tp_dist_min','OIB_MYIflag_mode']

        #Let's create some of the other variables, like the timestamp
        oib_time = np.ndarray(shape=(len(oibdata)),dtype=object)
        secs = np.empty(len(oibdata))*np.nan
        msecs = np.empty(len(oibdata))*np.nan
        secs = np.floor(oibdata['elapsed'].values.tolist())
        msecs = np.floor(1e6 * (oibdata['elapsed'].values.tolist() - np.floor(secs)))
        date = oibdata['date'].values.tolist()
        date = [str(int(d)) for d in date]
        #Lets calculate the OIB timestamp.
        for i in np.arange(len(date)):
            oib_time[i] = datetime.datetime.strptime(date[i],"%Y%m%d").replace(
                tzinfo=pytz.utc) +\
                datetime.timedelta(seconds=int(secs[i]), microseconds=int(
                msecs[i]))
        foobar=foobar.astype(int)       
        #Get the timestamp of the middle point of all OIB points in each footprint
        oib_time=np.where(foobar==0,0,oib_time[foobar])

        #Let's calculate the CS2 timestamp from the CC data.
        cc_time=np.ndarray(shape=(len(polys)),dtype=object)
        for i in np.arange(len(cc_time)):
            cc_time[i] = datetime.datetime(int(ccdata[i,0]), int(ccdata[i,1]),int(ccdata[i,2]),
            int(ccdata[i,3]), int(ccdata[i,4]), int(ccdata[i,5]),int(1e6*(ccdata[i,5] - np.floor(ccdata[i,5]))),
            tzinfo=pytz.utc)
        #Let's calculate the difference between the CS2 time and OIB.
        dt_time=np.ndarray(shape=(len(cc_time)),dtype=object)
        for i in np.arange(len(cc_time)):
            if oib_time[i]==0:
                dt_time[i]=np.nan
            else:
                dt_time[i]=(cc_time[i]-oib_time[i]).total_seconds()
        
        #Check for uniqueness in the shapely polygon objects.
        g = np.unique(newpolys)
        c = [p[x] for x in g]
        #Setup the output dataframe
        outdf=pd.DataFrame(data=out,columns=outhead)

        #Add in the delta time, footprint latitude,longitude, and npts 
        if ql == 1:
            outdf['OIB_dt_time']=dt_time
            outdf['OIBql_fp_lat']=polys[:,7]
            outdf['OIBql_fp_lon']=polys[:,6]
            outdf['OIBql_n_pts']=navg[:,0]
            if converted == 1:
                outdf['OIBql_fp_lon']=deconvert_lon(outdf['OIBql_fp_lon'])
                outdf['OIBql_lon_mean']=deconvert_lon(outdf['OIBql_lon_mean'])
            nfinite = np.count_nonzero(np.isfinite(outdf['OIBql_mean_fb_mean']))
        else:
            outdf['OIB_dt_time']=dt_time
            outdf['OIB_fp_lat']=polys[:,7]
            outdf['OIB_fp_lon']=polys[:,6]
            outdf['OIB_n_pts']=navg[:,0]
            if converted == 1:
                outdf['OIB_fp_lon']=deconvert_lon(outdf['OIB_fp_lon'])
                outdf['OIB_lon_mean']=deconvert_lon(outdf['OIB_lon_mean'])
            nfinite = np.count_nonzero(np.isfinite(outdf['OIB_mean_fb_mean']))
        #Let's add in the polygon geometry objects
        
        outdf['OIB_geometry']=c
        #Print out a bit of info about the resampling result and return data.
        print "Number of Resampled IceBridge Data Points: ", outdf.shape[0]
        print "Number of finite freeboard values: ",nfinite
        return outdf
        
    except ValueError:
        pass
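The per-footprint statistics above follow a single pandas pattern: pick the rows that fell inside a polygon with .iloc, then reduce each column; a minimal sketch with toy data:

import pandas as pd

df = pd.DataFrame({'thickness': [1.0, 2.0, 4.0], 'fb': [0.1, 0.3, 0.2]})
newpts = [0, 2]                   # rows that fell inside one footprint
print(df.iloc[newpts].mean())     # thickness 2.5, fb 0.15
print(df.iloc[newpts].median())
print(df.iloc[newpts].std())      # sample standard deviation (ddof=1)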
Example No. 20
def packCurves():
    if speedups.available:
        speedups.enable()
    t = time.time()
    packsettings = bpy.context.scene.cam_pack

    sheetsizex = packsettings.sheet_x
    sheetsizey = packsettings.sheet_y
    direction = packsettings.sheet_fill_direction
    distance = packsettings.distance
    tolerance = packsettings.tolerance
    rotate = packsettings.rotate
    rotate_angle = packsettings.rotate_angle

    polyfield = []  # in this, position, rotation, and actual poly will be stored.
    for ob in bpy.context.selected_objects:
        allchunks = []
        simple.activate(ob)
        bpy.ops.object.make_single_user(type='SELECTED_OBJECTS')
        bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY')
        z = ob.location.z
        bpy.ops.object.location_clear()
        bpy.ops.object.rotation_clear()

        chunks = utils.curveToChunks(ob)
        npolys = utils.chunksToShapely(chunks)
        # add all polys in silh to one poly
        poly = shapely.ops.unary_union(npolys)

        poly = poly.buffer(distance / 1.5, 8)
        poly = poly.simplify(0.0003)
        polyfield.append([[0, 0], 0.0, poly, ob, z])
    random.shuffle(polyfield)
    # primitive layout here:
    allpoly = prepared.prep(sgeometry.Polygon())  # main collision poly.
    # allpoly=sgeometry.Polygon()#main collision poly.

    shift = tolerance  # one millimeter for now.
    rotchange = rotate_angle  # in radians

    xmin, ymin, xmax, ymax = polyfield[0][2].bounds
    if direction == 'X':
        mindist = -xmin
    else:
        mindist = -ymin
    i = 0
    p = polyfield[0][2]
    placedpolys = []
    rotcenter = sgeometry.Point(0, 0)
    for pf in polyfield:
        print(i)
        rot = 0
        porig = pf[2]
        placed = False
        xmin, ymin, xmax, ymax = p.bounds
        # p.shift(-xmin,-ymin)
        if direction == 'X':
            x = mindist
            y = -ymin
        if direction == 'Y':
            x = -xmin
            y = mindist

        itera = 0
        best = None
        hits = 0
        besthit = None
        while not placed:

            # swap x and y, and add to x
            # print(x,y)
            p = porig

            if rotate:
                ptrans = affinity.rotate(p, rot, origin=rotcenter, use_radians=True)
                ptrans = affinity.translate(ptrans, x, y)
            else:
                ptrans = affinity.translate(p, x, y)
            xmin, ymin, xmax, ymax = ptrans.bounds
            # print(iter,p.bounds)

            if xmin > 0 and ymin > 0 and (
                    (direction == 'Y' and xmax < sheetsizex) or (direction == 'X' and ymax < sheetsizey)):
                if not allpoly.intersects(ptrans):
                    # if allpoly.disjoint(ptrans):
                    # print('gothit')
                    # we do more good solutions, choose best out of them:
                    hits += 1
                    if best is None:
                        best = [x, y, rot, xmax, ymax]
                        besthit = hits
                    if direction == 'X':
                        if xmax < best[3]:
                            best = [x, y, rot, xmax, ymax]
                            besthit = hits
                    elif ymax < best[4]:
                        best = [x, y, rot, xmax, ymax]
                        besthit = hits

            if hits >= 15 or (
                    itera > 20000 and hits > 0):  # here was originally more, but 90% of best solutions are still 1
                placed = True
                pf[3].location.x = best[0]
                pf[3].location.y = best[1]
                pf[3].location.z = pf[4]
                pf[3].rotation_euler.z = best[2]

                pf[3].select_set(state=True)

                # print(mindist)
                mindist = mindist - 0.5 * (xmax - xmin)
                # print(mindist)
                # print(iter)

                # reset polygon to best position here:
                ptrans = affinity.rotate(porig, best[2], rotcenter, use_radians=True)
                ptrans = affinity.translate(ptrans, best[0], best[1])

                # polygon_utils_cam.polyToMesh(p,0.1)#debug visualisation
                keep = []
                print(best[0], best[1],itera)
                # print(len(ptrans.exterior))
                # npoly=allpoly.union(ptrans)

                # for ci in range(0,len(allpoly)):
                #     cminx,cmaxx,cminy,cmaxy=allpoly.boundingBox(ci)
                #     if direction=='X' and cmaxx>mindist-.1:
                #             npoly.addContour(allpoly[ci])
                #     if direction=='Y' and cmaxy>mindist-.1:
                #             npoly.addContour(allpoly[ci])

                # allpoly=npoly
                placedpolys.append(ptrans)
                allpoly = prepared.prep(sgeometry.MultiPolygon(placedpolys))
                # *** temporary fix until prepared geometry code is setup properly
                # allpoly=sgeometry.MultiPolygon(placedpolys)

                # polygon_utils_cam.polyToMesh(allpoly,0.1)#debug visualisation

                # for c in p:
                #	allpoly.addContour(c)
                # cleanup allpoly
                print(itera, hits, besthit)
            if not placed:
                if direction == 'Y':
                    x += shift
                    mindist = y
                    if xmax + shift > sheetsizex:
                        x = x - xmin
                        y += shift
                if direction == 'X':
                    y += shift
                    mindist = x
                    if ymax + shift > sheetsizey:
                        y = y - ymin
                        x += shift
                if rotate: rot += rotchange
            itera += 1
        i += 1
    t = time.time() - t

    polygon_utils_cam.shapelyToCurve('test', sgeometry.MultiPolygon(placedpolys), 0)
    print(t)
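For reference, the prepared-geometry collision test in the loop above pays off because one static union is queried against many candidate placements. A minimal standalone sketch of that pattern (the shapes and offsets are illustrative, not taken from the add-on):

# Minimal sketch of the prepared-geometry intersection test.
from shapely import affinity
from shapely.geometry import Polygon
from shapely.prepared import prep

placed = Polygon([(0, 0), (2, 0), (2, 2), (0, 2)])
allpoly = prep(placed)  # prepare once, query many times

candidate = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
for dx in (0.5, 1.5, 2.5):
    moved = affinity.translate(candidate, dx, 0.0)
    print(dx, allpoly.intersects(moved))  # True, True, False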
Example No. 21
    def write_file(filename, wkt, **attr):

        import os

        from gluon import current  # web2py request context (module-level in the original)
        from xml.etree import ElementTree as et

        # Create an SVG XML element
        # @ToDo: Allow customisation of height/width
        iheight = 74
        height = str(iheight)
        iwidth = 74
        width = str(iwidth)
        doc = et.Element("svg", width=width, height=height, version="1.1", xmlns="http://www.w3.org/2000/svg")

        # Convert WKT
        from shapely.wkt import loads as wkt_loads
        try:
            # Enable C-based speedups available from 1.2.10+
            from shapely import speedups
            speedups.enable()
        except ImportError:
            from ..s3utils import s3_debug
            s3_debug("S3GIS", "Upgrade Shapely for Performance enhancements")

        shape = wkt_loads(wkt)

        geom_type = shape.geom_type
        if geom_type not in ("MultiPolygon", "Polygon"):
            error = "Unsupported Geometry: %s" % geom_type
            from ..s3utils import s3_debug
            s3_debug(error)
            return

        # Scale Points & invert Y axis
        from shapely import affinity
        bounds = shape.bounds # (minx, miny, maxx, maxy)
        swidth = abs(bounds[2] - bounds[0])
        sheight = abs(bounds[3] - bounds[1])
        width_multiplier = iwidth / swidth
        height_multiplier = iheight / sheight
        multiplier = min(width_multiplier, height_multiplier) * 0.9 # Padding
        shape = affinity.scale(shape, xfact=multiplier, yfact=-multiplier, origin="centroid")

        # Center Shape
        centroid = shape.centroid
        xoff = (iwidth / 2) - centroid.x
        yoff = (iheight / 2) - centroid.y
        shape = affinity.translate(shape, xoff=xoff, yoff=yoff)

        if geom_type == "MultiPolygon":
            polygons = shape.geoms
        elif geom_type == "Polygon":
            polygons = [shape]
        # @ToDo:
        #elif geom_type == "LineString":
        #    _points = shape
        #elif geom_type == "Point":
        #    _points = [shape]

        points = []
        pappend = points.append
        for polygon in polygons:
            _points = polygon.exterior.coords
            for point in _points:
                pappend("%s,%s" % (point[0], point[1]))

        points = " ".join(points)

        # Wrap in Square for Icon
        # @ToDo: Anti-Aliased Rounded Corners
        # @ToDo: Make optional
        fill = "rgb(167, 192, 210)"
        stroke = "rgb(114, 129, 145)"
        et.SubElement(doc, "rect", width=width, height=height, fill=fill, stroke=stroke)

        # @ToDo: Allow customisation of options
        fill = "rgb(225, 225, 225)"
        stroke = "rgb(165, 165, 165)"
        et.SubElement(doc, "polygon", points=points, fill=fill, stroke=stroke)

        # @ToDo: Add Attributes from list_fields

        # Write out File
        path = os.path.join(current.request.folder, "static", "cache", "svg")
        if not os.path.exists(path):
            os.makedirs(path)
        filepath = os.path.join(path, filename)
        with open(filepath, "w") as f:
            # ElementTree 1.2 doesn't write the SVG file header, so do that manually
            f.write("<?xml version=\"1.0\" standalone=\"no\"?>\n")
            f.write("<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\"\n")
            f.write("\"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\n")
            f.write(et.tostring(doc))  # note: on Python 3, tostring() returns bytes and would need .decode() here

        return filepath
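The core of write_file is the scale, invert, and center sequence. A standalone sketch of that technique without the web2py plumbing (the helper name and sample WKT are illustrative):

# Standalone sketch of the WKT-to-SVG-points technique (no web2py dependencies).
from xml.etree import ElementTree as et
from shapely import affinity
from shapely.wkt import loads as wkt_loads

def wkt_to_svg_points(wkt, size=74, padding=0.9):
    shape = wkt_loads(wkt)
    minx, miny, maxx, maxy = shape.bounds
    multiplier = min(size / (maxx - minx), size / (maxy - miny)) * padding
    # Negative yfact inverts the Y axis, since SVG's Y grows downward
    shape = affinity.scale(shape, xfact=multiplier, yfact=-multiplier, origin="centroid")
    c = shape.centroid
    shape = affinity.translate(shape, xoff=size / 2 - c.x, yoff=size / 2 - c.y)
    return " ".join("%s,%s" % (x, y) for x, y in shape.exterior.coords)

points = wkt_to_svg_points("POLYGON ((0 0, 10 0, 10 6, 0 6, 0 0))")
print(points)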
Example No. 22
from .. import transformations

from . import raster
from . import simplify
from . import entities
from . import polygons
from . import traversal

from .io.export import export_path

try:
    # try running shapely speedups
    # these mostly speed up object instantiation
    from shapely import speedups
    if speedups.available:
        speedups.enable()
except BaseException:
    pass


class Path(object):
    """
    A Path object consists of:

    vertices: (n,[2|3]) coordinates, stored in self.vertices

    entities: geometric primitives (aka Lines, Arcs, etc.)
              that reference indexes in self.vertices
    """

    def __init__(self,
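The constructor signature is truncated here. To illustrate the vertices/entities split the docstring describes, a toy sketch (ToyLine is hypothetical, not trimesh's actual entity class):

# Toy illustration of entities referencing indexes into shared vertices.
import numpy as np

class ToyLine(object):
    def __init__(self, points):
        # indexes into the shared vertex array, not coordinates
        self.points = np.asarray(points, dtype=np.int64)

vertices = np.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0]])
entities = [ToyLine([0, 1]), ToyLine([1, 2])]
for entity in entities:
    print(vertices[entity.points])  # the coordinates each entity spans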
Example No. 23
# Imports this snippet relies on (module names follow the blendercam add-on layout; assumed here):
import random
import time

import bpy
import shapely.ops
from shapely import affinity, geometry as sgeometry, prepared, speedups

from cam import polygon_utils_cam, simple, utils

def packCurves():
	if speedups.available:
		speedups.enable()
	t=time.time()
	packsettings=bpy.context.scene.cam_pack
	
	sheetsizex=packsettings.sheet_x
	sheetsizey=packsettings.sheet_y
	direction=packsettings.sheet_fill_direction
	distance=packsettings.distance
	rotate = packsettings.rotate
	
	polyfield=[]#in this, position, rotation, and actual poly will be stored.
	for ob in bpy.context.selected_objects:
		allchunks=[]
		simple.activate(ob)
		bpy.ops.object.make_single_user(type='SELECTED_OBJECTS')
		bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY')
		z=ob.location.z
		bpy.ops.object.location_clear()
		bpy.ops.object.rotation_clear()

		chunks=utils.curveToChunks(ob)
		npolys=utils.chunksToShapely(chunks)
		#add all polys in silh to one poly
		poly=shapely.ops.unary_union(npolys)
		
		poly=poly.buffer(distance/1.5,8)
		poly=poly.simplify(0.0003)
		polyfield.append([[0,0],0.0,poly,ob,z])
	random.shuffle(polyfield)
	#primitive layout here:
	allpoly=prepared.prep(sgeometry.Polygon())#main collision poly.
	#allpoly=sgeometry.Polygon()#main collision poly.
	
	shift=0.0015#one milimeter by now.
	rotchange=.3123456#in radians
	
	xmin,ymin,xmax,ymax=polyfield[0][2].bounds
	if direction=='X':
		mindist=-xmin
	else:
		mindist=-ymin
	i=0
	p=polyfield[0][2]
	placedpolys=[]
	rotcenter=sgeometry.Point(0,0)
	for pf in polyfield:
		print(i)
		rot=0
		porig=pf[2]
		placed=False
		xmin,ymin,xmax,ymax=p.bounds
		#p.shift(-xmin,-ymin)
		if direction=='X':
			x=mindist
			y=-ymin
		if direction=='Y':
			x=-xmin
			y=mindist
		
		iter=0
		best=None
		hits=0
		besthit=None
		while not placed:
			
			#swap x and y, and add to x
			#print(x,y)
			p=porig
			
			if rotate: 
				#ptrans=srotate(p,rot,0,0)
				ptrans=affinity.rotate(p,rot,origin = rotcenter, use_radians=True)
				#ptrans = translate(ptrans,x,y)
				ptrans = affinity.translate(ptrans,x,y)
			else:
				#ptrans = translate(p,x,y)
				ptrans = affinity.translate(p,x,y)
			xmin,ymin,xmax,ymax=ptrans.bounds
			#print(iter,p.bounds)
			
			if xmin>0 and ymin>0 and ((direction=='Y' and xmax<sheetsizex) or (direction=='X' and ymax<sheetsizey)):
				if not allpoly.intersects(ptrans):
				#if allpoly.disjoint(ptrans):
					#print('gothit')
					#we do more good solutions, choose best out of them:
					hits+=1
					if best is None:
						best=[x,y,rot,xmax,ymax]
						besthit=hits
					if direction=='X':
						if xmax<best[3]:
							best=[x,y,rot,xmax,ymax]
							besthit=hits
					elif ymax<best[4]:
						best=[x,y,rot,xmax,ymax]
						besthit=hits

			if hits>=15 or (iter>10000 and hits>0):  # originally more hits were required, but ~90% of best solutions are still the first one
				placed=True
				pf[3].location.x=best[0]
				pf[3].location.y=best[1]
				pf[3].location.z=pf[4]
				pf[3].rotation_euler.z=best[2]

				pf[3].select=True
				
				#print(mindist)
				mindist=mindist-0.5*(xmax-xmin)
				#print(mindist)
				#print(iter)
				
				#reset polygon to best position here:
				ptrans=affinity.rotate(porig,best[2],rotcenter, use_radians = True)
				#ptrans=srotate(porig,best[2],0,0)
				ptrans = affinity.translate(ptrans,best[0],best[1])
				#ptrans = translate(ptrans,best[0],best[1])
				
				#polygon_utils_cam.polyToMesh(p,0.1)#debug visualisation
				keep=[]
				print(best[0],best[1])
				#print(len(ptrans.exterior))
				#npoly=allpoly.union(ptrans)
				'''
				for ci in range(0,len(allpoly)):
					cminx,cmaxx,cminy,cmaxy=allpoly.boundingBox(ci)
					if direction=='X' and cmaxx>mindist-.1:
							npoly.addContour(allpoly[ci])
					if direction=='Y' and cmaxy>mindist-.1:
							npoly.addContour(allpoly[ci])
				'''			
				#allpoly=npoly
				placedpolys.append(ptrans)
				allpoly=prepared.prep(sgeometry.MultiPolygon(placedpolys))
				#*** temporary fix until prepared geometry code is set up properly
				#allpoly=sgeometry.MultiPolygon(placedpolys)
				
				#polygon_utils_cam.polyToMesh(allpoly,0.1)#debug visualisation
				
				#for c in p:
				#	allpoly.addContour(c)
				#cleanup allpoly
				print(iter,hits,besthit)
			if not placed:
				if direction=='Y':
					x+=shift
					mindist=y
					if (xmax+shift>sheetsizex):
						x=x-xmin
						y+=shift
				if direction=='X':
					y+=shift
					mindist=x
					if (ymax+shift>sheetsizey):
						y=y-ymin
						x+=shift
				if rotate: rot+=rotchange
			iter+=1
		i+=1
	t=time.time()-t

	polygon_utils_cam.shapelyToCurve('test',sgeometry.MultiPolygon(placedpolys),0)
	print(t)
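Before packing, each curve's islands are merged into a single padded silhouette (the unary_union, buffer, simplify sequence near the top of packCurves). A standalone sketch of that preprocessing step (the sample squares and the distance value are illustrative):

# Standalone sketch of the silhouette-merge step.
from shapely.geometry import Polygon
from shapely.ops import unary_union

distance = 0.003  # pack distance, as in the cam_pack settings (illustrative value)
islands = [Polygon([(0, 0), (1, 0), (1, 1), (0, 1)]),
           Polygon([(0.9, 0), (2, 0), (2, 1), (0.9, 1)])]

silhouette = unary_union(islands)                  # weld overlapping islands into one poly
silhouette = silhouette.buffer(distance / 1.5, 8)  # pad so parts keep clearance
silhouette = silhouette.simplify(0.0003)           # drop near-collinear points
print(silhouette.bounds)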
Example No. 24
"""
import numpy as np
from matplotlib.pyplot import contourf
from shapely import speedups
from shapely.ops import unary_union, transform
from shapely.geometry import Polygon, MultiPolygon
from geopandas import GeoDataFrame
from fiona import _err as fiona_err
try:
    from jenkspy import jenks_breaks
except ImportError:
    jenks_breaks = None
from .helpers_classif import maximal_breaks, head_tail_breaks
from .compute import _compute_stewart

if speedups.available and not speedups.enabled:
    speedups.enable()


def quick_stewart(input_geojson_points,
                  variable_name,
                  span,
                  beta=2,
                  typefct='exponential',
                  nb_class=None,
                  nb_pts=10000,
                  resolution=None,
                  mask=None,
                  user_defined_breaks=None,
                  variable_name2=None,
                  output="GeoJSON",
                  **kwargs):
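The function body is truncated here, so what follows is a hedged usage sketch based only on the signature above; the file names, field name, and parameter values are placeholders, not from the original module:

# Illustrative call; all values are placeholders.
smoothed = quick_stewart(
    "my_points.geojson",    # input_geojson_points
    "population",           # variable_name: field holding the stock to smooth
    span=100000,            # bandwidth of the interaction function, in map units
    beta=2,                 # distance-friction exponent
    typefct="exponential",
    nb_class=8,             # number of contour classes
    resolution=50000,       # cell size of the underlying grid
    mask="my_region.geojson",
    output="GeoJSON")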
Example No. 25
#!/usr/bin/env python
from matplotlib import pyplot

from fabtotum import gerber
from fabtotum.gerber.render import *
from fabtotum.toolpath import *
from fabtotum.gcode import *

from shapely.geometry import Point, Polygon, LineString
from shapely.ops import linemerge

from shapely import speedups
if speedups.available:
    speedups.enable()

BLUE = '#6699cc'
RED =  '#ff0000'
GREEN =  '#00cc00'
GRAY = '#999999'

colors = ['#ff0000','#00cc00', '#0000cc', '#cccc00', '#cc00cc', '#00cccc']

def plot_rect(ax, x,y,w,h, color=GRAY, width=0.5, mirror_x=0):
    if mirror_x:
        x = [x,x-w,x-w,x,x]
        y = [y,y,y+h,y+h,y]
    else:
        x = [x,x+w,x+w,x,x]
        y = [y,y,y+h,y+h,y]
    ax.plot(x, y, color=color, linewidth=width, solid_capstyle='round', zorder=1)
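A short usage sketch for plot_rect (the figure setup is standard matplotlib; coordinates and sizes are illustrative):

# Illustrative usage of plot_rect; values are placeholders.
fig = pyplot.figure()
ax = fig.add_subplot(111)
plot_rect(ax, 0, 0, 10, 5, color=BLUE, width=1.0)              # board outline
plot_rect(ax, 10, 0, 3, 2, color=RED, width=0.5, mirror_x=1)   # mirrored rectangle
ax.set_aspect('equal')
pyplot.show()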
Example No. 26
    def __init__(self):
        self.seed()
        self.viewer = None

        self.continuous = False  # Denotes if the action space is continuous
        dense_reward = False  # True if the reward is dense, else it is +1 on success and -1 otherwise
        self.angular_movement = False  # When True dynamics are modeled with steer and thrust, else they are modeled with dx, dy
        self.observation_with_image = False  # When True return an image alongside the observation
        self.reset_always = False  # When True change the environment origin, target and obstacles every time it resets.
        self.controlled_speed = False  # When True the task is to reach the target with a controlled speed, else is just reach the target
        # Shapely Speedups
        # if speedups.available:
        speedups.enable()

        # TODO things that shouldn't be here
        self.render_episode = False
        self.num_envs = 1

        # # move this outside the env (check if they serve the resetalways flag)
        # self.latest_results = collections.deque(maxlen=150)

        self.done = [False]
        self.reward_range = (-OUT_OF_BOUNDS_REWARD, POSITIVE_REWARD)
        self.spec = None

        # Counters
        self.episode = 0
        self.total_timestep = 0
        self.episode_success = True
        self.n_done = 0
        self.oo_time = 0
        self.oob = 0
        self.crashes = 0

        # Rendering variables
        self.trajectory = copy.deepcopy(EMPTY_TRAJECTORY)
        self.RENDER_FLAG = False

        # Simulation parameters
        self.time_step = 0.1  # To obtain trajectories

        u_max = MAX_VEL / np.sqrt(2)
        v_max = MAX_VEL / np.sqrt(2)
        u_min = MAX_VEL / np.sqrt(2)
        v_min = MAX_VEL / np.sqrt(2)

        # Dynamic characteristics
        um = max([abs(u_max), abs(u_min)])
        vm = max([abs(v_max), abs(v_min)])
        self.w = np.sqrt(um ** 2 + vm ** 2)
        self.F = SCALING_FACTOR  # It was 20 in AirSim, I think
        self.k = self.F / self.w

        # Obstacles Variables
        self.obstacles = []
        self.prep_obstacles = []
        self.obstacle_centers = np.array([])
        self.culled = []

        # The image is implemented as partial observability
        perception_shape = (IMAGE_HEIGTH, IMAGE_WIDTH, 3)
        observation_shape = (OBS_IMAGE_HEIGTH, OBS_IMAGE_WIDTH, 3)
        self.field_of_view = np.zeros(perception_shape, np.uint8)

        # State definition
        self.s = {'x': [0.0],  # Position.x
                  'y': [0.0],  # Position y
                  'u': [0.0],  # Velocity x
                  'v': [0.0],  # Velocity y
                  'target_x': [0.0],
                  'target_y': [0.0],
                  'origin_x': [0.0],
                  'origin_y': [0.0]
                  }

        # Perception matrix initialization, only with cartesian (dx, dy) dynamics.
        self.perception_matrix = create_perception_matrix(dist=PERCEPTION_DISTANCE, n_radius=16, pts_p_radius=6)
        self.perception_ray_points = create_perception_distance_points(max_dist=PERCEPTION_DISTANCE, n_radius=16)
        n_discrete_actions = 4  # +dx, -dx, +dy, -dy
        env_act_lows = np.array([-1, -1])  # dx and dy
        env_act_high = np.array([1, 1])

        # Define the action space either CONTINUOUS or DISCRETE.

        self.action_space = spaces.Discrete(n_discrete_actions)

        ENV_OBS_LOWS = np.zeros(shape=self.get_cartesian_observation().shape)
        ENV_OBS_HIGH = np.ones(shape=self.get_cartesian_observation().shape)

        self.observation_space = spaces.Box(low=ENV_OBS_LOWS, high=ENV_OBS_HIGH, dtype=np.float32)

        self.reward_function = self._airsim_no_speed_reward  # no speed reward
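The obstacles/prep_obstacles pair above suggests prepared geometries are used for fast collision checks in the step loop. A hedged sketch of how such a check might look (the collides helper is hypothetical, not part of the original class):

# Hedged sketch: prepared obstacles for fast point-in-obstacle tests.
from shapely.geometry import Point, Polygon
from shapely.prepared import prep

obstacles = [Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])]
prep_obstacles = [prep(ob) for ob in obstacles]  # prepare once, reuse every step

def collides(x, y):
    point = Point(x, y)
    return any(p.contains(point) for p in prep_obstacles)

print(collides(0.5, 0.5), collides(2.0, 2.0))  # True False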