Example 1
def interpolation(shp_path, out_path, tmp_path, longs, lats, r_value):
    Write2CSV(tmp_path, longs, lats, r_value)
    Reed_CSV(tmp_path)
    xnew = [105.487122, 111.241882]
    ynew = [31.706726, 39.585327]

    filename = os.path.join(tmp_path, "test_inter.vrt")
    tmp_path_file_aaa = os.path.join(tmp_path, "aaa_test.tif")
    cutrst = gdal.Grid(destName=tmp_path_file_aaa,
                       srcDS=filename,
                       format='GTiff',
                       width=300,
                       height=500,
                       outputBounds=[xnew[0], ynew[1], xnew[1], ynew[0]],
                       outputSRS="WGS84",
                       noData=-100,
                       algorithm="invdist")
    cutrst.FlushCache()
    cutrst = None
    print("Invdist OK!")
    gdal.Warp(out_path,
              tmp_path_file_aaa,
              cutlineDSName=shp_path,
              dstNodata=-99,
              cropToCutline=True,
              dstSRS="WGS84")
    # os.remove(tmp_path_file_aaa)
    # os.remove(filename)

    print("Mask OK!")
Example 2
def idw(output_file, point_station_file):
    """
	idw空间插值
	:param output_file:插值结果
	:param point_station_file: 矢量站点数据
	:return:
	"""
    opts = gdal.GridOptions(
        algorithm=
        "invdistnn:power=2.0:smothing=0.0:radius=1.0:max_points=12:min_points=0:nodata=0.0",
        format="GTiff",
        outputType=gdal.GDT_Float32,
        zfield="WAVEHEIGHT")
    gdal.Grid(destName=output_file, srcDS=point_station_file, options=opts)
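A minimal usage sketch for Example 2; the file names are placeholders, assuming a point layer that carries a numeric WAVEHEIGHT attribute (the zfield above).

# Hypothetical call: point_station.shp stands in for a vector point file with a WAVEHEIGHT field.
idw("wave_height_idw.tif", "point_station.shp")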
Example 3
    def interpolation(shp_path, out_tiff_path, z_field='value', method='idw', cell_size=None, output_bounds=None,
                      algo_str=None, outputType=None, res_format= 'GTiff'):
        """
        插值,反距离加权(idw),最邻近(nearest),移动均值(average)
        :param shp_path: 用于插值的点 shp 路径
        :param out_tiff_path: 输出的 tiff 路径
        :param z_field:  插值使用的字段
        :param method:  插值方法
        :param cell_size:  插值后的 tiff 的分辨率
        :param output_bounds:  插值输出范围(x_min, y_min, x_max, y_max)
        :param algo_str: 算法参数设置
        :param outputType: 输出数据类型,如 gdal.GDT_Float64
        :param res_format: 返回类型, GTiff 输出为文件, MEM 输出为临时文件
        :return: None
        """
        # ------------------------------------------------------------------------------------------------------------------
        # algorithm description string
        if method == 'idw' and algo_str is None:
            algo_str = "invdist:power=2.0:smoothing=0.0:radius1=0.0:radius2=0.0:angle=0.0:" \
                       "max_points=0:min_points=0:nodata=0.0"
        elif method == 'average' and algo_str is None:
            # algo_str = "average:radius1=0.0:radius2=0.0:angle=0.0:min_points=0:nodata=0.0"
            raise TypeError('not supported yet')
        elif method == 'nearest':
            # algo_str = "minimum={0}:maximum={1}:range={2}:count={3}:average_distance={4}:average_distance_pts={5}"
            raise TypeError('not supported yet')
        elif method in ['idw', 'average', 'nearest']:
            pass  # use the algo_str supplied by the caller as-is
        else:
            raise ValueError('method can only be : idw | average | nearest')
        # ------------------------------------------------------------------------------------------------------------------
        # derive width and height from cell_size
        if cell_size is None:
            width, height = None, None
        else:
            if output_bounds:
                x_min, y_min, x_max, y_max = output_bounds
                width, height = int((x_max - x_min) / float(cell_size)), int((y_max - y_min) / float(cell_size))
            else:
                # width, height = None, None   # could instead derive the extent from the input points
                raise ValueError('warning: cell_size has no effect when output_bounds is not provided')
        # ------------------------------------------------------------------------------------------------------------------
        grid_option = gdal.GridOptions(algorithm=algo_str, zfield=z_field, outputBounds=output_bounds, width=width,
                                       height=height, outputType=outputType, format=res_format)  # set the algorithm and the z field

        gdal.Grid(out_tiff_path, shp_path, options=grid_option)  # run the interpolation
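A usage sketch for Example 3 with hypothetical paths and bounds; because cell_size is only honoured together with output_bounds, both are supplied here. In the source the function appears inside a class, so it is assumed here to be callable directly.

from osgeo import gdal

# Hypothetical call: grid the "value" field of stations.shp at 0.01-degree cells
# over an explicit extent given as (x_min, y_min, x_max, y_max).
interpolation(shp_path='stations.shp',
              out_tiff_path='stations_idw.tif',
              z_field='value',
              method='idw',
              cell_size=0.01,
              output_bounds=(110.0, 30.0, 112.0, 32.0),
              outputType=gdal.GDT_Float32)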
Example 4
def csvToGrid(fn):
    vrt_fn = fn.replace(".csv", ".vrt")
    #lyr_name = fn.replace('.csv', '')
    out_tif = fn.replace('.csv', '.tiff')
    with open(vrt_fn, 'w') as fn_vrt:
        fn_vrt.write('<OGRVRTDataSource>\n')
        fn_vrt.write('\t<OGRVRTLayer name="%s">\n' % 'cluster')
        fn_vrt.write('\t\t<SrcDataSource>%s</SrcDataSource>\n' % fn)
        fn_vrt.write('\t\t<GeometryType>wkbPoint</GeometryType>\n')
        fn_vrt.write(
            '\t\t<GeometryField encoding="PointFromColumns" x="x" y="y" z="kmeans_cluster"/>\n'
        )
        fn_vrt.write('\t</OGRVRTLayer>\n')
        fn_vrt.write('</OGRVRTDataSource>\n')

    output = gdal.Grid(out_tif, vrt_fn)
    # below, using the settings from the question - not tested on a large sample, but it also generates a file
    #output2 = gdal.Grid('outcome2.tif','name.vrt', algorithm='invdist:power=2.0:smoothing=1.0')
    return output
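Example 4 expects the CSV to already contain x, y and kmeans_cluster columns, and it hard-codes the VRT layer name 'cluster'. A hypothetical input and call (data values are illustrative):

# Hypothetical input: the CSV must provide "x", "y" and "kmeans_cluster" columns (the names wired
# into the VRT). Naming the file cluster.csv keeps the CSV layer name equal to the VRT layer name.
with open("cluster.csv", "w") as f:
    f.write("x,y,kmeans_cluster\n")
    f.write("105.5,31.8,1\n")
    f.write("106.2,32.4,2\n")
    f.write("107.0,33.1,1\n")

ds = csvToGrid("cluster.csv")   # writes cluster.vrt and cluster.tiff (default invdist gridding)
ds = None                       # release the dataset so the GeoTIFF is flushed to disk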
Example 5
from osgeo import gdal, ogr, osr


def gdal_grid_image_band(
        image_to_warp,  # type: ndarray
        output_fname,  # type: str
        image_x_coords,  # type: ndarray
        image_y_coords,  # type: ndarray
        npix_x,  # type: int
        npix_y,  # type: int
        projection_wkt,  # type: str
        nodata_val=0  # type: int
):  # type: (...) -> None

    # Create the point layer in an in-memory vector dataset via GDAL's unified
    # dataset API: an OGR DataSource has no SetProjection(), so the CRS is set on
    # the layer instead, and a gdal.Dataset can be handed directly to gdal.Grid.
    driver = gdal.GetDriverByName("Memory")
    datasource = driver.Create('memData', 0, 0, 0, gdal.GDT_Unknown)

    srs = osr.SpatialReference()
    srs.ImportFromWkt(projection_wkt)

    # image_to_warp is indexed [row, col]; ndarray.size is a scalar, so use .shape
    ys, xs = image_to_warp.shape

    layer = datasource.CreateLayer('image', srs=srs, geom_type=ogr.wkbPoint)
    layer.CreateField(ogr.FieldDefn("Z", ogr.OFTReal))

    for i in range(xs):
        for j in range(ys):
            feature = ogr.Feature(layer.GetLayerDefn())
            point = ogr.Geometry(ogr.wkbPoint)
            point.AddPoint(image_x_coords[j, i], image_y_coords[j, i])
            feature.SetGeometry(point)

            feature.SetField("Z", image_to_warp[j, i])

            layer.CreateFeature(feature)
            feature = None

    dst_fname = output_fname

    ops = gdal.GridOptions(width=npix_x, height=npix_y, noData=nodata_val)

    dst_dataset = gdal.Grid(dst_fname, datasource, options=ops)
    datasource = None  # release the in-memory point layer
    dst_dataset = None

    print("done")
Example 6
def calculateNitrateRaster(kValue, destinationPath):
    """does the raster interpelation for the project based on a provided k value using gdal"""
    print "calculate nitrate raster called!"
    algorithmOptions = 'invdist:power=' + str(
        kValue) + ':max_points=30:min_points=5:nodata=-999'
    outBounds = [
        294822.7680654586292803, 225108.6378120621666312,
        774373.8599878594977781, 759802.2332637654617429
    ]
    outRasterSRS = osr.SpatialReference()
    outRasterSRS.ImportFromEPSG(3070)
    gdal.Grid(destinationPath,
              wellShapefilePath,
              width=944,
              height=1053,
              outputType=gdal.GDT_Float32,
              zfield="nitr_ran",
              algorithm=algorithmOptions,
              outputBounds=outBounds,
              outputSRS=outRasterSRS)
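Example 6 reads wellShapefilePath from module scope; a hypothetical setup and call, where the path, field name and k value are placeholders rather than the project's real data:

from osgeo import gdal, osr

wellShapefilePath = "well_nitrate.shp"   # placeholder: point shapefile with a numeric "nitr_ran" field
calculateNitrateRaster(2.0, "nitrate_idw.tif")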
Example 7
y_diff = (y_max - y_min) / resolution
print(str(x_diff) + ' ' + str(y_diff))

# make vrt file (determines format for gdal_grid)
vrtfile = 'outputformatIFDM.vrt'
f = open(vrtfile, 'w')
content='<OGRVRTDataSource> \n <OGRVRTLayer name="'+result_ifdm_file+'"> \n <SrcDataSource>'+result_ifdm+ ''\
    +'</SrcDataSource> \n <GeometryType>wkbPoint</GeometryType> \n ' \
    +'<LayerSRS>EPSG:31370</LayerSRS> <GeometryField encoding="PointFromColumns" x="X" y="Y" z="'+pollutant+'"' \
    +'/> \n </OGRVRTLayer> \n </OGRVRTDataSource>'
f.write(content)
f.close()

# grid data
gridded=gdal.Grid(gridded_ifdm,vrtfile, algorithm = 'linear:radius=0:nodata=-9999',\
    outputBounds = [x_min, y_max, x_max , y_min],  creationOptions = ['BIGTIFF=yes', 'COMPRESS=deflate'],\
    outputType = gdal.GDT_Float32,width = x_diff, height = y_diff, \
    zfield = pollutant)
del gridded

############################
##  gridding ospm results ##
############################
print('Gridding OSPM results')
# take mean over 10m
ospm = pd.read_csv(result_ospm)
ospm['geometry'] = ospm.apply(lambda p: Point(p.X, p.Y), axis=1)
ospm = gpd.GeoDataFrame(ospm, crs=crs)

buffer_points = ospm.geometry.buffer(10)
buffer_points = gpd.GeoDataFrame(buffer_points, columns=['geometry'], crs=crs)
buffer_points.index = buffer_points.index.rename('left_index')
Example 8
def main(raster_path):
    os.chdir(os.path.join('Source_Data', "DEM_rasters"))

    latlon_crs = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'

    data_path = os.path.join(orig_dir, 'intermediate_data',
                             'R_download_recs.txt')

    crs = None
    files = sorted([f for f in os.listdir() if f[-4:] == '.img'])
    with progress_saver(data_path) as dic:
        for file in files[dic['i']:]:
            try:
                with rasterio.open(file) as r:
                    if not crs:
                        crs = r.crs
                    print(dic['i'])
                    coords = (r.bounds[0], r.bounds[-1])
                    gdf_point = gpd.GeoDataFrame(geometry=[Point(*coords)])
                    gdf_point.crs = crs
                    point = gdf_point.to_crs(latlon_crs).geometry.iloc[0]
                    res = json.loads(get_R_fac((point.x, point.y)))['rfactor']
                    dic['data'].append({
                        'x': coords[0],
                        'y': coords[1],
                        'r_factor': res
                    })
                    time.sleep(2)
            except rasterio.RasterioIOError:
                pass
            dic['i'] += 1
    if not crs:
        crs = rasterio.open(files[0]).crs

    #to do: turn data into grid, and interpolate missing values.

    data = dic['data']

    data = [r for r in data if r['x'] != 0]

    points = [(round(r['x'], 2), round(r['y'], 2)) for r in data]
    values = [r['r_factor'] for r in data]

    n_x = len(set([r['x'] for r in data]))
    n_y = len(set([r['y'] for r in data]))

    xmin = min([p[0] for p in points])
    xmax = max([p[0] for p in points])
    ymin = min([p[1] for p in points])
    ymax = max([p[1] for p in points])

    cell_y = (ymax - ymin) / n_y
    cell_x = (xmax - xmin) / n_x

    gdf = gpd.GeoDataFrame(values,
                           geometry=gpd.points_from_xy([d[0] for d in points],
                                                       [d[1] for d in points]))
    gdf.rename(columns={0: 'r_factor'}, inplace=True)
    #gdf['r_factor'] = gdf['r_factor'].fillna(-9999)

    gdf.crs = crs
    os.chdir('..')
    os.chdir('..')

    shp_path = os.path.join('intermediate_data', 'rfactor_points')
    gdf.to_file(shp_path)

    options = gdal.GridOptions(
        width=n_x,   # number of distinct x coordinates -> raster columns
        height=n_y,  # number of distinct y coordinates -> raster rows
        zfield='r_factor',
        outputType=gdal.GDT_Float32,
    )
    ds = gdal.Grid(
        raster_path,
        shp_path,
        options=options,
    )
Example 9
    def analysisTest(self, cancer_file, nitrate_file, out_dir, IDW_value):
        print('Running the IDW Interpolation')
        # wPath = '/Users/Sigfrido/Documents/project1/geospatialProject1/data/files/well_nitrate/'

        #take the file paths from the GUI and run the analysis
        cancerTract = str(cancer_file)
        wellNitrate = str(nitrate_file)
        oPath = str(out_dir)
        IDW = IDW_value

        #files created during the analysis

        oTiff = '/test.tiff'
        #rasterOutput IDW
        wIDWresult = oPath + oTiff
        #Raster2Polygon
        rPolyShape = '/wellsPolygon.shp'
        #raster2CSV
        cPolyShape = '/wellsPoint.csv'
        #csv2pointsShape
        pPolyShape = '/wellsPoints.shp'
        #Polygon Results
        pResults = oPath + rPolyShape
        #CSVresults
        cResults = oPath + cPolyShape
        #csv2Points
        shpPntResults = oPath + pPolyShape

        nitrate_IDW = 'IDW_Results.shp'

        nitrate_IDW_results = oPath + nitrate_IDW
        #######################################################################################
        file = ogr.Open(wellNitrate)
        # print(file)
        shape = file.GetLayer(0)
        #first feature of the shapefile
        feature = shape.GetFeature(0)
        #print(feature)
        first = feature.ExportToJson()

        #######################################################################################
        #1 Nitrate levels should use Spatial Interpolation Inverse Weighted Method (IDW)

        print('starting the GDAL Interpolation')
        # option = gdal.GridOptions(format='GTiff',algorithm='invdist:power={0}'.format(IDW),outputSRS='EPSG:4326',zfield='nitr_ran')
        option = gdal.GridOptions(format='GTiff',
                                  algorithm='invdist:power={0}'.format(IDW),
                                  zfield='nitr_ran')

        out = gdal.Grid(
            wIDWresult,  #results
            wellNitrate,  #shapefile
            options=option)  #options

        # out.FlushCache()
        out = None
        del out
        #convert to a CSV
        print("converting out to CSV for next step")
        os.system(
            "gdal_translate -of xyz -co ADD_HEADER_LINE=YES -co COLUMN_SEPARATOR=',' {0} {1}"
            .format(wIDWresult, cResults))

        print("converting CSV to Shapefile using FIONA")
        import csv
        from shapely.geometry import Point, mapping
        from fiona import collection

        schema = {'geometry': 'Point', 'properties': {'Z': 'float'}}
        with collection(shpPntResults, "w", "ESRI Shapefile",
                        schema) as output:
            with open(cResults, 'r') as f:
                reader = csv.DictReader(f)

                for row in reader:
                    point = Point(float(row['X']), float(row['Y']))
                    output.write({
                        'properties': {
                            'Z': row['Z']
                        },
                        'geometry': mapping(point)
                    })
        #######################################################################################
        #2  Aggregated Points(well data ) to census tract information

        print("Aggregating Points to Census Tract Shapefile")
        cancerFile = gpd.read_file(cancerTract)
        wellPointsShape = gpd.read_file(shpPntResults)
        # print(wellPointsShape.head())
        cancerFile.crs = wellPointsShape.crs
        wellPointsShape = wellPointsShape.rename(columns={'Z': 'NewNitrate'})

        print(
            "Using geopandas to join Cancer Census Tracts with Well Points Shapefile"
        )
        join = gpd.sjoin(cancerFile, wellPointsShape, how="left")

        # Save to disk
        join.to_file(nitrate_IDW_results)
        #prints file path
        # print(nitrate_IDW_results)#not needed
        print("Building The Regression Residuals Results Map")
        self.results_residual_map()
        print("Analysis Complete")
Example 10
    print(str(x_diff)+' '+str(y_diff))

    # make vrt file (determines format for gdal_grid)
    vrtfile=folder_tmp+'outputformat'+pol+stat+'.vrt'
    f= open(vrtfile, 'w')
    content='<OGRVRTDataSource> \n <OGRVRTLayer name="results'+pol+'"> \n <SrcDataSource>'+tmpfile+ ''\
        +'</SrcDataSource> \n <GeometryType>wkbPoint</GeometryType> \n ' \
        +'<LayerSRS>EPSG:4326</LayerSRS> <GeometryField encoding="PointFromColumns" x="Lon" y="Lat" z="'+ \
        stat+'"/> \n </OGRVRTLayer> \n </OGRVRTDataSource>'
    f.write(content)
    f.close()

    # grid data
    result=folder_output+'/'+stat+pol+'.tif'   
    #commando=gdal_grid+' -a LINEAR:radius=0:nodata=-9999 -outsize '+str(int(x_diff))+' '+str(int(y_diff))+' -zfield '+stat+' #'+vrtfile+' '+result
    #print commando
    #os.system(commando)  
    gridded=gdal.Grid(result,vrtfile, algorithm = 'linear:radius=0:nodata=-9999',\
        outputBounds = [x_min, y_max, x_max , y_min],  creationOptions = ['BIGTIFF=yes', 'COMPRESS=deflate'],\
        outputType = gdal.GDT_Float32,width = x_diff, height = y_diff, \
        zfield = stat)
    del gridded
            
# remove temporary folder
#for i in os.listdir(folder_tmp):
#    os.remove(folder_tmp+i)
#os.rmdir(folder_tmp) 

print(' OK')
    
Example 11
    def ptsTime2Raster(self,
                       out_name,
                       var_list=None,
                       outputBounds=None,
                       outCRS='WGS84',
                       mask_shp=None,
                       buffer_mask=0):
        '''
        This method converts point time series into raster data series by linear interpolation.

        input:
            :param out_name  = output base name.
                If out_name = 'D:/output/dir/path/raster_name.tif', the output
                raster file names will have the following format:
                'D:/output/dir/path/raster_name_variable_name_time_stamp.tif'

            :param var_list (optional) = list of variables to convert to raster files.
                If not provided, all variables available in the dataset are converted.


            :param outputBounds (optional) = a list with the following format:

                [upperLeft Longitude, upperLeft Latitude, lowerRight Longitude, lowerRight Latitude]

                This defines the interpolation area as a rectangle delimited by its upper-left
                and lower-right corner coordinates.

                default value: the rectangle defined by the upper-left and lower-right
                coordinates of the dataset.


            :param outCRS (optional) = output Coordinate Reference System.
                                        default value: WGS84.

            :param mask_shp (optional) = if provided, the interpolated raster is cropped to the shapefile boundaries.

            :param buffer_mask (optional) = buffer around the shapefile mask area (in percent).
                                            Only used when a mask shapefile is provided.
                                            default value: 0%.

        output:
            return: multiple raster files (.tif format)

        '''

        if not var_list:
            var_list = self._data.columns.drop(
                ['latitude', 'longitude', 'geometry'], errors='ignore')

        if not outputBounds:
            geom = self._data.geometry.drop_duplicates()
            x1 = geom.x.max()
            x2 = geom.x.min()
            y1 = geom.y.max()
            y2 = geom.y.min()

            if x1 * x2 >= 0:
                if abs(x1) < abs(x2):
                    aux = x1
                    x1 = x2
                    x2 = aux
            if y1 * y2 >= 0:
                if abs(y1) < abs(y2):
                    aux = y1
                    y1 = y2
                    y2 = aux

#            outputBounds = [x*1.02 for x in [geom.x.max(), geom.y.min(), geom.x.min(), geom.y.max()]]
            outputBounds = [x1, y2, x2, y1]

        if '.tif' in out_name:
            out_name = out_name.replace('.tif', '')

        if mask_shp:
            mask_shp = gpd.GeoDataFrame.from_file(mask_shp)

        out_dir = os.path.dirname(out_name)
        timeSteps = self._data.sort_index().index.drop_duplicates()

        for time in timeSteps:
            timeName = str(time).replace(' ', '_').replace('-', '_').replace(':', '_')

            for varName in var_list:
                vrt_fn = os.path.join(out_dir, varName + 'Vrt.vrt')
                lyr_name = varName
                out_tif = '_'.join([out_name, varName, timeName]) + '.tif'
                tempPath = os.path.join(out_dir, varName + '.csv')
                self._data[[varName, 'latitude',
                            'longitude']].loc[time].to_csv(tempPath,
                                                           header=True,
                                                           index=False)

                with open(vrt_fn, 'w') as fn_vrt:
                    fn_vrt.write('<OGRVRTDataSource>\n')
                    fn_vrt.write('\t<OGRVRTLayer name="%s">\n' % lyr_name)
                    fn_vrt.write('\t\t<SrcDataSource>%s</SrcDataSource>\n' %
                                 tempPath)
                    fn_vrt.write('\t\t<SrcLayer>%s</SrcLayer>\n' % lyr_name)
                    fn_vrt.write('\t\t<GeometryType>wkbPoint</GeometryType>\n')
                    fn_vrt.write(
                        '\t\t<GeometryField encoding="PointFromColumns" x="longitude" y="latitude" z="%s"/>\n'
                        % varName)
                    fn_vrt.write('\t</OGRVRTLayer>\n')
                    fn_vrt.write('</OGRVRTDataSource>\n')

                gridOp = gdal.GridOptions(
                    format='GTiff',
                    outputBounds=outputBounds,
                    algorithm='linear:radius=0.0:nodata=-9999',  # no spaces around '=' so nodata is parsed
                    outputSRS=outCRS)

                if isinstance(mask_shp, gpd.GeoDataFrame):
                    temp_tif = out_name + '_' + varName + '.tif'
                    gdal.Grid(temp_tif, vrt_fn, options=gridOp)
                    self._cropRst(temp_tif,
                                  mask_shp,
                                  out_tif,
                                  remove=True,
                                  buffer_mask=buffer_mask)
                else:
                    gdal.Grid(out_tif, vrt_fn, options=gridOp)

                os.remove(tempPath)
                os.remove(vrt_fn)
        return
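A usage sketch for Example 11. The enclosing class is not shown in the snippet, so pts is assumed to be an instance whose self._data is a time-indexed GeoDataFrame with latitude, longitude, geometry and one column per variable; paths, bounds and names are placeholders.

# Hypothetical call: one raster per time stamp for the 'rain_mm' column, cropped to a basin shapefile.
pts.ptsTime2Raster(out_name=r'D:\output\rain.tif',
                   var_list=['rain_mm'],
                   outputBounds=[-48.0, -19.0, -46.0, -21.0],   # [ulx, uly, lrx, lry]
                   outCRS='WGS84',
                   mask_shp=r'D:\shapes\basin.shp',
                   buffer_mask=2)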
Example 12

def find_csv_filenames(path_to_dir, suffix=".csv"):
    relativePath = os.path.join(path_to_dir, "*.csv")  # works whether or not path_to_dir ends with a separator
    print(relativePath)
    # filenames = glob.glob(relativePath)
    filenames = [os.path.relpath(x) for x in glob(relativePath)]
    return filenames


csvfiles = find_csv_filenames(dir_with_csvs)
for fn in csvfiles:
    vrt_fn = fn.replace(".csv", ".vrt")
    lyr_name = fn.replace('.csv', '')
    out_tif = fn.replace('.csv', '.tiff')
    with open(vrt_fn, 'w') as fn_vrt:
        fn_vrt.write('<OGRVRTDataSource>\n')
        fn_vrt.write('\t<OGRVRTLayer name="%s">\n' % lyr_name)
        fn_vrt.write('\t\t<SrcDataSource>%s</SrcDataSource>\n' % fn)
        fn_vrt.write('\t\t<GeometryType>wkbPoint</GeometryType>\n')
        fn_vrt.write(
            '\t\t<GeometryField encoding="PointFromColumns" x="Lon" y="Lat" z="Ref"/>\n'
        )
        fn_vrt.write('\t</OGRVRTLayer>\n')
        fn_vrt.write('</OGRVRTDataSource>\n')

    output = gdal.Grid(out_tif, vrt_fn)  # grid each CSV via its VRT (inside the loop so every file is processed)

# below, using the settings from the question - I don't have a sample large enough to properly test it, but it generates a file as well
output2 = gdal.Grid('outcome2.tif',
                    'name.vrt',
                    algorithm='invdist:power=2.0:smoothing=1.0')
Example 13
def convert_xyz_to_raster(x, y, z, res, method, outfile):
	"""Function to produce raster from point data
	presented with X, Y, Z Coordinates

	:param float x: x coordinates
	:param float y: y coordinates
	:param float z: z coordinates or other variable
		that you want to display in the raster
	:param float res: resolution of the output 
		raster in relevant projection units
	:param str outfile: file to save raster.
		NOTE: if size of raster is large, outfile
		will be split into numbered raster files
	"""

	bean = 500 # Minimum size (square) of output raster before cutting it into smaller chunks 
		# This is to resolve an issue with the slowness of gdal.Grid

	# Determine size of output grid based on desired resolution
	w1 = np.ceil( np.ptp(x) / res )
	h1 = np.ceil( np.ptp(y) / res)
	# Define the algorithm for extrapolating data:
	alg = method+':radius1='+str(res/2)+':radius2='+str(res/2)+':nodata=-9999'

	# As long as output raster size is less than bean x bean, output one raster:
	if w1 * h1 < bean**2:
	
		xyz = np.column_stack((x, y, z))
		# Sort by x, then by y:
		xyz = xyz[xyz[:,0].argsort()]
		xyz = xyz[xyz[:,1].argsort(kind='mergesort')]
		
		write_csv_file('grid.csv',xyz) # Write temporary xyz file 
		#Convert to Grid
		gdal.Grid(outfile,'grid.vrt', width=w1, height=h1, algorithm=alg)
		os.remove('./grid.csv') # Remove temporary xyz file
		
		print()
		print('Produced single map with filename {}'.format(outfile))
		print()
	# If size larger than bean x bean, split large grid into subgrids for faster processing:
	else:
		#merge_command = ['', '-o', outfile] #attempting merging rasters; this never worked out
		# Cutting into squares size bean x bean:
		Wind = int(np.ceil(w1/bean))
		Hind = int(np.ceil(h1/bean))
		nn = 0
		for W in range(Wind):
			x1 = W*bean*res + np.min(x)
			x2 = (W+1)*bean*res + np.min(x)
			xind = (x >= x1) & (x < x2)
			if W < Wind - 1:
				wt = bean
			else:
				wt = w1 % bean  # last column of chunks may be narrower than bean
			for H in range(Hind):
				y1 = (H)*bean*res + np.min(y)
				y2 = (H+1)*bean*res + np.min(y)
				yind = (y >= y1) & (y < y2)
				ind = xind & yind
	
				# Check for data within current square:
				if any(ind):
					xyz_temp = np.column_stack((x[ind], y[ind], z[ind]))
					xyz_temp = xyz_temp[xyz_temp[:,0].argsort()]
					xyz_temp = xyz_temp[xyz_temp[:,1].argsort(kind='mergesort')]
					if H < Hind - 1:
						ht = bean
					else:
						ht = h1 % bean  # last row of chunks may be shorter than bean

					out = outfile[:-4] + str(nn+1) + '.tif' # Numbered outfile
					write_csv_file('grid.csv',xyz_temp)
					gdal.Grid(out, 'grid.vrt', width=wt, height=ht, algorithm=alg)
					os.remove('./grid.csv') #Remove temporary xyz file
					#merge_command.append(out)  
					nn = nn+1
		print()
		print('Dataset too large for single grid production')
		print('Produced {} files with the following names: '.format(nn))
		for n in range(nn):
			print('{}'.format(outfile[:-4] + str(n+1) + '.tif'))
		print()
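A usage sketch for Example 13 with synthetic points. write_csv_file() and the companion grid.vrt are outside the snippet; the call assumes grid.vrt wraps grid.csv as an X/Y/Z point layer, as the gdal.Grid calls above imply, and all names and values are illustrative.

# Hypothetical usage: 2000 random points gridded at 10-unit resolution (single-raster branch).
import numpy as np

x = np.random.uniform(0, 1000, 2000)
y = np.random.uniform(0, 1000, 2000)
z = np.sin(x / 100.0) + np.cos(y / 100.0)
convert_xyz_to_raster(x, y, z, res=10.0, method='invdist', outfile='bathy.tif')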