def slp_asp_to_ell(DEM):
    '''Compute slope and aspect for a clipped DEM and fit an ellipse to them'''
    #Generate slope with GDAL
    slp = tmpdir + str(np.random.randint(0,1e6)) + '_reproj_slope.tif'
    gdal_comm = ['gdaldem', 'Slope', '-compute_edges', DEM, slp]
    process = subprocess.Popen(gdal_comm, stdout=subprocess.PIPE, stderr=FNULL)
    process.communicate()
    _ = process.wait()
    if not os.path.exists(slp):
        time.sleep(1)
    
    #Generate aspect with GDAL
    asp = tmpdir + str(np.random.randint(0,1e6)) + '_reproj_aspect.tif'
    gdal_comm = ['gdaldem', 'Aspect', '-compute_edges', DEM, asp]
    process = subprocess.Popen(gdal_comm, stdout=subprocess.PIPE, stderr=FNULL)
    process.communicate()
    _ = process.wait()
    if not os.path.exists(asp):
        time.sleep(1)
        
    #Load in the data and mask out NaNs
    slope = np.array(gdalnumeric.LoadFile(slp).astype(float))
    slope[slope < 0] = np.nan

    aspect = np.array(gdalnumeric.LoadFile(asp).astype(float))
    aspect[aspect < 0] = np.nan 
    
    ell, out_raw = fit_ellipse(slope, aspect)
    
    #Pull the center, eccentricity, and north/east steepening from the ellipse
    metadata = ell_meta(ell)
    
    return ell, out_raw, metadata
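# A minimal sketch (not from the example above): the gdaldem-then-LoadFile pattern used by
# slp_asp_to_ell, condensed into a self-contained helper. It assumes the GDAL command-line
# tools are on PATH; the paths in the commented calls are hypothetical.
import subprocess
import numpy as np
import gdalnumeric

def gdaldem_to_array(mode, dem_path, out_path):
    """Run `gdaldem <mode>` (e.g. 'slope' or 'aspect') and return the result as floats."""
    subprocess.check_call(['gdaldem', mode, '-compute_edges', dem_path, out_path])
    arr = gdalnumeric.LoadFile(out_path).astype(float)
    arr[arr < 0] = np.nan  # treat negative cells as nodata, mirroring the function above
    return arr

# slope = gdaldem_to_array('slope', 'dem.tif', '/tmp/slope.tif')
# aspect = gdaldem_to_array('aspect', 'dem.tif', '/tmp/aspect.tif')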
Example No. 2
def array_stack(ndvi,qflag,xrows1,xrows2,yrows1,yrows2): #ndvi and qflag should be lists of file paths
    index=np.arange(0,len(ndvi))
    arrstack=np.empty(shape=(46,abs(xrows2-xrows1),abs(yrows2-yrows1))) #dtype=float
    for nv,q,x in itertools.izip(ndvi,qflag,index):     
        arrndvi=gdalnumeric.LoadFile(nv)[xrows1:xrows2,yrows1:yrows2]
        #print "%s, x1=%s, x2=%s, y1=%s, y2=%s" %(nv,xrows1,xrows2,yrows1,yrows2)  
        arrqflag=gdalnumeric.LoadFile(q)[xrows1:xrows2,yrows1:yrows2] 
        mask_clouds=np.where(arrqflag==3,np.nan,arrndvi)
        arrstack[x,:,:]=mask_clouds
    return arrstack
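# Note: itertools.izip above exists only on Python 2; on Python 3 the built-in
# zip(ndvi, qflag) combined with enumerate() gives the same pairing of files and indices.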
def openImgs(img_files, gt_files, maps, only_img=False):
    imgs = [None] * len(img_files)
    gts = [None] * len(img_files)
    for m in maps:
        #print img_files[m]
        #print gt_files[m]
        imgs[m] = gdalnumeric.LoadFile(img_files[m])
        if not only_img:
            gts[m] = gdalnumeric.LoadFile(gt_files[m])
    return imgs, gts
Example No. 4
 def open(self, file=None, dtype=None):
     """
     Open a local file handle for reading and assignment
     :param file: full path to a raster file readable by GDAL
     :param dtype: optional numpy dtype (or dtype name) overriding the type reported for the file
     :return: None
     """
     # args[0]/file=
     if file is None:
         raise IndexError("invalid file= argument provided")
     # grab raster meta information from GeoRasters
     try:
         self.ndv, self.x_cell_size, self.y_cell_size, self.geot, self.projection, self.dtype = \
             get_geo_info(file)
     except Exception:
         raise AttributeError("problem processing file input -- is this "
                              "a raster file?")
     # args[1]/dtype=
     if dtype is not None:
         # override our shadow'd value from GeoRasters if
         # something was specified by the user
         self.dtype = dtype
     # re-cast our datatype as a numpy type, if needed
     if type(self.dtype) == str:
         self.dtype = NUMPY_TYPES[self.dtype.lower()]
     if self.ndv is None:
         self.ndv = _DEFAULT_NA_VALUE
     # low-level call to gdal with explicit type specification
     # that will store in memory or as a disc cache, depending
     # on the state of our _using_disc_caching property
     if self._using_disc_caching is not None:
         # create a cache file
         self.array = np.memmap(self._using_disc_caching,
                                dtype=self.dtype,
                                mode='w+',
                                shape=(self.y_cell_size, self.x_cell_size))  # (rows, cols) reported by get_geo_info
         # load file contents into the cache
         self.array[:] = gdalnumeric.LoadFile(
             filename=self.filename,
             buf_type=gdal_array.NumericTypeCodeToGDALTypeCode(
                 self.dtype))[:]
     # by default, load the whole file into memory
     else:
         self.array = gdalnumeric.LoadFile(
             filename=self.filename,
             buf_type=gdal_array.NumericTypeCodeToGDALTypeCode(self.dtype))
     # make sure we honor our no data value
     self.array = np.ma.masked_array(self.array,
                                     mask=self.array == self.ndv,
                                     fill_value=self.ndv)
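# A minimal sketch (not part of the method above) of the load-then-mask-nodata pattern it
# implements; 'input.tif' is a hypothetical path.
import numpy as np
import gdalnumeric
from osgeo import gdal

ndv = gdal.Open('input.tif').GetRasterBand(1).GetNoDataValue()
arr = gdalnumeric.LoadFile('input.tif')
masked = np.ma.masked_equal(arr, ndv)  # same effect as masked_array(mask=arr == ndv)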
def produceClassifyTif(srcTifPath, classify, outClassifyTifPath):
    dataset = gdal.Open(srcTifPath)
    width = dataset.RasterXSize
    height = dataset.RasterYSize
    bands = dataset.RasterCount
    data = dataset.ReadAsArray(0, 0, width, height)
    geotransform = dataset.GetGeoTransform()
    proj = dataset.GetProjection()
    band = dataset.GetRasterBand(1)
    noDataValue = band.GetNoDataValue()

    src_image = gdalnumeric.LoadFile(srcTifPath)

    dst_image = numpy.zeros((height, width))

    for class_info in classify:
        class_id = class_info[0]
        class_start = class_info[1]
        class_end = class_info[2]

        class_value = numpy.ones((height, width)) * class_id

        mask = numpy.bitwise_and(
            numpy.greater_equal(src_image, class_start),
            numpy.less_equal(src_image, class_end))

        dst_image = numpy.choose(mask, (dst_image, class_value))

    writeTiff(dst_image, width, height, bands, geotransform, proj, noDataValue, outClassifyTifPath)
Example No. 6
def doit(src_filename, dst_filename):
    class_defs = [(1, 10, 20),
                  (2, 20, 30),
                  (3, 128, 255)]

    src_ds = gdal.Open(src_filename)
    xsize = src_ds.RasterXSize
    ysize = src_ds.RasterYSize

    src_image = gdalnumeric.LoadFile(src_filename)

    dst_image = numpy.zeros((ysize, xsize))

    for class_info in class_defs:
        class_id = class_info[0]
        class_start = class_info[1]
        class_end = class_info[2]

        class_value = numpy.ones((ysize, xsize)) * class_id

        mask = numpy.bitwise_and(
            numpy.greater_equal(src_image, class_start),
            numpy.less_equal(src_image, class_end))

        dst_image = numpy.choose(mask, (dst_image, class_value))

    gdalnumeric.SaveArray(dst_image, dst_filename)
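# A minimal sketch of the same range-based reclassification used by doit() and
# produceClassifyTif(), written with boolean indexing instead of numpy.choose; class_defs
# follows the (class_id, low, high) layout above and the file name is hypothetical.
import numpy

def reclassify(src_image, class_defs):
    dst = numpy.zeros(src_image.shape)
    for class_id, low, high in class_defs:
        dst[(src_image >= low) & (src_image <= high)] = class_id
    return dst

# dst_image = reclassify(gdalnumeric.LoadFile('utm.tif'), [(1, 10, 20), (2, 20, 30), (3, 128, 255)])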
Example No. 7
def numpy_rw_5():

    if gdaltest.numpy_drv is None:
        return 'skip'

    import gdalnumeric

    array = gdalnumeric.LoadFile('data/rgbsmall.tif', 35, 21, 1, 1)

    if array[0][0][0] != 78:
        print(array)
        gdaltest.post_reason('value read improperly.')
        return 'fail'

    if array[1][0][0] != 117:
        print(array)
        gdaltest.post_reason('value read improperly.')
        return 'fail'

    if array[2][0][0] != 24:
        print(array)
        gdaltest.post_reason('value read improperly.')
        return 'fail'

    return 'success'
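# Note (sketch): the test above relies on LoadFile's optional subwindow arguments
# (xoff, yoff, xsize, ysize); 'scene.tif' below is hypothetical.
# window = gdalnumeric.LoadFile('scene.tif', 100, 200, 64, 64)  # 64x64 block at offset (100, 200)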
def changeDetection(image1, image2, outputname):

    ar1 = gdalnumeric.LoadFile(image1).astype(np.int8)
    ar2 = gdalnumeric.LoadFile(image2)[1].astype(np.int8)

    diff = ar2 - ar1

    classes = np.histogram(diff, bins=5)[1]

    lut = [[0,0,0], [0,0,0], [0,0,0], [0,0,0], [0,255,0], [255,0,0]]
    start = 1

    rgb = np.zeros((3, diff.shape[0], diff.shape[1],), np.int8)
    for i in range(len(classes)):
        mask = np.logical_and(start <= diff, diff <= classes[i])
        for j in range(len(lut[i])):
            rgb[j] = np.choose(mask, (rgb[j], lut[i][j]))
        start = classes[i] + 1

    gdal_array.SaveArray(rgb, outputname, format="GTiff")
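# Note: a possible call to the routine above, with hypothetical file names. Both rasters
# must be co-registered, and the int8 casts can wrap around for pixel values above 127,
# so a wider dtype may be safer in practice.
# changeDetection('before.tif', 'after.tif', 'change_rgb.tif')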
Example No. 9
 def rasterShow(self, rasterFp):    
     try:
         rasterArray=gdalnumeric.LoadFile(rasterFp)  #load the raster data as a GDAL/NumPy array
     except RuntimeError:
         print("Unable to open %s"%rasterFp)
     multiple=2 #scale factor for the figure size
     fig=plt.figure(figsize=(20*multiple, 12*multiple))
     ax=fig.add_subplot(111)
     plt.xticks([x for x in range(rasterArray.shape[1]) if x%200==0])  #tick every 200 columns
     plt.yticks([y for y in range(rasterArray.shape[0]) if y%200==0])  #tick every 200 rows
     ax.imshow(rasterArray)
def rasterShow(fn, rasterImageName):
    NDVIPath = os.path.join(fn, rasterImageName)
    try:
        rasterArray = gdalnumeric.LoadFile(NDVIPath)  #load the raster data as a GDAL/NumPy array
    except RuntimeError:
        print("Unable to open %s" % rasterPath)
    fig = plt.figure(figsize=(20, 12))
    ax = fig.add_subplot(111)
    plt.xticks([x for x in range(rasterArray.shape[1]) if x % 200 == 0])  #tick every 200 columns
    plt.yticks([y for y in range(rasterArray.shape[0]) if y % 200 == 0])  #tick every 200 rows
    ax.imshow(rasterArray)
Example No. 11
 def open(self, file=None):
     """Does what it says."""
     self.ndv, self.xsize, self.ysize, self.geot, self.projection, datatype = georasters.get_geo_info(
         file)
     if self.ndv is None:
         self.ndv = -99999
     self.array = gdalnumeric.LoadFile(file)
     self.x_cell_size = self.geot[1]  # geotransform[1] = pixel width (x)
     self.y_cell_size = self.geot[5]  # geotransform[5] = pixel height (y, typically negative)
     self.array = numpy.ma.masked_array(self.array,
                                        mask=self.array == self.ndv,
                                        fill_value=self.ndv)
def getGroundTruth(folders, testing=False):
    files = []
    for f in folders:
        for gt in os.listdir(os.path.join(f, "GroundTruths")):
            if ".png" in gt:
                files.append(os.path.join(f, "GroundTruths", gt))
    names = [os.path.split(gt.split('.png')[0])[-1] for gt in files]
    gts = [gdalnumeric.LoadFile(os.path.join(f)) for f in files]
    preds = [np.zeros((2, g.shape[0], g.shape[1])) for g in gts]
    if testing:
        del gts
        gts = None
    return gts, preds, names
Example No. 13
def load_inputs(input_csv, csv_epsg, flow_directions_file):
    """Load input files and reproject CSV to same projection and flow directions
    """
    raster = gdal.Open(flow_directions_file)
    flow_directions = gdalnumeric.LoadFile(flow_directions_file)
    input_df = pd.read_csv(input_csv)

    # Reproject vector geometry to same projection as raster
    sourceSR = osr.SpatialReference()
    sourceSR.ImportFromEPSG(csv_epsg)
    targetSR = osr.SpatialReference()
    targetSR.ImportFromWkt(raster.GetProjectionRef())
    coordTrans = osr.CoordinateTransformation(sourceSR, targetSR)
    geoTrans = raster.GetGeoTransform()

    return (input_df, flow_directions, coordTrans, geoTrans)
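# A minimal sketch of how the objects returned by load_inputs() might be used to map a CSV
# coordinate onto a flow-direction cell; the helper and its names are hypothetical, and on
# GDAL 3+ the source SRS may need OAMS_TRADITIONAL_GIS_ORDER for x/y axis order.
def point_to_cell(x, y, coordTrans, geoTrans):
    gx, gy, _ = coordTrans.TransformPoint(x, y)
    col = int((gx - geoTrans[0]) / geoTrans[1])
    row = int((gy - geoTrans[3]) / geoTrans[5])
    return row, col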
Example No. 14
    def basic_region_analysis(self, cview, clayer):
        import Numeric, gdalnumeric

        line = int(self.RP_ToolDlg.entry_dict['start_line'].get_text())
        pix = int(self.RP_ToolDlg.entry_dict['start_pix'].get_text())
        sl = int(self.RP_ToolDlg.entry_dict['num_lines'].get_text())
        sp = int(self.RP_ToolDlg.entry_dict['num_pix'].get_text())

        clayer = self.app.sel_manager.get_active_layer()
        if clayer is None:
            # Target can only be extracted if a layer is selected
            return 'No selected raster layer'

        dsb = clayer.get_data(0).get_band()

        # Modify default to be whole image.
        if sl == 1 and sp == 1 and line == 1 and pix == 1:
            sl = dsb.YSize - line + 1
            sp = dsb.XSize - pix + 1

    # Create a temporary band for sampling if we have a subrect.
        temp_copy = 0
        if line != 1 or pix != 1 or sp != dsb.XSize or sl != dsb.YSize:
            # print line, pix, sp, sl
            temp_copy = 1
            filename = clayer.get_parent().get_dataset().GetDescription()
            target_data = gdalnumeric.LoadFile(filename, pix - 1, line - 1, sp,
                                               sl)
            target_ds = gdalnumeric.OpenArray(target_data)
            dsb = target_ds.GetRasterBand(1)

    # Compute the histogram.

        x_min = clayer.min_get(0)
        x_max = clayer.max_get(0)
        histogram = dsb.GetHistogram(x_min, x_max, 256, 1, 0)

        y_min = 0
        y_max = max(histogram[1:-1])

        (pm, mask) = self.RP_ToolDlg.get_histview(histogram, x_min, x_max,
                                                  y_min, y_max)

        self.RP_ToolDlg.viewarea.set(pm, mask)
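# A minimal sketch of the histogram call above in isolation; 'input.tif' is hypothetical.
# Band.GetHistogram(min, max, buckets, include_out_of_range, approx_ok) returns a plain
# Python list of bucket counts.
from osgeo import gdal
band = gdal.Open('input.tif').GetRasterBand(1)
counts = band.GetHistogram(0, 255, 256, 1, 0)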
def balancePatches(targetPoints,
                   relevantPoints,
                   groundTruthList,
                   patchSize,
                   leftoverPercentage=0.05):
    balancedRelevantPoints = []
    if leftoverPercentage > 1:
        print "Error: leftoverPercentage is greater than 1."
        return targetPoints, relevantPoints

    for i, tlist in enumerate(targetPoints):
        totalTpixels = 0
        partialRpixels = 0
        data = (gdalnumeric.LoadFile(groundTruthList[i]) > 0).astype(int)
        print data.shape
        for pos in targetPoints[i]:
            #if i % 10 == 0:
            #print data[pos[1]:pos[1]+patchSize, pos[2]:pos[2]+patchSize]
            b = np.bincount(data[pos[1]:pos[1] + patchSize,
                                 pos[2]:pos[2] + patchSize].flatten())
            print b[0], b[1]
            totalTpixels += b[1]
            partialRpixels += b[0]

        print(i, totalTpixels)
        data = None
        del data
        totalAddRpixels = max(0.0, ((1 - leftoverPercentage) * totalTpixels) -
                              partialRpixels)
        print totalAddRpixels
        totalRpatches = int(math.ceil(totalAddRpixels / float(patchSize**2)))
        print "## Numero de pixels de erosao:{0} \n## Numero de pixels de nao-erosao:{1}".format(
            totalTpixels, totalAddRpixels + partialRpixels)

        print(len(relevantPoints[i]), totalRpatches)
        balancedRelevantPoints.append(
            random.sample(relevantPoints[i],
                          min(len(relevantPoints[i]), totalRpatches)))

    return np.asarray(targetPoints,
                      dtype=object), np.asarray(balancedRelevantPoints,
                                                dtype=object)
def produceMaskTif(srcTifPath, maskTifPath):
    dataset = gdal.Open(srcTifPath)
    width = dataset.RasterXSize
    height = dataset.RasterYSize
    bands = dataset.RasterCount
    srcArray = dataset.ReadAsArray(0, 0, width, height)
    geotransform = dataset.GetGeoTransform()
    proj = dataset.GetProjection()
    band = dataset.GetRasterBand(1)
    noDataValue = band.GetNoDataValue()

    src_image = gdalnumeric.LoadFile(srcTifPath)

    mask_image = numpy.zeros((height, width))
    for i in range(0, height):
        for j in range(0, width):
            if abs(srcArray[i][j] - noDataValue) > 1e-9:  # pixel holds valid data
                mask_image[i][j] = 1

    writeTiff(mask_image, width, height, bands, geotransform, proj, noDataValue, maskTifPath)
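# A minimal sketch: the nested-loop nodata mask above can be vectorized, assuming the same
# srcArray and noDataValue inputs.
import numpy

def nodata_mask(srcArray, noDataValue):
    """Return 1 where a pixel holds valid data, 0 where it matches the nodata value."""
    return (numpy.abs(srcArray - noDataValue) > 1e-9).astype(numpy.uint8)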
def createData(imgfile, gt, outputfolder, references, geotransform, size):
    #print "Opening img: {}".format(imgfile)
    data = gdalnumeric.LoadFile(imgfile)
    #print "Done opening img: {}".format(imgfile)
    if not os.path.exists(outputfolder):
        os.mkdir(outputfolder)
    if not os.path.exists(os.path.join(outputfolder, 'JPEGImages')):
        os.mkdir(os.path.join(outputfolder, 'JPEGImages'))
    if not os.path.exists(os.path.join(outputfolder, 'Masks')):
        os.mkdir(os.path.join(outputfolder, 'Masks'))

    imgfilesplit = os.path.splitext(os.path.split(imgfile)[-1])
    for num, point in enumerate(references):
        outputfileimg = os.path.join(
            outputfolder, 'JPEGImages',
            imgfilesplit[0] + '_crop' + str(num) + '.jpg')
        outputfilemask = os.path.join(
            outputfolder, 'Masks',
            imgfilesplit[0] + '_crop' + str(num) + '_mask.jpg')
        #print "Creating files: \t{0}{1}".format(outputfileimg, outputfilemask)
        cropImg(data, point, outputfileimg, size)
        cropImg(gt, point, outputfilemask, size)
Example No. 18
 def open(self):
     """
     Read a raster file from disc as a formatted numpy array
     :return: None
     """
     # grab raster meta information from GeoRasters
     try:
         self.ndv, self.x_size, self.y_size, self.geot, self.projection, self.dtype = get_geo_info(
             self.filename)
     except Exception:
         raise AttributeError("problem processing file input -- is this" +
                              " a raster file?")
     # call gdal with explicit type specification
     # that will store in memory or as a disc cache, depending
     # on the state of our use_disc_caching property
     if self.use_disc_caching is True:
         # create a cache file and load file contents into the cache
         _cached_file = NdArrayDiscCache(
             input=self.filename,
             dtype=self.dtype,
             x_size=self.x_size,
             y_size=self.y_size,
         )
         self.array = _cached_file.array
         self.disc_cache_file = _cached_file.disc_cache_file
     # by default, load the whole file into memory
     else:
         self.array = gdalnumeric.LoadFile(
             filename=self.filename,
             buf_type=gdal_array.NumericTypeCodeToGDALTypeCode(
                 _to_numpy_type(self.dtype)),
         )
     # make sure we honor our no data value
     self.array = np.ma.masked_array(self.array,
                                     mask=self.array == self.ndv,
                                     fill_value=self.ndv)
Example No. 19
def numpy_rw_2():

    if gdaltest.numpy_drv is None:
        return 'skip'

    import gdalnumeric

    array = gdalnumeric.LoadFile('data/utmsmall.tif')
    if array is None:
        gdaltest.post_reason('Failed to load utmsmall.tif into array')
        return 'fail'

    ds = gdalnumeric.OpenArray(array)
    if ds is None:
        gdaltest.post_reason('Failed to open memory array as dataset.')
        return 'fail'

    bnd = ds.GetRasterBand(1)
    if bnd.Checksum() != 50054:
        gdaltest.post_reason('Didnt get expected checksum on reopened file')
        return 'fail'
    ds = None

    return 'success'
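# Note (sketch): gdalnumeric.OpenArray wraps a NumPy array as an in-memory GDAL dataset,
# which is what lets the test above re-read its array and checksum it.
import numpy as np
import gdalnumeric

ds = gdalnumeric.OpenArray(np.arange(100, dtype=np.uint8).reshape(10, 10))
print(ds.RasterXSize, ds.RasterYSize)  # 10 10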
Example No. 20
    y_ratio = 450.0 / pixels
    # Add more colors to this list if comparing
    # more than 3 bands or 1 image
    colors = ["red", "green", "blue"]
    for j in range(len(hist)):
        h = hist[j]
        x = -354
        y = -199
        t.up()
        t.goto(x, y)
        t.down()
        t.color(colors[j])
        for i in range(256):
            x = i * x_ratio
            y = h[i] * y_ratio
            x = x - (709 / 2)
            y = y + -199
            t.goto((x, y))


os.chdir(r'C:\Users\Fabio\Downloads\Studio\landsat\LC81910312016217LGN00')
im = "falco.tif"
histograms = []
arr = gdalnumeric.LoadFile(im)
for b in arr:
    histograms.append(histogram(b))
draw_histogram(histograms, scale=False)

t.pen(shown=False)
t.done()
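# A minimal sketch: the per-band counts that feed the turtle plot above, computed with
# NumPy alone; "falco.tif" is the file already used in this example.
import numpy as np
import gdalnumeric

arr = gdalnumeric.LoadFile("falco.tif")
hists = [np.histogram(band, bins=256, range=(0, 256))[0] for band in arr]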
Example No. 21
    def __init__(self, hdfFile, MOD11A1, h5File, ndv=None):

        # Product subdataset information
        gq_lst = 0  # 1km LST
        gq_red = 1  # 250m red band
        gq_nir = 2  # 250m NIR band

        # Open and find subdatasets
        gq_ds = gdal.Open(hdfFile, gdal.GA_ReadOnly)  # MODIS
        mod11_ds = gdal.Open(MOD11A1, gdal.GA_ReadOnly)  # MODIS
        self.sm_ds = gdal.Open(h5File, gdal.GA_ReadOnly)  # SMAP

        # Read in datasets
        self.array_sm = gdalnumeric.LoadFile(h5File)
        ds_red = gdal.Open(gq_ds.GetSubDatasets()[gq_red][0],
                           gdal.GA_ReadOnly)  # MODIS Dataset
        ds_nir = gdal.Open(gq_ds.GetSubDatasets()[gq_nir][0],
                           gdal.GA_ReadOnly)  # MODIS Dataset
        ds_lst = gdal.Open(mod11_ds.GetSubDatasets()[gq_lst][0],
                           gdal.GA_ReadOnly)  # MODIS Dataset

        # Warp to WGS84
        geotref = ds_red.GetProjectionRef()
        geoTransform = ds_red.GetGeoTransform()
        # pixel = geoTransform[1] / 100000
        pixel = 0.001853250866278332

        # MOD11A1 Geotransform
        geotref_lst = ds_lst.GetProjectionRef()
        geoTransform_lst = ds_lst.GetGeoTransform()
        pixel_lst = geoTransform_lst[1] / 100000

        self.w_ds_red = self.reproject_dataset(dataset=ds_red,
                                               pixel_spacing=pixel,
                                               epsg_from=geotref,
                                               epsg_to=4326)
        self.w_ds_nir = self.reproject_dataset(dataset=ds_nir,
                                               pixel_spacing=pixel,
                                               epsg_from=geotref,
                                               epsg_to=4326)
        self.w_ds_lst = self.reproject_dataset(dataset=ds_lst,
                                               pixel_spacing=pixel_lst,
                                               epsg_from=geotref_lst,
                                               epsg_to=4326)

        # Write out image
        red = self.w_ds_red.GetRasterBand(1).ReadAsArray().astype(float)
        nir = self.w_ds_nir.GetRasterBand(1).ReadAsArray().astype(float)
        lst = self.w_ds_lst.GetRasterBand(1).ReadAsArray().astype(float)

        # masking invalid values
        eqn = '(red <= 0) | (red >= 10000) | (nir <= 0) | (nir >= 10000)'

        invalid = ne.evaluate(eqn)

        # Apply valid range mask to data
        red[invalid] = ndv
        nir[invalid] = ndv

        # Set variables
        self.red = red
        self.nir = nir
        self.lst = ne.evaluate('(lst / 50.0)').astype(float)
        self.sm = self.Cilpper(srcArray=self.array_sm,
                               smapDataset=self.sm_ds,
                               modisWrap=self.w_ds_lst)
        self.clippedSM = self.smapClipRsizer(modisWrap=self.w_ds_lst,
                                             clipArray=self.smapClipped,
                                             transform=self.clippedGeotranse)
        self.ndvi = self.NDVI()
        self.fvc = self.FVC()
        self.fv1km = self.resmapleFvc(fvc250m=self.fvc, lst1km=self.lst)
        self.Tv1km = self.calculateTv1km(LST=self.lst, FVC=self.fv1km)
        self.Ts1km = self.calculateTs1km(Tmodis=self.lst,
                                         fv1km=self.fv1km,
                                         Tv1km=self.Tv1km)
        self.see = self.SEEMODIS1km(
            Ts1km=self.Ts1km)  #, Tsmax=self.Tsmax, Tsmin=self.Tsmin)
        self.SEE40km = self.calculateMeanSEE(SEE=self.see, clip=self.sm)
        self.SEEavg = self.SEEMean(modisWrap=self.w_ds_lst,
                                   seeMean=self.SEE40km,
                                   transform=self.clippedGeotranse)
        self.smp = self.soilMoistureParameter(rescaleSMAP=self.clippedSM,
                                              rescaleMeanSEE=self.SEEavg)
        self.rescalclippedSM = self.smapClipRsizer(
            modisWrap=self.w_ds_lst,
            clipArray=self.smapClipped,
            transform=self.clippedGeotranse)
        self.disaggregated = self.Disaggregation(
            rescaleSMAP=self.rescalclippedSM,
            rescaleMeanSEE=self.SEEavg,
            SEEmodis1km=self.see,
            SMp=self.smp)
@author: richieBao-caDesign (cadesign.cn)
Based on the SIR epidemic model
"""
import gdal, ogr, os, osr, gdalnumeric
import numpy as np
import matplotlib.pyplot as plt
from sklearn.preprocessing import MinMaxScaler
from scipy.ndimage.filters import convolve
import moviepy.editor as mpy
import time
'''Read the "land-use classification" .tif geospatial data'''
#rasterPath=r'D:\MUBENAcademy\pythonSystem\dataA\testClip6.tif'
rasterPath = r'C:\Users\Richi\Music\SIR\testClip6.tif'
try:
    srcArray = gdalnumeric.LoadFile(rasterPath)  #load the raster data as a GDAL/NumPy array
except RuntimeError:
    print("Unable to open %s" % rasterPath)
print(srcArray.shape)
originalRaster = srcArray.copy()
print(np.unique(originalRaster))
uniqueValues = np.unique(srcArray)
#print(uniqueValues)
'''Configure spatial resistance values and normalize them to [0,1]'''
#costMapping_LandType={"空值A":(0,0),"空值B":(256,0),"农田":(2,1),"河流":(3,7),"建设":(4,0),"森林山":(5,10),"森林平":(6,10)}
costMapping_LandType = {
    "空值A": (0, 0),
    "空值B": (256, 0),
    "农田": (2, 200),
    "河流": (3, 500),
    "建设": (4, 0),
Example No. 23
dst_segmentation = "/home/vortex/Desktop/LAYERS/lorenzo/tiles/1995/Mosaic/test_clipped/enanched_sobel_seg.tif"

# image = Image.open(src)
#
#
# im = image.convert('L')
# contr = ImageEnhance.Contrast(im)
# im = contr.enhance(2)
# # bright = ImageEnhance.Brightness(im)
# # im = bright.enhance(1)
# im.save(dst, "tiff")

# sobel2

# SEGMENTATION
image_array = gdalnumeric.LoadFile(src)
for a in image_array:
    print("a %s") % a
    print("-----------")
    for b in a:
        print("%s") % b
    break
markers = np.zeros_like(image_array)
markers[image_array < 90] = 1
markers[image_array > 90] = 2
markers[image_array > 110] = 3

# print image_array
# print markers

segmentation = watershed(image_array, markers)
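# Note: `watershed` here presumably comes from scikit-image (skimage.segmentation.watershed,
# or skimage.morphology.watershed in older releases); the import sits above the clipped
# portion of this example.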
Example No. 24
    def __init__(self, input=None, **kwargs):
        """
        ArrayDiscCache builds a local numpy memmap file and either loads raster data from disc into the cache
        or copies a user-specified numpy array into the cache file directly (via the input= argument).
        :param input: either a full path to a raster file to load into the cache, or a numpy array copied into it
        :param str dtype: numpy datatype specification to use for our array cache
        :param int x_size: number of cells (x-axis) to use for our array cache
        :param int y_size: number of cells (y-axis) to use for our array cache
        """
        self.disc_cache_file = os.path.abspath(
            kwargs.get("cache_filename",
                       str(randint(1, 9e09)) + "_np_array.dat"))

        self.dtype = kwargs.get("dtype", _DEFAULT_DTYPE)

        self.x_size = kwargs.get("x_size", None)
        self.y_size = kwargs.get("y_size", None)

        self.ndv = kwargs.get("ndv", _DEFAULT_NA_VALUE)

        LOGGER.debug("Using disc caching file for large numpy array: " +
                     self.disc_cache_file)

        # if the user specified a valid file path or numpy array object, try to read it into our cache
        # but otherwise allow an empty specification
        if input is None:
            # by default, just create an empty cache file from the parameters specified by the user
            self.array = np.memmap(
                filename=self.disc_cache_file,
                dtype=_to_numpy_type(self.dtype),
                mode="w+",
                shape=(self.y_size, self.x_size),
            )
        else:
            if isinstance(input, str) and os.path.isfile(input):
                LOGGER.debug("Loading file contents into disc cache file:" +
                             self.disc_cache_file)
                _raster_file = gdal.Open(input)
                self.array = np.memmap(
                    filename=self.disc_cache_file,
                    dtype=_to_numpy_type(self.dtype),
                    mode="w+",
                    shape=(_raster_file.RasterYSize, _raster_file.RasterXSize),
                )
                del _raster_file
                self.array[:] = gdalnumeric.LoadFile(
                    filename=input,
                    buf_type=gdal_array.NumericTypeCodeToGDALTypeCode(
                        _to_numpy_type(self.dtype)),
                )[:]
            else:
                LOGGER.debug(
                    "Treating input= object as numpy array object and reading into disc cache file:"
                    + self.disc_cache_file)
                self.array = np.memmap(
                    filename=self.disc_cache_file,
                    dtype=input.dtype,
                    mode="w+",
                    shape=input.shape,
                )
                self.array[:] = input[:]
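# A possible usage sketch of the cache class above (named NdArrayDiscCache in Example No. 18);
# the raster path is hypothetical.
# cache = NdArrayDiscCache(input='big_raster.tif', dtype='float32')
# arr = cache.array  # a numpy.memmap backed by cache.disc_cache_file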
Example No. 25
#******************************************************************************

from osgeo import gdal
import gdalnumeric
try:
    import numpy
except ImportError:
    import Numeric as numpy

class_defs = [(1, 10, 20), (2, 20, 30), (3, 128, 255)]

src_ds = gdal.Open('utm.tif')
xsize = src_ds.RasterXSize
ysize = src_ds.RasterYSize

src_image = gdalnumeric.LoadFile('utm.tif')

dst_image = numpy.zeros((ysize, xsize))

for class_info in class_defs:
    class_id = class_info[0]
    class_start = class_info[1]
    class_end = class_info[2]

    class_value = numpy.ones((ysize, xsize)) * class_id

    mask = numpy.bitwise_and(numpy.greater_equal(src_image, class_start),
                             numpy.less_equal(src_image, class_end))

    dst_image = numpy.choose(mask, (dst_image, class_value))
Example No. 26
import gdalnumeric

#Input File
src = "../dati/islands/islands.tif"

#Output
tgt = "../dati/islands/islands_classified.jpg"

srcArr = gdalnumeric.LoadFile(src)

classes = gdalnumeric.numpy.histogram(srcArr, bins=2)[1]
print classes

#Color look-up table (LUT) - needs one R,G,B triple per entry in classes (i.e. bins+1).
#Specified as R,G,B tuples
lut = [[255, 0, 0], [0, 0, 0], [255, 255, 255]]

start = 1

rgb = gdalnumeric.numpy.zeros((
    3,
    srcArr.shape[0],
    srcArr.shape[1],
), gdalnumeric.numpy.float32)

# Process all classes and assign colors
for i in range(len(classes)):
    mask = gdalnumeric.numpy.logical_and(start <= srcArr, srcArr <= classes[i])
    for j in range(len(lut[i])):
        rgb[j] = gdalnumeric.numpy.choose(mask, (rgb[j], lut[i][j]))
    start = classes[i] + 1
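# Note: this snippet stops before writing its output; a plausible final step, mirroring the
# SaveArray calls in the other examples, would be
# gdalnumeric.SaveArray(rgb.astype(gdalnumeric.numpy.uint8), tgt, format="JPEG", prototype=src)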
Example No. 27
    QRslp[mask] = np.nan
    QRasp[mask] = np.nan

    return QRslp, QRasp


#Loop through each DEM resolution
base_dir = '~/example/'
spacings = range(2, 31)[::-1]
qr_s, qr_a = [], []
for i in spacings:
    dem = base_dir + 'DEM/Pozo_UTM11_NAD83_g_' + str(i) + 'm.tif'
    uncertainty = base_dir + 'STD/Pozo_UTM11_NAD83_g_' + str(i) + 'm_std.tif'

    #Load the data as arrays
    elev = gdalnumeric.LoadFile(dem).astype(float)
    std = gdalnumeric.LoadFile(uncertainty).astype(float)

    #Mask out water areas
    mask = np.where(elev < 0)
    elev[mask] = np.nan
    std[mask] = np.nan

    #Calculate the Quality Ratios
    QRslp, QRasp = QR(elev, i, std, mask)
    QRslp[mask] = np.nan
    QRasp[mask] = np.nan

    #Append the QR mean values to a list
    qr_s.append(np.nanmean(QRslp))
    qr_a.append(np.nanmean(QRasp))
Example No. 28
valid_date_path = os.path.join(stats_folder, 'valid_date.csv')

# step1: calculate cloud cover ##############################################################
print 'step1: calculate cloud cover '

# load data
snow_date = pd.DataFrame.from_csv(model_snow_date_path, header=0)
snow_date['modis_cloud_cover'] = ''
snow_date['modis_snow_count'] = ''

# calculate the modis cloud cover and check whether it has snow
for time in snow_date.index:
    modis_proj_path = snow_date['modis_proj_folder'].ix[time]
    if os.path.isfile(modis_proj_path):
        try:
            raster = gdalnumeric.LoadFile(modis_proj_path)
            cloud_count = (raster[0] == 250).sum()
            total_count = raster.shape[1] * raster.shape[2]
            nodata_count = (raster[0] == 255).sum()
            snow_count = ((raster[0] >= modis_threshold) &
                          (raster[0] <= 100)).sum()
            cloud_cover = float(cloud_count) / (total_count - nodata_count)
            snow_date['modis_cloud_cover'].ix[time] = cloud_cover
            snow_date['modis_snow_count'].ix[time] = snow_count
        except Exception as e:
            continue

snow_date.to_csv(model_snow_date_path)
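# A minimal sketch of the cloud-cover fraction computed above, as a standalone helper;
# the band codes (250 = cloud, 255 = nodata) follow the snippet.
def modis_cloud_cover(band0):
    cloud = (band0 == 250).sum()
    nodata = (band0 == 255).sum()
    return float(cloud) / (band0.size - nodata)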

# step2: get valid date #######################################################################
# this finds the dates on which snow17, ueb, or modis has snow (>0m or 0%) and the modis cloud cover is below the threshold
Example No. 29
processing.runalg("gdalogr:warpreproject", rasterLyr, srs,srs, res, 0, None, 0, "/qgis_data/rasters/resampled.tif")
# Verify that the resampled.tif image was created in your /qgis_data/rasters directory.

#========================================================
#               Resampling raster resolution
#========================================================
#Remotely-sensed images are not just pictures; they are data. The pixel values carry
#meaning that a computer can analyze automatically.

#NumPy can be accessed through the GDAL package's gdalnumeric module.

#We must import the bridge module called gdalnumeric, which connects GDAL
#to NumPy so that we can perform array math on geospatial images:
import gdalnumeric
# Now, we will load our raster image directly into a multidimensional array:
a = gdalnumeric.LoadFile("/qgis_data/rasters/satimage.tif")
# The following code counts the number of pixel combinations in the image:
b = a.T.ravel()
c = b.reshape((b.size // 3, 3))  #one row per pixel, one column per band
order = gdalnumeric.numpy.lexsort(c.T)
c = c[order]
diff = gdalnumeric.numpy.diff(c, axis=0)
ui = gdalnumeric.numpy.ones(len(c), 'bool')
ui[1:] = (diff != 0).any(axis=1)
u = c[ui]

#  Now, each row of u is one unique band combination, so the count of unique values is:
u.shape[0]
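# Note: on NumPy >= 1.13 the same count of unique band combinations can be obtained without
# the manual lexsort/diff bookkeeping:
# u = gdalnumeric.numpy.unique(c, axis=0); u.shape[0]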

#========================================================
#               Mosaicing rasters
Example No. 30
        pixel_count_path = os.path.join(
            stats_folder,
            'pixel_count_{}.csv'.format(swe_col_name.split('_')[0]))

        # get pixel count
        for time in snow_date.index:
            modis_proj_path = os.path.join(
                modis_proj_folder, snow_date['modis_proj_folder'].ix[time])
            swe_proj_path = os.path.join(swe_proj_folder,
                                         snow_date[swe_col_name].ix[time])

            if os.path.isfile(swe_proj_path) and os.path.isfile(
                    modis_proj_path):
                try:
                    # count watershed, watershed_out, invalid pixels
                    modis = gdalnumeric.LoadFile(modis_proj_path)
                    sca_data = modis[0] / 100.0
                    modis_bin = np.where(modis[1] != 0, sca_data, -999)
                    count_watershed = (modis[1] == 255).sum()
                    count_watershed_out = (modis[1] == 0).sum()
                    count_invalid = (modis_bin > 100).sum()

                    # get modis binary
                    modis_bin[modis_bin > 100] = -999
                    modis_bin[modis_bin > 0] = 1

                    # count modis snow, no snow pixels
                    count_modis_snow = (modis_bin == 1).sum()
                    count_modis_dry = (modis_bin == 0).sum()

                    # calculate weight