def iota2Formatting(invector, classes, outvector=""):
    '''Reformat an iota2 zonal-statistics vector with ogr2ogr.

    Builds a SELECT statement that casts the statistics columns to fixed
    numeric types (one NUMERIC(6,2) column per nomenclature class alias)
    and writes the result as an ESRI Shapefile.

    Usage example:
    python simplification/ZonalStats.py -wd ~/tmp/ -inr /work/OT/theia/oso/vincent/testmpi/mini_SAR_pad/final/Classif_Seed_0.tif /work/OT/theia/oso/vincent/testmpi/mini_SAR_pad/final/Confidence_Seed_0.tif /work/OT/theia/oso/vincent/testmpi/mini_SAR_pad/final/PixelsValidity.tif -shape /work/OT/theia/oso/vincent/testmpi/mini_SAR_pad/final/simplification/vectors/dept_1.shp -output /work/OT/theia/oso/vincent/outstats_oso.sqlite -params 1:rate 2:statsmaj 3:statsmaj -classes simplification/nomenclature17.cfg -iota2
    '''
    # Last nomenclature level gives (code, ..., ..., alias) tuples; keep the
    # aliases ordered by class code.
    nomenc = nomenclature.Iota2Nomenclature(classes, 'cfg')
    desclasses = nomenc.HierarchicalNomenclature.get_level_values(
        int(nomenc.getLevelNumber() - 1))
    pairs = [[code, str(alias)] for code, _, _, alias in desclasses]
    pairs.sort(key=lambda item: item[0])
    sortalias = [alias for _, alias in pairs]

    # One "CAST(... ) AS alias, " fragment per class column (trailing comma
    # is expected by the SELECT template below).
    exp = "".join("CAST(%s AS NUMERIC(6,2)) AS %s, " % (alias, alias)
                  for alias in sortalias)

    if outvector == "":
        layerout = os.path.splitext(os.path.basename(invector))[0]
        outvector = os.path.splitext(invector)[0] + '_tmp.shp'
    else:
        layerout = os.path.splitext(os.path.basename(outvector))[0]

    # NOTE(review): 'layerout' is used both as the output layer name (-nln)
    # and as the FROM layer of the SQL query — confirm the input layer name
    # always matches when an explicit outvector is given.
    command = "ogr2ogr -lco ENCODING=UTF-8 -overwrite -q -f 'ESRI Shapefile' -nln %s -sql "\
              "'SELECT CAST(cat AS INTEGER(4)) AS Classe, "\
              "CAST(meanmajb3 AS INTEGER(4)) AS Validmean, "\
              "CAST(stdmajb3 AS NUMERIC(6,2)) AS Validstd, "\
              "CAST(meanmajb2 AS INTEGER(4)) AS Confidence, %s"\
              "CAST(area AS NUMERIC(10,2)) AS Aire "\
              "FROM %s' "\
              "%s %s"%(layerout, exp, layerout, outvector, invector)

    Utils.run(command)
def dataframeExport(geodataframe, output, schema):
    """Export a GeoPandas DataFrame as a vector file (shapefile, sqlite and geojson)

    The driver is selected from the output file extension. SQLite output is
    produced indirectly: the frame is first written as a shapefile, then
    converted with ogr2ogr.

    Parameters
    ----------
    geodataframe : GeoPandas DataFrame
        GeoPandas DataFrame

    output : string
        output vector file

    schema : dict / Fiona schema
        schema giving colums name and format

    """

    # TODO Export format depending on columns number (shapefile, sqlite, geojson) # Check Issue on framagit
    # extension -> (OGR driver, needs shapefile->sqlite conversion)
    dispatch = {".shp": ("ESRI Shapefile", False),
                ".geojson": ("GeoJSON", False),
                ".sqlite": ("ESRI Shapefile", True)}

    outformat = os.path.splitext(output)[1]
    if outformat not in dispatch:
        raise Exception("The output format '%s' is not handled" %
                        (outformat[1:]))
    driver, convert = dispatch[outformat]

    if convert:
        # Write an intermediate shapefile, then convert it to SQLite.
        outputinter = os.path.splitext(output)[0] + '.shp'
        geodataframe.to_file(outputinter,
                             driver=driver,
                             schema=schema,
                             encoding='utf-8')
        output = os.path.splitext(output)[0] + '.sqlite'
        Utils.run('ogr2ogr -f SQLite %s %s' % (output, outputinter))
    else:
        geodataframe.to_file(output,
                             driver=driver,
                             schema=schema,
                             encoding='utf-8')
# Example 3
def OSORegularization(classif, umc1, core, path, output, ram = "10000", noSeaVector = None, rssize = None, umc2 = None, logger = logger):
    """Regularise an OSO classification raster in one or two passes.

    Parameters
    ----------
    classif : string
        input classification raster
    umc1 : int
        minimal mapping unit (pixels) of the first regularisation
    core : int
        number of threads for OTB / gdalwarp
    path : string
        working directory
    output : string
        output regularised raster (nothing is done if it already exists)
    ram : string
        RAM budget for OTB applications (MB)
    noSeaVector : string, optional
        land mask vector; when given, the result is vectorised/recoded with it
    rssize : int, optional
        resampling size applied before the second regularisation
    umc2 : int, optional
        minimal mapping unit of the optional second regularisation
    logger : logging.Logger
        logger instance
    """
    if not os.path.exists(output):
        # OTB Number of threads
        os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = str(core)

        # first regularization (returns raster path and elapsed seconds)
        regulClassif, time_regul1 = AdaptRegul23.regularisation(classif, umc1, core, path, ram)

        logger.info(" ".join([" : ".join(["First regularization", str(time_regul1)]), "seconds"]))

        # second regularization (optional, possibly after resampling)
        if umc2 is not None:
            if rssize is not None:
                if os.path.exists(os.path.join(path, "reechantillonnee.tif")):
                    os.remove(os.path.join(path, "reechantillonnee.tif"))

                # mode resampling to rssize before the second pass
                command = "gdalwarp -q -multi -wo NUM_THREADS=%s -r mode -tr %s %s %s %s/reechantillonnee.tif" %(core, \
                                                                                                              rssize, \
                                                                                                              rssize, \
                                                                                                              regulClassif, \
                                                                                                              path)
                Utils.run(command)
                # NOTE(review): time_regul1 is a duration (seconds), not a
                # timestamp, so this logged "Resample" time looks wrong —
                # confirm intent before changing.
                logger.info(" ".join([" : ".join(["Resample", str(time.time() - time_regul1)]), "seconds"]))

            regulClassif, time_regul2 = AdaptRegul23.regularisation(os.path.join(path, "reechantillonnee.tif"), umc2, core, path, ram)
            os.remove(os.path.join(path, "reechantillonnee.tif"))
            logger.info(" ".join([" : ".join(["Second regularization", str(time_regul2)]), "seconds"]))

        # optional sea/land clipping and recoding
        if noSeaVector is not None:
            outfilename = os.path.basename(output)
            rastToVectRecode(path, regulClassif, noSeaVector, os.path.join(path, outfilename), ram, "uint8")
        else:
            # NOTE(review): regulClassif may already be a full path here, in
            # which case os.path.join(path, outfilename) below relies on it
            # being absolute — confirm.
            outfilename = regulClassif

        shutil.copyfile(os.path.join(path, outfilename), output)
        os.remove(os.path.join(path, outfilename))

    else:
        logger.info("One regularised file '%s' already exists for this classification"%(output))
# Example 4
def zonalstats(path,
               rasters,
               params,
               output,
               paramstats,
               classes="",
               bufferDist=None,
               gdalpath="",
               write_ouput=False,
               gdalcachemax="9000"):
    """Compute zonal statistitics (descriptive and categorical)
       on multi-band raster or multi-rasters
       based on Point (buffered or not) or Polygon zonal vector

    Parameters
    ----------
    path : string
        working directory

    rasters : list
        list of rasters to analyse

    params : list
        list of fid list and vector file

    output : vector file (sqlite, shapefile and geojson)
        vector file to store statistitics

    paramstats : list
        list of statistics to compute (e.g. {1:'stats', 2:'rate'})

            - paramstats = {1:"rate", 2:"statsmaj", 3:"statsmaj", 4:"stats", 2:stats_cl}
            - stats : mean_b, std_b, max_b, min_b
            - statsmaj : meanmaj, stdmaj, maxmaj, minmaj of majority class
            - rate : rate of each pixel value (classe names)
            - stats_cl : mean_cl, std_cl, max_cl, min_cl of one class
            - val : value of corresponding pixel (only for Point geometry and without other stats)

    classes : nomenclature file
        nomenclature

    bufferDist : int
        in case of point zonal vector : buffer size

    gdalpath : string
        path of gdal binaries (for system execution)

    write_ouput : boolean
        if True, wrapped raster are stored in working dir

    gdalcachemax : string
        gdal cache for wrapping operation (in Mb)

    """

    # Features and vector file to intersect
    vector, idvals = params

    # Raster resolution
    # TODO : Check if all rasters have same extent and resolution
    res = abs(fut.getRasterResolution(rasters[0])[0])

    # if no vector subsetting (all features)
    if not idvals:
        idvals = getFidList(vector)

    # vector open and iterate features and/or buffer geom
    vectorname = os.path.splitext(os.path.basename(vector))[0]
    vectorgeomtype = vf.getGeomType(vector)
    vectorbuff = None

    # Read statistics parameters ("band:stat" strings -> {band: stat})
    if isinstance(paramstats, list):
        paramstats = dict([(x.split(':')[0], x.split(':')[1])
                           for x in paramstats])

    # Value extraction (Point geometry without buffer)
    if not bufferDist and vectorgeomtype in (1, 4, 1001, 1004):
        if 'val' in paramstats.values():
            if vectorgeomtype == 1:
                schema = {'geometry': 'Point', 'properties': {}}
            elif vectorgeomtype == 4:
                schema = {'geometry': 'MultiPoint', 'properties': {}}
        else:
            raise Exception("Only pixel value extraction available "\
                            "when Point geometry without buffer distance is provided")

    # Stats extraction
    else:
        # Point geometry : statistics need a buffered polygon
        if vectorgeomtype in (1, 4, 1001, 1004):
            if vectorgeomtype == 1:
                schema = {'geometry': 'Point', 'properties': {}}
            elif vectorgeomtype == 4:
                schema = {'geometry': 'MultiPoint', 'properties': {}}
            vectorbuff = vectorname + "buff.shp"
            _ = bfo.bufferPoly(vector, vectorbuff, bufferDist=bufferDist)

        # Polygon geometry
        elif vectorgeomtype in (3, 6, 1003, 1006):
            if vectorgeomtype == 3:
                schema = {'geometry': 'Polygon', 'properties': {}}
            elif vectorgeomtype == 6:
                schema = {'geometry': 'MultiPolygon', 'properties': {}}
        else:
            raise Exception("Geometry type of vector file not handled")

    # Vector reading
    dataset = vf.openToRead(vector)
    lyr = dataset.GetLayer()
    spatialref = lyr.GetSpatialRef().ExportToProj4()

    # Prepare stats DataFrame
    stats = definePandasDf(idvals, paramstats, classes)

    # Normalise the gdal binaries prefix once
    # (fix: the original re-appended '/' at every feature iteration)
    if gdalpath != "" and gdalpath is not None:
        gdalpath = gdalpath + "/"
    else:
        gdalpath = ""

    # Iterate vector's features (FID)
    for idval in idvals:
        lyr.SetAttributeFilter("FID=" + str(idval))
        feat = lyr.GetNextFeature()
        geom = feat.GetGeometryRef()
        if geom:
            # Insert geometry in DataFrame
            geomdf = pad.DataFrame(index=[idval], \
                                   columns=["geometry"], \
                                   data=[str(geom.ExportToWkt())])

            # Get Point coordinates (pixel value case)
            if vectorgeomtype in (1, 4, 1001,
                                  1004) and 'val' in paramstats.values():
                xpt, ypt, _ = geom.GetPoint()

            stats.update(geomdf)

        if vectorbuff:
            vector = vectorbuff

        # creation of wrapped rasters
        bands = []
        success = True
        # majority-class info must be (re)computed per feature
        # (fix: previously stale across FIDs, or unbound when 'statsmaj'
        # preceded 'rate' in paramstats)
        classmaj = posclassmaj = None
        for idx, raster in enumerate(rasters):

            # Value extraction
            if 'val' in paramstats.values():
                if vectorgeomtype not in (1, 4, 1001, 1004):
                    raise Exception("Type of input vector %s must be "\
                                    "'Point' for pixel value extraction"%(vector))
                else:
                    bands.append(raster)
                    tmpfile = raster

            # Stats Extraction
            else:
                tmpfile = os.path.join(
                    path, 'rast_%s_%s_%s' % (vectorname, str(idval), idx))
                try:
                    # TODO : test gdal version : >= 2.2.4
                    if write_ouput:
                        # fix: the original command string had garbled quoting
                        # around -cwhere (stray double quotes)
                        cmd = '%sgdalwarp -tr %s %s -tap -q -overwrite -cutline %s '\
                              '-crop_to_cutline --config GDAL_CACHEMAX %s -wm %s '\
                              '-wo "NUM_THREADS=ALL_CPUS" -wo "CUTLINE_ALL_TOUCHED=YES" '\
                              '-cwhere "FID=%s" %s %s -ot Float32'%(gdalpath, \
                                                                    res, \
                                                                    res, \
                                                                    vector, \
                                                                    gdalcachemax, \
                                                                    gdalcachemax, \
                                                                    idval, \
                                                                    raster, \
                                                                    tmpfile)
                        Utils.run(cmd)
                    else:
                        gdal.SetConfigOption("GDAL_CACHEMAX", gdalcachemax)
                        tmpfile = gdal.Warp('', raster, xRes=res, \
                                            yRes=res, targetAlignedPixels=True, \
                                            cutlineDSName=vector, cropToCutline=True, \
                                            cutlineWhere="FID=%s"%(idval), format='MEM', \
                                            warpMemoryLimit=gdalcachemax, \
                                            warpOptions=[["NUM_THREADS=ALL_CPUS"], ["CUTLINE_ALL_TOUCHED=YES"]])

                    bands.append(tmpfile)
                    success = True
                except Exception:
                    # gdalwarp can fail on degenerate geometries; the feature
                    # is reported and skipped below
                    success = False

        if success:
            for param in paramstats:
                # Multi-raster / Multi-band data preparation
                if len(rasters) != 1:
                    band = bands[int(param) - 1]
                    nbband = 1
                else:
                    band = tmpfile
                    nbband = int(param)

                # Statistics extraction
                if band:
                    methodstat = paramstats[param]

                    if methodstat == 'rate':
                        classStats, classmaj, posclassmaj = countPixelByClass(
                            band, idval, nbband)
                        stats.update(classStats)

                        # Add columns when pixel values are not identified in nomenclature file
                        if list(classStats.columns) != list(stats.columns):
                            newcols = list(
                                set(list(classStats.columns)).difference(
                                    set(list(stats.columns))))
                            # fix: keep the concatenation result (it was
                            # silently discarded before)
                            stats = pad.concat([stats, classStats[newcols]],
                                               axis=1)

                    elif methodstat == 'stats':

                        cols = ["meanb%s"%(int(param)), "stdb%s"%(int(param)), \
                                "maxb%s"%(int(param)), "minb%s"%(int(param))]

                        stats.update(pad.DataFrame(data=[rasterStats(band, nbband)], \
                                                   index=[idval], \
                                                   columns=cols))

                    elif methodstat == 'statsmaj':
                        if not classmaj:
                            if "rate" in paramstats.values():
                                # locate the band declared as classification
                                idxbdclasses = [
                                    x for x in paramstats
                                    if paramstats[x] == "rate"
                                ][0]
                                if len(rasters) != 1:
                                    bandrate = bands[idxbdclasses - 1]
                                    nbbandrate = 0
                                else:
                                    bandrate = band
                                    nbbandrate = idxbdclasses - 1
                            else:
                                raise Exception("No classification raster provided "\
                                                "to check position of majority class")

                            classStats, classmaj, posclassmaj = countPixelByClass(
                                bandrate, idval, nbbandrate)
                            classStats = None

                        cols = ["meanmajb%s"%(int(param)), "stdmajb%s"%(int(param)), \
                                "maxmajb%s"%(int(param)), "minmajb%s"%(int(param))]
                        stats.update(pad.DataFrame(data=[rasterStats(band, nbband, posclassmaj)], \
                                                   index=[idval], \
                                                   columns=cols))

                    elif "stats_" in methodstat:
                        if "rate" in paramstats.values():
                            # get positions of class
                            cl = paramstats[param].split('_')[1]
                            idxbdclasses = [
                                x for x in paramstats
                                if paramstats[x] == "rate"
                            ][0]
                            rastertmp = gdal.Open(bands[idxbdclasses - 1], 0)
                            data = rastertmp.ReadAsArray()
                            posclass = np.where(data == int(cl))
                            data = None
                        else:
                            raise Exception("No classification raster provided "\
                                            "to check position of requested class")

                        cols = ["meanb%sc%s"%(int(param), cl), "stdb%sc%s"%(int(param), cl), \
                                "maxb%sc%s"%(int(param), cl), "minb%sc%s"%(int(param), cl)]

                        stats.update(pad.DataFrame(data=[rasterStats(band, nbband, posclass)], \
                                                   index=[idval], \
                                                   columns=cols))

                    elif "val" in methodstat:
                        # NOTE(review): xpt/ypt are only set when the feature
                        # has a geometry — a NULL-geometry Point would fail
                        # here; confirm inputs are always valid.
                        colpt, rowpt = fut.geoToPix(band, xpt, ypt)
                        cols = "valb%s" % (param)
                        stats.update(pad.DataFrame(data=[rasterStats(band, nbband, None, (colpt, rowpt))], \
                                                   index=[idval], \
                                                   columns=[cols]))
                    else:
                        print("The method %s is not implemented" %
                              (paramstats[param]))

                band = None

            if write_ouput:
                # NOTE(review): only the last temporary raster is removed;
                # earlier per-band files remain in the working dir — confirm
                # whether that is intended.
                os.remove(tmpfile)

        else:
            print(
                "gdalwarp problem for feature %s (geometry error, too small area, etc.)"
                % (idval))

    # Prepare geometry and projection
    stats["geometry"] = stats["geometry"].apply(wkt.loads)
    statsfinal = gpad.GeoDataFrame(stats, geometry="geometry")
    statsfinal.fillna(0, inplace=True)
    statsfinal.crs = {'init': 'proj4:%s' % (spatialref)}

    # change column names if rate stats expected and nomenclature file is provided
    # (fix: the membership test must look at the requested statistics, i.e.
    # the dict values, consistently with the rest of this function)
    if "rate" in paramstats.values() and classes != "":
        # get multi-level nomenclature
        # classes="/home/qt/thierionv/iota2/iota2/scripts/simplification/nomenclature17.cfg"
        nomenc = nomenclature.Iota2Nomenclature(classes, 'cfg')
        desclasses = nomenc.HierarchicalNomenclature.get_level_values(
            nomenc.getLevelNumber() - 1)
        cols = [(str(x), str(z)) for x, y, w, z in desclasses]

        # rename columns with alias
        for col in cols:
            # fix: str has no .decode in Python 3 — only decode raw bytes
            alias = col[1].decode('utf8') if isinstance(col[1],
                                                        bytes) else col[1]
            statsfinal.rename(columns={col[0]: alias}, inplace=True)

    # change columns type
    schema['properties'] = OrderedDict([(x, 'float:10.2') for x in list(statsfinal.columns) \
                                        if x != 'geometry'])

    # exportation # TO TEST
    # TODO Export format depending on columns number (shapefile, sqlite, geojson) # Check Issue on framagit
    convert = False
    outformat = os.path.splitext(output)[1]
    if outformat == ".shp":
        driver = "ESRI Shapefile"
    elif outformat == ".geojson":
        driver = "GeoJSON"
    elif outformat == ".sqlite":
        # SQLite is produced via an intermediate shapefile + ogr2ogr
        driver = "ESRI Shapefile"
        convert = True
    else:
        raise Exception("The output format '%s' is not handled" %
                        (outformat[1:]))

    if not convert:
        statsfinal.to_file(output,
                           driver=driver,
                           schema=schema,
                           encoding='utf-8')
    else:
        outputinter = os.path.splitext(output)[0] + '.shp'
        statsfinal.to_file(outputinter,
                           driver=driver,
                           schema=schema,
                           encoding='utf-8')
        output = os.path.splitext(output)[0] + '.sqlite'
        Utils.run('ogr2ogr -f SQLite %s %s' % (output, outputinter))
def extractRasterArray(rasters,
                       paramstats,
                       vector,
                       vectorgeomtype,
                       fid,
                       gdalpath="",
                       gdalcachemax="9000",
                       systemcall=True,
                       path=""):
    """Clip raster and store in ndarrays

    Parameters
    ----------

    rasters : list
        list of rasters to analyse

    paramstats : dict
        list of statistics to compute (e.g. {1:'stats', 2:'rate'})

    vector : string
        vector file for cutline opetation

    vectorgeomtype : int
        Type of geometry of input/output vector (http://portal.opengeospatial.org/files/?artifact_id=25355)

    fid : integer
        FID value to clip raster (cwhere parameter of gdalwarp)

    gdalpath : string
        gdal binaries path

    gdalcachemax : string
        gdal cache for wrapping operation (in Mb)

    systemcall : boolean
        if True, use os system call to execute gdalwarp (usefull to control gdal binaries version - gdalpath parameter)

    path : string
        temporary path to store temporary date if systemcall is True

    Return
    ----------
    boolean
        if True, wrap operation well terminated

    ndarray
        stacked raster values, or None when clipping failed or on the
        pixel-value ('val') path

    """

    bands = []
    todel = []
    success = True
    # fix: 'ndbands' was unbound on the failure path (typo 'nbbands') and on
    # the pixel-value path, causing a NameError at return
    ndbands = None

    # Get rasters resolution
    res = abs(fut.getRasterResolution(rasters[0])[0])

    # Get vector name
    vectorname = os.path.splitext(os.path.basename(vector))[0]
    for idx, raster in enumerate(rasters):

        # Value extraction: no clipping, keep the raster path itself
        if 'val' in list(paramstats.values()):
            if vectorgeomtype not in (1, 4, 1001, 1004):
                raise Exception("Type of input vector %s must be "\
                                "'Point' for pixel value extraction"%(vector))
            else:
                # NOTE(review): on this path 'bands' holds file paths and
                # storeRasterInArray is never called — confirm callers expect
                # (True, None) here.
                bands.append(raster)
                todel = []

        # Stats Extraction: clip the raster on the feature
        else:
            try:
                # TODO : test gdal version : >= 2.2.4
                if systemcall:
                    tmpfile = os.path.join(
                        path, 'rast_%s_%s_%s' % (vectorname, str(fid), idx))
                    cmd = '%sgdalwarp -tr %s %s -tap -q -overwrite -cutline %s '\
                          '-crop_to_cutline --config GDAL_CACHEMAX %s -wm %s '\
                          '-wo "NUM_THREADS=ALL_CPUS" -wo "CUTLINE_ALL_TOUCHED=YES" '\
                          '-cwhere "FID=%s" %s %s -ot Float32'%(os.path.join(gdalpath, ''), \
                                                                res, \
                                                                res, \
                                                                vector, \
                                                                gdalcachemax, \
                                                                gdalcachemax, \
                                                                fid, \
                                                                raster, \
                                                                tmpfile)
                    Utils.run(cmd)
                    todel.append(tmpfile)
                else:
                    gdal.SetConfigOption("GDAL_CACHEMAX", gdalcachemax)
                    tmpfile = gdal.Warp('', raster, xRes=res, \
                                        yRes=res, targetAlignedPixels=True, \
                                        cutlineDSName=vector, cropToCutline=True, \
                                        cutlineWhere="FID=%s"%(fid), format='MEM', \
                                        warpMemoryLimit=gdalcachemax, \
                                        warpOptions=[["NUM_THREADS=ALL_CPUS"], ["CUTLINE_ALL_TOUCHED=YES"]])

                bands.append(tmpfile)
                todel = []

                # store rasters in ndarray
                # NOTE(review): recomputed at every iteration; kept as-is to
                # preserve behaviour
                ndbands = storeRasterInArray(bands)

            except Exception:
                # gdalwarp can fail on degenerate geometries
                success = False

    # Remove tmp rasters
    for filtodel in todel:
        os.remove(filtodel)

    if not success:
        ndbands = None

    return success, ndbands
# Example 6 — file: AdaptRegul.py, project: inglada/iota2
def regularisation(raster, threshold, nbcores, path, ram = "128"):
    """Adaptive then majority-voting regularisation of a classification raster.

    Builds one binary mask per thematic rule, regularises them with
    gdal_sieve (8 then 4 connectivity), fuses the masks, and finishes with
    two majority-voting sieve passes.

    Parameters
    ----------
    raster : string
        input classification raster
    threshold : int
        sieve threshold (minimal mapping unit, pixels)
    nbcores : int
        number of threads for gdalwarp
    path : string
        working directory
    ram : string
        RAM budget for OTB applications (MB)

    Returns
    -------
    (string, float)
        path of the regularised raster and the elapsed time in seconds
    """

    filetodelete = []

    # First regularisation in connection 8, second in connection 4
    init_regul = time.time()

    # RAM budget shared by every BandMath call
    bandmath_ram = str(0.2 * float(ram))

    # One binary mask per adaptive regularisation rule:
    # (rule description, BandMath expression)
    rules = [('Agriculture', '(im1b1==11 || im1b1==12)?im1b1:0'),
             ('Forest', '(im1b1==31 || im1b1==32)?im1b1:0'),
             ('Urban', '(im1b1==41 || im1b1==42 || im1b1==43)?im1b1:0'),
             ('Open natural areas', '(im1b1==34 || im1b1==36 || im1b1==211)?im1b1:0'),
             ('Bare soil', '(im1b1==45 || im1b1==46)?im1b1:0'),
             ('Perennial agriculture', '(im1b1==221 || im1b1==222)?im1b1:0'),
             ('Road', '(im1b1==44)?im1b1:0'),
             ('Water', '(im1b1==51)?im1b1:0'),
             ('Snow and glacier', '(im1b1==53)?im1b1:0')]

    # A mask for each regularization rule (replaces nine copy-pasted blocks)
    for rank, (_, expression) in enumerate(rules, start=1):
        maskpath = os.path.join(path, 'mask_%s.tif'%(rank))
        bandMathAppli = OtbAppBank.CreateBandMathApplication({"il": raster,
                                                            "exp": expression,
                                                            "ram": bandmath_ram,
                                                            "pixType": "uint8",
                                                            "out": maskpath})
        bandMathAppli.ExecuteAndWriteOutput()
        filetodelete.append(maskpath)

    # Declare 0 as nodata on every mask
    for i in range(len(rules)):
        command = "gdalwarp -q -multi -wo NUM_THREADS=%s -dstnodata 0 %s/mask_%s.tif %s/mask_nd_%s.tif"%(nbcores, \
                                                                                                         path, \
                                                                                                         str(i + 1), \
                                                                                                         path, \
                                                                                                         str(i + 1))
        Utils.run(command)
        filetodelete.append("%s/mask_nd_%s.tif"%(path, str(i + 1)))

    masktime = time.time()
    print(" ".join([" : ".join(["Masks generation for adaptive rules", str(masktime - init_regul)]), "seconds"]))

    # Two successive regularisation (8 neighbors then 4 neighbors)
    for i in range(2):

        if i == 0:
            connexion = 8
        else:
            connexion = 4

        # Tiles number to treat in parralel
        pool = Pool(processes = 6)
        iterable = (np.arange(6)).tolist()
        function = partial(gdal_sieve, threshold, connexion, path)
        pool.map(function, iterable)
        pool.close()
        pool.join()

        # nodata pass on each sieved tile
        for j in range(6):
            command = "gdalwarp -q -multi -wo NUM_THREADS=%s -dstnodata 0 %s/mask_%s_%s.tif %s/mask_nd_%s_%s.tif"%(nbcores, \
                                                                                                                path, \
                                                                                                                str(j + 1), \
                                                                                                                str(connexion), \
                                                                                                                path, \
                                                                                                                str(j + 1), \
                                                                                                                str(connexion))
            Utils.run(command)

        for j in range(6):
            os.remove(path + "/mask_%s_%s.tif"%(str(j + 1),str(connexion)))

    # intermediate 8-connexion outputs are superseded by the 4-connexion pass
    for j in range(6):
        os.remove(path + "/mask_nd_%s_8.tif"%(str(j + 1)))

    adaptativetime = time.time()
    print(" ".join([" : ".join(["Adaptative regularizations", str(adaptativetime - masktime)]), "seconds"]))

    # Fusion of rule-based regularisation
    rastersList = [os.path.join(path, "mask_nd_1_4.tif"), os.path.join(path, "mask_nd_2_4.tif"), os.path.join(path, "mask_nd_3_4.tif"), \
                   os.path.join(path, "mask_nd_4_4.tif"), os.path.join(path, "mask_nd_5_4.tif"), os.path.join(path, "mask_nd_6_4.tif"), \
                   os.path.join(path, "mask_nd_7.tif"), os.path.join(path, "mask_nd_8.tif"), os.path.join(path, "mask_nd_9.tif")]

    bandMathAppli = OtbAppBank.CreateBandMathApplication({"il": rastersList,
                                                        "exp": 'im1b1+im2b1+\
                                                                im3b1+im4b1+\
                                                                im5b1+im6b1+\
                                                                im7b1+im8b1+\
                                                                im9b1',
                                                        "ram": bandmath_ram,
                                                        "pixType": "uint8",
                                                        "out": os.path.join(path, 'mask_regul_adapt.tif')})
    bandMathAppli.ExecuteAndWriteOutput()

    for filemask in rastersList:
        os.remove(filemask)

    command = "gdalwarp -q -multi -wo NUM_THREADS="
    command += "%s -dstnodata 0 %s/mask_regul_adapt.tif %s/mask_nd_regul_adapt.tif"%(nbcores, \
                                                                                     path, \
                                                                                     path)
    Utils.run(command)
    filetodelete.append("%s/mask_regul_adapt.tif"%(path))

    # Regularisation based on majority voting

    # 8 neighbors
    command = "gdal_sieve.py -q -8 -st "
    command += "%s %s/mask_nd_regul_adapt.tif %s/mask_regul_adapt_0.tif" %(threshold, \
                                                                           path, \
                                                                           path)
    Utils.run(command)
    filetodelete.append("%s/mask_nd_regul_adapt.tif"%(path))

    command = "gdalwarp -q -multi -wo NUM_THREADS="
    command += "%s -dstnodata 0 %s/mask_regul_adapt_0.tif %s/mask_nd_regul_adapt_0.tif"%(nbcores, \
                                                                                         path, \
                                                                                         path)
    Utils.run(command)
    filetodelete.append("%s/mask_regul_adapt_0.tif"%(path))

    # 4 neighbors
    command = "gdal_sieve.py -q -4 -st "
    command += "%s %s/mask_nd_regul_adapt_0.tif %s/regul_adapt_maj.tif" %(threshold, \
                                                                          path, \
                                                                          path)
    Utils.run(command)
    filetodelete.append("%s/mask_nd_regul_adapt_0.tif"%(path))

    out_classif_sieve = "%s/regul_adapt_maj.tif"%(path)

    majoritytime = time.time()
    print(" ".join([" : ".join(["Majority voting regularization", str(majoritytime - adaptativetime)]), "seconds"]))

    # clean every intermediate file that still exists
    for filetodel in filetodelete:
        if os.path.exists(filetodel):
            os.remove(filetodel)

    end_regul = time.time() - init_regul

    return out_classif_sieve, end_regul
# Example 7
def searchCrownTile(inpath, raster, clump, ram, grid, outpath, nbcore = 4, ngrid = -1, logger=logger):
    """Extract, for one grid tile, the classification raster of the tile's
    entities plus their "crown" (the entities adjacent to them), for later
    serialized regularisation.

        in :
            inpath : working directory with datas
            raster : name of classification raster
            clump : clump (entity id) raster path
            ram : ram for otb application
            grid : grid name for serialisation
            outpath : output path
            nbcore : threads passed to gdalwarp (NUM_THREADS)
            ngrid : tile number (None : iterate over every tile of the grid)

        out :
            crown raster (crown_ngrid.tif) and pickled entity-id list
            (listid_ngrid) copied into outpath
    """

    begintime = time.time()

    # Refuse to overwrite an existing output tile.
    # NOTE(review): when ngrid is None this tests for "tile_None.tif" — the
    # check is only meaningful when a specific tile number was requested.
    if os.path.exists(os.path.join(outpath, "tile_%s.tif"%(ngrid))):
        logger.error("Output file '%s' already exists"%(os.path.join(outpath, \
                                                                     "tile_%s.tif"%(ngrid))))
        sys.exit()

    # Read the clump raster once to compute every entity's bounding box.
    rasterfile = gdal.Open(clump, 0)
    clumpBand = rasterfile.GetRasterBand(1)

    xsize = rasterfile.RasterXSize
    ysize = rasterfile.RasterYSize
    clumpArray = clumpBand.ReadAsArray()
    clumpProps = regionprops(clumpArray)
    # Drop the full-size array as soon as region properties are computed.
    rasterfile = clumpBand = clumpArray = None

    # Get extent of all image clumps: label -> (min_row, min_col, max_row, max_col)
    params = {x.label:x.bbox for x in clumpProps}

    timeextents = time.time()
    logger.info(" ".join([" : ".join(["Get extents of all entities", str(round(timeextents - begintime, 2))]), "seconds"]))

    # Open Grid file
    driver = ogr.GetDriverByName("ESRI Shapefile")
    shape = driver.Open(grid, 0)
    grid_layer = shape.GetLayer()

    allTile = False
    # for each tile
    for feature in grid_layer :

        # No specific tile requested: start at this feature's FID and walk
        # through all tiles (ngrid is then incremented at the end of the loop).
        # NOTE(review): the default ngrid is -1, not None, so this branch only
        # fires when the caller explicitly passes ngrid=None — confirm callers.
        if ngrid is None:
            ngrid = int(feature.GetField("FID"))
            allTile = True

        # get feature FID
        idtile = int(feature.GetField("FID"))

        # feature ID vs. requested tile (ngrid)
        if idtile == int(ngrid):
            logger.info("Tile : %s"%(idtile))

            # manage environment
            if not os.path.exists(os.path.join(inpath, str(ngrid))):
                os.mkdir(os.path.join(inpath, str(ngrid)))

            # entities ID list of tile
            listTileId = listTileEntities(raster, outpath, feature)

            # process the tile only if it actually contains entities
            if len(listTileId) != 0 :

                timentities = time.time()
                logger.info(" ".join([" : ".join(["Entities ID list of tile", str(round(timentities - timeextents, 2))]), "seconds"]))
                logger.info(" : ".join(["Entities number", str(len(listTileId))]))

                # tile entities bounding box (pixel coordinates, clipped to the raster)
                listExtent = ExtentEntitiesTile(listTileId, params, xsize, ysize, False)
                timeextent = time.time()
                logger.info(" ".join([" : ".join(["Compute geographical extent of entities", str(round(timeextent - timentities, 2))]), "seconds"]))

                # Extract classification raster on tile entities extent
                tifRasterExtract = os.path.join(inpath, str(ngrid), "tile_%s.tif"%(ngrid))
                if os.path.exists(tifRasterExtract):os.remove(tifRasterExtract)

                # Convert the pixel-space extent to georeferenced coordinates
                # (row 0 is the top of the raster, hence min row -> max y).
                xmin, ymax = pixToGeo(raster, listExtent[1], listExtent[0])
                xmax, ymin = pixToGeo(raster, listExtent[3], listExtent[2])


                command = "gdalwarp -q -multi -wo NUM_THREADS={} -te {} {} {} {} -ot UInt32 {} {}".format(nbcore,\
                                                                                                          xmin, \
                                                                                                          ymin, \
                                                                                                          xmax, \
                                                                                                          ymax, \
                                                                                                          raster, \
                                                                                                          tifRasterExtract)
                Utils.run(command)
                timeextract = time.time()
                logger.info(" ".join([" : ".join(["Extract classification raster on tile entities extent", str(round(timeextract - timeextent, 2))]), "seconds"]))

                # Crown entities research: build a region adjacency graph on the
                # entity-id band (band 2 of the extract — assumes the raster is
                # the 2-band class/clump stack; TODO confirm against producer).
                ds = gdal.Open(tifRasterExtract)
                idx = ds.ReadAsArray()[1]
                g = graph.RAG(idx.astype(int), connectivity = 2)

                # Create connection duplicates: store each adjacency in both
                # directions so grouping by either endpoint works.
                # NOTE(review): the > 301 cutoff looks tied to the +300 offset
                # applied to clump ids elsewhere (real entities have id > 300);
                # confirm why 301 rather than 300.
                listelt = []
                for elt in g.edges():
                    if elt[0] > 301 and elt[1] > 301:
                        listelt.append(elt)
                        listelt.append((elt[1], elt[0]))

                # group by tile entities id
                topo = dict(fu.sortByFirstElem(listelt))

                # Flat list and remove tile entities: keep only the neighbours
                # of the tile's own entities.
                flatneighbors = set(chain(*list(dict((key,value) for key, value in list(topo.items()) if key in listTileId).values())))

                timecrownentities = time.time()
                logger.info(" ".join([" : ".join(["List crown entities", str(round(timecrownentities - timeextract, 2))]), "seconds"]))

                # Crown raster extraction over the neighbours' combined extent
                listExtentneighbors = ExtentEntitiesTile(flatneighbors, params, xsize, ysize, False)
                xmin, ymax = pixToGeo(raster, listExtentneighbors[1], listExtentneighbors[0])
                xmax, ymin = pixToGeo(raster, listExtentneighbors[3], listExtentneighbors[2])

                rastEntitiesNeighbors = os.path.join(inpath, str(ngrid), "crown_%s.tif"%(ngrid))
                if os.path.exists(rastEntitiesNeighbors):os.remove(rastEntitiesNeighbors)
                command = "gdalwarp -q -multi -wo NUM_THREADS={} -te {} {} {} {} -ot UInt32 {} {}".format(nbcore,\
                                                                                                          xmin, \
                                                                                                          ymin, \
                                                                                                          xmax, \
                                                                                                          ymax, \
                                                                                                          raster, \
                                                                                                          rastEntitiesNeighbors)

                Utils.run(command)

                timeextractcrown = time.time()
                logger.info(" ".join([" : ".join(["Extract classification raster on crown entities extent", str(round(timeextractcrown - timecrownentities, 2))]), "seconds"]))

                shutil.copy(rastEntitiesNeighbors, os.path.join(outpath, "crown_%s.tif"%(ngrid)))

                # Persist the tile + crown entity ids for the downstream step.
                with open(os.path.join(inpath, str(ngrid), "listid_%s"%(ngrid)), 'wb') as fp:
                    pickle.dump([listTileId + list(flatneighbors)], fp)

                shutil.copy(os.path.join(inpath, str(ngrid), "listid_%s"%(ngrid)), os.path.join(outpath, "listid_%s"%(ngrid)))
                shutil.rmtree(os.path.join(inpath, str(ngrid)), ignore_errors=True)

            # When iterating over all tiles, move on to the next FID.
            if allTile:
                ngrid += 1
# 예제 #8 (Example #8, vote count 0) — scraped-sample separator, commented out
# so the file remains valid Python.
def joinShapeStats(shapefile, stats, tmp, outfile):
    """Join zonal statistics onto a shapefile and export a typed shapefile.

    The input shapefile is converted to a temporary SQLite database, the
    ``statsfinal`` table of the *stats* database is attached and joined on the
    shapefile's first column (its FID), and the joined view is exported twice:
    once as a raw temporary shapefile, then as *outfile* with explicit CASTs
    renaming the stat columns (Classe, Validmean, ..., Aire).

    Parameters
    ----------
    shapefile : str
        input vector file to enrich
    stats : str
        SQLite database containing a ``statsfinal`` table keyed by ``idstats``;
        deleted on success
    tmp : str
        directory for temporary files (SQLite db and intermediate shapefile)
    outfile : str
        output shapefile path
    """
    # Fix: the original aliased this import (``import sqlite3 as db``) but then
    # called ``sqlite3.connect`` — import the module under the name actually used.
    import sqlite3

    layer = os.path.splitext(os.path.basename(shapefile))[0]
    tmpfile = os.path.join(tmp, 'tmp_%s.sqlite' % (layer))
    # Convert the shapefile into a working SQLite database.
    # NOTE(review): paths are interpolated into a shell command — only safe for
    # trusted, shell-quotable paths.
    Utils.run('ogr2ogr -f SQLite %s %s -nln %s' % (tmpfile, shapefile, layer))

    database = sqlite3.connect(tmpfile)
    cursor = database.cursor()
    # Identifiers (table/column names) cannot be bound as SQL parameters, hence
    # the string formatting; the names are file-derived, not user input.
    cursor.execute("ATTACH '%s' as db;" % (stats))
    cursor.execute("create table stats as select * from db.statsfinal;")

    # get shapefile fid colname (first column of the converted layer)
    cursor.execute('select * from %s' % (layer))
    fieldnames = [f[0] for f in cursor.description]
    idcolname = fieldnames[0]

    # Index both sides of the join so the LEFT JOIN below is not O(n*m).
    cursor.execute("CREATE INDEX idx_shp ON %s(%s);" % (layer, idcolname))
    cursor.execute("CREATE INDEX idx_stats ON %s(%s);" % ('stats', 'idstats'))

    cursor.execute(
        "create view datajoin as SELECT * FROM %s LEFT JOIN stats ON %s.%s = stats.idstats;"
        % (layer, layer, idcolname))

    database.commit()
    database.close()

    # Export the joined view to an intermediate shapefile.
    outfiletmp = os.path.join(
        tmp,
        os.path.splitext(os.path.basename(outfile))[0] + '_tmp.shp')
    Utils.run(
        'ogr2ogr -f "ESRI Shapefile" -sql "select * from datajoin" %s %s -nln %s'
        % (outfiletmp, tmpfile, layer))

    layerout = os.path.splitext(os.path.basename(outfiletmp))[0]
    # Final export: hard-coded 17-class OSO nomenclature column list.
    # NOTE(review): iota2Formatting builds this list from a nomenclature config;
    # consider doing the same here instead of hard-coding the class names.
    command = "ogr2ogr -overwrite -q -f 'ESRI Shapefile' -overwrite -sql "\
              "'SELECT CAST(class AS INTEGER(4)) AS Classe, "\
              "CAST(valmean AS INTEGER(4)) AS Validmean, "\
              "CAST(valstd AS NUMERIC(6,2)) AS Validstd, "\
              "CAST(mconf AS INTEGER(4)) AS Confidence, "\
              "CAST(Hiver AS NUMERIC(6,2)) AS Hiver, "\
              "CAST(Ete AS NUMERIC(6,2)) AS Ete, "\
              "CAST(Feuillus AS NUMERIC(6,2)) AS Feuillus, "\
              "CAST(Coniferes AS NUMERIC(6,2)) AS Coniferes, "\
              "CAST(Pelouse AS NUMERIC(6,2)) AS Pelouse, "\
              "CAST(Landes AS NUMERIC(6,2)) AS Landes, "\
              "CAST(UrbainDens AS NUMERIC(6,2)) AS UrbainDens, "\
              "CAST(UrbainDiff AS NUMERIC(6,2)) AS UrbainDiff, "\
              "CAST(ZoneIndCom AS NUMERIC(6,2)) AS ZoneIndCom, "\
              "CAST(Route AS NUMERIC(6,2)) AS Route, "\
              "CAST(PlageDune AS NUMERIC(6,2)) AS PlageDune, "\
              "CAST(SurfMin AS NUMERIC(6,2)) AS SurfMin, "\
              "CAST(Eau AS NUMERIC(6,2)) AS Eau, "\
              "CAST(GlaceNeige AS NUMERIC(6,2)) AS GlaceNeige, "\
              "CAST(Prairie AS NUMERIC(6,2)) AS Prairie, "\
              "CAST(Vergers AS NUMERIC(6,2)) AS Vergers, "\
              "CAST(Vignes AS NUMERIC(6,2)) AS Vignes, "\
              "CAST(Area AS NUMERIC(10,2)) AS Aire "\
              "FROM %s' "\
              "%s %s"%(layerout, outfile, outfiletmp)
    Utils.run(command)

    # Clean up the intermediate shapefile parts and the source databases.
    for ext in ['.dbf', '.shp', '.prj', '.shx']:
        os.remove(os.path.splitext(outfiletmp)[0] + ext)

    os.remove(stats)
    os.remove(tmpfile)
# 예제 #9 (Example #9, vote count 0) — scraped-sample separator, commented out
# so the file remains valid Python.
def clumpAndStackClassif(path,
                         raster,
                         outpath,
                         ram,
                         float64=False,
                         exe64="",
                         logger=logger):
    """Label the connected components ("clumps") of a classification raster and
    stack classes + clump ids into one 2-band raster.

    Band 1 of the result holds the input class labels (as uint8), band 2 the
    clump ids offset by +300.  A 1-band 32-bit copy of the clump band
    ("clump32bits.tif") is also written to the output directory.

    Parameters
    ----------
    path : str
        working directory for intermediate rasters
    raster : str
        input classification raster
    outpath : str
        full path (directory + file name) of the stacked output raster
    ram : int or str
        RAM budget; each OTB application is given 20% of it
    float64 : bool
        when True, run the external 64-bit iota2 binaries instead of the
        in-memory OTB pipeline
    exe64 : str
        directory containing the 64-bit binaries (used when float64 is True)
    """

    begin_clump = time.time()

    # split path and file name of outfilename
    out = os.path.dirname(outpath)
    outfilename = os.path.basename(outpath)

    # Clump Classif with OTB segmentation algorithm: connected components of
    # pixels sharing the same class value (distance < 1).
    clumpAppli = OtbAppBank.CreateClumpApplication({
        "in": raster,
        "filter.cc.expr": 'distance<1',
        "ram": str(0.2 * float(ram)),
        "pixType": 'uint32',
        "mode": "raster",
        "filter": "cc",
        "mode.raster.out": os.path.join(path, 'clump.tif')
    })

    if not float64:
        # In-memory OTB pipeline: clump -> +300 offset -> stack with classes.
        clumpAppli.Execute()

        clumptime = time.time()
        logger.info(" ".join([
            " : ".join(
                ["Input raster well clumped : ",
                 str(clumptime - begin_clump)]), "seconds"
        ]))

        # Add 300 to all clump ID (reserves low ids; downstream steps treat
        # ids > 300 as real entities)
        bandMathAppli = OtbAppBank.CreateBandMathApplication({
            "il": clumpAppli,
            "exp": 'im1b1+300',
            "ram": str(0.2 * float(ram)),
            "pixType": 'uint32',
            "out": os.path.join(path, 'clump300.tif')
        })
        bandMathAppli.Execute()

        # Re-read the classification as uint8 so the stack's first band is the
        # class labels.
        dataRamAppli = OtbAppBank.CreateBandMathApplication({
            "il": raster,
            "exp": 'im1b1',
            "ram": str(0.2 * float(ram)),
            "pixType": 'uint8'
        })
        dataRamAppli.Execute()

        concatImages = OtbAppBank.CreateConcatenateImagesApplication({
            "il": [dataRamAppli, bandMathAppli],
            "ram": str(0.2 * float(ram)),
            "pixType": 'uint32',
            "out": os.path.join(path, outfilename)
        })
        concatImages.ExecuteAndWriteOutput()

        concattime = time.time()
        logger.info(" ".join([
            " : ".join([
                "Regularized and Clumped rasters concatenation : ",
                str(concattime - clumptime)
            ]), "seconds"
        ]))

        shutil.copyfile(os.path.join(path, outfilename),
                        os.path.join(out, outfilename))

    else:
        # 64-bit path: write clump.tif to disk, then run the external binaries.
        clumpAppli.ExecuteAndWriteOutput()

        command = '%s/iota2BandMath %s "%s" %s %s'%(exe64, \
                                                    os.path.join(path, 'clump.tif'), \
                                                    "im1b1+300", \
                                                    os.path.join(path, 'clump300.tif'), \
                                                    10)
        # Fix: bare "except:" also swallowed SystemExit/KeyboardInterrupt;
        # narrowed to Exception so interrupts still propagate.
        try:
            Utils.run(command)
            clumptime = time.time()
            logger.info(" ".join([
                " : ".join([
                    "Input raster well clumped : ",
                    str(clumptime - begin_clump)
                ]), "seconds"
            ]))
        except Exception:
            logger.error(
                "Application 'iota2BandMath' for 64 bits does not exist, please change 64 bits binaries path"
            )
            sys.exit()

        command = '%s/iota2ConcatenateImages %s %s %s %s'%(exe64,
                                                           raster, \
                                                           os.path.join(path, 'clump300.tif'), \
                                                           os.path.join(path, outfilename),
                                                           10)
        try:
            Utils.run(command)
            concattime = time.time()
            logger.info(" ".join([" : ".join(["Regularized and Clumped rasters concatenation : ", \
                                              str(concattime - clumptime)]), "seconds"]))
            shutil.copyfile(os.path.join(path, outfilename),
                            os.path.join(out, outfilename))
            os.remove(os.path.join(path, 'clump.tif'))
            os.remove(os.path.join(path, 'clump300.tif'))
        except Exception:
            logger.error(
                "Application 'iota2ConcatenateImages' for 64 bits does not exist, please change 64 bits binaries path"
            )
            sys.exit()

    # Extract the clump band (band 2) as a standalone 32-bit raster and move it
    # to the output directory (GDAL type names are case-insensitive, so
    # "Uint32" is accepted).
    command = "gdal_translate -q -b 2 -ot Uint32 %s %s" % (os.path.join(
        path, outfilename), os.path.join(path, "clump32bits.tif"))
    Utils.run(command)
    shutil.copy(os.path.join(path, "clump32bits.tif"), out)
    os.remove(os.path.join(path, "clump32bits.tif"))
    if os.path.exists(os.path.join(path, outfilename)):
        os.remove(os.path.join(path, outfilename))

    clumptime = time.time()
    logger.info(" ".join(
        [" : ".join(["Clump : ", str(clumptime - begin_clump)]), "seconds"]))
# 예제 #10 (Example #10, vote count 0) — scraped-sample separator, commented out
# so the file remains valid Python.
def manageBlocks(pathCrowns,
                 tilenumber,
                 blocksize,
                 inpath,
                 outpath,
                 ram,
                 logger=logger):
    """Cut a tile's crown raster into square blocks, mask out entities that do
    not belong to the tile's id list, then mosaic the masked blocks back into
    tile_<tilenumber>.tif and copy the result to outpath.

    Parameters
    ----------
    pathCrowns : str
        directory containing crown_<tilenumber>.tif and listid_<tilenumber>
    tilenumber : int
        tile id to process
    blocksize : int
        block side length, in pixels
    inpath : str
        working directory for temporary block rasters
    outpath : str
        destination directory of the final tile raster
    ram :
        not used in the visible body — presumably kept for call-compatibility
        with sibling functions; TODO confirm
    """

    # NOTE(review): tabBlocks is never used in the visible body.
    tabBlocks = []
    tomerge = []
    for paths, dirs, files in os.walk(pathCrowns):
        for crown in files:
            # Match crown_<tilenumber>.tif while skipping GDAL .aux.xml sidecars.
            if "crown_" + str(
                    tilenumber) + ".tif" in crown and "aux.xml" not in crown:
                shutil.copy(os.path.join(paths, crown), inpath)
                crownSource = gdal.Open(os.path.join(inpath, crown),
                                        GA_ReadOnly)
                row, col = int(crownSource.RasterYSize), int(
                    crownSource.RasterXSize)
                crownSource = None
                # Block origins along each axis (last block may be partial).
                intervalX = np.arange(0, col, blocksize)
                intervalY = np.arange(0, row, blocksize)
                nbcolsblock = len(intervalX)
                nbrowsblock = len(intervalY)

                # Entity ids belonging to this tile (pickled as a 1-element list).
                with open(os.path.join(pathCrowns, "listid_%s" % (tilenumber)),
                          'rb') as f:
                    listid = pickle.load(f)

                nbblock = 0
                for y in intervalY:
                    for x in intervalX:
                        outputTif = os.path.join(
                            inpath,
                            "crown%sblock%s.tif" % (tilenumber, nbblock))
                        # NOTE(review): despite the names, "ymin" holds the
                        # upper edge and "ymax" the lower one (raster rows grow
                        # downward); the -te argument order below compensates.
                        xmin, ymin = pixToGeo(os.path.join(inpath, crown), x,
                                              y)
                        xmax, ymax = pixToGeo(os.path.join(inpath, crown),
                                              x + blocksize, y + blocksize)

                        # Extract one block of the crown raster.
                        cmd = "gdalwarp -overwrite -multi --config GDAL_CACHEMAX 9000 -wm 9000 -wo NUM_THREADS=ALL_CPUS -te " + str(
                            xmin) + " " + str(ymax) + " " + str(
                                xmax) + " " + str(ymin)
                        cmd = cmd + " -ot UInt32 " + os.path.join(
                            inpath, crown) + " " + outputTif

                        Utils.run(cmd)
                        ds = gdal.Open(outputTif)
                        # Band 2: entity ids, band 1: class labels — assumes
                        # the 2-band class/clump stack; TODO confirm.
                        idx = ds.ReadAsArray()[1]
                        labels = ds.ReadAsArray()[0]
                        # Keep class labels only where the entity id belongs to
                        # the tile's id list.
                        masknd = np.isin(idx, listid[0])
                        # NOTE(review): this rebinding shadows the loop
                        # variable x; harmless here only because x is not read
                        # again before the next iteration reassigns it.
                        x = labels * masknd
                        outRasterPath = os.path.join(
                            inpath, "crown%sblock%s_masked.tif" %
                            (tilenumber, nbblock))
                        tomerge.append(outRasterPath)
                        arraytoRaster(x, outRasterPath, ds)
                        os.remove(outputTif)

                        nbblock += 1

                # Mosaic the masked blocks at the source pixel size
                # (ds still references the last block opened above).
                out = os.path.join(inpath, "tile_%s.tif" % (tilenumber))
                fu.assembleTile_Merge(tomerge,
                                      int(round(ds.GetGeoTransform()[1], 0)),
                                      out,
                                      ot="Byte")

                shutil.copy(out, outpath)

                # remove tmp files
                os.remove(os.path.join(inpath, crown))
                os.remove(out)
                os.remove(os.path.join(paths, crown))
                os.remove(os.path.join(pathCrowns, "listid_%s" % (tilenumber)))
                for fileblock in tomerge:
                    os.remove(fileblock)

                logger.info('Crown raster of tile %s is now ready' %
                            (tilenumber))