Example 1
def calcImgBasicStats4RefRegion(ref_img,
                                stats_imgs,
                                output_img,
                                gdalformat='KEA'):
    """
A function which calculates the mean and standard deviation through a series of
input images. The region for processing is defined by the reference image, and
the input images are padded with no-data where data is not present.

The output image has twice the number of bands as the input image providing
a mean and standard deviation for each input band.

If the input images have 2 bands then the output bands will have the following
order:

1. band 1 mean
2. band 1 std dev
3. band 2 mean
4. band 2 std dev

:param ref_img: reference image which defines the output image
:param stats_imgs: a list of input images over which the stats will be calculated.
:param output_img: the output image path and file name
:param gdalformat: the output image file format. Default KEA.

"""
    import numpy

    import rsgislib.imageutils
    from rios import applier

    rsgis_utils = rsgislib.RSGISPyUtils()
    first = True
    n_bands = 0
    no_data_val = 0
    for img in stats_imgs:
        print(img)
        if first:
            n_bands = rsgis_utils.getImageBandCount(img)
            no_data_val = rsgis_utils.getImageNoDataValue(img)
            first = False
        else:
            if n_bands != rsgis_utils.getImageBandCount(img):
                raise Exception(
                    "The number of bands must be the same in all input images."
                )
            if no_data_val != rsgis_utils.getImageNoDataValue(img):
                raise Exception(
                    "The no data value should be the same in all input images."
                )

    # RIOS internal function to calculate the mean and standard deviation of the input images
    def _calcBasicStats(info, inputs, outputs, otherargs):
        n_imgs = len(inputs.imgs)
        blk_shp = inputs.imgs[0].shape
        if blk_shp[0] != otherargs.n_bands:
            raise Exception(
                "Block shape and the number of input image bands do not align."
            )
        outputs.output_img = numpy.zeros(
            (blk_shp[0] * 2, blk_shp[1], blk_shp[2]), dtype=float)

        band_arr = []
        for band in range(blk_shp[0]):
            band_arr.append(
                numpy.zeros((n_imgs, blk_shp[1], blk_shp[2]), dtype=float))

        img_idx = 0
        for img_blk in inputs.imgs:
            for band in range(blk_shp[0]):
                band_arr[band][img_idx] = img_blk[band]
            img_idx = img_idx + 1

        for band in range(blk_shp[0]):
            band_arr[band][band_arr[band] == otherargs.no_data_val] = numpy.nan

            outputs.output_img[band * 2] = numpy.nanmean(band_arr[band],
                                                         axis=0)
            outputs.output_img[band * 2 + 1] = numpy.nanstd(band_arr[band],
                                                            axis=0)

            outputs.output_img[band * 2][numpy.isnan(
                outputs.output_img[band * 2])] = otherargs.no_data_val
            outputs.output_img[band * 2 + 1][numpy.isnan(
                outputs.output_img[band * 2 + 1])] = 0.0

    try:
        import tqdm
        progress_bar = rsgislib.TQDMProgressBar()
    except ImportError:
        from rios import cuiprogress
        progress_bar = cuiprogress.GDALProgressBar()

    infiles = applier.FilenameAssociations()
    infiles.imgs = stats_imgs

    otherargs = applier.OtherInputs()
    otherargs.n_bands = n_bands
    otherargs.no_data_val = no_data_val

    outfiles = applier.FilenameAssociations()
    outfiles.output_img = output_img

    aControls = applier.ApplierControls()
    aControls.referenceImage = ref_img
    aControls.footprint = applier.BOUNDS_FROM_REFERENCE
    aControls.progress = progress_bar
    aControls.drivername = gdalformat
    aControls.omitPyramids = True
    aControls.calcStats = False
    print("Calculating Stats Image.")
    applier.apply(_calcBasicStats,
                  infiles,
                  outfiles,
                  otherargs,
                  controls=aControls)
    print("Completed")

    rsgislib.imageutils.popImageStats(output_img,
                                      usenodataval=True,
                                      nodataval=no_data_val,
                                      calcpyramids=True)
Example 2
import numpy
import xgboost as xgb

import rsgislib
import rsgislib.imagecalc
import rsgislib.imageutils
import rsgislib.rastergis

from rios import applier
from rios import cuiprogress


def apply_xgboost_binary_classifier(model_file,
                                    imgMask,
                                    imgMaskVal,
                                    imgFileInfo,
                                    outProbImg,
                                    gdalformat,
                                    outClassImg=None,
                                    class_thres=5000,
                                    nthread=1):
    """
This function applies a trained binary (i.e., two classes) xgboost model. The function train_xgboost_binary_classifer
can be used to train such a model. The output image will contain the probability of membership to the class of
interest. You will need to threshold this image to get a final hard classification. Alternatively, a hard
classification image can be generated by providing outClassImg and a threshold (class_thres).

:param model_file: a trained xgboost binary model which can be loaded with xgb.Booster(model_file=model_file).
:param imgMask: is an image file providing a mask to specify where should be classified. Simplest mask is all the
                valid data regions (rsgislib.imageutils.genValidMask)
:param imgMaskVal: the pixel value within the imgMask to limit the region to which the classification is applied.
                   Can be used to create a hierarchical classification.
:param imgFileInfo: a list of rsgislib.imageutils.ImageBandInfo objects (also used within
                    rsgislib.imageutils.extractZoneImageBandValues2HDF) to identify which images and bands are to
                    be used for the classification so it adheres to the training data.
:param outProbImg: output image file with the classification probabilities - this image is scaled by
                   multiplying by 10000.
:param gdalformat: is the output image format - all GDAL supported formats are supported.
:param outClassImg: Optional output image which will contain the hard classification, defined with a threshold on the
                    probability image.
:param class_thres: The threshold used to define the hard classification. Default is 5000 (i.e., probability of 0.5).
:param nthread: The number of threads to use for the classifier.

    """
    def _applyXGBClassifier(info, inputs, outputs, otherargs):
        outClassVals = numpy.zeros_like(inputs.imageMask, dtype=numpy.uint16)
        if numpy.any(inputs.imageMask == otherargs.mskVal):
            outClassVals = outClassVals.flatten()
            imgMaskVals = inputs.imageMask.flatten()
            classVars = numpy.zeros(
                (outClassVals.shape[0], otherargs.numClassVars),
                dtype=float)
            # Array index which can be used to populate the output array following masking etc.
            ID = numpy.arange(imgMaskVals.shape[0])
            classVarsIdx = 0
            for imgFile in otherargs.imgFileInfo:
                imgArr = inputs.__dict__[imgFile.name]
                for band in imgFile.bands:
                    classVars[..., classVarsIdx] = imgArr[(band - 1)].flatten()
                    classVarsIdx = classVarsIdx + 1
            classVars = classVars[imgMaskVals == otherargs.mskVal]
            ID = ID[imgMaskVals == otherargs.mskVal]
            predClass = numpy.around(
                otherargs.classifier.predict(xgb.DMatrix(classVars)) * 10000)
            outClassVals[ID] = predClass
            outClassVals = numpy.expand_dims(outClassVals.reshape(
                (inputs.imageMask.shape[1], inputs.imageMask.shape[2])),
                                             axis=0)
        outputs.outimage = outClassVals

    classifier = xgb.Booster({'nthread': nthread})
    classifier.load_model(model_file)

    infiles = applier.FilenameAssociations()
    infiles.imageMask = imgMask
    numClassVars = 0
    for imgFile in imgFileInfo:
        infiles.__dict__[imgFile.name] = imgFile.fileName
        numClassVars = numClassVars + len(imgFile.bands)

    outfiles = applier.FilenameAssociations()
    outfiles.outimage = outProbImg
    otherargs = applier.OtherInputs()
    otherargs.classifier = classifier
    otherargs.mskVal = imgMaskVal
    otherargs.numClassVars = numClassVars
    otherargs.imgFileInfo = imgFileInfo

    try:
        import tqdm
        progress_bar = rsgislib.TQDMProgressBar()
    except ImportError:
        progress_bar = cuiprogress.GDALProgressBar()

    aControls = applier.ApplierControls()
    aControls.progress = progress_bar
    aControls.drivername = gdalformat
    aControls.omitPyramids = True
    aControls.calcStats = False
    print("Applying the Classifier")
    applier.apply(_applyXGBClassifier,
                  infiles,
                  outfiles,
                  otherargs,
                  controls=aControls)
    print("Completed")
    rsgislib.imageutils.popImageStats(outProbImg,
                                      usenodataval=True,
                                      nodataval=0,
                                      calcpyramids=True)

    if outClassImg is not None:
        rsgislib.imagecalc.imageMath(outProbImg, outClassImg,
                                     'b1>{}?1:0'.format(class_thres),
                                     gdalformat, rsgislib.TYPE_8UINT)
        rsgislib.rastergis.populateStats(outClassImg,
                                         addclrtab=True,
                                         calcpyramids=True,
                                         ignorezero=True)
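
A hedged usage sketch (hypothetical paths; the ImageBandInfo entries must reference the same images/bands, in the same order, used to train the model):

import rsgislib.imageutils

img_band_info = [rsgislib.imageutils.ImageBandInfo('./sen2_img.kea', 'sen2',
                                                   [1, 2, 3, 7])]
apply_xgboost_binary_classifier(model_file='./mangrove_xgb.mdl',
                                imgMask='./valid_msk.kea',
                                imgMaskVal=1,
                                imgFileInfo=img_band_info,
                                outProbImg='./mangrove_prob.kea',
                                gdalformat='KEA',
                                outClassImg='./mangrove_cls.kea',
                                class_thres=5000,
                                nthread=4)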
Example 3
def translate(info,
              infile,
              outfile,
              colTypes,
              pulseCols=None,
              expectRange=None,
              scaling=None,
              classificationTranslation=None,
              nullVals=None,
              constCols=None):
    """
    Main function which does the work.

    * Info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files respectively.
    * expectRange is a list of tuples with (type, varname, min, max).
    * scaling is a list of tuples with (type, varname, gain, offset).
    * colTypes is a list of name and data type tuples for every column
    * pulseCols is a list of strings defining the pulse columns
    * classificationTranslation is a list of tuples specifying how to translate
        between the codes within the files and the 
        lidarprocessor.CLASSIFICATION_* ones. The first element of each tuple
        is the code used within the file, the second the lidarprocessor code.
    * nullVals is a list of tuples with (type, varname, value)
    * constCols is a list of tuples with (type, varname, dtype, value)
    """
    scalingsDict = translatecommon.overRideDefaultScalings(scaling)

    # set up the variables
    dataFiles = lidarprocessor.DataFiles()
    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)

    # convert from strings to numpy dtypes
    numpyColTypes = []
    for name, typeString in colTypes:
        numpydtype = translatecommon.STRING_TO_DTYPE[typeString.upper()]
        numpyColTypes.append((name, numpydtype))

    dataFiles.input1.setLiDARDriverOption('COL_TYPES', numpyColTypes)
    if pulseCols is not None:
        dataFiles.input1.setLiDARDriverOption('PULSE_COLS', pulseCols)

    if classificationTranslation is not None:
        dataFiles.input1.setLiDARDriverOption('CLASSIFICATION_CODES',
                                              classificationTranslation)

    controls = lidarprocessor.Controls()
    progress = cuiprogress.GDALProgressBar()
    controls.setProgress(progress)
    controls.setSpatialProcessing(False)

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.scaling = scalingsDict
    otherArgs.expectRange = expectRange
    otherArgs.nullVals = nullVals
    otherArgs.constCols = constCols

    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
                                                 lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('SPDV4')
    dataFiles.output1.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
                                           False)

    lidarprocessor.doProcessing(transFunc,
                                dataFiles,
                                controls=controls,
                                otherArgs=otherArgs)
Example 4
def rescaleImgPxlVals(inputImg,
                      outputImg,
                      gdalformat,
                      datatype,
                      bandRescale,
                      trim2Limits=True):
    """
Function which rescales an input image based on a list of rescaling parameters.

:param inputImg: the input image
:param outputImg: the output image file name and path (will be same dimensions as the input)
:param gdalformat: the GDAL image file format of the output image file.
:param datatype: the RSGISLib data type of the output image (e.g., rsgislib.TYPE_32FLOAT).
:param bandRescale: list of ImageBandRescale objects
:param trim2Limits: whether to trim the output to the output min/max values.

"""
    import numpy

    import rsgislib
    from rios import applier

    bandRescaleDict = dict()
    for rescaleObj in bandRescale:
        bandRescaleDict[rescaleObj.band - 1] = rescaleObj

    rsgis_utils = rsgislib.RSGISPyUtils()
    numpyDT = rsgis_utils.getNumpyDataType(datatype)

    try:
        import tqdm
        progress_bar = rsgislib.TQDMProgressBar()
    except ImportError:
        from rios import cuiprogress
        progress_bar = cuiprogress.GDALProgressBar()

    infiles = applier.FilenameAssociations()
    infiles.image = inputImg
    outfiles = applier.FilenameAssociations()
    outfiles.outimage = outputImg
    otherargs = applier.OtherInputs()
    otherargs.rescaleDict = bandRescaleDict
    otherargs.trim = trim2Limits
    otherargs.numpyDT = numpyDT
    aControls = applier.ApplierControls()
    aControls.progress = progress_bar
    aControls.drivername = gdalformat
    aControls.omitPyramids = True
    aControls.calcStats = False

    def _applyRescale(info, inputs, outputs, otherargs):
        """
        This is an internal rios function 
        """
        outputs.outimage = numpy.zeros_like(inputs.image, dtype=numpyDT)
        for idx in range(inputs.image.shape[0]):
            outputs.outimage[idx] = numpy.where(
                inputs.image[idx] == otherargs.rescaleDict[idx].inNoData,
                otherargs.rescaleDict[idx].outNoData,
                (((inputs.image[idx] - otherargs.rescaleDict[idx].inMin) /
                  (otherargs.rescaleDict[idx].inMax -
                   otherargs.rescaleDict[idx].inMin)) *
                 (otherargs.rescaleDict[idx].outMax -
                  otherargs.rescaleDict[idx].outMin)) +
                otherargs.rescaleDict[idx].outMin)
            if otherargs.trim:
                outputs.outimage[idx] = numpy.where(
                    (outputs.outimage[idx] !=
                     otherargs.rescaleDict[idx].outNoData) &
                    (outputs.outimage[idx] <
                     otherargs.rescaleDict[idx].outMin),
                    otherargs.rescaleDict[idx].outMin, outputs.outimage[idx])
                outputs.outimage[idx] = numpy.where(
                    (outputs.outimage[idx] !=
                     otherargs.rescaleDict[idx].outNoData) &
                    (outputs.outimage[idx] >
                     otherargs.rescaleDict[idx].outMax),
                    otherargs.rescaleDict[idx].outMax, outputs.outimage[idx])

    applier.apply(_applyRescale,
                  infiles,
                  outfiles,
                  otherargs,
                  controls=aControls)
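
The expression inside numpy.where above is the standard linear band rescale, out = ((x - inMin) / (inMax - inMin)) * (outMax - outMin) + outMin. A standalone sketch of just that mapping:

import numpy

def linear_rescale(x, in_min, in_max, out_min, out_max):
    # Map values from the range [in_min, in_max] to [out_min, out_max].
    return ((x - in_min) / (in_max - in_min)) * (out_max - out_min) + out_min

print(linear_rescale(numpy.array([0.0, 50.0, 100.0]), 0, 100, 0, 1000))
# [   0.  500. 1000.]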
Example 5
import numpy

from osgeo import gdal

import rsgislib.rastergis

from rios import applier
from rios import cuiprogress
from rios import rat


def apply_keras_pixel_classifier(classTrainInfo,
                                 keras_cls_mdl,
                                 imgMask,
                                 imgMaskVal,
                                 imgFileInfo,
                                 outClassImg,
                                 gdalformat,
                                 pred_batch_size=32,
                                 classClrNames=True):
    """
This function applies a trained single pixel keras model to an image. The function train_keras_pixel_classifer
can be used to train such a model. The output image will contain the hard membership of the predicted class.

:param classTrainInfo: dict (where the key is the class name) of rsgislib.classification.ClassInfoObj
                       objects which will be used to train the classifier (i.e., train_keras_pixel_classifer()),
                       provide pixel value id and RGB class values.
:param keras_cls_mdl: a trained keras model object, with input dimensions equivalent to the number of image
                      bands specified in the imgFileInfo input and an output layer which provides an output
                      array of the length of the number of classes.
:param imgMask: is an image file providing a mask to specify where should be classified. Simplest mask is all the
                valid data regions (rsgislib.imageutils.genValidMask)
:param imgMaskVal: the pixel value within the imgMask to limit the region to which the classification is applied.
                   Can be used to create a hierarchical classification.
:param imgFileInfo: a list of rsgislib.imageutils.ImageBandInfo objects (also used within
                    rsgislib.imageutils.extractZoneImageBandValues2HDF) to identify which images and bands are to
                    be used for the classification so it adheres to the training data.
:param outClassImg: Output image which will contain the hard classification.
:param gdalformat: is the output image format - all GDAL supported formats are supported.
:param pred_batch_size: the batch size used for the classification.
:param classClrNames: default is True and therefore a colour table with the colours specified in the ClassInfoObj
                      objects and a ClassName column (from classTrainInfo) will be added to the output file.

    """
    def _applyKerasPxlClassifier(info, inputs, outputs, otherargs):
        outClassIdVals = numpy.zeros_like(inputs.imageMask, dtype=numpy.uint16)
        if numpy.any(inputs.imageMask == otherargs.mskVal):
            n_pxls = inputs.imageMask.shape[1] * inputs.imageMask.shape[2]
            outClassIdVals = outClassIdVals.flatten()
            imgMaskVals = inputs.imageMask.flatten()
            classVars = numpy.zeros((n_pxls, otherargs.numClassVars),
                                    dtype=float)
            # Array index which can be used to populate the output array following masking etc.
            ID = numpy.arange(imgMaskVals.shape[0])
            classVarsIdx = 0
            for imgFile in otherargs.imgFileInfo:
                imgArr = inputs.__dict__[imgFile.name]
                for band in imgFile.bands:
                    classVars[..., classVarsIdx] = imgArr[(band - 1)].flatten()
                    classVarsIdx = classVarsIdx + 1
            classVars = classVars[imgMaskVals == otherargs.mskVal]
            ID = ID[imgMaskVals == otherargs.mskVal]
            preds_idxs = numpy.argmax(otherargs.classifier.predict(
                classVars, batch_size=otherargs.pred_batch_size),
                                      axis=1)
            preds_cls_ids = numpy.zeros_like(preds_idxs, dtype=numpy.uint16)
            for cls_id, idx in zip(otherargs.cls_id_lut,
                                   numpy.arange(0, len(otherargs.cls_id_lut))):
                preds_cls_ids[preds_idxs == idx] = cls_id

            outClassIdVals[ID] = preds_cls_ids
            outClassIdVals = numpy.expand_dims(outClassIdVals.reshape(
                (inputs.imageMask.shape[1], inputs.imageMask.shape[2])),
                                               axis=0)
        outputs.outclsimage = outClassIdVals

    infiles = applier.FilenameAssociations()
    infiles.imageMask = imgMask
    numClassVars = 0
    for imgFile in imgFileInfo:
        infiles.__dict__[imgFile.name] = imgFile.fileName
        numClassVars = numClassVars + len(imgFile.bands)

    n_classes = len(classTrainInfo)
    cls_id_lut = numpy.zeros(n_classes)
    for clsname in classTrainInfo:
        if classTrainInfo[clsname].id >= n_classes:
            raise Exception(
                "ClassInfoObj '{}' id ({}) is not consecutive starting from 0."
                .format(clsname, classTrainInfo[clsname].id))
        cls_id_lut[classTrainInfo[clsname].id] = classTrainInfo[clsname].out_id

    outfiles = applier.FilenameAssociations()
    outfiles.outclsimage = outClassImg
    otherargs = applier.OtherInputs()
    otherargs.classifier = keras_cls_mdl
    otherargs.pred_batch_size = pred_batch_size
    otherargs.mskVal = imgMaskVal
    otherargs.numClassVars = numClassVars
    otherargs.imgFileInfo = imgFileInfo
    otherargs.n_classes = n_classes
    otherargs.cls_id_lut = cls_id_lut

    try:
        import tqdm
        progress_bar = rsgislib.TQDMProgressBar()
    except ImportError:
        progress_bar = cuiprogress.GDALProgressBar()

    aControls = applier.ApplierControls()
    aControls.progress = progress_bar
    aControls.drivername = gdalformat
    aControls.omitPyramids = True
    aControls.calcStats = False
    print("Applying the Classifier")
    applier.apply(_applyKerasPxlClassifier,
                  infiles,
                  outfiles,
                  otherargs,
                  controls=aControls)
    print("Completed Classification")

    if classClrNames:
        rsgislib.rastergis.populateStats(outClassImg,
                                         addclrtab=True,
                                         calcpyramids=True,
                                         ignorezero=True)
        ratDataset = gdal.Open(outClassImg, gdal.GA_Update)
        red = rat.readColumn(ratDataset, 'Red')
        green = rat.readColumn(ratDataset, 'Green')
        blue = rat.readColumn(ratDataset, 'Blue')
        ClassName = numpy.empty_like(red, dtype=numpy.dtype('S255'))
        ClassName[...] = ""

        for classKey in classTrainInfo:
            print("Apply Colour to class \'" + classKey + "\'")
            red[classTrainInfo[classKey].out_id] = classTrainInfo[classKey].red
            green[classTrainInfo[classKey].
                  out_id] = classTrainInfo[classKey].green
            blue[classTrainInfo[classKey].
                 out_id] = classTrainInfo[classKey].blue
            ClassName[classTrainInfo[classKey].out_id] = classKey

        rat.writeColumn(ratDataset, "Red", red)
        rat.writeColumn(ratDataset, "Green", green)
        rat.writeColumn(ratDataset, "Blue", blue)
        rat.writeColumn(ratDataset, "ClassName", ClassName)
        ratDataset = None
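
A hedged usage sketch (hypothetical paths; the exact ClassInfoObj constructor arguments vary between rsgislib versions, so the fields assumed here are set as attributes):

from keras.models import load_model
import rsgislib.classification
import rsgislib.imageutils

cls_info = dict()
for name, (cls_id, out_id, rgb) in {'forest': (0, 1, (0, 128, 0)),
                                    'water': (1, 2, (0, 0, 255))}.items():
    obj = rsgislib.classification.ClassInfoObj()
    obj.id, obj.out_id = cls_id, out_id
    obj.red, obj.green, obj.blue = rgb
    cls_info[name] = obj

img_band_info = [rsgislib.imageutils.ImageBandInfo('./sen2_img.kea', 'sen2',
                                                   [1, 2, 3])]
apply_keras_pixel_classifier(classTrainInfo=cls_info,
                             keras_cls_mdl=load_model('./cls_mdl.h5'),
                             imgMask='./valid_msk.kea',
                             imgMaskVal=1,
                             imgFileInfo=img_band_info,
                             outClassImg='./out_cls.kea',
                             gdalformat='KEA')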
Example 6
import numpy

import rsgislib
import rsgislib.imageutils
import rsgislib.rastergis

haveRIOS = True
riosErr = ''
try:
    from rios import applier
    from rios import cuiprogress
except ImportError as rios_import_err:
    haveRIOS = False
    riosErr = str(rios_import_err)


def find_class_outliers(pyod_obj,
                        img,
                        img_mask,
                        out_lbls_img,
                        out_scores_img=None,
                        img_mask_val=1,
                        img_bands=None,
                        gdalformat="KEA"):
    """
This function uses the pyod (https://github.com/yzhao062/pyod) library to find outliers within a class.
It is assumed that the input image is from a different date than the mask (classification) and therefore
the outliers will relate to class changes.

:param pyod_obj: an instance of one of the pyod.models classes (e.g., pyod.models.knn.KNN); pass any parameters
                 to its constructor
:param img: input image used for analysis
:param img_mask: input image mask use to define the region of interest.
:param out_lbls_img: output image with a pixel value of 1 for within the mask but not an outlier and 2 for
                     within the mask and an outlier.
:param out_scores_img: optional output image providing the probability of each pixel being an outlier
                       (Default: None; if None it will not be produced).
:param img_mask_val: the pixel value within the mask image for the class of interest. (Default 1)
:param img_bands: the image bands to be used for the analysis. If None then all used (Default: None)
:param gdalformat: file format for the output image(s). Default KEA.

"""
    if not haveRIOS:
        raise Exception(
            "The rios module is required for this function but could not be imported\n\t"
            + riosErr)

    rsgis_utils = rsgislib.RSGISPyUtils()

    if img_bands is not None:
        if not ((type(img_bands) is list) or (type(img_bands) is tuple)):
            raise rsgislib.RSGISPyException(
                "If provided then img_bands should be a list (or None)")
    else:
        n_bands = rsgis_utils.getImageBandCount(img)
        img_bands = numpy.arange(1, n_bands + 1)
    num_vars = len(img_bands)
    img_val_no_data = rsgis_utils.getImageNoDataValue(img)

    msk_arr_vals = rsgislib.imageutils.extractImgPxlValsInMsk(
        img, img_bands, img_mask, img_mask_val, img_val_no_data)
    print("There were {} pixels within the mask.".format(
        msk_arr_vals.shape[0]))

    print("Fitting oulier detector")
    pyod_obj.fit(msk_arr_vals)
    print("Fitted oulier detector")

    # RIOS internal function to apply the outlier detector
    def _applyPyOB(info, inputs, outputs, otherargs):
        # Internal function for rios applier. Used within find_class_outliers.

        out_lbls_vals = numpy.zeros_like(inputs.image_mask, dtype=numpy.uint8)
        if otherargs.out_scores:
            out_scores_vals = numpy.zeros_like(inputs.image_mask,
                                               dtype=float)
        if numpy.any(inputs.image_mask == otherargs.msk_val):
            out_lbls_vals = out_lbls_vals.flatten()
            img_msk_vals = inputs.image_mask.flatten()
            if otherargs.out_scores:
                out_scores_vals = out_scores_vals.flatten()
            ID = numpy.arange(img_msk_vals.shape[0])

            img_shape = inputs.img.shape
            img_bands = inputs.img.reshape(
                (img_shape[0], (img_shape[1] * img_shape[2])))

            band_lst = []
            for band in otherargs.img_bands:
                if (band > 0) and (band <= img_shape[0]):
                    band_lst.append(img_bands[band - 1])
                else:
                    raise Exception(
                        "Band ({}) specified is not within the image".format(
                            band))
            img_bands_sel = numpy.stack(band_lst, axis=0)
            img_bands_trans = numpy.transpose(img_bands_sel)

            if otherargs.no_data_val is not None:
                ID = ID[(img_bands_trans != otherargs.no_data_val).all(axis=1)]
                img_msk_vals = img_msk_vals[(
                    img_bands_trans != otherargs.no_data_val).all(axis=1)]
                img_bands_trans = img_bands_trans[(
                    img_bands_trans != otherargs.no_data_val).all(axis=1)]

            ID = ID[img_msk_vals == otherargs.msk_val]
            img_bands_trans = img_bands_trans[img_msk_vals ==
                                              otherargs.msk_val]

            if img_bands_trans.shape[0] > 0:
                pred_lbls = otherargs.pyod_obj.predict(img_bands_trans)
                pred_lbls = pred_lbls + 1
                out_lbls_vals[ID] = pred_lbls
            out_lbls_vals = numpy.expand_dims(out_lbls_vals.reshape(
                (inputs.image_mask.shape[1], inputs.image_mask.shape[2])),
                                              axis=0)

            if otherargs.out_scores:
                if img_bands_trans.shape[0] > 0:
                    pred_probs = otherargs.pyod_obj.predict_proba(
                        img_bands_trans, method='unify')
                    out_scores_vals[ID] = pred_probs[:, 1]
                out_scores_vals = numpy.expand_dims(out_scores_vals.reshape(
                    (inputs.image_mask.shape[1], inputs.image_mask.shape[2])),
                                                    axis=0)

        outputs.out_lbls_img = out_lbls_vals
        if otherargs.out_scores:
            outputs.out_scores_img = out_scores_vals

    infiles = applier.FilenameAssociations()
    infiles.image_mask = img_mask
    infiles.img = img

    otherargs = applier.OtherInputs()
    otherargs.pyod_obj = pyod_obj
    otherargs.msk_val = img_mask_val
    otherargs.img_bands = img_bands
    otherargs.out_scores = False
    otherargs.no_data_val = img_val_no_data

    outfiles = applier.FilenameAssociations()
    outfiles.out_lbls_img = out_lbls_img
    if out_scores_img is not None:
        outfiles.out_scores_img = out_scores_img
        otherargs.out_scores = True

    try:
        import tqdm
        progress_bar = rsgislib.TQDMProgressBar()
    except ImportError:
        progress_bar = cuiprogress.GDALProgressBar()

    aControls = applier.ApplierControls()
    aControls.progress = progress_bar
    aControls.drivername = gdalformat
    aControls.omitPyramids = True
    aControls.calcStats = False
    print("Applying the Outlier Detector")
    applier.apply(_applyPyOB, infiles, outfiles, otherargs, controls=aControls)
    print("Completed")

    rsgislib.rastergis.populateStats(clumps=out_lbls_img,
                                     addclrtab=True,
                                     calcpyramids=True,
                                     ignorezero=True)
    if out_scores_img is not None:
        rsgislib.imageutils.popImageStats(out_scores_img,
                                          usenodataval=True,
                                          nodataval=0,
                                          calcpyramids=True)
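
A usage sketch (hypothetical paths): the mask is an existing classification and the image is from a later date, so pixels labelled 2 in the output suggest change within that class.

from pyod.models.knn import KNN

find_class_outliers(pyod_obj=KNN(),
                    img='./sen2_t2.kea',
                    img_mask='./class_msk_t1.kea',
                    out_lbls_img='./outlier_lbls.kea',
                    out_scores_img='./outlier_scores.kea',
                    img_mask_val=1,
                    gdalformat='KEA')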
Example 7
def translate(info,
              infile,
              outfile,
              expectRange=None,
              spatial=None,
              extent=None,
              scaling=None,
              epsg=None,
              binSize=None,
              buildPulses=False,
              pulseIndex=None,
              nullVals=None,
              constCols=None,
              useLASScaling=False):
    """
    Main function which does the work.

    * Info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files respectively.
    * expectRange is a list of tuples with (type, varname, min, max).
    * spatial is True or False - dictates whether we are processing spatially or not.
        If True then spatial index will be created on the output file on the fly.
    * extent is a tuple of values specifying the extent to work with. 
        xmin ymin xmax ymax
    * scaling is a list of tuples with (type, varname, dtype, gain, offset).
    * if epsg is not None it should be an EPSG number to use as the coord system
    * binSize is the bin size used by the LAS spatial index
    * buildPulses dictates whether to attempt to build the pulse structure
    * pulseIndex should be 'FIRST_RETURN' or 'LAST_RETURN' and determines how the
        pulses are indexed.
    * nullVals is a list of tuples with (type, varname, value)
    * constCols is a list of tuples with (type, varname, dtype, value)
    * if useLASScaling is True, then the scaling used in the LAS file
        is used for columns. Overrides anything given in 'scaling'
    
    """
    scalingsDict = translatecommon.overRideDefaultScalings(scaling)

    if epsg is None and (info.wkt is None or len(info.wkt) == 0):
        msg = 'No projection set in las file. Must set EPSG on command line'
        raise generic.LiDARInvalidSetting(msg)

    if spatial and not info.hasSpatialIndex:
        msg = 'Spatial processing requested but file does not have spatial index'
        raise generic.LiDARInvalidSetting(msg)

    if spatial and binSize is None:
        msg = "For spatial processing, the bin size must be set"
        raise generic.LiDARInvalidSetting(msg)

    if extent is not None and not spatial:
        msg = 'Extent can only be set when processing spatially'
        raise generic.LiDARInvalidSetting(msg)

    # set up the variables
    dataFiles = lidarprocessor.DataFiles()

    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)
    if pulseIndex == 'FIRST_RETURN':
        dataFiles.input1.setLiDARDriverOption('PULSE_INDEX', las.FIRST_RETURN)
    elif pulseIndex == 'LAST_RETURN':
        dataFiles.input1.setLiDARDriverOption('PULSE_INDEX', las.LAST_RETURN)
    else:
        msg = "Pulse index argument not recognised."
        raise generic.LiDARInvalidSetting(msg)

    dataFiles.input1.setLiDARDriverOption('BUILD_PULSES', buildPulses)

    if spatial:
        dataFiles.input1.setLiDARDriverOption('BIN_SIZE', float(binSize))

    controls = lidarprocessor.Controls()
    progress = cuiprogress.GDALProgressBar()
    controls.setProgress(progress)
    controls.setSpatialProcessing(spatial)

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.scaling = scalingsDict
    otherArgs.epsg = epsg
    otherArgs.expectRange = expectRange
    otherArgs.lasInfo = info
    otherArgs.nullVals = nullVals
    otherArgs.constCols = constCols
    otherArgs.useLASScaling = useLASScaling

    if extent is not None:
        extent = [float(x) for x in extent]
        pixgrid = pixelgrid.PixelGridDefn(xMin=extent[0],
                                          yMin=extent[1],
                                          xMax=extent[2],
                                          yMax=extent[3],
                                          xRes=binSize,
                                          yRes=binSize)
        controls.setReferencePixgrid(pixgrid)
        controls.setFootprint(lidarprocessor.BOUNDS_FROM_REFERENCE)

    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
                                                 lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('SPDV4')
    dataFiles.output1.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
                                           False)

    lidarprocessor.doProcessing(transFunc,
                                dataFiles,
                                controls=controls,
                                otherArgs=otherArgs)
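
A usage sketch, assuming the function lives in a module where transFunc is defined (the paths and EPSG code are hypothetical):

from pylidar.lidarformats import generic

info = generic.getLidarFileInfo('./in.las')
translate(info, './in.las', './out.spd',
          epsg=28355,
          buildPulses=True,
          pulseIndex='FIRST_RETURN')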
Example 8
def translate(info,
              infile,
              outfile,
              expectRange=None,
              scalings=None,
              internalrotation=False,
              magneticdeclination=0.0,
              externalrotationfn=None,
              nullVals=None,
              constCols=None,
              epsg=None,
              wkt=None):
    """
    Main function which does the work.

    * Info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files respectively.
    * expectRange is a list of tuples with (type, varname, min, max).
    * scalings is a list of tuples with (type, varname, gain, offset).
    * if internalrotation is True then the internal rotation will be applied
        to data. Overrides externalrotationfn
    * if externalrotationfn is not None then the external rotation matrix
        will be read from this file and applied to the data
    * magneticdeclination, if not 0, will be applied to the data
    * nullVals is a list of tuples with (type, varname, value)
    * constCols is a list of tuples with (type, varname, dtype, value)
    """
    scalingsDict = translatecommon.overRideDefaultScalings(scalings)

    # set up the variables
    dataFiles = lidarprocessor.DataFiles()

    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)

    # first set the rotation matrix if asked for
    if internalrotation and externalrotationfn:
        msg = "Can't use both internal and external rotation"
        raise generic.LiDARInvalidSetting(msg)

    rotationMatrix = None
    if internalrotation:
        if "ROTATION_MATRIX" in info.header:
            dataFiles.input1.setLiDARDriverOption(
                "ROTATION_MATRIX", info.header["ROTATION_MATRIX"])
            rotationMatrix = info.header["ROTATION_MATRIX"]
        else:
            msg = "Internal Rotation requested but no information found in input file"
            raise generic.LiDARInvalidSetting(msg)
    elif externalrotationfn is not None:
        externalrotation = numpy.loadtxt(externalrotationfn,
                                         ndmin=2,
                                         delimiter=" ",
                                         dtype=numpy.float32)
        dataFiles.input1.setLiDARDriverOption("ROTATION_MATRIX",
                                              externalrotation)
        rotationMatrix = externalrotation

    # set the magnetic declination if not 0 (the default)
    if magneticdeclination != 0:
        dataFiles.input1.setLiDARDriverOption("MAGNETIC_DECLINATION",
                                              magneticdeclination)

    controls = lidarprocessor.Controls()
    progress = cuiprogress.GDALProgressBar()
    controls.setProgress(progress)
    controls.setSpatialProcessing(False)

    otherArgs = lidarprocessor.OtherArgs()
    # and the header so we don't collect it again
    otherArgs.rieglInfo = info.header
    # also need the default/overriden scaling
    otherArgs.scaling = scalingsDict
    # Add the rotation matrix to otherArgs
    # for updating the header
    otherArgs.rotationMatrix = rotationMatrix
    # expected range of the data
    otherArgs.expectRange = expectRange
    # null values
    otherArgs.nullVals = nullVals
    # constant columns
    otherArgs.constCols = constCols
    otherArgs.epsg = epsg
    otherArgs.wkt = wkt

    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
                                                 lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('SPDV4')
    dataFiles.output1.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
                                           False)

    lidarprocessor.doProcessing(transFunc,
                                dataFiles,
                                controls=controls,
                                otherArgs=otherArgs)
Example 9
def rasterize(infiles,
              outfile,
              attributes,
              function=DEFAULT_FUNCTION,
              atype=DEFAULT_ATTRIBUTE,
              background=0,
              binSize=None,
              extraModule=None,
              quiet=False,
              footprint=None,
              windowSize=None,
              driverName=None,
              driverOptions=None):
    """
    Apply the given function to the list of input files and create
    an output raster file. attributes is a list of attributes to run
    the function on. The function name must contain a module
    name and the specified function must take a masked array, plus
    the 'axis' parameter. atype should be a string containing 
    either POINT|PULSE.
    background is the background raster value to use. binSize is the bin size
    to use which defaults to that of the spatial indices used.
    extraModule should be a string with an extra module to import - use
    this for modules other than numpy that are needed by your function.
    quiet means no progress bar etc.
    footprint specifies the footprint type (e.g., lidarprocessor.UNION or
    lidarprocessor.INTERSECTION).
    windowSize sets the processing window size; driverName and driverOptions
    control the GDAL output driver.
    """
    dataFiles = lidarprocessor.DataFiles()
    dataFiles.inList = [
        lidarprocessor.LidarFile(fname, lidarprocessor.READ)
        for fname in infiles
    ]
    dataFiles.imageOut = lidarprocessor.ImageFile(outfile,
                                                  lidarprocessor.CREATE)
    dataFiles.imageOut.setRasterIgnore(background)

    if driverName is not None:
        dataFiles.imageOut.setRasterDriver(driverName)

    if driverOptions is not None:
        dataFiles.imageOut.setRasterDriverOptions(driverOptions)

    # import any other modules required
    globalsDict = globals()
    if extraModule is not None:
        globalsDict[extraModule] = importlib.import_module(extraModule)

    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(True)
    if not quiet:
        progress = cuiprogress.GDALProgressBar()
        controls.setProgress(progress)

    if binSize is not None:
        controls.setReferenceResolution(binSize)

    if footprint is not None:
        controls.setFootprint(footprint)

    if windowSize is not None:
        controls.setWindowSize(windowSize)

    otherArgs = lidarprocessor.OtherArgs()
    # reference to the function to call
    otherArgs.func = eval(function, globalsDict)
    otherArgs.attributes = attributes
    otherArgs.background = background
    atype = atype.upper()
    if atype == 'POINT':
        otherArgs.atype = POINT
    elif atype == 'PULSE':
        otherArgs.atype = PULSE
    else:
        msg = 'Unsupported type %s' % atype
        raise RasterizationError(msg)

    lidarprocessor.doProcessing(writeImageFunc,
                                dataFiles,
                                controls=controls,
                                otherArgs=otherArgs)
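
Because 'function' is resolved with eval against the module globals, it is passed as a dotted name string; numpy.ma reducers work here since they accept a masked array plus the 'axis' keyword. A usage sketch with hypothetical file names:

rasterize(infiles=['./tile1.spd', './tile2.spd'],
          outfile='./mean_z.img',
          attributes=['Z'],
          function='numpy.ma.mean',
          atype='POINT',
          binSize=1.0)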
Example 10
import numpy

from osgeo import osr

import rsgislib

havePysolar = True
try:
    import Pysolar
except ImportError:
    havePysolar = False

haveRIOS = True
try:
    from rios import applier
    from rios import cuiprogress
except ImportError:
    haveRIOS = False


def calcSolarAzimuthZenith(inputImg, inImgDateTime, outputImg, gdalformat):
    """
Function which calculates a solar azimuth (band 1) and zenith (band 2) image.

:param inputImg: input image file (can be any image with a spatial header)
:param inImgDateTime: a datetime object for the date/time of the acquisition
:param outputImg: output image file and path
:param gdalformat: output file format (e.g., KEA)

"""
    if not havePysolar:
        raise Exception(
            "The PySolar module required for this function could not be imported."
        )

    if not haveRIOS:
        raise Exception(
            "The RIOS module required for this function could not be imported."
        )

    try:
        import tqdm
        progress_bar = rsgislib.TQDMProgressBar()
    except ImportError:
        progress_bar = cuiprogress.GDALProgressBar()

    infiles = applier.FilenameAssociations()
    infiles.image1 = inputImg
    outfiles = applier.FilenameAssociations()
    outfiles.outimage = outputImg
    otherargs = applier.OtherInputs()
    otherargs.dateTime = inImgDateTime
    aControls = applier.ApplierControls()
    aControls.progress = progress_bar
    aControls.drivername = gdalformat

    wgs84latlonProj = osr.SpatialReference()
    wgs84latlonProj.ImportFromEPSG(4326)
    otherargs.wgs84latlonProj = wgs84latlonProj

    def _calcSolarAzimuthZenith(info, inputs, outputs, otherargs):
        """
        Internal function used within calcSolarAzimuthZenith() - don't call independently.
        
        """
        xBlock, yBlock = info.getBlockCoordArrays()

        inProj = osr.SpatialReference()
        inProj.ImportFromWkt(info.getProjection())

        transform = osr.CoordinateTransformation(inProj,
                                                 otherargs.wgs84latlonProj)
        xBlockF = xBlock.flatten()
        yBlockF = yBlock.flatten()

        pts = numpy.zeros((xBlockF.shape[0], 2), dtype=float)
        pts[..., 0] = xBlockF
        pts[..., 1] = yBlockF
        outPts = numpy.array(transform.TransformPoints(pts))

        outAz = numpy.zeros_like(xBlockF, dtype=float)
        outZen = numpy.zeros_like(xBlockF, dtype=float)

        for i in range(outPts.shape[0]):
            # Azimuth comes from GetAzimuth and zenith is 90 minus the solar
            # altitude; the original snippet had these two assignments swapped.
            outAz[i] = ((-1) * Pysolar.solar.GetAzimuth(
                outPts[i, 1], outPts[i, 0], otherargs.dateTime)) - 180
            outZen[i] = 90 - Pysolar.solar.GetAltitude(
                outPts[i, 1], outPts[i, 0], otherargs.dateTime)

        outAz = numpy.reshape(outAz, xBlock.shape)
        outZen = numpy.reshape(outZen, xBlock.shape)

        #print("Block End")
        outputs.outimage = numpy.stack((outAz, outZen))

    # Apply the multiply function.
    applier.apply(_calcSolarAzimuthZenith,
                  infiles,
                  outfiles,
                  otherargs,
                  controls=aControls)
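
A usage sketch (hypothetical paths; the datetime is assumed to be UTC, as expected by PySolar):

import datetime

acq_time = datetime.datetime(2018, 6, 21, 10, 30, 0)
calcSolarAzimuthZenith(inputImg='./sen2_img.kea',
                       inImgDateTime=acq_time,
                       outputImg='./solar_az_zen.kea',
                       gdalformat='KEA')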
Example 11
    def calc_unmixing_rmse_residualerr(self,
                                       input_img,
                                       input_unmix_img,
                                       endmembers_file,
                                       output_img,
                                       gdalformat,
                                       scale_factor=1000):
        """
        """
        try:
            import tqdm
            progress_bar = rsgislib.TQDMProgressBar()
        except ImportError:
            progress_bar = cuiprogress.GDALProgressBar()

        endmembers_info = self.read_endmembers_mtxt(endmembers_file)

        infiles = applier.FilenameAssociations()
        infiles.image_orig = input_img
        infiles.image_unmix = input_unmix_img
        outfiles = applier.FilenameAssociations()
        outfiles.outimage = output_img
        otherargs = applier.OtherInputs()
        otherargs.endmembers_q = endmembers_info[0]
        otherargs.endmembers_p = endmembers_info[1]
        otherargs.endmembers_arr = endmembers_info[2]
        otherargs.scale_factor = scale_factor
        aControls = applier.ApplierControls()
        aControls.progress = progress_bar
        aControls.drivername = gdalformat
        aControls.omitPyramids = True
        aControls.calcStats = False

        def _calc_unmix_err_rmse(info, inputs, outputs, otherargs):
            """
            This is an internal rios function
            """
            block_refl_shp = inputs.image_orig.shape
            img_orig_refl_flat = inputs.image_orig.reshape(
                [block_refl_shp[0], (block_refl_shp[1] * block_refl_shp[2])]).T
            block_unmix_shp = inputs.image_unmix.shape
            img_unmix_coef_flat = inputs.image_unmix.reshape([
                block_unmix_shp[0], (block_unmix_shp[1] * block_unmix_shp[2])
            ]).T
            img_flat_shp = img_orig_refl_flat.shape

            img_orig_refl_nodata = numpy.where(img_orig_refl_flat == 0, True,
                                               False)
            img_orig_refl_flat_nodata = numpy.all(img_orig_refl_nodata, axis=1)

            ID = numpy.arange(img_flat_shp[0])
            ID = ID[img_orig_refl_flat_nodata == False]
            img_orig_refl_flat_data = img_orig_refl_flat[
                img_orig_refl_flat_nodata == False, ...]
            img_orig_refl_flat_data_flt = numpy.zeros_like(
                img_orig_refl_flat_data, dtype=numpy.float32)
            img_orig_refl_flat_data_flt[...] = img_orig_refl_flat_data
            img_unmix_coef_flat_data = img_unmix_coef_flat[
                img_orig_refl_flat_nodata == False, ...]
            img_unmix_coef_flat_data_flt = numpy.zeros_like(
                img_unmix_coef_flat_data, dtype=numpy.float32)
            img_unmix_coef_flat_data_flt[
                ...] = img_unmix_coef_flat_data / float(scale_factor)

            img_nodata_flat_shp = img_unmix_coef_flat_data_flt.shape

            pred_refl_img = numpy.zeros_like(img_orig_refl_flat_data,
                                             dtype=float)
            for i in range(img_nodata_flat_shp[0]):
                for q in range(otherargs.endmembers_q):
                    pred_refl_img[i] = pred_refl_img[i] + (
                        img_unmix_coef_flat_data_flt[i, q] *
                        otherargs.endmembers_arr[q])

            # Calc Diff
            diff = img_orig_refl_flat_data_flt - pred_refl_img

            # Calc RMSE
            diff_sq = diff * diff
            mean_sum_diff_sq = numpy.sum(diff_sq,
                                         axis=1) / otherargs.endmembers_p
            rmse_arr = numpy.sqrt(mean_sum_diff_sq)

            # Calc Residual Error
            abs_diff = numpy.absolute(diff)
            residual_arr = numpy.sum(abs_diff, axis=1) / otherargs.endmembers_p

            outarr = numpy.zeros((img_flat_shp[0], 2))
            outarr[ID] = numpy.stack([rmse_arr, residual_arr], axis=-1)
            outarr = outarr.T
            outputs.outimage = outarr.reshape(
                (2, block_refl_shp[1], block_refl_shp[2]))

        applier.apply(_calc_unmix_err_rmse,
                      infiles,
                      outfiles,
                      otherargs,
                      controls=aControls)
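
The two error measures reduce to simple per-pixel statistics over the p bands; a toy numpy sketch of the same arithmetic:

import numpy

refl = numpy.array([120.0, 90.0, 60.0])   # observed reflectance (p = 3 bands)
pred = numpy.array([110.0, 95.0, 70.0])   # reflectance rebuilt from the unmixing
diff = refl - pred
p = refl.shape[0]
rmse = numpy.sqrt(numpy.sum(diff * diff) / p)       # root mean squared error
residual = numpy.sum(numpy.absolute(diff)) / p      # mean absolute residual
print(rmse, residual)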
Example 12
    def predict_refl_img_from_simple_unmixing(self,
                                              input_unmix_img,
                                              endmembers_file,
                                              output_img,
                                              gdalformat,
                                              scale_factor=1000):
        """
        """
        try:
            import tqdm
            progress_bar = rsgislib.TQDMProgressBar()
        except ImportError:
            progress_bar = cuiprogress.GDALProgressBar()

        endmembers_info = self.read_endmembers_mtxt(endmembers_file)

        infiles = applier.FilenameAssociations()
        infiles.image_unmix = input_unmix_img
        outfiles = applier.FilenameAssociations()
        outfiles.outimage = output_img
        otherargs = applier.OtherInputs()
        otherargs.endmembers_q = endmembers_info[0]
        otherargs.endmembers_p = endmembers_info[1]
        otherargs.endmembers_arr = endmembers_info[2]
        otherargs.scale_factor = scale_factor
        aControls = applier.ApplierControls()
        aControls.progress = progress_bar
        aControls.drivername = gdalformat
        aControls.omitPyramids = True
        aControls.calcStats = False

        def _predict_refl_img(info, inputs, outputs, otherargs):
            """
            This is an internal rios function
            """
            block_unmix_shp = inputs.image_unmix.shape
            img_unmix_coef_flat = inputs.image_unmix.reshape([
                block_unmix_shp[0], (block_unmix_shp[1] * block_unmix_shp[2])
            ]).T

            img_unmix_coef_flat_data_flt = numpy.zeros_like(
                img_unmix_coef_flat, dtype=numpy.float32)
            img_unmix_coef_flat_data_flt[
                ...] = img_unmix_coef_flat / float(scale_factor)

            img_nodata_flat_shp = img_unmix_coef_flat_data_flt.shape

            pred_refl_img = numpy.zeros(
                (img_unmix_coef_flat.shape[0], otherargs.endmembers_p),
                dtype=numpy.float32)
            for i in range(img_unmix_coef_flat.shape[0]):
                for q in range(otherargs.endmembers_q):
                    pred_refl_img[i] = pred_refl_img[i] + (
                        img_unmix_coef_flat_data_flt[i, q] *
                        otherargs.endmembers_arr[q])

            # Accumulate in floating point and only round to int16 at the
            # end, avoiding per-step truncation into an integer array.
            pred_refl_img = numpy.around(pred_refl_img).astype(numpy.int16).T
            outputs.outimage = pred_refl_img.reshape(
                (otherargs.endmembers_p, block_unmix_shp[1],
                 block_unmix_shp[2]))

        applier.apply(_predict_refl_img,
                      infiles,
                      outfiles,
                      otherargs,
                      controls=aControls)
Example 13
    def perform_simple_unmixing(self,
                                input_image,
                                output_image,
                                gdalformat,
                                endmembers_file,
                                weight=None,
                                scale_factor=1000):
        """
        A function which uses RIOS to iterate through the input image
        and perform a simple/standard spectral unmixing on the input image
        using the calc_abundance function.

        :param input_image: The file path to a GDAL readable input image.
        :param output_image: The file path to the GDAL writable output image
                             (Note pixel values will be between 0-1000)
        :param gdalformat: The output image format to be used.
        :param endmembers_file: The endmembers for the unmixing in the RSGISLib mtxt format.
        :param weight: Optional (if None ignored) to provide a weight to implement the approach of Scarth et al (2010)
                   adding a weight to the least squares optimisation to get the abundances to sum to 1.
        :param scale_factor: Scale factor for integerising the resulting image. If value is 1 then output image
                             will be a floating point image.

        References:
            Scarth, P., Roder, A., & Schmidt, M. (2010). Tracking grazing pressure and climate
            interaction - The role of Landsat fractional cover in time series analysis. Proceedings of
            the 15th Australasian Remote Sensing and Photogrammetry Conference, Alice Springs, Australia.
            http://dx.doi.org/10.6084/m9.figshare.94250.

        """
        try:
            import tqdm
            progress_bar = rsgislib.TQDMProgressBar()
        except ImportError:
            progress_bar = cuiprogress.GDALProgressBar()

        if weight is not None:
            endmembers_info = self.read_endmembers_mtxt_weight(
                endmembers_file, weight)
        else:
            endmembers_info = self.read_endmembers_mtxt(endmembers_file)
        print(endmembers_info)

        infiles = applier.FilenameAssociations()
        infiles.image = input_image
        outfiles = applier.FilenameAssociations()
        outfiles.outimage = output_image
        otherargs = applier.OtherInputs()
        otherargs.endmembers_q = endmembers_info[0]
        otherargs.endmembers_p = endmembers_info[1]
        otherargs.endmembers_arr = endmembers_info[2]
        otherargs.weight = weight
        otherargs.scale_factor = scale_factor
        aControls = applier.ApplierControls()
        aControls.progress = progress_bar
        aControls.drivername = gdalformat
        aControls.omitPyramids = True
        aControls.calcStats = False

        def _simple_unmix(info, inputs, outputs, otherargs):
            """
            This is an internal rios function
            """
            block_shp = inputs.image.shape
            img_flat = inputs.image.reshape(
                [block_shp[0], (block_shp[1] * block_shp[2])]).T
            if otherargs.weight is not None:
                img_flat_dtype = img_flat.dtype
                tmp_img_flat = img_flat
                img_flat = numpy.zeros(
                    (img_flat.shape[0], img_flat.shape[1] + 1),
                    dtype=img_flat_dtype)
                img_flat[...] = weight
                img_flat[:, :-1] = tmp_img_flat
            img_flat_shp = img_flat.shape

            img_nodata = numpy.where(img_flat == 0, True, False)
            img_flat_nodata = numpy.all(img_nodata, axis=1)

            ID = numpy.arange(img_flat_shp[0])
            ID = ID[img_flat_nodata == False]
            img_flat_data = img_flat[img_flat_nodata == False, ...]

            abundances_arr = self.calc_abundance(img_flat_data,
                                                 otherargs.endmembers_arr)
            if otherargs.scale_factor > 1:
                outarr = numpy.zeros([img_flat_shp[0], otherargs.endmembers_q],
                                     dtype=numpy.int16)
            else:
                outarr = numpy.zeros([img_flat_shp[0], otherargs.endmembers_q],
                                     dtype=numpy.float32)
            outarr[ID] = (abundances_arr * otherargs.scale_factor)
            outarr = outarr.T
            outputs.outimage = outarr.reshape(
                (otherargs.endmembers_q, block_shp[1], block_shp[2]))

        applier.apply(_simple_unmix,
                      infiles,
                      outfiles,
                      otherargs,
                      controls=aControls)
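
The weighting appends a constant column to both the pixel vectors and the endmember matrix so the least squares solution is pulled towards abundances summing to 1 (Scarth et al., 2010). A toy sketch of the augmented system (values are illustrative):

import numpy

endmembers = numpy.array([[0.10, 0.40, 0.20],   # q = 2 endmembers, p = 3 bands
                          [0.50, 0.30, 0.60]])
pixel = numpy.array([0.30, 0.35, 0.40])
weight = 40.0

# Append the weight to the pixel and a weight column to the endmembers;
# the extra equation is weight * sum(abundances) ~= weight.
A = numpy.hstack([endmembers, numpy.full((2, 1), weight)])
b = numpy.append(pixel, weight)
abundances, *_ = numpy.linalg.lstsq(A.T, b, rcond=None)
print(abundances, abundances.sum())   # sum should be close to 1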
Example 14
def translate(info,
              infile,
              outfile,
              expectRange=None,
              spatial=False,
              extent=None,
              scaling=None,
              nullVals=None,
              constCols=None):
    """
    Main function which does the work.

    * Info is a fileinfo object for the input file.
    * infile and outfile are paths to the input and output files respectively.
    * expectRange is a list of tuples with (type, varname, min, max).
    * spatial is True or False - dictates whether we are processing spatially or not.
        If True then spatial index will be created on the output file on the fly.
    * extent is a tuple of values specifying the extent to work with. 
        xmin ymin xmax ymax
    * scaling is a list of tuples with (type, varname, gain, offset).
    * nullVals is a list of tuples with (type, varname, value)
    * constCols is a list of tuples with (type, varname, dtype, value)
    """
    scalingsDict = translatecommon.overRideDefaultScalings(scaling)

    # first we need to determine if the file is spatial or not
    if spatial and not info.hasSpatialIndex:
        msg = "Spatial processing requested but file does not have spatial index"
        raise generic.LiDARInvalidSetting(msg)

    if extent is not None and not spatial:
        msg = 'Extent can only be set when processing spatially'
        raise generic.LiDARInvalidSetting(msg)

    dataFiles = lidarprocessor.DataFiles()

    dataFiles.input1 = lidarprocessor.LidarFile(infile, lidarprocessor.READ)
    dataFiles.output1 = lidarprocessor.LidarFile(outfile,
                                                 lidarprocessor.CREATE)
    dataFiles.output1.setLiDARDriver('SPDV4')
    dataFiles.output1.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING',
                                           False)

    controls = lidarprocessor.Controls()
    progress = cuiprogress.GDALProgressBar()
    controls.setProgress(progress)
    controls.setSpatialProcessing(spatial)

    if extent is not None:
        extent = [float(x) for x in extent]
        binSize = info.header['BIN_SIZE']
        pixgrid = pixelgrid.PixelGridDefn(xMin=extent[0],
                                          yMin=extent[1],
                                          xMax=extent[2],
                                          yMax=extent[3],
                                          xRes=binSize,
                                          yRes=binSize)
        controls.setReferencePixgrid(pixgrid)
        controls.setFootprint(lidarprocessor.BOUNDS_FROM_REFERENCE)

    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.scaling = scalingsDict
    otherArgs.expectRange = expectRange
    otherArgs.nullVals = nullVals
    otherArgs.constCols = constCols

    lidarprocessor.doProcessing(transFunc,
                                dataFiles,
                                controls=controls,
                                otherArgs=otherArgs)
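For reference, the option lists described in the docstring are plain lists of tuples. A sketch with hypothetical type strings, variable names, and values (illustrative only, not taken from a real dataset):

import numpy

scaling = [('POINT', 'Z', 100.0, 0.0)]              # (type, varname, gain, offset)
expectRange = [('POINT', 'Z', -10.0, 50.0)]         # (type, varname, min, max)
nullVals = [('POINT', 'CLASSIFICATION', 0)]         # (type, varname, value)
constCols = [('POINT', 'SOURCE', numpy.uint16, 1)]  # (type, varname, dtype, value)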
Example no. 15
def splitFileIntoTiles(infiles, binSize=1.0, blockSize=None, 
        tempDir='.', extent=None, indexType=INDEX_CARTESIAN,
        pulseIndexMethod=PULSE_INDEX_FIRST_RETURN, 
        footprint=lidarprocessor.UNION, outputFormat='SPDV4',
        buildPulses=False):
    """
    Takes a filename (or list of filenames) and creates a tempfile for every 
    block (using blockSize).
    If blockSize isn't set then it is picked using BLOCKSIZE_N_BLOCKS.
    binSize is the size of the bins to create the spatial index.
    indexType is one of the INDEX_* constants.
    pulseIndexMethod is one of the PULSE_INDEX_* constants.
    footprint is one of lidarprocessor.UNION or lidarprocessor.INTERSECTION
    and is how to combine extents if there is more than one file.
    outputFormat is either 'SPDV4' or 'LAS'. 'LAS' output is only supported
    when the input is 'LAS'.
    buildPulses is only relevant for 'LAS' input and determines whether to
    build the pulse structure or not.

    returns the header of the first input file, the extent used and a list
    of (fname, extent) tuples that contain the information for 
    each tempfile.
    """

    if isinstance(infiles, str):
        infiles = [infiles]

    # Use the first file for the header. It is not clear how to combine
    # headers from multiple inputs, or whether one should.
    # Initialise to None here; it may be filled in while working out the extent.
    firstHeader = None
    
    if extent is None:
        # work out from headers
        pixGrid = None
        for infile in infiles:
            info = generic.getLidarFileInfo(infile)
            header = info.header

            if firstHeader is None:
                firstHeader = header

            try:
                if indexType == INDEX_CARTESIAN:
                    xMax = header['X_MAX']
                    xMin = header['X_MIN']
                    yMax = header['Y_MAX']
                    yMin = header['Y_MIN']
                elif indexType == INDEX_SPHERICAL:
                    xMax = header['AZIMUTH_MAX']
                    xMin = header['AZIMUTH_MIN']
                    yMax = header['ZENITH_MAX']
                    yMin = header['ZENITH_MIN']
                elif indexType == INDEX_SCAN:
                    xMax = header['SCANLINE_IDX_MAX']
                    xMin = header['SCANLINE_IDX_MIN']
                    yMax = header['SCANLINE_MAX']
                    yMin = header['SCANLINE_MIN']
                else:
                    msg = 'unsupported indexing method'
                    raise generic.LiDARSpatialIndexNotAvailable(msg)
            except KeyError:
                msg = 'info for creating bounding box not available'
                raise generic.LiDARFunctionUnsupported(msg)

            newPixGrid = pixelgrid.PixelGridDefn(xMin=xMin, xMax=xMax, 
                            yMin=yMin, yMax=yMax, xRes=binSize, yRes=binSize)
            if pixGrid is None:
                pixGrid = newPixGrid
            elif footprint == lidarprocessor.UNION:
                pixGrid = pixGrid.union(newPixGrid)
            elif footprint == lidarprocessor.INTERSECTION:
                pixGrid = pixGrid.intersection(newPixGrid)
            else:
                msg = 'Unsupported footprint option'
                raise generic.LiDARFunctionUnsupported(msg)

        # TODO: we treat points as being in the block when they are >=
        # the min coords and < the max coords. What happens on the bottom
        # and right margins?? We could possibly miss points that are there.

        # round the coords to the nearest multiple
        xMin = numpy.floor(pixGrid.xMin / binSize) * binSize
        yMin = numpy.floor(pixGrid.yMin / binSize) * binSize
        xMax = numpy.ceil(pixGrid.xMax / binSize) * binSize
        yMax = numpy.ceil(pixGrid.yMax / binSize) * binSize
            
        extent = Extent(xMin, xMax, yMin, yMax, binSize)
        
    else:
        # ensure that our binSize comes from their extent
        binSize = extent.binSize

        # get the first header since we aren't doing the above
        info = generic.getLidarFileInfo(infiles[0])
        firstHeader = info.header
    
    if blockSize is None:
        minAxis = min(extent.xMax - extent.xMin, extent.yMax - extent.yMin)
        blockSize = min(minAxis / BLOCKSIZE_N_BLOCKS, 200.0)
        # make it a multiple of binSize
        blockSize = int(numpy.ceil(blockSize / binSize)) * binSize
    else:
        # ensure that their given block size can be evenly divided by 
        # the binSize
        # the modulo operator doesn't work too well with floats 
        # so we take a different approach
        a = blockSize / binSize
        if a != int(a):
            msg = 'blockSize must be evenly divisible by the binSize'
            raise generic.LiDARInvalidData(msg)
        
    extentList = []
    subExtent = Extent(extent.xMin, extent.xMin + blockSize, 
            extent.yMax - blockSize, extent.yMax, binSize)
    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(False)

    tmpSuffix = '.' + outputFormat.lower()

    bMoreToDo = True
    while bMoreToDo:
        fd, fname = tempfile.mkstemp(suffix=tmpSuffix, dir=tempDir)
        os.close(fd)
        
        userClass = lidarprocessor.LidarFile(fname, generic.CREATE)
        if outputFormat == 'SPDV4':
            userClass.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING', False)
            driver = spdv4.SPDV4File(fname, generic.CREATE, controls, userClass)
        elif outputFormat == 'LAS':
            driver = las.LasFile(fname, generic.CREATE, controls, userClass)
        else:
            msg = 'Unsupported output format %s' % outputFormat
            raise generic.LiDARFunctionUnsupported(msg)
        data = (copy.copy(subExtent), driver)
        extentList.append(data)

        # move it along
        subExtent.xMin += blockSize
        subExtent.xMax += blockSize

        if subExtent.xMin >= extent.xMax:
            # next line down
            subExtent.xMin = extent.xMin
            subExtent.xMax = extent.xMin + blockSize
            subExtent.yMax -= blockSize
            subExtent.yMin -= blockSize
            
        # done?
        bMoreToDo = subExtent.yMax > extent.yMin

    # ok now set up to read the input files using lidarprocessor
    dataFiles = lidarprocessor.DataFiles()
    dataFiles.inputs = []

    for infile in infiles:
        input = lidarprocessor.LidarFile(infile, lidarprocessor.READ)

        # There must be a better way of doing this, but this is what
        # translate does. We don't know ahead of time what formats we are getting.
        info = generic.getLidarFileInfo(infile)
        inFormat = info.getDriverName()
        if inFormat == 'LAS':
            input.setLiDARDriverOption('BUILD_PULSES', buildPulses)

        dataFiles.inputs.append(input)
        
    controls = lidarprocessor.Controls()
    progress = cuiprogress.GDALProgressBar()
    progress.setLabelText('Splitting...')
    controls.setProgress(progress)
    controls.setSpatialProcessing(False)
    controls.setMessageHandler(lidarprocessor.silentMessageFn)
        
    otherArgs = lidarprocessor.OtherArgs()
    otherArgs.outList = extentList
    otherArgs.indexType = indexType
    otherArgs.pulseIndexMethod = pulseIndexMethod
        
    lidarprocessor.doProcessing(classifyFunc, dataFiles, controls=controls, 
                otherArgs=otherArgs)
    
    # close all the output files and save their names to return
    newExtentList = []
    for subExtent, driver in extentList:
        fname = driver.fname
        driver.close()

        data = (fname, subExtent)
        newExtentList.append(data)

    return firstHeader, extent, newExtentList
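The sub-extent bookkeeping above marches one block left to right across the extent, then drops down a row, until the row passes the bottom edge. A self-contained sketch of just that loop, with plain tuples in place of Extent objects:

# Tile-marching loop: left to right, then down a row.
xMin, xMax, yMin, yMax, blockSize = 0.0, 10.0, 0.0, 10.0, 4.0
tiles = []
tx0, tx1 = xMin, xMin + blockSize       # current column
ty0, ty1 = yMax - blockSize, yMax       # current row (top row first)
bMoreToDo = True
while bMoreToDo:
    tiles.append((tx0, tx1, ty0, ty1))
    tx0 += blockSize
    tx1 += blockSize
    if tx0 >= xMax:                     # next row down
        tx0, tx1 = xMin, xMin + blockSize
        ty0 -= blockSize
        ty1 -= blockSize
    bMoreToDo = ty1 > yMin
print(len(tiles))                       # 9 tiles (3 x 3) for this extent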
Example no. 16
def convert_to_dB(input_img, output_img, gdal_format, out_int_imgs=False):
    """
    Convert power image to decibels (dB) by applying 10 x log10(pwr)

    :param input_img: Input power image
    :param output_img: Output dB image
    :param gdal_format: GDAL image format for output image
    :param out_int_imgs: if False then output image is Float32 if True then Int16 with gain of 100

    """
    try:
        import tqdm
        progress_bar = TQDMProgressBar()
    except:
        progress_bar = cuiprogress.GDALProgressBar()

    def _apply_calc_dB(info, inputs, outputs, otherargs):
        # Internal Function...
        msk_arr = numpy.where(
            ((inputs.image > 0.0) & numpy.isfinite(inputs.image)), 1, 0)
        msk_arr = numpy.amin(msk_arr, axis=0)

        dB_flt_img_arr = numpy.where(msk_arr == 1,
                                     10 * numpy.log10(inputs.image), 999)
        dB_flt_img_arr[numpy.isnan(dB_flt_img_arr)] = 999
        dB_flt_img_arr[numpy.isinf(dB_flt_img_arr)] = 999

        msk_arr = numpy.where((dB_flt_img_arr > -60) & (dB_flt_img_arr < 20),
                              1, 0)
        msk_arr = numpy.amin(msk_arr, axis=0)
        dB_flt_img_arr = numpy.where(msk_arr == 1, dB_flt_img_arr, 999)

        if otherargs.out_int_imgs:
            out_img_arr = numpy.zeros_like(inputs.image, dtype=numpy.int16)
            out_img_arr[...] = numpy.around((dB_flt_img_arr * 100), 0)
            out_img_arr[dB_flt_img_arr == 999] = 32767
        else:
            out_img_arr = dB_flt_img_arr

        outputs.outimage = out_img_arr

    infiles = applier.FilenameAssociations()
    infiles.image = input_img

    outfiles = applier.FilenameAssociations()
    outfiles.outimage = output_img

    otherargs = applier.OtherInputs()
    otherargs.out_int_imgs = out_int_imgs

    aControls = applier.ApplierControls()
    aControls.progress = progress_bar
    aControls.drivername = gdal_format
    aControls.omitPyramids = True
    aControls.calcStats = False
    if gdal_format == 'GTIFF':
        aControls.creationoptions = GTIFF_CREATION_OPTS

    applier.apply(_apply_calc_dB,
                  infiles,
                  outfiles,
                  otherargs,
                  controls=aControls)
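As a quick numeric check of the conversion above, 10 * log10(power), with the optional Int16 output applying a gain of 100:

import numpy

power = numpy.array([1.0, 0.1, 0.01])
db = 10 * numpy.log10(power)                    # -> [  0., -10., -20.]
db_int16 = numpy.int16(numpy.around(db * 100))  # gain of 100 -> [0, -1000, -2000]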
Example no. 17
def indexAndMerge(extentList, extent, wkt, outfile, header):
    """
    Internal method to merge all the temporary files into the output
    spatially indexing as we go.
    """
    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(False)

    # open in read mode
    driverExtentList = []
    for fname, subExtent in extentList:
        userClass = lidarprocessor.LidarFile(fname, generic.READ)
        driver = spdv4.SPDV4File(fname, generic.READ, controls, userClass)
        
        data = (subExtent, driver)
        driverExtentList.append(data)


    # create output file    
    userClass = lidarprocessor.LidarFile(outfile, generic.CREATE)
    userClass.setLiDARDriverOption('SCALING_BUT_NO_DATA_WARNING', False)
    controls = lidarprocessor.Controls()
    controls.setSpatialProcessing(True)
    outDriver = spdv4.SPDV4File(outfile, generic.CREATE, controls, userClass)
    pixGrid = pixelgrid.PixelGridDefn(xMin=extent.xMin, xMax=extent.xMax,
                yMin=extent.yMin, yMax=extent.yMax, projection=wkt,
                xRes=extent.binSize, yRes=extent.binSize)
    outDriver.setPixelGrid(pixGrid)
    
    # update header
    nrows,ncols = pixGrid.getDimensions()
    header['NUMBER_BINS_X'] = ncols
    header['NUMBER_BINS_Y'] = nrows

    # clobber these values since we don't want to 
    # start with the number in the original file
    # they will be reset to 0 in the new file
    del header['NUMBER_OF_POINTS']
    del header['NUMBER_OF_PULSES']
    # these too
    del header['GENERATING_SOFTWARE']
    del header['CREATION_DATETIME']
    
    progress = cuiprogress.GDALProgressBar()
    progress.setLabelText('Merging...')
    progress.setTotalSteps(len(extentList))
    progress.setProgress(0)
    nFilesProcessed = 0
    nFilesWritten = 0
    for subExtent, driver in driverExtentList:

        # read in all the data
        # NOTE: can't write data in blocks as the driver needs to be able to 
        # sort all the data in one go.
        bDataWritten = False
        npulses = driver.getTotalNumberPulses()
        if npulses > 0:
            pulseRange = generic.PulseRange(0, npulses)
            driver.setPulseRange(pulseRange)
            pulses = driver.readPulsesForRange()
            points = driver.readPointsByPulse()
            waveformInfo = driver.readWaveformInfo()
            recv = driver.readReceived()
            trans = driver.readTransmitted()

            outDriver.setExtent(subExtent)
            if nFilesWritten == 0:
                copyScaling(driver, outDriver)
                outDriver.setHeader(header)

            # on create, a spatial index is created
            outDriver.writeData(pulses, points, trans, recv, 
                            waveformInfo)
            bDataWritten = True

        # close the driver while we are here
        driver.close()
        
        if bDataWritten:
            nFilesWritten += 1
            
        nFilesProcessed += 1
        progress.setProgress(nFilesProcessed)

    outDriver.close()
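The NUMBER_BINS_X/Y header values come straight from the pixel grid. A rough sketch with hypothetical numbers, assuming getDimensions() simply divides the extent by the resolution:

xMin, xMax, yMin, yMax, binSize = 0.0, 100.0, 0.0, 50.0, 1.0
ncols = int(round((xMax - xMin) / binSize))  # 100 -> NUMBER_BINS_X
nrows = int(round((yMax - yMin) / binSize))  #  50 -> NUMBER_BINS_Y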
Example no. 18
def get_ST_masks(json_fp, bands=None, roi_img=None, gdal_format='KEA', num_processes=1, threshold=3):
    """Main function to run to generate the output masks. Given an input JSON file,
    generates a mask for each date, for each band where 0=Inlier, 1=High outlier,
    -1=Low outlier. Opening/closing of files, generation of blocks and use of
    multiprocessing is all handled by RIOS.

    A minimum of 12 observations is required to create the masks.


    json_fp:       Path to JSON file which provides a dictionary where for each
                   date, an input file name and an output file name are provided.
    bands:         List of GDAL band numbers to use, e.g. [1, 3, 5]. Defaults to all.
    roi_img:       Optional reference image; if provided, the output footprint is
                   taken from this image and the inputs are resampled to match it.
    gdal_format:   Short driver name for GDAL, e.g. KEA, GTiff.
    num_processes: Number of concurrent processes to use.
    threshold:     Threshold for screening. Defaults to 3, meaning that observations
                   outside 3*RMSE of the fitted model will be counted as outliers.
                   Lower values will result in more outliers being detected.
    """
    ip_paths = []
    op_paths = []
    dates = []

    try:
        # Open and read JSON file containing date:filepath pairs
        with open(json_fp) as json_file:
            image_list = json.load(json_file)

            for date in image_list.items():
                dates.append([datetime.strptime(date[0], '%Y-%m-%d').toordinal()])
                ip_paths.append(date[1]['input'])
                op_paths.append(date[1]['output'])
    except FileNotFoundError:
        print('Could not find the provided JSON file.')
        sys.exit()
    except json.decoder.JSONDecodeError as e:
        print('There is an error in the provided JSON file: {}'.format(e))
        sys.exit()

    # Create object to hold input files
    infiles = applier.FilenameAssociations()
    infiles.images = ip_paths

    # Create object to hold output file
    outfiles = applier.FilenameAssociations()
    outfiles.outimage = op_paths

    # ApplierControls object holds details on how processing should be done
    app = applier.ApplierControls()

    # Set window size to 1 because we are working per-pixel
    app.setWindowXsize(1)
    app.setWindowYsize(1)
    
    # Set progress
    try:
        import tqdm
        progress_bar = rsgislib.TQDMProgressBar()
    except:
        progress_bar = cuiprogress.GDALProgressBar()
    app.progress = progress_bar

    # Set output file type
    app.setOutputDriverName(gdal_format)
    
    if roi_img is not None:
        app.setReferenceImage(roi_img)
        app.setFootprintType(applier.BOUNDS_FROM_REFERENCE)
        app.setResampleMethod('near')
    

    # Use Python's multiprocessing module
    app.setJobManagerType('multiprocessing')
    app.setNumThreads(num_processes)

    # Open first image in list to use as a template
    template_image = fileinfo.ImageInfo(infiles.images[0])

    # Get no data value
    nodata = template_image.nodataval[0]

    if not bands:  # No bands specified - default to all
        num_bands = template_image.rasterCount
        bands = [i for i in range(1, num_bands + 1)]
    else:  # If a list of bands is provided
        # Number of bands determines things like the size of the output array
        num_bands = len(bands)
        # Need to tell the applier to only use the specified bands
        app.selectInputImageLayers(bands)

    full_names = [template_image.layerNameFromNumber(i) for i in bands]
    # Set up output layer names
    app.setLayerNames(full_names)

    # Additional arguments - have to be passed as a single object
    other_args = applier.OtherInputs()
    other_args.dates = dates
    other_args.threshold = threshold
    other_args.nodata = nodata
    other_args.num_bands = num_bands
    template_image = None

    try:
        applier.apply(_gen_band_masks, infiles, outfiles, otherArgs=other_args, controls=app)
    except RuntimeError as e:
        print('There was an error processing the images: {}'.format(e))
        print('Do all images in the JSON file exist?')
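The JSON file the function reads maps each date to an input and an output path, matching the date[1]['input'] / date[1]['output'] accesses above. A minimal example with hypothetical paths:

import json

image_list = {
    "2020-01-05": {"input": "imgs/s2_20200105.kea",
                   "output": "masks/s2_20200105_mask.kea"},
    "2020-01-15": {"input": "imgs/s2_20200115.kea",
                   "output": "masks/s2_20200115_mask.kea"},
}
with open("st_masks.json", "w") as json_file:
    json.dump(image_list, json_file, indent=4)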
Example no. 19
    def perform_analysis(self, scn_db_obj, sen_obj, plgin_objs):
        logger.info("Processing Scene: {}".format(scn_db_obj.PID))
        if scn_db_obj.Invalid:
            return False, None, False

        rsgis_utils = rsgislib.RSGISPyUtils()
        eodd_utils = EODataDownUtils()

        success = True
        outputs = False
        out_dict = None

        if 'GenChngSummaryFeats' in plgin_objs:
            if plgin_objs['GenChngSummaryFeats'].Completed and plgin_objs[
                    'GenChngSummaryFeats'].Outputs and plgin_objs[
                        'GenChngSummaryFeats'].Success:
                scn_chng_info = plgin_objs['GenChngSummaryFeats'].ExtendedInfo

                scn_unq_name = sen_obj.get_scn_unq_name_record(scn_db_obj)
                out_vec_file = os.path.join(
                    self.params['outvecdir'],
                    "{}_chng_vec.gpkg".format(scn_unq_name))
                if os.path.exists(out_vec_file):
                    delete_vector_file(out_vec_file)

                if sen_obj.get_sensor_name() in ('LandsatGOOG',
                                                 'Sentinel2GOOG'):
                    scn_obs_date = scn_db_obj.Sensing_Time
                elif sen_obj.get_sensor_name() == 'Sentinel1ASF':
                    scn_obs_date = scn_db_obj.Acquisition_Date
                else:
                    raise Exception("Did not recognise the sensor name...")

                start_date = datetime.datetime(year=2019, month=4, day=30)
                if scn_obs_date > start_date:
                    try:
                        import tqdm
                        progress_bar = rsgislib.TQDMProgressBar()
                    except:
                        from rios import cuiprogress
                        progress_bar = cuiprogress.GDALProgressBar()

                    drv = gdal.GetDriverByName("GPKG")
                    if drv is None:
                        raise Exception("Driver GPKG is not avaiable.")

                    ds = drv.Create(out_vec_file, 0, 0, 0, gdal.GDT_Unknown)
                    if ds is None:
                        raise Exception(
                            "Could not create output file: {}.".format(
                                out_vec_file))

                    out_dict = dict()
                    for tile in scn_chng_info:
                        logger.debug("Processing tile {}...".format(tile))
                        clumps_img = scn_chng_info[tile]

                        in_rats = ratapplier.RatAssociations()
                        out_rats = ratapplier.RatAssociations()
                        in_rats.inrat = ratapplier.RatHandle(clumps_img)

                        lyr = ds.CreateLayer(tile, None, ogr.wkbPoint)
                        if lyr is None:
                            raise Exception(
                                "Could not create output layer: {}.".format(
                                    tile))

                        field_uid_defn = ogr.FieldDefn("uid", ogr.OFTInteger)
                        if lyr.CreateField(field_uid_defn) != 0:
                            raise Exception("Could not create field: 'uid'.")

                        field_prop_chng_defn = ogr.FieldDefn(
                            "prop_chng", ogr.OFTReal)
                        if lyr.CreateField(field_prop_chng_defn) != 0:
                            raise Exception(
                                "Could not create field: 'prop_chng'.")

                        field_score_defn = ogr.FieldDefn(
                            "score", ogr.OFTInteger)
                        if lyr.CreateField(field_score_defn) != 0:
                            raise Exception("Could not create field: 'score'.")

                        # First Observation Date
                        field_firstobsday_defn = ogr.FieldDefn(
                            "firstobsday", ogr.OFTInteger)
                        if lyr.CreateField(field_firstobsday_defn) != 0:
                            raise Exception(
                                "Could not create field: 'firstobsday'.")

                        field_firstobsmonth_defn = ogr.FieldDefn(
                            "firstobsmonth", ogr.OFTInteger)
                        if lyr.CreateField(field_firstobsmonth_defn) != 0:
                            raise Exception(
                                "Could not create field: 'firstobsmonth'.")

                        field_firstobsyear_defn = ogr.FieldDefn(
                            "firstobsyear", ogr.OFTInteger)
                        if lyr.CreateField(field_firstobsyear_defn) != 0:
                            raise Exception(
                                "Could not create field: 'firstobsyear'.")

                        # Last Observation Date
                        field_lastobsday_defn = ogr.FieldDefn(
                            "lastobsday", ogr.OFTInteger)
                        if lyr.CreateField(field_lastobsday_defn) != 0:
                            raise Exception(
                                "Could not create field: 'lastobsday'.")

                        field_lastobsmonth_defn = ogr.FieldDefn(
                            "lastobsmonth", ogr.OFTInteger)
                        if lyr.CreateField(field_lastobsmonth_defn) != 0:
                            raise Exception(
                                "Could not create field: 'lastobsmonth'.")

                        field_lastobsyear_defn = ogr.FieldDefn(
                            "lastobsyear", ogr.OFTInteger)
                        if lyr.CreateField(field_lastobsyear_defn) != 0:
                            raise Exception(
                                "Could not create field: 'lastobsyear'.")

                        # Observation Date Where Score Reached 5
                        field_scr5obsday_defn = ogr.FieldDefn(
                            "scr5obsday", ogr.OFTInteger)
                        if lyr.CreateField(field_scr5obsday_defn) != 0:
                            raise Exception(
                                "Could not create field: 'scr5obsday'.")

                        field_scr5obsmonth_defn = ogr.FieldDefn(
                            "scr5obsmonth", ogr.OFTInteger)
                        if lyr.CreateField(field_scr5obsmonth_defn) != 0:
                            raise Exception(
                                "Could not create field: 'scr5obsmonth'.")

                        field_scr5obsyear_defn = ogr.FieldDefn(
                            "scr5obsyear", ogr.OFTInteger)
                        if lyr.CreateField(field_scr5obsyear_defn) != 0:
                            raise Exception(
                                "Could not create field: 'scr5obsyear'.")

                        lyr_defn = lyr.GetLayerDefn()

                        otherargs = ratapplier.OtherArguments()
                        otherargs.lyr = lyr
                        otherargs.lyr_defn = lyr_defn

                        ratcontrols = ratapplier.RatApplierControls()
                        ratcontrols.setProgress(progress_bar)
                        ratapplier.apply(_ratapplier_check_string_col_valid,
                                         in_rats,
                                         out_rats,
                                         otherargs,
                                         controls=ratcontrols)

                        # Update (create) the JSON LUT file.
                        lut_file_name = "gmw_{}_lut.json".format(tile)
                        lut_file_path = os.path.join(self.params["outlutdir"],
                                                     lut_file_name)
                        eodd_utils.get_file_lock(lut_file_path,
                                                 sleep_period=1,
                                                 wait_iters=120,
                                                 use_except=True)
                        if os.path.exists(lut_file_path):
                            lut_dict = rsgis_utils.readJSON2Dict(lut_file_path)
                        else:
                            lut_dict = dict()

                        obs_date_iso_str = scn_obs_date.isoformat()
                        lut_dict[obs_date_iso_str] = dict()
                        lut_dict[obs_date_iso_str]["file"] = out_vec_file
                        lut_dict[obs_date_iso_str]["layer"] = tile

                        rsgis_utils.writeDict2JSON(lut_dict, lut_file_path)
                        eodd_utils.release_file_lock(lut_file_path)
                        out_dict[tile] = out_vec_file

                    ds = None
                    outputs = True
                    success = True
                else:
                    logger.debug(
                        "Scene observation date is before the start date, so it is "
                        "masked out as change outside the valid date range."
                    )
            else:
                logger.debug(
                    "No change features available as outputs from previous steps..."
                )
        else:
            logger.debug(
                "GenChngSummaryFeats was not available so previous step had not run..."
            )

        return success, out_dict, outputs
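The twelve near-identical CreateField blocks above could be collapsed into a data-driven loop. A sketch using the same OGR calls, where lyr is the layer created in the surrounding code:

from osgeo import ogr

fields = [("uid", ogr.OFTInteger), ("prop_chng", ogr.OFTReal),
          ("score", ogr.OFTInteger),
          ("firstobsday", ogr.OFTInteger), ("firstobsmonth", ogr.OFTInteger),
          ("firstobsyear", ogr.OFTInteger),
          ("lastobsday", ogr.OFTInteger), ("lastobsmonth", ogr.OFTInteger),
          ("lastobsyear", ogr.OFTInteger),
          ("scr5obsday", ogr.OFTInteger), ("scr5obsmonth", ogr.OFTInteger),
          ("scr5obsyear", ogr.OFTInteger)]
for name, ftype in fields:
    if lyr.CreateField(ogr.FieldDefn(name, ftype)) != 0:
        raise Exception("Could not create field: '{}'.".format(name))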
Example no. 20
def apply_sklearn_classifer(classTrainInfo,
                            skClassifier,
                            imgMask,
                            imgMaskVal,
                            imgFileInfo,
                            outputImg,
                            gdalformat,
                            classClrNames=True):
    """
This function uses a trained classifier and applies it to the provided input image.

:param classTrainInfo: dict (where the key is the class name) of rsgislib.classification.ClassSimpleInfoObj
                       objects which were used to train the classifier (i.e., via train_sklearn_classifier()),
                       providing the pixel value id and RGB class values.
:param skClassifier: a trained instance of a scikit-learn classifier
                     (e.g., use train_sklearn_classifier or train_sklearn_classifer_gridsearch)
:param imgMask: is an image file providing a mask to specify which pixels should be classified. The simplest mask
                is all the valid data regions (rsgislib.imageutils.genValidMask).
:param imgMaskVal: the pixel value within the imgMask to limit the region to which the classification is applied.
                   Can be used to create a hierarchical classification.
:param imgFileInfo: a list of rsgislib.imageutils.ImageBandInfo objects (also used within
                    rsgislib.imageutils.extractZoneImageBandValues2HDF) to identify which images and bands are to
                    be used for the classification so it adheres to the training data.
:param outputImg: output image file with the classification. Note: by default a colour table and class names column
                  are added to the image. If an error is produced, use the HFA or KEA formats.
:param gdalformat: is the output image format - all GDAL supported formats are supported.
:param classClrNames: default is True and therefore a colour table with the colours specified in classTrainInfo
                      and a ClassName column (taken from the classTrainInfo keys) will be added to the output file.

    """
    infiles = applier.FilenameAssociations()
    infiles.imageMask = imgMask
    numClassVars = 0
    for imgFile in imgFileInfo:
        infiles.__dict__[imgFile.name] = imgFile.fileName
        numClassVars = numClassVars + len(imgFile.bands)

    outfiles = applier.FilenameAssociations()
    outfiles.outimage = outputImg
    otherargs = applier.OtherInputs()
    otherargs.classifier = skClassifier
    otherargs.mskVal = imgMaskVal
    otherargs.numClassVars = numClassVars
    otherargs.imgFileInfo = imgFileInfo

    try:
        import tqdm
        progress_bar = rsgislib.TQDMProgressBar()
    except:
        progress_bar = cuiprogress.GDALProgressBar()

    aControls = applier.ApplierControls()
    aControls.progress = progress_bar
    aControls.drivername = gdalformat
    aControls.omitPyramids = True
    aControls.calcStats = False

    # RIOS function to apply classifer
    def _applySKClassifier(info, inputs, outputs, otherargs):
        """
        Internal function for rios applier. Used within apply_sklearn_classifer.
        """
        outClassVals = numpy.zeros_like(inputs.imageMask, dtype=numpy.uint32)
        if numpy.any(inputs.imageMask == otherargs.mskVal):
            outClassVals = outClassVals.flatten()
            imgMaskVals = inputs.imageMask.flatten()
            classVars = numpy.zeros(
                (outClassVals.shape[0], otherargs.numClassVars),
                dtype=float)
            # Array index which can be used to populate the output array following masking etc.
            ID = numpy.arange(imgMaskVals.shape[0])
            classVarsIdx = 0
            for imgFile in otherargs.imgFileInfo:
                imgArr = inputs.__dict__[imgFile.name]
                for band in imgFile.bands:
                    classVars[..., classVarsIdx] = imgArr[(band - 1)].flatten()
                    classVarsIdx = classVarsIdx + 1
            classVars = classVars[imgMaskVals == otherargs.mskVal]
            ID = ID[imgMaskVals == otherargs.mskVal]
            predClass = otherargs.classifier.predict(classVars)
            outClassVals[ID] = predClass
            outClassVals = numpy.expand_dims(outClassVals.reshape(
                (inputs.imageMask.shape[1], inputs.imageMask.shape[2])),
                                             axis=0)
        outputs.outimage = outClassVals

    print("Applying the Classifier")
    applier.apply(_applySKClassifier,
                  infiles,
                  outfiles,
                  otherargs,
                  controls=aControls)
    print("Completed")
    rsgislib.rastergis.populateStats(clumps=outputImg,
                                     addclrtab=True,
                                     calcpyramids=True,
                                     ignorezero=True)

    if classClrNames:
        ratDataset = gdal.Open(outputImg, gdal.GA_Update)
        red = rat.readColumn(ratDataset, 'Red')
        green = rat.readColumn(ratDataset, 'Green')
        blue = rat.readColumn(ratDataset, 'Blue')
        ClassName = numpy.empty_like(red, dtype=numpy.dtype('a255'))

        for classKey in classTrainInfo:
            print("Apply Colour to class \'" + classKey + "\'")
            red[classTrainInfo[classKey].id] = classTrainInfo[classKey].red
            green[classTrainInfo[classKey].id] = classTrainInfo[classKey].green
            blue[classTrainInfo[classKey].id] = classTrainInfo[classKey].blue
            ClassName[classTrainInfo[classKey].id] = classKey

        rat.writeColumn(ratDataset, "Red", red)
        rat.writeColumn(ratDataset, "Green", green)
        rat.writeColumn(ratDataset, "Blue", blue)
        rat.writeColumn(ratDataset, "ClassName", ClassName)
        ratDataset = None
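The mask -> flatten -> predict -> scatter-back pattern in _applySKClassifier is easy to exercise in isolation. A minimal sketch with synthetic data; any fitted scikit-learn classifier would do:

import numpy
from sklearn.ensemble import RandomForestClassifier

rows, cols, n_vars = 4, 5, 3
clf = RandomForestClassifier(n_estimators=10)
clf.fit(numpy.random.rand(20, n_vars), numpy.random.randint(1, 4, 20))

msk = numpy.arange(rows * cols) % 2                  # 1 = classify this pixel
classVars = numpy.random.rand(rows * cols, n_vars)   # one row per pixel

outClassVals = numpy.zeros(rows * cols, dtype=numpy.uint32)
ID = numpy.arange(rows * cols)[msk == 1]
outClassVals[ID] = clf.predict(classVars[msk == 1])
out_img = outClassVals.reshape(rows, cols)           # back to image shape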
Example no. 21
def doClump(infile, outfile, tempDir):
    """
    Do the clumping
    """
    inputs = applier.FilenameAssociations()
    inputs.infile = infile

    # create temporary file with the clumps done on a per tile basis
    outputs = applier.FilenameAssociations()
    fileh, tmpClump = tempfile.mkstemp('.kea', dir=tempDir)
    os.close(fileh)
    outputs.outfile = tmpClump

    # start at clumpid 1 - will be zeros where no data
    otherinputs = applier.OtherInputs()
    otherinputs.clumpId = 1

    controls = applier.ApplierControls()
    controls.progress = cuiprogress.GDALProgressBar()
    # don't need stats for this since it is just temporary
    controls.calcStats = False

    applier.apply(riosClump, inputs, outputs, otherinputs, controls=controls)

    # run it on the input image again, but also read in the tile clumps
    inputs.tileclump = tmpClump

    # overlap of 1 so we can work out which neighbouring
    # clumps need to be merged
    controls.overlap = 1
    # make thematic
    # but still don't do stats - only when we finally know we succeeded
    controls.thematic = True

    outputs = applier.FilenameAssociations()

    finished = False
    while not finished:

        # it creates the output file as it goes
        # just create temp at this stage until we know it has succeeded
        fileh, tmpMerged = tempfile.mkstemp('.kea', dir=tempDir)
        os.close(fileh)

        outputs.clump = tmpMerged

        otherinputs.nFailedRecodes = 0
        # ok now we have to merge the clumps
        # create a recode table
        recode = numpy.arange(otherinputs.clumpId, dtype=numpy.uint32)
        otherinputs.recode = recode

        # create a boolean array with clumps not to recode
        # obviously if you recode 39 -> 23 you don't want 23 being recoded to
        # something else
        dontrecode = numpy.zeros_like(recode, dtype=bool)
        otherinputs.dontrecode = dontrecode

        applier.apply(riosMerge,
                      inputs,
                      outputs,
                      otherinputs,
                      controls=controls)

        # clobber the last temp input
        os.remove(inputs.tileclump)

        inputs.tileclump = tmpMerged

        dontrecodesum = dontrecode.sum()
        finished = dontrecodesum == 0
        if not finished:
            print('%d clumps failed to merge. %d recoded' %
                  (otherinputs.nFailedRecodes, dontrecodesum))
            print('having another go')

    # now we save the final output as the output name and calc stats
    cmd = 'gdalcalcstats %s -ignore 0' % tmpMerged
    os.system(cmd)

    # use move rather than rename in case we are on different filesystems
    shutil.move(tmpMerged, outfile)

    # just be careful here since the permissions will be set strangely
    # for outfile since it was created by tempfile. Set to match current umask
    current_umask = os.umask(0)
    os.umask(current_umask)  # can't get without setting!
    # need to convert from umask to mode and remove exe bits
    mode = 0o0777 & ~current_umask
    mode = (((mode ^ stat.S_IXUSR) ^ stat.S_IXGRP) ^ stat.S_IXOTH)

    os.chmod(outfile, mode)

    history.insertMetadataFilename(outfile, [infile], {})
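The merge loop above hinges on the recode lookup table: an identity array indexed by clump id, edited so that merged clumps point at their new id, then applied by fancy indexing. The same idea in miniature:

import numpy

clumps = numpy.array([[1, 1, 2], [3, 2, 2]], dtype=numpy.uint32)
recode = numpy.arange(4, dtype=numpy.uint32)  # identity table: id -> id
recode[3] = 1                                 # merge clump 3 into clump 1
merged = recode[clumps]                       # -> [[1, 1, 2], [1, 2, 2]]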