Esempio n. 1
0
    def bitmap_list(self, in_file):
        """
        Return the list of bitmap segment IDs contained in a dataset.

        Note:
            PCI's developers are going to try to add a 'last segment created' (lasc) parameter for
            this function in the next release, thereby enabling cleaner and more reliable automation.

        Parameters:
            in_file -- Path to the input PCIDSK (.pix) file.

        Return value:
            List of bitmap segment IDs when successful; on a PCI error the
            error is reported through self.error() and None is returned.
        """
        from pci.api import datasource as ds
        from pci.exceptions import PCIException

        try:
            # 'logging' is expected to be imported at module level.
            logging.info('          Executing: EGSUtility.bitmap_list')
            # eAM_WRITE matches the access mode used by the sibling methods,
            # even though only a read is performed here.
            dataset = ds.open_dataset(in_file, ds.eAM_WRITE)
            return dataset.bitmap_ids

        except PCIException as e:
            # 'as' syntax is valid in both Python 2.6+ and Python 3.
            self.error('bitmap_list(): {:s}'.format(e))
Esempio n. 2
0
def get_channels(img):
    """Return the range of raster channel numbers (1-based) of *img*.

    Parameters:
        img -- Path to an image file opened through the ``handle`` datasource.

    Return value:
        range(1, chans_count + 1) on success; falls back to range(1, 5)
        (channels 1-4) when the dataset cannot be opened.
    """
    try:
        dataset = handle.open_dataset(img)
        return range(1, dataset.chans_count + 1)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate; print() form works on both Python 2 and 3.
        print("Unable to open dataset %s" % img)
        return range(1, 5)
Esempio n. 3
0
def removePreviousResultChannels(fileInput, defNumChannels):
    """Delete trailing result channels from a PCIDSK file.

    Repeatedly removes the highest-numbered channel until the file is back
    down to *defNumChannels* channels (e.g. the native Landsat 8 band count).

    Parameters:
        fileInput -- Path to the PCIDSK (.pix) file to trim.
        defNumChannels -- Number of channels the file should be left with.
    """
    try:
        with ds.open_dataset(fileInput, ds.eAM_READ) as pix:
            # Get channel count
            numberOfChannels = pix.chan_count
            channelOperationType = 'DEL'
            # Delete the highest-numbered channel until only the default
            # number of channels remains.
            while numberOfChannels > defNumChannels:
                channelsToDelete = [numberOfChannels]
                pcimod(fileInput, channelOperationType, channelsToDelete)
                numberOfChannels = numberOfChannels - 1
    except Exception as e:
        print(e)
        # NOTE(review): in the original this message sat on a tab-indented
        # line inside the except block (a TabError under Python 3); it is
        # kept in the except block here -- confirm it was not meant to run
        # on success instead.
        print("Previous channels deleted")
Esempio n. 4
0
    def create_chan_list(self, in_file):
        """
        Build a list of channel numbers with their associated descriptions.

        Note:
            PCI's developers are going to try to add a 'last segment created' (lasc) parameter for
            this function in the next release, thereby enabling cleaner and more reliable automation.

        Parameters:
            in_file -- Path to the input PCIDSK (.pix) file.

        Return value:
            List of strings of the form "<channel#>-<description>" when
            successful; on a PCI error the error is reported through
            self.error() and None is returned.
        """
        from pci.api import datasource as ds
        from pci.exceptions import PCIException

        try:
            # 'logging' is expected to be imported at module level.
            logging.info('          Executing: EGSUtility.create_chan_list')
            pix = ds.open_dataset(in_file, ds.eAM_WRITE)

            # aux_data holds the per-file metadata (channel descriptions).
            aux_pix = pix.aux_data

            # Channels are numbered 1..chan_count inclusive.
            return [
                str(i) + "-" + aux_pix.get_chan_description(i)
                for i in range(1, aux_pix.chan_count + 1)
            ]

        except PCIException as e:
            # 'as' syntax is valid in both Python 2.6+ and Python 3.
            self.error('create_chan_list(): {:s}'.format(e))
Esempio n. 5
0
def getChannelNumber(channelName):
    """Return the number of the channel labelled *channelName*.

    Resolution order: an existing channel whose description starts with the
    name; then the first empty channel ('Contents Not Specified'), which is
    claimed and relabelled; finally a brand-new 8-bit channel is appended to
    the file and the lookup is retried. Rebinds the module-level ``dataset``
    and ``auxiliaryData`` globals after growing the file.
    """
    global dataset, auxiliaryData
    total = dataset.chan_count
    # Pass 1: a channel already carrying this label.
    for chan in range(1, total + 1):
        if auxiliaryData.get_chan_description(chan).startswith(channelName):
            return chan
    # Pass 2: claim the first unused channel and label it.
    for chan in range(1, total + 1):
        if auxiliaryData.get_chan_description(chan) == 'Contents Not Specified':
            qprintl('Use empty channel #{} for {}'.format(chan, channelName))
            setChannelDescription(chan, channelName)
            return chan
    # Pass 3: append one 8-bit channel, reopen the file, and retry.
    qprint('Creating a new channel for {}...'.format(channelName))
    pcimod(file=dataset.name, pciop='ADD', pcival=[1, 0, 0, 0])
    dataset = datasource.open_dataset(dataset.name, datasource.eAM_WRITE)
    auxiliaryData = dataset.aux_data
    qprintl('OK')
    return getChannelNumber(channelName)
# how to open the irvine.pix Dataset
import os
import pci
from pci.api import datasource as ds

# Path to irvine.pix in the Geomatica demo folder.
irvine_pix = os.path.join(
    pci.getHomePath(), 'demo',
    'irvine.pix')  # get the path to irvine.pix in the demo folder
dataset = ds.open_dataset(irvine_pix)  # open the dataset

# The following example shows how get the number of rows, columns and channels of a Dataset.
with ds.open_dataset(irvine_pix) as dataset:
    # width is the pixel count per line (columns) and height the line count
    # (rows); the original assigned them the other way around.
    rows = dataset.height
    cols = dataset.width
    chans = dataset.chan_count
    def threshold_sar(self,
                      in_file,
                      band=4,
                      veg_thr=-3.5,
                      openwater_thres=-12.5):
        """
        Threshold high and low

        Apply low threshold for open water areas for Edge preservation filter results
        Apply high threshold for flooded tree areas for Edge preservation filter results
        The input is a PCIPIX file containing a filtered and scaled imagery.
        Bitmaps are added to PCIPIX representing open water and flooded vegetation areas

        Note:
            PCI's developers are going to try to add a 'last segment created' (lasc) parameter for
            this function in the next release, thereby enabling cleaner and more reliable automation.

        Parameters:
            in_file -- Input file
            band -- Input band
            veg_thr -- Minimum threshold for vegetation flooded area
            openwater_thres -- Maximum threshold for open water flooded areas

        Return value:
            [veg_seg, water_seg] -- numbers of the newly created vegetation
            and open-water bitmap segments when successful; on a PCI error
            the error is reported and None is returned.
        """

        try:
            logging.info('       Executing: VegFloodProcess.threshold_sar')
            # Find min and max pixel values of the band.
            imstat = EGS_utility.EGSUtility().gdal_stat(in_file, band)
            max_pixel = imstat[1]
            min_pixel = imstat[0] + 1

            # Create and name the vegetation segment up front: 'thr' does not
            # rename segments, so 'mas' is used to label the new bitmap.
            dataset = ds.open_dataset(in_file, ds.eAM_WRITE)
            veg_seg = dataset.create_bitmap()  # number of the new bitmap segment
            mas(in_file, [veg_seg], "vegflood", "Veg Flood seg")
            logging.info(
                '          Successfully completed VegFloodProcess.threshold_sar: mas'
            )

            # Threshold for flooded vegetation: range veg_thr .. band maximum,
            # written into the pre-created segment (comp 'OFF' = no complement).
            Report.clear()
            thr(in_file, [band], [veg_seg], [veg_thr, max_pixel], 'OFF',
                'vegflood', 'Veg Flood seg')
            enableDefaultReport('term')
            logging.info(
                '          Successfully completed VegFloodProcess.threshold_sar: thr'
            )

            # Create and name the open-water segment the same way.
            water_seg = dataset.create_bitmap()  # number of the new bitmap segment
            mas(in_file, [water_seg], "openwate", "Open Water seg")
            logging.info(
                '          Successfully completed VegFloodProcess.threshold_sar: mas'
            )

            # Threshold for open water: range band minimum .. openwater_thres.
            Report.clear()
            thr(in_file, [band], [water_seg], [min_pixel, openwater_thres],
                'OFF', 'openwate', 'Open Water seg')
            enableDefaultReport('term')
            logging.info(
                '          Successfully completed VegFloodProcess.threshold_sar: thr'
            )
            return [veg_seg, water_seg]

        except PCIException as e:
            # 'as' syntax is valid in both Python 2.6+ and Python 3.
            EGS_utility.EGSUtility().error('threshold_sar(): {:s}'.format(e))
    def classification(self, file, alg):
        """Run an unsupervised classification on *file* and export it.

        Runs K-means (alg == 'kmeans') or Iso-Cluster classification over all
        existing channels, post-filters the result with a mode (FMO) filter
        and a SIEVE filter, then exports the sieved channel as a shapefile
        under the Data\\Result folder.

        Parameters:
            file -- Path to the input PCIDSK (.pix) file (forward slashes).
            alg -- 'kmeans' for K-means; any other value runs Iso-Cluster.
        """
        if (alg == 'kmeans'):
            print("Your selection is K-means Classification")
        else:
            print("Your selection is Iso-Cluster Classification")
        # Remove the report from any previous run so PCI can recreate it.
        report = os.getcwd() + "\\Data\\Result\\Report.txt"
        if os.path.isfile(report):
            print("Deleting old Report")
            os.remove(report)
        with datasource.open_dataset(file) as dataset:
            cc = dataset.chan_count
            print("Available Channels: ")
            print(cc)

        if (alg == 'kmeans'):
            dbic = list(range(1, cc + 1, 1))  # input channels
            dboc = [cc + 1]  # output channel
            print(dboc)
            mask = []  # process entire image
            numclus = [5]  # requested number of clusters
            seedfile = ''  # automatically generate seeds
            maxiter = [20]  # no more than 20 iterations
            movethrs = [0.01]

            print("The file is: " + root.filename)
            # Add one 8-bit channel to hold the classification result.
            pcimod(file=file, pciop='add', pcival=[1, 0, 0, 0, 0, 0])
            Report.clear()
            enableDefaultReport(report)
            print("Applying K-Means Classification")
            kclus(file, dbic, dboc, mask, numclus, seedfile, maxiter,
                  movethrs)
            print("K-Means Classification is successful")
        else:
            dbic = list(range(1, cc + 1, 1))  # input channels
            dboc = [cc + 1]  # output channel
            print(dboc)
            mask = []  # process entire image
            numclus = [5]  # requested number of clusters
            maxclus = [7]  # at most 7 clusters
            minclus = [5]  # at least 5 clusters
            seedfile = ''  # automatically generate seeds
            maxiter = [5]  # no more than 5 iterations
            movethrs = [0.01]
            siggen = "NO"  # no signature generation
            samprm = [5]
            stdv = [10.0]
            lump = [1.0]
            maxpair = [5]  # no more than 5 cluster center pairs
            # clumped in one iteration
            backval = []  # no background value
            nsam = []  # default number of samples

            print("The file is: " + root.filename)
            # Add one 8-bit channel to hold the classification result.
            pcimod(file=file, pciop='add', pcival=[1, 0, 0, 0, 0, 0])
            Report.clear()
            enableDefaultReport(report)
            print("Applying Iso-cluster Classification")
            isoclus(file, dbic, dboc, mask, numclus, maxclus, minclus,
                    seedfile, maxiter, movethrs, siggen, samprm, stdv, lump,
                    maxpair, backval, nsam)
            print("Iso-Cluster Classification is successful")

        # Post-filter the classification: mode filter, then sieve.
        pcimod(file=file, pciop='add', pcival=[1])
        print("Applying MOD filter")
        fmo(file=file, dbic=[cc + 1], dboc=[cc + 2])
        print("MOD filter is successfully applied")
        pcimod(file=file, pciop='add', pcival=[1])
        print("Applying SIEVE filter")
        # NOTE(review): SIEVE reads the raw classification (cc + 1), not the
        # FMO output (cc + 2) -- confirm that is intended.
        sieve(file=file, dbic=[cc + 1], dboc=[cc + 3], sthresh=[32])
        print("SIEVE filter is successfully applied")
        # Build <dir>\Result\<name>.shp from the forward-slash input path.
        split_file = file.split("/")
        split_file.insert(-1, "Result")
        join_result = "\\".join(split_file)
        # Split by . and keep the stem only.
        dot_result = join_result.split(".")
        shp_result = dot_result[0] + ".shp"

        current_file_path, ext = os.path.splitext(str(file))
        current_file = current_file_path.split("/")[-1]
        # Delete shapefile components generated for this input by earlier runs.
        prev_files = glob.glob(os.getcwd() + "\\Data\\Result\\" + "*")
        for prev_file in prev_files:
            # 'prev_ext' replaces the original name 'format', which shadowed
            # the builtin format().
            prev_file_name, prev_ext = os.path.splitext(
                str(os.path.basename(prev_file)))
            if (current_file in prev_file_name):
                print("DELETING: " + str(prev_file))
                os.remove(prev_file)
                print("Successfully DELETED: " + str(prev_file))

        print("Exporting to shapefile")
        ras2poly(fili=file,
                 dbic=[cc + 3],
                 filo=shp_result,
                 smoothv="Yes",
                 ftype="SHP",
                 foptions="")
        print("Shapefile is successfully created")
Esempio n. 9
0
        outputPath + '.shp.pox', outputPath + '.shx'
    ]


##### Program #####

# Require both an input and an output path on the command line
# (argv[0] is the script name).
if (len(sys.argv) < 3):
    qprintl(
        '(O_o) Invalid arguments: please provide input file and output file')
    terminate()

inputFile = sys.argv[1]

# Open Dataset
qprint('Opening file...')
# eAM_WRITE: the dataset is modified later (channels may be added).
dataset = datasource.open_dataset(inputFile, mode=datasource.eAM_WRITE)
auxiliaryData = dataset.aux_data  # channel descriptions are read/set via this
qprintl('OK \\(^o^)/')

qprintl('Reading channels...')

# Get input channels
if (not inputChannelsProvided()):
    listAllChannels()
inputChannels = getInputChannels()
markInputChannels(inputChannels)
qprintl('')

# Locate (or create) the result channel for each processing stage.
kClusChannel = getChannelNumber(KCLUS_RESULT)
fmoChannel = getChannelNumber(FMO_RESULT)
sieveChannel = getChannelNumber(SIEVE_RESULT)
Esempio n. 10
0
def icemapr(infile,
            infilec,
            inmask,
            inmasks,
            inpct,
            outfile,
            logfile=None,
            logdetails=2):
    """
    IceMAP-R algorithm for river ice using SAR images with HH polarization

    Parameters:
        INFILE : Name of the file for the ortorectified HH radar image
        INFILEC : Channel number in INFILE for the HH radar image to use
        INMASK : Name of the file for the georeferenced vector of the river polygon
        INMASKS : Segment number in INMASK for the vector to use
        INPCT : Name of the file for the legend or predefined type (freeze, thaw)
        OUTFILE : Name of the output file
        LOGFILE : Name of the log file
        LOGDETAILS : Threshold of log details. From 1 to 5. 1=high detail log, 5=low detail log


    Details:
        IceMAP-R uses PCIDSK format for intermediate results because it is the more convenient format, and often the only
        one that we can use with Geomatica functions. PCIDSK is a data structure for holding digital images and related
        data, such as LUTs, vectors, bitmaps, and other data types.

        Segments are the parts of a PCIDSK database which hold data related to the imagery in the database. A database
        can store up to 1024 segments, provided you have enough disk space. Twelve kinds of information are stored as
        segments. Vectors is one type. To see the segment number of a specific information in a PCIDSK, you need to go
        in the Files tab in Geomatica FOCUS and explore the desired file structure. For the Shapefile format, all the
        data are considered to be in segment number 1.

        Pseudocolor Tables are another type of segment (PCT). Pseudocolor segments hold numerical tables which map
        image DN values to a specific color. Colors are defined by an intensity value (between 0 and 255) for each of
        the red, green, and blue component. For the INPCT parameter, a custom pseudocolor table can be supplied in text
        format. For a template, look in the createPCT function below or in Geomatica Help. That function creates a text
        file for two predefined pseudocolor tables used in IceMAP-R. A PCT segment contains an array of 256 colors and
        assigns color values to 8-bit images. A PCT always contains exactly 256 entries. In the text file, entries can
        be grouped using range.

    Constraints:
        Need to be executed with Python 2.7 in 64 bits and Geomatica Prime 2015 with service pack 1 or numpy 1.8.2+
        To get more information about PCI modules, see python algorithm reference
        http://www.pcigeomatics.com/geomatica-help/index.html?page=concepts%2Falgoreference_c%2Fpace2n100.html

    Return value:
        False when a validation step or the PCI processing fails; otherwise
        falls off the end of the function (returns None).
    """

    if setupLogger(logfile, logdetails * 10):
        #Log opening message
        logger.info("- Version python [{0}]".format(sys.version))
        logger.info(
            "- Function call [{0}, {1}, {2}, {3}, {4}, {5}, {6}, {7}]".format(
                infile, infilec, inmask, inmasks, inpct, outfile, logfile,
                logdetails))

    # NOTE(review): this function mixes the root logger (logging.*) with the
    # module logger (logger.*); messages may be routed to different handlers.
    logging.info("Checking validity of inputs parameters")
    # Check input files
    for f, k in zip([infile, inmask], ['Input image file', 'Input mask']):
        if not os.path.exists(f):
            logger.error("{0} doesn't exists! Looked for {1}".format(k, f))
            return False

    if os.path.splitext(inmask)[1] == '.shp':
        if inmasks != 1:
            logger.warning(
                "The segment number for a shapefile is always 1. Input value changed for the process!"
            )
            inmasks = 1

    if inpct.lower() in ['freeze', 'thaw']:
        pct2use = createPCT(inpct)
    elif os.path.splitext(inpct)[1] == '.txt':
        if not os.path.exists(inpct):
            logger.error(
                "PCT file doesn't exists! Looked for {0}".format(inpct))
            return False
        pct2use = inpct
    else:
        logger.error('Invalid legend file format! Need to be a text file.')
        # Originally returned 1 (truthy) here, unlike every other error path;
        # normalized to False so callers can rely on a falsy failure value.
        return False

    # Check output file
    if os.path.exists(outfile):
        #File already exists
        logger.error(
            'Output file already exists! The file must be deleted before running this process.'
        )
        return False

    try:
        logging.info(
            "Creation of the temporary file to copy the input image and intermediate results"
        )

        # Temporary file with the date and time
        tempDir = tempfile.gettempdir()
        tmpfile = os.path.join(
            tempDir, "icemap_tmpfile_{0}.pix".format(
                datetime.now().strftime("%y%m%d_%H%M%S")))
        logger.info(
            '*Temporary file name for the process : {0}'.format(tmpfile))
        logger.info(
            '*It will be deleted if the icemap is produced succeesfully')

        # Getting information about the HH polarized RS2 image (pixel height and width)
        dataset = ds.open_dataset(infile)
        height = dataset.height
        width = dataset.width

        logging.info("  Creation of the database")

        # Creation of image database file to copy the input image and results
        ifile = tmpfile  # File name
        tex1 = "Temporary file for Icemap results"  # Descriptive text
        tex2 = ''  # Text
        dbsz = [width, height]  # X pixels by Y lines
        pxsz = []  # Meter resolution [x,y]
        dbnc = [6, 0, 0, 0]  # Six 8-bit channels
        dblayout = "BAND"  # Use band interleaving
        cim(ifile, tex1, tex2, dbsz, pxsz, dbnc, dblayout)

        # A georeferencing segment is automatically created as the first segment of the new file.
        segment = 1

        logging.info("  Copy of the projection")

        # Projection copy from the current pix into temporary file
        src_crs = dataset.crs
        src_gc = dataset.geocoding
        writer = ds.BasicWriter(tmpfile)
        writer.crs = src_crs
        writer.geocoding = src_gc

        logging.info("  Adding channels for intermediate results")
        # Add new image channels to existing file
        channels = [0, 0, 0, 6]  # Add 6x 32bit real channels
        pcimod(ifile, "ADD", channels)

        logging.info("  Copy of the original image")
        # Database to database image transfert
        chan_sar = [7]
        fili = infile  # Input file name
        filo = tmpfile  # Output file name
        dbic = [infilec]  # Input raster channel
        dboc = chan_sar  # Output raster channel
        dbiw = []  # Raster input window - Use full image
        dbow = []  # Raster output window - Use full image
        iii(fili, filo, dbic, dboc, dbiw, dbow)

        logging.info(
            "Conversion of river mask in bitmap segment inside temporary file")
        # Conversion of river mask in bitmap segment
        fili = inmask  # Input polygon file
        dbvs = [inmasks]  # Polygon layer
        filo = tmpfile  # Output file name
        dbsd = 'River mask'  # Segment descriptor
        pixres = []  # Pixels resolution equal to output image
        ftype = ''  # Output file extension
        foptions = ''  # Output format type
        poly2bit(fili, dbvs, filo, dbsd, pixres, ftype, foptions)

        segment += 1
        riverMask = segment

        logging.info(
            "Cropping bitmap for the area that is covered by input image")
        # Bitmap crop of the area that is covered in both images
        source = """if (%{0[0]}=0) then
            %%{1}=0;
        endif;""".format(chan_sar, riverMask)
        undefval = []
        model(ifile, source, undefval)

        logging.info("Computing texture analysis")
        # Texture analysis
        chan_tex = [8, 9, 10]
        dbic = chan_sar  # Input raster image channel
        texture = [2, 4, 7]  # Select required texture
        dboc = chan_tex  # Output channels chan_tex[1],chan_tex[2],chan_tex[3]
        flsz = [7, 7]  # Filter Size in pixels
        greylev = [256]  # Number of gray levels
        spatial = [1, 1]  # Spatial relationship
        tex(ifile, dbic, texture, dboc, flsz, greylev, spatial)

        logging.info(
            "Performing Kuan filtering to remove speckle on the original image data"
        )
        # Performs Kuan filtering to remove speckle on image data
        chan_fkuan = [11]
        dbic = chan_sar  # Channel to be filtered
        dboc = chan_fkuan  # Filtered results
        flsz = [7, 7]  # 7x7 filter size
        mask = []  # Filter entire image, area mask
        nlook = [1.0]  # Number of looks
        imagefmt = 'AMP'  # Amplitude image format
        fkuan(ifile, dbic, dboc, flsz, mask, nlook, imagefmt)

        # Modify channel descriptior
        desc = 'FKUAN result'  # New channel description
        dboc = chan_fkuan  # Output channel
        mcd(ifile, desc, dboc)

        logging.info("Performing median filtering")
        # Performs median filtering to further smooth image data, while preserving sharp edges
        chan_fme = [12]
        dbic = chan_fkuan  # input channel
        dboc = chan_fme  # Output channel
        flsz = [3, 3]  # 3x3 filter
        mask = [riverMask]  # Bitmap mask segment
        bgrange = []  # Background values range
        failvalu = []  # Failure value
        bgzero = ''  # Set background to 0 - Default, YES
        fme(ifile, dbic, dboc, flsz, mask, bgrange, failvalu, bgzero)

        # Modify channel descriptior
        desc = 'FME result'  # New channel description
        dboc = chan_fme  # Output channel
        mcd(ifile, desc, dboc)

        logging.info(
            "Performing unsupervised clustering using the Fuzzy K-means method"
        )
        # FUZCLUS  - Performs unsupervised clustering using the Fuzzy K-means method
        chan_fuz1 = [1]
        dbic = [chan_tex[1]]  # Input channel
        dboc = chan_fuz1  # Output channel
        mask = [riverMask]  # Area mask
        numclus = [7]  # Requested number of clusters
        seedfile = ''  # Automatically generate seeds
        maxiter = [20]  # No more than 20 iterations
        movethrs = [0.01]  # Movement threshold
        siggen = ''  # Do not generate signatures
        backval = []  # No background value to be ignored
        nsam = []  # Number of pixel values to sample - Use default 262144
        fuzclus(ifile, dbic, dboc, mask, numclus, seedfile, maxiter, movethrs,
                siggen, backval, nsam)

        logging.info("Clearing regions smaller than 12 pixels")
        # Reads an image channel and merges image value polygons smaller than
        # a user-specified threshold with the largest neighboring polygon
        chan_sieve = [2]
        dbic = chan_fuz1  # Input raster channel
        dboc = chan_sieve  # Output raster channel
        sthresh = [12]  # Polygon size threshold
        keepvalu = [0]  # Value excluded from filtering
        connect = [4]  # Connectedness of lines
        sieve(ifile, dbic, dboc, sthresh, keepvalu, connect)

        logging.info("Extracting class 1 for a new classification")
        # Extracting class 1
        dbic = chan_sieve  # Input raster channel
        dbob = []  # Create new bitmap
        tval = [1, 1]  # Threshold range (min,max)
        comp = 'OFF'  # Complement mode
        dbsn = 'THR_1'  # Output segment name
        dbsd = 'MASK_CLASS_1'  # Output segment description
        thr(ifile, dbic, dbob, tval, comp, dbsn, dbsd)

        segment += 1
        class1Mask = segment

        logging.info("Reclassification of class 1")
        # Reclassification of class 1
        chan_fuz2 = [3]
        dbic = chan_tex  # Input channels
        dboc = chan_fuz2  # Output channel
        mask = [class1Mask]  # Area mask
        numclus = [20]  # Requested number of clusters
        seedfile = ""  # Automatically generate seeds
        maxiter = [20]  # No more than 20 iterations
        movethrs = [0.01]  # Movement threshold
        siggen = ""  # Do not generate signatures
        backval = []  # No background value to be ignored
        nsam = []  # Number of pixel values to sample - Use default 262144
        fuzclus(ifile, dbic, dboc, mask, numclus, seedfile, maxiter, movethrs,
                siggen, backval, nsam)

        logging.info("Extracting class 7 for a new classification")
        # Extracting class 7
        dbic = chan_sieve  # Input raster channel
        dbob = []  # Create new bitmap
        tval = [7, 7]  # Threshold range (min,max)
        comp = 'OFF'  # Complement mode
        dbsn = 'THR_7'  # Output segment name
        dbsd = 'MASK_CLASS_7'  # Output segment description
        thr(ifile, dbic, dbob, tval, comp, dbsn, dbsd)

        segment += 1
        class7Mask = segment

        logging.info("Reclassification of class 7")
        # Reclassification of class 7
        chan_fuzclus3 = [4]
        dbic = chan_fme  # input channel
        dboc = chan_fuzclus3  # output channel
        mask = [class7Mask]  # mask area
        numclus = [8]  # requested number of clusters
        seedfile = ""  # automatically generate seeds
        maxiter = [20]  # no more than 20 iterations
        movethrs = [0.01]  # movement threshold
        siggen = ""  # do not generate signatures
        backval = []  # no background value to be ignored
        nsam = []  # Number of pixel values to sample - Use default 262144
        fuzclus(ifile, dbic, dboc, mask, numclus, seedfile, maxiter, movethrs,
                siggen, backval, nsam)

        logging.info(
            "Final classification - fusion of the three previous classification"
        )
        # Final classification
        chan_mosaic3 = [5]
        source = """if (%{0[0]}>1) and (%{0[0]}<7) then
        %{1[0]} = %{0[0]}+1;
    elseif (%{0[0]} = 1) then
    	if (%{2[0]}<9) and (%{2[0]}>0) then
    		%{1[0]} = 1;
    	elseif (%{2[0]}>=9) then
    		%{1[0]} = 2;
        endif;
    elseif (%{0[0]} = 7) then
    	if (%{3[0]}>=5) then
    		%{1[0]} = 9;
    	elseif (%{3[0]}<5) and (%{3[0]}>0) then
    		%{1[0]} = 8;
        endif;
    endif;""".format(chan_sieve, chan_mosaic3, chan_fuz2, chan_fuzclus3)
        undefval = []  #Value for undefined operations
        model(ifile, source, undefval)
        mcd(tmpfile, 'Model', chan_mosaic3)

        logging.info("Clearing regions smaller than 12 pixels")
        # Reads an image channel and merges image value polygons smaller than
        # a user-specified threshold with the largest neighboring polygon
        chan_sieve2 = [6]
        dbic = chan_mosaic3  # Input raster channel
        dboc = chan_sieve2  # Output raster channel
        sthresh = [12]  # Polygon size threshold
        keepvalu = [0]  # Value excluded from filtering
        connect = [4]  # Connectedness of lines
        sieve(ifile, dbic, dboc, sthresh, keepvalu, connect)

        logging.info("Importation of the pseudocolor table data")
        # reads a pseudocolor table from a textfile and transfers the data into a database file
        ifile = tmpfile  # Output file name
        dbpct = []  # Output PCT segment
        dbsn = 'PCT'  # Output segment name
        dbsd = 'PCT for showing results of icemap classification'  # Output segment description
        pctform = 'ATT'  # PCT text format
        tfile = pct2use  # PCT text file name
        nseg = pctread(ifile, dbpct, dbsn, dbsd, pctform, tfile)

        logging.info("Export classification to TIF file")
        # Export classification to TIF file
        fili = tmpfile  # Input file name
        filo = outfile  # Output file name
        dbiw = []  # Raster input windows
        dbic = chan_sieve2  # Input raster channel to export
        dbib = []  # Input bitmap segment
        dbvs = []  # Input vector segment
        dblut = []  # Input LUT segment
        dbpct = nseg  # Input PCT segment
        ftype = 'TIF'  # Output file type
        foptions = ''  # Output file options
        fexport(fili, filo, dbiw, dbic, dbib, dbvs, dblut, dbpct, ftype,
                foptions)

        # NOTE(review): there is no explicit success return value and the
        # temporary file is not removed here, despite the log message above
        # promising deletion -- confirm whether cleanup happens in the caller.

    except PCIException as e:
        # 'as' syntax is valid in both Python 2.6+ and Python 3.
        logger.exception(e.message)
        return False
Esempio n. 11
0
import pci
# datasource: Dataset (.pix) File Manager
from pci.api import datasource


def qprint(msg):
    """Write *msg* to stdout without a trailing newline, flushing at once."""
    print(str(msg), flush=True, end='')


def qprintl(msg):
    """Write *msg* plus a newline to stdout, flushing immediately."""
    print(msg, flush=True)


# Open the PCIDSK dataset in write mode (eAM_WRITE) from a relative path
qprint('Opening file...')
dataset = datasource.open_dataset('data/golden_horseshoe.pix',
                                  mode=datasource.eAM_WRITE)
# aux_data exposes the dataset's auxiliary metadata; used below to
# read per-channel descriptions (get_chan_description)
auxiliaryData = dataset.aux_data
qprintl('OK \\(^o^)/')

# List every channel with its stored description (channel numbers are 1-based)
qprintl('Channels in dataset:')
for i in range(1, dataset.chan_count + 1):
    qprintl('  Channel {}: {}'.format(i,
                                      auxiliaryData.get_chan_description(i)))

# Add channels only when the dataset has fewer than 9
qprintl('Is it neccessary to create a new channnel?')
if (dataset.chan_count < 9):
    qprint('  Yes, creating new channel(s)...')
    # pcimod: Channel Manager (Add/Delete)
Esempio n. 12
0
            # Threshold imagery: Low for open water, high for flooded vegetation
            if thres_image:
                if not scale_image:
                    scale_channel = "4"
                logging.info('   Threshold imagery in process...  ')
                if cal_thres:
                    logging.info('   Calculating thresholds from seed files...  ')

                    # Work around for determining new vector segments
                    #   Add bitmap just to determine segment number
                    #   The following 3 vector segments will be consecutive #s after this bitmap
                    #   PCI 2016 support Vector segment read, but not PCI2015
                    #   gdal can not determine the correct vector segment #
                    #   Create and name newly created segment
                    dataset = ds.open_dataset(out_file_ortho, ds.eAM_WRITE)
                    #   Get the number of the newly created bitmap segment
                    bitmap_seg = dataset.create_bitmap()
                    veg_process.import_vector(openwater_seed,out_file_ortho)
                    veg_process.import_vector(nfloodveg_seed,out_file_ortho)
                    veg_process.import_vector(floodveg_seed,out_file_ortho)
                    water_vseg = bitmap_seg + 1
                    veg_non_flood_vseg = bitmap_seg + 2
                    veg_flood_vseg = bitmap_seg + 3

                    water_bseg = veg_process.vegcover2bit(out_file_ortho, out_file_ortho,water_vseg)
                    veg_non_flood_bseg = veg_process.vegcover2bit(out_file_ortho, out_file_ortho,veg_non_flood_vseg)
                    veg_flood_bseg = veg_process.vegcover2bit(out_file_ortho, out_file_ortho,veg_flood_vseg)
                    stat_info_water = EGS_utility.EGSUtility().raster_his(out_file_ortho,int(scale_channel),water_bseg,report_file)
                    stat_info_veg_non_flood = EGS_utility.EGSUtility().raster_his(out_file_ortho,int(scale_channel),veg_non_flood_bseg,report_file)
                    stat_info_veg_flood = EGS_utility.EGSUtility().raster_his(out_file_ortho,int(scale_channel),veg_flood_bseg,report_file)
Esempio n. 13
0
def classification(image):

    # Set timer
    start = time.time()

    # Define output file name
    outputFile = "GH_classPolygons.shp"
    output = outputFolder + "\\" + outputFile

    # get image statistics to extract channel number
    print "Detecting current channels..."
    with ds.open_dataset(image) as dataset:
        chansCount = dataset.chan_count
    print str(chansCount) + " channels detected"
    print ""

    # Whipe previously created channels (only use if you want a fresh clean input file and don't need previously created channels)
    chansDel = range(7, chansCount + 1)
    if chansCount > 6:  # Adjust depending on image bands
        pcimod(file=image, pciop='del', pcival=chansDel)
        print str(len(chansDel)) + " previously created channels deleted"

    # Whipe previously created shape files in output folder
    files = glob.glob(outputFolder + "\\" + '*')
    for f in files:
        os.remove(f)
    print "Previous output files deleted"
    print ""

    # Count input channels and create input channel list
    with ds.open_dataset(image) as dataset:
        inputChansCount = dataset.chan_count
    inputChans = range(1, inputChansCount + 1)
    print inputChans

    # Add 3 channels to the image for storing algorithm output
    print "Adding three 8-bit channels to image..."
    pcimod(file=image, pciop='add', pcival=[3, 0, 0, 0, 0, 0])
    print "Three 8-bit channels added"
    print ""

    # Run k-means cluster algorithm
    classesNumber = [8]  # define number of classes
    iterations = [10]  # define number iterations
    moveThresh = [0.01]  # define move threshhold
    print "Running unsupervized k-means classification..."
    print "Creating " + str(classesNumber) + " classes, applying " + str(
        iterations) + " iterations at a move-threshhold of " + str(moveThresh)
    kclus(file=image,
          dbic=inputChans,
          dboc=[7],
          numclus=classesNumber,
          maxiter=iterations,
          movethrs=moveThresh)
    flag1 = time.time()
    print "Classification complete! Time ellapsed: " + str(flag1 -
                                                           start) + " seconds"
    print ""

    # Run mode filter
    print "Running Mode Filter..."
    fmo(file=image, dbic=[7], dboc=[8], thinline="OFF", flsz=[3, 3])
    flag2 = time.time()
    print "Filtering complete! Time ellapsed: " + str(flag2 -
                                                      start) + " seconds"
    print ""

    # Run sieve
    print "Applying sieve..."
    sieve(file=image, dbic=[8], dboc=[9], sthresh=[16])
    flag3 = time.time()
    print "Sieve complete! Time ellapsed: " + str(flag3 - start) + " seconds"
    print ""

    # Create vector ploygons and export as shape file
    print "Creating polygons..."
    ras2poly(fili=image, dbic=[9], filo=output, ftype="SHP")
    flag4 = time.time()
    print "Polygons created! Time ellapsed: " + str(flag4 - start) + " seconds"
    print ""

    print "Exporting as shape file..."

    end = time.time()
    print "Total time ellapsed: " + str(end - start) + " seconds"
Esempio n. 14
0
def processing(path, image):
    """Classify a single image and export its class polygons as a shapefile.

    Pipeline: KCLUS (k-means) -> FMO (mode filter) -> SIEVE -> RAS2POLY,
    each step writing into a freshly added channel of the input file.

    Parameters:
        path  -- directory holding the image; NOTE(review): joined with plain
                 `+`, so it must already end in a path separator — confirm
                 against callers.
        image -- file name of the image to classify.

    Relies on module-level names: outputFolder, ds, pcimod, kclus, fmo,
    sieve, ras2poly, Report, enableDefaultReport, report.
    """

    print ""
    print "Classifying " + str(image)
    print ""

    # Set timer
    start = time.time()

    # Define input/output file names
    file_name, ext = os.path.splitext(str(image))
    outputFile = file_name + "_classified.shp"
    output = outputFolder + "\\" + outputFile
    inputfile = path + image  # NOTE(review): no os.path.join here

    # Delete previous output files whose names contain this image's base name
    files = glob.glob(outputFolder + "\\" + '*')
    for f in files:
        f_name, ext = os.path.splitext(str(os.path.basename(f)))
        if file_name in f_name:
            os.remove(f)
            print str(f) + " deleted"
    print ""

    # get image statistics to extract channel number
    print "Detecting current channels..."
    with ds.open_dataset(inputfile) as dataset:
        chansCount = dataset.chan_count
    print str(chansCount) + " channels detected"
    print ""

    # Remove channels left over from earlier runs (anything beyond channel 9)
    chansDel = range(10, chansCount + 1)
    if chansCount > 9:  # Adjust depending on image bands
        pcimod(file=inputfile, pciop='del', pcival=chansDel)
        print str(len(chansDel)) + " previously created channels deleted"

    # Redefine chansCount after the deletion; the inner steps write to
    # chansCount+1 .. chansCount+3, so this value must stay fixed from here on
    with ds.open_dataset(inputfile) as dataset:
        chansCount = dataset.chan_count

    # Count input channels and create input channel list
    with ds.open_dataset(inputfile) as dataset:
        inputChansCount = dataset.chan_count
    inputChans = range(
        1, 8)  # Input channels 1..7 (range upper bound is exclusive)
    print "Input channels: " + str(inputChans)

    # k-means cluster algorithm (the `data` argument is unused: the inner
    # steps operate on `inputfile` and `chansCount` from the enclosing scope)
    def classification(data):
        """Append one channel and run KCLUS into channel chansCount+1."""
        # Add channel
        pcimod(file=inputfile, pciop='add', pcival=[0, 0, 1, 0, 0, 0])

        # Define input parameters
        classesNumber = [8]  # define number of classes
        iterations = [10]  # define number iterations
        moveThresh = [0.01]  # define move threshhold

        print "Running unsupervized k-means classification..."
        print "Creating " + str(classesNumber) + " classes, applying " + str(
            iterations) + " iterations at a move-threshhold of " + str(
                moveThresh)

        # Run algorithm; route its report output to the `report` file, then
        # always restore terminal reporting even if kclus raises
        try:
            Report.clear()
            enableDefaultReport(report)
            kclus(file=inputfile,
                  dbic=inputChans,
                  dboc=[chansCount + 1],
                  numclus=classesNumber,
                  maxiter=iterations,
                  movethrs=moveThresh)
        finally:
            enableDefaultReport('term')  # this will close the report file
        flag1 = time.time()
        print "Classification complete! Time elapsed: " + str(
            flag1 - start) + " seconds"
        print ""

    # Mode filter
    def FMO(data):
        """Append a channel and smooth the class raster with a 5x5 mode filter."""
        # Add channel
        pcimod(file=inputfile, pciop='add', pcival=[0, 0, 1, 0, 0, 0])
        print "Running Mode Filter..."
        filter = [5, 5]  # NOTE(review): shadows the `filter` builtin
        # Run algorithm (reads chansCount+1, writes chansCount+2)
        fmo(file=inputfile,
            dbic=[chansCount + 1],
            dboc=[chansCount + 2],
            thinline="OFF",
            flsz=filter)
        flag2 = time.time()
        print "Filtering complete! Time elapsed: " + str(flag2 -
                                                         start) + " seconds"
        print ""

    # Sieve
    def SIEVE(data):
        """Append a channel and merge polygons smaller than 32 pixels."""
        # Add channel
        pcimod(file=inputfile, pciop='add', pcival=[0, 0, 1, 0, 0, 0])
        print "Applying sieve..."
        # Run algorithm (reads chansCount+2, writes chansCount+3)
        sieve(file=inputfile,
              dbic=[chansCount + 2],
              dboc=[chansCount + 3],
              sthresh=[32])
        flag3 = time.time()
        print "Sieve complete! Time elapsed: " + str(flag3 -
                                                     start) + " seconds"
        print ""

    # Create vector ploygons and export as shape file
    def RAS2POLY(data):
        """Vectorise the sieved raster (chansCount+3) into `output` as SHP."""
        print "Creating polygons..."
        ras2poly(fili=inputfile,
                 dbic=[chansCount + 3],
                 filo=output,
                 ftype="SHP")
        flag4 = time.time()
        print "Polygons created! Time elapsed: " + str(flag4 -
                                                       start) + " seconds"
        print ""

        print "Exporting as shape file..."

    # Run the four steps in order; each depends on the previous one's channel
    classification(image)
    FMO(image)
    SIEVE(image)
    RAS2POLY(image)

    end = time.time()
    print "Processing time elapsed for " + image + ": " + str(
        end - start) + " seconds"
    print ""