def process(self, tup):
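        """Storm bolt body: read one image from HDFS, compute summary
        statistics (gray-band moments/histogram and PCA explained variance),
        and emit them as JSON on the "summaryStatistics" stream.

        Assumes module-level imports of numpy, json, storm, binaryhadoop,
        and utilities (names taken from this method's calls).
        """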

        localFileName = tup.values[0]
        hdfsFileName = tup.values[1]
        imageData = {}
        imageData["metadata"] = None

        storm.log("start processing %s %s" % (localFileName, hdfsFileName))

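        # each HDFS record is a (key, sorter, value) triple: "metadata" holds
        # the image metadata dict, "mask" the valid-pixel mask, and any other
        # key a spectral band; the loop assumes the metadata and mask records
        # arrive before the band records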
        for key, sorter, interpretedValue in binaryhadoop.readFromHDFSiter(hdfsFileName):
            if key == "metadata":
                imageData["metadata"] = interpretedValue
                bands = {}
                storm.log("    read metadata")
            elif key == "mask":
                mask = utilities.rollMask(interpretedValue > 0)
                numPixels = numpy.nonzero(mask)[0].size 
                storm.log("    read mask")
            else:
                bands[key] = interpretedValue[mask]
                storm.log("    read band %s" % key)

        if imageData["metadata"] is not None:
            wavelengths = imageData["metadata"]["bandWavelength"]
            multipliers = imageData["metadata"]["bandMultiplier"]

            storm.log("making imageArray 1")
            imageList = utilities.preprocessImage(bands, multipliers, wavelengths, imageData)
        
            #find the covariance of the image bands
            storm.log("making covariance")
            imageCov = self.makeCovariance(imageList, numPixels)  

            #find the principal components (eigenvalues/eigenvectors)
            storm.log("making principal components 1")
            imgV, imgP = numpy.linalg.eig(imageCov)

            #sort the components by descending eigenvalue
            storm.log("making principal components 2")
            indexList = numpy.argsort(-imgV)
            imgV = imgV[indexList]
            imgP = imgP[:,indexList]

            storm.log("making variance percentage")
            xVarianceComponents = 5
            variancePercentage = [x/numpy.sum(imgV) for x in imgV][:xVarianceComponents]

            storm.log("making rogue bands")
            rogueBands = self.checkpca(imgP.T,xVarianceComponents)

            storm.log("making gray bands")
            bandGray = numpy.zeros(len(imageList[0]))
            for band in imageList:
                bandGray += (numpy.array(band))**2
            bandGray = numpy.sort(bandGray) 

            bandPercent = 1
            #pixels above the 99th percentile are dropped to avoid skewing the mean
            bandGray = bandGray[bandGray < numpy.percentile(bandGray, 100 - bandPercent)]
 
            #Histogram is created 
            storm.log("making gray band histogram")
            [hist,bin_edges] = numpy.histogram(bandGray,bins=100)

            #Locate the peaks on the histogram, using the bin centers as x-values
            storm.log("making peaks and valleys")
            peaks, valleys = self.findpeaks(hist, 3, (bin_edges[:-1] + bin_edges[1:]) / 2)

            #Find mean and standard deviation of all pixels
            bandMean = numpy.mean(bandGray)
            bandSigma = numpy.std(bandGray)
        
            storm.log("making JSON output")
            imageData["numPixels"] = int(numPixels)

            imageData["grayBandMean"] = float(bandMean)
            imageData["grayBandSigma"] = float(bandSigma)

            #Report percentage of total pixels which lie beyond one standard deviation from mean
            imageData["grayBandPlusOneSigma"] = float(numpy.sum(bandGray > (bandMean+bandSigma))/numpy.float(numPixels))
            imageData["grayBandMinusOneSigma"] = float(numpy.sum(bandGray < (bandMean-bandSigma))/numpy.float(numPixels))

            imageData["grayBandHistPeaks"] = [[float(x), int(y)] for x, y in peaks]
            #imageData["grayBandHistValleys"] = [[float(x), int(y)] for x, y in valleys]

            #PCA analysis and sum of first 5 principal components
            imageData["grayBandExplainedVariance"] = [float(x) for x in variancePercentage]
 
            #Report bands that have high leave-one-out loading variance
            imageData["grayBandRogueBands"] = [str(x) for x in rogueBands] 

            #Report histogram
            imageData["grayBandHistogram"] = [[float(x) for x in bin_edges], [int(x) for x in hist]]

            #emit the final statistics
            storm.log("emiting Storm tuple")
            storm.emit([localFileName, hdfsFileName, json.dumps(imageData)], stream="summaryStatistics")

            storm.log("done with %s %s" % (localFileName, hdfsFileName))
Example #2
    # create the report directory tree if it does not already exist;
    # os.makedirs also handles the case where gmm-knn exists but
    # image_reports does not (os.mkdir would raise OSError there)
    if not os.path.exists("/var/www/reports/{}-{}/gmm-knn/image_reports".format(year, month)):
        os.makedirs("/var/www/reports/{}-{}/gmm-knn/image_reports".format(year, month))

    # makingReport = not os.path.exists("/var/www/reports/{}-{}/gmm-knn/image_reports/{}.html".format(year, month, shortName))
    # makingImage = not os.path.exists("/var/www/reports/{}-{}/gmm-knn/image_reports/{}.svg".format(year, month, shortName))

    makingReport = True
    makingImage = True

    print "    " + ("" if makingReport else "NOT ") + "making report and " + ("" if makingImage else "NOT ") + "making image"; sys.stdout.flush()

    if makingImage:
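        # record every key seen in the HDFS file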
        sawkeys = set()
        for key, sorter, value in binaryhadoop.readFromHDFSiter(imageName):
            sawkeys.add(key)
            if key == "metadata":
                metadata = value
            elif key == "mask":
                mask = value
                numBands = len(metadata["bandNames"])
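                # allocate a double-precision RGB image with the same
                # spatial shape as the mask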
                rgbDouble = numpy.empty([3] + list(mask.shape), dtype=numpy.double)
            else:
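                # map bands B05/B04/B03 to RGB channel indices 0/1/2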
                if key == "B05":
                    index = 0
                elif key == "B04":
                    index = 1
                elif key == "B03":
                    index = 2
                else: