import csv
import itertools
import os
import sqlite3
import subprocess
from subprocess import PIPE

import numpy
import pylab
from PIL import Image

# store, geometry_store, DIRECTORY and ClassifierError are defined elsewhere
# in the surrounding project and are used as-is below.


def runLushWithPipes(scriptName, *arguments, **keywordArguments):
    # Build the command line for the Lush interpreter
    scriptPath = os.path.join(DIRECTORY, store.replaceFileExtension(scriptName, 'lsh'))
    terms = ['lush', scriptPath, DIRECTORY] + [str(x) for x in arguments]
    standardError = True
    errorCount = 0
    errorLimit = 5
    standardInput = keywordArguments.get('standardInput')
    # Retry until the script runs without writing to standard error
    while standardError:
        if standardInput:
            process = subprocess.Popen(terms, stdin=PIPE, stdout=PIPE, stderr=PIPE)
            standardOutput, standardError = process.communicate(standardInput)
        else:
            process = subprocess.Popen(terms, stdout=PIPE, stderr=PIPE)
            standardOutput, standardError = process.communicate()
        # Count failures and give up after errorLimit attempts
        if standardError:
            errorCount += 1
            print 'Failed (%d/%d): %s' % (errorCount, errorLimit, store.extractFileBaseName(scriptPath))
            if errorCount >= errorLimit:
                raise ClassifierError(standardError)
    # Return whatever the script wrote to standard output
    return standardOutput
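# A minimal usage sketch, assuming a hypothetical train.lsh script exists in
# DIRECTORY; the script name and arguments below are made up.
def demoRunLushWithPipes():
    print runLushWithPipes('train', 'dataset.db', 100)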
class RegionStore(object):  # class name assumed; the listing shows only __init__
    def __init__(self, datasetPath):
        # Normalize the extension to .db
        datasetPath = store.replaceFileExtension(datasetPath, 'db')
        # Check whether the dataset already exists
        flag_exists = os.path.exists(datasetPath)
        # Connect
        self.connection = sqlite3.connect(datasetPath)
        self.connection.text_factory = str
        self.cursor = self.connection.cursor()
        # If the dataset doesn't exist, create the regions table
        if not flag_exists:
            self.cursor.execute('CREATE TABLE regions (multispectralLeft INTEGER, multispectralTop INTEGER, multispectralRight INTEGER, multispectralBottom INTEGER)')
            self.connection.commit()
        # Remember the normalized path
        self.datasetPath = datasetPath
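# A usage sketch for the regions store above; the file name and pixel frame
# are made up, and RegionStore is the assumed class name noted above.
def demoRegionStore():
    regionStore = RegionStore('regions-demo')
    regionStore.cursor.execute(
        'INSERT INTO regions VALUES (?, ?, ?, ?)', (0, 0, 64, 64))
    regionStore.connection.commit()
    print regionStore.cursor.execute('SELECT * FROM regions').fetchall()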
class Store(object):  # samples store; name matches the Store returned by load() and create() below
    def __init__(self, datasetPath):
        # Normalize the extension to .db
        datasetPath = store.replaceFileExtension(datasetPath, 'db')
        # Check whether the dataset already exists
        flag_exists = os.path.exists(datasetPath)
        # Connect
        self.connection = sqlite3.connect(datasetPath)
        self.connection.text_factory = str
        self.cursor = self.connection.cursor()
        # If the dataset doesn't exist, create the samples table
        if not flag_exists:
            self.cursor.execute('CREATE TABLE samples (hasRoof INTEGER, xGeo REAL, yGeo REAL, multispectralData BLOB, panchromaticData BLOB)')
            self.connection.commit()
        # Remember the normalized path
        self.datasetPath = datasetPath
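# A usage sketch for the samples store above; the coordinates and pixel
# payloads are dummies, and BLOB columns take sqlite3.Binary in Python 2.
def demoSampleStore():
    sampleStore = Store('samples-demo')
    sampleStore.cursor.execute(
        'INSERT INTO samples VALUES (?, ?, ?, ?, ?)',
        (1, -73.98, 40.75, sqlite3.Binary('\x00\x01'), sqlite3.Binary('\x02\x03')))
    sampleStore.connection.commit()
    print sampleStore.cursor.execute('SELECT hasRoof, xGeo, yGeo FROM samples').fetchall()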
def verifyConnectionTable(connectionTablePath):
    # Get extension
    extension = os.path.splitext(connectionTablePath)[1]
    # If it is a CSV, convert it into a connection matrix for Lush
    if extension == '.csv':
        # Record (rowIndex, columnIndex) for each non-empty cell
        connections = []
        with open(connectionTablePath) as connectionTableFile:
            for rowIndex, row in enumerate(csv.reader(connectionTableFile)):
                for columnIndex, column in enumerate(row):
                    if column.strip():
                        connections.append((rowIndex, columnIndex))
        # Save matrix for Lush
        connectionTablePath = store.replaceFileExtension(connectionTablePath, 'map')
        with open(connectionTablePath, 'wt') as connectionTableFile:
            connectionTableFile.write(makeLushMatrix(connections))
    # Return the path to a file Lush can load
    return connectionTablePath
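# makeLushMatrix is referenced above but absent from this listing. Below is a
# hypothetical stand-in that renders the (rowIndex, columnIndex) pairs as a
# dense 0/1 text matrix, one row per line; the exact format the Lush scripts
# expect may differ.
def makeLushMatrix(connections):
    rowCount = max(x[0] for x in connections) + 1
    columnCount = max(x[1] for x in connections) + 1
    matrix = numpy.zeros((rowCount, columnCount), dtype=int)
    for rowIndex, columnIndex in connections:
        matrix[rowIndex, columnIndex] = 1
    return '\n'.join(' '.join(str(value) for value in row) for row in matrix)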
def saveShapefile(targetPath, regionFrames, multispectralImage, withPixelToGeoConversion=True):
    # Get spatial reference
    spatialReference = multispectralImage.getSpatialReference()
    # Define
    def convertToWktGeometry(frame):
        # Unpack and close the ring, as WKT polygons require
        left, top, right, bottom = frame
        polygon = (left, top), (right, top), (right, bottom), (left, bottom), (left, top)
        # Convert pixel corners to geographic coordinates if requested
        if withPixelToGeoConversion:
            polygon = multispectralImage.convertPixelLocationsToGeoLocations(polygon)
        # Return
        return 'POLYGON ((%s))' % ', '.join('%s %s' % x for x in polygon)
    # Convert each region frame
    wktGeometries = map(convertToWktGeometry, regionFrames)
    # Save
    geometry_store.save(store.replaceFileExtension(targetPath, 'shp'), spatialReference, wktGeometries)
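# A standalone check of the WKT formatting used above; the frame is made up
# and no geo conversion or shapefile writing is involved.
def demoWktGeometry():
    left, top, right, bottom = 0, 0, 64, 64
    polygon = (left, top), (right, top), (right, bottom), (left, bottom), (left, top)
    print 'POLYGON ((%s))' % ', '.join('%s %s' % x for x in polygon)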
def plotIterationHistory(targetPath, iterationHistoryVsTestError):
    # Sort by iterationIndex
    iterationHistoryVsIndex = [(iterationIndex, testError, trainingError) for testError, trainingError, iterationIndex, classifierPath in iterationHistoryVsTestError]
    iterationHistoryVsIndex.sort()
    # Assemble
    testErrors = [x[1] for x in iterationHistoryVsIndex]
    trainingErrors = [x[2] for x in iterationHistoryVsIndex]
    iterationIndices = [x[0] for x in iterationHistoryVsIndex]
    # Plot
    pylab.figure()
    pylab.hold(True)
    pylab.plot(iterationIndices, testErrors, 'r')
    pylab.plot(iterationIndices, trainingErrors, 'b')
    pylab.legend(['Validation Test Error', 'Validation Training Error'])
    pylab.title('Iteration History')
    pylab.xlabel('Iteration Index')
    pylab.ylabel('Percent Error')
    pylab.savefig(store.replaceFileExtension(targetPath, 'png'))
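# A usage sketch with made-up (testError, trainingError, iterationIndex,
# classifierPath) tuples; writes iteration-history.png.
def demoPlotIterationHistory():
    history = [
        (0.30, 0.25, 0, 'classifier0'),
        (0.20, 0.12, 1, 'classifier1'),
        (0.15, 0.08, 2, 'classifier2'),
    ]
    plotIterationHistory('iteration-history', history)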
def plot(targetImagePath, points, probabilities):
    # Normalize probabilities and index them by (x, y)
    normalizedProbabilities = normalize(probabilities)
    probabilityByXY = dict(itertools.izip(points, normalizedProbabilities))
    # Initialize color
    maximumColor = numpy.array([255, 255, 255])
    # Initialize image from the sorted unique coordinates
    xs = list(set([x[0] for x in points])); xs.sort()
    ys = list(set([x[1] for x in points])); ys.sort()
    imageWidth = len(xs)
    imageHeight = len(ys)
    image = Image.new('RGB', (imageWidth, imageHeight))
    pixels = image.load()
    # Fill image, leaving missing points black
    for xIndex in xrange(len(xs)):
        for yIndex in xrange(len(ys)):
            xy = xs[xIndex], ys[yIndex]
            colorFraction = probabilityByXY[xy] if xy in probabilityByXY else 0
            pixels[xIndex, yIndex] = tuple(map(int, maximumColor * colorFraction))
    # Save matrix as an image
    targetImagePath = store.replaceFileExtension(targetImagePath, 'png')
    image.save(targetImagePath)
    return targetImagePath
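# normalize is referenced in plot() above but absent from this listing; a
# plausible stand-in that rescales values to the unit interval.
def normalize(values):
    values = numpy.array(values, dtype=float)
    span = values.max() - values.min()
    if span == 0:
        return numpy.zeros(len(values))
    return (values - values.min()) / span
# A usage sketch with a made-up 2x2 grid of points.
def demoPlot():
    points = [(0, 0), (0, 1), (1, 0), (1, 1)]
    print plot('probabilities-demo', points, [0.1, 0.4, 0.6, 0.9])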
def runLushProcess(scriptName, *arguments):
    # Start the Lush script with stdin and stdout piped for streaming use
    scriptPath = os.path.join(DIRECTORY, store.replaceFileExtension(scriptName, 'lsh'))
    terms = ['lush', scriptPath, DIRECTORY] + [str(x) for x in arguments]
    return subprocess.Popen(terms, stdin=PIPE, stdout=PIPE)
def runLush(scriptName, *arguments):
    # Run the Lush script to completion
    scriptPath = os.path.join(DIRECTORY, store.replaceFileExtension(scriptName, 'lsh'))
    terms = ['lush', scriptPath, DIRECTORY] + [str(x) for x in arguments]
    returnCode = subprocess.call(terms)
    # A nonzero exit code signals failure by convention
    if returnCode != 0:
        raise ClassifierError(scriptPath)
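# A usage sketch for the streaming variant, assuming a hypothetical
# classify.lsh script that reads requests on stdin and replies on stdout.
def demoRunLushProcess():
    process = runLushProcess('classify', 'dataset.db')
    process.stdin.write('0 0 64 64\n')
    process.stdin.flush()
    print process.stdout.readline()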
def load(datasetPath):
    datasetPath = store.replaceFileExtension(datasetPath, 'db')
    if not os.path.exists(datasetPath): raise IOError('Dataset does not exist: ' + datasetPath)
    return Store(datasetPath)
def create(datasetPath):
    datasetPath = store.replaceFileExtension(datasetPath, 'db')
    if os.path.exists(datasetPath): os.remove(datasetPath)
    return Store(datasetPath)
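# A usage sketch: create() starts a fresh dataset and load() reopens it;
# the path is made up.
def demoStoreLifecycle():
    datasetStore = create('demo-dataset')
    datasetStore.connection.close()
    print load('demo-dataset').datasetPath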