Example #1
def _add_optional_output_param(param, args, user, result_hooks):
    if not param.isExternalType() or not is_on_girder(param) \
       or param.identifier() not in args or \
       (param.identifier() + FOLDER_SUFFIX) not in args:
        return []
    value = args[param.identifier()]
    folder = args[param.identifier() + FOLDER_SUFFIX]

    container_args = []
    if param.longflag:
        container_args.append(param.longflag)
    elif param.flag:
        container_args.append(param.flag)
    else:
        return []

    folderModel = ModelImporter.model('folder')
    instance = folderModel.load(folder, level=AccessType.WRITE, user=user)
    if not instance:
        raise RestException('Invalid Folder id (%s).' % (str(folder)))

    # Output Binding
    path = VolumePath(value)
    container_args.append(path)
    result_hooks.append(GirderUploadVolumePathToFolder(path, folder))

    return container_args
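The container arguments and result hooks built here feed directly into docker_run, as the later examples show. A minimal sketch of that glue, assuming the girder_worker imports below (the image name and the surrounding param/args/user variables are illustrative):

from girder_worker.docker.tasks import docker_run

result_hooks = []
container_args = _add_optional_output_param(param, args, user, result_hooks)

# Each hook fires after the container exits, uploading the VolumePath the
# container wrote into the Girder folder it was bound to.
async_result = docker_run.delay(
    'my/image:latest',                  # illustrative image name
    container_args=container_args,
    girder_result_hooks=result_hooks)
job = async_result.job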
Example #2
def prepare_task(params, user, token, index_params, opt_params, has_simple_return_file, reference):
    ca = []
    result_hooks = []
    primary_input_name = None

    # Get primary name and reference
    for param in index_params:
        if param.channel != 'output':
            arg, name = _add_indexed_input_param(param, params, user, token)
            if (name and not primary_input_name and
                    SLICER_TYPE_TO_GIRDER_MODEL_MAP[param.typ] != 'folder'):
                primary_input_name = name
                reference['userId'] = str(user['_id'])
                value = _parseParamValue(param, params[param.identifier()], user, token)
                itemId = value['_id']
                if SLICER_TYPE_TO_GIRDER_MODEL_MAP[param.typ] == 'file':
                    reference['fileId'] = str(value['_id'])
                    itemId = value['itemId']
                reference['itemId'] = str(itemId)

    # optional params
    for param in opt_params:
        if param.channel == 'output':
            ca.extend(_add_optional_output_param(param, params, user, result_hooks, reference))
        else:
            ca.extend(_add_optional_input_param(param, params, user, token))

    if has_simple_return_file:
        param_id = return_parameter_file_name + FOLDER_SUFFIX
        param_name_id = return_parameter_file_name
        if param_id in params and param_name_id in params:
            value = params[return_parameter_file_name]
            folder = params[return_parameter_file_name + FOLDER_SUFFIX]

            folderModel = ModelImporter.model('folder')
            instance = folderModel.load(folder, level=AccessType.WRITE, user=user)
            if not instance:
                raise RestException('Invalid Folder id (%s).' % (str(folder)))

            ca.append('--returnparameterfile')

            # Output Binding
            path = VolumePath(value)
            ca.append(path)
            ref = reference.copy()
            ref['identifier'] = 'returnparameterfile'
            result_hooks.append(GirderUploadVolumePathToFolder(
                path, folder, upload_kwargs={'reference': json.dumps(ref)}))

    # indexed params
    for param in index_params:
        if param.channel == 'output':
            ca.append(_add_indexed_output_param(param, params, user, result_hooks, reference))
        else:
            arg, name = _add_indexed_input_param(param, params, user, token)
            ca.append(arg)
            if name and not primary_input_name:
                primary_input_name = name

    return ca, result_hooks, primary_input_name
Example #3
    def run_albany(self, params):
        """Run albany on a folder that is on girder.

        Will store the output in the specified output folder.
        """
        inputFolderId = params.get('inputFolderId')
        outputFolderId = params.get('outputFolderId')
        filename = 'input.yaml'
        folder_name = 'workingDir'
        volume = GirderFolderIdToVolume(inputFolderId,
                                        volume=TemporaryVolume.default,
                                        folder_name=folder_name)
        outputDir = inputFolderId + '/' + folder_name + '/output.exo'
        volumepath = VolumePath(outputDir, volume=TemporaryVolume.default)
        result = docker_run.delay(ALBANY_IMAGE,
                                  pull_image=False,
                                  container_args=[filename],
                                  entrypoint='/usr/local/albany/bin/AlbanyT',
                                  remove_container=True,
                                  working_dir=volume,
                                  girder_result_hooks=[
                                      GirderUploadVolumePathToFolder(
                                          volumepath, outputFolderId)
                                  ])

        # Set the multiscale meta data and return the job
        jobId = result.job['_id']
        return utils.setMultiscaleMetaData(jobId, inputFolderId,
                                           outputFolderId)
Example #4
    def run_dream3d(self, params):
        """Run Dream3D on a folder that is on girder.

        Will store the output in the specified output folder.
        """
        inputFolderId = params.get('inputFolderId')
        outputFolderId = params.get('outputFolderId')
        folder_name = 'workingDir'
        volume = GirderFolderIdToVolume(inputFolderId,
                                        volume=TemporaryVolume.default,
                                        folder_name=folder_name)
        outputDir = inputFolderId + '/' + folder_name + '/output'
        volumepath = VolumePath(outputDir, volume=TemporaryVolume.default)
        result = docker_run.delay(
            DREAM3D_IMAGE,
            pull_image=False,
            container_args=[
                '-c', 'bash /root/runPipelineRunner $(ls *.json | head -1)'
            ],
            remove_container=True,
            working_dir=volume,
            entrypoint='bash',
            girder_result_hooks=[
                GirderUploadVolumePathToFolder(volumepath, outputFolderId)
            ])

        # Set the multiscale meta data and return the job
        jobId = result.job['_id']
        return utils.setMultiscaleMetaData(jobId, inputFolderId,
                                           outputFolderId)
Example #5
    def runInpainting(self, image, mask, folder):
        basename = os.path.splitext(image['name'])[0]
        outPath = VolumePath(basename + '_result.jpg')
        artifactPath = VolumePath('job_artifacts')
        job = docker_run.delay(
            'zachmullen/inpainting:latest',
            container_args=[
                GirderFileIdToVolume(image['_id']),
                GirderFileIdToVolume(mask['_id']), outPath, '--artifacts-dir',
                artifactPath, '--progress-pipe',
                ProgressPipe()
            ],
            girder_job_title='Inpainting: %s' % image['name'],
            girder_result_hooks=[
                GirderUploadVolumePathToFolder(
                    outPath, folder['_id'],
                    upload_kwargs={
                        'reference': json.dumps({
                            'inpaintedImage': True,
                            'folderId': str(folder['_id']),
                        })
                    }),
                # GirderUploadVolumePathJobArtifact(artifactPath)
            ]).job

        folder['inpaintingJobId'] = job['_id']
        Folder().save(folder)

        job['inpaintingImageId'] = image['_id']
        job['inpaintingMaskId'] = mask['_id']
        job['inpaintingFolderId'] = folder['_id']
        return Job().save(job)
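The reference JSON attached through upload_kwargs is stored on the upload, so server-side code can react when the result file arrives. A minimal sketch of a consumer, assuming Girder's model.file.finalizeUpload.after event exposes the upload document (the handler and binding names are illustrative):

import json

from girder import events


def _onInpaintingUpload(event):
    # 'reference' is the JSON string set in upload_kwargs above
    # (assumption: the event payload carries the upload document).
    reference = event.info.get('upload', {}).get('reference')
    if not reference:
        return
    try:
        data = json.loads(reference)
    except ValueError:
        return
    if data.get('inpaintedImage'):
        # React to the finished result, e.g. record it on the folder.
        resultFileId = event.info['file']['_id']


events.bind('model.file.finalizeUpload.after', 'inpainting', _onInpaintingUpload)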
Example #6
def _runUtm(folder, paramsFile, outputFolder):
    outpath = VolumePath('__results__')
    return docker_run.delay('samuelgerber/utm', container_args=[
        GirderFolderIdToVolume(folder['_id']),
        GirderFileIdToVolume(paramsFile['_id']),
        '--workingfolder', outpath
    ], girder_job_title='UTM: ' + folder['name'], girder_result_hooks=[
        GirderUploadVolumePathToFolder(outpath, outputFolder['_id'])
    ]).job
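These examples bind VolumePath both to a single output file (Example #5) and to a whole directory ('__results__' here). A minimal sketch of the two shapes, under the assumption that the hook walks a directory path and uploads its contents into the target folder:

from girder_worker.docker.transforms import VolumePath
from girder_worker.docker.transforms.girder import GirderUploadVolumePathToFolder

out_file = VolumePath('result.tif')   # the container writes exactly one file
out_dir = VolumePath('__results__')   # the container fills this directory

hooks = [
    GirderUploadVolumePathToFolder(out_file, outputFolder['_id']),
    # Assumption: directory contents are uploaded recursively after the run.
    GirderUploadVolumePathToFolder(out_dir, outputFolder['_id']),
]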
Example #7
def _add_indexed_output_param(param, args, user, result_hooks):
    value = args[param.identifier()]
    folder = args[param.identifier() + FOLDER_SUFFIX]

    folderModel = ModelImporter.model('folder')
    instance = folderModel.load(folder, level=AccessType.WRITE, user=user)
    if not instance:
        raise RestException('Invalid Folder id (%s).' % (str(folder)))

    # Output Binding
    path = VolumePath(value)
    result_hooks.append(GirderUploadVolumePathToFolder(path, folder))
    return path
Example #8
def prepare_task(params, user, token, index_params, opt_params,
                 has_simple_return_file):
    ca = []
    result_hooks = []
    primary_input_name = None

    # optional params
    for param in opt_params:
        if param.channel == 'output':
            ca.extend(
                _add_optional_output_param(param, params, user, result_hooks))
        else:
            ca.extend(_add_optional_input_param(param, params, user, token))

    if has_simple_return_file:
        param_id = return_parameter_file_name + FOLDER_SUFFIX
        param_name_id = return_parameter_file_name
        if param_id in params and param_name_id in params:
            value = params[return_parameter_file_name]
            folder = params[return_parameter_file_name + FOLDER_SUFFIX]

            folderModel = ModelImporter.model('folder')
            instance = folderModel.load(folder,
                                        level=AccessType.WRITE,
                                        user=user)
            if not instance:
                raise RestException('Invalid Folder id (%s).' % (str(folder)))

            ca.append('--returnparameterfile')

            # Output Binding
            path = VolumePath(value)
            ca.append(path)
            result_hooks.append(GirderUploadVolumePathToFolder(path, folder))

    # indexed params
    for param in index_params:
        if param.channel == 'output':
            ca.append(
                _add_indexed_output_param(param, params, user, result_hooks))
        else:
            arg, name = _add_indexed_input_param(param, params, user, token)
            ca.append(arg)
            if name and not primary_input_name:
                primary_input_name = name

    return ca, result_hooks, primary_input_name
Example #9
    def createOrthorectifyTask(imageFile, rpcFile):
        # Set output file name based on input file name
        orthoName = os.path.splitext(imageFile['name'])[0] + '_ortho.tif'
        outputVolumePath = VolumePath(orthoName)

        # Docker container arguments
        containerArgs = [
            'danesfield/tools/orthorectify.py',
            # Source image
            GirderFileIdToVolume(imageFile['_id'], gc=gc),
            # DSM
            GirderFileIdToVolume(dsmFile['_id'], gc=gc),
            # Destination image
            outputVolumePath,
            '--dtm',
            GirderFileIdToVolume(dtmFile['_id'], gc=gc),
            '--raytheon-rpc',
            GirderFileIdToVolume(rpcFile['_id'], gc=gc),
        ]
        if occlusionThreshold is not None:
            containerArgs.extend(
                ['--occlusion-thresh',
                 str(occlusionThreshold)])
        if denoiseRadius is not None:
            containerArgs.extend(['--denoise-radius', str(denoiseRadius)])

        # Result hooks
        # - Upload output files to output folder
        # - Provide upload metadata
        upload_kwargs = createUploadMetadata(jobId, stepName)
        resultHooks = [
            GirderUploadVolumePathToFolder(outputVolumePath,
                                           outputFolder['_id'],
                                           upload_kwargs=upload_kwargs,
                                           gc=gc)
        ]

        return docker_run.s(
            **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                       containerArgs=containerArgs,
                                       jobTitle=('[%s] Orthorectify: %s' %
                                                 (initWorkingSetName,
                                                  imageFile['name'])),
                                       jobType=stepName,
                                       user=requestInfo.user,
                                       resultHooks=resultHooks))
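Unlike the .delay() calls above, this factory returns docker_run.s(...), a Celery signature that the caller composes before submitting. A minimal sketch of how such factories are typically fanned out (imageFiles and rpcFiles, and their pairing, are illustrative):

from celery import group

# One orthorectify signature per source image; submitting the group runs
# them in parallel, and each task uploads its output via its result hooks.
signatures = [createOrthorectifyTask(imageFile, rpcFile)
              for imageFile, rpcFile in zip(imageFiles, rpcFiles)]
groupResult = group(signatures).delay()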
Example #10
    def createConvertMsiToRgbTask(prefix, imageFile):
        # Set output file name based on prefix
        outputName = prefix + '_rgb_byte_image.tif'
        outputVolumePath = VolumePath(outputName)

        # Docker container arguments
        containerArgs = [
            'danesfield/tools/msi_to_rgb.py',
            # Pansharpened MSI image
            GirderFileIdToVolume(imageFile['_id'], gc=gc),
            # Output image
            outputVolumePath
        ]
        # Enable byte option by default
        if byte or byte is None:
            containerArgs.append('--byte')
        if alpha:
            containerArgs.append('--alpha')
        if rangePercentile is not None:
            containerArgs.extend(['--range-percentile', str(rangePercentile)])
        # TODO: Handle --big option (i.e. BIGTIFF)

        # Result hooks
        # - Upload output files to output folder
        # - Provide upload metadata
        upload_kwargs = createUploadMetadata(jobId, stepName)
        resultHooks = [
            GirderUploadVolumePathToFolder(
                outputVolumePath,
                outputFolder['_id'],
                upload_kwargs=upload_kwargs,
                gc=gc)
        ]

        return docker_run.s(
            **createDockerRunArguments(
                image=DockerImage.DANESFIELD,
                containerArgs=containerArgs,
                jobTitle=('[%s] Convert MSI to RGB: %s' %
                          (initWorkingSetName, prefix)),
                jobType=stepName,
                user=requestInfo.user,
                resultHooks=resultHooks
            )
        )
Example #11
    def createCropAndPansharpenTask(prefix,
                                    msiImageFile,
                                    panImageFile,
                                    msiRpcFile=None,
                                    panRpcFile=None):
        # Set output directory
        outputVolumePath = VolumePath('__output__')

        containerArgs = [
            'danesfield/tools/crop_and_pansharpen.py',
            GirderFileIdToVolume(dsmFile['_id'], gc=gc), outputVolumePath,
            '--pan',
            GirderFileIdToVolume(panImageFile['_id'], gc=gc)
        ]
        if panRpcFile is not None:
            containerArgs.append(GirderFileIdToVolume(panRpcFile['_id'],
                                                      gc=gc))

        containerArgs.extend(
            ['--msi',
             GirderFileIdToVolume(msiImageFile['_id'], gc=gc)])
        if msiRpcFile is not None:
            containerArgs.append(GirderFileIdToVolume(msiRpcFile['_id'],
                                                      gc=gc))

        # Result hooks
        # - Upload output files to output folder
        # - Provide upload metadata
        upload_kwargs = createUploadMetadata(jobId, stepName)
        resultHooks = [
            GirderUploadVolumePathToFolder(outputVolumePath,
                                           outputFolder['_id'],
                                           upload_kwargs=upload_kwargs,
                                           gc=gc)
        ]

        return docker_run.s(**createDockerRunArguments(
            image=DockerImage.DANESFIELD,
            containerArgs=containerArgs,
            jobTitle='[%s] Crop and pansharpen: %s' %
            (initWorkingSetName, prefix),
            jobType=stepName,
            user=requestInfo.user,
            resultHooks=resultHooks))
Example #12
    def createPansharpenTask(prefix, panImageFile, msiImageFile):
        # Set output file name based on prefix
        outputName = prefix + '_ortho_pansharpened.tif'
        outputVolumePath = VolumePath(outputName)

        # Docker container arguments
        containerArgs = [
            'gdal_pansharpen.py',
            # PAN image
            GirderFileIdToVolume(panImageFile['_id'], gc=gc),
            # MSI image
            GirderFileIdToVolume(msiImageFile['_id'], gc=gc),
            # Output image
            outputVolumePath
        ]

        # Result hooks
        # - Upload output files to output folder
        # - Provide upload metadata
        upload_kwargs = createUploadMetadata(jobId, stepName)
        resultHooks = [
            GirderUploadVolumePathToFolder(
                outputVolumePath,
                outputFolder['_id'],
                upload_kwargs=upload_kwargs,
                gc=gc)
        ]

        return docker_run.s(
            **createDockerRunArguments(
                image=DockerImage.DANESFIELD,
                containerArgs=containerArgs,
                jobTitle='[%s] Pansharpen: %s' % (initWorkingSetName, prefix),
                jobType=stepName,
                user=requestInfo.user,
                resultHooks=resultHooks
            )
        )
Example #13
    def run_smtk_mesh_placement(self, params):
        """Run an smtk mesh placement on a folder that is on girder.

        Will store the output in the specified output folder.
        """
        inputFolderId = params.get('inputFolderId')
        outputFolderId = params.get('outputFolderId')
        folder_name = 'workingDir'
        volume = GirderFolderIdToVolume(inputFolderId,
                                        volume=TemporaryVolume.default,
                                        folder_name=folder_name)
        outputDir = inputFolderId + '/' + folder_name + '/output/'
        volumepath = VolumePath(outputDir, volume=TemporaryVolume.default)
        result = docker_run.delay(
            SMTK_IMAGE,
            pull_image=False,
            container_args=[
                '-c',
                ('. ~/setupEnvironment; '
                 'python /usr/local/afrl-automation/runner.py input.json; '
                 'mkdir output; '
                 'mv input.yaml output/; '
                 'mv elastic.yaml output/; '
                 'mv *BC.exo output/')
            ],
            entrypoint='bash',
            remove_container=True,
            working_dir=volume,
            girder_result_hooks=[
                GirderUploadVolumePathToFolder(volumepath, outputFolderId)
            ])

        # Set the multiscale meta data and return the job
        jobId = result.job['_id']
        return utils.setMultiscaleMetaData(jobId, inputFolderId,
                                           outputFolderId)
Example #14
def runMetrics(initWorkingSetName, stepName, requestInfo, jobId, outputFolder,
               referenceFolder, referencePrefix, dtmFile, dsmFile, clsFile,
               mtlFile):
    """
    Run a Girder Worker job to compute metrics on output files.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param referenceFolder: Reference folder document.
    :type referenceFolder: dict
    :param referencePrefix: Reference file prefix.
    :type referencePrefix: str
    :param dtmFile: DTM file document.
    :type dtmFile: dict
    :param dsmFile: DSM file document.
    :type dsmFile: dict
    :param clsFile: CLS file document.
    :type clsFile: dict
    :param mtlFile: MTL file document.
    :type mtlFile: dict
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    if referencePrefix == "STANDARD":
        # We know that there's no reference data with this selection
        containerArgs = ['echo', 'No ground truth selected for scoring']

        asyncResult = docker_run.delay(
            **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                       containerArgs=containerArgs,
                                       jobTitle='[%s] Run metrics' %
                                       initWorkingSetName,
                                       jobType=stepName,
                                       user=requestInfo.user))
    else:
        # Otherwise we assume the reference data exists, and try to
        # run the metrics
        outputVolumePath = VolumePath('__output__')

        # Docker container arguments
        containerArgs = [
            'danesfield/tools/run_metrics.py', '--output-dir',
            outputVolumePath, '--ref-dir',
            GirderFolderIdToVolume(referenceFolder['_id'], gc=gc),
            '--ref-prefix', referencePrefix, '--dsm',
            GirderFileIdToVolume(dsmFile['_id'], gc=gc), '--cls',
            GirderFileIdToVolume(clsFile['_id'], gc=gc), '--mtl',
            GirderFileIdToVolume(mtlFile['_id'], gc=gc), '--dtm',
            GirderFileIdToVolume(dtmFile['_id'], gc=gc)
        ]

        # Result hooks
        # - Upload output files to output folder
        # - Provide upload metadata
        upload_kwargs = createUploadMetadata(jobId, stepName)
        resultHooks = [
            GirderUploadVolumePathToFolder(outputVolumePath,
                                           outputFolder['_id'],
                                           upload_kwargs=upload_kwargs,
                                           gc=gc)
        ]

        asyncResult = docker_run.delay(
            **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                       containerArgs=containerArgs,
                                       jobTitle='[%s] Run metrics' %
                                       initWorkingSetName,
                                       jobType=stepName,
                                       user=requestInfo.user,
                                       resultHooks=resultHooks))

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #15
def fitDtm(initWorkingSetName,
           stepName,
           requestInfo,
           jobId,
           outputFolder,
           dsmFile,
           outputPrefix,
           iterations=None,
           tension=None):
    """
    Run a Girder Worker job to fit a Digital Terrain Model (DTM) to a
    Digital Surface Model (DSM).

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param dsmFile: DSM image file document.
    :type dsmFile: dict
    :param outputPrefix: The prefix of the output file name.
    :type outputPrefix: str
    :param iterations: The base number of iterations at the coarsest scale.
    :type iterations: int
    :param tension: Number of inner smoothing iterations.
    :type tension: int
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Set output file name based on input file name
    dtmName = outputPrefix + '_DTM.tif'
    outputVolumePath = VolumePath(dtmName)

    # Docker container arguments
    containerArgs = [
        'danesfield/tools/fit_dtm.py',
        GirderFileIdToVolume(dsmFile['_id'], gc=gc), outputVolumePath
    ]
    if iterations is not None:
        containerArgs.extend(['--num-iterations', str(iterations)])
    if tension is not None:
        containerArgs.extend(['--tension', str(tension)])

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(outputVolumePath,
                                       outputFolder['_id'],
                                       upload_kwargs=upload_kwargs,
                                       gc=gc)
    ]

    asyncResult = docker_run.delay(
        **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                   containerArgs=containerArgs,
                                   jobTitle=(
                                       '[%s] Fit DTM: %s' %
                                       (initWorkingSetName, dsmFile['name'])),
                                   jobType=stepName,
                                   user=requestInfo.user,
                                   resultHooks=resultHooks))

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #16
def classifyMaterials(initWorkingSetName,
                      stepName,
                      requestInfo,
                      jobId,
                      outputFolder,
                      imageFiles,
                      metadataFiles,
                      modelFile,
                      outfilePrefix,
                      cuda=None,
                      batchSize=None,
                      model=None):
    """
    Run a Girder Worker job to classify materials in an orthorectified image.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param imageFiles: List of orthorectified MSI image files.
    :type imageFiles: list[dict]
    :param metadataFiles: List of MSI-source NITF metadata files.
    :type metadataFiles: list[dict]
    :param modelFile: Model file document.
    :type modelFile: dict
    :param outfilePrefix: Prefix for output filename
    :type outfilePrefix: str
    :param cuda: Enable/disable CUDA; enabled by default.
    :type cuda: bool
    :param batchSize: Number of pixels classified at a time.
    :type batchSize: int
    :param model: Optional model name (unused in this snippet).
    :type model: str
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    outputVolumePath = VolumePath('.')

    # Docker container arguments
    containerArgs = list(
        itertools.chain([
            'danesfield/tools/material_classifier.py', '--model_path',
            GirderFileIdToVolume(modelFile['_id'],
                                 gc=gc), '--output_dir', outputVolumePath,
            '--outfile_prefix', outfilePrefix, '--image_paths'
        ], [
            GirderFileIdToVolume(imageFile['_id'], gc=gc)
            for imageFile in imageFiles
        ], ['--info_paths'], [
            GirderFileIdToVolume(metadataFile['_id'], gc=gc)
            for metadataFile in metadataFiles
        ]))
    if cuda is None or cuda:
        containerArgs.append('--cuda')
    if batchSize is not None:
        containerArgs.extend(['--batch_size', str(batchSize)])

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(outputVolumePath,
                                       outputFolder['_id'],
                                       upload_kwargs=upload_kwargs,
                                       gc=gc)
    ]

    asyncResult = docker_run.delay(
        **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                   containerArgs=containerArgs,
                                   jobTitle='[%s] Classify materials' %
                                   initWorkingSetName,
                                   jobType=stepName,
                                   user=requestInfo.user,
                                   resultHooks=resultHooks))

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #17
def computeNdvi(initWorkingSetName, stepName, requestInfo, jobId, outputFolder,
                imageFiles, outputNdviFilename):
    """
    Run a Girder Worker job to compute the NDVI from a set of images

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param imageFiles: List of pansharpened image files.
    :type imageFiles: list[dict]
    :param outputNdviFilename: Filename for output NDVI
    :type outputNdviFilename: str
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Set output file names
    ndviOutputVolumePath = VolumePath(outputNdviFilename)

    # Docker container arguments
    containerArgs = list(
        itertools.chain([
            'danesfield/tools/compute_ndvi.py',
        ], [
            GirderFileIdToVolume(imageFile['_id'], gc=gc)
            for imageFile in imageFiles
        ], [ndviOutputVolumePath]))

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(ndviOutputVolumePath,
                                       outputFolder['_id'],
                                       upload_kwargs=upload_kwargs,
                                       gc=gc)
    ]

    asyncResult = docker_run.delay(
        **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                   containerArgs=containerArgs,
                                   jobTitle='[%s] Compute NDVI' %
                                   initWorkingSetName,
                                   jobType=stepName,
                                   user=requestInfo.user,
                                   resultHooks=resultHooks))

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #18
def getRoadVector(initWorkingSetName,
                  stepName,
                  requestInfo,
                  jobId,
                  outputFolder,
                  left,
                  bottom,
                  right,
                  top):
    """
    Run a Girder Worker job to fetch OSM road vector data for a bounding box.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param left: Longitude of left / westernmost side of bounding box
    :type left: float
    :param bottom: Latitude of bottom / southernmost side of bounding box
    :type bottom: float
    :param right: Longitude of right / easternmost side of bounding box
    :type right: float
    :param top: Latitude of top / northernmost side of bounding box
    :type top: float
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Set output file names
    outputVolumePath = VolumePath('__output__')

    # Docker container arguments
    containerArgs = [
        'danesfield/tools/get_road_vector.py',
        '--left', str(left),
        '--bottom', str(bottom),
        '--right', str(right),
        '--top', str(top),
        '--output-dir', outputVolumePath,
    ]

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(
            outputVolumePath,
            outputFolder['_id'],
            upload_kwargs=upload_kwargs,
            gc=gc)
    ]

    asyncResult = docker_run.delay(
        **createDockerRunArguments(
            image=DockerImage.DANESFIELD,
            containerArgs=containerArgs,
            jobTitle='[%s] Get OSM road vector data' % initWorkingSetName,
            jobType=stepName,
            user=requestInfo.user,
            resultHooks=resultHooks
        )
    )

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #19
def generateDsm(initWorkingSetName, stepName, requestInfo, jobId, outputFolder,
                pointCloudFile, outputPrefix):
    """
    Run a Girder Worker job to generate a Digital Surface Model (DSM)
    from a point cloud.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param pointCloudFile: Point cloud file document.
    :type pointCloudFile: dict
    :param outputPrefix: The prefix of the output file name.
    :type outputPrefix: str
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Set output file name based on point cloud file
    dsmName = outputPrefix + '_P3D_DSM.tif'
    outputVolumePath = VolumePath(dsmName)

    # Docker container arguments
    containerArgs = [
        'danesfield/tools/generate_dsm.py', outputVolumePath,
        '--source_points',
        GirderFileIdToVolume(pointCloudFile['_id'], gc=gc)
    ]

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(outputVolumePath,
                                       outputFolder['_id'],
                                       upload_kwargs=upload_kwargs,
                                       gc=gc)
    ]

    asyncResult = docker_run.delay(
        **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                   containerArgs=containerArgs,
                                   jobTitle=('[%s] Generate DSM: %s' %
                                             (initWorkingSetName,
                                              pointCloudFile['name'])),
                                   jobType=stepName,
                                   user=requestInfo.user,
                                   resultHooks=resultHooks))

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #20
def generatePointCloud(initWorkingSetName,
                       stepName,
                       requestInfo,
                       jobId,
                       outputFolder,
                       imageFiles,
                       longitude,
                       latitude,
                       longitudeWidth,
                       latitudeWidth):
    """
    Run a Girder Worker job to generate a 3D point cloud from 2D images.

    Requirements:
    - P3D Girder Worker Docker image is available on host
    - Host folder /mnt/GTOPO30 contains GTOPO 30 data

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param imageFiles: List of input image files.
    :type imageFiles: list[dict]
    :param longitude: Longitude of the area of interest.
    :type longitude: float
    :param latitude: Latitude of the area of interest.
    :type latitude: float
    :param longitudeWidth: Width of the area of interest, in degrees of longitude.
    :type longitudeWidth: float
    :param latitudeWidth: Height of the area of interest, in degrees of latitude.
    :type latitudeWidth: float
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Docker volumes
    volumes = [
        BindMountVolume(host_path='/mnt/GTOPO30',
                        container_path='/P3D/GTOPO30',
                        mode='ro')
    ]

    outputVolumePath = VolumePath('__output__')

    # Docker container arguments
    # TODO: Consider a solution where args are written to a file, in
    # case of very long command lines
    containerArgs = list(itertools.chain(
        [
            'python', '/P3D/RTN_distro/scripts/generate_point_cloud.pyc',
            '--out', outputVolumePath,
            '--longitude', str(longitude),
            '--latitude', str(latitude),
            '--longitudeWidth', str(longitudeWidth),
            '--latitudeWidth', str(latitudeWidth),
            '--firstProc', '0',
            '--threads', '8',
            '--images'
        ],
        [GirderFileIdToVolume(imageFile['_id'], gc=gc)
         for imageFile in imageFiles],
    ))

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(
            outputVolumePath,
            outputFolder['_id'],
            upload_kwargs=upload_kwargs,
            gc=gc)
    ]

    asyncResult = docker_run.delay(
        volumes=volumes,
        **createDockerRunArguments(
            image=DockerImage.P3D,
            containerArgs=containerArgs,
            jobTitle='[%s] Generate point cloud' % initWorkingSetName,
            jobType=stepName,
            user=requestInfo.user,
            resultHooks=resultHooks
        )
    )

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #21
def textureMapping(initWorkingSetName, stepName, requestInfo, jobId,
                   outputFolder, objFiles, imageFiles, dsmFile, dtmFile):
    """
    Run a Girder Worker job to run texture mapping.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param objFiles: List of OBJ files.
    :type objFiles: list[dict]
    :param imageFiles: List of cropped and pansharpened image files.
    :type imageFiles: list[dict]
    :param dsmFile: DSM file document.
    :type dsmFile: dict
    :param dtmFile: DTM file document.
    :type dtmFile: dict
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Set output directory
    outputVolumePath = VolumePath('__output__')

    # Set output path for occlusion mesh
    occlusionMeshName = 'xxxx.obj'
    occlusionMeshVolumePath = VolumePath(occlusionMeshName)

    containerArgs = [
        'danesfield/tools/texture_mapping.py',
        GirderFileIdToVolume(dsmFile['_id'], gc=gc),
        GirderFileIdToVolume(dtmFile['_id'], gc=gc), outputVolumePath,
        occlusionMeshVolumePath, '--crops'
    ]
    containerArgs.extend(
        [GirderFileIdToVolume(f['_id'], gc=gc) for f in imageFiles])

    containerArgs.append('--buildings')
    containerArgs.extend(
        [GirderFileIdToVolume(f['_id'], gc=gc) for f in objFiles])

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(outputVolumePath,
                                       outputFolder['_id'],
                                       upload_kwargs=upload_kwargs,
                                       gc=gc),
        GirderUploadVolumePathToFolder(occlusionMeshVolumePath,
                                       outputFolder['_id'],
                                       upload_kwargs=upload_kwargs,
                                       gc=gc)
    ]

    asyncResult = docker_run.delay(
        **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                   containerArgs=containerArgs,
                                   jobTitle='[%s] Texture mapping' %
                                   initWorkingSetName,
                                   jobType=stepName,
                                   user=requestInfo.user,
                                   resultHooks=resultHooks))

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #22
def roofGeonExtraction(initWorkingSetName, stepName, requestInfo, jobId,
                       outputFolder, pointCloudFile, dtmFile, buildingMaskFile,
                       modelFolder, modelFilePrefix):
    """
    Run a Girder Worker job to run Purdue and Columbia's roof geon
    extraction pipeline.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param pointCloudFile: Point cloud file document.
    :type pointCloudFile: dict
    :param dtmFile: DTM file document.
    :type dtmFile: dict
    :param buildingMaskFile: Building mask file document.
    :type buildingMaskFile: dict
    :param modelFolder: Model folder document.
    :type modelFolder: dict
    :param modelFilePrefix: Model name prefix.
    :type modelFilePrefix: str
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Set output directory
    outputVolumePath = VolumePath('__output__')

    # Docker container arguments
    containerArgs = [
        'danesfield/tools/roof_geon_extraction.py', '--las',
        GirderFileIdToVolume(pointCloudFile['_id'], gc=gc), '--cls',
        GirderFileIdToVolume(buildingMaskFile['_id'], gc=gc), '--dtm',
        GirderFileIdToVolume(dtmFile['_id'], gc=gc), '--model_dir',
        GirderFolderIdToVolume(modelFolder['_id'], gc=gc), '--model_prefix',
        modelFilePrefix, '--output_dir', outputVolumePath
    ]

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(outputVolumePath,
                                       outputFolder['_id'],
                                       upload_kwargs=upload_kwargs,
                                       gc=gc)
    ]

    asyncResult = docker_run.delay(
        **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                   containerArgs=containerArgs,
                                   jobTitle=('[%s] Roof geon extraction: %s' %
                                             (initWorkingSetName,
                                              buildingMaskFile['name'])),
                                   jobType=stepName,
                                   user=requestInfo.user,
                                   resultHooks=resultHooks))

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #23
def segmentByHeight(initWorkingSetName, stepName, requestInfo, jobId,
                    outputFolder, dsmFile, dtmFile, ndviFile, roadVectorFile):
    """
    Run a Girder Worker job to segment buildings by comparing a DSM to a DTM.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param dsmFile: DSM file document.
    :type dsmFile: dict
    :param dtmFile: DTM file document.
    :type dtmFile: dict
    :param ndviFile: NDVI file document.
    :type ndviFile: dict
    :param roadVectorFile: Road vector file.
    :type roadVectorFile: dict
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Set output file names
    # TODO: Danesfield master script hardcodes these without any
    # prefix; do the same here
    thresholdOutputVolumePath = VolumePath('threshold_CLS.tif')
    roadRasterOutputVolumePath = VolumePath('road_rasterized.tif')
    roadBridgeRasterOutputVolumePath = VolumePath('road_rasterized_bridge.tif')

    # Docker container arguments
    containerArgs = [
        'danesfield/tools/segment_by_height.py',
        # DSM
        GirderFileIdToVolume(dsmFile['_id'], gc=gc),
        # DTM
        GirderFileIdToVolume(dtmFile['_id'], gc=gc),
        # Threshold output image
        thresholdOutputVolumePath,
        # Normalized Difference Vegetation Index image
        '--input-ndvi',
        GirderFileIdToVolume(ndviFile['_id'], gc=gc),
        '--road-vector',
        GirderFileIdToVolume(roadVectorFile['_id'], gc=gc),
        '--road-rasterized',
        roadRasterOutputVolumePath,
        '--road-rasterized-bridge',
        roadBridgeRasterOutputVolumePath
    ]

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(thresholdOutputVolumePath,
                                       outputFolder['_id'],
                                       upload_kwargs=upload_kwargs,
                                       gc=gc)
    ]

    asyncResult = docker_run.delay(
        **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                   containerArgs=containerArgs,
                                   jobTitle='[%s] Segment by height: %s' %
                                   (initWorkingSetName, dsmFile['name']),
                                   jobType=stepName,
                                   user=requestInfo.user,
                                   resultHooks=resultHooks))

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #24
def unetSemanticSegmentation(initWorkingSetName, stepName, requestInfo, jobId,
                             outputFolder, dsmFile, dtmFile, msiImageFile,
                             rgbImageFile, configFile, modelFile):
    """
    Run a Girder Worker job to segment buildings using UNet semantic
    segmentation.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param dsmFile: DSM file document.
    :type dsmFile: dict
    :param dtmFile: DTM file document.
    :type dtmFile: dict
    :param msiImageFile: Pansharpened MSI image file document.
    :type msiImageFile: dict
    :param rgbImageFile: RGB image file document.
    :type rgbImageFile: dict
    :param configFile: Configuration file document.
    :type configFile: dict
    :param modelFile: Model file document.
    :type modelFile: dict
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Set output directory
    outputVolumePath = VolumePath('.')

    # Docker container arguments
    containerArgs = [
        'danesfield/tools/kwsemantic_segment.py',
        # Configuration file
        GirderFileIdToVolume(configFile['_id'], gc=gc),
        # Model file
        GirderFileIdToVolume(modelFile['_id'], gc=gc),
        # RGB image
        GirderFileIdToVolume(rgbImageFile['_id'], gc=gc),
        # DSM
        GirderFileIdToVolume(dsmFile['_id'], gc=gc),
        # DTM
        GirderFileIdToVolume(dtmFile['_id'], gc=gc),
        # MSI image
        GirderFileIdToVolume(msiImageFile['_id'], gc=gc),
        # Output directory
        outputVolumePath,
        # Output file prefix
        'semantic'
    ]

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(outputVolumePath,
                                       outputFolder['_id'],
                                       upload_kwargs=upload_kwargs,
                                       gc=gc)
    ]

    asyncResult = docker_run.delay(
        **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                   containerArgs=containerArgs,
                                   jobTitle=(
                                       '[%s] UNet semantic segmentation: %s' %
                                       (initWorkingSetName, dsmFile['name'])),
                                   jobType=stepName,
                                   user=requestInfo.user,
                                   resultHooks=resultHooks))

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #25
    def runPhotomorph(self, folder, maskRect):
        user = self.getCurrentUser()
        mp4Out = VolumePath('__output_mp4s__/')
        gifOut = VolumePath('__output_gifs__/')

        parent = Folder().load(folder['parentId'],
                               level=AccessType.WRITE,
                               exc=True,
                               user=user)
        outputFolder = Folder().createFolder(parent,
                                             '_output',
                                             public=False,
                                             creator=user,
                                             reuseExisting=True)
        outputMp4 = Folder().createFolder(outputFolder,
                                          'mp4s',
                                          public=False,
                                          creator=user,
                                          reuseExisting=True)
        outputGif = Folder().createFolder(outputFolder,
                                          'gifs',
                                          public=False,
                                          creator=user,
                                          reuseExisting=True)

        parent['photomorphOutputFolderId'] = outputFolder['_id']
        parent['photomorphMaskRect'] = maskRect
        parent['photomorphJobStatus'] = JobStatus.QUEUED
        parent['photomorphOutputItems'] = {'gif': [], 'mp4': []}

        job = docker_run.delay(
            'zachmullen/photomorph:latest',
            container_args=[
                '--mp4-out', mp4Out, '--gif-out', gifOut, '--mask-rect',
                ','.join(str(i) for i in itertools.chain(*maskRect)),
                GirderFolderIdToVolume(folder['_id'], folder_name='_input')
            ],
            girder_job_title='Timelapse creation: %s' % parent['name'],
            girder_result_hooks=[
                GirderUploadVolumePathToFolder(
                    mp4Out, outputMp4['_id'],
                    upload_kwargs={
                        'reference': json.dumps({
                            'photomorph': True,
                            'folderId': str(parent['_id']),
                            'resultType': 'mp4'
                        })
                    }),
                GirderUploadVolumePathToFolder(
                    gifOut, outputGif['_id'],
                    upload_kwargs={
                        'reference': json.dumps({
                            'photomorph': True,
                            'folderId': str(parent['_id']),
                            'resultType': 'gif'
                        })
                    })
            ]).job

        parent['photomorphJobId'] = job['_id']
        Folder().save(parent)

        job['photomorphId'] = parent['_id']
        return Job().save(job)
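Examples #5 and #25 stash application-specific IDs directly on the saved job document; a job event handler can read them back when the job's status changes. A minimal sketch, assuming the jobs plugin's jobs.job.update.after event (the event name and payload layout are assumptions; field names follow Example #25):

from girder import events


def _onJobUpdate(event):
    job = event.info['job']
    if 'photomorphId' not in job:
        return
    # Mirror the job status onto the originating folder, e.g. to update
    # the 'photomorphJobStatus' field that runPhotomorph initialized.
    status = job.get('status')


events.bind('jobs.job.update.after', 'photomorph', _onJobUpdate)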