Example #1
    def runInpainting(self, image, mask, folder):
        basename = os.path.splitext(image['name'])[0]
        outPath = VolumePath(basename + '_result.jpg')
        artifactPath = VolumePath('job_artifacts')
        job = docker_run.delay(
            'zachmullen/inpainting:latest',
            container_args=[
                GirderFileIdToVolume(image['_id']),
                GirderFileIdToVolume(mask['_id']),
                outPath,
                '--artifacts-dir', artifactPath,
                '--progress-pipe', ProgressPipe()
            ],
            girder_job_title='Inpainting: %s' % image['name'],
            girder_result_hooks=[
                GirderUploadVolumePathToFolder(
                    outPath, folder['_id'],
                    upload_kwargs={
                        'reference': json.dumps({
                            'inpaintedImage': True,
                            'folderId': str(folder['_id']),
                        })
                    }),
                # GirderUploadVolumePathJobArtifact(artifactPath)
            ]).job

        folder['inpaintingJobId'] = job['_id']
        Folder().save(folder)

        job['inpaintingImageId'] = image['_id']
        job['inpaintingMaskId'] = mask['_id']
        job['inpaintingFolderId'] = folder['_id']
        return Job().save(job)
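
The pattern in Example #1 recurs throughout this page: Girder inputs are mapped into the container with GirderFileIdToVolume, an output location is declared with VolumePath, and a GirderUploadVolumePathToFolder result hook pushes the output back to Girder once the container exits. A minimal sketch of that round trip, assuming a configured Celery broker and a hypothetical some/image:latest that writes its result to the path it is given:

from girder_worker.docker.tasks import docker_run
from girder_worker.docker.transforms import VolumePath
from girder_worker.docker.transforms.girder import (
    GirderFileIdToVolume, GirderUploadVolumePathToFolder)


def run_minimal(file_id, folder_id):
    # Declares a path inside the task's temporary volume; the container
    # sees it as a writable path on its command line.
    out_path = VolumePath('result.jpg')
    return docker_run.delay(
        'some/image:latest',  # hypothetical image name
        container_args=[GirderFileIdToVolume(file_id), out_path],
        girder_result_hooks=[
            # After the container exits, upload out_path to the folder
            GirderUploadVolumePathToFolder(out_path, folder_id),
        ]).job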
Example #2
def prepare_task(params, user, token, index_params, opt_params, has_simple_return_file, reference):
    ca = []
    result_hooks = []
    primary_input_name = None

    # Get primary name and reference
    for param in index_params:
        if param.channel != 'output':
            arg, name = _add_indexed_input_param(param, params, user, token)
            if (name and not primary_input_name and
                    SLICER_TYPE_TO_GIRDER_MODEL_MAP[param.typ] != 'folder'):
                primary_input_name = name
                reference['userId'] = str(user['_id'])
                value = _parseParamValue(param, params[param.identifier()], user, token)
                itemId = value['_id']
                if SLICER_TYPE_TO_GIRDER_MODEL_MAP[param.typ] == 'file':
                    reference['fileId'] = str(value['_id'])
                    itemId = value['itemId']
                reference['itemId'] = str(itemId)

    # optional params
    for param in opt_params:
        if param.channel == 'output':
            ca.extend(_add_optional_output_param(param, params, user, result_hooks, reference))
        else:
            ca.extend(_add_optional_input_param(param, params, user, token))

    if has_simple_return_file:
        param_id = return_parameter_file_name + FOLDER_SUFFIX
        param_name_id = return_parameter_file_name
        if param_id in params and param_name_id in params:
            value = params[return_parameter_file_name]
            folder = params[return_parameter_file_name + FOLDER_SUFFIX]

            folderModel = ModelImporter.model('folder')
            instance = folderModel.load(folder, level=AccessType.WRITE, user=user)
            if not instance:
                raise RestException('Invalid Folder id (%s).' % (str(folder)))

            ca.append('--returnparameterfile')

            # Output Binding
            path = VolumePath(value)
            ca.append(path)
            ref = reference.copy()
            ref['identifier'] = 'returnparameterfile'
            result_hooks.append(GirderUploadVolumePathToFolder(
                path, folder, upload_kwargs={'reference': json.dumps(ref)}))

    # indexed params
    for param in index_params:
        if param.channel == 'output':
            ca.append(_add_indexed_output_param(param, params, user, result_hooks, reference))
        else:
            arg, name = _add_indexed_input_param(param, params, user, token)
            ca.append(arg)
            if name and not primary_input_name:
                primary_input_name = name

    return ca, result_hooks, primary_input_name
Example #3
def _add_optional_output_param(param, args, user, result_hooks):
    if not param.isExternalType() or not is_on_girder(param) \
       or param.identifier() not in args or \
       (param.identifier() + FOLDER_SUFFIX) not in args:
        return []
    value = args[param.identifier()]
    folder = args[param.identifier() + FOLDER_SUFFIX]

    container_args = []
    if param.longflag:
        container_args.append(param.longflag)
    elif param.flag:
        container_args.append(param.flag)
    else:
        return []

    folderModel = ModelImporter.model('folder')
    instance = folderModel.load(folder, level=AccessType.WRITE, user=user)
    if not instance:
        raise RestException('Invalid Folder id (%s).' % (str(folder)))

    # Output Binding !!
    path = VolumePath(value)
    container_args.append(path)
    result_hooks.append(GirderUploadVolumePathToFolder(path, folder))

    return container_args
Example #4
def actual_file():
    vp = os.path.dirname(__file__)
    path = os.path.join(vp, 'foo.txt')
    open(path, 'a').close()
    volume = BindMountVolume(vp, vp)
    yield path, VolumePath('foo.txt', volume=volume)
    os.unlink(path)
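
The generator above pairs a real file on the host with a VolumePath bound to a BindMountVolume, and deletes the file after the yield. A hedged sketch of how it might be consumed as a pytest fixture; the decorator and the test below are assumptions, not part of the original:

import pytest


@pytest.fixture
def actual_file_fixture():
    # Delegate to the generator so setup and teardown both run
    yield from actual_file()


def test_volume_path_matches_host_file(actual_file_fixture):
    host_path, volume_path = actual_file_fixture
    # With no task-data argument, transform() yields the
    # container-side path of the mounted file
    assert volume_path.transform().endswith('foo.txt')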
Example #5
    def run_dream3d(self, params):
        """Run Dream3D on a folder that is on girder.

        Will store the output in the specified output folder.
        """
        inputFolderId = params.get('inputFolderId')
        outputFolderId = params.get('outputFolderId')
        folder_name = 'workingDir'
        volume = GirderFolderIdToVolume(inputFolderId,
                                        volume=TemporaryVolume.default,
                                        folder_name=folder_name)
        outputDir = inputFolderId + '/' + folder_name + '/output'
        volumepath = VolumePath(outputDir, volume=TemporaryVolume.default)
        result = docker_run.delay(
            DREAM3D_IMAGE,
            pull_image=False,
            container_args=[
                '-c', 'bash /root/runPipelineRunner $(ls *.json | head -1)'
            ],
            remove_container=True,
            working_dir=volume,
            entrypoint='bash',
            girder_result_hooks=[
                GirderUploadVolumePathToFolder(volumepath, outputFolderId)
            ])

        # Set the multiscale meta data and return the job
        jobId = result.job['_id']
        return utils.setMultiscaleMetaData(jobId, inputFolderId,
                                           outputFolderId)
Example #6
    def run_albany(self, params):
        """Run albany on a folder that is on girder.

        Will store the output in the specified output folder.
        """
        inputFolderId = params.get('inputFolderId')
        outputFolderId = params.get('outputFolderId')
        filename = 'input.yaml'
        folder_name = 'workingDir'
        volume = GirderFolderIdToVolume(inputFolderId,
                                        volume=TemporaryVolume.default,
                                        folder_name=folder_name)
        outputDir = inputFolderId + '/' + folder_name + '/output.exo'
        volumepath = VolumePath(outputDir, volume=TemporaryVolume.default)
        result = docker_run.delay(ALBANY_IMAGE,
                                  pull_image=False,
                                  container_args=[filename],
                                  entrypoint='/usr/local/albany/bin/AlbanyT',
                                  remove_container=True,
                                  working_dir=volume,
                                  girder_result_hooks=[
                                      GirderUploadVolumePathToFolder(
                                          volumepath, outputFolderId)
                                  ])

        # Set the multiscale meta data and return the job
        jobId = result.job['_id']
        return utils.setMultiscaleMetaData(jobId, inputFolderId,
                                           outputFolderId)
Example #7
def test_GirderUploadVolumePathToItem_transform_accepts_ObjectId(
        mock_gc, bogus_volume):
    vp = VolumePath('test', bogus_volume)
    hash = '5a5fc09ec2231b9487ce42db'
    GirderUploadVolumePathToItem(vp, ObjectId(hash), gc=mock_gc).transform()
    mock_gc.uploadFileToItem.assert_called_once_with(
        hash, os.path.join(BOGUS_CONTAINER_PATH, 'test'))
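
The test examples on this page refer to mock_gc, bogus_volume, BOGUS_HOST_PATH and BOGUS_CONTAINER_PATH without defining them. A plausible reconstruction, assuming they mirror girder_worker's own test suite (details may differ); module-level objects are used so they also work inside @pytest.mark.parametrize arguments, as in the final example:

from unittest import mock

import pytest
from girder_worker.docker.transforms import BindMountVolume

BOGUS_HOST_PATH = '/bogus/host/path'
BOGUS_CONTAINER_PATH = '/bogus/container/path'

bogus_volume = BindMountVolume(BOGUS_HOST_PATH, BOGUS_CONTAINER_PATH)


def mock_gc():
    # Stand-in for an authenticated girder_client.GirderClient
    return mock.MagicMock()


@pytest.fixture(name='mock_gc')
def mock_gc_fixture():
    return mock_gc()


@pytest.fixture(name='bogus_volume')
def bogus_volume_fixture():
    return bogus_volume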
Example #8
def _runUtm(folder, paramsFile, outputFolder):
    outpath = VolumePath('__results__')
    return docker_run.delay('samuelgerber/utm', container_args=[
        GirderFolderIdToVolume(folder['_id']),
        GirderFileIdToVolume(paramsFile['_id']),
        '--workingfolder', outpath
    ], girder_job_title='UTM: ' + folder['name'], girder_result_hooks=[
        GirderUploadVolumePathToFolder(outpath, outputFolder['_id'])
    ]).job
Example #9
    def test_docker_run_mount_idiomatic_volume(self, params):
        fixture_dir = params.get('fixtureDir')
        filename = 'read.txt'
        mount_dir = '/mnt/test'
        mount_path = os.path.join(mount_dir, filename)
        volume = BindMountVolume(fixture_dir, mount_path, 'ro')
        volumepath = VolumePath(filename, volume)

        result = docker_run.delay(
            TEST_IMAGE, pull_image=True, container_args=['read', '-p', volumepath],
            remove_container=True, volumes=[volume])

        return result.job
Example #10
    def test_docker_run_file_upload_to_item(self, params):
        item_id = params.get('itemId')
        contents = params.get('contents')

        volumepath = VolumePath('test_file')

        result = docker_run.delay(
            TEST_IMAGE, pull_image=True,
            container_args=['write', '-p', volumepath, '-m', contents],
            remove_container=True,
            girder_result_hooks=[GirderUploadVolumePathToItem(volumepath, item_id)])

        return result.job
Example #11
def _add_indexed_output_param(param, args, user, result_hooks):
    value = args[param.identifier()]
    folder = args[param.identifier() + FOLDER_SUFFIX]

    folderModel = ModelImporter.model('folder')
    instance = folderModel.load(folder, level=AccessType.WRITE, user=user)
    if not instance:
        raise RestException('Invalid Folder id (%s).' % (str(folder)))

    # Output Binding !!
    path = VolumePath(value)
    result_hooks.append(GirderUploadVolumePathToFolder(path, folder))
    return path
Example #12
def prepare_task(params, user, token, index_params, opt_params,
                 has_simple_return_file):
    ca = []
    result_hooks = []
    primary_input_name = None

    # optional params
    for param in opt_params:
        if param.channel == 'output':
            ca.extend(
                _add_optional_output_param(param, params, user, result_hooks))
        else:
            ca.extend(_add_optional_input_param(param, params, user, token))

    if has_simple_return_file:
        param_id = return_parameter_file_name + FOLDER_SUFFIX
        param_name_id = return_parameter_file_name
        if param_id in params and param_name_id in params:
            value = params[return_parameter_file_name]
            folder = params[return_parameter_file_name + FOLDER_SUFFIX]

            folderModel = ModelImporter.model('folder')
            instance = folderModel.load(folder,
                                        level=AccessType.WRITE,
                                        user=user)
            if not instance:
                raise RestException('Invalid Folder id (%s).' % (str(folder)))

            ca.append('--returnparameterfile')

            # Output Binding !!
            path = VolumePath(value)
            ca.append(path)
            result_hooks.append(GirderUploadVolumePathToFolder(path, folder))

    # indexed params
    for param in index_params:
        if param.channel == 'output':
            ca.append(
                _add_indexed_output_param(param, params, user, result_hooks))
        else:
            arg, name = _add_indexed_input_param(param, params, user, token)
            ca.append(arg)
            if name and not primary_input_name:
                primary_input_name = name

    return ca, result_hooks, primary_input_name
Example #13
    def createOrthorectifyTask(imageFile, rpcFile):
        # Set output file name based on input file name
        orthoName = os.path.splitext(imageFile['name'])[0] + '_ortho.tif'
        outputVolumePath = VolumePath(orthoName)

        # Docker container arguments
        containerArgs = [
            'danesfield/tools/orthorectify.py',
            # Source image
            GirderFileIdToVolume(imageFile['_id'], gc=gc),
            # DSM
            GirderFileIdToVolume(dsmFile['_id'], gc=gc),
            # Destination image
            outputVolumePath,
            '--dtm',
            GirderFileIdToVolume(dtmFile['_id'], gc=gc),
            '--raytheon-rpc',
            GirderFileIdToVolume(rpcFile['_id'], gc=gc),
        ]
        if occlusionThreshold is not None:
            containerArgs.extend(
                ['--occlusion-thresh',
                 str(occlusionThreshold)])
        if denoiseRadius is not None:
            containerArgs.extend(['--denoise-radius', str(denoiseRadius)])

        # Result hooks
        # - Upload output files to output folder
        # - Provide upload metadata
        upload_kwargs = createUploadMetadata(jobId, stepName)
        resultHooks = [
            GirderUploadVolumePathToFolder(outputVolumePath,
                                           outputFolder['_id'],
                                           upload_kwargs=upload_kwargs,
                                           gc=gc)
        ]

        return docker_run.s(
            **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                       containerArgs=containerArgs,
                                       jobTitle=('[%s] Orthorectify: %s' %
                                                 (initWorkingSetName,
                                                  imageFile['name'])),
                                       jobType=stepName,
                                       user=requestInfo.user,
                                       resultHooks=resultHooks))
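
Unlike the docker_run.delay(...) calls in earlier examples, the Danesfield helpers in this and the following examples return docker_run.s(...), a Celery signature meant to be composed into a larger workflow. A hedged sketch of fanning such signatures out in parallel; the surrounding workflow code is an assumption:

from celery import group


def runAllOrthorectifyTasks(imagePairs):
    # imagePairs: iterable of (imageFile, rpcFile) Girder file documents
    tasks = [createOrthorectifyTask(imageFile, rpcFile)
             for imageFile, rpcFile in imagePairs]
    # Schedule every orthorectification task in parallel
    return group(tasks).delay()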
Example #14
    def createConvertMsiToRgbTask(prefix, imageFile):
        # Set output file name based on prefix
        outputName = prefix + '_rgb_byte_image.tif'
        outputVolumePath = VolumePath(outputName)

        # Docker container arguments
        containerArgs = [
            'danesfield/tools/msi_to_rgb.py',
            # Pansharpened MSI image
            GirderFileIdToVolume(imageFile['_id'], gc=gc),
            # Output image
            outputVolumePath
        ]
        # Enable byte option by default
        if byte or byte is None:
            containerArgs.append('--byte')
        if alpha:
            containerArgs.append('--alpha')
        if rangePercentile is not None:
            containerArgs.extend(['--range-percentile', str(rangePercentile)])
        # TODO: Handle --big option (i.e. BIGTIFF)

        # Result hooks
        # - Upload output files to output folder
        # - Provide upload metadata
        upload_kwargs = createUploadMetadata(jobId, stepName)
        resultHooks = [
            GirderUploadVolumePathToFolder(
                outputVolumePath,
                outputFolder['_id'],
                upload_kwargs=upload_kwargs,
                gc=gc)
        ]

        return docker_run.s(
            **createDockerRunArguments(
                image=DockerImage.DANESFIELD,
                containerArgs=containerArgs,
                jobTitle=('[%s] Convert MSI to RGB: %s' %
                          (initWorkingSetName, prefix)),
                jobType=stepName,
                user=requestInfo.user,
                resultHooks=resultHooks
            )
        )
Example #15
    def createCropAndPansharpenTask(prefix,
                                    msiImageFile,
                                    panImageFile,
                                    msiRpcFile=None,
                                    panRpcFile=None):
        # Set output directory
        outputVolumePath = VolumePath('__output__')

        containerArgs = [
            'danesfield/tools/crop_and_pansharpen.py',
            GirderFileIdToVolume(dsmFile['_id'], gc=gc), outputVolumePath,
            '--pan',
            GirderFileIdToVolume(panImageFile['_id'], gc=gc)
        ]
        if panRpcFile is not None:
            containerArgs.append(GirderFileIdToVolume(panRpcFile['_id'],
                                                      gc=gc))

        containerArgs.extend(
            ['--msi',
             GirderFileIdToVolume(msiImageFile['_id'], gc=gc)])
        if msiRpcFile is not None:
            containerArgs.append(GirderFileIdToVolume(msiRpcFile['_id'],
                                                      gc=gc))

        # Result hooks
        # - Upload output files to output folder
        # - Provide upload metadata
        upload_kwargs = createUploadMetadata(jobId, stepName)
        resultHooks = [
            GirderUploadVolumePathToFolder(outputVolumePath,
                                           outputFolder['_id'],
                                           upload_kwargs=upload_kwargs,
                                           gc=gc)
        ]

        return docker_run.s(**createDockerRunArguments(
            image=DockerImage.DANESFIELD,
            containerArgs=containerArgs,
            jobTitle='[%s] Crop and pansharpen: %s' %
            (initWorkingSetName, prefix),
            jobType=stepName,
            user=requestInfo.user,
            resultHooks=resultHooks))
Example #16
    def createPansharpenTask(prefix, panImageFile, msiImageFile):
        # Set output file name based on prefix
        outputName = prefix + '_ortho_pansharpened.tif'
        outputVolumePath = VolumePath(outputName)

        # Docker container arguments
        containerArgs = [
            'gdal_pansharpen.py',
            # PAN image
            GirderFileIdToVolume(panImageFile['_id'], gc=gc),
            # MSI image
            GirderFileIdToVolume(msiImageFile['_id'], gc=gc),
            # Output image
            outputVolumePath
        ]

        # Result hooks
        # - Upload output files to output folder
        # - Provide upload metadata
        upload_kwargs = createUploadMetadata(jobId, stepName)
        resultHooks = [
            GirderUploadVolumePathToFolder(
                outputVolumePath,
                outputFolder['_id'],
                upload_kwargs=upload_kwargs,
                gc=gc)
        ]

        return docker_run.s(
            **createDockerRunArguments(
                image=DockerImage.DANESFIELD,
                containerArgs=containerArgs,
                jobTitle='[%s] Pansharpen: %s' % (initWorkingSetName, prefix),
                jobType=stepName,
                user=requestInfo.user,
                resultHooks=resultHooks
            )
        )
Example #17
def _createThumbnail(item, preset):
    # Remove previously attached thumbnails
    _removeThumbnails(item, saveItem=True)

    outdir = VolumePath('__thumbnails_output__')
    return docker_run.delay(
        'zachmullen/3d_thumbnails:latest',
        container_args=[
            '--angle-step', str(_ANGLE_STEP),
            '--width', str(_SIZE),
            '--height', str(_SIZE),
            '--preset', preset,
            GirderItemIdToVolume(item['_id'], item_name=item['name']),
            outdir
        ],
        girder_job_title='Interactive thumbnail generation: %s' % item['name'],
        girder_result_hooks=[
            GirderUploadVolumePathToItem(
                outdir,
                item['_id'],
                upload_kwargs={
                    'reference': json.dumps({'interactive_thumbnail': True})
                })
        ]).job
Example #18
def _createThumbnail(item):
    # Remove previously attached thumbnails
    _removeThumbnails(item, saveItem=True)

    outdir = VolumePath('__thumbnails_output__')
    return docker_run.delay(
        'girder/dicom_thumbnailer:latest',
        container_args=[
            '--slices', str(THUMB_SLICES),
            '--width', str(THUMB_WIDTH),
            '--height', str(THUMB_HEIGHT),
            GirderItemIdToVolume(item['_id'], item_name=item['name']),
            outdir
        ],
        girder_job_title='DICOM thumbnail generation: %s' % item['name'],
        girder_result_hooks=[
            GirderUploadVolumePathToItem(
                outdir,
                item['_id'],
                upload_kwargs={
                    'reference': json.dumps({'interactive_thumbnail': True})
                })
        ]).job
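
In Examples #17 and #18 the 'reference' string in upload_kwargs is what lets server-side code recognize the uploaded files as interactive thumbnails. A hedged sketch of a listener for that metadata; the event name follows Girder's upload-finalization hook, and the handler body is an assumption:

import json

from girder import events


def _onUploadFinalized(event):
    upload = event.info.get('upload', {})
    try:
        reference = json.loads(upload.get('reference') or '')
    except ValueError:
        return
    if isinstance(reference, dict) and reference.get('interactive_thumbnail'):
        # Attach the uploaded file to its parent item as a thumbnail
        pass


events.bind('model.file.finalizeUpload.after', 'thumbnails',
            _onUploadFinalized)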
Example #19
    def run_smtk_mesh_placement(self, params):
        """Run an smtk mesh placement on a folder that is on girder.

        Will store the output in the specified output folder.
        """
        inputFolderId = params.get('inputFolderId')
        outputFolderId = params.get('outputFolderId')
        folder_name = 'workingDir'
        volume = GirderFolderIdToVolume(inputFolderId,
                                        volume=TemporaryVolume.default,
                                        folder_name=folder_name)
        outputDir = inputFolderId + '/' + folder_name + '/output/'
        volumepath = VolumePath(outputDir, volume=TemporaryVolume.default)
        result = docker_run.delay(
            SMTK_IMAGE,
            pull_image=False,
            container_args=[
                '-c',
                ('. ~/setupEnvironment; '
                 'python /usr/local/afrl-automation/runner.py input.json; '
                 'mkdir output; '
                 'mv input.yaml output/; '
                 'mv elastic.yaml output/;'
                 'mv *BC.exo output/')
            ],
            entrypoint='bash',
            remove_container=True,
            working_dir=volume,
            girder_result_hooks=[
                GirderUploadVolumePathToFolder(volumepath, outputFolderId)
            ])

        # Set the multiscale meta data and return the job
        jobId = result.job['_id']
        return utils.setMultiscaleMetaData(jobId, inputFolderId,
                                           outputFolderId)
Example #20
def test_GirderUploadJobArtifact_file_not_found(mock_gc, bogus_volume):
    vp = VolumePath('test', bogus_volume)
    GirderUploadVolumePathJobArtifact(vp, job_id='123', gc=mock_gc).transform()
    mock_gc.post.assert_not_called()
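
GirderUploadVolumePathJobArtifact silently skips the upload when the path was never created, which is what the test above asserts. A hedged usage sketch mirroring the hook that is commented out in Example #1; the image name is made up:

from girder_worker.docker.tasks import docker_run
from girder_worker.docker.transforms import VolumePath
from girder_worker.docker.transforms.girder import (
    GirderUploadVolumePathJobArtifact)


def run_with_artifacts(job_title):
    artifact_dir = VolumePath('job_artifacts')
    return docker_run.delay(
        'some/image:latest',  # hypothetical image
        container_args=['--artifacts-dir', artifact_dir],
        girder_job_title=job_title,
        girder_result_hooks=[
            # Attach whatever the container wrote under artifact_dir
            # to the job itself, if the directory exists
            GirderUploadVolumePathJobArtifact(artifact_dir),
        ]).job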
Example #21
def runMetrics(initWorkingSetName, stepName, requestInfo, jobId, outputFolder,
               referenceFolder, referencePrefix, dtmFile, dsmFile, clsFile,
               mtlFile):
    """
    Run a Girder Worker job to compute metrics on output files.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param referenceFolder: Reference folder document.
    :type referenceFolder: dict
    :param referencePrefix: Reference file prefix.
    :type referencePrefix: str
    :param dtmFile: DTM file document.
    :type dtmFile: dict
    :param dsmFile: DSM file document.
    :type dsmFile: dict
    :param clsFile: CLS file document.
    :type clsFile: dict
    :param mtlFile: MTL file document.
    :type mtlFile: dict
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    if referencePrefix == "STANDARD":
        # We know that there's no reference data with this selection
        containerArgs = ['echo', 'No ground truth selected for scoring']

        asyncResult = docker_run.delay(
            **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                       containerArgs=containerArgs,
                                       jobTitle='[%s] Run metrics' %
                                       initWorkingSetName,
                                       jobType=stepName,
                                       user=requestInfo.user))
    else:
        # Otherwise we assume the reference data exists, and try to
        # run the metrics
        outputVolumePath = VolumePath('__output__')

        # Docker container arguments
        containerArgs = [
            'danesfield/tools/run_metrics.py',
            '--output-dir', outputVolumePath,
            '--ref-dir', GirderFolderIdToVolume(referenceFolder['_id'], gc=gc),
            '--ref-prefix', referencePrefix,
            '--dsm', GirderFileIdToVolume(dsmFile['_id'], gc=gc),
            '--cls', GirderFileIdToVolume(clsFile['_id'], gc=gc),
            '--mtl', GirderFileIdToVolume(mtlFile['_id'], gc=gc),
            '--dtm', GirderFileIdToVolume(dtmFile['_id'], gc=gc)
        ]

        # Result hooks
        # - Upload output files to output folder
        # - Provide upload metadata
        upload_kwargs = createUploadMetadata(jobId, stepName)
        resultHooks = [
            GirderUploadVolumePathToFolder(outputVolumePath,
                                           outputFolder['_id'],
                                           upload_kwargs=upload_kwargs,
                                           gc=gc)
        ]

        asyncResult = docker_run.delay(
            **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                       containerArgs=containerArgs,
                                       jobTitle='[%s] Run metrics' %
                                       initWorkingSetName,
                                       jobType=stepName,
                                       user=requestInfo.user,
                                       resultHooks=resultHooks))

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #22
def generatePointCloud(initWorkingSetName,
                       stepName,
                       requestInfo,
                       jobId,
                       outputFolder,
                       imageFiles,
                       longitude,
                       latitude,
                       longitudeWidth,
                       latitudeWidth):
    """
    Run a Girder Worker job to generate a 3D point cloud from 2D images.

    Requirements:
    - P3D Girder Worker Docker image is available on host
    - Host folder /mnt/GTOPO30 contains GTOPO 30 data

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param imageFiles: List of input image files.
    :type imageFiles: list[dict]
    :param longitude: Longitude of the region of interest.
    :type longitude: float
    :param latitude: Latitude of the region of interest.
    :type latitude: float
    :param longitudeWidth: Width of the region, in degrees of longitude.
    :type longitudeWidth: float
    :param latitudeWidth: Height of the region, in degrees of latitude.
    :type latitudeWidth: float
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Docker volumes
    volumes = [
        BindMountVolume(host_path='/mnt/GTOPO30',
                        container_path='/P3D/GTOPO30',
                        mode='ro')
    ]

    outputVolumePath = VolumePath('__output__')

    # Docker container arguments
    # TODO: Consider a solution where args are written to a file, in
    # case of very long command lines
    containerArgs = list(itertools.chain(
        [
            'python', '/P3D/RTN_distro/scripts/generate_point_cloud.pyc',
            '--out', outputVolumePath,
            '--longitude', str(longitude),
            '--latitude', str(latitude),
            '--longitudeWidth', str(longitudeWidth),
            '--latitudeWidth', str(latitudeWidth),
            '--firstProc', '0',
            '--threads', '8',
            '--images'
        ],
        [GirderFileIdToVolume(imageFile['_id'], gc=gc)
         for imageFile in imageFiles],
    ))

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(
            outputVolumePath,
            outputFolder['_id'],
            upload_kwargs=upload_kwargs,
            gc=gc)
    ]

    asyncResult = docker_run.delay(
        volumes=volumes,
        **createDockerRunArguments(
            image=DockerImage.P3D,
            containerArgs=containerArgs,
            jobTitle='[%s] Generate point cloud' % initWorkingSetName,
            jobType=stepName,
            user=requestInfo.user,
            resultHooks=resultHooks
        )
    )

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #23
def generateDsm(initWorkingSetName, stepName, requestInfo, jobId, outputFolder,
                pointCloudFile, outputPrefix):
    """
    Run a Girder Worker job to generate a Digital Surface Model (DSM)
    from a point cloud.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param pointCloudFile: Point cloud file document.
    :type pointCloudFile: dict
    :param outputPrefix: The prefix of the output file name.
    :type outputPrefix: str
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Set output file name based on point cloud file
    dsmName = outputPrefix + '_P3D_DSM.tif'
    outputVolumePath = VolumePath(dsmName)

    # Docker container arguments
    containerArgs = [
        'danesfield/tools/generate_dsm.py', outputVolumePath,
        '--source_points',
        GirderFileIdToVolume(pointCloudFile['_id'], gc=gc)
    ]

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(outputVolumePath,
                                       outputFolder['_id'],
                                       upload_kwargs=upload_kwargs,
                                       gc=gc)
    ]

    asyncResult = docker_run.delay(
        **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                   containerArgs=containerArgs,
                                   jobTitle=('[%s] Generate DSM: %s' %
                                             (initWorkingSetName,
                                              pointCloudFile['name'])),
                                   jobType=stepName,
                                   user=requestInfo.user,
                                   resultHooks=resultHooks))

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #24
def getRoadVector(initWorkingSetName,
                  stepName,
                  requestInfo,
                  jobId,
                  outputFolder,
                  left,
                  bottom,
                  right,
                  top):
    """
    Run a Girder Worker job to fetch OSM road vector data for a bounding box.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param left: Longitude of left / westernmost side of bounding box
    :type left: float
    :param bottom: Latitude of bottom / southernmost side of bounding box
    :type bottom: float
    :param right: Longitude of right / easternmost side of bounding box
    :type right: float
    :param top: Latitude of top / northernmost side of bounding box
    :type top: float
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Set output file names
    outputVolumePath = VolumePath('__output__')

    # Docker container arguments
    containerArgs = [
        'danesfield/tools/get_road_vector.py',
        '--left', str(left),
        '--bottom', str(bottom),
        '--right', str(right),
        '--top', str(top),
        '--output-dir', outputVolumePath,
    ]

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(
            outputVolumePath,
            outputFolder['_id'],
            upload_kwargs=upload_kwargs,
            gc=gc)
    ]

    asyncResult = docker_run.delay(
        **createDockerRunArguments(
            image=DockerImage.DANESFIELD,
            containerArgs=containerArgs,
            jobTitle='[%s] Get OSM road vector data' % initWorkingSetName,
            jobType=stepName,
            user=requestInfo.user,
            resultHooks=resultHooks
        )
    )

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #25
def computeNdvi(initWorkingSetName, stepName, requestInfo, jobId, outputFolder,
                imageFiles, outputNdviFilename):
    """
    Run a Girder Worker job to compute the NDVI from a set of images.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param imageFiles: List of pansharpened image files.
    :type imageFiles: list[dict]
    :param outputNdviFilename: Filename for the output NDVI file.
    :type outputNdviFilename: str
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Set output file names
    ndviOutputVolumePath = VolumePath(outputNdviFilename)

    # Docker container arguments
    containerArgs = list(
        itertools.chain([
            'danesfield/tools/compute_ndvi.py',
        ], [
            GirderFileIdToVolume(imageFile['_id'], gc=gc)
            for imageFile in imageFiles
        ], [ndviOutputVolumePath]))

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(ndviOutputVolumePath,
                                       outputFolder['_id'],
                                       upload_kwargs=upload_kwargs,
                                       gc=gc)
    ]

    asyncResult = docker_run.delay(
        **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                   containerArgs=containerArgs,
                                   jobTitle='[%s] Compute NDVI' %
                                   initWorkingSetName,
                                   jobType=stepName,
                                   user=requestInfo.user,
                                   resultHooks=resultHooks))

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #26
def test_GirderUploadVolumePathToItem_transform_calls_gc_uploadFile(
        mock_gc, bogus_volume):
    vp = VolumePath('test', bogus_volume)
    GirderUploadVolumePathToItem(vp, 'BOGUS_ITEM_ID', gc=mock_gc).transform()
    mock_gc.uploadFileToItem.assert_called_once_with(
        'BOGUS_ITEM_ID', os.path.join(BOGUS_CONTAINER_PATH, 'test'))
Example #27
def test_girderUploadVolumePathToItem_transform_returns_item_id(
        mock_gc, bogus_volume):
    vp = VolumePath('test', bogus_volume)

    guvpti = GirderUploadVolumePathToItem(vp, 'BOGUS_ITEM_ID', gc=mock_gc)
    assert guvpti.transform() == 'BOGUS_ITEM_ID'
Example #28
def test_VolumePath_transform_returns_host_path_with_args(bogus_volume):
    vp = VolumePath('test', bogus_volume)
    assert vp.transform('TASK_DATA') == os.path.join(BOGUS_HOST_PATH, 'test')
Example #29
def test_VolumePath_transform_returns_container_path_with_no_args(
        bogus_volume):
    vp = VolumePath('test', bogus_volume)
    assert vp.transform() == os.path.join(BOGUS_CONTAINER_PATH, 'test')
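
The two tests above capture VolumePath's dual resolution: with task-data arguments, transform() returns the host-side path (used when girder_worker uploads results); with no arguments, it returns the container-side path (what ends up on the container's command line). Restated against the bogus volume sketched in the fixtures earlier on this page:

vp = VolumePath('test', bogus_volume)
vp.transform('TASK_DATA')  # host side:      BOGUS_HOST_PATH + '/test'
vp.transform()             # container side: BOGUS_CONTAINER_PATH + '/test'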

Example #30
@pytest.mark.parametrize('obj,expected_repr', (
    (VolumePath('test', bogus_volume),
     '<girder_worker.docker.transforms.VolumePath: "test">'),
    (GirderFileIdToVolume('123', gc=mock_gc()),
     '<girder_worker.docker.transforms.girder.GirderFileIdToVolume: File ID=123>'),
    (GirderFileIdToVolume('123', filename='foo.txt', gc=mock_gc()),
     '<girder_worker.docker.transforms.girder.GirderFileIdToVolume: File ID=123 -> "foo.txt">')))
def test_docker_repr_models(obj, expected_repr):
    assert obj._repr_model_() == expected_repr