Example #1
def test_GirderFolderIdToVolume(mock_gc, patch_makedirs, bogus_volume):
    gfitv = GirderFolderIdToVolume('BOGUS_ID', volume=bogus_volume, folder_name='f', gc=mock_gc)
    assert gfitv.transform() == os.path.join(BOGUS_CONTAINER_PATH, 'BOGUS_ID', 'f')
    patch_makedirs.assert_called_once_with(os.path.join(BOGUS_HOST_PATH, 'BOGUS_ID', 'f'))
    with mock.patch('girder_worker.docker.transforms.girder.shutil.rmtree') as mock_rmtree:
        gfitv.cleanup()
        mock_rmtree.assert_called_once_with(
            os.path.join(BOGUS_HOST_PATH, 'BOGUS_ID', 'f'), ignore_errors=True)
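
The fixtures this test relies on (mock_gc, patch_makedirs, bogus_volume) are not shown in the snippet. Below is a minimal sketch of how they could be defined; the constants and the patch target are assumptions for illustration, not code from the girder_worker test suite.

import mock
import pytest

from girder_worker.docker.transforms import BindMountVolume

BOGUS_HOST_PATH = '/bogus/host/path'
BOGUS_CONTAINER_PATH = '/bogus/container/path'


@pytest.fixture
def mock_gc():
    # Stand-in GirderClient; the transform only needs a real one at task runtime.
    return mock.MagicMock()


@pytest.fixture
def patch_makedirs():
    # Assumed patch target: the os module as imported by the girder transforms.
    with mock.patch('girder_worker.docker.transforms.girder.os.makedirs') as m:
        yield m


@pytest.fixture
def bogus_volume():
    # A host path bind-mounted into the container at a fixed path.
    return BindMountVolume(BOGUS_HOST_PATH, BOGUS_CONTAINER_PATH)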
Example #2
    def run_dream3d(self, params):
        """Run Dream3D on a folder that is on girder.

        Will store the output in the specified output folder.
        """
        inputFolderId = params.get('inputFolderId')
        outputFolderId = params.get('outputFolderId')
        folder_name = 'workingDir'
        volume = GirderFolderIdToVolume(inputFolderId,
                                        volume=TemporaryVolume.default,
                                        folder_name=folder_name)
        # Path inside the temporary volume where the tool writes its results.
        outputDir = inputFolderId + '/' + folder_name + '/output'
        volumepath = VolumePath(outputDir, volume=TemporaryVolume.default)
        result = docker_run.delay(
            DREAM3D_IMAGE,
            pull_image=False,
            container_args=[
                '-c', 'bash /root/runPipelineRunner $(ls *.json | head -1)'
            ],
            remove_container=True,
            working_dir=volume,
            entrypoint='bash',
            girder_result_hooks=[
                GirderUploadVolumePathToFolder(volumepath, outputFolderId)
            ])

        # Set the multiscale meta data and return the job
        jobId = result.job['_id']
        return utils.setMultiscaleMetaData(jobId, inputFolderId,
                                           outputFolderId)
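
The example above follows the common girder_worker pattern: download the input folder into the default temporary volume, point the container's working directory at it, and register a result hook that uploads an output subdirectory back to Girder. A condensed sketch of just that skeleton, with a placeholder image name and caller-supplied IDs:

from girder_worker.docker.tasks import docker_run
from girder_worker.docker.transforms import TemporaryVolume, VolumePath
from girder_worker.docker.transforms.girder import (
    GirderFolderIdToVolume, GirderUploadVolumePathToFolder)


def run_in_container(input_folder_id, output_folder_id,
                     image='example/image:latest'):
    # Resolves to the in-container path of the downloaded folder.
    workdir = GirderFolderIdToVolume(input_folder_id,
                                     volume=TemporaryVolume.default,
                                     folder_name='workingDir')
    # The tool is expected to write results under ./output in the working
    # directory; everything under this path gets uploaded afterwards.
    output = VolumePath(input_folder_id + '/workingDir/output',
                        volume=TemporaryVolume.default)
    return docker_run.delay(
        image,
        container_args=[],
        working_dir=workdir,
        remove_container=True,
        girder_result_hooks=[
            GirderUploadVolumePathToFolder(output, output_folder_id)
        ]).job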
Example #3
    def run_albany(self, params):
        """Run albany on a folder that is on girder.

        Will store the output in the specified output folder.
        """
        inputFolderId = params.get('inputFolderId')
        outputFolderId = params.get('outputFolderId')
        filename = 'input.yaml'
        folder_name = 'workingDir'
        volume = GirderFolderIdToVolume(inputFolderId,
                                        volume=TemporaryVolume.default,
                                        folder_name=folder_name)
        outputDir = inputFolderId + '/' + folder_name + '/output.exo'
        volumepath = VolumePath(outputDir, volume=TemporaryVolume.default)
        result = docker_run.delay(ALBANY_IMAGE,
                                  pull_image=False,
                                  container_args=[filename],
                                  entrypoint='/usr/local/albany/bin/AlbanyT',
                                  remove_container=True,
                                  working_dir=volume,
                                  girder_result_hooks=[
                                      GirderUploadVolumePathToFolder(
                                          volumepath, outputFolderId)
                                  ])

        # Set the multiscale meta data and return the job
        jobId = result.job['_id']
        return utils.setMultiscaleMetaData(jobId, inputFolderId,
                                           outputFolderId)
Example #4
def _runUtm(folder, paramsFile, outputFolder):
    outpath = VolumePath('__results__')
    return docker_run.delay('samuelgerber/utm', container_args=[
        GirderFolderIdToVolume(folder['_id']),
        GirderFileIdToVolume(paramsFile['_id']),
        '--workingfolder', outpath
    ], girder_job_title='UTM: ' + folder['name'], girder_result_hooks=[
        GirderUploadVolumePathToFolder(outpath, outputFolder['_id'])
    ]).job
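
Here the transforms are passed directly inside container_args; each one is resolved to an in-container path before the container starts. A usage sketch, assuming folder, paramsFile, and outputFolder are Girder documents loaded elsewhere:

job = _runUtm(folder, paramsFile, outputFolder)
print('Queued job %s with status %s' % (job['_id'], job.get('status')))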
Example #5
def _to_file_volume(param, model):
    girder_type = SLICER_TYPE_TO_GIRDER_MODEL_MAP[param.typ]

    if girder_type == 'folder':
        return GirderFolderIdToVolume(model['_id'], folder_name=model['name'])
    elif girder_type == 'item':
        return GirderItemIdToVolume(model['_id'])

    # Otherwise the parameter refers to a file. Unless direct paths are
    # enabled, always download the file into the volume.
    if not Setting().get(PluginSettings.DIRECT_PATH):
        return GirderFileIdToVolume(model['_id'], filename=model['name'])

    # Direct paths enabled: bind-mount the file from its local assetstore
    # path when available, falling back to a download otherwise.
    try:
        path = File().getLocalFilePath(model)
        return DirectGirderFileIdToVolume(model['_id'],
                                          direct_file_path=path,
                                          filename=model['name'])
    except FilePathException:
        return GirderFileIdToVolume(model['_id'], filename=model['name'])
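
The DIRECT_PATH branch avoids copying a file into the temporary volume when its assetstore exposes a local path. A sketch of flipping that setting from server-side code; plugin_settings stands in for the PluginSettings class imported by the snippet:

from girder.models.setting import Setting


def set_direct_path(plugin_settings, enabled=True):
    # When the flag is on, _to_file_volume() bind-mounts local files
    # directly instead of downloading them into the temporary volume.
    Setting().set(plugin_settings.DIRECT_PATH, enabled)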
Example #6
    def run_smtk_mesh_placement(self, params):
        """Run an smtk mesh placement on a folder that is on girder.

        Will store the output in the specified output folder.
        """
        inputFolderId = params.get('inputFolderId')
        outputFolderId = params.get('outputFolderId')
        folder_name = 'workingDir'
        volume = GirderFolderIdToVolume(inputFolderId,
                                        volume=TemporaryVolume.default,
                                        folder_name=folder_name)
        outputDir = inputFolderId + '/' + folder_name + '/output/'
        volumepath = VolumePath(outputDir, volume=TemporaryVolume.default)
        result = docker_run.delay(
            SMTK_IMAGE,
            pull_image=False,
            container_args=[
                '-c',
                ('. ~/setupEnvironment; '
                 'python /usr/local/afrl-automation/runner.py input.json; '
                 'mkdir output; '
                 'mv input.yaml output/; '
                 'mv elastic.yaml output/; '
                 'mv *BC.exo output/')
            ],
            entrypoint='bash',
            remove_container=True,
            working_dir=volume,
            girder_result_hooks=[
                GirderUploadVolumePathToFolder(volumepath, outputFolderId)
            ])

        # Set the multiscale meta data and return the job
        jobId = result.job['_id']
        return utils.setMultiscaleMetaData(jobId, inputFolderId,
                                           outputFolderId)
Example #7
def runMetrics(initWorkingSetName, stepName, requestInfo, jobId, outputFolder,
               referenceFolder, referencePrefix, dtmFile, dsmFile, clsFile,
               mtlFile):
    """
    Run a Girder Worker job to compute metrics on output files.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param referenceFolder: Reference folder document.
    :type referenceFolder: dict
    :param referencePrefix: Reference file prefix.
    :type referencePrefix: str
    :param dtmFile: DTM file document.
    :type dtmFile: dict
    :param dsmFile: DSM file document.
    :type dsmFile: dict
    :param clsFile: CLS file document.
    :type clsFile: dict
    :param mtlFile: MTL file document.
    :type mtlFile: dict
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    if referencePrefix == 'STANDARD':
        # We know there is no reference data with this selection
        containerArgs = ['echo', 'No ground truth selected for scoring']

        asyncResult = docker_run.delay(
            **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                       containerArgs=containerArgs,
                                       jobTitle='[%s] Run metrics' %
                                       initWorkingSetName,
                                       jobType=stepName,
                                       user=requestInfo.user))
    else:
        # Otherwise we assume the reference data exists, and try to
        # run the metrics
        outputVolumePath = VolumePath('__output__')

        # Docker container arguments
        containerArgs = [
            'danesfield/tools/run_metrics.py', '--output-dir',
            outputVolumePath, '--ref-dir',
            GirderFolderIdToVolume(referenceFolder['_id'], gc=gc),
            '--ref-prefix', referencePrefix, '--dsm',
            GirderFileIdToVolume(dsmFile['_id'], gc=gc), '--cls',
            GirderFileIdToVolume(clsFile['_id'], gc=gc), '--mtl',
            GirderFileIdToVolume(mtlFile['_id'], gc=gc), '--dtm',
            GirderFileIdToVolume(dtmFile['_id'], gc=gc)
        ]

        # Result hooks
        # - Upload output files to output folder
        # - Provide upload metadata
        upload_kwargs = createUploadMetadata(jobId, stepName)
        resultHooks = [
            GirderUploadVolumePathToFolder(outputVolumePath,
                                           outputFolder['_id'],
                                           upload_kwargs=upload_kwargs,
                                           gc=gc)
        ]

        asyncResult = docker_run.delay(
            **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                       containerArgs=containerArgs,
                                       jobTitle='[%s] Run metrics' %
                                       initWorkingSetName,
                                       jobType=stepName,
                                       user=requestInfo.user,
                                       resultHooks=resultHooks))

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
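
createDockerRunArguments is a Danesfield helper that is not shown here; it presumably assembles the keyword arguments for docker_run, including girder_worker's reserved girder_job_* task options. A rough sketch of what it might look like; the exact fields are an assumption:

def createDockerRunArguments(image, containerArgs, jobTitle, jobType, user,
                             resultHooks=None, **extraKwargs):
    # girder_job_title, girder_job_type and girder_user are reserved
    # girder_worker task options; the rest mirrors the direct calls above.
    kwargs = {
        'image': image,
        'container_args': containerArgs,
        'girder_job_title': jobTitle,
        'girder_job_type': jobType,
        'girder_user': user,
        'remove_container': True,
    }
    if resultHooks is not None:
        kwargs['girder_result_hooks'] = resultHooks
    kwargs.update(extraKwargs)
    return kwargs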
Example #8
    def runPhotomorph(self, folder, maskRect):
        user = self.getCurrentUser()
        mp4Out = VolumePath('__output_mp4s__/')
        gifOut = VolumePath('__output_gifs__/')

        parent = Folder().load(folder['parentId'],
                               level=AccessType.WRITE,
                               exc=True,
                               user=user)
        outputFolder = Folder().createFolder(parent,
                                             '_output',
                                             public=False,
                                             creator=user,
                                             reuseExisting=True)
        outputMp4 = Folder().createFolder(outputFolder,
                                          'mp4s',
                                          public=False,
                                          creator=user,
                                          reuseExisting=True)
        outputGif = Folder().createFolder(outputFolder,
                                          'gifs',
                                          public=False,
                                          creator=user,
                                          reuseExisting=True)

        parent['photomorphOutputFolderId'] = outputFolder['_id']
        parent['photomorphOutputItems'] = {'gif': [], 'mp4': []}
        parent['photomorphMaskRect'] = maskRect
        parent['photomorphJobStatus'] = JobStatus.QUEUED

        job = docker_run.delay(
            'zachmullen/photomorph:latest',
            container_args=[
                '--mp4-out', mp4Out, '--gif-out', gifOut, '--mask-rect',
                ','.join(str(i) for i in itertools.chain(*maskRect)),
                GirderFolderIdToVolume(folder['_id'], folder_name='_input')
            ],
            girder_job_title='Timelapse creation: %s' % parent['name'],
            girder_result_hooks=[
                GirderUploadVolumePathToFolder(
                    mp4Out, outputMp4['_id'],
                    upload_kwargs={
                        'reference': json.dumps({
                            'photomorph': True,
                            'folderId': str(parent['_id']),
                            'resultType': 'mp4'
                        })
                    }),
                GirderUploadVolumePathToFolder(
                    gifOut, outputGif['_id'],
                    upload_kwargs={
                        'reference': json.dumps({
                            'photomorph': True,
                            'folderId': str(parent['_id']),
                            'resultType': 'gif'
                        })
                    })
            ]).job

        parent['photomorphJobId'] = job['_id']
        Folder().save(parent)

        job['photomorphId'] = parent['_id']
        return Job().save(job)
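
The reference strings attached via upload_kwargs travel with each uploaded file, so a server-side event handler can route the results. A sketch of such a handler, assuming Girder's finalize-upload event carries the upload document with its reference field:

import json

from girder import events


def _handleUpload(event):
    ref = event.info.get('upload', {}).get('reference') or ''
    try:
        reference = json.loads(ref)
    except ValueError:
        return
    if not isinstance(reference, dict) or not reference.get('photomorph'):
        return
    # e.g. record the new mp4/gif on the parent folder document
    print('photomorph %s result for folder %s'
          % (reference.get('resultType'), reference.get('folderId')))


events.bind('model.file.finalizeUpload.after', 'photomorph', _handleUpload)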
Example #9
def roofGeonExtraction(initWorkingSetName, stepName, requestInfo, jobId,
                       outputFolder, pointCloudFile, dtmFile, buildingMaskFile,
                       modelFolder, modelFilePrefix):
    """
    Run a Girder Worker job to run Purdue and Columbia's roof geon
    extraction pipeline.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param pointCloudFile: Point cloud file document.
    :type pointCloudFile: dict
    :param dtmFile: DTM file document.
    :type dtmFile: dict
    :param buildingMaskFile: Building mask file document.
    :type buildingMaskFile: dict
    :param modelFolder: Model folder document.
    :type modelFolder: dict
    :param modelFilePrefix: Model file prefix.
    :type modelFilePrefix: str
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Set output directory
    outputVolumePath = VolumePath('__output__')

    # Docker container arguments
    containerArgs = [
        'danesfield/tools/roof_geon_extraction.py', '--las',
        GirderFileIdToVolume(pointCloudFile['_id'], gc=gc), '--cls',
        GirderFileIdToVolume(buildingMaskFile['_id'], gc=gc), '--dtm',
        GirderFileIdToVolume(dtmFile['_id'], gc=gc), '--model_dir',
        GirderFolderIdToVolume(modelFolder['_id'], gc=gc), '--model_prefix',
        modelFilePrefix, '--output_dir', outputVolumePath
    ]

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(outputVolumePath,
                                       outputFolder['_id'],
                                       upload_kwargs=upload_kwargs,
                                       gc=gc)
    ]

    asyncResult = docker_run.delay(
        **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                   containerArgs=containerArgs,
                                   jobTitle=('[%s] Roof geon extraction: %s' %
                                             (initWorkingSetName,
                                              buildingMaskFile['name'])),
                                   jobType=stepName,
                                   user=requestInfo.user,
                                   resultHooks=resultHooks))

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
Example #10
def buildingSegmentation(initWorkingSetName,
                         stepName,
                         requestInfo,
                         jobId,
                         outputFolder,
                         dsmFile,
                         dtmFile,
                         msiImageFile,
                         rgbImageFile,
                         modelFolder,
                         modelFilePrefix):
    """
    Run a Girder Worker job to segment buildings using Columbia
    building segmentation.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param dsmFile: DSM file document.
    :type dsmFile: dict
    :param dtmFile: DTM file document.
    :type dtmFile: dict
    :param msiImageFile: Pansharpened MSI image file document.
    :type msiImageFile: dict
    :param rgbImageFile: RGB image file document.
    :type rgbImageFile: dict
    :param modelFolder: Model folder document.
    :type modelFolder: dict
    :param modelFilePrefix: Model file prefix.
    :type modelFilePrefix: str
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Set output directory
    outputVolumePath = VolumePath('.')

    # Docker container arguments
    containerArgs = [
        'danesfield/tools/building_segmentation.py',
        '--rgb_image', GirderFileIdToVolume(rgbImageFile['_id'], gc=gc),
        '--msi_image', GirderFileIdToVolume(msiImageFile['_id'], gc=gc),
        '--dsm', GirderFileIdToVolume(dsmFile['_id'], gc=gc),
        '--dtm', GirderFileIdToVolume(dtmFile['_id'], gc=gc),
        '--model_dir', GirderFolderIdToVolume(modelFolder['_id'], gc=gc),
        '--model_prefix', modelFilePrefix,
        '--save_dir', outputVolumePath,
        '--output_tif'
    ]

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(
            outputVolumePath,
            outputFolder['_id'],
            upload_kwargs=upload_kwargs,
            gc=gc)
    ]

    asyncResult = docker_run.delay(
        **createDockerRunArguments(
            image=DockerImage.DANESFIELD,
            containerArgs=containerArgs,
            jobTitle=('[%s] Building segmentation: %s' %
                      (initWorkingSetName, dsmFile['name'])),
            jobType=stepName,
            user=requestInfo.user,
            resultHooks=resultHooks
        )
    )

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
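
One difference from the earlier Danesfield steps: the output volume path here is '.', so the result hook uploads everything the container wrote to the temporary volume rather than a dedicated subdirectory. Both conventions side by side, for illustration:

from girder_worker.docker.transforms import VolumePath

# Upload only what the tool writes under a dedicated subdirectory.
output_subdir = VolumePath('__output__')

# Upload the entire contents of the (default temporary) volume.
whole_volume = VolumePath('.')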