コード例 #1
0
def test_GirderFileIdToVolume_transform_returns_volume_container_path_plus_id_plus_name(
        mock_gc, patch_mkdir, bogus_volume):
    # transform() should resolve to <container path>/<file id>/<file name> and
    # create the matching per-file-id directory on the host side.
    transform = GirderFileIdToVolume('BOGUS_ID', volume=bogus_volume, gc=mock_gc)
    expected = os.path.join(BOGUS_CONTAINER_PATH, 'BOGUS_ID', 'bogus.txt')
    assert transform.transform() == expected
    patch_mkdir.assert_called_once_with(os.path.join(BOGUS_HOST_PATH, 'BOGUS_ID'))
コード例 #2
0
ファイル: __init__.py プロジェクト: girder/demo_site
    def runInpainting(self, image, mask, folder):
        """Launch an async inpainting docker job and record its IDs.

        The job ID is saved on the folder so it can be polled, and the
        input image/mask/folder IDs are saved on the job document for
        event listeners.
        """
        basename = os.path.splitext(image['name'])[0]
        result_path = VolumePath(basename + '_result.jpg')
        artifact_path = VolumePath('job_artifacts')

        # Reference metadata attached to the uploaded result file.
        upload_reference = json.dumps({
            'inpaintedImage': True,
            'folderId': str(folder['_id']),
        })
        container_args = [
            GirderFileIdToVolume(image['_id']),
            GirderFileIdToVolume(mask['_id']),
            result_path,
            '--artifacts-dir', artifact_path,
            '--progress-pipe', ProgressPipe(),
        ]
        result_hooks = [
            GirderUploadVolumePathToFolder(
                result_path, folder['_id'],
                upload_kwargs={'reference': upload_reference}),
            # GirderUploadVolumePathJobArtifact(artifact_path)
        ]
        job = docker_run.delay(
            'zachmullen/inpainting:latest',
            container_args=container_args,
            girder_job_title='Inpainting: %s' % image['name'],
            girder_result_hooks=result_hooks).job

        # Track the in-flight job on the folder.
        folder['inpaintingJobId'] = job['_id']
        Folder().save(folder)

        # Record the inputs on the job document.
        job['inpaintingImageId'] = image['_id']
        job['inpaintingMaskId'] = mask['_id']
        job['inpaintingFolderId'] = folder['_id']
        return Job().save(job)
コード例 #3
0
def selectBest(initWorkingSetName,
               stepName,
               requestInfo,
               jobId,
               outputFolder,
               imageFiles,
               dsmFile):
    """
    Run a Girder Worker job to select the best image pair.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param imageFiles: List of image files.
    :type imageFiles: list[dict]
    :param dsmFile: DSM image file document.
    :type dsmFile: dict
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Docker container arguments: the tool, the DSM flag, then one
    # positional volume argument per candidate image.
    containerArgs = list(itertools.chain(
        [
            'danesfield/tools/select_best.py',
            '--dsm', GirderFileIdToVolume(dsmFile['_id'], gc=gc)
        ],
        [
            GirderFileIdToVolume(imageFile['_id'], gc=gc)
            for imageFile in imageFiles
        ]
    ))

    asyncResult = docker_run.delay(
        **createDockerRunArguments(
            image=DockerImage.DANESFIELD,
            containerArgs=containerArgs,
            jobTitle='[%s] Select best' % initWorkingSetName,
            jobType=stepName,
            user=requestInfo.user
        )
    )

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
コード例 #4
0
def test_GirderFileIdToVolume_transform_respects_filename_if_passed(
        mock_gc, bogus_volume):
    # An explicit filename should be used verbatim, with no per-file-id subdir.
    transform = GirderFileIdToVolume('BOGUS_ID',
                                     volume=bogus_volume,
                                     filename='foo.txt',
                                     gc=mock_gc)
    assert transform.transform() == os.path.join(BOGUS_CONTAINER_PATH, 'foo.txt')
    mock_gc.downloadFile.assert_called_once_with(
        'BOGUS_ID', os.path.join(BOGUS_HOST_PATH, 'foo.txt'))
コード例 #5
0
def test_GirderFileIdToVolume_accepts_ObjectId(mock_gc, patch_mkdir,
                                               bogus_volume):
    # An ObjectId file ID should be stringified for the download call.
    # Renamed local from 'hash', which shadowed the builtin.
    file_id = '5a5fc09ec2231b9487ce42db'
    GirderFileIdToVolume(ObjectId(file_id), volume=bogus_volume,
                         gc=mock_gc).transform()
    mock_gc.downloadFile.assert_called_once_with(
        file_id, os.path.join(BOGUS_HOST_PATH, file_id, 'bogus.txt'))
コード例 #6
0
    def createOrthorectifyTask(imageFile, rpcFile):
        """Build a docker_run signature that orthorectifies one image."""
        # Output file name mirrors the source image name.
        ortho_name = os.path.splitext(imageFile['name'])[0] + '_ortho.tif'
        output_volume_path = VolumePath(ortho_name)

        container_args = [
            'danesfield/tools/orthorectify.py',
            GirderFileIdToVolume(imageFile['_id'], gc=gc),  # source image
            GirderFileIdToVolume(dsmFile['_id'], gc=gc),    # DSM
            output_volume_path,                             # destination image
            '--dtm', GirderFileIdToVolume(dtmFile['_id'], gc=gc),
            '--raytheon-rpc', GirderFileIdToVolume(rpcFile['_id'], gc=gc),
        ]
        # Optional tuning flags.
        if occlusionThreshold is not None:
            container_args += ['--occlusion-thresh', str(occlusionThreshold)]
        if denoiseRadius is not None:
            container_args += ['--denoise-radius', str(denoiseRadius)]

        # Upload output files to the output folder, tagged with job metadata.
        upload_kwargs = createUploadMetadata(jobId, stepName)
        result_hooks = [
            GirderUploadVolumePathToFolder(output_volume_path,
                                           outputFolder['_id'],
                                           upload_kwargs=upload_kwargs,
                                           gc=gc),
        ]

        job_title = '[%s] Orthorectify: %s' % (initWorkingSetName,
                                               imageFile['name'])
        return docker_run.s(
            **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                       containerArgs=container_args,
                                       jobTitle=job_title,
                                       jobType=stepName,
                                       user=requestInfo.user,
                                       resultHooks=result_hooks))
コード例 #7
0
def _to_file_volume(param, model):
    """Map a slicer CLI parameter's girder model to the right volume transform."""
    girder_type = SLICER_TYPE_TO_GIRDER_MODEL_MAP[param.typ]

    if girder_type == 'folder':
        return GirderFolderIdToVolume(model['_id'], folder_name=model['name'])
    if girder_type == 'item':
        return GirderItemIdToVolume(model['_id'])

    # Files: prefer a direct host path when the setting allows it and the
    # assetstore can provide one; otherwise fall back to downloading.
    if Setting().get(PluginSettings.DIRECT_PATH):
        try:
            path = File().getLocalFilePath(model)
            return DirectGirderFileIdToVolume(model['_id'],
                                              direct_file_path=path,
                                              filename=model['name'])
        except FilePathException:
            pass
    return GirderFileIdToVolume(model['_id'], filename=model['name'])
コード例 #8
0
def _runUtm(folder, paramsFile, outputFolder):
    """Run the UTM docker image over a folder and upload its results."""
    results_path = VolumePath('__results__')
    container_args = [
        GirderFolderIdToVolume(folder['_id']),
        GirderFileIdToVolume(paramsFile['_id']),
        '--workingfolder', results_path,
    ]
    result_hooks = [
        GirderUploadVolumePathToFolder(results_path, outputFolder['_id']),
    ]
    async_result = docker_run.delay(
        'samuelgerber/utm',
        container_args=container_args,
        girder_job_title='UTM: ' + folder['name'],
        girder_result_hooks=result_hooks)
    return async_result.job
コード例 #9
0
    def createCropAndPansharpenTask(prefix,
                                    msiImageFile,
                                    panImageFile,
                                    msiRpcFile=None,
                                    panRpcFile=None):
        """Build a docker_run signature that crops/pansharpens one image set."""
        output_volume_path = VolumePath('__output__')

        container_args = [
            'danesfield/tools/crop_and_pansharpen.py',
            GirderFileIdToVolume(dsmFile['_id'], gc=gc),
            output_volume_path,
            '--pan',
            GirderFileIdToVolume(panImageFile['_id'], gc=gc),
        ]
        # RPC files are optional; each follows its corresponding image.
        if panRpcFile is not None:
            container_args.append(
                GirderFileIdToVolume(panRpcFile['_id'], gc=gc))
        container_args += [
            '--msi', GirderFileIdToVolume(msiImageFile['_id'], gc=gc)]
        if msiRpcFile is not None:
            container_args.append(
                GirderFileIdToVolume(msiRpcFile['_id'], gc=gc))

        # Upload output files to the output folder, tagged with job metadata.
        upload_kwargs = createUploadMetadata(jobId, stepName)
        result_hooks = [
            GirderUploadVolumePathToFolder(output_volume_path,
                                           outputFolder['_id'],
                                           upload_kwargs=upload_kwargs,
                                           gc=gc),
        ]

        return docker_run.s(**createDockerRunArguments(
            image=DockerImage.DANESFIELD,
            containerArgs=container_args,
            jobTitle='[%s] Crop and pansharpen: %s' %
            (initWorkingSetName, prefix),
            jobType=stepName,
            user=requestInfo.user,
            resultHooks=result_hooks))
コード例 #10
0
ファイル: docker.py プロジェクト: wphicks/girder_worker
    def test_docker_run_girder_file_to_volume(self, params):
        """Mount a girder file as a volume and pipe container output to stdout."""
        file_id = params.get('fileId')

        container_args = [
            'read_write',
            '-i', GirderFileIdToVolume(file_id),
            '-o', Connect(NamedOutputPipe('out'), HostStdOut()),
        ]
        result = docker_run.delay(
            TEST_IMAGE,
            pull_image=True,
            container_args=container_args,
            remove_container=True)

        return result.job
コード例 #11
0
def test_GirderFileIdToVolume_cleanup_removes_filepath(mock_gc, patch_mkdir, bogus_volume):
    # cleanup() should remove the downloaded path, tolerating a missing file.
    transform = GirderFileIdToVolume('BOGUS_ID', volume=bogus_volume, gc=mock_gc)
    transform.transform()
    with mock.patch('girder_worker.docker.transforms.girder.shutil.rmtree') as rmtree:
        transform.cleanup()
        expected = os.path.join(BOGUS_HOST_PATH, 'BOGUS_ID', 'bogus.txt')
        rmtree.assert_called_once_with(expected, ignore_errors=True)
コード例 #12
0
    def createPansharpenTask(prefix, panImageFile, msiImageFile):
        """Build a docker_run signature that pansharpens one PAN/MSI pair."""
        # Output file name is derived from the prefix.
        output_volume_path = VolumePath(prefix + '_ortho_pansharpened.tif')

        container_args = [
            'gdal_pansharpen.py',
            GirderFileIdToVolume(panImageFile['_id'], gc=gc),  # PAN image
            GirderFileIdToVolume(msiImageFile['_id'], gc=gc),  # MSI image
            output_volume_path,                                # output image
        ]

        # Upload output files to the output folder, tagged with job metadata.
        upload_kwargs = createUploadMetadata(jobId, stepName)
        result_hooks = [
            GirderUploadVolumePathToFolder(
                output_volume_path,
                outputFolder['_id'],
                upload_kwargs=upload_kwargs,
                gc=gc),
        ]

        return docker_run.s(**createDockerRunArguments(
            image=DockerImage.DANESFIELD,
            containerArgs=container_args,
            jobTitle='[%s] Pansharpen: %s' % (initWorkingSetName, prefix),
            jobType=stepName,
            user=requestInfo.user,
            resultHooks=result_hooks))
コード例 #13
0
    def createConvertMsiToRgbTask(prefix, imageFile):
        """Build a docker_run signature converting a pansharpened MSI to RGB."""
        # Output file name is derived from the prefix.
        output_volume_path = VolumePath(prefix + '_rgb_byte_image.tif')

        container_args = [
            'danesfield/tools/msi_to_rgb.py',
            GirderFileIdToVolume(imageFile['_id'], gc=gc),  # pansharpened MSI
            output_volume_path,                             # output image
        ]
        # '--byte' is on by default (byte=None means "not specified").
        if byte is None or byte:
            container_args.append('--byte')
        if alpha:
            container_args.append('--alpha')
        if rangePercentile is not None:
            container_args += ['--range-percentile', str(rangePercentile)]
        # TODO: Handle --big option (i.e. BIGTIFF)

        # Upload output files to the output folder, tagged with job metadata.
        upload_kwargs = createUploadMetadata(jobId, stepName)
        result_hooks = [
            GirderUploadVolumePathToFolder(
                output_volume_path,
                outputFolder['_id'],
                upload_kwargs=upload_kwargs,
                gc=gc),
        ]

        return docker_run.s(**createDockerRunArguments(
            image=DockerImage.DANESFIELD,
            containerArgs=container_args,
            jobTitle='[%s] Convert MSI to RGB: %s' % (initWorkingSetName,
                                                      prefix),
            jobType=stepName,
            user=requestInfo.user,
            resultHooks=result_hooks))
コード例 #14
0
ファイル: docker.py プロジェクト: wphicks/girder_worker
    def test_docker_run_transfer_encoding_stream(self, params):
        """Stream container output back to Girder via chunked transfer encoding."""
        item_id = params.get('itemId')
        file_id = params.get('fileId')
        delimiter = params.get('delimiter')

        # Authenticate the callback endpoint with a fresh token.
        token = str(Token().createToken(getCurrentUser())['_id'])
        headers = {'Girder-Token': token}
        url = '%s/%s?itemId=%s&delimiter=%s' % (
            getApiUrl(), 'integration_tests/docker/input_stream', item_id, delimiter)

        output_stream = ChunkedTransferEncodingStream(url, headers)
        container_args = [
            'read_write',
            '-i', GirderFileIdToVolume(file_id),
            '-o', Connect(NamedOutputPipe('out'), output_stream),
        ]
        result = docker_run.delay(
            TEST_IMAGE,
            pull_image=True,
            container_args=container_args,
            remove_container=True)

        return result.job
コード例 #15
0
def test_GirderFileIdToVolume_transform_calls_gc_downloadFile(
        mock_gc, patch_mkdir, bogus_volume):
    # transform() should download the file to <host path>/<file id>/<name>.
    GirderFileIdToVolume('BOGUS_ID', volume=bogus_volume, gc=mock_gc).transform()
    expected = os.path.join(BOGUS_HOST_PATH, 'BOGUS_ID', 'bogus.txt')
    mock_gc.downloadFile.assert_called_once_with('BOGUS_ID', expected)
コード例 #16
0
def fitDtm(initWorkingSetName,
           stepName,
           requestInfo,
           jobId,
           outputFolder,
           dsmFile,
           outputPrefix,
           iterations=None,
           tension=None):
    """
    Run a Girder Worker job to fit a Digital Terrain Model (DTM) to a
    Digital Surface Model (DSM).

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param dsmFile: DSM image file document.
    :type dsmFile: dict
    :param outputPrefix: The prefix of the output file name.
    :type outputPrefix: str
    :param iterations: The base number of iterations at the coarsest scale.
    :type iterations: int
    :param tension: Number of inner smoothing iterations.
    :type tension: int
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Output file name is derived from the requested prefix.
    output_volume_path = VolumePath(outputPrefix + '_DTM.tif')

    container_args = [
        'danesfield/tools/fit_dtm.py',
        GirderFileIdToVolume(dsmFile['_id'], gc=gc),
        output_volume_path,
    ]
    # Optional solver tuning flags.
    if iterations is not None:
        container_args += ['--num-iterations', str(iterations)]
    if tension is not None:
        container_args += ['--tension', str(tension)]

    # Upload output files to the output folder, tagged with job metadata.
    upload_kwargs = createUploadMetadata(jobId, stepName)
    result_hooks = [
        GirderUploadVolumePathToFolder(output_volume_path,
                                       outputFolder['_id'],
                                       upload_kwargs=upload_kwargs,
                                       gc=gc),
    ]

    job_title = '[%s] Fit DTM: %s' % (initWorkingSetName, dsmFile['name'])
    async_result = docker_run.delay(
        **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                   containerArgs=container_args,
                                   jobTitle=job_title,
                                   jobType=stepName,
                                   user=requestInfo.user,
                                   resultHooks=result_hooks))

    # Add info for job event listeners.
    return addJobInfo(async_result.job, jobId=jobId, stepName=stepName)
コード例 #17
0
def classifyMaterials(initWorkingSetName,
                      stepName,
                      requestInfo,
                      jobId,
                      outputFolder,
                      imageFiles,
                      metadataFiles,
                      modelFile,
                      outfilePrefix,
                      cuda=None,
                      batchSize=None,
                      model=None):
    """
    Run a Girder Worker job to classify materials in an orthorectified image.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param imageFiles: List of orthorectified MSI image files.
    :type imageFiles: list[dict]
    :param metadataFiles: List of MSI-source NITF metadata files.
    :type metadataFiles: list[dict]
    :param modelFile: Model file document.
    :type modelFile: dict
    :param outfilePrefix: Prefix for output filename
    :type outfilePrefix: str
    :param cuda: Enable/disable CUDA; enabled by default.
    :type cuda: bool
    :param batchSize: Number of pixels classified at a time.
    :type batchSize: int
    :param model: Model selector; accepted for interface compatibility but
        not used in this function body — TODO confirm intended use.
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    outputVolumePath = VolumePath('.')

    # Docker container arguments
    containerArgs = list(
        itertools.chain([
            'danesfield/tools/material_classifier.py', '--model_path',
            GirderFileIdToVolume(modelFile['_id'],
                                 gc=gc), '--output_dir', outputVolumePath,
            '--outfile_prefix', outfilePrefix, '--image_paths'
        ], [
            GirderFileIdToVolume(imageFile['_id'], gc=gc)
            for imageFile in imageFiles
        ], ['--info_paths'], [
            GirderFileIdToVolume(metadataFile['_id'], gc=gc)
            for metadataFile in metadataFiles
        ]))
    # CUDA is enabled unless explicitly disabled.
    if cuda is None or cuda:
        containerArgs.append('--cuda')
    if batchSize is not None:
        containerArgs.extend(['--batch_size', str(batchSize)])

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(outputVolumePath,
                                       outputFolder['_id'],
                                       upload_kwargs=upload_kwargs,
                                       gc=gc)
    ]

    asyncResult = docker_run.delay(
        **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                   containerArgs=containerArgs,
                                   jobTitle='[%s] Classify materials' %
                                   initWorkingSetName,
                                   jobType=stepName,
                                   user=requestInfo.user,
                                   resultHooks=resultHooks))

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
コード例 #18
0

def test_GirderUploadVolumePathToItem_transform_calls_gc_uploadFile(
        mock_gc, bogus_volume):
    # transform() should upload the container-side path to the given item.
    volume_path = VolumePath('test', bogus_volume)
    GirderUploadVolumePathToItem(volume_path, 'BOGUS_ITEM_ID', gc=mock_gc).transform()
    expected = os.path.join(BOGUS_CONTAINER_PATH, 'test')
    mock_gc.uploadFileToItem.assert_called_once_with('BOGUS_ITEM_ID', expected)


def test_GirderUploadVolumePathToItem_transform_accepts_ObjectId(
        mock_gc, bogus_volume):
    # An ObjectId item ID should be stringified for the upload call.
    # Renamed local from 'hash', which shadowed the builtin.
    vp = VolumePath('test', bogus_volume)
    item_id = '5a5fc09ec2231b9487ce42db'
    GirderUploadVolumePathToItem(vp, ObjectId(item_id), gc=mock_gc).transform()
    mock_gc.uploadFileToItem.assert_called_once_with(
        item_id, os.path.join(BOGUS_CONTAINER_PATH, 'test'))


@pytest.mark.parametrize('obj,expected_repr', (
    (VolumePath('test', bogus_volume),
     '<girder_worker.docker.transforms.VolumePath: "test">'),
    (GirderFileIdToVolume('123', gc=mock_gc()),
     '<girder_worker.docker.transforms.girder.GirderFileIdToVolume: File ID=123>'),
    (GirderFileIdToVolume('123', filename='foo.txt', gc=mock_gc()),
     '<girder_worker.docker.transforms.girder.GirderFileIdToVolume: File ID=123 -> "foo.txt">'),
))
def test_docker_repr_models(obj, expected_repr):
    # Each transform should advertise a stable, human-readable model repr.
    assert obj._repr_model_() == expected_repr
コード例 #19
0
def roofGeonExtraction(initWorkingSetName, stepName, requestInfo, jobId,
                       outputFolder, pointCloudFile, dtmFile, buildingMaskFile,
                       modelFolder, modelFilePrefix):
    """
    Run a Girder Worker job to run Purdue and Columbia's roof geon
    extraction pipeline.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param pointCloudFile: Point cloud file document.
    :type pointCloudFile: dict
    :param dtmFile: DTM file document.
    :type dtmFile: dict
    :param buildingMaskFile: Building mask file document.
    :type buildingMaskFile: dict
    :param modelFolder: Model directory.
    :type modelFolder: dict
    :param modelFilePrefix: Model name prefix.
    :type modelFilePrefix: str
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Results are written into a dedicated output directory.
    output_volume_path = VolumePath('__output__')

    container_args = [
        'danesfield/tools/roof_geon_extraction.py',
        '--las', GirderFileIdToVolume(pointCloudFile['_id'], gc=gc),
        '--cls', GirderFileIdToVolume(buildingMaskFile['_id'], gc=gc),
        '--dtm', GirderFileIdToVolume(dtmFile['_id'], gc=gc),
        '--model_dir', GirderFolderIdToVolume(modelFolder['_id'], gc=gc),
        '--model_prefix', modelFilePrefix,
        '--output_dir', output_volume_path,
    ]

    # Upload output files to the output folder, tagged with job metadata.
    upload_kwargs = createUploadMetadata(jobId, stepName)
    result_hooks = [
        GirderUploadVolumePathToFolder(output_volume_path,
                                       outputFolder['_id'],
                                       upload_kwargs=upload_kwargs,
                                       gc=gc),
    ]

    job_title = '[%s] Roof geon extraction: %s' % (initWorkingSetName,
                                                   buildingMaskFile['name'])
    async_result = docker_run.delay(
        **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                   containerArgs=container_args,
                                   jobTitle=job_title,
                                   jobType=stepName,
                                   user=requestInfo.user,
                                   resultHooks=result_hooks))

    # Add info for job event listeners.
    return addJobInfo(async_result.job, jobId=jobId, stepName=stepName)
コード例 #20
0
def generateDsm(initWorkingSetName, stepName, requestInfo, jobId, outputFolder,
                pointCloudFile, outputPrefix):
    """
    Run a Girder Worker job to generate a Digital Surface Model (DSM)
    from a point cloud.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param pointCloudFile: Point cloud file document.
    :type pointCloudFile: dict
    :param outputPrefix: The prefix of the output file name.
    :type outputPrefix: str
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Output file name is derived from the requested prefix.
    output_volume_path = VolumePath(outputPrefix + '_P3D_DSM.tif')

    container_args = [
        'danesfield/tools/generate_dsm.py',
        output_volume_path,
        '--source_points',
        GirderFileIdToVolume(pointCloudFile['_id'], gc=gc),
    ]

    # Upload output files to the output folder, tagged with job metadata.
    upload_kwargs = createUploadMetadata(jobId, stepName)
    result_hooks = [
        GirderUploadVolumePathToFolder(output_volume_path,
                                       outputFolder['_id'],
                                       upload_kwargs=upload_kwargs,
                                       gc=gc),
    ]

    job_title = '[%s] Generate DSM: %s' % (initWorkingSetName,
                                           pointCloudFile['name'])
    async_result = docker_run.delay(
        **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                   containerArgs=container_args,
                                   jobTitle=job_title,
                                   jobType=stepName,
                                   user=requestInfo.user,
                                   resultHooks=result_hooks))

    # Add info for job event listeners.
    return addJobInfo(async_result.job, jobId=jobId, stepName=stepName)
コード例 #21
0
def generatePointCloud(initWorkingSetName,
                       stepName,
                       requestInfo,
                       jobId,
                       outputFolder,
                       imageFiles,
                       longitude,
                       latitude,
                       longitudeWidth,
                       latitudeWidth):
    """
    Run a Girder Worker job to generate a 3D point cloud from 2D images.

    Requirements:
    - P3D Girder Worker Docker image is available on host
    - Host folder /mnt/GTOPO30 contains GTOPO 30 data

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param imageFiles: List of input image files.
    :type imageFiles: list[dict]
    :param longitude: Longitude of the region of interest, passed through to
        the P3D tool as ``--longitude`` (presumably decimal degrees —
        TODO confirm units/datum with the tool's documentation).
    :type longitude: str or number (stringified before use)
    :param latitude: Latitude of the region of interest, passed through as
        ``--latitude`` (presumably decimal degrees — TODO confirm).
    :type latitude: str or number (stringified before use)
    :param longitudeWidth: Extent of the region in the longitude direction,
        passed through as ``--longitudeWidth`` (units not shown here —
        TODO confirm).
    :type longitudeWidth: str or number (stringified before use)
    :param latitudeWidth: Extent of the region in the latitude direction,
        passed through as ``--latitudeWidth`` (units not shown here —
        TODO confirm).
    :type latitudeWidth: str or number (stringified before use)
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    # Docker volumes: mount the host's GTOPO 30 elevation data read-only
    # where the P3D tool expects it.
    volumes = [
        BindMountVolume(host_path='/mnt/GTOPO30',
                        container_path='/P3D/GTOPO30',
                        mode='ro')
    ]

    # All outputs are written into this directory inside the container.
    outputVolumePath = VolumePath('__output__')

    # Docker container arguments
    # TODO: Consider a solution where args are written to a file, in
    # case of very long command lines
    containerArgs = list(itertools.chain(
        [
            'python', '/P3D/RTN_distro/scripts/generate_point_cloud.pyc',
            '--out', outputVolumePath,
            '--longitude', str(longitude),
            '--latitude', str(latitude),
            '--longitudeWidth', str(longitudeWidth),
            '--latitudeWidth', str(latitudeWidth),
            '--firstProc', '0',
            '--threads', '8',
            '--images'
        ],
        # One downloaded-volume argument per input image, after '--images'.
        [GirderFileIdToVolume(imageFile['_id'], gc=gc)
         for imageFile in imageFiles],
    ))

    # Result hooks
    # - Upload output files to output folder
    # - Provide upload metadata
    upload_kwargs = createUploadMetadata(jobId, stepName)
    resultHooks = [
        GirderUploadVolumePathToFolder(
            outputVolumePath,
            outputFolder['_id'],
            upload_kwargs=upload_kwargs,
            gc=gc)
    ]

    asyncResult = docker_run.delay(
        volumes=volumes,
        **createDockerRunArguments(
            image=DockerImage.P3D,
            containerArgs=containerArgs,
            jobTitle='[%s] Generate point cloud' % initWorkingSetName,
            jobType=stepName,
            user=requestInfo.user,
            resultHooks=resultHooks
        )
    )

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
コード例 #22
0
def segmentByHeight(initWorkingSetName, stepName, requestInfo, jobId,
                    outputFolder, dsmFile, dtmFile, ndviFile, roadVectorFile):
    """
    Run a Girder Worker job that segments buildings by comparing a DSM
    against a DTM.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param dsmFile: DSM file document.
    :type dsmFile: dict
    :param dtmFile: DTM file document.
    :type dtmFile: dict
    :param ndviFile: NDVI file document.
    :type ndviFile: dict
    :param roadVectorFile: Road vector file.
    :type roadVectorFile: dict
    :returns: Job document.
    """
    girderClient = createGirderClient(requestInfo)

    # Output file names inside the container volume.
    # TODO: Danesfield master script hardcodes these without any
    # prefix; do the same here
    thresholdOutput = VolumePath('threshold_CLS.tif')
    roadRasterOutput = VolumePath('road_rasterized.tif')
    roadBridgeRasterOutput = VolumePath('road_rasterized_bridge.tif')

    # Positional arguments (DSM, DTM, threshold output), then options
    containerArgs = [
        'danesfield/tools/segment_by_height.py',
        GirderFileIdToVolume(dsmFile['_id'], gc=girderClient),
        GirderFileIdToVolume(dtmFile['_id'], gc=girderClient),
        thresholdOutput,
    ]
    containerArgs += [
        # Normalized Difference Vegetation Index image
        '--input-ndvi',
        GirderFileIdToVolume(ndviFile['_id'], gc=girderClient),
        '--road-vector',
        GirderFileIdToVolume(roadVectorFile['_id'], gc=girderClient),
        '--road-rasterized',
        roadRasterOutput,
        '--road-rasterized-bridge',
        roadBridgeRasterOutput,
    ]

    # Upload the threshold image to the output folder with job metadata
    uploadKwargs = createUploadMetadata(jobId, stepName)
    hooks = [
        GirderUploadVolumePathToFolder(thresholdOutput,
                                       outputFolder['_id'],
                                       upload_kwargs=uploadKwargs,
                                       gc=girderClient)
    ]

    jobTitle = '[%s] Segment by height: %s' % (initWorkingSetName,
                                               dsmFile['name'])
    asyncResult = docker_run.delay(**createDockerRunArguments(
        image=DockerImage.DANESFIELD,
        containerArgs=containerArgs,
        jobTitle=jobTitle,
        jobType=stepName,
        user=requestInfo.user,
        resultHooks=hooks))

    # Attach identifiers so job event listeners can react to this step
    job = addJobInfo(asyncResult.job, jobId=jobId, stepName=stepName)
    return job
コード例 #23
0
def computeNdvi(initWorkingSetName, stepName, requestInfo, jobId, outputFolder,
                imageFiles, outputNdviFilename):
    """
    Run a Girder Worker job that computes an NDVI image from a set of
    input images.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param imageFiles: List of pansharpened image files.
    :type imageFiles: list[dict]
    :param outputNdviFilename: Filename for output NDVI
    :type outputNdviFilename: str
    :returns: Job document.
    """
    girderClient = createGirderClient(requestInfo)

    # The tool writes its result to this path inside the container
    ndviOutput = VolumePath(outputNdviFilename)

    # Script name, one argument per input image, then the output path
    containerArgs = ['danesfield/tools/compute_ndvi.py']
    containerArgs.extend(GirderFileIdToVolume(imageFile['_id'],
                                              gc=girderClient)
                         for imageFile in imageFiles)
    containerArgs.append(ndviOutput)

    # Upload the NDVI image to the output folder with job metadata
    uploadKwargs = createUploadMetadata(jobId, stepName)
    hooks = [
        GirderUploadVolumePathToFolder(ndviOutput,
                                       outputFolder['_id'],
                                       upload_kwargs=uploadKwargs,
                                       gc=girderClient)
    ]

    asyncResult = docker_run.delay(**createDockerRunArguments(
        image=DockerImage.DANESFIELD,
        containerArgs=containerArgs,
        jobTitle='[%s] Compute NDVI' % initWorkingSetName,
        jobType=stepName,
        user=requestInfo.user,
        resultHooks=hooks))

    # Attach identifiers so job event listeners can react to this step
    job = addJobInfo(asyncResult.job, jobId=jobId, stepName=stepName)
    return job
コード例 #24
0
def textureMapping(initWorkingSetName, stepName, requestInfo, jobId,
                   outputFolder, objFiles, imageFiles, dsmFile, dtmFile):
    """
    Run a Girder Worker job that applies texture mapping to building
    meshes.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param objFiles: List of OBJ files.
    :type objFiles: list[dict]
    :param imageFiles: List of cropped and pansharpened image files.
    :type imageFiles: list[dict]
    :param dsmFile: DSM file document.
    :type dsmFile: dict
    :param dtmFile: DTM file document.
    :type dtmFile: dict
    :returns: Job document.
    """
    girderClient = createGirderClient(requestInfo)

    # Directory that receives the textured meshes
    texturedOutput = VolumePath('__output__')

    # Occlusion mesh written alongside the textured output
    # NOTE(review): 'xxxx.obj' looks like a placeholder name -- confirm
    occlusionMeshName = 'xxxx.obj'
    occlusionMeshOutput = VolumePath(occlusionMeshName)

    # Positional arguments first, then the crop and building file lists
    containerArgs = [
        'danesfield/tools/texture_mapping.py',
        GirderFileIdToVolume(dsmFile['_id'], gc=girderClient),
        GirderFileIdToVolume(dtmFile['_id'], gc=girderClient),
        texturedOutput,
        occlusionMeshOutput,
    ]
    containerArgs.append('--crops')
    containerArgs.extend(GirderFileIdToVolume(f['_id'], gc=girderClient)
                         for f in imageFiles)
    containerArgs.append('--buildings')
    containerArgs.extend(GirderFileIdToVolume(f['_id'], gc=girderClient)
                         for f in objFiles)

    # Upload both outputs to the output folder with job metadata attached
    uploadKwargs = createUploadMetadata(jobId, stepName)
    hooks = [
        GirderUploadVolumePathToFolder(path,
                                       outputFolder['_id'],
                                       upload_kwargs=uploadKwargs,
                                       gc=girderClient)
        for path in (texturedOutput, occlusionMeshOutput)
    ]

    asyncResult = docker_run.delay(**createDockerRunArguments(
        image=DockerImage.DANESFIELD,
        containerArgs=containerArgs,
        jobTitle='[%s] Texture mapping' % initWorkingSetName,
        jobType=stepName,
        user=requestInfo.user,
        resultHooks=hooks))

    # Attach identifiers so job event listeners can react to this step
    job = addJobInfo(asyncResult.job, jobId=jobId, stepName=stepName)
    return job
コード例 #25
0
def unetSemanticSegmentation(initWorkingSetName, stepName, requestInfo, jobId,
                             outputFolder, dsmFile, dtmFile, msiImageFile,
                             rgbImageFile, configFile, modelFile):
    """
    Run a Girder Worker job that segments buildings with UNet semantic
    segmentation.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param dsmFile: DSM file document.
    :type dsmFile: dict
    :param dtmFile: DTM file document.
    :type dtmFile: dict
    :param msiImageFile: Pansharpened MSI image file document.
    :type msiImageFile: dict
    :param rgbImageFile: RGB image file document.
    :type rgbImageFile: dict
    :param configFile: Configuration file document.
    :type configFile: dict
    :param modelFile: Model file document.
    :type modelFile: dict
    :returns: Job document.
    """
    girderClient = createGirderClient(requestInfo)

    # The tool writes its outputs into the container working directory
    outputDir = VolumePath('.')

    # Input files in the positional order the tool expects: config,
    # model, RGB image, DSM, DTM, MSI image
    inputFiles = [configFile, modelFile, rgbImageFile, dsmFile, dtmFile,
                  msiImageFile]
    containerArgs = ['danesfield/tools/kwsemantic_segment.py']
    containerArgs += [GirderFileIdToVolume(doc['_id'], gc=girderClient)
                      for doc in inputFiles]
    # Output directory, then the output file prefix
    containerArgs += [outputDir, 'semantic']

    # Upload output files to the output folder with job metadata
    uploadKwargs = createUploadMetadata(jobId, stepName)
    hooks = [
        GirderUploadVolumePathToFolder(outputDir,
                                       outputFolder['_id'],
                                       upload_kwargs=uploadKwargs,
                                       gc=girderClient)
    ]

    asyncResult = docker_run.delay(**createDockerRunArguments(
        image=DockerImage.DANESFIELD,
        containerArgs=containerArgs,
        jobTitle=('[%s] UNet semantic segmentation: %s' %
                  (initWorkingSetName, dsmFile['name'])),
        jobType=stepName,
        user=requestInfo.user,
        resultHooks=hooks))

    # Attach identifiers so job event listeners can react to this step
    job = addJobInfo(asyncResult.job, jobId=jobId, stepName=stepName)
    return job
コード例 #26
0
def runMetrics(initWorkingSetName, stepName, requestInfo, jobId, outputFolder,
               referenceFolder, referencePrefix, dtmFile, dsmFile, clsFile,
               mtlFile):
    """
    Run a Girder Worker job to compute metrics on output files.

    Requirements:
    - Danesfield Docker image is available on host

    :param initWorkingSetName: The name of the top-level working set.
    :type initWorkingSetName: str
    :param stepName: The name of the step.
    :type stepName: str (DanesfieldStep)
    :param requestInfo: HTTP request and authorization info.
    :type requestInfo: RequestInfo
    :param jobId: Job ID.
    :type jobId: str
    :param outputFolder: Output folder document.
    :type outputFolder: dict
    :param referenceFolder: Reference directory.
    :type referenceFolder: dict
    :param referencePrefix: Reference file prefix; the special value
        "STANDARD" means no ground truth is available.
    :type referencePrefix: str
    :param dtmFile: DTM file document.
    :type dtmFile: dict
    :param dsmFile: DSM file document.
    :type dsmFile: dict
    :param clsFile: CLS file document.
    :type clsFile: dict
    :param mtlFile: MTL file document.
    :type mtlFile: dict
    :returns: Job document.
    """
    gc = createGirderClient(requestInfo)

    if referencePrefix == "STANDARD":
        # We know that there's no reference data with this selection,
        # so run a trivial container that just reports the situation
        # instead of running the metrics tool.
        containerArgs = ['echo', 'No ground truth selected for scoring']

        # No result hooks: the echo container produces no output files
        asyncResult = docker_run.delay(
            **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                       containerArgs=containerArgs,
                                       jobTitle='[%s] Run metrics' %
                                       initWorkingSetName,
                                       jobType=stepName,
                                       user=requestInfo.user))
    else:
        # Otherwise we assume the reference data exists, and try to
        # run the metrics
        outputVolumePath = VolumePath('__output__')

        # Docker container arguments; the reference data is mounted as a
        # whole folder, the individual products as single files
        containerArgs = [
            'danesfield/tools/run_metrics.py', '--output-dir',
            outputVolumePath, '--ref-dir',
            GirderFolderIdToVolume(referenceFolder['_id'], gc=gc),
            '--ref-prefix', referencePrefix, '--dsm',
            GirderFileIdToVolume(dsmFile['_id'], gc=gc), '--cls',
            GirderFileIdToVolume(clsFile['_id'], gc=gc), '--mtl',
            GirderFileIdToVolume(mtlFile['_id'], gc=gc), '--dtm',
            GirderFileIdToVolume(dtmFile['_id'], gc=gc)
        ]

        # Result hooks
        # - Upload output files to output folder
        # - Provide upload metadata
        upload_kwargs = createUploadMetadata(jobId, stepName)
        resultHooks = [
            GirderUploadVolumePathToFolder(outputVolumePath,
                                           outputFolder['_id'],
                                           upload_kwargs=upload_kwargs,
                                           gc=gc)
        ]

        asyncResult = docker_run.delay(
            **createDockerRunArguments(image=DockerImage.DANESFIELD,
                                       containerArgs=containerArgs,
                                       jobTitle='[%s] Run metrics' %
                                       initWorkingSetName,
                                       jobType=stepName,
                                       user=requestInfo.user,
                                       resultHooks=resultHooks))

    # Add info for job event listeners
    job = asyncResult.job
    job = addJobInfo(job, jobId=jobId, stepName=stepName)

    return job
コード例 #27
0
    GirderUploadVolumePathToItem(vp, 'BOGUS_ITEM_ID', gc=mock_gc).transform()
    mock_gc.uploadFileToItem.assert_called_once_with(
        'BOGUS_ITEM_ID', os.path.join(BOGUS_CONTAINER_PATH, 'test'))


def test_GirderUploadVolumePathToItem_transform_accepts_ObjectId(mock_gc, bogus_volume):
    """Transform should accept an ObjectId item id and upload using its
    hex string form."""
    vp = VolumePath('test', bogus_volume)
    # Renamed from ``hash``, which shadowed the builtin of the same name
    item_id_hex = '5a5fc09ec2231b9487ce42db'
    GirderUploadVolumePathToItem(vp, ObjectId(item_id_hex), gc=mock_gc).transform()
    mock_gc.uploadFileToItem.assert_called_once_with(
        item_id_hex, os.path.join(BOGUS_CONTAINER_PATH, 'test'))


# NOTE(review): the parametrize cases are built at import time, so
# ``bogus_volume`` and ``_mock_gc()`` here are presumably module-level
# helpers rather than pytest fixtures -- confirm against this module.
@pytest.mark.parametrize('obj,expected_repr', (
    (VolumePath('test', bogus_volume), '<girder_worker.docker.transforms.VolumePath: "test">'),
    (GirderFileIdToVolume('123', gc=_mock_gc()),
     '<girder_worker.docker.transforms.girder.GirderFileIdToVolume: File ID=123>'),
    (GirderFileIdToVolume('123', filename='foo.txt', gc=_mock_gc()),
     '<girder_worker.docker.transforms.girder.GirderFileIdToVolume: File ID=123 -> "foo.txt">')
))
def test_docker_repr_models(obj, expected_repr):
    """Each transform's ``_repr_model_`` should identify its class and inputs."""
    assert obj._repr_model_() == expected_repr


def test_GirderUploadJobArtifact_file_not_found(mock_gc, bogus_volume):
    """Transforming a path that does not exist should not POST anything."""
    missing_path = VolumePath('test', bogus_volume)
    transform = GirderUploadVolumePathJobArtifact(missing_path, job_id='123',
                                                  gc=mock_gc)
    transform.transform()
    mock_gc.post.assert_not_called()


def test_GirderUploadJobArtifact(mock_gc, actual_file):