示例#1
0
 def test_get_clip_region(self):
     """Test getting a clip region from WKT.

     Projects a WKT multipolygon to EPSG:3857 and checks that the
     resulting extent's maximum corner matches the expected values.
     """
     wkt = 'MULTIPOLYGON (((-75.759298375698563 41.391337611891402, -75.759298375698563 49.022078452247342, -92.303148066299968 49.022078452247342, -92.303148066299968 41.391337611891402, -75.759298375698563 41.391337611891402)))'
     clip_region = task_utils.get_clip_region(wkt, 3857)
     # The tuple holds XMax/YMax, so name it extent_max (it was previously
     # named extent_min, which was misleading).
     extent_max = ('{0:.1f}'.format(clip_region.XMax),
                   '{0:.1f}'.format(clip_region.YMax))
     self.assertEqual(extent_max, ('-8433486.5', '6278608.5'))
示例#2
0
 def test_get_clip_region(self):
     """Test getting a clip region from WKT.

     Projects a WKT multipolygon to EPSG:3857 and checks that the
     resulting extent's maximum corner matches the expected values.
     """
     wkt = 'MULTIPOLYGON (((-75.759298375698563 41.391337611891402, -75.759298375698563 49.022078452247342, -92.303148066299968 49.022078452247342, -92.303148066299968 41.391337611891402, -75.759298375698563 41.391337611891402)))'
     clip_region = task_utils.get_clip_region(wkt, 3857)
     # The tuple holds XMax/YMax, so name it extent_max (it was previously
     # named extent_min, which was misleading).
     extent_max = ('{0:.1f}'.format(clip_region.XMax), '{0:.1f}'.format(clip_region.YMax))
     self.assertEqual(extent_max, ('-8433486.5', '6278608.5'))
示例#3
0
def execute(request):
    """Package inputs to an Esri map (MPK) or layer (LPK) package.

    Queries the search index for the requested results in chunks, copies
    the matching items into a temp workspace, then runs the version-specific
    arcpy packaging tool and writes a thumbnail and report.

    :param request: json as a dict.
    """
    # Running totals reported at the end.
    errors = 0
    skipped = 0
    layers = []
    files = []

    # Application root is two directories above this module.
    app_folder = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    parameters = request['params']
    out_format = task_utils.get_parameter_value(parameters, 'output_format', 'value')
    summary = task_utils.get_parameter_value(parameters, 'summary')
    tags = task_utils.get_parameter_value(parameters, 'tags')
    output_file_name = task_utils.get_parameter_value(parameters, 'output_file_name')
    if not output_file_name:
        output_file_name = 'package_results'

    # Get the clip region as an extent object.
    clip_area = None
    try:
        clip_area_wkt = task_utils.get_parameter_value(parameters, 'processing_extent', 'wkt')
        clip_area = task_utils.get_clip_region(clip_area_wkt)
    except (KeyError, ValueError):
        # The clip region is optional: leave clip_area as None when the
        # parameter is absent or its WKT cannot be parsed.
        pass

    # Scratch workspace for intermediate data (created under the job folder).
    out_workspace = os.path.join(request['folder'], 'temp')
    if not os.path.exists(out_workspace):
        os.makedirs(out_workspace)

    num_results, response_index = task_utils.get_result_count(parameters)
    # if num_results > task_utils.CHUNK_SIZE:
    # Query the index for results in groups of 25.
    query_index = task_utils.QueryIndex(parameters[response_index])
    fl = query_index.fl
    # NOTE(review): the search service base URL comes from sys.argv[2] in
    # "name=value" form — confirm this matches how the task is launched.
    query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1], '/select?&wt=json', fl)
    fq = query_index.get_fq()
    if fq:
        # Filter query present: page through row offsets 0..num_results.
        groups = task_utils.grouper(range(0, num_results), task_utils.CHUNK_SIZE, '')
        query += fq
    elif 'ids' in parameters[response_index]:
        # Explicit id list: chunk the ids themselves instead of row offsets.
        groups = task_utils.grouper(list(parameters[response_index]['ids']), task_utils.CHUNK_SIZE, '')
    else:
        groups = task_utils.grouper(range(0, num_results), task_utils.CHUNK_SIZE, '')

    headers = {'x-access-token': task_utils.get_security_token(request['owner'])}
    status_writer.send_status(_('Starting to process...'))
    for group in groups:
        # Fetch this chunk of result documents from the index; the request
        # shape mirrors the grouping strategy chosen above.
        if fq:
            results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers)
        elif 'ids' in parameters[response_index]:
            results = requests.get(query + '{0}&ids={1}'.format(fl, ','.join(group)), headers=headers)
        else:
            results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers)

        input_items = task_utils.get_input_items(results.json()['response']['docs'])
        if not input_items:
            # Fall back to the docs already embedded in the request parameters.
            input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])
        # NOTE(review): layers/files/errors/skipped are reassigned (not
        # accumulated) on every iteration, so only the last group's items and
        # counts survive the loop — confirm this is intended.
        layers, files, errors, skipped = get_items(input_items, out_workspace)
    # else:
    #     input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])
    #     layers, files, errors, skipped = get_items(input_items, out_workspace)

    if errors == num_results:
        status_writer.send_state(status.STAT_FAILED, _('No results to package'))
        return

    try:
        if out_format == 'MPK':
            # Map package: seed an MXD from the bundled template, add every
            # collected layer to its first data frame, then package it.
            shutil.copyfile(os.path.join(app_folder, 'supportfiles', 'MapTemplate.mxd'),
                            os.path.join(out_workspace, 'output.mxd'))
            mxd = arcpy.mapping.MapDocument(os.path.join(out_workspace, 'output.mxd'))
            if mxd.description == '':
                # Presumably PackageMap rejects an empty description — verify.
                mxd.description = os.path.basename(mxd.filePath)
            df = arcpy.mapping.ListDataFrames(mxd)[0]
            for layer in layers:
                arcpy.mapping.AddLayer(df, layer)
            mxd.save()
            status_writer.send_status(_('Generating {0}. Large input {1} will take longer to process.'.format('MPK', 'results')))
            # PackageMap's signature differs across ArcGIS releases (10.0 has
            # no runtime/summary args; 10.1 capitalizes 'ArcGISRuntime').
            if arcpy.GetInstallInfo()['Version'] == '10.0':
                arcpy.PackageMap_management(mxd.filePath,
                                            os.path.join(os.path.dirname(out_workspace), '{0}.mpk'.format(output_file_name)),
                                            'PRESERVE',
                                            extent=clip_area)
            elif arcpy.GetInstallInfo()['Version'] == '10.1':
                arcpy.PackageMap_management(mxd.filePath,
                                            os.path.join(os.path.dirname(out_workspace), '{0}.mpk'.format(output_file_name)),
                                            'PRESERVE',
                                            extent=clip_area,
                                            ArcGISRuntime='RUNTIME',
                                            version='10',
                                            additional_files=files,
                                            summary=summary,
                                            tags=tags)
            else:
                arcpy.PackageMap_management(mxd.filePath,
                                            os.path.join(os.path.dirname(out_workspace), '{0}.mpk'.format(output_file_name)),
                                            'PRESERVE',
                                            extent=clip_area,
                                            arcgisruntime='RUNTIME',
                                            version='10',
                                            additional_files=files,
                                            summary=summary,
                                            tags=tags)
            #  Create a thumbnail size PNG of the mxd.
            task_utils.make_thumbnail(mxd, os.path.join(request['folder'], '_thumb.png'))
        else:
            # Layer package: package the collected layers directly.
            status_writer.send_status(_('Generating {0}. Large input {1} will take longer to process.'.format('LPK', 'results')))
            for layer in layers:
                if layer.description == '':
                    layer.description = layer.name
            # PackageLayer on 10.0 lacks the additional_files/summary/tags args.
            if arcpy.GetInstallInfo()['Version'] == '10.0':
                arcpy.PackageLayer_management(layers,
                                              os.path.join(os.path.dirname(out_workspace), '{0}.lpk'.format(output_file_name)),
                                              'PRESERVE',
                                              extent=clip_area,
                                              version='10')
            else:
                arcpy.PackageLayer_management(layers,
                                              os.path.join(os.path.dirname(out_workspace), '{0}.lpk'.format(output_file_name)),
                                              'PRESERVE',
                                              extent=clip_area,
                                              version='10',
                                              additional_files=files,
                                              summary=summary,
                                              tags=tags)
            #  Create a thumbnail size PNG of the mxd.
            task_utils.make_thumbnail(layers[0], os.path.join(request['folder'], '_thumb.png'))
    except (RuntimeError, ValueError, arcpy.ExecuteError) as ex:
        status_writer.send_state(status.STAT_FAILED, repr(ex))
        return

    # Update state if necessary.
    if errors > 0 or skipped:
        status_writer.send_state(status.STAT_WARNING, _('{0} results could not be processed').format(errors + skipped))
    # NOTE(review): errors_reasons / skipped_reasons are not defined in this
    # function — presumably module-level globals populated by get_items; verify.
    task_utils.report(os.path.join(request['folder'], '__report.json'), num_results - (skipped + errors), skipped, errors, errors_reasons, skipped_reasons)