import collections
import glob
import os
import shutil
import sys

import requests

import arcpy

# These task entry points target ArcGIS Desktop's Python 2.7 (arcpy.mapping,
# dict.iteritems) and assume objects provided at module level by the
# surrounding task framework: task_utils, status, status_writer, get_items,
# convert_to_kml, pixel_types, verify_ssl, errors_reasons, skipped_reasons,
# and the gettext alias _.


def execute(request):
    """Creates a GeoPDF.

    :param request: the task request as a JSON dict.
    """
    added_to_map = 0
    errors = 0
    skipped = 0

    parameters = request['params']
    num_results, response_index = task_utils.get_result_count(parameters)
    docs = parameters[response_index]['response']['docs']
    input_items = task_utils.get_input_items(docs)
    input_rows = collections.defaultdict(list)
    for doc in docs:
        if 'path' not in doc:
            input_rows[doc['name']].append(doc)
    if num_results > task_utils.CHUNK_SIZE:
        status_writer.send_state(
            status.STAT_FAILED,
            'Reduce results to {0} or less.'.format(task_utils.CHUNK_SIZE))
        return

    map_template = task_utils.get_parameter_value(parameters, 'map_template',
                                                  'value')
    base_map = task_utils.get_parameter_value(parameters, 'base_map', 'value')
    map_title = task_utils.get_parameter_value(parameters, 'map_title',
                                               'value')
    attribute_setting = task_utils.get_parameter_value(parameters,
                                                       'attribute_settings',
                                                       'value')
    author = task_utils.get_parameter_value(parameters, 'map_author', 'value')
    output_file_name = task_utils.get_parameter_value(parameters,
                                                      'output_file_name',
                                                      'value')
    if not output_file_name:
        output_file_name = 'output_pdf'
    try:
        map_view = task_utils.get_parameter_value(parameters, 'map_view',
                                                  'extent')
    except KeyError:
        map_view = None

    temp_folder = os.path.join(request['folder'], 'temp')
    if not os.path.exists(temp_folder):
        os.makedirs(temp_folder)

    if base_map == 'NONE':
        base_layer = None
    else:
        base_layer = arcpy.mapping.Layer(
            os.path.join(os.path.dirname(os.path.dirname(__file__)),
                         'supportfiles', 'basemaps',
                         '{0}.lyr'.format(base_map)))
    mxd_path = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                            'supportfiles', 'frame', map_template)
    mxd = arcpy.mapping.MapDocument(mxd_path)
    data_frame = arcpy.mapping.ListDataFrames(mxd)[0]

    layers = []
    all_layers = []

    if input_rows:
        for name, rows in input_rows.iteritems():
            for row in rows:
                try:
                    # Copy the geometry to a uniquely named in-memory
                    # feature class, then save it out as a layer file.
                    out_name = arcpy.CreateUniqueName(name, 'in_memory')
                    geo_json = row['[geo]']
                    geom = arcpy.AsShape(geo_json)
                    arcpy.CopyFeatures_management(geom, out_name)
                    feature_layer = arcpy.MakeFeatureLayer_management(
                        out_name, os.path.basename(out_name))
                    layer_file = arcpy.SaveToLayerFile_management(
                        feature_layer,
                        os.path.join(temp_folder,
                                     '{0}.lyr'.format(os.path.basename(out_name))))
                    layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))
                    all_layers.append(
                        arcpy.mapping.Layer(layer_file.getOutput(0)))
                    added_to_map += 1
                except KeyError:
                    skipped += 1
                    skipped_reasons[name] = 'No geographic information'
                    continue

    for i, item in enumerate(input_items, 1):
        try:
            # Check whether the item references an MXD data frame.
            map_frame_name = task_utils.get_data_frame_name(item)
            if map_frame_name:
                item = item.split('|')[0].strip()

            dsc = arcpy.Describe(item)
            if dsc.dataType == 'Layer':
                layers.append(arcpy.mapping.Layer(dsc.catalogPath))

            elif dsc.dataType == 'FeatureClass' or dsc.dataType == 'ShapeFile':
                if os.path.basename(item) in [l.name for l in all_layers]:
                    layer_name = '{0}_{1}'.format(os.path.basename(item), i)
                else:
                    layer_name = os.path.basename(item)
                feature_layer = arcpy.MakeFeatureLayer_management(
                    item, layer_name)
                layer_file = arcpy.SaveToLayerFile_management(
                    feature_layer,
                    os.path.join(temp_folder, '{0}.lyr'.format(layer_name)))
                layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))
                all_layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))

            elif dsc.dataType == 'FeatureDataset':
                arcpy.env.workspace = item
                for fc in arcpy.ListFeatureClasses():
                    layer_file = arcpy.SaveToLayerFile_management(
                        arcpy.MakeFeatureLayer_management(
                            fc, '{0}_{1}'.format(fc, i)),
                        os.path.join(temp_folder, '{0}_{1}.lyr'.format(fc, i)))
                    layer = arcpy.mapping.Layer(layer_file.getOutput(0))
                    layer.name = fc
                    layers.append(layer)
                    all_layers.append(layer)

            elif dsc.dataType == 'RasterDataset':
                if os.path.basename(item) in [l.name for l in all_layers]:
                    layer_name = '{0}_{1}'.format(os.path.basename(item), i)
                else:
                    layer_name = os.path.basename(item)
                raster_layer = arcpy.MakeRasterLayer_management(
                    item, layer_name)
                layer_file = arcpy.SaveToLayerFile_management(
                    raster_layer,
                    os.path.join(temp_folder, '{0}.lyr'.format(layer_name)))
                layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))
                all_layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))

            elif dsc.catalogPath.endswith('.kml') or dsc.catalogPath.endswith(
                    '.kmz'):
                existing_names = [l.name for l in all_layers]
                if os.path.splitext(dsc.name)[0] not in existing_names:
                    name = os.path.splitext(dsc.name)[0]
                else:
                    name = '{0}_{1}'.format(os.path.splitext(dsc.name)[0], i)
                arcpy.KMLToLayer_conversion(dsc.catalogPath, temp_folder, name)
                layers.append(
                    arcpy.mapping.Layer(
                        os.path.join(temp_folder, '{0}.lyr'.format(name))))
                all_layers.append(
                    arcpy.mapping.Layer(
                        os.path.join(temp_folder, '{0}.lyr'.format(name))))

            elif dsc.dataType == 'MapDocument':
                input_mxd = arcpy.mapping.MapDocument(item)
                if map_frame_name:
                    df = arcpy.mapping.ListDataFrames(input_mxd,
                                                      map_frame_name)[0]
                    layers = arcpy.mapping.ListLayers(input_mxd, data_frame=df)
                else:
                    layers = arcpy.mapping.ListLayers(input_mxd)

            if layers:
                for layer in layers:
                    status_writer.send_status(
                        _('Adding layer {0}...').format(layer.name))
                    arcpy.mapping.AddLayer(data_frame, layer)
                    added_to_map += 1
                # Reset so the next item starts with an empty layer list.
                layers = []
            else:
                status_writer.send_status(
                    _('Invalid input type: {0}').format(item))
                skipped_reasons[item] = 'Invalid input type'
                skipped += 1
        except Exception as ex:
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors += 1
            errors_reasons[item] = repr(ex)

    if map_view:
        extent = map_view.split(' ')
        new_extent = data_frame.extent
        new_extent.XMin, new_extent.YMin = float(extent[0]), float(extent[1])
        new_extent.XMax, new_extent.YMax = float(extent[2]), float(extent[3])
        data_frame.extent = new_extent
    else:
        data_frame.zoomToSelectedFeatures()

    # Update text elements in map template.
    date_element = arcpy.mapping.ListLayoutElements(mxd, 'TEXT_ELEMENT',
                                                    'date')
    if date_element:
        date_element[0].text = 'Date: {0}'.format(task_utils.get_local_date())

    title_element = arcpy.mapping.ListLayoutElements(mxd, 'TEXT_ELEMENT',
                                                     'title')
    if title_element:
        title_element[0].text = map_title

    author_element = arcpy.mapping.ListLayoutElements(mxd, 'TEXT_ELEMENT',
                                                      'author')
    if author_element:
        author_element[0].text = '{0} {1}'.format(author_element[0].text,
                                                  author)

    if map_template in ('ANSI_D_LND.mxd', 'ANSI_E_LND.mxd'):
        coord_elements = arcpy.mapping.ListLayoutElements(
            mxd, 'TEXT_ELEMENT', 'x*')
        coord_elements += arcpy.mapping.ListLayoutElements(
            mxd, 'TEXT_ELEMENT', 'y*')
        if coord_elements:
            for e in coord_elements:
                new_text = e.text
                if e.name == 'xmin':
                    dms = task_utils.dd_to_dms(data_frame.extent.XMin)
                    if data_frame.extent.XMin > 0:
                        new_text = new_text.replace('W', 'E')
                elif e.name == 'xmax':
                    dms = task_utils.dd_to_dms(data_frame.extent.XMax)
                    if data_frame.extent.XMax > 0:
                        new_text = new_text.replace('W', 'E')
                elif e.name == 'ymin':
                    dms = task_utils.dd_to_dms(data_frame.extent.YMin)
                    if data_frame.extent.YMin < 0:
                        new_text = new_text.replace('N', 'S')
                elif e.name == 'ymax':
                    if data_frame.extent.YMax < 0:
                        new_text = new_text.replace('N', 'S')
                    dms = task_utils.dd_to_dms(data_frame.extent.YMax)

                new_text = new_text.replace('d', str(dms[0]))
                new_text = new_text.replace('m', str(dms[1]))
                new_text = new_text.replace('s', str(dms[2]))
                e.text = new_text

    # Do this now so it does not affect zoom level or extent.
    if base_layer:
        status_writer.send_status(_('Adding basemap {0}...').format(base_map))
        arcpy.mapping.AddLayer(data_frame, base_layer, 'BOTTOM')

    if added_to_map > 0:
        status_writer.send_status(_('Exporting to PDF...'))
        arcpy.mapping.ExportToPDF(mxd,
                                  os.path.join(
                                      request['folder'],
                                      '{0}.pdf'.format(output_file_name)),
                                  layers_attributes=attribute_setting)
        # Create a thumbnail size PNG of the mxd.
        task_utils.make_thumbnail(
            mxd, os.path.join(request['folder'], '_thumb.png'), False)
    else:
        status_writer.send_state(status.STAT_FAILED,
                                 _('No results can be exported to PDF'))
        task_utils.report(os.path.join(request['folder'], '__report.json'),
                          added_to_map,
                          skipped,
                          skipped_details=skipped_reasons)
        return

    # Update state if necessary.
    if skipped > 0 or errors > 0:
        status_writer.send_state(
            status.STAT_WARNING,
            _('{0} results could not be processed').format(skipped + errors))
    task_utils.report(os.path.join(request['folder'],
                                   '__report.json'), added_to_map, skipped,
                      errors, errors_reasons, skipped_reasons)
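

# Usage sketch (illustrative): the approximate shape of the request dict the
# GeoPDF task above reads. Every value is hypothetical, and the name/value
# layout of 'params' is an assumption inferred from the
# task_utils.get_parameter_value calls; actually invoking execute() also
# requires ArcGIS Desktop (arcpy.mapping) and the task framework globals.
example_geopdf_request = {
    'folder': '/tmp/geopdf_job',  # hypothetical job folder
    'params': [
        {'name': 'map_template', 'value': 'ANSI_D_LND.mxd'},
        {'name': 'base_map', 'value': 'NONE'},
        {'name': 'map_title', 'value': 'Sample Map'},
        {'name': 'attribute_settings', 'value': ''},
        {'name': 'map_author', 'value': 'Jane Doe'},
        {'name': 'output_file_name', 'value': 'output_pdf'},
        {'response': {'docs': [], 'numFound': 0}},  # search results to render
    ],
}
# execute(example_geopdf_request)  # run only inside the task framework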

# Example 2

def execute(request):
    """Mosaics input raster datasets into a new raster dataset.
    :param request: json as a dict.
    """
    parameters = request['params']
    out_coordinate_system = task_utils.get_parameter_value(parameters, 'output_projection', 'code')
    # Advanced options
    output_raster_format = task_utils.get_parameter_value(parameters, 'raster_format', 'value')
    compression_method = task_utils.get_parameter_value(parameters, 'compression_method', 'value')
    compression_quality = task_utils.get_parameter_value(parameters, 'compression_quality', 'value')
    output_file_name = task_utils.get_parameter_value(parameters, 'output_file_name', 'value')
    if not output_file_name:
        output_file_name = 'output'
    arcpy.env.compression = '{0} {1}'.format(compression_method, compression_quality)

    clip_area = None
    if not output_raster_format == 'MosaicDataset':
        # Get the clip region as an extent object.
        try:
            clip_area_wkt = task_utils.get_parameter_value(parameters, 'processing_extent', 'wkt')
            if not clip_area_wkt:
                clip_area_wkt = 'POLYGON ((-180 -90, -180 90, 180 90, 180 -90, -180 -90))'
            if not out_coordinate_system == '0':
                clip_area = task_utils.get_clip_region(clip_area_wkt, out_coordinate_system)
            else:
                clip_area = task_utils.get_clip_region(clip_area_wkt)
        except KeyError:
            pass

    status_writer.send_status(_('Setting the output workspace...'))
    out_workspace = os.path.join(request['folder'], 'temp')
    if not os.path.exists(out_workspace):
        os.makedirs(out_workspace)
    if output_raster_format == 'FileGDB' or output_raster_format == 'MosaicDataset':
        out_workspace = arcpy.CreateFileGDB_management(out_workspace, 'output.gdb').getOutput(0)
    arcpy.env.workspace = out_workspace

    status_writer.send_status(_('Starting to process...'))
    num_results, response_index = task_utils.get_result_count(parameters)
    # Accumulators for results gathered across query chunks.
    raster_items, pixels, skipped = [], [], 0
    bands = {}
    if num_results > task_utils.CHUNK_SIZE:
        # Query the index for results in groups of 25.
        query_index = task_utils.QueryIndex(parameters[response_index])
        fl = query_index.fl
        query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1], '/select?&wt=json', fl)
        fq = query_index.get_fq()
        if fq:
            groups = task_utils.grouper(range(0, num_results), task_utils.CHUNK_SIZE, '')
            query += fq
        elif 'ids' in parameters[response_index]:
            groups = task_utils.grouper(list(parameters[response_index]['ids']), task_utils.CHUNK_SIZE, '')
        else:
            groups = task_utils.grouper(range(0, num_results), task_utils.CHUNK_SIZE, '')

        headers = {'x-access-token': task_utils.get_security_token(request['owner'])}
        for group in groups:
            if fq:
                results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers)
            elif 'ids' in parameters[response_index]:
                results = requests.get(query + '{0}&ids={1}'.format(fl, ','.join(group)), headers=headers)
            else:
                results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers)

            input_items = task_utils.get_input_items(results.json()['response']['docs'])
            if not input_items:
                input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])
            # Accumulate results across all chunks.
            items, pix, bnds, skip = get_items(input_items)
            raster_items.extend(items)
            pixels.extend(pix)
            bands.update(bnds)
            skipped += skip
    else:
        input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])
        raster_items, pixels, bands, skipped = get_items(input_items)

    if not raster_items:
        if skipped == 0:
            status_writer.send_state(status.STAT_FAILED, _('Invalid input types'))
            skipped_reasons['All Items'] = _('Invalid input types')
            task_utils.report(os.path.join(request['folder'], '__report.json'), len(raster_items), num_results, skipped_details=skipped_reasons)
            return
        else:
            status_writer.send_state(status.STAT_WARNING, _('{0} results could not be processed').format(skipped))
            task_utils.report(os.path.join(request['folder'], '__report.json'), len(raster_items), skipped, skipped_details=skipped_reasons)
            return

    # Get most common pixel type.
    pixel_type = pixel_types[max(set(pixels), key=pixels.count)]
    if output_raster_format in ('FileGDB', 'GRID', 'MosaicDataset'):
        output_name = arcpy.ValidateTableName('mosaic', out_workspace)
    else:
        output_name = '{0}.{1}'.format(arcpy.ValidateTableName('mosaic', out_workspace)[:9], output_raster_format.lower())
        status_writer.send_status(output_name)

    if output_raster_format == 'MosaicDataset':
        try:
            status_writer.send_status(_('Generating {0}. Large input {1} will take longer to process.').format('Mosaic', 'rasters'))
            if out_coordinate_system == '0':
                out_coordinate_system = raster_items[0]
            else:
                out_coordinate_system = None
            mosaic_ds = arcpy.CreateMosaicDataset_management(out_workspace,
                                                             output_name,
                                                             out_coordinate_system,
                                                             max(bands),
                                                             pixel_type)
            arcpy.AddRastersToMosaicDataset_management(mosaic_ds, 'Raster Dataset', raster_items)
            arcpy.MakeMosaicLayer_management(mosaic_ds, 'mosaic_layer')
            layer_object = arcpy.mapping.Layer('mosaic_layer')
            task_utils.make_thumbnail(layer_object, os.path.join(request['folder'], '_thumb.png'))
        except arcpy.ExecuteError:
            status_writer.send_state(status.STAT_FAILED, arcpy.GetMessages(2))
            return
    else:
        try:
            if len(bands) > 1:
                status_writer.send_state(status.STAT_FAILED, _('Input rasters must have the same number of bands'))
                return
            status_writer.send_status(_('Generating {0}. Large input {1} will take longer to process.').format('Mosaic', 'rasters'))
            if out_coordinate_system == '0':
                out_coordinate_system = None
            if clip_area:
                ext = '{0} {1} {2} {3}'.format(clip_area.XMin, clip_area.YMin, clip_area.XMax, clip_area.YMax)
                tmp_mosaic = arcpy.MosaicToNewRaster_management(
                    raster_items,
                    out_workspace,
                    'tm',
                    out_coordinate_system,
                    pixel_type,
                    number_of_bands=bands.keys()[0]
                )
                status_writer.send_status(_('Clipping...'))
                out_mosaic = arcpy.Clip_management(tmp_mosaic, ext, output_name)
                arcpy.Delete_management(tmp_mosaic)
            else:
                out_mosaic = arcpy.MosaicToNewRaster_management(raster_items,
                                                                out_workspace,
                                                                output_name,
                                                                out_coordinate_system,
                                                                pixel_type, number_of_bands=bands.keys()[0])
            arcpy.MakeRasterLayer_management(out_mosaic, 'mosaic_layer')
            layer_object = arcpy.mapping.Layer('mosaic_layer')
            task_utils.make_thumbnail(layer_object, os.path.join(request['folder'], '_thumb.png'))
        except arcpy.ExecuteError:
            status_writer.send_state(status.STAT_FAILED, arcpy.GetMessages(2))
            return

    if arcpy.env.workspace.endswith('.gdb'):
        out_workspace = os.path.dirname(arcpy.env.workspace)
    zip_file = task_utils.zip_data(out_workspace, '{0}.zip'.format(output_file_name))
    shutil.move(zip_file, os.path.join(os.path.dirname(out_workspace), os.path.basename(zip_file)))

    # Update state if necessary.
    if skipped > 0:
        status_writer.send_state(status.STAT_WARNING, _('{0} results could not be processed').format(skipped))
    task_utils.report(os.path.join(request['folder'], '__report.json'), len(raster_items), skipped, skipped_details=skipped_reasons)
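

# Paging sketch: the task_utils.grouper(...) calls above split result offsets
# (or ids) into CHUNK_SIZE-sized groups padded with '', and each group drives
# one "&rows={0}&start={1}" page of the query. grouper itself is framework
# code; the stand-in below follows the classic itertools recipe and is only
# an assumption about its behavior.
try:
    from itertools import zip_longest  # Python 3
except ImportError:
    from itertools import izip_longest as zip_longest  # Python 2


def grouper_sketch(iterable, n, fillvalue=None):
    """grouper_sketch('ABCDEFG', 3, 'x') --> ABC DEF Gxx"""
    args = [iter(iterable)] * n
    return zip_longest(*args, fillvalue=fillvalue)


# 60 hypothetical results paged 25 at a time -> start offsets 0, 25, 50.
for group in grouper_sketch(range(60), 25, ''):
    print('&rows={0}&start={1}'.format(25, group[0]))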

# Example 3

def execute(request):
    """Converts each input dataset to kml (.kmz).
    :param request: json as a dict.
    """
    converted = 0
    skipped = 0
    errors = 0
    global result_count
    parameters = request['params']

    out_workspace = os.path.join(request['folder'], 'temp')
    if not os.path.exists(out_workspace):
        os.makedirs(out_workspace)

    # Get the boundary box extent for input to KML tools.
    extent = ''
    try:
        try:
            ext = task_utils.get_parameter_value(parameters,
                                                 'processing_extent', 'wkt')
            if ext:
                sr = task_utils.get_spatial_reference("4326")
                extent = task_utils.from_wkt(ext, sr)
        except KeyError:
            ext = task_utils.get_parameter_value(parameters,
                                                 'processing_extent',
                                                 'feature')
            if ext:
                extent = arcpy.Describe(ext).extent
    except KeyError:
        pass

    # Get the output file name.
    output_file_name = task_utils.get_parameter_value(parameters,
                                                      'output_file_name',
                                                      'value')
    if not output_file_name:
        output_file_name = 'kml_results'

    result_count, response_index = task_utils.get_result_count(parameters)
    # Query the index for results in groups of 25.
    query_index = task_utils.QueryIndex(parameters[response_index])
    fl = query_index.fl
    query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1], '/select?&wt=json',
                               fl)
    fq = query_index.get_fq()
    if fq:
        groups = task_utils.grouper(range(0, result_count),
                                    task_utils.CHUNK_SIZE, '')
        query += fq
    elif 'ids' in parameters[response_index]:
        groups = task_utils.grouper(list(parameters[response_index]['ids']),
                                    task_utils.CHUNK_SIZE, '')
    else:
        groups = task_utils.grouper(range(0, result_count),
                                    task_utils.CHUNK_SIZE, '')

    # Begin processing
    status_writer.send_percent(0.0, _('Starting to process...'),
                               'convert_to_kml')
    headers = {
        'x-access-token': task_utils.get_security_token(request['owner'])
    }
    for group in groups:
        if fq:
            results = requests.get(
                query +
                "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]),
                headers=headers)
        elif 'ids' in parameters[response_index]:
            results = requests.get(query +
                                   '{0}&ids={1}'.format(fl, ','.join(group)),
                                   headers=headers)
        else:
            results = requests.get(
                query +
                "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]),
                headers=headers)

        docs = results.json()['response']['docs']
        input_items = task_utils.get_input_items(docs)
        if not input_items:
            input_items = task_utils.get_input_items(
                parameters[response_index]['response']['docs'])

        input_rows = collections.defaultdict(list)
        for doc in docs:
            if 'path' not in doc:
                input_rows[doc['name']].append(doc)
        if input_rows:
            result = convert_to_kml(input_rows, out_workspace, extent)
            converted += result[0]
            errors += result[1]
            skipped += result[2]

        if input_items:
            result = convert_to_kml(input_items, out_workspace, extent)
            converted += result[0]
            errors += result[1]
            skipped += result[2]

        if not input_items and not input_rows:
            status_writer.send_state(
                status.STAT_FAILED,
                _('No items to process. Check if items exist.'))
            return

    # Zip up kmz files if more than one.
    if converted > 1:
        status_writer.send_status("Converted: {}".format(converted))
        zip_file = task_utils.zip_data(out_workspace,
                                       '{0}.zip'.format(output_file_name))
        shutil.move(
            zip_file,
            os.path.join(os.path.dirname(out_workspace),
                         os.path.basename(zip_file)))
        shutil.copy2(
            os.path.join(os.path.dirname(os.path.dirname(__file__)),
                         'supportfiles', '_thumb.png'), request['folder'])
    elif converted == 1:
        try:
            kml_file = glob.glob(os.path.join(out_workspace, '*.kmz'))[0]
            tmp_lyr = arcpy.KMLToLayer_conversion(kml_file, out_workspace,
                                                  'kml_layer')
            task_utils.make_thumbnail(
                tmp_lyr.getOutput(0),
                os.path.join(request['folder'], '_thumb.png'))
        except arcpy.ExecuteError:
            pass
        shutil.move(
            kml_file,
            os.path.join(request['folder'], os.path.basename(kml_file)))

    # Update state if necessary.
    if skipped > 0 or errors > 0:
        status_writer.send_state(
            status.STAT_WARNING,
            _('{0} results could not be processed').format(errors + skipped))
    task_utils.report(os.path.join(request['folder'],
                                   '__report.json'), converted, skipped,
                      errors, errors_reasons, skipped_reasons)
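

# Extent sketch: task_utils.from_wkt(ext, sr) above converts a WKT polygon
# into an extent in the given spatial reference. The framework-free
# approximation below recovers only the bounding box from a simple POLYGON
# string (pure string parsing; the real helper returns an arcpy geometry and
# handles projection, so this is an assumption-level illustration).
import re


def wkt_polygon_bbox(wkt):
    """Return (xmin, ymin, xmax, ymax) for a flat 'POLYGON ((...))' string."""
    coords = [float(v) for v in re.findall(r'-?\d+(?:\.\d+)?', wkt)]
    xs, ys = coords[0::2], coords[1::2]
    return min(xs), min(ys), max(xs), max(ys)


print(wkt_polygon_bbox('POLYGON ((-180 -90, -180 90, 180 90, 180 -90, -180 -90))'))
# (-180.0, -90.0, 180.0, 90.0)

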
def execute(request):
    """Package inputs to an Esri map or layer package.
    :param request: json as a dict.
    """
    errors = 0
    skipped = 0
    layers = []
    files = []

    app_folder = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    parameters = request['params']
    out_format = task_utils.get_parameter_value(parameters, 'output_format',
                                                'value')
    summary = task_utils.get_parameter_value(parameters, 'summary')
    tags = task_utils.get_parameter_value(parameters, 'tags')
    output_file_name = task_utils.get_parameter_value(parameters,
                                                      'output_file_name')
    if not output_file_name:
        output_file_name = 'package_results'

    # Get the clip region as an extent object.
    clip_area = None
    try:
        clip_area_wkt = task_utils.get_parameter_value(parameters,
                                                       'processing_extent',
                                                       'wkt')
        clip_area = task_utils.get_clip_region(clip_area_wkt)
    except (KeyError, ValueError):
        pass

    out_workspace = os.path.join(request['folder'], 'temp')
    if not os.path.exists(out_workspace):
        os.makedirs(out_workspace)

    num_results, response_index = task_utils.get_result_count(parameters)
    # Query the index for results in groups of 25.
    query_index = task_utils.QueryIndex(parameters[response_index])
    fl = query_index.fl
    query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1], '/select?&wt=json',
                               fl)
    fq = query_index.get_fq()
    if fq:
        groups = task_utils.grouper(range(0, num_results),
                                    task_utils.CHUNK_SIZE, '')
        query += fq
    elif 'ids' in parameters[response_index]:
        groups = task_utils.grouper(list(parameters[response_index]['ids']),
                                    task_utils.CHUNK_SIZE, '')
    else:
        groups = task_utils.grouper(range(0, num_results),
                                    task_utils.CHUNK_SIZE, '')

    headers = {
        'x-access-token': task_utils.get_security_token(request['owner'])
    }
    status_writer.send_status(_('Starting to process...'))
    for group in groups:
        if fq:
            results = requests.get(
                query +
                "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]),
                verify=verify_ssl,
                headers=headers)
        elif 'ids' in parameters[response_index]:
            results = requests.get(query +
                                   '{0}&ids={1}'.format(fl, ','.join(group)),
                                   verify=verify_ssl,
                                   headers=headers)
        else:
            results = requests.get(
                query +
                "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]),
                verify=verify_ssl,
                headers=headers)

        input_items = task_utils.get_input_items(
            results.json()['response']['docs'])
        if not input_items:
            input_items = task_utils.get_input_items(
                parameters[response_index]['response']['docs'])
        # Accumulate results across all chunks.
        group_layers, group_files, group_errors, group_skipped = get_items(
            input_items, out_workspace)
        layers.extend(group_layers)
        files.extend(group_files)
        errors += group_errors
        skipped += group_skipped

    if errors == num_results:
        status_writer.send_state(status.STAT_FAILED,
                                 _('No results to package'))
        return

    try:
        if out_format == 'MPK':
            shutil.copyfile(
                os.path.join(app_folder, 'supportfiles', 'MapTemplate.mxd'),
                os.path.join(out_workspace, 'output.mxd'))
            mxd = arcpy.mapping.MapDocument(
                os.path.join(out_workspace, 'output.mxd'))
            if mxd.description == '':
                mxd.description = os.path.basename(mxd.filePath)
            df = arcpy.mapping.ListDataFrames(mxd)[0]
            for layer in layers:
                arcpy.mapping.AddLayer(df, layer)
            mxd.save()
            status_writer.send_status(
                _('Generating {0}. Large input {1} will take longer to '
                  'process.').format('MPK', 'results'))
            if arcpy.GetInstallInfo()['Version'] == '10.0':
                arcpy.PackageMap_management(
                    mxd.filePath,
                    os.path.join(os.path.dirname(out_workspace),
                                 '{0}.mpk'.format(output_file_name)),
                    'PRESERVE',
                    extent=clip_area)
            elif arcpy.GetInstallInfo()['Version'] == '10.1':
                arcpy.PackageMap_management(
                    mxd.filePath,
                    os.path.join(os.path.dirname(out_workspace),
                                 '{0}.mpk'.format(output_file_name)),
                    'PRESERVE',
                    extent=clip_area,
                    ArcGISRuntime='RUNTIME',
                    version='10',
                    additional_files=files,
                    summary=summary,
                    tags=tags)
            else:
                arcpy.PackageMap_management(
                    mxd.filePath,
                    os.path.join(os.path.dirname(out_workspace),
                                 '{0}.mpk'.format(output_file_name)),
                    'PRESERVE',
                    extent=clip_area,
                    arcgisruntime='RUNTIME',
                    version='10',
                    additional_files=files,
                    summary=summary,
                    tags=tags)
            # Create a thumbnail-size PNG of the MXD.
            task_utils.make_thumbnail(
                mxd, os.path.join(request['folder'], '_thumb.png'))
        else:
            status_writer.send_status(
                _('Generating {0}. Large input {1} will take longer to '
                  'process.').format('LPK', 'results'))
            for layer in layers:
                if layer.description == '':
                    layer.description = layer.name
            if arcpy.GetInstallInfo()['Version'] == '10.0':
                arcpy.PackageLayer_management(
                    layers,
                    os.path.join(os.path.dirname(out_workspace),
                                 '{0}.lpk'.format(output_file_name)),
                    'PRESERVE',
                    extent=clip_area,
                    version='10')
            else:
                arcpy.PackageLayer_management(
                    layers,
                    os.path.join(os.path.dirname(out_workspace),
                                 '{0}.lpk'.format(output_file_name)),
                    'PRESERVE',
                    extent=clip_area,
                    version='10',
                    additional_files=files,
                    summary=summary,
                    tags=tags)
            # Create a thumbnail-size PNG of the first layer.
            task_utils.make_thumbnail(
                layers[0], os.path.join(request['folder'], '_thumb.png'))
    except (RuntimeError, ValueError, arcpy.ExecuteError) as ex:
        status_writer.send_state(status.STAT_FAILED, repr(ex))
        return

    # Update state if necessary.
    if errors > 0 or skipped > 0:
        status_writer.send_state(
            status.STAT_WARNING,
            _('{0} results could not be processed').format(errors + skipped))
    task_utils.report(os.path.join(request['folder'], '__report.json'),
                      num_results - (skipped + errors), skipped, errors,
                      errors_reasons, skipped_reasons)
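

# Version-dispatch sketch: PackageMap_management gained keyword arguments
# across ArcGIS releases (and 10.1 spells the runtime flag 'ArcGISRuntime',
# later releases 'arcgisruntime'), which is why the code above branches on
# arcpy.GetInstallInfo()['Version']. The helper below isolates that pattern;
# it mirrors the branches above and needs no arcpy to run.
def package_map_kwargs(version, clip_area, files, summary, tags):
    kwargs = {'extent': clip_area}
    if version == '10.0':
        return kwargs  # 10.0 supports only the extent argument here.
    kwargs.update(version='10', additional_files=files,
                  summary=summary, tags=tags)
    if version == '10.1':
        kwargs['ArcGISRuntime'] = 'RUNTIME'
    else:
        kwargs['arcgisruntime'] = 'RUNTIME'
    return kwargs


print(sorted(package_map_kwargs('10.3', None, [], '', '')))

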
def execute(request):
    """Mosaics input raster datasets into a new raster dataset or mosaic dataset.
    :param request: json as a dict.
    """
    status_writer = status.Writer()
    parameters = request['params']
    target_workspace = task_utils.get_parameter_value(parameters, 'target_workspace', 'value')
    output_name = task_utils.get_parameter_value(parameters, 'output_dataset_name', 'value')
    out_coordinate_system = task_utils.get_parameter_value(parameters, 'output_projection', 'code')
    # Advanced options
    output_raster_format = task_utils.get_parameter_value(parameters, 'raster_format', 'value')
    compression_method = task_utils.get_parameter_value(parameters, 'compression_method', 'value')
    compression_quality = task_utils.get_parameter_value(parameters, 'compression_quality', 'value')
    arcpy.env.compression = '{0} {1}'.format(compression_method, compression_quality)

    if output_raster_format in ('FileGDB', 'MosaicDataset'):
        if not os.path.splitext(target_workspace)[1] in ('.gdb', '.mdb', '.sde'):
            status_writer.send_state(status.STAT_FAILED, _('Target workspace must be a geodatabase'))
            return

    task_folder = request['folder']
    if not os.path.exists(task_folder):
        os.makedirs(task_folder)

    clip_area = None
    if not output_raster_format == 'MosaicDataset':
        # Get the clip region as an extent object.
        try:
            clip_area_wkt = task_utils.get_parameter_value(parameters, 'processing_extent', 'wkt')
            if not clip_area_wkt:
                clip_area_wkt = 'POLYGON ((-180 -90, -180 90, 180 90, 180 -90, -180 -90))'
            if not out_coordinate_system == '0':
                clip_area = task_utils.get_clip_region(clip_area_wkt, out_coordinate_system)
            else:
                clip_area = task_utils.get_clip_region(clip_area_wkt)
        except KeyError:
            pass

    status_writer.send_status(_('Setting the output workspace...'))
    if not os.path.exists(target_workspace):
        status_writer.send_state(status.STAT_FAILED, _('Target workspace does not exist'))
        return
    arcpy.env.workspace = target_workspace

    status_writer.send_status(_('Starting to process...'))
    num_results, response_index = task_utils.get_result_count(parameters)
    # Accumulators for results gathered across query chunks.
    raster_items, pixels, skipped = [], [], 0
    bands = {}
    if num_results > task_utils.CHUNK_SIZE:
        # Query the index for results in groups of 25.
        query_index = task_utils.QueryIndex(parameters[response_index])
        fl = query_index.fl
        query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1], '/select?&wt=json', fl)
        fq = query_index.get_fq()
        if fq:
            groups = task_utils.grouper(range(0, num_results), task_utils.CHUNK_SIZE, '')
            query += fq
        elif 'ids' in parameters[response_index]:
            groups = task_utils.grouper(list(parameters[response_index]['ids']), task_utils.CHUNK_SIZE, '')
        else:
            groups = task_utils.grouper(range(0, num_results), task_utils.CHUNK_SIZE, '')

        headers = {'x-access-token': task_utils.get_security_token(request['owner'])}
        for group in groups:
            if fq:
                results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers)
            elif 'ids' in parameters[response_index]:
                results = requests.get(query + '{0}&ids={1}'.format(fl, ','.join(group)), headers=headers)
            else:
                results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers)

            input_items = task_utils.get_input_items(results.json()['response']['docs'])
            if not input_items:
                input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])
            # Accumulate results across all chunks.
            items, pix, bnds, skip = get_items(input_items)
            raster_items.extend(items)
            pixels.extend(pix)
            bands.update(bnds)
            skipped += skip
    else:
        input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])
        raster_items, pixels, bands, skipped = get_items(input_items)

    if not raster_items:
        if skipped == 0:
            status_writer.send_state(status.STAT_FAILED, _('Invalid input types'))
            skipped_reasons['All Items'] = _('Invalid input types')
            task_utils.report(os.path.join(request['folder'], '__report.json'), len(raster_items), num_results, skipped_details=skipped_reasons)
            return
        else:
            status_writer.send_state(status.STAT_WARNING, _('{0} results could not be processed').format(skipped))
            task_utils.report(os.path.join(request['folder'], '__report.json'), len(raster_items), skipped, skipped_details=skipped_reasons)
            return

    # Get most common pixel type.
    pixel_type = pixel_types[max(set(pixels), key=pixels.count)]
    if output_raster_format in ('FileGDB', 'GRID', 'MosaicDataset'):
        output_name = arcpy.ValidateTableName(output_name, target_workspace)
    else:
        output_name = '{0}.{1}'.format(arcpy.ValidateTableName(output_name, target_workspace), output_raster_format.lower())

    if arcpy.Exists(os.path.join(target_workspace, output_name)):
        status_writer.send_state(status.STAT_FAILED, _('Output dataset already exists.'))
        return

    if output_raster_format == 'MosaicDataset':
        try:
            status_writer.send_status(_('Generating {0}. Large input {1} will take longer to process.').format('Mosaic', 'rasters'))
            if out_coordinate_system == '0':
                out_coordinate_system = raster_items[0]
            else:
                out_coordinate_system = None
            mosaic_ds = arcpy.CreateMosaicDataset_management(target_workspace,
                                                             output_name,
                                                             out_coordinate_system,
                                                             max(bands),
                                                             pixel_type)
            arcpy.AddRastersToMosaicDataset_management(mosaic_ds, 'Raster Dataset', raster_items)
            arcpy.MakeMosaicLayer_management(mosaic_ds, 'mosaic_layer')
            layer_object = arcpy.mapping.Layer('mosaic_layer')
            task_utils.make_thumbnail(layer_object, os.path.join(request['folder'], '_thumb.png'))
        except arcpy.ExecuteError:
            skipped += 1
            skipped_reasons['All Items'] = arcpy.GetMessages(2)
    else:
        try:
            if len(bands) > 1:
                status_writer.send_state(status.STAT_FAILED, _('Input rasters must have the same number of bands'))
                return
            if out_coordinate_system == '0':
                out_coordinate_system = None
            status_writer.send_status(_('Generating {0}. Large input {1} will take longer to process.').format('Mosaic', 'rasters'))
            if clip_area:
                ext = '{0} {1} {2} {3}'.format(clip_area.XMin, clip_area.YMin, clip_area.XMax, clip_area.YMax)
                tmp_mosaic = arcpy.MosaicToNewRaster_management(
                    raster_items,
                    target_workspace,
                    'tmpMosaic',
                    out_coordinate_system,
                    pixel_type,
                    number_of_bands=bands.keys()[0]
                )
                status_writer.send_status(_('Clipping...'))
                out_mosaic = arcpy.Clip_management(tmp_mosaic, ext, output_name)
                arcpy.Delete_management(tmp_mosaic)
            else:
                out_mosaic = arcpy.MosaicToNewRaster_management(raster_items,
                                                                target_workspace,
                                                                output_name,
                                                                out_coordinate_system,
                                                                pixel_type,
                                                                number_of_bands=bands.keys()[0],
                                                                mosaic_method='BLEND')
            arcpy.MakeRasterLayer_management(out_mosaic, 'mosaic_layer')
            layer_object = arcpy.mapping.Layer('mosaic_layer')
            task_utils.make_thumbnail(layer_object, os.path.join(request['folder'], '_thumb.png'))
        except arcpy.ExecuteError:
            skipped += 1
            skipped_reasons['All Items'] = arcpy.GetMessages(2)

    # Update state if necessary.
    if skipped > 0:
        status_writer.send_state(status.STAT_WARNING, _('{0} results could not be processed').format(skipped))
    task_utils.report(os.path.join(request['folder'], '__report.json'), len(raster_items), skipped, skipped_details=skipped_reasons)
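

# Pixel-type sketch: max(set(pixels), key=pixels.count) above picks the most
# common pixel depth reported by get_items() before the mosaic is created.
# pixel_types is a framework mapping; the one below is a hypothetical
# stand-in purely to make the idiom runnable.
pixel_types_demo = {'8': '8_BIT_UNSIGNED', '16': '16_BIT_UNSIGNED', '32': '32_BIT_FLOAT'}
pixels_demo = ['8', '16', '16', '32', '16']
most_common = max(set(pixels_demo), key=pixels_demo.count)
print(pixel_types_demo[most_common])  # 16_BIT_UNSIGNED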