Example 1
def execute(request):
    """Clips selected search results using the clip geometry.
    :param request: json as a dict.
    """
    clipped = 0
    errors = 0
    skipped = 0
    global result_count
    parameters = request['params']

    # Retrieve clip geometry.
    try:
        clip_area = task_utils.get_parameter_value(parameters, 'clip_geometry', 'wkt')
        if not clip_area:
            clip_area = 'POLYGON ((-180 -90, -180 90, 180 90, 180 -90, -180 -90))'
    except KeyError:
        clip_area = 'POLYGON ((-180 -90, -180 90, 180 90, 180 -90, -180 -90))'

    # Retrieve the coordinate system code.
    out_coordinate_system = int(task_utils.get_parameter_value(parameters, 'output_projection', 'code'))

    # Retrieve the output format, the create_mxd parameter, and the output file name.
    out_format = task_utils.get_parameter_value(parameters, 'output_format', 'value')
    create_mxd = task_utils.get_parameter_value(parameters, 'create_mxd', 'value')
    output_file_name = task_utils.get_parameter_value(parameters, 'output_file_name', 'value')
    if not output_file_name:
        output_file_name = 'clip_results'

    # Create the temporary workspace.
    out_workspace = os.path.join(request['folder'], 'temp')
    if not os.path.exists(out_workspace):
        os.makedirs(out_workspace)

    # Set the output coordinate system.
    if out_coordinate_system != 0:  # 0 means 'Same as Input'
        out_sr = task_utils.get_spatial_reference(out_coordinate_system)
        arcpy.env.outputCoordinateSystem = out_sr

    # Create the clip polygon geometry object in WGS84 projection.
    gcs_sr = task_utils.get_spatial_reference(4326)
    gcs_clip_poly = task_utils.from_wkt(clip_area, gcs_sr)
    if gcs_clip_poly.area <= 0:
        gcs_clip_poly = task_utils.from_wkt('POLYGON ((-180 -90, -180 90, 180 90, 180 -90, -180 -90))', gcs_sr)

    # Set the output workspace.
    status_writer.send_status(_('Setting the output workspace...'))
    if out_format != 'SHP':
        out_workspace = arcpy.CreateFileGDB_management(out_workspace, 'output.gdb').getOutput(0)
    arcpy.env.workspace = out_workspace

    # Query the index for results in groups of 25.
    headers = {'x-access-token': task_utils.get_security_token(request['owner'])}
    result_count, response_index = task_utils.get_result_count(parameters)
    query_index = task_utils.QueryIndex(parameters[response_index])
    fl = query_index.fl
    query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1], '/select?&wt=json', fl)
    fq = query_index.get_fq()
    if fq:
        groups = task_utils.grouper(range(0, result_count), task_utils.CHUNK_SIZE, '')
        query += fq
    elif 'ids' in parameters[response_index]:
        groups = task_utils.grouper(list(parameters[response_index]['ids']), task_utils.CHUNK_SIZE, '')
    else:
        groups = task_utils.grouper(range(0, result_count), task_utils.CHUNK_SIZE, '')

    # Begin processing
    status_writer.send_percent(0.0, _('Starting to process...'), 'clip_data')
    for group in groups:
        if fq:
            results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers)
        elif 'ids' in parameters[response_index]:
            results = requests.get(query + '{0}&ids={1}'.format(fl, ','.join(group)), headers=headers)
        else:
            results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers)

        docs = results.json()['response']['docs']
        input_items = task_utils.get_input_items(docs)
        if not input_items:
            input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])

        input_rows = collections.defaultdict(list)
        for doc in docs:
            if 'path' not in doc:
            input_rows[doc['name']].append(doc)
        if input_rows:
            result = clip_data(input_rows, out_workspace, out_coordinate_system, gcs_sr, gcs_clip_poly, out_format)
            clipped += result[0]
            errors += result[1]
            skipped += result[2]

        if input_items:
            result = clip_data(input_items, out_workspace, out_coordinate_system, gcs_sr, gcs_clip_poly, out_format)
            clipped += result[0]
            errors += result[1]
            skipped += result[2]

        if not input_items and not input_rows:
            status_writer.send_state(status.STAT_FAILED, _('No items to process. Check if items exist.'))
            return

    if arcpy.env.workspace.endswith('.gdb'):
        out_workspace = os.path.dirname(arcpy.env.workspace)
    if clipped > 0:
        try:
            if out_format == 'MPK':
                mxd_template = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'supportfiles', 'MapTemplate.mxd')
                mxd = task_utils.create_mxd(out_workspace, mxd_template, 'output')
                status_writer.send_status(_("Packaging results..."))
                task_utils.create_mpk(out_workspace, mxd, files_to_package)
                shutil.move(os.path.join(out_workspace, 'output.mpk'),
                            os.path.join(os.path.dirname(out_workspace), '{0}.mpk'.format(output_file_name)))
            elif out_format == 'LPK':
                status_writer.send_status(_("Packaging results..."))
                task_utils.create_lpk(out_workspace, output_file_name, files_to_package)
            elif out_format == 'KML':
                task_utils.convert_to_kml(os.path.join(out_workspace, "output.gdb"))
                arcpy.env.workspace = ''
                try:
                    arcpy.Delete_management(os.path.join(out_workspace, "output.gdb"))
                except arcpy.ExecuteError:
                    pass
                zip_file = task_utils.zip_data(out_workspace, '{0}.zip'.format(output_file_name))
                shutil.move(zip_file, os.path.join(os.path.dirname(out_workspace), os.path.basename(zip_file)))
            else:
                if create_mxd:
                    mxd_template = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'supportfiles', 'MapTemplate.mxd')
                    task_utils.create_mxd(out_workspace, mxd_template, 'output')
                zip_file = task_utils.zip_data(out_workspace, '{0}.zip'.format(output_file_name))
                shutil.move(zip_file, os.path.join(os.path.dirname(out_workspace), os.path.basename(zip_file)))
        except arcpy.ExecuteError as ee:
            status_writer.send_state(status.STAT_FAILED, _(ee))
            sys.exit(1)
    else:
        status_writer.send_state(status.STAT_FAILED, _('No output created. Zero inputs were clipped.'))

    # Update state if necessary.
    if errors > 0 or skipped > 0:
        status_writer.send_state(status.STAT_WARNING, _('{0} results could not be processed').format(errors + skipped))
    task_utils.report(os.path.join(request['folder'], '__report.json'), clipped, skipped, errors, errors_reasons, skipped_reasons)
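
All of these examples page through the search index with task_utils.grouper, which is not shown in the snippets. Judging only by how it is called (fixed-size chunks of CHUNK_SIZE, padded with ''), a minimal sketch is the standard itertools grouper recipe; the function below is an assumed reimplementation, not the library's actual code:

from itertools import zip_longest

def grouper(iterable, n, fillvalue=None):
    # Collect items into fixed-length chunks; the final chunk is padded
    # with fillvalue so every group has exactly n elements.
    args = [iter(iterable)] * n
    return zip_longest(*args, fillvalue=fillvalue)

# Usage: page 60 result indices in chunks of 25, as the examples do.
for group in grouper(range(0, 60), 25, ''):
    start = group[0]                     # becomes the Solr 'start' offset
    real = len(group) - group.count('')  # non-padding items in the chunk
    print(start, real)
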
Example 2
def execute(request):
    """Converts each input dataset to kml (.kmz).
    :param request: json as a dict.
    """
    converted = 0
    skipped = 0
    errors = 0
    global result_count
    parameters = request['params']

    out_workspace = os.path.join(request['folder'], 'temp')
    if not os.path.exists(out_workspace):
        os.makedirs(out_workspace)

    # Get the bounding box extent for input to KML tools.
    extent = ''
    try:
        try:
            ext = task_utils.get_parameter_value(parameters,
                                                 'processing_extent', 'wkt')
            if ext:
                sr = task_utils.get_spatial_reference("4326")
                extent = task_utils.from_wkt(ext, sr)
        except KeyError:
            ext = task_utils.get_parameter_value(parameters,
                                                 'processing_extent',
                                                 'feature')
            if ext:
                extent = arcpy.Describe(ext).extent
    except KeyError:
        pass

    # Get the output file name.
    output_file_name = task_utils.get_parameter_value(parameters,
                                                      'output_file_name',
                                                      'value')
    if not output_file_name:
        output_file_name = 'kml_results'

    result_count, response_index = task_utils.get_result_count(parameters)
    # Query the index for results in groups of 25.
    query_index = task_utils.QueryIndex(parameters[response_index])
    fl = query_index.fl
    query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1], '/select?&wt=json',
                               fl)
    fq = query_index.get_fq()
    if fq:
        groups = task_utils.grouper(range(0, result_count),
                                    task_utils.CHUNK_SIZE, '')
        query += fq
    elif 'ids' in parameters[response_index]:
        groups = task_utils.grouper(list(parameters[response_index]['ids']),
                                    task_utils.CHUNK_SIZE, '')
    else:
        groups = task_utils.grouper(range(0, result_count),
                                    task_utils.CHUNK_SIZE, '')

    # Begin processing
    status_writer.send_percent(0.0, _('Starting to process...'),
                               'convert_to_kml')
    headers = {
        'x-access-token': task_utils.get_security_token(request['owner'])
    }
    for group in groups:
        if fq:
            results = requests.get(
                query +
                "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]),
                headers=headers)
        elif 'ids' in parameters[response_index]:
            results = requests.get(query +
                                   '{0}&ids={1}'.format(fl, ','.join(group)),
                                   headers=headers)
        else:
            results = requests.get(
                query +
                "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]),
                headers=headers)

        docs = results.json()['response']['docs']
        input_items = task_utils.get_input_items(docs)
        if not input_items:
            input_items = task_utils.get_input_items(
                parameters[response_index]['response']['docs'])

        input_rows = collections.defaultdict(list)
        for doc in docs:
            if 'path' not in doc:
                input_rows[doc['name']].append(doc)
        if input_rows:
            result = convert_to_kml(input_rows, out_workspace, extent)
            converted += result[0]
            errors += result[1]
            skipped += result[2]

        if input_items:
            result = convert_to_kml(input_items, out_workspace, extent)
            converted += result[0]
            errors += result[1]
            skipped += result[2]

        if not input_items and not input_rows:
            status_writer.send_state(
                status.STAT_FAILED,
                _('No items to process. Check if items exist.'))
            return

    # Zip up kmz files if more than one.
    if converted > 1:
        status_writer.send_status("Converted: {}".format(converted))
        zip_file = task_utils.zip_data(out_workspace,
                                       '{0}.zip'.format(output_file_name))
        shutil.move(
            zip_file,
            os.path.join(os.path.dirname(out_workspace),
                         os.path.basename(zip_file)))
        shutil.copy2(
            os.path.join(os.path.dirname(os.path.dirname(__file__)),
                         'supportfiles', '_thumb.png'), request['folder'])
    elif converted == 1:
        try:
            kml_file = glob.glob(os.path.join(out_workspace, '*.kmz'))[0]
            tmp_lyr = arcpy.KMLToLayer_conversion(kml_file, out_workspace,
                                                  'kml_layer')
            task_utils.make_thumbnail(
                tmp_lyr.getOutput(0),
                os.path.join(request['folder'], '_thumb.png'))
        except arcpy.ExecuteError:
            pass
        shutil.move(
            kml_file,
            os.path.join(request['folder'], os.path.basename(kml_file)))

    # Update state if necessary.
    if skipped > 0 or errors > 0:
        status_writer.send_state(
            status.STAT_WARNING,
            _('{0} results could not be processed').format(errors + skipped))
    task_utils.report(os.path.join(request['folder'],
                                   '__report.json'), converted, skipped,
                      errors, errors_reasons, skipped_reasons)
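
The query string in each example is assembled by concatenating the endpoint taken from sys.argv[2], a '/select?&wt=json' suffix, the field list, an optional filter query, and rows/start paging parameters. A small sketch of that assembly; the endpoint and parameter values below are hypothetical:

def build_page_url(base_url, fl, fq, chunk_size, start):
    # Mirror the concatenation used in the examples above.
    url = '{0}{1}{2}'.format(base_url, '/select?&wt=json', fl)
    if fq:
        url += fq
    return url + '&rows={0}&start={1}'.format(chunk_size, start)

print(build_page_url('http://localhost:8888/solr/v0', '&fl=id,name,path',
                     '&fq=format:raster', 25, 0))
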
Example 3
def execute(request):
    """Builds raster pyramids for input raster datasets.
    :param request: json as a dict.
    """
    processed = 0
    skipped = 0
    parameters = request['params']

    # Get the extent to use when calculating statistics.
    extent = ''
    try:
        try:
            ext = task_utils.get_parameter_value(parameters,
                                                 'processing_extent', 'wkt')
            if ext:
                sr = task_utils.get_spatial_reference("4326")
                extent = task_utils.from_wkt(ext, sr)
        except KeyError:
            ext = task_utils.get_parameter_value(parameters,
                                                 'processing_extent',
                                                 'feature')
            if ext:
                extent = arcpy.Describe(ext).extent
    except KeyError:
        pass

    horizontal_skip_factor = task_utils.get_parameter_value(
        parameters, 'horizontal_skip_factor', 'value')
    vertical_skip_factor = task_utils.get_parameter_value(
        parameters, 'vertical_skip_factor', 'value')
    ignore_pixel_values = task_utils.get_parameter_value(
        parameters, 'ignore_pixel_values', 'value')

    # Create the task folder to hold report files.
    task_folder = request['folder']
    if not os.path.exists(task_folder):
        os.makedirs(task_folder)

    headers = {
        'x-access-token': task_utils.get_security_token(request['owner'])
    }
    num_results, response_index = task_utils.get_result_count(parameters)
    if num_results > task_utils.CHUNK_SIZE:
        # Query the index for results in groups of 25.
        query_index = task_utils.QueryIndex(parameters[response_index])
        fl = query_index.fl
        query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1],
                                   '/select?&wt=json', fl)
        fq = query_index.get_fq()
        if fq:
            groups = task_utils.grouper(range(0, num_results),
                                        task_utils.CHUNK_SIZE, '')
            query += fq
        elif 'ids' in parameters[response_index]:
            groups = task_utils.grouper(
                list(parameters[response_index]['ids']), task_utils.CHUNK_SIZE,
                '')
        else:
            groups = task_utils.grouper(range(0, num_results),
                                        task_utils.CHUNK_SIZE, '')

        # Begin processing
        status_writer.send_percent(0.0, _('Starting to process...'),
                                   'calculate_raster_statistics')
        i = 0.
        for group in groups:
            i += len(group) - group.count('')
            if fq:
                results = requests.get(query + "&rows={0}&start={1}".format(
                    task_utils.CHUNK_SIZE, group[0]),
                                       verify=verify_ssl,
                                       headers=headers)
            elif 'ids' in parameters[response_index]:
                results = requests.get(
                    query + '{0}&ids={1}'.format(fl, ','.join(group)),
                    verify=verify_ssl,
                    headers=headers)
            else:
                results = requests.get(query + "&rows={0}&start={1}".format(
                    task_utils.CHUNK_SIZE, group[0]),
                                       verify=verify_ssl,
                                       headers=headers)

            input_items = task_utils.get_input_items(
                results.json()['response']['docs'])
            if not input_items:
                input_items = task_utils.get_input_items(
                    parameters[response_index]['response']['docs'])

            result = calculate_raster_statistics(input_items, extent,
                                                 horizontal_skip_factor,
                                                 vertical_skip_factor,
                                                 ignore_pixel_values)
            processed += result[0]
            skipped += result[1]
            status_writer.send_percent(
                i / num_results,
                '{0}: {1:.0f}%'.format("Processed", i / num_results * 100),
                'calculate_raster_statistics')
    else:
        input_items = task_utils.get_input_items(
            parameters[response_index]['response']['docs'])
        processed, skipped = calculate_raster_statistics(
            input_items, extent, horizontal_skip_factor, vertical_skip_factor,
            ignore_pixel_values, True)

    # Update state if necessary.
    if skipped > 0:
        status_writer.send_state(
            status.STAT_WARNING,
            _('{0} results could not be processed').format(skipped))
    task_utils.report(os.path.join(request['folder'], '__report.json'),
                      processed,
                      skipped,
                      skipped_details=skipped_reasons)
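
The percent reporting in this example counts only the real (non-padding) entries of each padded chunk and divides by the total result count. The same arithmetic in isolation, as a self-contained sketch:

from itertools import zip_longest

def progress_fractions(groups, num_results):
    # i accumulates the number of real items processed so far;
    # '' entries are the grouper's padding and are not counted.
    i = 0.0
    for group in groups:
        i += len(group) - group.count('')
        yield i / num_results

groups = zip_longest(*[iter(range(0, 60))] * 25, fillvalue='')
for fraction in progress_fractions(groups, 60):
    print('{0}: {1:.0f}%'.format('Processed', fraction * 100))
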
Example 4
def execute(request):
    """Converts each input dataset to kml (.kmz).
    :param request: json as a dict.
    """
    converted = 0
    skipped = 0
    errors = 0
    global result_count
    parameters = request["params"]

    out_workspace = os.path.join(request["folder"], "temp")
    if not os.path.exists(out_workspace):
        os.makedirs(out_workspace)

    # Get the bounding box extent for input to KML tools.
    extent = ""
    try:
        try:
            ext = task_utils.get_parameter_value(parameters, "processing_extent", "wkt")
            if ext:
                sr = task_utils.get_spatial_reference("4326")
                extent = task_utils.from_wkt(ext, sr)
        except KeyError:
            ext = task_utils.get_parameter_value(parameters, "processing_extent", "feature")
            if ext:
                extent = arcpy.Describe(ext).extent
    except KeyError:
        pass

    # Get the output file name.
    output_file_name = task_utils.get_parameter_value(parameters, "output_file_name", "value")
    if not output_file_name:
        output_file_name = "kml_results"

    result_count, response_index = task_utils.get_result_count(parameters)
    # Query the index for results in groups of 25.
    query_index = task_utils.QueryIndex(parameters[response_index])
    fl = query_index.fl
    query = "{0}{1}{2}".format(sys.argv[2].split("=")[1], "/select?&wt=json", fl)
    fq = query_index.get_fq()
    if fq:
        groups = task_utils.grouper(range(0, result_count), task_utils.CHUNK_SIZE, "")
        query += fq
    elif "ids" in parameters[response_index]:
        groups = task_utils.grouper(list(parameters[response_index]["ids"]), task_utils.CHUNK_SIZE, "")
    else:
        groups = task_utils.grouper(range(0, result_count), task_utils.CHUNK_SIZE, "")

    # Begin processing
    status_writer.send_percent(0.0, _("Starting to process..."), "convert_to_kml")
    headers = {"x-access-token": task_utils.get_security_token(request["owner"])}
    for group in groups:
        if fq:
            results = requests.get(
                query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers
            )
        elif "ids" in parameters[response_index]:
            results = requests.get(query + "{0}&ids={1}".format(fl, ",".join(group)), headers=headers)
        else:
            results = requests.get(
                query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers
            )

        docs = results.json()["response"]["docs"]
        input_items = task_utils.get_input_items(docs)
        if not input_items:
            input_items = task_utils.get_input_items(parameters[response_index]["response"]["docs"])

        input_rows = collections.defaultdict(list)
        for doc in docs:
            if "path" not in doc:
                input_rows[doc["name"]].append(doc)
        if input_rows:
            result = convert_to_kml(input_rows, out_workspace, extent)
            converted += result[0]
            errors += result[1]
            skipped += result[2]

        if input_items:
            result = convert_to_kml(input_items, out_workspace, extent)
            converted += result[0]
            errors += result[1]
            skipped += result[2]

        if not input_items and not input_rows:
            status_writer.send_state(status.STAT_FAILED, _("No items to process. Check if items exist."))
            return

    # Zip up kmz files if more than one.
    if converted > 1:
        status_writer.send_status("Converted: {}".format(converted))
        zip_file = task_utils.zip_data(out_workspace, "{0}.zip".format(output_file_name))
        shutil.move(zip_file, os.path.join(os.path.dirname(out_workspace), os.path.basename(zip_file)))
        shutil.copy2(
            os.path.join(os.path.dirname(os.path.dirname(__file__)), "supportfiles", "_thumb.png"), request["folder"]
        )
    elif converted == 1:
        try:
            kml_file = glob.glob(os.path.join(out_workspace, "*.kmz"))[0]
            tmp_lyr = arcpy.KMLToLayer_conversion(kml_file, out_workspace, "kml_layer")
            task_utils.make_thumbnail(tmp_lyr.getOutput(0), os.path.join(request["folder"], "_thumb.png"))
        except arcpy.ExecuteError:
            pass
        shutil.move(kml_file, os.path.join(request["folder"], os.path.basename(kml_file)))

    # Update state if necessary.
    if skipped > 0 or errors > 0:
        status_writer.send_state(status.STAT_WARNING, _("{0} results could not be processed").format(errors + skipped))
    task_utils.report(
        os.path.join(request["folder"], "__report.json"), converted, skipped, errors, errors_reasons, skipped_reasons
    )
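
The final packaging step in the KML examples calls task_utils.zip_data and then moves the archive one level above the temp workspace. The helper is not shown; below is a rough standard-library stand-in, assuming it archives the workspace contents and returns the archive path:

import os
import shutil
import tempfile
import zipfile

def zip_data(workspace, zip_name):
    # Zip everything under workspace into workspace/zip_name and
    # return the archive's full path (assumed behavior).
    zip_path = os.path.join(workspace, zip_name)
    with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zf:
        for root, _, files in os.walk(workspace):
            for name in files:
                if name == zip_name:
                    continue  # do not add the archive to itself
                full_path = os.path.join(root, name)
                zf.write(full_path, os.path.relpath(full_path, workspace))
    return zip_path

# Usage: zip a scratch workspace and move the archive up one level,
# as the examples do after conversion.
workspace = tempfile.mkdtemp()
open(os.path.join(workspace, 'result.kmz'), 'w').close()
archive = zip_data(workspace, 'kml_results.zip')
shutil.move(archive, os.path.join(os.path.dirname(workspace),
                                  os.path.basename(archive)))
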
Example 5
def execute(request):
    """Converts each input dataset to kml (.kmz).
    :param request: json as a dict.
    """
    converted = 0
    skipped = 0
    errors = 0
    global result_count
    parameters = request['params']

    out_workspace = os.path.join(request['folder'], 'temp')
    if not os.path.exists(out_workspace):
        os.makedirs(out_workspace)

    # Get the bounding box extent for input to KML tools.
    extent = ''
    try:
        try:
            ext = task_utils.get_parameter_value(parameters, 'processing_extent', 'wkt')
            if ext:
                sr = task_utils.get_spatial_reference("4326")
                extent = task_utils.from_wkt(ext, sr)
        except KeyError:
            ext = task_utils.get_parameter_value(parameters, 'processing_extent', 'feature')
            if ext:
                extent = arcpy.Describe(ext).extent
    except KeyError:
        pass

    # Get the output file name.
    output_file_name = task_utils.get_parameter_value(parameters, 'output_file_name', 'value')
    if not output_file_name:
        output_file_name = 'kml_results'

    result_count, response_index = task_utils.get_result_count(parameters)
    # Query the index for results in groups of 25.
    query_index = task_utils.QueryIndex(parameters[response_index])
    fl = query_index.fl
    query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1], '/select?&wt=json', fl)
    fq = query_index.get_fq()
    if fq:
        groups = task_utils.grouper(range(0, result_count), task_utils.CHUNK_SIZE, '')
        query += fq
    elif 'ids' in parameters[response_index]:
        groups = task_utils.grouper(list(parameters[response_index]['ids']), task_utils.CHUNK_SIZE, '')
    else:
        groups = task_utils.grouper(range(0, result_count), task_utils.CHUNK_SIZE, '')

    # Begin processing
    status_writer.send_percent(0.0, _('Starting to process...'), 'convert_to_kml')
    headers = {'x-access-token': task_utils.get_security_token(request['owner'])}
    for group in groups:
        if fq:
            results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers)
        elif 'ids' in parameters[response_index]:
            results = requests.get(query + '{0}&ids={1}'.format(fl, ','.join(group)), headers=headers)
        else:
            results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers)

        docs = results.json()['response']['docs']
        input_items = task_utils.get_input_items(docs)
        if not input_items:
            input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])

        input_rows = collections.defaultdict(list)
        for doc in docs:
            if 'path' not in doc:
            input_rows[doc['name']].append(doc)
        if input_rows:
            result = convert_to_kml(input_rows, out_workspace, extent)
            converted += result[0]
            errors += result[1]
            skipped += result[2]

        if input_items:
            result = convert_to_kml(input_items, out_workspace, extent)
            converted += result[0]
            errors += result[1]
            skipped += result[2]

        if not input_items and not input_rows:
            status_writer.send_state(status.STAT_FAILED, _('No items to process. Check if items exist.'))
            return

    # Zip up kmz files if more than one.
    if converted > 1:
        status_writer.send_status("Converted: {}".format(converted))
        zip_file = task_utils.zip_data(out_workspace, '{0}.zip'.format(output_file_name))
        shutil.move(zip_file, os.path.join(os.path.dirname(out_workspace), os.path.basename(zip_file)))
        shutil.copy2(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'supportfiles', '_thumb.png'), request['folder'])
    elif converted == 1:
        try:
            kml_file = glob.glob(os.path.join(out_workspace, '*.kmz'))[0]
            tmp_lyr = arcpy.KMLToLayer_conversion(kml_file, out_workspace, 'kml_layer')
            task_utils.make_thumbnail(tmp_lyr.getOutput(0), os.path.join(request['folder'], '_thumb.png'))
        except arcpy.ExecuteError:
            pass
        shutil.move(kml_file, os.path.join(request['folder'], os.path.basename(kml_file)))

    # Update state if necessary.
    if skipped > 0 or errors > 0:
        status_writer.send_state(status.STAT_WARNING, _('{0} results could not be processed').format(errors + skipped))
    task_utils.report(os.path.join(request['folder'], '__report.json'), converted, skipped, errors, errors_reasons, skipped_reasons)
Example 6
def execute(request):
    """Builds raster pyramids for input raster datasets.
    :param request: json as a dict.
    """
    processed = 0
    skipped = 0
    parameters = request['params']

    # Get the extent to use when calculating statistics.
    extent = ''
    try:
        try:
            ext = task_utils.get_parameter_value(parameters, 'processing_extent', 'wkt')
            if ext:
                sr = task_utils.get_spatial_reference("4326")
                extent = task_utils.from_wkt(ext, sr)
        except KeyError:
            ext = task_utils.get_parameter_value(parameters, 'processing_extent', 'feature')
            if ext:
                extent = arcpy.Describe(ext).extent
    except KeyError:
        pass

    horizontal_skip_factor = task_utils.get_parameter_value(parameters, 'horizontal_skip_factor', 'value')
    vertical_skip_factor = task_utils.get_parameter_value(parameters, 'vertical_skip_factor', 'value')
    ignore_pixel_values = task_utils.get_parameter_value(parameters, 'ignore_pixel_values', 'value')

    # Create the task folder to hold report files.
    task_folder = request['folder']
    if not os.path.exists(task_folder):
        os.makedirs(task_folder)

    headers = {'x-access-token': task_utils.get_security_token(request['owner'])}
    num_results, response_index = task_utils.get_result_count(parameters)
    if num_results > task_utils.CHUNK_SIZE:
        # Query the index for results in groups of 25.
        query_index = task_utils.QueryIndex(parameters[response_index])
        fl = query_index.fl
        query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1], '/select?&wt=json', fl)
        fq = query_index.get_fq()
        if fq:
            groups = task_utils.grouper(range(0, num_results), task_utils.CHUNK_SIZE, '')
            query += fq
        elif 'ids' in parameters[response_index]:
            groups = task_utils.grouper(list(parameters[response_index]['ids']), task_utils.CHUNK_SIZE, '')
        else:
            groups = task_utils.grouper(range(0, num_results), task_utils.CHUNK_SIZE, '')

        # Begin processing
        status_writer.send_percent(0.0, _('Starting to process...'), 'calculate_raster_statistics')
        i = 0.
        for group in groups:
            i += len(group) - group.count('')
            if fq:
                results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers)
            elif 'ids' in parameters[response_index]:
                results = requests.get(query + '{0}&ids={1}'.format(fl, ','.join(group)), headers=headers)
            else:
                results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers)

            input_items = task_utils.get_input_items(results.json()['response']['docs'])
            if not input_items:
                input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])

            result = calculate_raster_statistics(input_items, extent, horizontal_skip_factor, vertical_skip_factor, ignore_pixel_values)
            processed += result[0]
            skipped += result[1]
            status_writer.send_percent(i / num_results, '{0}: {1:.0f}%'.format("Processed", i / num_results * 100), 'calculate_raster_statistics')
    else:
        input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])
        processed, skipped = calculate_raster_statistics(input_items, extent, horizontal_skip_factor,
                                                         vertical_skip_factor, ignore_pixel_values, True)

    # Update state if necessary.
    if skipped > 0:
        status_writer.send_state(status.STAT_WARNING, _('{0} results could not be processed').format(skipped))
    task_utils.report(os.path.join(request['folder'], '__report.json'), processed, skipped, skipped_details=skipped_reasons)
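
Every example ends by writing __report.json through task_utils.report. Its output format is not shown anywhere in these snippets; the sketch below is only a plausible reconstruction inferred from the call sites (counts plus optional per-item reason mappings):

import json
import os

def report(report_file, num_processed, num_skipped, num_errors=0,
           errors_details=None, skipped_details=None):
    # Assumed behavior: persist the run counts and any per-item
    # reasons as a machine-readable summary of the task run.
    summary = {
        'processed': num_processed,
        'skipped': num_skipped,
        'errors': num_errors,
        'errors_details': errors_details or {},
        'skipped_details': skipped_details or {},
    }
    with open(report_file, 'w') as fp:
        json.dump(summary, fp, indent=2)

# Usage with hypothetical counts and reasons:
report(os.path.join('.', '__report.json'), 10, 2,
       skipped_details={'raster1.tif': 'statistics not supported'})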