Example #1
def clip_service_layers(layer, clipping_polygon, output_name):
    """Clips map and feature service layers using the clippnig polygon."""
    service_layer = task_utils.ServiceLayer(layer, clipping_polygon.extent.JSON, 'esriGeometryEnvelope')
    arcpy.env.overwriteOutput = True
    oid_groups = service_layer.object_ids
    out_features = None
    g = 0.
    group_cnt = service_layer.object_ids_cnt
    for group in oid_groups:
        g += 1
        group = [oid for oid in group if oid]
        where = '{0} IN {1}'.format(service_layer.oid_field_name, tuple(group))
        url = layer + "/query?where={}&outFields={}&returnGeometry=true&f=json".format(where, '*')
        feature_set = arcpy.FeatureSet()
        try:
            feature_set.load(url)
        except Exception:
            continue
        if not out_features:
            out_features = arcpy.Clip_analysis(feature_set, clipping_polygon, output_name)
        else:
            clip_features = arcpy.Clip_analysis(feature_set, clipping_polygon, 'in_memory/features')
            arcpy.Append_management(clip_features, out_features, 'NO_TEST')
            try:
                arcpy.Delete_management(clip_features)
            except arcpy.ExecuteError:
                pass
        status_writer.send_percent(float(g) / group_cnt * 100, '', 'clip_data_by_features')
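
A minimal usage sketch for clip_service_layers (not part of the original sample), assuming arcpy is importable and the module-level task_utils and status_writer objects the function relies on are already set up; the service URL, extent and output name below are placeholders.

import arcpy

# Hypothetical inputs: a feature service layer URL and a clip polygon built from an extent.
service_url = 'https://example.com/arcgis/rest/services/Parcels/FeatureServer/0'
aoi = arcpy.Extent(-118.5, 33.7, -118.0, 34.1)
clip_polygon = aoi.polygon  # arcpy.Extent exposes its geometry as a Polygon

arcpy.env.workspace = r'C:\temp\output.gdb'  # the output name is resolved against the workspace
clip_service_layers(service_url, clip_polygon, 'parcels_clip')
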
Example #2
def clip_data(input_items, out_workspace, out_coordinate_system, gcs_sr, gcs_clip_poly, out_format):
    """Clips input results."""
    clipped = 0
    errors = 0
    skipped = 0
    fds = None
    global processed_count
    global layer_name
    global existing_fields
    global new_fields
    global field_values

    for ds, out_name in input_items.items():
        try:
            if not isinstance(out_name, list):
                out_name = ''
            # -----------------------------------------------
            # If the item is a service layer, process and continue.
            # -----------------------------------------------
            if ds.startswith('http'):
                try:
                    if out_coordinate_system == 0:
                        service_layer = task_utils.ServiceLayer(ds)
                        wkid = service_layer.wkid
                        out_sr = arcpy.SpatialReference(wkid)
                        arcpy.env.outputCoordinateSystem = out_sr
                    else:
                        out_sr = task_utils.get_spatial_reference(out_coordinate_system)
                        arcpy.env.outputCoordinateSystem = out_sr

                    if not out_sr.name == gcs_sr.name:
                        try:
                            geo_transformation = arcpy.ListTransformations(gcs_sr, out_sr)[0]
                            clip_poly = gcs_clip_poly.projectAs(out_sr, geo_transformation)
                        except (AttributeError, IndexError):
                            try:
                                clip_poly = gcs_clip_poly.projectAs(out_sr)
                            except AttributeError:
                                clip_poly = gcs_clip_poly
                        except ValueError:
                            clip_poly = gcs_clip_poly
                    else:
                        clip_poly = gcs_clip_poly

                    arcpy.env.overwriteOutput = True
                    service_layer = task_utils.ServiceLayer(ds, clip_poly.extent.JSON, 'esriGeometryEnvelope')
                    oid_groups = service_layer.object_ids
                    out_features = None
                    g = 0.
                    group_cnt = service_layer.object_ids_cnt
                    for group in oid_groups:
                        g += 1
                        group = [oid for oid in group if oid]
                        where = '{0} IN {1}'.format(service_layer.oid_field_name, tuple(group))
                        url = ds + "/query?where={}&outFields={}&returnGeometry=true&f=json".format(where, '*')
                        feature_set = arcpy.FeatureSet()
                        if not out_name:
                            out_name = service_layer.service_layer_name
                        try:
                            feature_set.load(url)
                        except Exception:
                            continue
                        if not out_features:
                            out_features = arcpy.Clip_analysis(feature_set, clip_poly, out_name)
                        else:
                            clip_features = arcpy.Clip_analysis(feature_set, clip_poly, 'in_memory/features')
                            arcpy.Append_management(clip_features, out_features, 'NO_TEST')
                            try:
                                arcpy.Delete_management(clip_features)
                            except arcpy.ExecuteError:
                                pass
                        status_writer.send_percent(float(g) / group_cnt, '', 'clip_data')
                    processed_count += 1.
                    clipped += 1
                    status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(ds), 'clip_data')
                    continue
                except Exception as ex:
                    status_writer.send_state(status.STAT_WARNING, str(ex))
                    errors_reasons[ds] = ex.message
                    errors += 1
                    continue

            # -----------------------------------------------
            # Check if the path is an MXD data frame type.
            # ------------------------------------------------
            map_frame_name = task_utils.get_data_frame_name(ds)
            if map_frame_name:
                ds = ds.split('|')[0].strip()

            # -------------------------------
            # Is the input a geometry feature
            # -------------------------------
            if isinstance(out_name, list):
                for row in out_name:
                    try:
                        arcpy.env.overwriteOutput = True
                        name = os.path.join(out_workspace, arcpy.ValidateTableName(ds, out_workspace))
                        if out_format == 'SHP':
                            name += '.shp'
                        # Clip the geometry.
                        geo_json = row['[geo]']
                        geom = arcpy.AsShape(geo_json)
                        row.pop('[geo]')
                        if not arcpy.Exists(name):
                            if arcpy.env.outputCoordinateSystem:
                                arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())
                            else:
                                arcpy.env.outputCoordinateSystem = 4326
                                arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())

                            layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                new_fields.append(valid_field)
                                field_values.append(value)
                                try:
                                    arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                                except arcpy.ExecuteError:
                                    arcpy.DeleteField_management(layer_name, valid_field)
                                    arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                        else:
                            if not geom.type.upper() == arcpy.Describe(name).shapeType.upper():
                                name = arcpy.CreateUniqueName(os.path.basename(name), out_workspace)
                                if arcpy.env.outputCoordinateSystem:
                                    arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())
                                else:
                                    arcpy.env.outputCoordinateSystem = 4326
                                    arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())

                                layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                                existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                                new_fields = []
                                field_values = []
                                for field, value in row.items():
                                    valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                    new_fields.append(valid_field)
                                    field_values.append(value)
                                    if valid_field not in existing_fields:
                                        try:
                                            arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                                        except arcpy.ExecuteError:
                                            arcpy.DeleteField_management(layer_name, valid_field)
                                            arcpy.AddField_management(layer_name, valid_field, 'TEXT')

                        clipped_geometry = arcpy.Clip_analysis(geom, gcs_clip_poly, arcpy.Geometry())
                        if clipped_geometry:
                            with arcpy.da.InsertCursor(layer_name, ["SHAPE@"] + new_fields) as icur:
                                icur.insertRow([clipped_geometry[0]] + field_values)
                        status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(row['name']), 'clip_data')
                        processed_count += 1
                        clipped += 1
                    except KeyError:
                        processed_count += 1
                        skipped += 1
                        status_writer.send_state(status.STAT_WARNING, _('Invalid input type: {0}').format(ds))
                        skipped_reasons[ds] = 'Invalid input type'
                    except Exception as ex:
                        processed_count += 1
                        errors += 1
                        errors_reasons[ds] = ex.message
                        continue
                continue

            dsc = arcpy.Describe(ds)
            try:
                if dsc.spatialReference.name == 'Unknown':
                    status_writer.send_state(status.STAT_WARNING, _('{0} has an Unknown projection. Output may be invalid or empty.').format(dsc.name))
            except AttributeError:
                pass

            # --------------------------------------------------------------------
            # If no output coord. system, get output spatial reference from input.
            # --------------------------------------------------------------------
            if out_coordinate_system == 0:
                try:
                    out_sr = dsc.spatialReference
                    arcpy.env.outputCoordinateSystem = out_sr
                except AttributeError:
                    out_sr = task_utils.get_spatial_reference(4326)
                    arcpy.env.outputCoordinateSystem = out_sr
            else:
                out_sr = task_utils.get_spatial_reference(out_coordinate_system)
                arcpy.env.outputCoordinateSystem = out_sr

            # -------------------------------------------------
            # If the item is not a file, project the clip area.
            # -------------------------------------------------
            if dsc.dataType not in ('File', 'TextFile'):
                if not out_sr.name == gcs_sr.name:
                    try:
                        geo_transformation = arcpy.ListTransformations(gcs_sr, out_sr)[0]
                        clip_poly = gcs_clip_poly.projectAs(out_sr, geo_transformation)
                    except (AttributeError, IndexError):
                        try:
                            clip_poly = gcs_clip_poly.projectAs(out_sr)
                        except AttributeError:
                            clip_poly = gcs_clip_poly
                    except ValueError:
                        clip_poly = gcs_clip_poly
                else:
                    clip_poly = gcs_clip_poly
                extent = clip_poly.extent


            # -----------------------------
            # Check the data type and clip.
            # -----------------------------

            # Feature Class or ShapeFile
            if dsc.dataType in ('FeatureClass', 'ShapeFile', 'Shapefile'):
                if out_name == '':
                    name = arcpy.ValidateTableName(dsc.name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                else:
                    name = arcpy.ValidateTableName(out_name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                # Does the input exist in a feature dataset? If so, create the feature dataset if it doesn't exist.
                ws = os.path.dirname(ds)
                if any(ext in ws for ext in ('.gdb', '.mdb', '.sde')):
                    if os.path.splitext(ws)[1] in ('.gdb', '.mdb', '.sde'):
                        arcpy.Clip_analysis(ds, clip_poly, name)
                    else:
                        fds_name = os.path.basename(ws)
                        if not arcpy.Exists(os.path.join(out_workspace, fds_name)):
                            arcpy.CreateFeatureDataset_management(out_workspace, fds_name, dsc.spatialReference)
                        arcpy.Clip_analysis(ds, clip_poly, os.path.join(out_workspace, fds_name, os.path.basename(ds)))
                else:
                    arcpy.Clip_analysis(ds, clip_poly, name)

            # Feature dataset
            elif dsc.dataType == 'FeatureDataset':
                if not out_format == 'SHP':
                    fds_name = os.path.basename(task_utils.create_unique_name(dsc.name, out_workspace))
                    fds = arcpy.CreateFeatureDataset_management(out_workspace, fds_name)
                arcpy.env.workspace = ds
                for fc in arcpy.ListFeatureClasses():
                    try:
                        if not out_format == 'SHP':
                            arcpy.Clip_analysis(fc, clip_poly, task_utils.create_unique_name(fc, fds.getOutput(0)))
                        else:
                            arcpy.Clip_analysis(fc, clip_poly, task_utils.create_unique_name(fc, out_workspace))
                    except arcpy.ExecuteError:
                        pass
                arcpy.env.workspace = out_workspace

            # Raster dataset
            elif dsc.dataType == 'RasterDataset':
                if out_name == '':
                    name = task_utils.create_unique_name(dsc.name, out_workspace)
                else:
                    name = task_utils.create_unique_name(out_name, out_workspace)
                ext = '{0} {1} {2} {3}'.format(extent.XMin, extent.YMin, extent.XMax, extent.YMax)
                arcpy.Clip_management(ds, ext, name, in_template_dataset=clip_poly, clipping_geometry="ClippingGeometry")

            # Layer file
            elif dsc.dataType == 'Layer':
                task_utils.clip_layer_file(dsc.catalogPath, clip_poly, arcpy.env.workspace)

            # Cad drawing dataset
            elif dsc.dataType == 'CadDrawingDataset':
                arcpy.env.workspace = dsc.catalogPath
                cad_wks_name = os.path.splitext(dsc.name)[0]
                for cad_fc in arcpy.ListFeatureClasses():
                    name = task_utils.create_unique_name('{0}_{1}'.format(cad_wks_name, cad_fc), out_workspace)
                    arcpy.Clip_analysis(cad_fc, clip_poly, name)
                arcpy.env.workspace = out_workspace

            # File
            elif dsc.dataType in ('File', 'TextFile'):
                if dsc.catalogPath.endswith('.kml') or dsc.catalogPath.endswith('.kmz'):
                    name = os.path.splitext(dsc.name)[0]
                    kml_layer = arcpy.KMLToLayer_conversion(dsc.catalogPath, arcpy.env.scratchFolder, name)
                    group_layer = arcpy.mapping.Layer(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    for layer in arcpy.mapping.ListLayers(group_layer):
                        if layer.isFeatureLayer:
                            arcpy.Clip_analysis(layer,
                                                gcs_clip_poly,
                                                task_utils.create_unique_name(layer, out_workspace))
                    # Clean up temp KML results.
                    arcpy.Delete_management(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    arcpy.Delete_management(kml_layer[1])
                    del group_layer
                else:
                    if out_name == '':
                        out_name = dsc.name
                    if out_workspace.endswith('.gdb'):
                        f = arcpy.Copy_management(ds, os.path.join(os.path.dirname(out_workspace), out_name))
                    else:
                        f = arcpy.Copy_management(ds, os.path.join(out_workspace, out_name))
                    processed_count += 1.
                    status_writer.send_percent(processed_count / result_count, _('Copied file: {0}').format(dsc.name), 'clip_data')
                    status_writer.send_state(_('Copied file: {0}').format(dsc.name))
                    clipped += 1
                    if out_format in ('LPK', 'MPK'):
                        files_to_package.append(f.getOutput(0))
                    continue

            # Map document
            elif dsc.dataType == 'MapDocument':
                task_utils.clip_mxd_layers(dsc.catalogPath, clip_poly, arcpy.env.workspace, map_frame_name)
            else:
                processed_count += 1.
                status_writer.send_percent(processed_count / result_count, _('Invalid input type: {0}').format(ds), 'clip_data')
                status_writer.send_state(status.STAT_WARNING, _('Invalid input type: {0}').format(ds))
                skipped += 1
                skipped_reasons[ds] = _('Invalid input type: {0}').format(dsc.dataType)
                continue

            processed_count += 1.
            status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(dsc.name), 'clip_data')
            status_writer.send_status(_('Clipped: {0}').format(dsc.name))
            clipped += 1
        # Continue. Process as many as possible.
        except Exception as ex:
            processed_count += 1.
            status_writer.send_percent(processed_count / result_count, _('Skipped: {0}').format(os.path.basename(ds)), 'clip_data')
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors_reasons[ds] = ex.message
            errors += 1
    return clipped, errors, skipped
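
A minimal sketch of a clip_data call (not from the original sample), assuming the module-level globals it uses (processed_count, result_count, status_writer, errors_reasons, skipped_reasons, files_to_package) have been initialised by the surrounding task script; all paths and coordinates are placeholders.

import arcpy

gcs_sr = arcpy.SpatialReference(4326)  # WGS 1984
gcs_clip_poly = arcpy.Extent(-120.0, 34.0, -119.0, 35.0).polygon  # placeholder clip area

# Map each input dataset to an output name (placeholders).
input_items = {r'C:\data\roads.shp': 'roads_clip',
               r'C:\data\imagery.gdb\ortho': 'ortho_clip'}

clipped, errors, skipped = clip_data(input_items,
                                     r'C:\temp\output.gdb',  # out_workspace
                                     0,                       # 0 = keep each input's own coordinate system
                                     gcs_sr,
                                     gcs_clip_poly,
                                     'GDB')                   # out_format
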
Example #3
def convert_to_kml(input_items, out_workspace, extent, show_progress=False):
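    """Converts input items to KML (.kmz) files in the output workspace."""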
    converted = 0
    errors = 0
    skipped = 0
    global processed_count
    global layer_name
    global existing_fields
    global new_fields
    global field_values

    arcpy.env.overwriteOutput = True
    for ds, out_name in input_items.iteritems():
        try:
            # -----------------------------------------------
            # If the item is a service layer, process and continue.
            # -----------------------------------------------
            if ds.startswith('http'):
                try:
                    service_layer = task_utils.ServiceLayer(
                        ds, extent.JSON, 'esriGeometryPolygon')
                    arcpy.env.overwriteOutput = True
                    oid_groups = service_layer.object_ids
                    out_features = None
                    g = 0.
                    group_cnt = service_layer.object_ids_cnt
                    if not arcpy.Exists(os.path.join(out_workspace,
                                                     'temp.gdb')):
                        temp_gdb = arcpy.CreateFileGDB_management(
                            out_workspace, 'temp.gdb')
                        temp_gdb = temp_gdb[0]
                    else:
                        temp_gdb = os.path.join(out_workspace, 'temp.gdb')
                    for group in oid_groups:
                        g += 1
                        group = [oid for oid in group if oid]
                        where = '{0} IN {1}'.format(
                            service_layer.oid_field_name, tuple(group))
                        url = ds + "/query?where={}&outFields={}&returnGeometry=true&f=json&".format(
                            where, '*')
                        feature_set = arcpy.FeatureSet()
                        try:
                            feature_set.load(url)
                        except Exception:
                            continue
                        if not out_features:
                            out_features = arcpy.CopyFeatures_management(
                                feature_set,
                                task_utils.create_unique_name(
                                    out_name, temp_gdb))
                        else:
                            features = arcpy.CopyFeatures_management(
                                feature_set,
                                task_utils.create_unique_name(
                                    out_name, temp_gdb))
                            arcpy.Append_management(features, out_features,
                                                    'NO_TEST')
                            try:
                                arcpy.Delete_management(features)
                            except arcpy.ExecuteError:
                                pass
                        status_writer.send_percent(
                            float(g) / group_cnt * 100, '', 'convert_to_kml')
                    arcpy.MakeFeatureLayer_management(out_features, out_name)
                    arcpy.LayerToKML_conversion(
                        out_name,
                        '{0}.kmz'.format(os.path.join(out_workspace,
                                                      out_name)),
                        1,
                        boundary_box_extent=extent)
                    processed_count += 1.
                    converted += 1
                    status_writer.send_percent(processed_count / result_count,
                                               _('Converted: {0}').format(ds),
                                               'convert_to_kml')
                    continue
                except Exception as ex:
                    status_writer.send_state(status.STAT_WARNING, str(ex))
                    errors += 1
                    errors_reasons[ds] = ex.message
                    continue

            # Is the input an MXD data frame?
            map_frame_name = task_utils.get_data_frame_name(ds)
            if map_frame_name:
                ds = ds.split('|')[0].strip()

            # -------------------------------
            # Is the input a geometry feature
            # -------------------------------
            if isinstance(out_name, list):
                increment = task_utils.get_increment(result_count)
                for row in out_name:
                    try:
                        name = arcpy.ValidateTableName(ds, 'in_memory')
                        name = os.path.join('in_memory', name)
                        # Clip the geometry.
                        geo_json = row['[geo]']
                        geom = arcpy.AsShape(geo_json)
                        row.pop('[geo]')
                        if not arcpy.Exists(name):
                            if arcpy.env.outputCoordinateSystem:
                                layer_name = arcpy.CreateFeatureclass_management(
                                    'in_memory', os.path.basename(name),
                                    geom.type.upper())
                            else:
                                arcpy.env.outputCoordinateSystem = 4326
                                layer_name = arcpy.CreateFeatureclass_management(
                                    'in_memory', os.path.basename(name),
                                    geom.type.upper())
                            # layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                            existing_fields = [
                                f.name for f in arcpy.ListFields(layer_name)
                            ]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(
                                    field, 'in_memory')
                                new_fields.append(valid_field)
                                field_values.append(value)
                                arcpy.AddField_management(
                                    layer_name, valid_field, 'TEXT')
                        else:
                            if not geom.type.upper() == arcpy.Describe(
                                    name).shapeType.upper():
                                name = arcpy.CreateUniqueName(
                                    os.path.basename(name), 'in_memory')
                                if arcpy.env.outputCoordinateSystem:
                                    layer_name = arcpy.CreateFeatureclass_management(
                                        'in_memory', os.path.basename(name),
                                        geom.type.upper())
                                else:
                                    arcpy.env.outputCoordinateSystem = 4326
                                    layer_name = arcpy.CreateFeatureclass_management(
                                        'in_memory', os.path.basename(name),
                                        geom.type.upper())

                                existing_fields = [
                                    f.name
                                    for f in arcpy.ListFields(layer_name)
                                ]
                                new_fields = []
                                field_values = []
                                for field, value in row.iteritems():
                                    valid_field = arcpy.ValidateFieldName(
                                        field, 'in_memory')
                                    new_fields.append(valid_field)
                                    field_values.append(value)
                                    if valid_field not in existing_fields:
                                        arcpy.AddField_management(
                                            layer_name, valid_field, 'TEXT')

                        with arcpy.da.InsertCursor(layer_name, ["SHAPE@"] +
                                                   new_fields) as icur:
                            icur.insertRow([geom] + field_values)

                        arcpy.MakeFeatureLayer_management(
                            layer_name, os.path.basename(name))
                        arcpy.LayerToKML_conversion(
                            os.path.basename(name),
                            '{0}.kmz'.format(
                                os.path.join(out_workspace,
                                             os.path.basename(name))),
                            1,
                            boundary_box_extent=extent)
                        if (processed_count % increment) == 0:
                            status_writer.send_percent(
                                float(processed_count) / result_count,
                                _('Converted: {0}').format(row['name']),
                                'convert_to_kml')
                        processed_count += 1
                        converted += 1
                    except KeyError:
                        processed_count += 1
                        skipped += 1
                        skipped_reasons[ds] = 'Invalid input type'
                        status_writer.send_state(
                            status.STAT_WARNING,
                            _('Invalid input type: {0}').format(ds))
                    except Exception as ex:
                        processed_count += 1
                        errors += 1
                        errors_reasons[ds] = ex.message
                        continue
                del icur
                continue

            dsc = arcpy.Describe(ds)

            if os.path.exists(
                    os.path.join('{0}.kmz'.format(
                        os.path.join(out_workspace, out_name)))):
                out_name = os.path.basename(
                    arcpy.CreateUniqueName(out_name + '.kmz',
                                           out_workspace))[:-4]

            if dsc.dataType == 'FeatureClass':
                arcpy.MakeFeatureLayer_management(ds, dsc.name)
                if out_name == '':
                    out_name = dsc.name
                arcpy.LayerToKML_conversion(
                    dsc.name,
                    '{0}.kmz'.format(os.path.join(out_workspace, out_name)),
                    1,
                    boundary_box_extent=extent)
                converted += 1

            elif dsc.dataType == 'ShapeFile':
                arcpy.MakeFeatureLayer_management(ds, dsc.name[:-4])
                if out_name == '':
                    out_name = dsc.name[:-4]
                arcpy.LayerToKML_conversion(
                    dsc.name[:-4],
                    '{0}.kmz'.format(os.path.join(out_workspace, out_name)),
                    1,
                    boundary_box_extent=extent)
                converted += 1

            elif dsc.dataType == 'RasterDataset':
                arcpy.MakeRasterLayer_management(ds, dsc.name)
                if out_name == '':
                    out_name = dsc.name
                arcpy.LayerToKML_conversion(
                    dsc.name,
                    '{0}.kmz'.format(os.path.join(out_workspace, out_name)),
                    1,
                    boundary_box_extent=extent)
                converted += 1

            elif dsc.dataType == 'Layer':
                if out_name == '':
                    if dsc.name.endswith('.lyr'):
                        out_name = dsc.name[:-4]
                    else:
                        out_name = dsc.name
                arcpy.LayerToKML_conversion(
                    ds,
                    '{0}.kmz'.format(os.path.join(out_workspace, out_name)),
                    1,
                    boundary_box_extent=extent)
                converted += 1

            elif dsc.dataType == 'FeatureDataset':
                arcpy.env.workspace = ds
                for fc in arcpy.ListFeatureClasses():
                    arcpy.MakeFeatureLayer_management(fc, 'tmp_lyr')
                    arcpy.LayerToKML_conversion(
                        'tmp_lyr',
                        '{0}.kmz'.format(os.path.join(out_workspace, fc)),
                        1,
                        boundary_box_extent=extent)
                    converted += 1

            elif dsc.dataType == 'CadDrawingDataset':
                arcpy.env.workspace = dsc.catalogPath
                for cad_fc in arcpy.ListFeatureClasses():
                    if cad_fc.lower() == 'annotation':
                        try:
                            cad_anno = arcpy.ImportCADAnnotation_conversion(
                                cad_fc,
                                arcpy.CreateUniqueName('cadanno',
                                                       arcpy.env.scratchGDB))
                        except arcpy.ExecuteError:
                            cad_anno = arcpy.ImportCADAnnotation_conversion(
                                cad_fc,
                                arcpy.CreateUniqueName('cadanno',
                                                       arcpy.env.scratchGDB),
                                1)
                        arcpy.MakeFeatureLayer_management(cad_anno, 'cad_lyr')
                        name = '{0}_{1}'.format(dsc.name[:-4], cad_fc)
                        arcpy.LayerToKML_conversion(
                            'cad_lyr',
                            '{0}.kmz'.format(os.path.join(out_workspace,
                                                          name)),
                            1,
                            boundary_box_extent=extent)
                        converted += 1
                    else:
                        arcpy.MakeFeatureLayer_management(cad_fc, 'cad_lyr')
                        name = '{0}_{1}'.format(dsc.name[:-4], cad_fc)
                        arcpy.LayerToKML_conversion(
                            'cad_lyr',
                            '{0}.kmz'.format(os.path.join(out_workspace,
                                                          name)),
                            1,
                            boundary_box_extent=extent)
                        converted += 1

            # Map document to KML.
            elif dsc.dataType == 'MapDocument':
                mxd = arcpy.mapping.MapDocument(ds)
                if map_frame_name:
                    data_frames = arcpy.mapping.ListDataFrames(
                        mxd, map_frame_name)
                else:
                    data_frames = arcpy.mapping.ListDataFrames(mxd)
                for df in data_frames:
                    name = '{0}_{1}'.format(dsc.name[:-4], df.name)
                    arcpy.MapToKML_conversion(
                        ds,
                        df.name,
                        '{0}.kmz'.format(os.path.join(out_workspace, name)),
                        extent_to_export=extent)
                converted += 1

            else:
                processed_count += 1
                status_writer.send_percent(
                    processed_count / result_count,
                    _('Invalid input type: {0}').format(dsc.name),
                    'convert_to_kml')
                skipped += 1
                skipped_reasons[ds] = _('Invalid input type: {0}').format(
                    dsc.dataType)
                continue
            processed_count += 1
            status_writer.send_percent(processed_count / result_count,
                                       _('Converted: {0}').format(ds),
                                       'convert_to_kml')
            status_writer.send_status(_('Converted: {0}').format(ds))
        except Exception as ex:
            processed_count += 1
            status_writer.send_percent(processed_count / result_count,
                                       _('Skipped: {0}').format(ds),
                                       'convert_to_kml')
            status_writer.send_status(_('WARNING: {0}').format(repr(ex)))
            errors_reasons[ds] = repr(ex)
            errors += 1

    return converted, errors, skipped
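
A hedged usage sketch for convert_to_kml (not from the original sample), assuming the same surrounding task-script globals are in place; the extent and paths are placeholders. One .kmz is written per input into the output workspace.

import arcpy

aoi = arcpy.Extent(-3.8, 40.3, -3.5, 40.5)  # placeholder area of interest
input_items = {r'C:\data\parks.shp': 'parks'}
converted, errors, skipped = convert_to_kml(input_items, r'C:\temp\kml_out', aoi)
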
def add_to_geodatabase(input_items, out_gdb, is_fds):
    """Adds items to a geodatabase."""
    added = 0
    skipped = 0
    errors = 0
    global processed_count
    global layer_name
    global existing_fields
    global new_fields
    global field_values

    for ds, out_name in input_items.iteritems():
        try:
            # -----------------------------------------------
            # If the item is a service layer, process and continue.
            # -----------------------------------------------
            if ds.startswith('http'):
                try:
                    service_layer = task_utils.ServiceLayer(ds)
                    arcpy.env.overwriteOutput = True
                    oid_groups = service_layer.object_ids
                    out_features = None
                    g = 0.
                    group_cnt = service_layer.object_ids_cnt
                    for group in oid_groups:
                        g += 1
                        group = [oid for oid in group if oid]
                        where = '{0} IN {1}'.format(
                            service_layer.oid_field_name, tuple(group))
                        url = ds + "/query?where={}&outFields={}&returnGeometry=true&geometryType=esriGeometryPolygon&f=json".format(
                            where, '*')
                        feature_set = arcpy.FeatureSet()
                        feature_set.load(url)
                        if not out_features:
                            out_features = arcpy.CopyFeatures_management(
                                feature_set,
                                task_utils.create_unique_name(
                                    out_name, out_gdb))
                        else:
                            features = arcpy.CopyFeatures_management(
                                feature_set,
                                task_utils.create_unique_name(
                                    out_name, out_gdb))
                            arcpy.Append_management(features, out_features,
                                                    'NO_TEST')
                            try:
                                arcpy.Delete_management(features)
                            except arcpy.ExecuteError:
                                pass
                        status_writer.send_percent(
                            float(g) / group_cnt * 100, '',
                            'add_to_geodatabase')
                    processed_count += 1.
                    added += 1
                    status_writer.send_percent(processed_count / result_count,
                                               _('Added: {0}').format(ds),
                                               'add_to_geodatabase')
                    continue
                except Exception as ex:
                    status_writer.send_state(status.STAT_WARNING, str(ex))
                    errors_reasons[ds] = ex.message
                    errors += 1
                    continue

            # ------------------------------
            # Is the input an MXD data frame?
            # ------------------------------
            map_frame_name = task_utils.get_data_frame_name(ds)
            if map_frame_name:
                ds = ds.split('|')[0].strip()

            # -------------------------------
            # Is the input a geometry feature
            # -------------------------------
            if isinstance(out_name, list):
                increment = task_utils.get_increment(result_count)
                for row in out_name:
                    try:
                        name = os.path.join(
                            out_gdb, arcpy.ValidateTableName(ds, out_gdb))
                        # Create the geometry if it exists.
                        geom = None
                        try:
                            geo_json = row['[geo]']
                            geom = arcpy.AsShape(geo_json)
                            row.pop('[geo]')
                        except KeyError:
                            pass

                        if geom:
                            if not arcpy.Exists(name):
                                if arcpy.env.outputCoordinateSystem:
                                    arcpy.CreateFeatureclass_management(
                                        out_gdb, os.path.basename(name),
                                        geom.type.upper())
                                else:
                                    arcpy.env.outputCoordinateSystem = 4326
                                    arcpy.CreateFeatureclass_management(
                                        out_gdb, os.path.basename(name),
                                        geom.type.upper())
                                layer_name = arcpy.MakeFeatureLayer_management(
                                    name, 'flayer_{0}'.format(
                                        os.path.basename(name)))
                                existing_fields = [
                                    f.name
                                    for f in arcpy.ListFields(layer_name)
                                ]
                                new_fields = []
                                field_values = []
                                for field, value in row.iteritems():
                                    valid_field = arcpy.ValidateFieldName(
                                        field, out_gdb)
                                    new_fields.append(valid_field)
                                    field_values.append(value)
                                    arcpy.AddField_management(
                                        layer_name, valid_field, 'TEXT')
                            else:
                                if not geom.type.upper() == arcpy.Describe(
                                        name).shapeType.upper():
                                    name = arcpy.CreateUniqueName(
                                        os.path.basename(name), out_gdb)
                                    if arcpy.env.outputCoordinateSystem:
                                        arcpy.CreateFeatureclass_management(
                                            out_gdb, os.path.basename(name),
                                            geom.type.upper())
                                    else:
                                        arcpy.env.outputCoordinateSystem = 4326
                                        arcpy.CreateFeatureclass_management(
                                            out_gdb, os.path.basename(name),
                                            geom.type.upper())
                                    layer_name = arcpy.MakeFeatureLayer_management(
                                        name, 'flayer_{0}'.format(
                                            os.path.basename(name)))
                                    existing_fields = [
                                        f.name
                                        for f in arcpy.ListFields(layer_name)
                                    ]
                                    new_fields = []
                                    field_values = []
                                    for field, value in row.iteritems():
                                        valid_field = arcpy.ValidateFieldName(
                                            field, out_gdb)
                                        new_fields.append(valid_field)
                                        field_values.append(value)
                                        if valid_field not in existing_fields:
                                            arcpy.AddField_management(
                                                layer_name, valid_field,
                                                'TEXT')
                        else:
                            if not arcpy.Exists(name):
                                arcpy.CreateTable_management(
                                    out_gdb, os.path.basename(name))
                                view_name = arcpy.MakeTableView_management(
                                    name, 'tableview')
                                existing_fields = [
                                    f.name for f in arcpy.ListFields(view_name)
                                ]
                                new_fields = []
                                field_values = []
                                for field, value in row.iteritems():
                                    valid_field = arcpy.ValidateFieldName(
                                        field, out_gdb)
                                    new_fields.append(valid_field)
                                    field_values.append(value)
                                    if valid_field not in existing_fields:
                                        arcpy.AddField_management(
                                            view_name, valid_field, 'TEXT')

                        if geom:
                            with arcpy.da.InsertCursor(layer_name, ["SHAPE@"] +
                                                       new_fields) as icur:
                                icur.insertRow([geom] + field_values)
                        else:
                            with arcpy.da.InsertCursor(view_name,
                                                       new_fields) as icur:
                                icur.insertRow(field_values)

                        processed_count += 1
                        if (processed_count % increment) == 0:
                            status_writer.send_percent(
                                float(processed_count) / result_count,
                                _('Added: {0}').format(row['name']),
                                'add_to_geodatabase')
                        added += 1
                        continue
                    except Exception as ex:
                        processed_count += 1
                        errors += 1
                        errors_reasons[name] = ex.message
                        continue
                continue
            # -----------------------------
            # Check the data type and copy.
            # -----------------------------
            dsc = arcpy.Describe(ds)
            if dsc.dataType == 'FeatureClass':
                if out_name == '':
                    arcpy.CopyFeatures_management(
                        ds, task_utils.create_unique_name(dsc.name, out_gdb))
                else:
                    arcpy.CopyFeatures_management(
                        ds, task_utils.create_unique_name(out_name, out_gdb))

            elif dsc.dataType == 'ShapeFile':
                if out_name == '':
                    arcpy.CopyFeatures_management(
                        ds,
                        task_utils.create_unique_name(dsc.name[:-4], out_gdb))
                else:
                    arcpy.CopyFeatures_management(
                        ds, task_utils.create_unique_name(out_name, out_gdb))

            elif dsc.dataType == 'FeatureDataset':
                if not is_fds:
                    fds_name = os.path.basename(
                        task_utils.create_unique_name(dsc.name, out_gdb))
                    fds = arcpy.CreateFeatureDataset_management(
                        out_gdb, fds_name).getOutput(0)
                else:
                    fds = out_gdb
                arcpy.env.workspace = dsc.catalogPath
                for fc in arcpy.ListFeatureClasses():
                    name = os.path.basename(
                        task_utils.create_unique_name(fc, out_gdb))
                    arcpy.CopyFeatures_management(fc, os.path.join(fds, name))
                arcpy.env.workspace = out_gdb

            elif dsc.dataType == 'RasterDataset':
                if is_fds:
                    out_gdb = os.path.dirname(out_gdb)
                if out_name == '':
                    arcpy.CopyRaster_management(
                        ds, task_utils.create_unique_name(dsc.name, out_gdb))
                else:
                    arcpy.CopyRaster_management(
                        ds, task_utils.create_unique_name(out_name, out_gdb))

            elif dsc.dataType == 'RasterCatalog':
                if is_fds:
                    out_gdb = os.path.dirname(out_gdb)
                if out_name == '':
                    arcpy.CopyRasterCatalogItems_management(
                        ds, task_utils.create_unique_name(dsc.name, out_gdb))
                else:
                    arcpy.CopyRasterCatalogItems_management(
                        ds, task_utils.create_unique_name(out_name, out_gdb))

            elif dsc.dataType == 'Layer':
                layer_from_file = arcpy.mapping.Layer(dsc.catalogPath)
                layers = arcpy.mapping.ListLayers(layer_from_file)
                for layer in layers:
                    if out_name == '':
                        name = task_utils.create_unique_name(
                            layer.name, out_gdb)
                    else:
                        name = task_utils.create_unique_name(out_name, out_gdb)
                    if layer.isFeatureLayer:
                        arcpy.CopyFeatures_management(layer.dataSource, name)
                    elif layer.isRasterLayer:
                        if is_fds:
                            name = os.path.dirname(name)
                        arcpy.CopyRaster_management(layer.dataSource, name)

            elif dsc.dataType == 'CadDrawingDataset':
                arcpy.env.workspace = dsc.catalogPath
                cad_wks_name = os.path.splitext(dsc.name)[0]
                for cad_fc in arcpy.ListFeatureClasses():
                    arcpy.CopyFeatures_management(
                        cad_fc,
                        task_utils.create_unique_name(
                            '{0}_{1}'.format(cad_wks_name, cad_fc), out_gdb))
                arcpy.env.workspace = out_gdb

            elif dsc.dataType == 'File':
                if dsc.catalogPath.endswith(
                        '.kml') or dsc.catalogPath.endswith('.kmz'):
                    name = os.path.splitext(dsc.name)[0]
                    temp_dir = tempfile.mkdtemp()
                    kml_layer = arcpy.KMLToLayer_conversion(
                        dsc.catalogPath, temp_dir, name)
                    group_layer = arcpy.mapping.Layer(
                        os.path.join(temp_dir, '{}.lyr'.format(name)))
                    for layer in arcpy.mapping.ListLayers(group_layer):
                        if layer.isFeatureLayer:
                            arcpy.CopyFeatures_management(
                                layer,
                                task_utils.create_unique_name(layer, out_gdb))
                        elif layer.isRasterLayer:
                            if is_fds:
                                out_gdb = os.path.dirname(out_gdb)
                            arcpy.CopyRaster_management(
                                layer,
                                task_utils.create_unique_name(layer, out_gdb))
                    # Clean up temp KML results.
                    arcpy.Delete_management(
                        os.path.join(temp_dir, '{}.lyr'.format(name)))
                    arcpy.Delete_management(kml_layer)
                else:
                    processed_count += 1
                    status_writer.send_percent(
                        processed_count / result_count,
                        _('Invalid input type: {0}').format(dsc.name),
                        'add_to_geodatabase')
                    skipped += 1
                    skipped_reasons[ds] = _('Invalid input type: {0}').format(
                        dsc.dataType)
                    continue

            elif dsc.dataType == 'MapDocument':
                mxd = arcpy.mapping.MapDocument(dsc.catalogPath)
                if map_frame_name:
                    df = arcpy.mapping.ListDataFrames(mxd, map_frame_name)[0]
                    layers = arcpy.mapping.ListLayers(mxd, data_frame=df)
                else:
                    layers = arcpy.mapping.ListLayers(mxd)
                for layer in layers:
                    if layer.isFeatureLayer:
                        arcpy.CopyFeatures_management(
                            layer.dataSource,
                            task_utils.create_unique_name(layer.name, out_gdb))
                    elif layer.isRasterLayer:
                        if is_fds:
                            out_gdb = os.path.dirname(out_gdb)
                        arcpy.CopyRaster_management(
                            layer.dataSource,
                            task_utils.create_unique_name(layer.name, out_gdb))
                table_views = arcpy.mapping.ListTableViews(mxd)
                if is_fds:
                    out_gdb = os.path.dirname(out_gdb)
                for table_view in table_views:
                    arcpy.CopyRows_management(
                        table_view.dataSource,
                        task_utils.create_unique_name(table_view.name,
                                                      out_gdb))
                out_gdb = arcpy.env.workspace

            elif dsc.dataType.find('Table') > 0:
                if is_fds:
                    out_gdb = os.path.dirname(out_gdb)
                if out_name == '':
                    arcpy.CopyRows_management(
                        ds, task_utils.create_unique_name(dsc.name, out_gdb))
                else:
                    arcpy.CopyRows_management(
                        ds, task_utils.create_unique_name(out_name, out_gdb))

            else:
                # Try to copy any other types such as topologies, network datasets, etc.
                if is_fds:
                    out_gdb = os.path.dirname(out_gdb)
                arcpy.Copy_management(
                    ds, task_utils.create_unique_name(dsc.name, out_gdb))

            out_gdb = arcpy.env.workspace
            processed_count += 1.
            status_writer.send_percent(processed_count / result_count,
                                       _('Added: {0}').format(ds),
                                       'add_to_geodatabase')
            status_writer.send_status(_('Added: {0}').format(ds))
            added += 1
        # Continue if an error. Process as many as possible.
        except Exception as ex:
            processed_count += 1
            status_writer.send_percent(processed_count / result_count,
                                       _('Skipped: {0}').format(ds),
                                       'add_to_geodatabase')
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors_reasons[ds] = repr(ex)
            errors += 1
            continue

    return added, errors, skipped
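
A minimal usage sketch for add_to_geodatabase (not from the original sample), again assuming the surrounding task-script globals are initialised; the geodatabase and input paths are placeholders. With is_fds=False the items are copied directly into the geodatabase root rather than a feature dataset.

import arcpy

out_gdb = r'C:\temp\collection.gdb'
if not arcpy.Exists(out_gdb):
    arcpy.CreateFileGDB_management(r'C:\temp', 'collection.gdb')

input_items = {r'C:\data\roads.shp': 'roads', r'C:\data\dem.tif': 'dem'}
added, errors, skipped = add_to_geodatabase(input_items, out_gdb, is_fds=False)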