예제 #1
0
def GetKmlToFc(kml):
    """Convert *kml* to a file geodatabase inside ``working_directory``.

    Deletes any pre-existing GDB of the same base name first, then runs
    KML To Layer.  Failures are reported via arcpy messages and appended
    to the module-level ``logfile``.

    :param kml: KML/KMZ file name; the last 4 chars (extension) are
        stripped to build the GDB name.
    """
    def _log_failure():
        # Append a timestamped failure record plus the arcpy error messages.
        # 'with' guarantees the handle is closed (the original leaked it).
        with open(logfile, 'a') as outfile:
            outfile.write("Process: Failed for: " + kml + " " +
                          str(timeYearMonDay) + " " + str(timeHour) + ":" +
                          str(timeMin) + '\n')
            outfile.write(arcpy.GetMessages(2))

    FileGeoDB = working_directory + "\\" + kml[:-4] + ".gdb"
    if arcpy.Exists(FileGeoDB):
        try:
            arcpy.Delete_management(FileGeoDB)
            arcpy.AddMessage('%s deleted!' % FileGeoDB)
            arcpy.KMLToLayer_conversion(kml, working_directory, kml[:-4],
                                        "NO_GROUNDOVERLAY")
        except Exception:  # was a bare except; keep best-effort behaviour
            arcpy.AddMessage("error deleting FDs")
            _log_failure()
    else:
        try:
            arcpy.AddMessage(
                '%s does not exist therefore it was not deleted!' % FileGeoDB)
            arcpy.KMLToLayer_conversion(kml, working_directory, kml[:-4],
                                        "NO_GROUNDOVERLAY")
        except Exception:
            arcpy.AddMessage("Error with kml to layer")
            _log_failure()
예제 #2
0
def load_main_data():
    """Load the CII score raster and the trails / islands layers."""
    # CII overall score raster: load, symbolize, then hide it.
    arcpy.MakeRasterLayer_management(cii_overall_score_ras, "cii_overall_score_ras1")
    symbolize_rasters(["cii_overall_score_ras1"], recalc_stats = "no")
    turn_off_layers(["cii_overall_score_ras1"])

    if COMPUTE_FROM_SCRATCH_OPTION == "yes":
        # Build everything from the raw inputs: convert the non-circuit
        # trails KML, then reproject trails and LTS1-2 islands to UTM 18N.
        arcpy.KMLToLayer_conversion (trails_orig, trails_converted_path, "trails_converted")
        target_spatial_reference = arcpy.SpatialReference('NAD 1983 UTM Zone 18N')
        arcpy.Project_management("trails_converted\\Polylines", "trails_proj", target_spatial_reference, "WGS_1984_(ITRF00)_To_NAD_1983")
        arcpy.MakeFeatureLayer_management(islands_orig, "islands_orig")
        arcpy.Project_management("islands_orig", "islands_proj", target_spatial_reference, "WGS_1984_(ITRF00)_To_NAD_1983")
        # Drop the conversion intermediates.
        remove_intermediary_layers(["trails_converted", "islands_orig"])
    else:
        # Re-use the already-preprocessed layers instead of recomputing.
        preprocessed = ["islands_with_score", "trails", "trails_intersecting",
                        "trails_intersecting_gte_2"]
        for preprocessed_name in preprocessed:
            arcpy.MakeFeatureLayer_management(preprocessed_name, preprocessed_name)
        turn_off_layers(preprocessed)
예제 #3
0
def process():
    """Create the EDIFICIOS folder/GDB structure, copy the template feature
    classes, import the building KMZ and append its features into them."""
    # Folder and geodatabase scaffolding.
    arcpy.CreateFolder_management(out_folder_path="C:/", out_name="EDIFICIOS")
    arcpy.CreateFileGDB_management(out_folder_path="C:/EDIFICIOS",
                                   out_name="GDB",
                                   out_version="CURRENT")
    arcpy.CreateFolder_management(out_folder_path="C:/EDIFICIOS",
                                  out_name="CARPETA")
    arcpy.CreateFileGDB_management(out_folder_path="C:/EDIFICIOS/CARPETA",
                                   out_name="GDB_E",
                                   out_version="CURRENT")

    # Copy the template feature classes into GDB_E (same order as before).
    template_copies = ((ly_taps_, "ly_taps"),
                       (ly_areain_, "ly_areain"),
                       (ly_troba_, "ly_troba"),
                       (ly_nodo_, "ly_nodo"))
    for template_source, template_name in template_copies:
        arcpy.Copy_management(
            template_source,
            out_data="C:/EDIFICIOS/CARPETA/GDB_E.gdb/" + template_name,
            data_type="FeatureClass")

    # KMZ -> feature classes, then tag points with their KML placemark name.
    EDIFICIO_KMZ = arcpy.KMLToLayer_conversion(
        EDI_KMZ,
        output_folder="C:/EDIFICIOS",
        output_data="EDIFICIO_KMZ",
        include_groundoverlay="NO_GROUNDOVERLAY")
    arcpy.AddField_management(in_table="C:/EDIFICIOS/EDIFICIO_KMZ.gdb/Points",
                              field_name="COD_TAP",
                              field_type="TEXT")
    arcpy.CalculateField_management(
        in_table="C:/EDIFICIOS/EDIFICIO_KMZ.gdb/Points",
        field="COD_TAP",
        expression="[Name]",
        expression_type="VB")

    # Append the imported features into the template feature classes.
    append_pairs = (("Points", "ly_taps"),
                    ("Polygons", "ly_troba"),
                    ("Polygons", "ly_areain"),
                    ("Polygons", "ly_nodo"))
    for source_fc, target_fc in append_pairs:
        arcpy.Append_management(
            inputs="C:/EDIFICIOS/EDIFICIO_KMZ.gdb/" + source_fc,
            target="C:/EDIFICIOS/CARPETA/GDB_E.gdb/" + target_fc,
            schema_type="NO_TEST")

    # Fill in the troba-code field on the taps layer.
    arcpy.CalculateField_management(ly_taps, "MTCODNOD", CodExpression,
                                    "PYTHON_9.3")
def treatKML(file, crs):
    """Convert a KML file into the scratch 'kml.gdb' and return the three
    expected feature-class paths (point, line, polygon)."""
    gdb_path = os.path.join(arcpy.env.scratchFolder, 'kml.gdb')
    # Remove any leftover gdb from a previous run before converting.
    if os.path.isdir(gdb_path):
        shutil.rmtree(gdb_path)
    arcpy.KMLToLayer_conversion(file, arcpy.env.scratchFolder, 'kml')
    # TODO : if folder in kml => dataset in the gdb => it doesn't work.
    return [os.path.join(gdb_path, geometry)
            for geometry in ('point', 'line', 'polygon')]
예제 #5
0
def import_kmz(input_feature, temp_location, output_folder, output_name,
               input_type):
    """Import a KMZ into a scratch gdb, then export the requested geometry
    type (*input_type*) as *output_name* inside *output_folder*."""
    # Clear any outputs left over from a previous run.
    delete_kmz_imports(temp_location, output_name)
    delete_shape(output_folder, output_name)

    print("Importing kmz: {0}. Type: {1}".format(input_feature, input_type))
    arcpy.KMLToLayer_conversion(input_feature, temp_location, output_name)

    print("Moving features: {0} : {1}".format(temp_location, output_name))
    placemarks_fc = "{0}\\{1}.gdb\\Placemarks\\{2}".format(
        temp_location, output_name, input_type)
    arcpy.FeatureClassToFeatureClass_conversion(placemarks_fc, output_folder,
                                                output_name)
예제 #6
0
def kmzToFeatures(inKMZ):
    """Extract the first .kml found inside *inKMZ*, convert it, and return
    the path of the resulting Polygons feature class.

    Emits an arcpy error (and implicitly returns None) when the archive
    contains no KML.
    """
    arcpy.AddMessage("kmzToFeatures")
    arcpy.AddMessage("parsed Zip")
    # 'with' closes the archive (the original leaked the handle and also
    # called namelist() once with its result discarded).
    with zipfile.ZipFile(inKMZ) as zf:
        for name in zf.namelist():
            if name.endswith(".kml"):
                kmlExtract = zf.extract(name)
                arcpy.AddMessage(kmlExtract)
                arcpy.KMLToLayer_conversion(kmlExtract, arcpy.env.scratchFolder,
                                            "outKML")
                return arcpy.env.scratchFolder + "/outKML.gdb/Placemarks/Polygons"
    arcpy.AddError("No KML was found in KMZ file")
def ImportKMZ(in_kmz, output_fc, fc_type):
    """Convert *in_kmz* to a scratch gdb next to the source file, copy the
    requested Placemarks feature class (*fc_type*) to *output_fc* with the
    Z flag disabled, then delete the intermediate .lyr and .gdb outputs.
    """
    # Describe once instead of twice (each Describe call re-reads the dataset).
    desc = arcpy.Describe(in_kmz)
    scratch_directory       =   desc.path
    scratch_gdb_name        =   desc.basename
    # Create GDB From KMZ
    arcpy.KMLToLayer_conversion(in_kmz, scratch_directory, scratch_gdb_name, 'NO_GROUNDOVERLAY')
    # Define Target FC
    raw_fc                  =   os.path.join(scratch_directory,scratch_gdb_name+".gdb", "Placemarks", fc_type)
    # Copy to Final FC and Remove Z Flag
    arcpy.env.outputZFlag = "Disabled"
    arcpy.CopyFeatures_management(raw_fc, output_fc)
    # Delete Intermediate
    arcpy.Delete_management(os.path.join(scratch_directory,scratch_gdb_name+".lyr"))
    arcpy.Delete_management(os.path.join(scratch_directory,scratch_gdb_name+".gdb"))
예제 #8
0
def main():
    """Batch-convert every .kmz/.kml under ``inputFolder`` into a per-file
    folder of shapefiles projected to NAD 1983 UTM Zone 10N."""
    # Gather inputs
    inputFolder = r"E:\Dropbox (RES)\@RES GIS\projects\CA\Klamath\DataReceived\AECOM\EagleData_20191113\All Eagle Data - SEND ME\Spatial Data Collected in Field\AECOM Eagle Data"
    spatialReference = r"C:\Users\jtouzel\AppData\Roaming\Esri\Desktop10.6\ArcMap\Coordinate Systems\NAD 1983 UTM Zone 10N.prj"
    # BUG FIX: spatialReference is a plain path string, so the original
    # `spatialReference.GCS.name` raised AttributeError.  Build the
    # SpatialReference object once and reuse it below.
    out_sr = arcpy.SpatialReference(spatialReference)

    # Write to Log
    arcpy.AddMessage('')
    arcpy.AddMessage(
        "===================================================================")
    sVersionInfo = 'Batch_KMZ_Converter.py, v20171129'
    arcpy.AddMessage('Batch KMZ Converter, {}'.format(sVersionInfo))
    arcpy.AddMessage("")
    arcpy.AddMessage("Support: [email protected], [email protected]")
    arcpy.AddMessage("")
    arcpy.AddMessage("Input Folder: {}".format(inputFolder))
    arcpy.AddMessage("Projection: {}".format(out_sr.GCS.name))
    arcpy.AddMessage(
        "===================================================================")

    kmzs = []

    # Loop through root directory to extract .KMZ and .KML files.  Add them to kmz list
    for root, dirs, files in os.walk(inputFolder):

        for item in files:
            if item.endswith('.kmz') or item.endswith('.kml'):
                kmzs.append(os.path.join(root, item))

    for kmz in kmzs:

        fileName = formatFileName(kmz)

        # BUG FIX: the original passed `root`, the stale os.walk loop
        # variable (whatever directory was visited last), so every output
        # landed in one arbitrary folder.  Place the output next to its
        # own source file instead.
        newFolder = makeNewFolder(os.path.dirname(kmz), fileName)

        # Convert KML/KMZ to GDB
        arcpy.KMLToLayer_conversion(kmz, newFolder, fileName)
        gdb = os.path.join(newFolder, '{}.gdb'.format(fileName))
        arcpy.env.workspace = gdb

        # Loop through the feature datasets and feature classes, projecting
        # each one out to its own shapefile.
        for ds in arcpy.ListDatasets('', ''):
            for i in arcpy.ListFeatureClasses('', '', ds):
                newFile = os.path.join(newFolder, fileName + '_' + i + '.shp')
                arcpy.Project_management(os.path.join(gdb, ds, i), newFile,
                                         out_sr)
예제 #9
0
# Script: convert a KML parameter to a layer/gdb in the chosen output
# location, clearing any stale DXF output from a previous export first.
inKML = arcpy.GetParameterAsText(0)
outLocation = arcpy.GetParameterAsText(1)

arcpy.env.overwriteOutput = True

#arcpy.ExportCAD_conversion(os.path.join(outLocation, os.path.basename(inKML)[:-4] + ".lyr"), 'DXF_R2013',os.path.join(outLocation,os.path.basename(inKML)[:-4] + ".dxf"))
# Resolve the literal "~\Desktop" placeholder to the user's real Desktop.
if outLocation == r"~\Desktop":
    outLocation = os.path.expanduser("~\Desktop")
    arcpy.AddMessage("Output Location: " + str(outLocation))
    arcpy.env.workspace = os.path.expanduser("~\Desktop")
else:
    arcpy.env.workspace = outLocation
    arcpy.AddMessage("Output Location: " + str(outLocation))

newLayer = arcpy.KMLToLayer_conversion(inKML, outLocation)

fgdbs = arcpy.ListWorkspaces(os.path.basename(inKML)[:-4] + '*', 'FileGDB')
arcpy.AddMessage("Workspaces: " + str(fgdbs))

# Drop a stale DXF (and its metadata sidecar) left by a previous run.
baseName = os.path.basename(inKML)[:-4]
if os.path.exists(os.path.join(outLocation, baseName + ".dxf")):
    os.remove(os.path.join(outLocation, baseName + ".dxf"))
    try:
        os.remove(os.path.join(outLocation, baseName + ".dxf.xml"))
    except OSError:  # was a bare except; the sidecar may simply not exist
        pass
예제 #10
0
def execute(request):
    """Converts each input dataset to kml (.kmz).

    Queries the search index in chunks, converts each batch of results via
    convert_to_kml(), then either zips multiple outputs or thumbnails and
    moves a single output, and finally writes a report.

    :param request: json as a dict.
    """
    converted = 0
    skipped = 0
    errors = 0
    global result_count
    parameters = request['params']

    # Scratch folder for the intermediate kmz outputs.
    out_workspace = os.path.join(request['folder'], 'temp')
    if not os.path.exists(out_workspace):
        os.makedirs(out_workspace)

    # Get the boundary box extent for input to KML tools.
    # The extent parameter may arrive as WKT or as a feature; try both,
    # leaving extent empty when the parameter is absent.
    extent = ''
    try:
        try:
            ext = task_utils.get_parameter_value(parameters,
                                                 'processing_extent', 'wkt')
            if ext:
                sr = task_utils.get_spatial_reference("4326")
                extent = task_utils.from_wkt(ext, sr)
        except KeyError:
            ext = task_utils.get_parameter_value(parameters,
                                                 'processing_extent',
                                                 'feature')
            if ext:
                extent = arcpy.Describe(ext).extent
    except KeyError:
        pass

    # Get the output file name.
    output_file_name = task_utils.get_parameter_value(parameters,
                                                      'output_file_name',
                                                      'value')
    if not output_file_name:
        output_file_name = 'kml_results'

    result_count, response_index = task_utils.get_result_count(parameters)
    # Query the index for results in groups of 25.
    query_index = task_utils.QueryIndex(parameters[response_index])
    fl = query_index.fl
    # Base query URL; the index endpoint comes from the process arguments.
    query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1], '/select?&wt=json',
                               fl)
    fq = query_index.get_fq()
    # Batch the work by filter query, by explicit ids, or by index ranges.
    if fq:
        groups = task_utils.grouper(range(0, result_count),
                                    task_utils.CHUNK_SIZE, '')
        query += fq
    elif 'ids' in parameters[response_index]:
        groups = task_utils.grouper(list(parameters[response_index]['ids']),
                                    task_utils.CHUNK_SIZE, '')
    else:
        groups = task_utils.grouper(range(0, result_count),
                                    task_utils.CHUNK_SIZE, '')

    # Begin processing
    status_writer.send_percent(0.0, _('Starting to process...'),
                               'convert_to_kml')
    headers = {
        'x-access-token': task_utils.get_security_token(request['owner'])
    }
    for group in groups:
        # Fetch this group's documents from the search index.
        if fq:
            results = requests.get(
                query +
                "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]),
                headers=headers)
        elif 'ids' in parameters[response_index]:
            results = requests.get(query +
                                   '{0}&ids={1}'.format(fl, ','.join(group)),
                                   headers=headers)
        else:
            results = requests.get(
                query +
                "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]),
                headers=headers)

        docs = results.json()['response']['docs']
        input_items = task_utils.get_input_items(docs)
        if not input_items:
            input_items = task_utils.get_input_items(
                parameters[response_index]['response']['docs'])

        # Docs without a 'path' are treated as table rows, grouped by name.
        input_rows = collections.defaultdict(list)
        for doc in docs:
            if 'path' not in doc:
                input_rows[doc['name']].append(doc)
        if input_rows:
            result = convert_to_kml(input_rows, out_workspace, extent)
            converted += result[0]
            errors += result[1]
            skipped += result[2]

        if input_items:
            result = convert_to_kml(input_items, out_workspace, extent)
            converted += result[0]
            errors += result[1]
            skipped += result[2]

        if not input_items and not input_rows:
            status_writer.send_state(
                status.STAT_FAILED,
                _('No items to process. Check if items exist.'))
            return

    # Zip up kmz files if more than one.
    if converted > 1:
        status_writer.send_status("Converted: {}".format(converted))
        zip_file = task_utils.zip_data(out_workspace,
                                       '{0}.zip'.format(output_file_name))
        shutil.move(
            zip_file,
            os.path.join(os.path.dirname(out_workspace),
                         os.path.basename(zip_file)))
        shutil.copy2(
            os.path.join(os.path.dirname(os.path.dirname(__file__)),
                         'supportfiles', '_thumb.png'), request['folder'])
    elif converted == 1:
        # Single result: thumbnail it (best effort) and move it into place.
        try:
            kml_file = glob.glob(os.path.join(out_workspace, '*.kmz'))[0]
            tmp_lyr = arcpy.KMLToLayer_conversion(kml_file, out_workspace,
                                                  'kml_layer')
            task_utils.make_thumbnail(
                tmp_lyr.getOutput(0),
                os.path.join(request['folder'], '_thumb.png'))
        except arcpy.ExecuteError:
            pass
        shutil.move(
            kml_file,
            os.path.join(request['folder'], os.path.basename(kml_file)))

    # Update state if necessary.
    if skipped > 0 or errors > 0:
        status_writer.send_state(
            status.STAT_WARNING,
            _('{0} results could not be processed').format(errors + skipped))
    task_utils.report(os.path.join(request['folder'],
                                   '__report.json'), converted, skipped,
                      errors, errors_reasons, skipped_reasons)
예제 #11
0
def add_layers_to_group(data_sources, group_layer, mxd, verify=False, version=CURRENT_VERSION):
    """Add every file listed in *data_sources* to *group_layer* inside *mxd*.

    KML/KMZ files are first converted to ArcGIS layer files; other files go
    through template layer files or are added as plain vector sublayers.

    :param data_sources: mapping of layer name -> info dict with "files",
        "type", "name" and (for plain vector groups) "layers" entries.
    :param group_layer: target arcpy.mapping group layer.
    :param mxd: map document whose active data frame receives the layers.
    :param verify: passed through to update_layer.
    :param version: ArcMap version string used to pick template layer files.
    """
    data_frame = mxd.activeDataFrame

    for vector_layer_name, layer_info in data_sources.items():
        for file_info in layer_info["files"]:
            # As of arcgis 10.5.1 shapefiles can't be imported as zips.
            if file_info["file_ext"] in UNSUPPORTED_FILES:
                logger.warning(
                    "This script can't automatically add {} files.  "
                    "You can try to use a template in the folder or manually importing "
                    "the files.".format(file_info["file_ext"])
                )
                continue
            file_path = os.path.abspath(os.path.join(BASE_DIR, file_info["file_path"]))
            # If possible calculate the statistics now so that they are correct when opening arcmap.
            try:
                logger.warning(("Calculating statistics for the file {0}...".format(file_path)))
                arcpy.CalculateStatistics_management(file_path)
            except Exception as e:
                logger.warning(e)
            layer_file = get_layer_file(layer_info["type"], version)
            # Without a template layer file, only plain "vector" data can
            # still be added; everything else is skipped for this version.
            if not (layer_file or layer_info["type"].lower() == "vector"):
                logger.warning(
                    (
                        "Skipping layer {0} because the file type is not supported for ArcMap {1}".format(
                            vector_layer_name, version
                        )
                    )
                )
                if version == "10.5":
                    logger.warning(
                        "However with your version of ArcMap you can still drag and drop this layer onto the Map."
                    )
                continue
            # Unique display name: <name>_<projection><ext with dots as underscores>.
            vector_layer_name = "{}_{}{}".format(
                layer_info["name"], file_info["projection"], file_info["file_ext"].replace(".", "_")
            )
            if file_info["file_ext"] in [".kml", ".kmz"]:
                # Since this will generate data by converting the KML files, we should store it with the original data.
                output_folder = os.path.join(os.path.dirname(file_path), "arcgis")
                kml_layer = os.path.join(output_folder, "{}.lyr".format(vector_layer_name))
                logger.error("KML LAYER: {}".format(kml_layer))
                try:
                    logger.error("Converting {} to ArcGIS Layer".format(file_path))
                    arcpy.KMLToLayer_conversion(
                        in_kml_file=file_path,
                        output_folder=os.path.join(os.path.dirname(file_path), "arcgis"),
                        output_data=vector_layer_name,
                    )
                    logger.error("Successfully converted: " + file_path)
                except Exception as e:
                    # This could fail for various reasons including that the file already exists.
                    # If KMLs are very important to your workflow please contact us and we can make this more robust.
                    logger.warning("Could not create a new KML layer file and gdb, it may already exist.")
                    logger.info(e)
                    # We couldn't create the file, try to grab the layer if it exists.
                try:
                    arc_layer = arcpy.mapping.Layer(kml_layer)
                    if arc_layer:
                        arc_layer.name = vector_layer_name
                        logger.warning(("Adding {0} layer: {1}...".format(layer_info["type"], vector_layer_name)))
                        arcpy.mapping.AddLayerToGroup(data_frame, group_layer, arc_layer, "TOP")
                finally:
                    del arc_layer
            else:
                try:
                    arc_layer = add_layer_to_mxd(vector_layer_name, layer_file or "group", mxd, group_layer=group_layer)
                    if layer_file:
                        # Get instance of layer from MXD, not the template file.
                        try:
                            logger.warning(("Updating layer: {0}...".format(arc_layer.name)))
                            update_layer(
                                arc_layer,
                                file_path,
                                layer_info["type"],
                                projection=file_info.get("projection"),
                                verify=verify,
                            )
                        except Exception as e:
                            logger.error("Could not update layer {0}".format(arc_layer.name))
                            logger.error(e)
                    else:
                        # Add arbitrary vector layers.
                        logger.warning("Adding {0} layer(s):...".format(layer_info["type"]))
                        vector_layer_group = arc_layer
                        for sublayer in layer_info["layers"]:
                            sublayer_name = "{}_{}{}".format(
                                sublayer, file_info["projection"], file_info["file_ext"].replace(".", "_")
                            )
                            logger.warning("Creating new layer for {0}...".format(sublayer_name))
                            arcpy.MakeFeatureLayer_management(
                                "{0}/{1}".format(file_path.rstrip("/"), sublayer), sublayer_name
                            )
                            arc_layer = arcpy.mapping.Layer(sublayer_name)
                            logger.warning("adding {} to {}".format(arc_layer.name, vector_layer_group.name))
                            # Note ordering is important here since layers are [1,2,3,4] we want to add at bottom.
                            arcpy.mapping.AddLayerToGroup(data_frame, vector_layer_group, arc_layer, "BOTTOM")
                        del vector_layer_group
                except Exception as e:
                    logger.error("Could not add layer {0}".format(arc_layer.name))
                    logger.error(e)
                finally:
                    del arc_layer
예제 #12
0
#%% test arcpy.KMLToLayer_conversion()
# Set workspace (where all the KMLs are)
# The script parameter is the KML itself; its containing folder becomes
# the workspace and its name drives all derived output names.
parameter = arcpy.GetParameterAsText(0)
parameter2 = arcpy.Describe(parameter)
work_space = parameter2.path
kml_name = parameter2.basename + '.' + parameter2.extension
#work_space = r"C:\Users\lt\Downloads\20470404-ArcPy-Line2Polygon S1 KML"
#kml_name = 'Sentinel-1B_MP_20170214T180000_20170222T180000.kml'
# Derived names: <kml>.gdb for the conversion output, and a sanitized
# <kml>.shp (dashes/colons/spaces stripped) for the final shapefile.
gdb_space = '.'.join([kml_name.split('.')[0], 'gdb'])
final_shp = re.sub(r'[-: ]', r'', '.'.join([kml_name.split('.')[0], 'shp']))
final_shp_out = join(work_space, final_shp)
arcpy.env.workspace = work_space
arcpy.env.overwriteOutput = True
# Convert KML files to geodatabase
kml = join(work_space, kml_name)
arcpy.KMLToLayer_conversion(kml, work_space)
#%% test table field
gdb_space = '.'.join([kml_name.split('.')[0], 'gdb'])  # recomputed; same value as above
feature_name = 'Placemarks\Polylines'
#feature_namefull = join(work_space, gdb_space, feature_name)
arcpy.env.workspace = join(work_space, gdb_space)

field_list = arcpy.ListFields(feature_name)
# Fields to keep when the table is later pruned.
save_field = [
    'OID', 'Shape', 'SensingTime', 'SensingMode', 'Shape_Length', 'EndTime'
]

time_format_in = 'yyyy-MM-dd HH:mm:ss'
# mysteriously the time_format_out seems did not change anything in new field!!
time_format_out = 'YYYY/MM/dd hh:mm:ss'
arcpy.ConvertTimeField_management(feature_name, "BeginTime", time_format_in,
예제 #13
0
def kmlToFeatures(kmlFile):
    """Convert *kmlFile* into the scratch folder and return the path of the
    resulting Polygons feature class."""
    arcpy.AddMessage("kmlToFeatures")
    arcpy.KMLToLayer_conversion(kmlFile, arcpy.env.scratchFolder, "outKML")
    return "{0}/outKML.gdb/Placemarks/Polygons".format(arcpy.env.scratchFolder)
예제 #14
0
def clip_data(input_items, out_workspace, out_coordinate_system, gcs_sr, gcs_clip_poly, out_format):
    """Clip each input item to the clip polygon and write results to out_workspace.

    Handles service-layer URLs, geometry-feature rows, feature classes,
    shapefiles, feature datasets, rasters, layer files, CAD datasets,
    KML/KMZ files, plain files, and map documents.

    Parameters:
        input_items: mapping of dataset path/URL -> output name (str), or a
            list of geometry-feature row dicts.
        out_workspace: folder or geodatabase receiving clipped outputs.
        out_coordinate_system: output spatial reference id, or 0 to derive
            the output spatial reference from each input.
        gcs_sr: geographic spatial reference of gcs_clip_poly.
        gcs_clip_poly: clip polygon geometry expressed in gcs_sr.
        out_format: output format keyword (e.g. 'SHP', 'LPK', 'MPK').

    Returns:
        Tuple of (clipped, errors, skipped) counts.
    """
    clipped = 0
    errors = 0
    skipped = 0
    fds = None
    global processed_count
    global layer_name
    global existing_fields
    global new_fields
    global field_values

    for ds, out_name in input_items.items():
        try:
            if not isinstance(out_name, list):
                out_name = ''
            # -----------------------------------------------
            # If the item is a service layer, process and continue.
            # -----------------------------------------------
            if ds.startswith('http'):
                try:
                    if out_coordinate_system == 0:
                        service_layer = task_utils.ServiceLayer(ds)
                        wkid = service_layer.wkid
                        out_sr = arcpy.SpatialReference(wkid)
                        arcpy.env.outputCoordinateSystem = out_sr
                    else:
                        out_sr = task_utils.get_spatial_reference(out_coordinate_system)
                        arcpy.env.outputCoordinateSystem = out_sr

                    # Project the clip polygon into the output SR; fall back to
                    # the unprojected polygon when no transformation applies.
                    if not out_sr.name == gcs_sr.name:
                        try:
                            geo_transformation = arcpy.ListTransformations(gcs_sr, out_sr)[0]
                            clip_poly = gcs_clip_poly.projectAs(out_sr, geo_transformation)
                        except (AttributeError, IndexError):
                            try:
                                clip_poly = gcs_clip_poly.projectAs(out_sr)
                            except AttributeError:
                                clip_poly = gcs_clip_poly
                        except ValueError:
                            clip_poly = gcs_clip_poly
                    else:
                        clip_poly = gcs_clip_poly

                    arcpy.env.overwriteOutput = True
                    service_layer = task_utils.ServiceLayer(ds, clip_poly.extent.JSON, 'esriGeometryEnvelope')
                    oid_groups = service_layer.object_ids
                    out_features = None
                    g = 0.
                    group_cnt = service_layer.object_ids_cnt
                    # Fetch features one OID group at a time; first group creates
                    # the output, later groups are appended.
                    for group in oid_groups:
                        g += 1
                        group = [oid for oid in group if oid]
                        where = '{0} IN {1}'.format(service_layer.oid_field_name, tuple(group))
                        # FIX: the format string has two placeholders; the
                        # original passed a third argument that ran eval() on
                        # the clip polygon JSON -- a dangerous no-op. Removed.
                        url = ds + "/query?where={}&outFields={}&returnGeometry=true&f=json".format(where, '*')
                        feature_set = arcpy.FeatureSet()
                        if not out_name:
                            out_name = service_layer.service_layer_name
                        try:
                            feature_set.load(url)
                        except Exception:
                            continue
                        if not out_features:
                            out_features = arcpy.Clip_analysis(feature_set, clip_poly, out_name)
                        else:
                            clip_features = arcpy.Clip_analysis(feature_set, clip_poly, 'in_memory/features')
                            arcpy.Append_management(clip_features, out_features, 'NO_TEST')
                            try:
                                arcpy.Delete_management(clip_features)
                            except arcpy.ExecuteError:
                                pass
                        status_writer.send_percent(float(g) / group_cnt, '', 'clip_data')
                    processed_count += 1.
                    clipped += 1
                    status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(ds), 'clip_data')
                    continue
                except Exception as ex:
                    status_writer.send_state(status.STAT_WARNING, str(ex))
                    errors_reasons[ds] = ex.message
                    errors += 1
                    continue

            # -----------------------------------------------
            # Check if the path is a MXD data frame type.
            # ------------------------------------------------
            map_frame_name = task_utils.get_data_frame_name(ds)
            if map_frame_name:
                ds = ds.split('|')[0].strip()

            # -------------------------------
            # Is the input a geometry feature
            # -------------------------------
            if isinstance(out_name, list):
                for row in out_name:
                    try:
                        arcpy.env.overwriteOutput = True
                        name = os.path.join(out_workspace, arcpy.ValidateTableName(ds, out_workspace))
                        if out_format == 'SHP':
                            name += '.shp'
                        # Clip the geometry.
                        geo_json = row['[geo]']
                        geom = arcpy.AsShape(geo_json)
                        row.pop('[geo]')
                        if not arcpy.Exists(name):
                            if arcpy.env.outputCoordinateSystem:
                                arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())
                            else:
                                arcpy.env.outputCoordinateSystem = 4326
                                arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())

                            layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            # CONSISTENCY: use .items() as the later branch does
                            # (the original mixed .iteritems() and .items()).
                            for field, value in row.items():
                                valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                new_fields.append(valid_field)
                                field_values.append(value)
                                try:
                                    arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                                except arcpy.ExecuteError:
                                    arcpy.DeleteField_management(layer_name, valid_field)
                                    arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                        else:
                            # Existing output with a different geometry type:
                            # create a uniquely-named sibling feature class.
                            if not geom.type.upper() == arcpy.Describe(name).shapeType.upper():
                                name = arcpy.CreateUniqueName(os.path.basename(name), out_workspace)
                                if arcpy.env.outputCoordinateSystem:
                                    arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())
                                else:
                                    arcpy.env.outputCoordinateSystem = 4326
                                    arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())

                                layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                                existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                                new_fields = []
                                field_values = []
                                for field, value in row.items():
                                    valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                    new_fields.append(valid_field)
                                    field_values.append(value)
                                    if not valid_field in existing_fields:
                                        try:
                                            arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                                        except arcpy.ExecuteError:
                                            arcpy.DeleteField_management(layer_name, valid_field)
                                            arcpy.AddField_management(layer_name, valid_field, 'TEXT')

                        clipped_geometry = arcpy.Clip_analysis(geom, gcs_clip_poly, arcpy.Geometry())
                        if clipped_geometry:
                            with arcpy.da.InsertCursor(layer_name, ["SHAPE@"] + new_fields) as icur:
                                icur.insertRow([clipped_geometry[0]] + field_values)
                        status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(row['name']), 'clip_data')
                        processed_count += 1
                        clipped += 1
                    except KeyError:
                        processed_count += 1
                        skipped += 1
                        # FIX: the status constant belongs to send_state, not to
                        # the gettext call (original passed two args to _()).
                        status_writer.send_state(status.STAT_WARNING, _('Invalid input type: {0}').format(ds))
                        skipped_reasons[ds] = 'Invalid input type'
                    except Exception as ex:
                        processed_count += 1
                        errors += 1
                        errors_reasons[ds] = ex.message
                        continue
                continue

            dsc = arcpy.Describe(ds)
            try:
                if dsc.spatialReference.name == 'Unknown':
                    status_writer.send_state(status.STAT_WARNING, _('{0} has an Unknown projection. Output may be invalid or empty.').format(dsc.name))
            except AttributeError:
                pass

            # --------------------------------------------------------------------
            # If no output coord. system, get output spatial reference from input.
            # --------------------------------------------------------------------
            if out_coordinate_system == 0:
                try:
                    out_sr = dsc.spatialReference
                    arcpy.env.outputCoordinateSystem = out_sr
                except AttributeError:
                    out_sr = task_utils.get_spatial_reference(4326)
                    arcpy.env.outputCoordinateSystem = out_sr
            else:
                out_sr = task_utils.get_spatial_reference(out_coordinate_system)
                arcpy.env.outputCoordinateSystem = out_sr

            # -------------------------------------------------
            # If the item is not a file, project the clip area.
            # -------------------------------------------------
            if dsc.dataType not in ('File', 'TextFile'):
                if not out_sr.name == gcs_sr.name:
                    try:
                        geo_transformation = arcpy.ListTransformations(gcs_sr, out_sr)[0]
                        clip_poly = gcs_clip_poly.projectAs(out_sr, geo_transformation)
                    except (AttributeError, IndexError):
                        try:
                            clip_poly = gcs_clip_poly.projectAs(out_sr)
                        except AttributeError:
                            clip_poly = gcs_clip_poly
                    except ValueError:
                        clip_poly = gcs_clip_poly
                else:
                    clip_poly = gcs_clip_poly
                extent = clip_poly.extent


            # -----------------------------
            # Check the data type and clip.
            # -----------------------------

            # Feature Class or ShapeFile
            if dsc.dataType in ('FeatureClass', 'ShapeFile', 'Shapefile'):
                if out_name == '':
                    name = arcpy.ValidateTableName(dsc.name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                else:
                    name = arcpy.ValidateTableName(out_name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                # Does the input exist in a feature dataset? If so, create the feature dataset if it doesn't exist.
                ws = os.path.dirname(ds)
                # FIX: the original used `[any(ext) for ext in (...) if ext in ws]`,
                # relying on list truthiness; this generator states the intent.
                if any(ext in ws for ext in ('.gdb', '.mdb', '.sde')):
                    if os.path.splitext(ws)[1] in ('.gdb', '.mdb', '.sde'):
                        arcpy.Clip_analysis(ds, clip_poly, name)
                    else:
                        fds_name = os.path.basename(ws)
                        if not arcpy.Exists(os.path.join(out_workspace, fds_name)):
                            arcpy.CreateFeatureDataset_management(out_workspace, fds_name, dsc.spatialReference)
                        arcpy.Clip_analysis(ds, clip_poly, os.path.join(out_workspace, fds_name, os.path.basename(ds)))
                else:
                    arcpy.Clip_analysis(ds, clip_poly, name)

            # Feature dataset
            elif dsc.dataType == 'FeatureDataset':
                if not out_format == 'SHP':
                    fds_name = os.path.basename(task_utils.create_unique_name(dsc.name, out_workspace))
                    fds = arcpy.CreateFeatureDataset_management(out_workspace, fds_name)
                arcpy.env.workspace = ds
                for fc in arcpy.ListFeatureClasses():
                    try:
                        if not out_format == 'SHP':
                            arcpy.Clip_analysis(fc, clip_poly, task_utils.create_unique_name(fc, fds.getOutput(0)))
                        else:
                            arcpy.Clip_analysis(fc, clip_poly, task_utils.create_unique_name(fc, out_workspace))
                    except arcpy.ExecuteError:
                        pass
                arcpy.env.workspace = out_workspace

            # Raster dataset
            elif dsc.dataType == 'RasterDataset':
                if out_name == '':
                    name = task_utils.create_unique_name(dsc.name, out_workspace)
                else:
                    name = task_utils.create_unique_name(out_name, out_workspace)
                ext = '{0} {1} {2} {3}'.format(extent.XMin, extent.YMin, extent.XMax, extent.YMax)
                arcpy.Clip_management(ds, ext, name, in_template_dataset=clip_poly, clipping_geometry="ClippingGeometry")

            # Layer file
            elif dsc.dataType == 'Layer':
                task_utils.clip_layer_file(dsc.catalogPath, clip_poly, arcpy.env.workspace)

            # Cad drawing dataset
            elif dsc.dataType == 'CadDrawingDataset':
                arcpy.env.workspace = dsc.catalogPath
                cad_wks_name = os.path.splitext(dsc.name)[0]
                for cad_fc in arcpy.ListFeatureClasses():
                    name = task_utils.create_unique_name('{0}_{1}'.format(cad_wks_name, cad_fc), out_workspace)
                    arcpy.Clip_analysis(cad_fc, clip_poly, name)
                arcpy.env.workspace = out_workspace

            # File
            elif dsc.dataType in ('File', 'TextFile'):
                if dsc.catalogPath.endswith('.kml') or dsc.catalogPath.endswith('.kmz'):
                    # KML/KMZ: convert to a scratch layer, clip every feature
                    # layer inside the group, then remove the temp results.
                    name = os.path.splitext(dsc.name)[0]
                    kml_layer = arcpy.KMLToLayer_conversion(dsc.catalogPath, arcpy.env.scratchFolder, name)
                    group_layer = arcpy.mapping.Layer(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    for layer in arcpy.mapping.ListLayers(group_layer):
                        if layer.isFeatureLayer:
                            arcpy.Clip_analysis(layer,
                                                gcs_clip_poly,
                                                task_utils.create_unique_name(layer, out_workspace))
                    # Clean up temp KML results.
                    arcpy.Delete_management(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    arcpy.Delete_management(kml_layer[1])
                    del group_layer
                else:
                    # Non-spatial file: copy it through unchanged.
                    if out_name == '':
                        out_name = dsc.name
                    if out_workspace.endswith('.gdb'):
                        f = arcpy.Copy_management(ds, os.path.join(os.path.dirname(out_workspace), out_name))
                    else:
                        f = arcpy.Copy_management(ds, os.path.join(out_workspace, out_name))
                    processed_count += 1.
                    status_writer.send_percent(processed_count / result_count, _('Copied file: {0}').format(dsc.name), 'clip_data')
                    status_writer.send_state(_('Copied file: {0}').format(dsc.name))
                    clipped += 1
                    if out_format in ('LPK', 'MPK'):
                        files_to_package.append(f.getOutput(0))
                    continue

            # Map document
            elif dsc.dataType == 'MapDocument':
                task_utils.clip_mxd_layers(dsc.catalogPath, clip_poly, arcpy.env.workspace, map_frame_name)
            else:
                processed_count += 1.
                status_writer.send_percent(processed_count / result_count, _('Invalid input type: {0}').format(ds), 'clip_data')
                status_writer.send_state(status.STAT_WARNING, _('Invalid input type: {0}').format(ds))
                skipped += 1
                skipped_reasons[ds] = _('Invalid input type: {0}').format(dsc.dataType)
                continue

            processed_count += 1.
            status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(dsc.name), 'clip_data')
            status_writer.send_status(_('Clipped: {0}').format(dsc.name))
            clipped += 1
        # Continue. Process as many as possible.
        except Exception as ex:
            processed_count += 1.
            status_writer.send_percent(processed_count / result_count, _('Skipped: {0}').format(os.path.basename(ds)), 'clip_data')
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors_reasons[ds] = ex.message
            errors += 1
    return clipped, errors, skipped
예제 #15
0
 def toKML(x, y):
     """Convert the KML/KMZ file *x* into a file geodatabase in folder *y*.

     Thin wrapper around arcpy.KMLToLayer_conversion. Returns the tool's
     Result object so callers can inspect the conversion output (the
     original discarded the result and returned None).
     """
     return arcpy.KMLToLayer_conversion(x, y)
예제 #16
0
# Environment setup: work inside `folder` and allow overwriting outputs.
arcpy.env.workspace = folder
# NOTE(review): the env attribute is spelled `overwriteOutput`; this
# `overWriteOutput` spelling likely has no effect -- confirm and fix.
arcpy.env.overWriteOutput = 1

# Derive the KML base name and the file GDB path that KMLToLayer will
# create; delete a stale GDB from a previous run if one exists.
lstStr = string.split(kml, '\\')
lstStr = string.split(lstStr[len(lstStr) - 1], '.')
basename = lstStr[0]
gdb = folder + os.path.sep + basename + '.gdb'
if os.path.exists(gdb):
    print 'deleting ' + gdb
    arcpy.Delete_management(gdb)

# Convert the KML into <folder>\<basename>.gdb (plus a layer file).
print 'converting ' + kml
arcpy.KMLToLayer_conversion(kml, folder)

# Add Longitude/Latitude fields to the converted Placemarks\Points
# feature class; an update-cursor pass (continues below) fills them in.
print 'updating Longitude and Latitude'
ds = gdb + os.path.sep + "Placemarks\\Points"
arcpy.AddField_management(in_table=ds,
                          field_name="Longitude",
                          field_type="DOUBLE")
arcpy.AddField_management(in_table=ds,
                          field_name="Latitude",
                          field_type="DOUBLE")
lstFP = []  # list of KML folder paths collected while cursoring
n = 0
recs = arcpy.UpdateCursor(ds)  # legacy (pre-arcpy.da) update cursor
rec = recs.next()
while rec:
예제 #17
0
# Workspace plus output shapefile paths and accumulators for extracted points.
arcpy.env.workspace = folder
linePoints = folder + "\\LinePoints.shp"
polygonPoints = folder + "\\PolygonPoints.shp"
linePntsLst = []  # points harvested from line features
polygonPntsLst = []  # points harvested from polygon features
pointsLst = []
wgsSR = arcpy.SpatialReference(4326)  # WGS 1984 (geographic)
# EPSG 3969 -- Virginia Lambert, judging by the variable name; confirm.
vaLLCSR = arcpy.SpatialReference(3969)
lastGdb = ""  # previously processed GDB path, used to detect name collisions
for fl in os.listdir(folder):
    if (fl.endswith(".kmz") or fl.endswith(".kml")):
        
        kmzName = os.path.splitext(os.path.basename(fl))[0]
        print (kmzName)
        try:
            arcpy.KMLToLayer_conversion(folder + "\\" + fl, folder)
            print("KML converted")
        except:
            errorWriter = open(folder + "\\errorlog.txt", 'a')
            errorWriter.write("Error converting " + kmzName + "\n")
            errorWriter.write(traceback.format_exc() + "\n")
            errorWriter.close()
            break
        gdb = folder + "\\" + kmzName + ".gdb" + "\\Placemarks"
        arcpy.env.workspace = gdb
        try:
            if(arcpy.env.workspace == lastGdb):
                raise Exception
        except:
            errorWriter = open(folder + "\\errorlog.txt", 'a')
            errorWriter.write("Error changing gdb for " + kmzName + "\n")
예제 #18
0
        # What Python?
        sysver = sys.version
        arcpy.AddMessage(sysver)
        print(sysver)

        # Url where KML/KMZ resides
        urllib.urlretrieve(
            "http://www.natice.noaa.gov/pub/special/google_kml/antarctic.kmz",
            dlAntarctic)
        urllib.urlretrieve(
            "http://www.natice.noaa.gov/pub/special/google_kml/arctic.kmz",
            dlArctic)
        print("Retrieving Data from source")
        arcpy.AddMessage("Retrieving Data from source")

# Convert the downloaded KMZ files to layers/file GDBs in the download
# folder. dlAntarctic, dlArctic and downloadfolder are defined earlier in
# the script (outside this excerpt); each call returns a tool Result.
outantacrticfc = arcpy.KMLToLayer_conversion(dlAntarctic, downloadfolder)
outacrticfc = arcpy.KMLToLayer_conversion(dlArctic, downloadfolder)

##If you want to run this script in an ArcGIS Desktop model, uncomment these two lines and define two
## output parameters in the script. The outputs will be feature class.

#arcpy.SetParameter(0,outantacrticfc)
#arcpy.SetParameter(1,outacrticfc)

# Close the file handles the downloads were written to (opened earlier,
# outside this excerpt).
outAntarctic.close()
outArctic.close()

arcpy.AddMessage("Retrieved Ice Data")
예제 #19
0
import traceback


# Script parameter and geoprocessing environment setup.
folder = arcpy.GetParameterAsText(0)  # folder containing .kmz/.kml inputs
arcpy.env.workspace = folder
arcpy.env.outputMFlag = "Disabled"  # strip M-values from outputs
arcpy.env.overwriteOutput = True
# Target projection for the converted polygons; EPSG 3969 (Virginia
# Lambert, per its usage elsewhere in this file) -- confirm.
sr = arcpy.SpatialReference(3969)
for kmz in os.listdir(folder):
    if(kmz.endswith(".kmz") or kmz.endswith(".kml")):
        fileName = os.path.basename(kmz)
        fileNameOnly = os.path.splitext(fileName)[0]
        outWKT = folder + "\\" + fileNameOnly + ".wkt"
        try:
            
            outLyr = arcpy.KMLToLayer_conversion(folder + "\\" + kmz, folder)
            print("layer created")
            gdb = folder + "\\" +  fileNameOnly + ".gdb" 
            arcpy.env.workspace = gdb
            lccShp = arcpy.Project_management(r"\Placemarks\Polygons", "lccShp", sr)
            snglPrt = arcpy.MultipartToSinglepart_management(lccShp, "snglPrt")
            writeWKT = open(outWKT, 'w')
            for row in arcpy.da.SearchCursor(snglPrt, ["SHAPE@WKT"]):
                wktLine = row[0]
                wktPlyRep = wktLine.replace("MULTIPOLYGON Z", "$shape = 'polygon")
                wktParRep = wktPlyRep.replace(" (((", "((\n    ")
                wktParRep2 = wktParRep.replace(")))", "\n))'")
                writeWKT.write(wktParRep2)
            writeWKT.close()     
            arcpy.env.workspace = folder
예제 #20
0
def add_to_geodatabase(input_items, out_gdb, is_fds):
    """Adds items to a geodatabase."""
    added = 0
    skipped = 0
    errors = 0
    global processed_count
    global layer_name
    global existing_fields
    global new_fields
    global field_values

    for ds, out_name in input_items.iteritems():
        try:
            # -----------------------------------------------
            # If the item is a service layer, process and continue.
            # -----------------------------------------------
            if ds.startswith('http'):
                try:
                    service_layer = task_utils.ServiceLayer(ds)
                    arcpy.env.overwriteOutput = True
                    oid_groups = service_layer.object_ids
                    out_features = None
                    g = 0.
                    group_cnt = service_layer.object_ids_cnt
                    for group in oid_groups:
                        g += 1
                        group = [oid for oid in group if oid]
                        where = '{0} IN {1}'.format(
                            service_layer.oid_field_name, tuple(group))
                        url = ds + "/query?where={}&outFields={}&returnGeometry=true&geometryType=esriGeometryPolygon&f=json".format(
                            where, '*')
                        feature_set = arcpy.FeatureSet()
                        feature_set.load(url)
                        if not out_features:
                            out_features = arcpy.CopyFeatures_management(
                                feature_set,
                                task_utils.create_unique_name(
                                    out_name, out_gdb))
                        else:
                            features = arcpy.CopyFeatures_management(
                                feature_set,
                                task_utils.create_unique_name(
                                    out_name, out_gdb))
                            arcpy.Append_management(features, out_features,
                                                    'NO_TEST')
                            try:
                                arcpy.Delete_management(features)
                            except arcpy.ExecuteError:
                                pass
                        status_writer.send_percent(
                            float(g) / group_cnt * 100, '',
                            'add_to_geodatabase')
                    processed_count += 1.
                    added += 1
                    status_writer.send_percent(processed_count / result_count,
                                               _('Added: {0}').format(ds),
                                               'add_to_geodatabase')
                    continue
                except Exception as ex:
                    status_writer.send_state(status.STAT_WARNING, str(ex))
                    errors_reasons[ds] = ex.message
                    errors += 1
                    continue

            # ------------------------------
            # Is the input a mxd data frame.
            # ------------------------------
            map_frame_name = task_utils.get_data_frame_name(ds)
            if map_frame_name:
                ds = ds.split('|')[0].strip()

            # -------------------------------
            # Is the input a geometry feature
            # -------------------------------
            if isinstance(out_name, list):
                increment = task_utils.get_increment(result_count)
                for row in out_name:
                    try:
                        name = os.path.join(
                            out_gdb, arcpy.ValidateTableName(ds, out_gdb))
                        # Create the geometry if it exists.
                        geom = None
                        try:
                            geo_json = row['[geo]']
                            geom = arcpy.AsShape(geo_json)
                            row.pop('[geo]')
                        except KeyError:
                            pass

                        if geom:
                            if not arcpy.Exists(name):
                                if arcpy.env.outputCoordinateSystem:
                                    arcpy.CreateFeatureclass_management(
                                        out_gdb, os.path.basename(name),
                                        geom.type.upper())
                                else:
                                    arcpy.env.outputCoordinateSystem = 4326
                                    arcpy.CreateFeatureclass_management(
                                        out_gdb, os.path.basename(name),
                                        geom.type.upper())
                                layer_name = arcpy.MakeFeatureLayer_management(
                                    name, 'flayer_{0}'.format(
                                        os.path.basename(name)))
                                existing_fields = [
                                    f.name
                                    for f in arcpy.ListFields(layer_name)
                                ]
                                new_fields = []
                                field_values = []
                                for field, value in row.iteritems():
                                    valid_field = arcpy.ValidateFieldName(
                                        field, out_gdb)
                                    new_fields.append(valid_field)
                                    field_values.append(value)
                                    arcpy.AddField_management(
                                        layer_name, valid_field, 'TEXT')
                            else:
                                if not geom.type.upper() == arcpy.Describe(
                                        name).shapeType.upper():
                                    name = arcpy.CreateUniqueName(
                                        os.path.basename(name), out_gdb)
                                    if arcpy.env.outputCoordinateSystem:
                                        arcpy.CreateFeatureclass_management(
                                            out_gdb, os.path.basename(name),
                                            geom.type.upper())
                                    else:
                                        arcpy.env.outputCoordinateSystem = 4326
                                        arcpy.CreateFeatureclass_management(
                                            out_gdb, os.path.basename(name),
                                            geom.type.upper())
                                    layer_name = arcpy.MakeFeatureLayer_management(
                                        name, 'flayer_{0}'.format(
                                            os.path.basename(name)))
                                    existing_fields = [
                                        f.name
                                        for f in arcpy.ListFields(layer_name)
                                    ]
                                    new_fields = []
                                    field_values = []
                                    for field, value in row.iteritems():
                                        valid_field = arcpy.ValidateFieldName(
                                            field, out_gdb)
                                        new_fields.append(valid_field)
                                        field_values.append(value)
                                        if valid_field not in existing_fields:
                                            arcpy.AddField_management(
                                                layer_name, valid_field,
                                                'TEXT')
                        else:
                            if not arcpy.Exists(name):
                                arcpy.CreateTable_management(
                                    out_gdb, os.path.basename(name))
                                view_name = arcpy.MakeTableView_management(
                                    name, 'tableview')
                                existing_fields = [
                                    f.name for f in arcpy.ListFields(view_name)
                                ]
                                new_fields = []
                                field_values = []
                                for field, value in row.iteritems():
                                    valid_field = arcpy.ValidateFieldName(
                                        field, out_gdb)
                                    new_fields.append(valid_field)
                                    field_values.append(value)
                                    if valid_field not in existing_fields:
                                        arcpy.AddField_management(
                                            view_name, valid_field, 'TEXT')

                        if geom:
                            with arcpy.da.InsertCursor(layer_name, ["SHAPE@"] +
                                                       new_fields) as icur:
                                icur.insertRow([geom] + field_values)
                        else:
                            with arcpy.da.InsertCursor(view_name,
                                                       new_fields) as icur:
                                icur.insertRow(field_values)

                        processed_count += 1
                        if (processed_count % increment) == 0:
                            status_writer.send_percent(
                                float(processed_count) / result_count,
                                _('Added: {0}').format(row['name']),
                                'add_to_geodatabase')
                        added += 1
                        continue
                    except Exception as ex:
                        processed_count += 1
                        errors += 1
                        errors_reasons[name] = ex.message
                        continue
                continue
            # -----------------------------
            # Check the data type and clip.
            # -----------------------------
            dsc = arcpy.Describe(ds)
            if dsc.dataType == 'FeatureClass':
                if out_name == '':
                    arcpy.CopyFeatures_management(
                        ds, task_utils.create_unique_name(dsc.name, out_gdb))
                else:
                    arcpy.CopyFeatures_management(
                        ds, task_utils.create_unique_name(out_name, out_gdb))

            elif dsc.dataType == 'ShapeFile':
                if out_name == '':
                    arcpy.CopyFeatures_management(
                        ds,
                        task_utils.create_unique_name(dsc.name[:-4], out_gdb))
                else:
                    arcpy.CopyFeatures_management(
                        ds, task_utils.create_unique_name(out_name, out_gdb))

            elif dsc.dataType == 'FeatureDataset':
                if not is_fds:
                    fds_name = os.path.basename(
                        task_utils.create_unique_name(dsc.name, out_gdb))
                    fds = arcpy.CreateFeatureDataset_management(
                        out_gdb, fds_name).getOutput(0)
                else:
                    fds = out_gdb
                arcpy.env.workspace = dsc.catalogPath
                for fc in arcpy.ListFeatureClasses():
                    name = os.path.basename(
                        task_utils.create_unique_name(fc, out_gdb))
                    arcpy.CopyFeatures_management(fc, os.path.join(fds, name))
                arcpy.env.workspace = out_gdb

            elif dsc.dataType == 'RasterDataset':
                if is_fds:
                    out_gdb = os.path.dirname(out_gdb)
                if out_name == '':
                    arcpy.CopyRaster_management(
                        ds, task_utils.create_unique_name(dsc.name, out_gdb))
                else:
                    arcpy.CopyRaster_management(
                        ds, task_utils.create_unique_name(out_name, out_gdb))

            elif dsc.dataType == 'RasterCatalog':
                if is_fds:
                    out_gdb = os.path.dirname(out_gdb)
                if out_name == '':
                    arcpy.CopyRasterCatalogItems_management(
                        ds, task_utils.create_unique_name(dsc.name, out_gdb))
                else:
                    arcpy.CopyRasterCatalogItems_management(
                        ds, task_utils.create_unique_name(out_name, out_gdb))

            elif dsc.dataType == 'Layer':
                layer_from_file = arcpy.mapping.Layer(dsc.catalogPath)
                layers = arcpy.mapping.ListLayers(layer_from_file)
                for layer in layers:
                    if out_name == '':
                        name = task_utils.create_unique_name(
                            layer.name, out_gdb)
                    else:
                        name = task_utils.create_unique_name(out_name, out_gdb)
                    if layer.isFeatureLayer:
                        arcpy.CopyFeatures_management(layer.dataSource, name)
                    elif layer.isRasterLayer:
                        if is_fds:
                            name = os.path.dirname(name)
                        arcpy.CopyRaster_management(layer.dataSource, name)

            elif dsc.dataType == 'CadDrawingDataset':
                arcpy.env.workspace = dsc.catalogPath
                cad_wks_name = os.path.splitext(dsc.name)[0]
                for cad_fc in arcpy.ListFeatureClasses():
                    arcpy.CopyFeatures_management(
                        cad_fc,
                        task_utils.create_unique_name(
                            '{0}_{1}'.format(cad_wks_name, cad_fc), out_gdb))
                arcpy.env.workspace = out_gdb

            elif dsc.dataType == 'File':
                if dsc.catalogPath.endswith(
                        '.kml') or dsc.catalogPath.endswith('.kmz'):
                    name = os.path.splitext(dsc.name)[0]
                    temp_dir = tempfile.mkdtemp()
                    kml_layer = arcpy.KMLToLayer_conversion(
                        dsc.catalogPath, temp_dir, name)
                    group_layer = arcpy.mapping.Layer(
                        os.path.join(temp_dir, '{}.lyr'.format(name)))
                    for layer in arcpy.mapping.ListLayers(group_layer):
                        if layer.isFeatureLayer:
                            arcpy.CopyFeatures_management(
                                layer,
                                task_utils.create_unique_name(layer, out_gdb))
                        elif layer.isRasterLayer:
                            if is_fds:
                                out_gdb = os.path.dirname(out_gdb)
                            arcpy.CopyRaster_management(
                                layer,
                                task_utils.create_unique_name(layer, out_gdb))
                    # Clean up temp KML results.
                    arcpy.Delete_management(
                        os.path.join(temp_dir, '{}.lyr'.format(name)))
                    arcpy.Delete_management(kml_layer)
                else:
                    processed_count += 1
                    status_writer.send_percent(
                        processed_count / result_count,
                        _('Invalid input type: {0}').format(dsc.name),
                        'add_to_geodatabase')
                    skipped += 1
                    skipped_reasons[ds] = _('Invalid input type: {0}').format(
                        dsc.dataType)
                    continue

            elif dsc.dataType == 'MapDocument':
                mxd = arcpy.mapping.MapDocument(dsc.catalogPath)
                if map_frame_name:
                    df = arcpy.mapping.ListDataFrames(mxd, map_frame_name)[0]
                    layers = arcpy.mapping.ListLayers(mxd, data_frame=df)
                else:
                    layers = arcpy.mapping.ListLayers(mxd)
                for layer in layers:
                    if layer.isFeatureLayer:
                        arcpy.CopyFeatures_management(
                            layer.dataSource,
                            task_utils.create_unique_name(layer.name, out_gdb))
                    elif layer.isRasterLayer:
                        if is_fds:
                            out_gdb = os.path.dirname(out_gdb)
                        arcpy.CopyRaster_management(
                            layer.dataSource,
                            task_utils.create_unique_name(layer.name, out_gdb))
                table_views = arcpy.mapping.ListTableViews(mxd)
                if is_fds:
                    out_gdb = os.path.dirname(out_gdb)
                for table_view in table_views:
                    arcpy.CopyRows_management(
                        table_view.dataSource,
                        task_utils.create_unique_name(table_view.name,
                                                      out_gdb))
                out_gdb = arcpy.env.workspace

            elif dsc.dataType.find('Table') > 0:
                if is_fds:
                    out_gdb = os.path.dirname(out_gdb)
                if out_name == '':
                    arcpy.CopyRows_management(
                        ds, task_utils.create_unique_name(dsc.name, out_gdb))
                else:
                    arcpy.CopyRows_management(
                        ds, task_utils.create_unique_name(out_name, out_gdb))

            else:
                # Try to copy any other types such as topologies, network datasets, etc.
                if is_fds:
                    out_gdb = os.path.dirname(out_gdb)
                arcpy.Copy_management(
                    ds, task_utils.create_unique_name(dsc.name, out_gdb))

            out_gdb = arcpy.env.workspace
            processed_count += 1.
            status_writer.send_percent(processed_count / result_count,
                                       _('Added: {0}').format(ds),
                                       'add_to_geodatabase')
            status_writer.send_status(_('Added: {0}').format(ds))
            added += 1
        # Continue if an error. Process as many as possible.
        except Exception as ex:
            processed_count += 1
            status_writer.send_percent(processed_count / result_count,
                                       _('Skipped: {0}').format(ds),
                                       'add_to_geodatabase')
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors_reasons[ds] = repr(ex)
            errors += 1
            continue

    return added, errors, skipped
    print '## Etapa 4: Move eventuais arquivos KML para a respectiva pasta'
    kmllist = os.path.join(Input, folder, "*.kml")

    for file in glob.glob(kmllist):
        #shutil.move(file, os.path.join(Input, folder, 'Geodata//kml'))
        shutil.move(file, os.path.join(Input, folder, 'Dados Brutos'))

# -------------------------------------------------------------------------------------------------------
# Importa KMLs: Transforma KML que vira um geodatabase
    print '## Etapa 5: Converte KML para "Feature Class"'
    kmllist = os.path.join(Input, folder, 'Dados Brutos', "*.kml")
    kmlpath = os.path.join(Input, folder, 'Dados Brutos')

    for file in glob.glob(kmllist):
        try:
            arcpy.KMLToLayer_conversion(file, kmlpath)
            gdb = str.replace(file, '.kml',
                              '.gdb') + '//Placemarks//Multipatches'
            out = os.path.join(Input, folder, 'Geodata', "Geo_SiCAR.mdb",
                               str.replace(os.path.basename(file), '.kml', ''))
            arcpy.ddd.MultiPatchFootprint(gdb, out)

        except arcpy.ExecuteError:
            print arcpy.GetMessages()

# -------------------------------------------------------------------------------------------------------
# Retira todos os arquivos das pastas zipadas e move os arquivos zipados (brutos) para a pasta "Dados Brutos".
    print '## Etapa 6: Extrai todos os "Shapefiles" das pastas zipadas'
    ziplist = os.path.join(Input, folder, '*.zip')
    zippath = os.path.join(Input, folder)
def mainFunction(
    dataFile, geometryType, inputCoordinateSystemName,
    outputCoordinateSystemName, xField, yField, spreadsheetUniqueID, output
):  # Get parameters from ArcGIS Desktop tool by separating by comma e.g. (var1 is 1st parameter,var2 is 2nd parameter,var3 is 3rd parameter)
    """Load a spatial data file and expose it as an (optionally reprojected) layer.

    Supported inputs: a URL (downloaded to the scratch folder first), Excel
    (.xls/.xlsx), zipped shapefile (.zip), GPX, KML/KMZ, and CSV.  Point data
    from spreadsheets/CSVs is built via an XY event layer; line/polygon data
    is delegated to spreadsheetToLinePolygon().  When getCoordinateDetails()
    reports a datum transformation other than "none", the data is projected
    to the output coordinate system before the layer is created.

    Parameters (ArcGIS tool parameters, in order):
        dataFile -- path or URL to the input data file
        geometryType -- "line"/"polygon" (or anything else for point); required
            for KML inputs and for line/polygon spreadsheets
        inputCoordinateSystemName, outputCoordinateSystemName -- names resolved
            by getCoordinateDetails()
        xField, yField -- coordinate field names (Excel/CSV inputs only)
        spreadsheetUniqueID -- field uniquely identifying each feature
            (line/polygon spreadsheet inputs only)
        output -- tool output parameter; rebound to the created layer

    Returns the created layer (or sets tool output parameter 7 when running
    inside ArcGIS Desktop); calls sys.exit() on invalid/missing input options.
    arcpy and general Python errors are caught, printed, optionally logged
    and emailed -- nothing propagates to the caller.
    """
    try:
        # --------------------------------------- Start of code --------------------------------------- #

        # Get coordinate systems and transformation
        inputCoordinateSystem, outputCoordinateSystem, transformation = getCoordinateDetails(
            inputCoordinateSystemName, outputCoordinateSystemName)

        # If url set as data file
        # NOTE(review): substring test -- this treats any path containing
        # "http" as a URL, not just strings starting with a scheme; confirm
        # local paths can never contain "http".
        urlCheck = ['http', 'https']
        if any(file in dataFile for file in urlCheck):
            printMessage("Downloading file from - " + dataFile + "...", "info")
            # Download the file from the link
            # NOTE(review): "file" shadows the Python 2 builtin of the same name.
            file = urllib2.urlopen(dataFile)
            fileName, fileExt = os.path.splitext(dataFile)
            # Download in chunks
            fileChunk = 16 * 1024
            with open(os.path.join(arcpy.env.scratchFolder, "Data" + fileExt),
                      'wb') as output:
                while True:
                    chunk = file.read(fileChunk)
                    if not chunk:
                        break
                    # Write chunk to output file
                    output.write(chunk)
            # NOTE(review): redundant -- the "with" block above already closed
            # the file; closing an already-closed file is a harmless no-op.
            output.close()
            # From here on, process the downloaded local copy instead of the URL.
            dataFile = os.path.join(arcpy.env.scratchFolder, "Data" + fileExt)

        # If data type is excel
        if dataFile.lower().endswith(('.xls', '.xlsx')):
            # If x and y fields provided
            if ((xField) and (yField)):
                # Get geometry type - line or polygon
                if ((geometryType.lower() == "line")
                        or (geometryType.lower() == "polygon")):
                    # If unique Id provided
                    if (spreadsheetUniqueID):
                        # Call function to get layer from spreadsheet
                        output = spreadsheetToLinePolygon(
                            dataFile, geometryType, xField, yField,
                            spreadsheetUniqueID, inputCoordinateSystemName,
                            inputCoordinateSystem, outputCoordinateSystemName,
                            outputCoordinateSystem, transformation)
                    else:
                        printMessage(
                            "Please provide a ID field in the spreadsheet to uniquely identify each feature...",
                            "error")
                        sys.exit()
                # Get geometry type - point
                else:
                    # If projection needed
                    if (transformation.lower() != "none"):
                        printMessage("Importing Excel sheet...", "info")
                        arcpy.ExcelToTable_conversion(dataFile,
                                                      "in_memory\\Dataset", "")
                        arcpy.MakeXYEventLayer_management(
                            "in_memory\\Dataset", xField, yField, "InputLayer",
                            inputCoordinateSystem, "")
                        printMessage(
                            "Projecting layer from " +
                            inputCoordinateSystemName + " to " +
                            outputCoordinateSystemName + "...", "info")
                        arcpy.Project_management(
                            "InputLayer",
                            os.path.join(arcpy.env.scratchGDB,
                                         "Layer_Projected"),
                            outputCoordinateSystem, transformation,
                            inputCoordinateSystem, "NO_PRESERVE_SHAPE", "")
                        printMessage("Creating layer...", "info")
                        output = arcpy.MakeFeatureLayer_management(
                            os.path.join(arcpy.env.scratchGDB,
                                         "Layer_Projected"), "Layer", "", "",
                            "")
                    else:
                        printMessage("Importing Excel sheet...", "info")
                        arcpy.ExcelToTable_conversion(dataFile,
                                                      "in_memory\\Dataset", "")
                        printMessage("Creating layer...", "info")
                        output = arcpy.MakeXYEventLayer_management(
                            "in_memory\\Dataset", xField, yField, "Layer",
                            inputCoordinateSystem, "")
            else:
                printMessage(
                    "Please provide an X and Y field for the Excel file...",
                    "error")
                sys.exit()
        # If data type is shapefile
        elif dataFile.lower().endswith('.zip'):
            printMessage("Importing Shapefile...", "info")
            # Extract the zip file to a temporary location
            # NOTE(review): "zip" shadows the builtin of the same name.
            zip = zipfile.ZipFile(dataFile, mode="r")
            tempFolder = arcpy.CreateFolder_management(
                arcpy.env.scratchFolder, "Data-" + str(uuid.uuid1()))
            zip.extractall(str(tempFolder))
            # Get the extracted shapefile
            # If the archive contains several .shp files, the most recently
            # modified one wins.
            shapefile = max(glob.iglob(str(tempFolder) + r"\*.shp"),
                            key=os.path.getmtime)
            # If projection needed
            if (transformation.lower() != "none"):
                printMessage(
                    "Projecting layer from " + inputCoordinateSystemName +
                    " to " + outputCoordinateSystemName + "...", "info")
                arcpy.Project_management(
                    shapefile,
                    os.path.join(arcpy.env.scratchGDB, "Layer_Projected"),
                    outputCoordinateSystem, transformation,
                    inputCoordinateSystem, "NO_PRESERVE_SHAPE", "")
                printMessage("Creating layer...", "info")
                output = arcpy.MakeFeatureLayer_management(
                    os.path.join(arcpy.env.scratchGDB, "Layer_Projected"),
                    "Layer", "", "", "")
            else:
                printMessage("Creating layer...", "info")
                output = arcpy.MakeFeatureLayer_management(
                    shapefile, "Layer", "", "", "")
        # If data type is gpx
        elif dataFile.lower().endswith('.gpx'):
            printMessage("Importing GPX...", "info")
            arcpy.GPXtoFeatures_conversion(dataFile, "in_memory\\Dataset")
            # If projection needed
            if (transformation.lower() != "none"):
                printMessage(
                    "Projecting layer from " + inputCoordinateSystemName +
                    " to " + outputCoordinateSystemName + "...", "info")
                arcpy.Project_management(
                    "in_memory\\Dataset",
                    os.path.join(arcpy.env.scratchGDB, "Layer_Projected"),
                    outputCoordinateSystem, transformation,
                    inputCoordinateSystem, "NO_PRESERVE_SHAPE", "")
                printMessage("Creating layer...", "info")
                output = arcpy.MakeFeatureLayer_management(
                    os.path.join(arcpy.env.scratchGDB, "Layer_Projected"),
                    "Layer", "", "", "")
            else:
                printMessage("Creating layer...", "info")
                output = arcpy.MakeFeatureLayer_management(
                    "in_memory\\Dataset", "Layer", "", "", "")
        # If data type is kml
        elif dataFile.lower().endswith(('.kml', '.kmz')):
            # If kml geometry type provided
            if (geometryType):
                printMessage("Importing KML...", "info")
                # Converts the KML into KML.gdb under the scratch folder;
                # ground overlays are skipped.
                arcpy.KMLToLayer_conversion(dataFile, arcpy.env.scratchFolder,
                                            "KML", "NO_GROUNDOVERLAY")
                outputGeodatabase = os.path.join(arcpy.env.scratchFolder,
                                                 "KML.gdb")

                # Get the kml dataset as specified
                # Any geometryType other than "line"/"polygon" falls through
                # to the Points feature class.
                if (geometryType.lower() == "line"):
                    kmlDataset = os.path.join(outputGeodatabase,
                                              "Placemarks\Polylines")
                elif (geometryType.lower() == "polygon"):
                    kmlDataset = os.path.join(outputGeodatabase,
                                              "Placemarks\Polygons")
                else:
                    kmlDataset = os.path.join(outputGeodatabase,
                                              "Placemarks\Points")
                # If projection needed
                if (transformation.lower() != "none"):
                    printMessage(
                        "Projecting layer from " + inputCoordinateSystemName +
                        " to " + outputCoordinateSystemName + "...", "info")
                    arcpy.Project_management(
                        kmlDataset,
                        os.path.join(arcpy.env.scratchGDB, "Layer_Projected"),
                        outputCoordinateSystem, transformation,
                        inputCoordinateSystem, "NO_PRESERVE_SHAPE", "")
                    printMessage("Creating layer...", "info")
                    output = arcpy.MakeFeatureLayer_management(
                        os.path.join(arcpy.env.scratchGDB, "Layer_Projected"),
                        "Layer", "", "", "")
                else:
                    printMessage("Creating layer...", "info")
                    output = arcpy.MakeFeatureLayer_management(
                        kmlDataset, "Layer", "", "", "")
            else:
                printMessage(
                    "Please provide a geometry type for the KML file...",
                    "error")
                sys.exit()
        # If data type is csv
        elif dataFile.lower().endswith('.csv'):
            # If x and y fields provided
            if ((xField) and (yField)):
                # Get geometry type - line or polygon
                if ((geometryType.lower() == "line")
                        or (geometryType.lower() == "polygon")):
                    # If unique Id provided
                    if (spreadsheetUniqueID):
                        # Call function to get layer from spreadsheet
                        output = spreadsheetToLinePolygon(
                            dataFile, geometryType, xField, yField,
                            spreadsheetUniqueID, inputCoordinateSystemName,
                            inputCoordinateSystem, outputCoordinateSystemName,
                            outputCoordinateSystem, transformation)
                    else:
                        printMessage(
                            "Please provide a ID field in the spreadsheet to uniquely identify each feature...",
                            "error")
                        sys.exit()
                # Get geometry type - point
                else:
                    # If projection needed
                    if (transformation.lower() != "none"):
                        printMessage("Importing CSV...", "info")
                        # Unlike Excel, the CSV is consumed directly by the
                        # XY event layer -- no table conversion step.
                        arcpy.MakeXYEventLayer_management(
                            dataFile, xField, yField, "InputLayer",
                            inputCoordinateSystem, "")
                        printMessage(
                            "Projecting layer from " +
                            inputCoordinateSystemName + " to " +
                            outputCoordinateSystemName + "...", "info")
                        arcpy.Project_management(
                            "InputLayer",
                            os.path.join(arcpy.env.scratchGDB,
                                         "Layer_Projected"),
                            outputCoordinateSystem, transformation,
                            inputCoordinateSystem, "NO_PRESERVE_SHAPE", "")
                        printMessage("Creating layer...", "info")
                        output = arcpy.MakeFeatureLayer_management(
                            os.path.join(arcpy.env.scratchGDB,
                                         "Layer_Projected"), "Layer", "", "",
                            "")
                    else:
                        printMessage("Importing CSV...", "info")
                        printMessage("Creating layer...", "info")
                        output = arcpy.MakeXYEventLayer_management(
                            dataFile, xField, yField, "Layer",
                            inputCoordinateSystem, "")
            else:
                printMessage(
                    "Please provide an X and Y field for the CSV file...",
                    "error")
                sys.exit()
        else:
            printMessage(
                "Not a valid data file. Please use .csv,.xls,.xlsx,.zip,.gpx,.kml or .kmz...",
                "error")
            sys.exit()

        # --------------------------------------- End of code --------------------------------------- #
        # If called from gp tool return the arcpy parameter
        # True when the enclosing module is executed as a script (not imported).
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If ArcGIS desktop installed
                if (arcgisDesktop == "true"):
                    arcpy.SetParameter(7, output)
                # ArcGIS desktop not installed
                else:
                    return output
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        # NOTE(review): the "return output" paths above exit before this block,
        # so "Process ended." is only logged on the SetParameter / no-output
        # paths -- confirm that is intended.
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If arcpy error
    except arcpy.ExecuteError:
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)
        printMessage(errorMessage, "error")

        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        # If many arguments
        if (e.args):
            # Concatenate all exception arguments into one UTF-8 safe string.
            for i in range(len(e.args)):
                if (i == 0):
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = str(
                            e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = unicode(e.args[i]).encode('utf-8')
                else:
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = errorMessage + " " + str(
                            e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = errorMessage + " " + unicode(
                            e.args[i]).encode('utf-8')
        # Else just one argument
        else:
            # No args at all -- pass the exception object itself along.
            errorMessage = e
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
예제 #23
0
def clip_data(input_items, out_workspace, clip_polygon, out_format):
    """Clips input results using the clip polygon.

    :param input_items: mapping of input item -> output name; a *list* value
        marks the item as raw geometry rows (geo-json dicts) rather than a
        dataset path
    :param out_workspace: the output workspace where results are created
    :param clip_polygon: the clip polygon geometry
    :param out_format: the type of output to be created (i.e. SHP for shapefile)
    :return: tuple of (clipped, errors, skipped) counts
    """

    global processed_count
    global layer_name
    global existing_fields
    global new_fields
    global field_values

    clipped = 0
    errors = 0
    skipped = 0
    fds = None

    for ds, out_name in input_items.iteritems():
        try:
            # Anything that is not a geometry-row list is treated as a path
            # input; a non-empty out_name only applies to the list case.
            if not isinstance(out_name, list):
                out_name = ''
            # -----------------------------------------------
            # If the item is a service layer, process and continue.
            # -----------------------------------------------
            if ds.startswith('http'):
                try:
                    clip_service_layers(ds, clip_polygon, out_name)
                    processed_count += 1.
                    clipped += 1
                    status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(ds), 'clip_data_by_features')
                    continue
                except Exception as ex:
                    status_writer.send_state(status.STAT_WARNING, str(ex))
                    errors_reasons[ds] = ex.message
                    errors += 1
                    continue

            # -----------------------------------------------
            # Check if the path is a MXD data frame type.
            # ------------------------------------------------
            map_frame_name = task_utils.get_data_frame_name(ds)
            if map_frame_name:
                # Strip the "| data frame" suffix to get the MXD path.
                ds = ds.split('|')[0].strip()

            # ---------------------------------
            # Is the input is geometry features
            # ---------------------------------
            if isinstance(out_name, list):
                arcpy.env.overwriteOutput = True
                increment = task_utils.get_increment(result_count)
                for row in out_name:
                    try:
                        name = os.path.join(out_workspace, arcpy.ValidateTableName(ds, out_workspace))
                        if out_format == 'SHP':
                            name += '.shp'

                        # Build an arcpy geometry from the row's geo-json.
                        geo_json = row['[geo]']
                        geom = arcpy.AsShape(geo_json)
                        row.pop('[geo]')
                        if not arcpy.Exists(name):
                            # Default to WGS84 (4326) when no output
                            # coordinate system has been set.
                            if arcpy.env.outputCoordinateSystem:
                                arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())
                            else:
                                arcpy.env.outputCoordinateSystem = 4326
                                arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())

                            layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                            if out_format == 'SHP':
                                arcpy.DeleteField_management(layer_name, 'Id')
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            # All attribute fields are created as TEXT.
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                new_fields.append(valid_field)
                                field_values.append(value)
                                arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                        else:
                            # Existing output with a different geometry type:
                            # create a new uniquely-named feature class.
                            if not geom.type.upper() == arcpy.Describe(name).shapeType.upper():
                                name = arcpy.CreateUniqueName(os.path.basename(name), out_workspace)
                                if arcpy.env.outputCoordinateSystem:
                                    arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())
                                else:
                                    arcpy.env.outputCoordinateSystem = 4326
                                    arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())

                                layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                                if out_format == 'SHP':
                                    arcpy.DeleteField_management(layer_name, 'Id')
                                existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                                new_fields = []
                                field_values = []
                                for field, value in row.iteritems():
                                    valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                    new_fields.append(valid_field)
                                    field_values.append(value)
                                    if not valid_field in existing_fields:
                                        arcpy.AddField_management(layer_name, valid_field, 'TEXT')

                        # Clip the single geometry and append the result row.
                        clipped_geometry = arcpy.Clip_analysis(geom, clip_polygon, arcpy.Geometry())
                        if clipped_geometry:
                            with arcpy.da.InsertCursor(layer_name, ["SHAPE@"] + new_fields) as icur:
                                icur.insertRow([clipped_geometry[0]] + field_values)

                        processed_count += 1
                        if (processed_count % increment) == 0:
                            status_writer.send_percent(float(processed_count) / result_count, _('Clipped: {0}').format(row['name']), 'clip_data')
                        clipped += 1
                    except KeyError:
                        processed_count += 1
                        skipped += 1
                        skipped_reasons[ds] = 'Invalid input type'
                        # Fixed: state constant and translated message were
                        # passed together to _(), which takes one argument.
                        status_writer.send_state(status.STAT_WARNING, _('Invalid input type: {0}').format(ds))
                    except Exception as ex:
                        processed_count += 1
                        errors += 1
                        errors_reasons[ds] = ex.message
                        continue
                continue


            dsc = arcpy.Describe(ds)
            try:
                if dsc.spatialReference.name == 'Unknown':
                    status_writer.send_state(status.STAT_WARNING, _('{0} has an Unknown projection. Output may be invalid or empty.').format(dsc.name))
            except AttributeError:
                # Some data types have no spatialReference property.
                pass

            # -----------------------------
            # Check the data type and clip.
            # -----------------------------

            # Feature Class or ShapeFile
            if dsc.dataType in ('FeatureClass', 'ShapeFile', 'Shapefile'):
                if out_name == '':
                    name = arcpy.ValidateTableName(dsc.name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                else:
                    name = arcpy.ValidateTableName(out_name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                # Does the input exist in a feature dataset? If so, create the feature dataset if it doesn't exist.
                ws = os.path.dirname(ds)
                if any(ext in ws for ext in ('.gdb', '.mdb', '.sde')):
                    if os.path.splitext(ws)[1] in ('.gdb', '.mdb', '.sde'):
                        # Input lives directly in the database workspace.
                        arcpy.Clip_analysis(ds, clip_polygon, name)
                    else:
                        # Input lives in a feature dataset; mirror it in the output.
                        fds_name = os.path.basename(ws)
                        if not arcpy.Exists(os.path.join(out_workspace, fds_name)):
                            arcpy.CreateFeatureDataset_management(out_workspace, fds_name, dsc.spatialReference)
                        arcpy.Clip_analysis(ds, clip_polygon,
                                            os.path.join(out_workspace, fds_name, os.path.basename(ds)))
                else:
                    arcpy.Clip_analysis(ds, clip_polygon, name)

            # Feature dataset
            elif dsc.dataType == 'FeatureDataset':
                if not out_format == 'SHP':
                    fds_name = os.path.basename(task_utils.create_unique_name(dsc.name, out_workspace))
                    fds = arcpy.CreateFeatureDataset_management(out_workspace, fds_name)
                arcpy.env.workspace = ds
                for fc in arcpy.ListFeatureClasses():
                    try:
                        if not out_format == 'SHP':
                            arcpy.Clip_analysis(fc, clip_polygon, task_utils.create_unique_name(fc, fds.getOutput(0)))
                        else:
                            arcpy.Clip_analysis(fc, clip_polygon, task_utils.create_unique_name(fc, out_workspace))
                    except arcpy.ExecuteError:
                        # Best-effort: skip feature classes that fail to clip.
                        pass
                arcpy.env.workspace = out_workspace

            # Raster dataset
            elif dsc.dataType == 'RasterDataset':
                if out_name == '':
                    name = task_utils.create_unique_name(dsc.name, out_workspace)
                else:
                    name = task_utils.create_unique_name(out_name, out_workspace)
                if type(clip_polygon) is arcpy.Polygon:
                    extent = clip_polygon.extent
                else:
                    extent = arcpy.Describe(clip_polygon).extent
                ext = '{0} {1} {2} {3}'.format(extent.XMin, extent.YMin, extent.XMax, extent.YMax)
                arcpy.Clip_management(ds, ext, name, in_template_dataset=clip_polygon, clipping_geometry="ClippingGeometry")

            # Layer file
            elif dsc.dataType == 'Layer':
                task_utils.clip_layer_file(dsc.catalogPath, clip_polygon, arcpy.env.workspace)

            # Cad drawing dataset
            elif dsc.dataType == 'CadDrawingDataset':
                arcpy.env.workspace = dsc.catalogPath
                cad_wks_name = os.path.splitext(dsc.name)[0]
                for cad_fc in arcpy.ListFeatureClasses():
                    name = task_utils.create_unique_name('{0}_{1}'.format(cad_wks_name, cad_fc), out_workspace)
                    arcpy.Clip_analysis(cad_fc, clip_polygon, name)
                arcpy.env.workspace = out_workspace

            # File
            elif dsc.dataType in ('File', 'TextFile'):
                if dsc.catalogPath.endswith('.kml') or dsc.catalogPath.endswith('.kmz'):
                    # Convert the KML/KMZ to layers, clip each feature layer,
                    # then remove the temporary conversion outputs.
                    name = os.path.splitext(dsc.name)[0]
                    kml_layer = arcpy.KMLToLayer_conversion(dsc.catalogPath, arcpy.env.scratchFolder, name)
                    group_layer = arcpy.mapping.Layer(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    for layer in arcpy.mapping.ListLayers(group_layer):
                        if layer.isFeatureLayer:
                            arcpy.Clip_analysis(layer,
                                                clip_polygon,
                                                task_utils.create_unique_name(layer, out_workspace))
                    # Clean up temp KML results.
                    arcpy.Delete_management(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    arcpy.Delete_management(kml_layer[1])
                    del group_layer
                else:
                    # Plain files are copied, not clipped.
                    if out_name == '':
                        out_name = dsc.name
                    # NOTE(review): for a .gdb workspace the copy goes to the
                    # gdb's parent folder (files cannot live inside a gdb).
                    if out_workspace.endswith('.gdb'):
                        f = arcpy.Copy_management(ds, os.path.join(os.path.dirname(out_workspace), out_name))
                    else:
                        f = arcpy.Copy_management(ds, os.path.join(out_workspace, out_name))
                    processed_count += 1.
                    status_writer.send_percent(processed_count / result_count, _('Copied file: {0}').format(dsc.name), 'clip_data')
                    status_writer.send_state(_('Copied file: {0}').format(dsc.name))
                    clipped += 1
                    if out_format in ('LPK', 'MPK'):
                        files_to_package.append(f.getOutput(0))
                    continue

            # Map document
            elif dsc.dataType == 'MapDocument':
                task_utils.clip_mxd_layers(dsc.catalogPath, clip_polygon, arcpy.env.workspace, map_frame_name)
            else:
                processed_count += 1.
                status_writer.send_percent(processed_count / result_count, _('Invalid input type: {0}').format(ds), 'clip_data')
                status_writer.send_state(_('Invalid input type: {0}').format(ds))
                skipped_reasons[ds] = _('Invalid input type: {0}').format(dsc.dataType)
                skipped += 1
                continue

            processed_count += 1.
            status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(dsc.name), 'clip_data')
            status_writer.send_status(_('Clipped: {0}').format(dsc.name))
            clipped += 1
        # Continue. Process as many as possible.
        except Exception as ex:
            processed_count += 1.
            status_writer.send_percent(processed_count / result_count, _('Skipped: {0}').format(os.path.basename(ds)), 'clip_data')
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors_reasons[ds] = repr(ex)
            errors += 1
            pass
    return clipped, errors, skipped
예제 #24
0
import arcpy
import sys

# Require BOTH the input KML file and the output folder; the original check
# (len == 1) let a single argument through and crashed on sys.argv[2].
if len(sys.argv) < 3:
    exit(1)

in_kml_file = sys.argv[1]
output_folder = sys.argv[2]

# Convert the KML/KMZ file to a layer file and file geodatabase.
arcpy.KMLToLayer_conversion(in_kml_file, output_folder)
'''
arcpy.Buffer_analysis("roads", outfile, distance, "FULL", "ROUND", "LIST", "Distance")

field_area = 'Area'
minimum_area = 20
fcInput = 'ThePathAndOrNameOfMyFeatureClass'
fcOutput = 'ThePathAndOrNameOfTheOutputFeatureClass'
where_clause = '{} > {}'.format(arcpy.AddFieldDelimiters(fcInput, field_area), minimum_area)
arcpy.Select_analysis(fcInput, fcOutput, where_clause)
'''
예제 #25
0
def getPerClassFunc(fileFolder, grid, codetoClass, code, rasterPoints,
                    outputDatabase, verbose):

    perClass = fileFolder + "\\" + grid
    kmls = perClass + "\\KMLs"
    addPoly = perClass + "\\AddPolygon"
    removePoly = poly = perClass + "\\RemovePolygon"

    tempKmls = perClass + "\\tempKmls"
    tempAdd = perClass + "\\tempAdd"
    tempRemove = perClass + "\\tempRemove"
    tempMerge = perClass + "\\templayermerge"
    os.makedirs(tempKmls)
    os.makedirs(tempAdd)
    os.makedirs(tempRemove)
    os.makedirs(tempMerge)

    tempDatabase = "temp.gdb"
    arcpy.CreateFileGDB_management(perClass, tempDatabase)

    listKmls = os.listdir(kmls)
    kmlNumber = len(listKmls)

    listAddPoly = os.listdir(addPoly)
    addNumber = len(listAddPoly)

    listRemovePoly = os.listdir(removePoly)
    removeNumber = len(listRemovePoly)

    kmlLayer = []

    if kmlNumber > 0:
        for file in listKmls:
            sampKML = kmls + "\\" + file
            #get short name
            splitext = os.path.splitext(file)
            outputName = splitext[0]
            # Process: KML To Layer
            arcpy.KMLToLayer_conversion(sampKML, tempKmls, outputName,
                                        "NO_GROUNDOVERLAY")  ##
            kmlLayer.append(tempKmls + "\\" + outputName +
                            ".gdb\\Placemarks\\Points")  ##
            #print kmlLayer
            if verbose:
                print "KML to Layer is done"

    if addNumber > 0:
        for file in listAddPoly:
            sampAdd = addPoly + "\\" + file
            splitext = os.path.splitext(file)
            outputName = splitext[0]
            # Process: KML To Layer
            arcpy.KMLToLayer_conversion(sampAdd, tempAdd, outputName,
                                        "NO_GROUNDOVERLAY")
            addPolygon = tempAdd + "\\" + outputName + ".gdb\\Placemarks\\Polygons"

    if removeNumber > 0:
        for file in listRemovePoly:
            sampRemove = removePoly + "\\" + file
            splitext = os.path.splitext(file)
            outputName = splitext[0]
            # Process: KML To Layer
            arcpy.KMLToLayer_conversion(sampRemove, tempRemove, outputName,
                                        "NO_GROUNDOVERLAY")
            removePolygon = tempRemove + "\\" + outputName + ".gdb\\Placemarks\\Polygons"

    # create merge feature class from different groups in perclass
    if kmlNumber >= 1:
        # Process: Merge
        outputmerge = perClass + "\\" + tempDatabase + "\\mergelayer"
        arcpy.Merge_management(kmlLayer, outputmerge)

    # create feature class for remove polygons: Remove points fall in removepolygons
    if removeNumber > 0 and kmlNumber > 0:

        outputmerge_rm = tempMerge + "\\mergelayer"
        #print outputmerge,removePolygon
        # Process: Select Layer By Location
        arcpy.MakeFeatureLayer_management(outputmerge, outputmerge_rm)
        arcpy.SelectLayerByLocation_management(outputmerge_rm, "WITHIN",
                                               removePolygon, "",
                                               "NEW_SELECTION")

        # Process: Select Layer By Attribute
        arcpy.SelectLayerByAttribute_management(outputmerge_rm,
                                                "SWITCH_SELECTION", "")

        # Process: Copy Features
        outputRemove = perClass + "\\" + tempDatabase + "\\outputRemove"
        arcpy.CopyFeatures_management(outputmerge_rm, outputRemove, "", "0",
                                      "0", "0")
        if verbose:
            print "remove points is done"

    #create feature class for addpolygons
    if addNumber > 0:
        # Process: Select Layer By Location
        rasterPointsForAdd = tempMerge + "\\rasterpoints"
        arcpy.MakeFeatureLayer_management(rasterPoints, rasterPointsForAdd)
        arcpy.SelectLayerByLocation_management(rasterPointsForAdd, "WITHIN",
                                               addPolygon, "", "NEW_SELECTION")

        # Process: Copy Features
        outputAdd = perClass + "\\" + tempDatabase + "\\outputAdd"
        arcpy.CopyFeatures_management(rasterPointsForAdd, outputAdd, "", "0",
                                      "0", "0")
        if verbose:
            print "add points is done"

    isOutput = 0
    # merge if both add and remove
    if addNumber > 0 and removeNumber > 0:
        # merge
        outputPerClass = perClass + "\\" + tempDatabase + "\\outputPerclass"
        arcpy.Merge_management([outputRemove, outputAdd], outputPerClass)
        arcpy.DeleteIdentical_management(outputPerClass, "Shape")
        isOutput = 1
        if verbose:
            print "merge add and remove is done"

    if addNumber > 0 and removeNumber == 0 and kmlNumber == 0:
        outputPerClass = outputAdd
        isOutput = 1

    if addNumber == 0 and removeNumber > 0:
        outputPerClass = outputRemove
        isOutput = 1

    if kmlNumber > 0 and addNumber == 0 and removeNumber == 0:
        outputPerClass = outputmerge
        isOutput = 1

    if kmlNumber > 0 and addNumber > 0 and removeNumber == 0:
        # merge
        outputPerClass = perClass + "\\" + tempDatabase + "\\outputPerclass"
        arcpy.Merge_management([outputmerge, outputAdd], outputPerClass)
        arcpy.DeleteIdentical_management(outputPerClass, "Shape")
        isOutput = 1

    if addNumber == 0 and removeNumber == 0 and kmlNumber > 0:
        outputPerClass = outputmerge
        isOutput = 1

    if isOutput > 0:
        # add field and calculate field
        # Process: Add Field
        arcpy.AddField_management(outputPerClass, "cls_lbl", "LONG", "", "",
                                  "", "", "NULLABLE", "NON_REQUIRED", "")

        # Process: Calculate Field
        arcpy.CalculateField_management(outputPerClass, "cls_lbl",
                                        codetoClass[code], "VB", "")
        #arcpy.AddGeometryAttributes_management(outputPerClass, "POINT_X_Y_Z_M")

        if verbose:
            print "Add and calculate field is done"

        # get output
        outputGdb = fileFolder + "\\" + outputDatabase + "\\" + grid
        arcpy.CopyFeatures_management(outputPerClass, outputGdb, "", "0", "0",
                                      "0")

        if verbose:
            print grid + " is done"
import os
import arcpy

# Convert every .kml file in the GPS folder to an ArcGIS layer + file gdb.
inDir = r"c:\Users\jsilhavy\Documents\OB\Treninky\0912_Naves\GPS"
outDir = r"c:\Users\jsilhavy\Documents\OB\Treninky\0912_Naves\GPS" + "\\"  #+ "gdb" + "\\"
shp_listDir = os.listdir(inDir)
for fname in shp_listDir:
    # Skip anything that is not a KML file.
    if fname[-4:] != ".kml":
        continue
    source_kml = inDir + "\\" + fname
    # Output name is the file name without its ".kml" extension.
    out_name = fname[:-4]
    arcpy.KMLToLayer_conversion(in_kml_file=source_kml,
                                output_folder=outDir,
                                output_data=out_name,
                                include_groundoverlay="NO_GROUNDOVERLAY")
예제 #27
0
#Execute
# Create the file geodatabase that will receive the converted layers.
# NOTE(review): `interFolder` and `gdb_name` are defined earlier in the full
# script (not visible here) -- confirm before running this section alone.
arcpy.CreateFileGDB_management(interFolder, gdb_name)

print "Step 1 completed at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

## ---------------------------------------------------------------------------
## 2. Export KMZ files as layers in new geodatabases
## Description: Convert all KMZ found in the current workspace

print "\nStep 2 Export KMZ files starts at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

# '*.KM*' matches both .kml and .kmz files in the current arcpy workspace.
for kmz in arcpy.ListFiles('*.KM*'):
    arcpy.KMLToLayer_conversion(kmz, interFolder)

print "Step 2 completed at", datetime.datetime.now().strftime(
    "%A, %B %d %Y %I:%M:%S%p")

## ---------------------------------------------------------------------------
## 3. Export feature class
## Description: Export feature class in the geodatabase

print "\nStep 3 Rename and export feature class starts at", datetime.datetime.now(
).strftime("%A, %B %d %Y %I:%M:%S%p")

# Switch the workspace to the folder holding the newly created gdbs.
arcpy.env.workspace = interFolder

# Loop through all the FileGeodatabases within the workspace
wks = arcpy.ListWorkspaces('*Confined*', 'FileGDB')
예제 #28
0
                arcpy.AddError(row.getValue("ObjectID"))
                print "fullfipsid is NULL, sheck to see if located inside Colorado. OBJECTID:"
                print row.getValue("ObjectID")
        arcpy.AddMessage("CHECK COMPLETE")
        print "Done."
        del rows, row
    except Exception as a:
        arcpy.AddMessage("Check Failed.")
        print "Check Failed."

# Process: LOADS TO STAGING TEMPLATE
    print "Loading to staging template..."
    try:
        arcpy.Append_management("tableOutput", midmileTemplate, "NO_TEST", "", "")
        print "Loaded to staging template"
    except Exception as a:
        print a
        print "Staging template append FAILURE"

# Batch KML/KMZ process
# NOTE: `<>` is the Python 2-only inequality operator (this is Python 2 code).
if batchKML <>"":

# Changes workspace
    arcpy.env.workspace = batchKML

# Convert all KMZ and KML files found in the current workspace
# NOTE(review): this loop is dedented to module level, so it runs even when
# batchKML is empty -- confirm whether it was meant to sit inside the if.
for kmz in arcpy.ListFiles('*.KM*'):
    print "CONVERTING: " + os.path.join(batchKML,kmz)
    arcpy.KMLToLayer_conversion(kmz, arcpy.env.workspace)

예제 #29
0
def execute(request):
    """Creates a GeoPDF.
    :param request: json as a dict.
    """
    added_to_map = 0
    errors = 0
    skipped = 0

    parameters = request['params']
    num_results, response_index = task_utils.get_result_count(parameters)
    docs = parameters[response_index]['response']['docs']
    input_items = task_utils.get_input_items(docs)
    input_rows = collections.defaultdict(list)
    for doc in docs:
        if 'path' not in doc:
            input_rows[doc['name']].append(doc)
    if num_results > task_utils.CHUNK_SIZE:
        status_writer.send_state(status.STAT_FAILED,
                                 'Reduce results to 25 or less.')
        return

    map_template = task_utils.get_parameter_value(parameters, 'map_template',
                                                  'value')
    base_map = task_utils.get_parameter_value(parameters, 'base_map', 'value')
    map_title = task_utils.get_parameter_value(parameters, 'map_title',
                                               'value')
    attribute_setting = task_utils.get_parameter_value(parameters,
                                                       'attribute_settings',
                                                       'value')
    author = task_utils.get_parameter_value(parameters, 'map_author', 'value')
    output_file_name = task_utils.get_parameter_value(parameters,
                                                      'output_file_name',
                                                      'value')
    if not output_file_name:
        output_file_name = 'output_pdf'
    try:
        map_view = task_utils.get_parameter_value(parameters, 'map_view',
                                                  'extent')
    except KeyError:
        map_view = None
        pass

    temp_folder = os.path.join(request['folder'], 'temp')
    if not os.path.exists(temp_folder):
        os.makedirs(temp_folder)

    if base_map == 'NONE':
        base_layer = None
    else:
        base_layer = arcpy.mapping.Layer(
            os.path.join(os.path.dirname(os.path.dirname(__file__)),
                         'supportfiles', 'basemaps',
                         '{0}.lyr'.format(base_map)))
    mxd_path = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                            'supportfiles', 'frame', map_template)
    mxd = arcpy.mapping.MapDocument(mxd_path)
    data_frame = arcpy.mapping.ListDataFrames(mxd)[0]

    layers = []
    all_layers = []

    if input_rows:
        for name, rows in input_rows.iteritems():
            for row in rows:
                try:
                    name = arcpy.CreateUniqueName(name, 'in_memory')
                    # Create the geometry.
                    geo_json = row['[geo]']
                    geom = arcpy.AsShape(geo_json)
                    arcpy.CopyFeatures_management(geom, name)
                    feature_layer = arcpy.MakeFeatureLayer_management(
                        name, os.path.basename(name))
                    layer_file = arcpy.SaveToLayerFile_management(
                        feature_layer,
                        os.path.join(temp_folder,
                                     '{0}.lyr'.format(os.path.basename(name))))
                    layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))
                    all_layers.append(
                        arcpy.mapping.Layer(layer_file.getOutput(0)))
                    added_to_map += 1
                except KeyError:
                    skipped += 1
                    skipped_reasons[name] = 'No geographic information'
                    continue

    for i, item in enumerate(input_items, 1):
        try:
            # Is the item a mxd data frame.
            map_frame_name = task_utils.get_data_frame_name(item)
            if map_frame_name:
                item = item.split('|')[0].strip()

            dsc = arcpy.Describe(item)
            if dsc.dataType == 'Layer':
                layers.append(arcpy.mapping.Layer(dsc.catalogPath))

            elif dsc.dataType == 'FeatureClass' or dsc.dataType == 'ShapeFile':
                if os.path.basename(item) in [l.name for l in all_layers]:
                    layer_name = '{0}_{1}'.format(os.path.basename(item), i)
                else:
                    layer_name = os.path.basename(item)
                feature_layer = arcpy.MakeFeatureLayer_management(
                    item, layer_name)
                layer_file = arcpy.SaveToLayerFile_management(
                    feature_layer,
                    os.path.join(temp_folder, '{0}.lyr'.format(layer_name)))
                layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))
                all_layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))

            elif dsc.dataType == 'FeatureDataset':
                arcpy.env.workspace = item
                for fc in arcpy.ListFeatureClasses():
                    layer_file = arcpy.SaveToLayerFile_management(
                        arcpy.MakeFeatureLayer_management(
                            fc, '{0}_{1}'.format(fc, i)),
                        os.path.join(temp_folder, '{0}_{1}.lyr'.format(fc, i)))
                    layer = arcpy.mapping.Layer(layer_file.getOutput(0))
                    layer.name = fc
                    layers.append(layer)
                    all_layers.append(layer)

            elif dsc.dataType == 'RasterDataset':
                if os.path.basename(item) in [l.name for l in all_layers]:
                    layer_name = '{0}_{1}'.format(os.path.basename(item), i)
                else:
                    layer_name = os.path.basename(item)
                raster_layer = arcpy.MakeRasterLayer_management(
                    item, layer_name)
                layer_file = arcpy.SaveToLayerFile_management(
                    raster_layer,
                    os.path.join(temp_folder, '{0}.lyr'.format(layer_name)))
                layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))
                all_layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))

            elif dsc.catalogPath.endswith('.kml') or dsc.catalogPath.endswith(
                    '.kmz'):
                if not os.path.splitext(dsc.name)[0] in layers:
                    name = os.path.splitext(dsc.name)[0]
                else:
                    name = '{0}_{1}'.format(os.path.splitext(dsc.name)[0], i)
                arcpy.KMLToLayer_conversion(dsc.catalogPath, temp_folder, name)
                layers.append(
                    arcpy.mapping.Layer(
                        os.path.join(temp_folder, '{0}.lyr'.format(name))))
                all_layers.append(
                    arcpy.mapping.Layer(
                        os.path.join(temp_folder, '{0}.lyr'.format(name))))

            elif dsc.dataType == 'MapDocument':
                input_mxd = arcpy.mapping.MapDocument(item)
                if map_frame_name:
                    df = arcpy.mapping.ListDataFrames(input_mxd,
                                                      map_frame_name)[0]
                    layers = arcpy.mapping.ListLayers(input_mxd, data_frame=df)
                else:
                    layers = arcpy.mapping.ListLayers(input_mxd)

            if layers:
                for layer in layers:
                    status_writer.send_status(
                        _('Adding layer {0}...').format(layer.name))
                    arcpy.mapping.AddLayer(data_frame, layer)
                    added_to_map += 1
                    layers = []
            else:
                status_writer.send_status(
                    _('Invalid input type: {0}').format(item))
                skipped_reasons[item] = 'Invalid input type'
                skipped += 1
        except Exception as ex:
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors += 1
            errors_reasons[item] = repr(ex)
            pass

    if map_view:
        extent = map_view.split(' ')
        new_extent = data_frame.extent
        new_extent.XMin, new_extent.YMin = float(extent[0]), float(extent[1])
        new_extent.XMax, new_extent.YMax = float(extent[2]), float(extent[3])
        data_frame.extent = new_extent
    else:
        data_frame.zoomToSelectedFeatures()

    # Update text elements in map template.
    date_element = arcpy.mapping.ListLayoutElements(mxd, 'TEXT_ELEMENT',
                                                    'date')
    if date_element:
        date_element[0].text = 'Date: {0}'.format(task_utils.get_local_date())

    title_element = arcpy.mapping.ListLayoutElements(mxd, 'TEXT_ELEMENT',
                                                     'title')
    if title_element:
        title_element[0].text = map_title

    author_element = arcpy.mapping.ListLayoutElements(mxd, 'TEXT_ELEMENT',
                                                      'author')
    if author_element:
        author_element[0].text = '{0} {1}'.format(author_element[0].text,
                                                  author)

    if map_template in ('ANSI_D_LND.mxd', 'ANSI_E_LND.mxd'):
        coord_elements = arcpy.mapping.ListLayoutElements(
            mxd, 'TEXT_ELEMENT', 'x*')
        coord_elements += arcpy.mapping.ListLayoutElements(
            mxd, 'TEXT_ELEMENT', 'y*')
        if coord_elements:
            for e in coord_elements:
                new_text = e.text
                if e.name == 'xmin':
                    dms = task_utils.dd_to_dms(data_frame.extent.XMin)
                    if data_frame.extent.XMin > 0:
                        new_text = new_text.replace('W', 'E')
                elif e.name == 'xmax':
                    dms = task_utils.dd_to_dms(data_frame.extent.XMax)
                    if data_frame.extent.XMax > 0:
                        new_text = new_text.replace('W', 'E')
                elif e.name == 'ymin':
                    dms = task_utils.dd_to_dms(data_frame.extent.YMin)
                    if data_frame.extent.YMin < 0:
                        new_text = new_text.replace('N', 'S')
                elif e.name == 'ymax':
                    if data_frame.extent.YMax < 0:
                        new_text = new_text.replace('N', 'S')
                    dms = task_utils.dd_to_dms(data_frame.extent.YMax)

                new_text = new_text.replace('d', str(dms[0]))
                new_text = new_text.replace('m', str(dms[1]))
                new_text = new_text.replace('s', str(dms[2]))
                e.text = new_text

    # Do this now so it does not affect zoom level or extent.
    if base_layer:
        status_writer.send_status(_('Adding basemap {0}...').format(base_map))
        arcpy.mapping.AddLayer(data_frame, base_layer, 'BOTTOM')

    if added_to_map > 0:
        status_writer.send_status(_('Exporting to PDF...'))
        arcpy.mapping.ExportToPDF(mxd,
                                  os.path.join(
                                      request['folder'],
                                      '{0}.pdf'.format(output_file_name)),
                                  layers_attributes=attribute_setting)
        # Create a thumbnail size PNG of the mxd.
        task_utils.make_thumbnail(
            mxd, os.path.join(request['folder'], '_thumb.png'), False)
    else:
        status_writer.send_state(status.STAT_FAILED,
                                 _('No results can be exported to PDF'))
        task_utils.report(os.path.join(request['folder'], '__report.json'),
                          added_to_map,
                          skipped,
                          skipped_details=skipped_reasons)
        return

    # Update state if necessary.
    if skipped > 0 or errors > 0:
        status_writer.send_state(
            status.STAT_WARNING,
            _('{0} results could not be processed').format(skipped + errors))
    task_utils.report(os.path.join(request['folder'],
                                   '__report.json'), added_to_map, skipped,
                      errors, errors_reasons, skipped_reasons)
예제 #30
0
# Batch KML/KMZ -> shapefile merge script.
# Scans the input folder for .kml/.kmz files, converts each one to a file
# geodatabase with KMLToLayer_conversion, then appends every converted
# 'Placemarks_polygon' feature class into a single result.shp.
in_file = arcpy.GetParameterAsText(0)   # input folder containing KML/KMZ files
out_file = arcpy.GetParameterAsText(1)  # output folder for the merged result.shp

##in_file = 'g:/beijing/summer'
##out_file = 'g:/beijing/summer/convert'

if not os.path.exists(out_file):
    os.makedirs(out_file)

# Collect KML/KMZ files (case-insensitive: Windows filesystems ignore case).
# BUG FIX: the original tested os.path.splitext[f][1] — subscripting the
# function object instead of calling it — which raised TypeError for every
# non-.kml file, so .kmz inputs crashed the scan.
ff = [f for f in os.listdir(in_file)
      if os.path.splitext(f)[1].lower() in ('.kml', '.kmz')]
arcpy.AddMessage(ff)

# Create the empty polygon shapefile that accumulates all placemarks.
arcpy.CreateFeatureclass_management(out_file, 'result.shp', 'POLYGON')
arcpy.AddField_management(os.path.join(out_file, 'result.shp'), 'Name', 'TEXT')
out_feature = os.path.join(out_file, 'result.shp')

for fname in ff:
    in_file1 = os.path.join(in_file, fname)
    # KMLToLayer writes <basename>.gdb (plus a .lyr) into out_file.
    arcpy.KMLToLayer_conversion(in_file1, out_file)
    arcpy.AddMessage(fname + 'has been succesfully converted')
    # Append the converted polygons; NO_TEST skips schema matching because
    # the gdb feature class schema differs from the bare result.shp schema.
    arcpy.Append_management(
        os.path.join(
            os.path.join(out_file,
                         os.path.splitext(fname)[0] + '.gdb'),
            'Placemarks_polygon'), out_feature, 'NO_TEST')
    arcpy.AddMessage(fname +
                     'has been succesfully added to the result feature')