import collections
import os
import tempfile

import arcpy

# The surrounding task framework supplies the remaining names used below
# (task_utils, status, status_writer, the _ translator, result_count, and the
# shared processed_count/skipped_reasons/errors_reasons/files_to_package
# globals); they are assumed here rather than redefined.


def execute(request):
    """Creates a GeoPDF.
    :param request: json as a dict.
    """
    added_to_map = 0
    errors = 0
    skipped = 0

    parameters = request['params']
    num_results, response_index = task_utils.get_result_count(parameters)
    docs = parameters[response_index]['response']['docs']
    input_items = task_utils.get_input_items(docs)
    input_rows = collections.defaultdict(list)
    for doc in docs:
        if 'path' not in doc:
            input_rows[doc['name']].append(doc)
    if num_results > task_utils.CHUNK_SIZE:
        status_writer.send_state(status.STAT_FAILED, 'Reduce results to 25 or less.')
        return

    map_template = task_utils.get_parameter_value(parameters, 'map_template', 'value')
    base_map = task_utils.get_parameter_value(parameters, 'base_map', 'value')
    map_title = task_utils.get_parameter_value(parameters, 'map_title', 'value')
    attribute_setting = task_utils.get_parameter_value(parameters, 'attribute_settings', 'value')
    author = task_utils.get_parameter_value(parameters, 'map_author', 'value')
    output_file_name = task_utils.get_parameter_value(parameters, 'output_file_name', 'value')
    if not output_file_name:
        output_file_name = 'output_pdf'
    try:
        map_view = task_utils.get_parameter_value(parameters, 'map_view', 'extent')
    except KeyError:
        map_view = None

    temp_folder = os.path.join(request['folder'], 'temp')
    if not os.path.exists(temp_folder):
        os.makedirs(temp_folder)

    if base_map == 'NONE':
        base_layer = None
    else:
        base_layer = arcpy.mapping.Layer(os.path.join(
            os.path.dirname(os.path.dirname(__file__)),
            'supportfiles', 'basemaps', '{0}.lyr'.format(base_map)))
    mxd_path = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                            'supportfiles', 'frame', map_template)
    mxd = arcpy.mapping.MapDocument(mxd_path)
    data_frame = arcpy.mapping.ListDataFrames(mxd)[0]

    layers = []
    all_layers = []

    if input_rows:
        for name, rows in input_rows.iteritems():
            for row in rows:
                try:
                    name = arcpy.CreateUniqueName(name, 'in_memory')
                    # Create the geometry.
                    geo_json = row['[geo]']
                    geom = arcpy.AsShape(geo_json)
                    arcpy.CopyFeatures_management(geom, name)
                    feature_layer = arcpy.MakeFeatureLayer_management(name, os.path.basename(name))
                    layer_file = arcpy.SaveToLayerFile_management(
                        feature_layer,
                        os.path.join(temp_folder, '{0}.lyr'.format(os.path.basename(name))))
                    layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))
                    all_layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))
                    added_to_map += 1
                except KeyError:
                    skipped += 1
                    skipped_reasons[name] = 'No geographic information'
                    continue

    for i, item in enumerate(input_items, 1):
        try:
            # Is the item a mxd data frame?
            map_frame_name = task_utils.get_data_frame_name(item)
            if map_frame_name:
                item = item.split('|')[0].strip()

            dsc = arcpy.Describe(item)
            if dsc.dataType == 'Layer':
                layers.append(arcpy.mapping.Layer(dsc.catalogPath))
            elif dsc.dataType == 'FeatureClass' or dsc.dataType == 'ShapeFile':
                if os.path.basename(item) in [l.name for l in all_layers]:
                    layer_name = '{0}_{1}'.format(os.path.basename(item), i)
                else:
                    layer_name = os.path.basename(item)
                feature_layer = arcpy.MakeFeatureLayer_management(item, layer_name)
                layer_file = arcpy.SaveToLayerFile_management(
                    feature_layer, os.path.join(temp_folder, '{0}.lyr'.format(layer_name)))
                layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))
                all_layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))
            elif dsc.dataType == 'FeatureDataset':
                arcpy.env.workspace = item
                for fc in arcpy.ListFeatureClasses():
                    layer_file = arcpy.SaveToLayerFile_management(
                        arcpy.MakeFeatureLayer_management(fc, '{0}_{1}'.format(fc, i)),
                        os.path.join(temp_folder, '{0}_{1}.lyr'.format(fc, i)))
                    layer = arcpy.mapping.Layer(layer_file.getOutput(0))
                    layer.name = fc
                    layers.append(layer)
                    all_layers.append(layer)
            elif dsc.dataType == 'RasterDataset':
                if os.path.basename(item) in [l.name for l in all_layers]:
                    layer_name = '{0}_{1}'.format(os.path.basename(item), i)
                else:
                    layer_name = os.path.basename(item)
                raster_layer = arcpy.MakeRasterLayer_management(item, layer_name)
                layer_file = arcpy.SaveToLayerFile_management(
                    raster_layer, os.path.join(temp_folder, '{0}.lyr'.format(layer_name)))
                layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))
                all_layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))
            elif dsc.catalogPath.endswith('.kml') or dsc.catalogPath.endswith('.kmz'):
                if not os.path.splitext(dsc.name)[0] in layers:
                    name = os.path.splitext(dsc.name)[0]
                else:
                    name = '{0}_{1}'.format(os.path.splitext(dsc.name)[0], i)
                arcpy.KMLToLayer_conversion(dsc.catalogPath, temp_folder, name)
                layers.append(arcpy.mapping.Layer(os.path.join(temp_folder, '{0}.lyr'.format(name))))
                all_layers.append(arcpy.mapping.Layer(os.path.join(temp_folder, '{0}.lyr'.format(name))))
            elif dsc.dataType == 'MapDocument':
                input_mxd = arcpy.mapping.MapDocument(item)
                if map_frame_name:
                    df = arcpy.mapping.ListDataFrames(input_mxd, map_frame_name)[0]
                    layers = arcpy.mapping.ListLayers(input_mxd, data_frame=df)
                else:
                    layers = arcpy.mapping.ListLayers(input_mxd)

            if layers:
                for layer in layers:
                    status_writer.send_status(_('Adding layer {0}...').format(layer.name))
                    arcpy.mapping.AddLayer(data_frame, layer)
                    added_to_map += 1
                layers = []
            else:
                status_writer.send_status(_('Invalid input type: {0}').format(item))
                skipped_reasons[item] = 'Invalid input type'
                skipped += 1
        except Exception as ex:
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors += 1
            errors_reasons[item] = repr(ex)

    if map_view:
        extent = map_view.split(' ')
        new_extent = data_frame.extent
        new_extent.XMin, new_extent.YMin = float(extent[0]), float(extent[1])
        new_extent.XMax, new_extent.YMax = float(extent[2]), float(extent[3])
        data_frame.extent = new_extent
    else:
        data_frame.zoomToSelectedFeatures()

    # Update text elements in map template.
    date_element = arcpy.mapping.ListLayoutElements(mxd, 'TEXT_ELEMENT', 'date')
    if date_element:
        date_element[0].text = 'Date: {0}'.format(task_utils.get_local_date())
    title_element = arcpy.mapping.ListLayoutElements(mxd, 'TEXT_ELEMENT', 'title')
    if title_element:
        title_element[0].text = map_title
    author_element = arcpy.mapping.ListLayoutElements(mxd, 'TEXT_ELEMENT', 'author')
    if author_element:
        author_element[0].text = '{0} {1}'.format(author_element[0].text, author)

    if map_template in ('ANSI_D_LND.mxd', 'ANSI_E_LND.mxd'):
        coord_elements = arcpy.mapping.ListLayoutElements(mxd, 'TEXT_ELEMENT', 'x*')
        coord_elements += arcpy.mapping.ListLayoutElements(mxd, 'TEXT_ELEMENT', 'y*')
        if coord_elements:
            for e in coord_elements:
                new_text = e.text
                if e.name == 'xmin':
                    dms = task_utils.dd_to_dms(data_frame.extent.XMin)
                    if data_frame.extent.XMin > 0:
                        new_text = new_text.replace('W', 'E')
                elif e.name == 'xmax':
                    dms = task_utils.dd_to_dms(data_frame.extent.XMax)
                    if data_frame.extent.XMax > 0:
                        new_text = new_text.replace('W', 'E')
                elif e.name == 'ymin':
                    dms = task_utils.dd_to_dms(data_frame.extent.YMin)
                    if data_frame.extent.YMin < 0:
                        new_text = new_text.replace('N', 'S')
                elif e.name == 'ymax':
                    if data_frame.extent.YMax < 0:
                        new_text = new_text.replace('N', 'S')
                    dms = task_utils.dd_to_dms(data_frame.extent.YMax)

                new_text = new_text.replace('d', str(dms[0]))
                new_text = new_text.replace('m', str(dms[1]))
                new_text = new_text.replace('s', str(dms[2]))
                e.text = new_text

    # Do this now so it does not affect zoom level or extent.
    if base_layer:
        status_writer.send_status(_('Adding basemap {0}...').format(base_map))
        arcpy.mapping.AddLayer(data_frame, base_layer, 'BOTTOM')

    if added_to_map > 0:
        status_writer.send_status(_('Exporting to PDF...'))
        arcpy.mapping.ExportToPDF(
            mxd,
            os.path.join(request['folder'], '{0}.pdf'.format(output_file_name)),
            layers_attributes=attribute_setting)
        # Create a thumbnail size PNG of the mxd.
        task_utils.make_thumbnail(mxd, os.path.join(request['folder'], '_thumb.png'), False)
    else:
        status_writer.send_state(status.STAT_FAILED, _('No results can be exported to PDF'))
        task_utils.report(os.path.join(request['folder'], '__report.json'),
                          added_to_map, skipped, skipped_details=skipped_reasons)
        return

    # Update state if necessary.
    if skipped > 0 or errors > 0:
        status_writer.send_state(
            status.STAT_WARNING,
            _('{0} results could not be processed').format(skipped + errors))
    task_utils.report(os.path.join(request['folder'], '__report.json'),
                      added_to_map, skipped, errors, errors_reasons, skipped_reasons)
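# --- Usage sketch (hypothetical values) ---------------------------------------
# execute() reads everything from request['params']: named task parameters plus
# one entry carrying the search response ('response' -> 'docs'). The payload
# below is an illustrative guess at that shape, not a fixture from the task
# framework.
def _example_execute():
    sample_request = {
        'folder': r'C:\temp\job_output',  # hypothetical job output folder
        'params': [
            {'name': 'map_template', 'value': 'ANSI_D_LND.mxd'},
            {'name': 'base_map', 'value': 'NONE'},
            {'name': 'map_title', 'value': 'Sample Map'},
            {'name': 'attribute_settings', 'value': 'LAYERS_ONLY'},
            {'name': 'map_author', 'value': 'Editor'},
            {'name': 'output_file_name', 'value': 'sample_output'},
            {'response': {'docs': [{'name': 'roads', 'path': r'C:\data\roads.shp'}]}},
        ]
    }
    execute(sample_request)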
def clip_data(input_items, out_workspace, clip_polygon, out_format):
    """Clips input results using the clip polygon.
    :param input_items: list of items to be clipped
    :param out_workspace: the output workspace where results are created
    :param clip_polygon: the clip polygon geometry
    :param out_format: the type of output to be created (e.g. SHP for shapefile)
    """
    global processed_count
    global layer_name
    global existing_fields
    global new_fields
    global field_values

    clipped = 0
    errors = 0
    skipped = 0
    fds = None

    for ds, out_name in input_items.iteritems():
        try:
            # -----------------------------------------------
            # If the item is a service layer, process and continue.
            # -----------------------------------------------
            if ds.startswith('http'):
                try:
                    clip_service_layers(ds, clip_polygon, out_name)
                    processed_count += 1.
                    clipped += 1
                    status_writer.send_percent(processed_count / result_count,
                                               _('Clipped: {0}').format(ds),
                                               'clip_data_by_features')
                    continue
                except Exception as ex:
                    status_writer.send_state(status.STAT_WARNING, str(ex))
                    errors_reasons[ds] = ex.message
                    errors += 1
                    continue

            # -----------------------------------------------
            # Check if the path is a MXD data frame type.
            # -----------------------------------------------
            map_frame_name = task_utils.get_data_frame_name(ds)
            if map_frame_name:
                ds = ds.split('|')[0].strip()

            # ---------------------------------
            # Is the input geometry features?
            # ---------------------------------
            if isinstance(out_name, list):
                increment = task_utils.get_increment(result_count)
                for row in out_name:
                    try:
                        name = os.path.join(out_workspace, arcpy.ValidateTableName(ds, out_workspace))
                        if out_format == 'SHP':
                            name += '.shp'
                        geo_json = row['[geo]']
                        geom = arcpy.AsShape(geo_json)
                        row.pop('[geo]')
                        if not arcpy.Exists(name):
                            if arcpy.env.outputCoordinateSystem:
                                arcpy.CreateFeatureclass_management(
                                    out_workspace, os.path.basename(name), geom.type.upper())
                            else:
                                arcpy.env.outputCoordinateSystem = 4326
                                arcpy.CreateFeatureclass_management(
                                    out_workspace, os.path.basename(name), geom.type.upper())
                            layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                            if out_format == 'SHP':
                                arcpy.DeleteField_management(layer_name, 'Id')
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                new_fields.append(valid_field)
                                field_values.append(value)
                                arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                        else:
                            if not geom.type.upper() == arcpy.Describe(name).shapeType.upper():
                                name = arcpy.CreateUniqueName(os.path.basename(name), out_workspace)
                                if arcpy.env.outputCoordinateSystem:
                                    arcpy.CreateFeatureclass_management(
                                        out_workspace, os.path.basename(name), geom.type.upper())
                                else:
                                    arcpy.env.outputCoordinateSystem = 4326
                                    arcpy.CreateFeatureclass_management(
                                        out_workspace, os.path.basename(name), geom.type.upper())
                            layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                            if out_format == 'SHP':
                                arcpy.DeleteField_management(layer_name, 'Id')
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                new_fields.append(valid_field)
                                field_values.append(value)
                                if valid_field not in existing_fields:
                                    arcpy.AddField_management(layer_name, valid_field, 'TEXT')

                        clipped_geometry = arcpy.Clip_analysis(geom, clip_polygon, arcpy.Geometry())
                        if clipped_geometry:
                            with arcpy.da.InsertCursor(layer_name, ["SHAPE@"] + new_fields) as icur:
                                icur.insertRow([clipped_geometry[0]] + field_values)
                        processed_count += 1
                        if (processed_count % increment) == 0:
                            status_writer.send_percent(float(processed_count) / result_count,
                                                       _('Clipped: {0}').format(row['name']),
                                                       'clip_data')
                        clipped += 1
                    except KeyError:
                        processed_count += 1
                        skipped += 1
                        skipped_reasons[ds] = 'Invalid input type'
                        status_writer.send_state(status.STAT_WARNING,
                                                 _('Invalid input type: {0}').format(ds))
                    except Exception as ex:
                        processed_count += 1
                        errors += 1
                        errors_reasons[ds] = ex.message
                        continue
                continue

            dsc = arcpy.Describe(ds)
            try:
                if dsc.spatialReference.name == 'Unknown':
                    status_writer.send_state(
                        status.STAT_WARNING,
                        _('{0} has an Unknown projection. Output may be invalid or empty.').format(dsc.name))
            except AttributeError:
                pass

            # -----------------------------
            # Check the data type and clip.
            # -----------------------------
            # Feature Class or ShapeFile
            if dsc.dataType in ('FeatureClass', 'ShapeFile', 'Shapefile'):
                if out_name == '':
                    name = arcpy.ValidateTableName(dsc.name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                else:
                    name = arcpy.ValidateTableName(out_name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                arcpy.Clip_analysis(ds, clip_polygon, name)

            # Feature dataset
            elif dsc.dataType == 'FeatureDataset':
                if not out_format == 'SHP':
                    fds_name = os.path.basename(task_utils.create_unique_name(dsc.name, out_workspace))
                    fds = arcpy.CreateFeatureDataset_management(out_workspace, fds_name)
                arcpy.env.workspace = ds
                for fc in arcpy.ListFeatureClasses():
                    try:
                        if not out_format == 'SHP':
                            arcpy.Clip_analysis(fc, clip_polygon,
                                                task_utils.create_unique_name(fc, fds.getOutput(0)))
                        else:
                            arcpy.Clip_analysis(fc, clip_polygon,
                                                task_utils.create_unique_name(fc, out_workspace))
                    except arcpy.ExecuteError:
                        pass
                arcpy.env.workspace = out_workspace

            # Raster dataset
            elif dsc.dataType == 'RasterDataset':
                if out_name == '':
                    name = task_utils.create_unique_name(dsc.name, out_workspace)
                else:
                    name = task_utils.create_unique_name(out_name, out_workspace)
                if type(clip_polygon) is arcpy.Polygon:
                    extent = clip_polygon.extent
                else:
                    extent = arcpy.Describe(clip_polygon).extent
                ext = '{0} {1} {2} {3}'.format(extent.XMin, extent.YMin, extent.XMax, extent.YMax)
                arcpy.Clip_management(ds, ext, name, in_template_dataset=clip_polygon,
                                      clipping_geometry="ClippingGeometry")

            # Layer file
            elif dsc.dataType == 'Layer':
                task_utils.clip_layer_file(dsc.catalogPath, clip_polygon, arcpy.env.workspace)

            # Cad drawing dataset
            elif dsc.dataType == 'CadDrawingDataset':
                arcpy.env.workspace = dsc.catalogPath
                cad_wks_name = os.path.splitext(dsc.name)[0]
                for cad_fc in arcpy.ListFeatureClasses():
                    name = task_utils.create_unique_name('{0}_{1}'.format(cad_wks_name, cad_fc),
                                                         out_workspace)
                    arcpy.Clip_analysis(cad_fc, clip_polygon, name)
                arcpy.env.workspace = out_workspace

            # File
            elif dsc.dataType in ('File', 'TextFile'):
                if dsc.catalogPath.endswith('.kml') or dsc.catalogPath.endswith('.kmz'):
                    name = os.path.splitext(dsc.name)[0]
                    kml_layer = arcpy.KMLToLayer_conversion(dsc.catalogPath, arcpy.env.scratchFolder, name)
                    group_layer = arcpy.mapping.Layer(
                        os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    for layer in arcpy.mapping.ListLayers(group_layer):
                        if layer.isFeatureLayer:
                            arcpy.Clip_analysis(layer, clip_polygon,
                                                task_utils.create_unique_name(layer, out_workspace))
                    # Clean up temp KML results.
                    arcpy.Delete_management(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    arcpy.Delete_management(kml_layer[1])
                    del group_layer
                else:
                    if out_name == '':
                        out_name = dsc.name
                    if out_workspace.endswith('.gdb'):
                        f = arcpy.Copy_management(ds, os.path.join(os.path.dirname(out_workspace), out_name))
                    else:
                        f = arcpy.Copy_management(ds, os.path.join(out_workspace, out_name))
                    processed_count += 1.
                    status_writer.send_percent(processed_count / result_count,
                                               _('Copied file: {0}').format(dsc.name), 'clip_data')
                    status_writer.send_state(_('Copied file: {0}').format(dsc.name))
                    clipped += 1
                    if out_format in ('LPK', 'MPK'):
                        files_to_package.append(f.getOutput(0))
                    continue

            # Map document
            elif dsc.dataType == 'MapDocument':
                task_utils.clip_mxd_layers(dsc.catalogPath, clip_polygon, arcpy.env.workspace,
                                           map_frame_name)

            else:
                processed_count += 1.
                status_writer.send_percent(processed_count / result_count,
                                           _('Invalid input type: {0}').format(ds), 'clip_data')
                status_writer.send_state(_('Invalid input type: {0}').format(ds))
                skipped_reasons[ds] = _('Invalid input type: {0}').format(dsc.dataType)
                skipped += 1
                continue

            processed_count += 1.
            status_writer.send_percent(processed_count / result_count,
                                       _('Clipped: {0}').format(dsc.name), 'clip_data')
            status_writer.send_status(_('Clipped: {0}').format(dsc.name))
            clipped += 1
        # Continue. Process as many as possible.
        except Exception as ex:
            processed_count += 1.
            status_writer.send_percent(processed_count / result_count,
                                       _('Skipped: {0}').format(os.path.basename(ds)), 'clip_data')
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors_reasons[ds] = repr(ex)
            errors += 1

    return clipped, errors, skipped
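# --- Usage sketch (hypothetical values) ---------------------------------------
# clip_data() takes a mapping from dataset path (or service URL) to an output
# name, or to a list of result rows when the input is raw geometry. The call
# below assumes the module globals (result_count, status_writer, the report
# dicts) were initialized by the task runner.
def _example_clip_data():
    items = {
        r'C:\data\roads.shp': 'roads_clipped',  # dataset -> output name
        'parcels': [{'[geo]': {'type': 'Point', 'coordinates': [-122.4, 37.8]},
                     'name': 'parcel_1'}],      # raw geometry rows
    }
    clipped, errors, skipped = clip_data(items, r'C:\temp\output',
                                         r'C:\data\aoi.shp', 'SHP')
    print('clipped={0} errors={1} skipped={2}'.format(clipped, errors, skipped))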
def clip_data(input_items, out_workspace, clip_polygon, out_format):
    """Clips input results using the clip polygon.
    :param input_items: list of items to be clipped
    :param out_workspace: the output workspace where results are created
    :param clip_polygon: the clip polygon geometry
    :param out_format: the type of output to be created (e.g. SHP for shapefile)
    """
    global processed_count
    global layer_name
    global existing_fields
    global new_fields
    global field_values

    clipped = 0
    errors = 0
    skipped = 0
    fds = None

    for ds, out_name in input_items.iteritems():
        try:
            if not isinstance(out_name, list):
                out_name = ''

            # -----------------------------------------------
            # If the item is a service layer, process and continue.
            # -----------------------------------------------
            if ds.startswith('http'):
                try:
                    clip_service_layers(ds, clip_polygon, out_name)
                    processed_count += 1.
                    clipped += 1
                    status_writer.send_percent(processed_count / result_count,
                                               _('Clipped: {0}').format(ds),
                                               'clip_data_by_features')
                    continue
                except Exception as ex:
                    status_writer.send_state(status.STAT_WARNING, str(ex))
                    errors_reasons[ds] = ex.message
                    errors += 1
                    continue

            # -----------------------------------------------
            # Check if the path is a MXD data frame type.
            # -----------------------------------------------
            map_frame_name = task_utils.get_data_frame_name(ds)
            if map_frame_name:
                ds = ds.split('|')[0].strip()

            # ---------------------------------
            # Is the input geometry features?
            # ---------------------------------
            if isinstance(out_name, list):
                arcpy.env.overwriteOutput = True
                increment = task_utils.get_increment(result_count)
                for row in out_name:
                    try:
                        name = os.path.join(out_workspace, arcpy.ValidateTableName(ds, out_workspace))
                        if out_format == 'SHP':
                            name += '.shp'
                        geo_json = row['[geo]']
                        geom = arcpy.AsShape(geo_json)
                        row.pop('[geo]')
                        if not arcpy.Exists(name):
                            if arcpy.env.outputCoordinateSystem:
                                arcpy.CreateFeatureclass_management(
                                    out_workspace, os.path.basename(name), geom.type.upper())
                            else:
                                arcpy.env.outputCoordinateSystem = 4326
                                arcpy.CreateFeatureclass_management(
                                    out_workspace, os.path.basename(name), geom.type.upper())
                            layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                            if out_format == 'SHP':
                                arcpy.DeleteField_management(layer_name, 'Id')
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                new_fields.append(valid_field)
                                field_values.append(value)
                                arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                        else:
                            if not geom.type.upper() == arcpy.Describe(name).shapeType.upper():
                                name = arcpy.CreateUniqueName(os.path.basename(name), out_workspace)
                                if arcpy.env.outputCoordinateSystem:
                                    arcpy.CreateFeatureclass_management(
                                        out_workspace, os.path.basename(name), geom.type.upper())
                                else:
                                    arcpy.env.outputCoordinateSystem = 4326
                                    arcpy.CreateFeatureclass_management(
                                        out_workspace, os.path.basename(name), geom.type.upper())
                            layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                            if out_format == 'SHP':
                                arcpy.DeleteField_management(layer_name, 'Id')
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                new_fields.append(valid_field)
                                field_values.append(value)
                                if valid_field not in existing_fields:
                                    arcpy.AddField_management(layer_name, valid_field, 'TEXT')

                        clipped_geometry = arcpy.Clip_analysis(geom, clip_polygon, arcpy.Geometry())
                        if clipped_geometry:
                            with arcpy.da.InsertCursor(layer_name, ["SHAPE@"] + new_fields) as icur:
                                icur.insertRow([clipped_geometry[0]] + field_values)
                        processed_count += 1
                        if (processed_count % increment) == 0:
                            status_writer.send_percent(float(processed_count) / result_count,
                                                       _('Clipped: {0}').format(row['name']),
                                                       'clip_data')
                        clipped += 1
                    except KeyError:
                        processed_count += 1
                        skipped += 1
                        skipped_reasons[ds] = 'Invalid input type'
                        status_writer.send_state(status.STAT_WARNING,
                                                 _('Invalid input type: {0}').format(ds))
                    except Exception as ex:
                        processed_count += 1
                        errors += 1
                        errors_reasons[ds] = ex.message
                        continue
                continue

            dsc = arcpy.Describe(ds)
            try:
                if dsc.spatialReference.name == 'Unknown':
                    status_writer.send_state(
                        status.STAT_WARNING,
                        _('{0} has an Unknown projection. Output may be invalid or empty.').format(dsc.name))
            except AttributeError:
                pass

            # -----------------------------
            # Check the data type and clip.
            # -----------------------------
            # Feature Class or ShapeFile
            if dsc.dataType in ('FeatureClass', 'ShapeFile', 'Shapefile'):
                if out_name == '':
                    name = arcpy.ValidateTableName(dsc.name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                else:
                    name = arcpy.ValidateTableName(out_name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                # Does the input exist in a feature dataset? If so, create the
                # feature dataset in the output workspace if it doesn't exist.
                ws = os.path.dirname(ds)
                if any(ext in ws for ext in ('.gdb', '.mdb', '.sde')):
                    if os.path.splitext(ws)[1] in ('.gdb', '.mdb', '.sde'):
                        arcpy.Clip_analysis(ds, clip_polygon, name)
                    else:
                        fds_name = os.path.basename(ws)
                        if not arcpy.Exists(os.path.join(out_workspace, fds_name)):
                            arcpy.CreateFeatureDataset_management(out_workspace, fds_name,
                                                                  dsc.spatialReference)
                        arcpy.Clip_analysis(ds, clip_polygon,
                                            os.path.join(out_workspace, fds_name, os.path.basename(ds)))
                else:
                    arcpy.Clip_analysis(ds, clip_polygon, name)

            # Feature dataset
            elif dsc.dataType == 'FeatureDataset':
                if not out_format == 'SHP':
                    fds_name = os.path.basename(task_utils.create_unique_name(dsc.name, out_workspace))
                    fds = arcpy.CreateFeatureDataset_management(out_workspace, fds_name)
                arcpy.env.workspace = ds
                for fc in arcpy.ListFeatureClasses():
                    try:
                        if not out_format == 'SHP':
                            arcpy.Clip_analysis(fc, clip_polygon,
                                                task_utils.create_unique_name(fc, fds.getOutput(0)))
                        else:
                            arcpy.Clip_analysis(fc, clip_polygon,
                                                task_utils.create_unique_name(fc, out_workspace))
                    except arcpy.ExecuteError:
                        pass
                arcpy.env.workspace = out_workspace

            # Raster dataset
            elif dsc.dataType == 'RasterDataset':
                if out_name == '':
                    name = task_utils.create_unique_name(dsc.name, out_workspace)
                else:
                    name = task_utils.create_unique_name(out_name, out_workspace)
                if type(clip_polygon) is arcpy.Polygon:
                    extent = clip_polygon.extent
                else:
                    extent = arcpy.Describe(clip_polygon).extent
                ext = '{0} {1} {2} {3}'.format(extent.XMin, extent.YMin, extent.XMax, extent.YMax)
                arcpy.Clip_management(ds, ext, name, in_template_dataset=clip_polygon,
                                      clipping_geometry="ClippingGeometry")

            # Layer file
            elif dsc.dataType == 'Layer':
                task_utils.clip_layer_file(dsc.catalogPath, clip_polygon, arcpy.env.workspace)

            # Cad drawing dataset
            elif dsc.dataType == 'CadDrawingDataset':
                arcpy.env.workspace = dsc.catalogPath
                cad_wks_name = os.path.splitext(dsc.name)[0]
                for cad_fc in arcpy.ListFeatureClasses():
                    name = task_utils.create_unique_name('{0}_{1}'.format(cad_wks_name, cad_fc),
                                                         out_workspace)
                    arcpy.Clip_analysis(cad_fc, clip_polygon, name)
                arcpy.env.workspace = out_workspace

            # File
            elif dsc.dataType in ('File', 'TextFile'):
                if dsc.catalogPath.endswith('.kml') or dsc.catalogPath.endswith('.kmz'):
                    name = os.path.splitext(dsc.name)[0]
                    kml_layer = arcpy.KMLToLayer_conversion(dsc.catalogPath, arcpy.env.scratchFolder, name)
                    group_layer = arcpy.mapping.Layer(
                        os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    for layer in arcpy.mapping.ListLayers(group_layer):
                        if layer.isFeatureLayer:
                            arcpy.Clip_analysis(layer, clip_polygon,
                                                task_utils.create_unique_name(layer, out_workspace))
                    # Clean up temp KML results.
                    arcpy.Delete_management(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    arcpy.Delete_management(kml_layer[1])
                    del group_layer
                else:
                    if out_name == '':
                        out_name = dsc.name
                    if out_workspace.endswith('.gdb'):
                        f = arcpy.Copy_management(ds, os.path.join(os.path.dirname(out_workspace), out_name))
                    else:
                        f = arcpy.Copy_management(ds, os.path.join(out_workspace, out_name))
                    processed_count += 1.
                    status_writer.send_percent(processed_count / result_count,
                                               _('Copied file: {0}').format(dsc.name), 'clip_data')
                    status_writer.send_state(_('Copied file: {0}').format(dsc.name))
                    clipped += 1
                    if out_format in ('LPK', 'MPK'):
                        files_to_package.append(f.getOutput(0))
                    continue

            # Map document
            elif dsc.dataType == 'MapDocument':
                task_utils.clip_mxd_layers(dsc.catalogPath, clip_polygon, arcpy.env.workspace,
                                           map_frame_name)

            else:
                processed_count += 1.
                status_writer.send_percent(processed_count / result_count,
                                           _('Invalid input type: {0}').format(ds), 'clip_data')
                status_writer.send_state(_('Invalid input type: {0}').format(ds))
                skipped_reasons[ds] = _('Invalid input type: {0}').format(dsc.dataType)
                skipped += 1
                continue

            processed_count += 1.
            status_writer.send_percent(processed_count / result_count,
                                       _('Clipped: {0}').format(dsc.name), 'clip_data')
            status_writer.send_status(_('Clipped: {0}').format(dsc.name))
            clipped += 1
        # Continue. Process as many as possible.
        except Exception as ex:
            processed_count += 1.
            status_writer.send_percent(processed_count / result_count,
                                       _('Skipped: {0}').format(os.path.basename(ds)), 'clip_data')
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors_reasons[ds] = repr(ex)
            errors += 1

    return clipped, errors, skipped
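# --- Note on the variant above (illustrative sketch) ---------------------------
# This second clip_data() differs from the first by normalizing non-list
# out_name values to '', forcing arcpy.env.overwriteOutput, and re-creating the
# source feature dataset in the output workspace. That dataset test reduces to:
# the input lives in a feature dataset when its parent path is inside a
# .gdb/.mdb/.sde but is not itself the workspace root. Hypothetical paths:
def _example_fds_detection():
    for ds in (r'C:\data\db.gdb\roads',             # at the gdb root
               r'C:\data\db.gdb\transport\roads'):  # inside a feature dataset
        ws = os.path.dirname(ds)
        in_fds = (any(ext in ws for ext in ('.gdb', '.mdb', '.sde')) and
                  os.path.splitext(ws)[1] not in ('.gdb', '.mdb', '.sde'))
        print('{0} -> in feature dataset: {1}'.format(ds, in_fds))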
def clip_data(input_items, out_workspace, out_coordinate_system, gcs_sr, gcs_clip_poly, out_format):
    """Clips input results."""
    clipped = 0
    errors = 0
    skipped = 0
    fds = None
    global processed_count
    global layer_name
    global existing_fields
    global new_fields
    global field_values

    for ds, out_name in input_items.items():
        try:
            if not isinstance(out_name, list):
                out_name = ''

            # -----------------------------------------------
            # If the item is a service layer, process and continue.
            # -----------------------------------------------
            if ds.startswith('http'):
                try:
                    if out_coordinate_system == 0:
                        service_layer = task_utils.ServiceLayer(ds)
                        wkid = service_layer.wkid
                        out_sr = arcpy.SpatialReference(wkid)
                        arcpy.env.outputCoordinateSystem = out_sr
                    else:
                        out_sr = task_utils.get_spatial_reference(out_coordinate_system)
                        arcpy.env.outputCoordinateSystem = out_sr

                    if not out_sr.name == gcs_sr.name:
                        try:
                            geo_transformation = arcpy.ListTransformations(gcs_sr, out_sr)[0]
                            clip_poly = gcs_clip_poly.projectAs(out_sr, geo_transformation)
                        except (AttributeError, IndexError):
                            try:
                                clip_poly = gcs_clip_poly.projectAs(out_sr)
                            except AttributeError:
                                clip_poly = gcs_clip_poly
                        except ValueError:
                            clip_poly = gcs_clip_poly
                    else:
                        clip_poly = gcs_clip_poly

                    arcpy.env.overwriteOutput = True
                    service_layer = task_utils.ServiceLayer(ds, clip_poly.extent.JSON,
                                                            'esriGeometryEnvelope')
                    oid_groups = service_layer.object_ids
                    out_features = None
                    g = 0.
                    group_cnt = service_layer.object_ids_cnt
                    for group in oid_groups:
                        g += 1
                        group = [oid for oid in group if oid]
                        where = '{0} IN {1}'.format(service_layer.oid_field_name, tuple(group))
                        url = ds + "/query?where={0}&outFields={1}&returnGeometry=true&f=json".format(where, '*')
                        feature_set = arcpy.FeatureSet()
                        if not out_name:
                            out_name = service_layer.service_layer_name
                        try:
                            feature_set.load(url)
                        except Exception:
                            continue
                        if not out_features:
                            out_features = arcpy.Clip_analysis(feature_set, clip_poly, out_name)
                        else:
                            clip_features = arcpy.Clip_analysis(feature_set, clip_poly,
                                                                'in_memory/features')
                            arcpy.Append_management(clip_features, out_features, 'NO_TEST')
                            try:
                                arcpy.Delete_management(clip_features)
                            except arcpy.ExecuteError:
                                pass
                        status_writer.send_percent(float(g) / group_cnt, '', 'clip_data')
                    processed_count += 1.
                    clipped += 1
                    status_writer.send_percent(processed_count / result_count,
                                               _('Clipped: {0}').format(ds), 'clip_data')
                    continue
                except Exception as ex:
                    status_writer.send_state(status.STAT_WARNING, str(ex))
                    errors_reasons[ds] = ex.message
                    errors += 1
                    continue

            # -----------------------------------------------
            # Check if the path is a MXD data frame type.
            # -----------------------------------------------
            map_frame_name = task_utils.get_data_frame_name(ds)
            if map_frame_name:
                ds = ds.split('|')[0].strip()

            # -------------------------------
            # Is the input a geometry feature?
            # -------------------------------
            if isinstance(out_name, list):
                for row in out_name:
                    try:
                        arcpy.env.overwriteOutput = True
                        name = os.path.join(out_workspace, arcpy.ValidateTableName(ds, out_workspace))
                        if out_format == 'SHP':
                            name += '.shp'
                        # Clip the geometry.
                        geo_json = row['[geo]']
                        geom = arcpy.AsShape(geo_json)
                        row.pop('[geo]')
                        if not arcpy.Exists(name):
                            if arcpy.env.outputCoordinateSystem:
                                arcpy.CreateFeatureclass_management(
                                    out_workspace, os.path.basename(name), geom.type.upper())
                            else:
                                arcpy.env.outputCoordinateSystem = 4326
                                arcpy.CreateFeatureclass_management(
                                    out_workspace, os.path.basename(name), geom.type.upper())
                            layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                new_fields.append(valid_field)
                                field_values.append(value)
                                try:
                                    arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                                except arcpy.ExecuteError:
                                    arcpy.DeleteField_management(layer_name, valid_field)
                                    arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                        else:
                            if not geom.type.upper() == arcpy.Describe(name).shapeType.upper():
                                name = arcpy.CreateUniqueName(os.path.basename(name), out_workspace)
                                if arcpy.env.outputCoordinateSystem:
                                    arcpy.CreateFeatureclass_management(
                                        out_workspace, os.path.basename(name), geom.type.upper())
                                else:
                                    arcpy.env.outputCoordinateSystem = 4326
                                    arcpy.CreateFeatureclass_management(
                                        out_workspace, os.path.basename(name), geom.type.upper())
                            layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                new_fields.append(valid_field)
                                field_values.append(value)
                                if valid_field not in existing_fields:
                                    try:
                                        arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                                    except arcpy.ExecuteError:
                                        arcpy.DeleteField_management(layer_name, valid_field)
                                        arcpy.AddField_management(layer_name, valid_field, 'TEXT')

                        clipped_geometry = arcpy.Clip_analysis(geom, gcs_clip_poly, arcpy.Geometry())
                        if clipped_geometry:
                            with arcpy.da.InsertCursor(layer_name, ["SHAPE@"] + new_fields) as icur:
                                icur.insertRow([clipped_geometry[0]] + field_values)
                        status_writer.send_percent(processed_count / result_count,
                                                   _('Clipped: {0}').format(row['name']), 'clip_data')
                        processed_count += 1
                        clipped += 1
                    except KeyError:
                        processed_count += 1
                        skipped += 1
                        status_writer.send_state(status.STAT_WARNING,
                                                 _('Invalid input type: {0}').format(ds))
                        skipped_reasons[ds] = 'Invalid input type'
                    except Exception as ex:
                        processed_count += 1
                        errors += 1
                        errors_reasons[ds] = ex.message
                        continue
                continue

            dsc = arcpy.Describe(ds)
            try:
                if dsc.spatialReference.name == 'Unknown':
                    status_writer.send_state(
                        status.STAT_WARNING,
                        _('{0} has an Unknown projection. Output may be invalid or empty.').format(dsc.name))
            except AttributeError:
                pass

            # --------------------------------------------------------------------
            # If no output coord. system, get output spatial reference from input.
            # --------------------------------------------------------------------
            if out_coordinate_system == 0:
                try:
                    out_sr = dsc.spatialReference
                    arcpy.env.outputCoordinateSystem = out_sr
                except AttributeError:
                    out_sr = task_utils.get_spatial_reference(4326)
                    arcpy.env.outputCoordinateSystem = out_sr
            else:
                out_sr = task_utils.get_spatial_reference(out_coordinate_system)
                arcpy.env.outputCoordinateSystem = out_sr

            # -------------------------------------------------
            # If the item is not a file, project the clip area.
            # -------------------------------------------------
            if dsc.dataType not in ('File', 'TextFile'):
                if not out_sr.name == gcs_sr.name:
                    try:
                        geo_transformation = arcpy.ListTransformations(gcs_sr, out_sr)[0]
                        clip_poly = gcs_clip_poly.projectAs(out_sr, geo_transformation)
                    except (AttributeError, IndexError):
                        try:
                            clip_poly = gcs_clip_poly.projectAs(out_sr)
                        except AttributeError:
                            clip_poly = gcs_clip_poly
                    except ValueError:
                        clip_poly = gcs_clip_poly
                else:
                    clip_poly = gcs_clip_poly
                extent = clip_poly.extent

            # -----------------------------
            # Check the data type and clip.
            # -----------------------------
            # Feature Class or ShapeFile
            if dsc.dataType in ('FeatureClass', 'ShapeFile', 'Shapefile'):
                if out_name == '':
                    name = arcpy.ValidateTableName(dsc.name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                else:
                    name = arcpy.ValidateTableName(out_name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                # Does the input exist in a feature dataset? If so, create the
                # feature dataset in the output workspace if it doesn't exist.
                ws = os.path.dirname(ds)
                if any(ext in ws for ext in ('.gdb', '.mdb', '.sde')):
                    if os.path.splitext(ws)[1] in ('.gdb', '.mdb', '.sde'):
                        arcpy.Clip_analysis(ds, clip_poly, name)
                    else:
                        fds_name = os.path.basename(ws)
                        if not arcpy.Exists(os.path.join(out_workspace, fds_name)):
                            arcpy.CreateFeatureDataset_management(out_workspace, fds_name,
                                                                  dsc.spatialReference)
                        arcpy.Clip_analysis(ds, clip_poly,
                                            os.path.join(out_workspace, fds_name, os.path.basename(ds)))
                else:
                    arcpy.Clip_analysis(ds, clip_poly, name)

            # Feature dataset
            elif dsc.dataType == 'FeatureDataset':
                if not out_format == 'SHP':
                    fds_name = os.path.basename(task_utils.create_unique_name(dsc.name, out_workspace))
                    fds = arcpy.CreateFeatureDataset_management(out_workspace, fds_name)
                arcpy.env.workspace = ds
                for fc in arcpy.ListFeatureClasses():
                    try:
                        if not out_format == 'SHP':
                            arcpy.Clip_analysis(fc, clip_poly,
                                                task_utils.create_unique_name(fc, fds.getOutput(0)))
                        else:
                            arcpy.Clip_analysis(fc, clip_poly,
                                                task_utils.create_unique_name(fc, out_workspace))
                    except arcpy.ExecuteError:
                        pass
                arcpy.env.workspace = out_workspace

            # Raster dataset
            elif dsc.dataType == 'RasterDataset':
                if out_name == '':
                    name = task_utils.create_unique_name(dsc.name, out_workspace)
                else:
                    name = task_utils.create_unique_name(out_name, out_workspace)
                ext = '{0} {1} {2} {3}'.format(extent.XMin, extent.YMin, extent.XMax, extent.YMax)
                arcpy.Clip_management(ds, ext, name, in_template_dataset=clip_poly,
                                      clipping_geometry="ClippingGeometry")

            # Layer file
            elif dsc.dataType == 'Layer':
                task_utils.clip_layer_file(dsc.catalogPath, clip_poly, arcpy.env.workspace)

            # Cad drawing dataset
            elif dsc.dataType == 'CadDrawingDataset':
                arcpy.env.workspace = dsc.catalogPath
                cad_wks_name = os.path.splitext(dsc.name)[0]
                for cad_fc in arcpy.ListFeatureClasses():
                    name = task_utils.create_unique_name('{0}_{1}'.format(cad_wks_name, cad_fc),
                                                         out_workspace)
                    arcpy.Clip_analysis(cad_fc, clip_poly, name)
                arcpy.env.workspace = out_workspace

            # File
            elif dsc.dataType in ('File', 'TextFile'):
                if dsc.catalogPath.endswith('.kml') or dsc.catalogPath.endswith('.kmz'):
                    name = os.path.splitext(dsc.name)[0]
                    kml_layer = arcpy.KMLToLayer_conversion(dsc.catalogPath, arcpy.env.scratchFolder, name)
                    group_layer = arcpy.mapping.Layer(
                        os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    for layer in arcpy.mapping.ListLayers(group_layer):
                        if layer.isFeatureLayer:
                            arcpy.Clip_analysis(layer, gcs_clip_poly,
                                                task_utils.create_unique_name(layer, out_workspace))
                    # Clean up temp KML results.
                    arcpy.Delete_management(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    arcpy.Delete_management(kml_layer[1])
                    del group_layer
                else:
                    if out_name == '':
                        out_name = dsc.name
                    if out_workspace.endswith('.gdb'):
                        f = arcpy.Copy_management(ds, os.path.join(os.path.dirname(out_workspace), out_name))
                    else:
                        f = arcpy.Copy_management(ds, os.path.join(out_workspace, out_name))
                    processed_count += 1.
                    status_writer.send_percent(processed_count / result_count,
                                               _('Copied file: {0}').format(dsc.name), 'clip_data')
                    status_writer.send_state(_('Copied file: {0}').format(dsc.name))
                    clipped += 1
                    if out_format in ('LPK', 'MPK'):
                        files_to_package.append(f.getOutput(0))
                    continue

            # Map document
            elif dsc.dataType == 'MapDocument':
                task_utils.clip_mxd_layers(dsc.catalogPath, clip_poly, arcpy.env.workspace,
                                           map_frame_name)

            else:
                processed_count += 1.
                status_writer.send_percent(processed_count / result_count,
                                           _('Invalid input type: {0}').format(ds), 'clip_data')
                status_writer.send_state(status.STAT_WARNING, _('Invalid input type: {0}').format(ds))
                skipped += 1
                skipped_reasons[ds] = _('Invalid input type: {0}').format(dsc.dataType)
                continue

            processed_count += 1.
            status_writer.send_percent(processed_count / result_count,
                                       _('Clipped: {0}').format(dsc.name), 'clip_data')
            status_writer.send_status(_('Clipped: {0}').format(dsc.name))
            clipped += 1
        # Continue. Process as many as possible.
        except Exception as ex:
            processed_count += 1.
            status_writer.send_percent(processed_count / result_count,
                                       _('Skipped: {0}').format(os.path.basename(ds)), 'clip_data')
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors_reasons[ds] = ex.message
            errors += 1

    return clipped, errors, skipped
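# --- Note on the projection fallback above --------------------------------------
# When the requested output spatial reference differs from the clip polygon's
# GCS, the code tries a listed transformation first, then a bare projectAs, and
# finally falls back to the unprojected polygon. A condensed sketch of that
# chain (assumes valid arcpy geometry and spatial reference objects):
def _example_project_clip_poly(gcs_clip_poly, gcs_sr, out_sr):
    try:
        transformation = arcpy.ListTransformations(gcs_sr, out_sr)[0]
        return gcs_clip_poly.projectAs(out_sr, transformation)
    except (AttributeError, IndexError, ValueError):
        try:
            return gcs_clip_poly.projectAs(out_sr)
        except AttributeError:
            return gcs_clip_poly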
def get_items(input_items, out_workspace):
    """Returns the list of items to package."""
    layers = []
    files = []
    errors = 0
    skipped = 0

    for i, item in enumerate(input_items, 1):
        try:
            if item.endswith('.lyr'):
                layers.append(arcpy.mapping.Layer(item))
            else:
                # Is the item a mxd data frame?
                map_frame_name = task_utils.get_data_frame_name(item)
                if map_frame_name:
                    item = item.split('|')[0].strip()

                dsc = arcpy.Describe(item)
                if dsc.dataType in ('FeatureClass', 'ShapeFile', 'RasterDataset'):
                    if os.path.basename(item) in [l.name for l in layers]:
                        layer_name = '{0}_{1}'.format(os.path.basename(item), i)
                    else:
                        layer_name = os.path.basename(item)
                    if dsc.dataType == 'RasterDataset':
                        arcpy.MakeRasterLayer_management(item, layer_name)
                    else:
                        arcpy.MakeFeatureLayer_management(item, layer_name)
                    layers.append(arcpy.mapping.Layer(layer_name))
                elif dsc.dataType in ('CadDrawingDataset', 'FeatureDataset'):
                    arcpy.env.workspace = item
                    for fc in arcpy.ListFeatureClasses():
                        if os.path.basename(fc) in [l.name for l in layers]:
                            layer_name = '{0}_{1}'.format(os.path.basename(fc), i)
                        else:
                            layer_name = os.path.basename(fc)
                        arcpy.MakeFeatureLayer_management(fc, layer_name)
                        layers.append(arcpy.mapping.Layer(layer_name))
                    arcpy.env.workspace = out_workspace
                elif dsc.dataType == 'MapDocument':
                    in_mxd = arcpy.mapping.MapDocument(item)
                    if map_frame_name:
                        df = arcpy.mapping.ListDataFrames(in_mxd, map_frame_name)[0]
                        mxd_layers = arcpy.mapping.ListLayers(in_mxd, data_frame=df)
                    else:
                        mxd_layers = arcpy.mapping.ListLayers(in_mxd)
                    layers += mxd_layers
                elif item.endswith('.gdb') or item.endswith('.mdb'):
                    arcpy.env.workspace = item
                    for fc in arcpy.ListFeatureClasses():
                        if os.path.basename(fc) in [l.name for l in layers]:
                            layer_name = '{0}_{1}'.format(os.path.basename(fc), i)
                        else:
                            layer_name = os.path.basename(fc)
                        arcpy.MakeFeatureLayer_management(fc, layer_name)
                        layers.append(arcpy.mapping.Layer(layer_name))
                    for raster in arcpy.ListRasters():
                        if os.path.basename(raster) in [l.name for l in layers]:
                            layer_name = '{0}_{1}'.format(os.path.basename(raster), i)
                        else:
                            layer_name = os.path.basename(raster)
                        arcpy.MakeRasterLayer_management(raster, layer_name)
                        layers.append(arcpy.mapping.Layer(layer_name))
                    datasets = arcpy.ListDatasets('*', 'Feature')
                    for fds in datasets:
                        arcpy.env.workspace = fds
                        for fc in arcpy.ListFeatureClasses():
                            if os.path.basename(fc) in [l.name for l in layers]:
                                layer_name = '{0}_{1}'.format(os.path.basename(fc), i)
                            else:
                                layer_name = os.path.basename(fc)
                            arcpy.MakeFeatureLayer_management(fc, layer_name)
                            layers.append(arcpy.mapping.Layer(layer_name))
                        arcpy.env.workspace = item
                    arcpy.env.workspace = out_workspace
                elif dsc.dataType == 'File' or dsc.dataType == 'TextFile':
                    files.append(item)
                else:
                    status_writer.send_status(_('Invalid input type: {0}').format(item))
                    skipped_reasons[item] = 'Invalid input type'
                    skipped += 1
                    continue
        except Exception as ex:
            status_writer.send_status(_('Cannot package: {0}: {1}').format(item, repr(ex)))
            errors += 1
            errors_reasons[item] = repr(ex)

    return layers, files, errors, skipped
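# --- Usage sketch (hypothetical paths) ------------------------------------------
# get_items() walks mixed inputs (layer files, datasets, MXDs, geodatabases) and
# returns map layers plus loose files to package. The 'mxd|data frame' form is
# split on '|' exactly as above.
def _example_get_items():
    inputs = [r'C:\data\roads.lyr', r'C:\data\db.gdb', r'C:\maps\site.mxd|Layers']
    layers, files, errors, skipped = get_items(inputs, arcpy.env.scratchGDB)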
def convert_to_kml(input_items, out_workspace, extent, show_progress=False):
    """Converts each input item to a KMZ file in the output workspace."""
    converted = 0
    errors = 0
    skipped = 0
    global processed_count
    global layer_name
    global existing_fields
    global new_fields
    global field_values

    arcpy.env.overwriteOutput = True
    for ds, out_name in input_items.iteritems():
        try:
            # -----------------------------------------------
            # If the item is a service layer, process and continue.
            # -----------------------------------------------
            if ds.startswith('http'):
                try:
                    service_layer = task_utils.ServiceLayer(ds, extent.JSON, 'esriGeometryPolygon')
                    arcpy.env.overwriteOutput = True
                    oid_groups = service_layer.object_ids
                    out_features = None
                    g = 0.
                    group_cnt = service_layer.object_ids_cnt
                    if not arcpy.Exists(os.path.join(out_workspace, 'temp.gdb')):
                        temp_gdb = arcpy.CreateFileGDB_management(out_workspace, 'temp.gdb')
                        temp_gdb = temp_gdb[0]
                    else:
                        temp_gdb = os.path.join(out_workspace, 'temp.gdb')
                    for group in oid_groups:
                        g += 1
                        group = [oid for oid in group if oid]
                        where = '{0} IN {1}'.format(service_layer.oid_field_name, tuple(group))
                        url = ds + "/query?where={0}&outFields={1}&returnGeometry=true&f=json".format(where, '*')
                        feature_set = arcpy.FeatureSet()
                        try:
                            feature_set.load(url)
                        except Exception:
                            continue
                        if not out_features:
                            out_features = arcpy.CopyFeatures_management(
                                feature_set, task_utils.create_unique_name(out_name, temp_gdb))
                        else:
                            features = arcpy.CopyFeatures_management(
                                feature_set, task_utils.create_unique_name(out_name, temp_gdb))
                            arcpy.Append_management(features, out_features, 'NO_TEST')
                            try:
                                arcpy.Delete_management(features)
                            except arcpy.ExecuteError:
                                pass
                        status_writer.send_percent(float(g) / group_cnt * 100, '', 'convert_to_kml')
                    arcpy.MakeFeatureLayer_management(out_features, out_name)
                    arcpy.LayerToKML_conversion(out_name,
                                                '{0}.kmz'.format(os.path.join(out_workspace, out_name)),
                                                1, boundary_box_extent=extent)
                    processed_count += 1.
                    converted += 1
                    status_writer.send_percent(processed_count / result_count,
                                               _('Converted: {0}').format(ds), 'convert_to_kml')
                    continue
                except Exception as ex:
                    status_writer.send_state(status.STAT_WARNING, str(ex))
                    errors += 1
                    errors_reasons[ds] = ex.message
                    continue

            # Is the input a mxd data frame?
            map_frame_name = task_utils.get_data_frame_name(ds)
            if map_frame_name:
                ds = ds.split('|')[0].strip()

            # -------------------------------
            # Is the input a geometry feature?
            # -------------------------------
            if isinstance(out_name, list):
                increment = task_utils.get_increment(result_count)
                for row in out_name:
                    try:
                        name = arcpy.ValidateTableName(ds, 'in_memory')
                        name = os.path.join('in_memory', name)
                        # Create the geometry.
                        geo_json = row['[geo]']
                        geom = arcpy.AsShape(geo_json)
                        row.pop('[geo]')
                        if not arcpy.Exists(name):
                            if arcpy.env.outputCoordinateSystem:
                                layer_name = arcpy.CreateFeatureclass_management(
                                    'in_memory', os.path.basename(name), geom.type.upper())
                            else:
                                arcpy.env.outputCoordinateSystem = 4326
                                layer_name = arcpy.CreateFeatureclass_management(
                                    'in_memory', os.path.basename(name), geom.type.upper())
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, 'in_memory')
                                new_fields.append(valid_field)
                                field_values.append(value)
                                arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                        else:
                            if not geom.type.upper() == arcpy.Describe(name).shapeType.upper():
                                name = arcpy.CreateUniqueName(os.path.basename(name), 'in_memory')
                                if arcpy.env.outputCoordinateSystem:
                                    layer_name = arcpy.CreateFeatureclass_management(
                                        'in_memory', os.path.basename(name), geom.type.upper())
                                else:
                                    arcpy.env.outputCoordinateSystem = 4326
                                    layer_name = arcpy.CreateFeatureclass_management(
                                        'in_memory', os.path.basename(name), geom.type.upper())
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, 'in_memory')
                                new_fields.append(valid_field)
                                field_values.append(value)
                                if valid_field not in existing_fields:
                                    arcpy.AddField_management(layer_name, valid_field, 'TEXT')

                        with arcpy.da.InsertCursor(layer_name, ["SHAPE@"] + new_fields) as icur:
                            icur.insertRow([geom] + field_values)
                        arcpy.MakeFeatureLayer_management(layer_name, os.path.basename(name))
                        arcpy.LayerToKML_conversion(
                            os.path.basename(name),
                            '{0}.kmz'.format(os.path.join(out_workspace, os.path.basename(name))),
                            1, boundary_box_extent=extent)
                        if (processed_count % increment) == 0:
                            status_writer.send_percent(float(processed_count) / result_count,
                                                       _('Converted: {0}').format(row['name']),
                                                       'convert_to_kml')
                        processed_count += 1
                        converted += 1
                    except KeyError:
                        processed_count += 1
                        skipped += 1
                        skipped_reasons[ds] = 'Invalid input type'
                        status_writer.send_state(status.STAT_WARNING,
                                                 _('Invalid input type: {0}').format(ds))
                    except Exception as ex:
                        processed_count += 1
                        errors += 1
                        errors_reasons[ds] = ex.message
                        continue
                del icur
                continue

            dsc = arcpy.Describe(ds)
            if os.path.exists(os.path.join('{0}.kmz'.format(os.path.join(out_workspace, out_name)))):
                out_name = os.path.basename(
                    arcpy.CreateUniqueName(out_name + '.kmz', out_workspace))[:-4]

            if dsc.dataType == 'FeatureClass':
                arcpy.MakeFeatureLayer_management(ds, dsc.name)
                if out_name == '':
                    out_name = dsc.name
                arcpy.LayerToKML_conversion(dsc.name,
                                            '{0}.kmz'.format(os.path.join(out_workspace, out_name)),
                                            1, boundary_box_extent=extent)
                converted += 1
            elif dsc.dataType == 'ShapeFile':
                arcpy.MakeFeatureLayer_management(ds, dsc.name[:-4])
                if out_name == '':
                    out_name = dsc.name[:-4]
                arcpy.LayerToKML_conversion(dsc.name[:-4],
                                            '{0}.kmz'.format(os.path.join(out_workspace, out_name)),
                                            1, boundary_box_extent=extent)
                converted += 1
            elif dsc.dataType == 'RasterDataset':
                arcpy.MakeRasterLayer_management(ds, dsc.name)
                if out_name == '':
                    out_name = dsc.name
                arcpy.LayerToKML_conversion(dsc.name,
                                            '{0}.kmz'.format(os.path.join(out_workspace, out_name)),
                                            1, boundary_box_extent=extent)
                converted += 1
            elif dsc.dataType == 'Layer':
                if out_name == '':
                    if dsc.name.endswith('.lyr'):
                        out_name = dsc.name[:-4]
                    else:
                        out_name = dsc.name
                arcpy.LayerToKML_conversion(ds,
                                            '{0}.kmz'.format(os.path.join(out_workspace, out_name)),
                                            1, boundary_box_extent=extent)
                converted += 1
            elif dsc.dataType == 'FeatureDataset':
                arcpy.env.workspace = ds
                for fc in arcpy.ListFeatureClasses():
                    arcpy.MakeFeatureLayer_management(fc, 'tmp_lyr')
                    arcpy.LayerToKML_conversion('tmp_lyr',
                                                '{0}.kmz'.format(os.path.join(out_workspace, fc)),
                                                1, boundary_box_extent=extent)
                    converted += 1
            elif dsc.dataType == 'CadDrawingDataset':
                arcpy.env.workspace = dsc.catalogPath
                for cad_fc in arcpy.ListFeatureClasses():
                    if cad_fc.lower() == 'annotation':
                        try:
                            cad_anno = arcpy.ImportCADAnnotation_conversion(
                                cad_fc, arcpy.CreateUniqueName('cadanno', arcpy.env.scratchGDB))
                        except arcpy.ExecuteError:
                            cad_anno = arcpy.ImportCADAnnotation_conversion(
                                cad_fc, arcpy.CreateUniqueName('cadanno', arcpy.env.scratchGDB), 1)
                        arcpy.MakeFeatureLayer_management(cad_anno, 'cad_lyr')
                        name = '{0}_{1}'.format(dsc.name[:-4], cad_fc)
                        arcpy.LayerToKML_conversion('cad_lyr',
                                                    '{0}.kmz'.format(os.path.join(out_workspace, name)),
                                                    1, boundary_box_extent=extent)
                        converted += 1
                    else:
                        arcpy.MakeFeatureLayer_management(cad_fc, 'cad_lyr')
                        name = '{0}_{1}'.format(dsc.name[:-4], cad_fc)
                        arcpy.LayerToKML_conversion('cad_lyr',
                                                    '{0}.kmz'.format(os.path.join(out_workspace, name)),
                                                    1, boundary_box_extent=extent)
                        converted += 1
            # Map document to KML.
            elif dsc.dataType == 'MapDocument':
                mxd = arcpy.mapping.MapDocument(ds)
                if map_frame_name:
                    data_frames = arcpy.mapping.ListDataFrames(mxd, map_frame_name)
                else:
                    data_frames = arcpy.mapping.ListDataFrames(mxd)
                for df in data_frames:
                    name = '{0}_{1}'.format(dsc.name[:-4], df.name)
                    arcpy.MapToKML_conversion(ds, df.name,
                                              '{0}.kmz'.format(os.path.join(out_workspace, name)),
                                              extent_to_export=extent)
                    converted += 1
            else:
                processed_count += 1
                status_writer.send_percent(processed_count / result_count,
                                           _('Invalid input type: {0}').format(dsc.name),
                                           'convert_to_kml')
                skipped += 1
                skipped_reasons[ds] = _('Invalid input type: {0}').format(dsc.dataType)
                continue

            processed_count += 1
            status_writer.send_percent(processed_count / result_count,
                                       _('Converted: {0}').format(ds), 'convert_to_kml')
            status_writer.send_status(_('Converted: {0}').format(ds))
        except Exception as ex:
            processed_count += 1
            status_writer.send_percent(processed_count / result_count,
                                       _('Skipped: {0}').format(ds), 'convert_to_kml')
            status_writer.send_status(_('WARNING: {0}').format(repr(ex)))
            errors_reasons[ds] = repr(ex)
            errors += 1

    return converted, errors, skipped
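# --- Usage sketch (hypothetical values) ------------------------------------------
# convert_to_kml() expects the same item mapping as clip_data() plus an arcpy
# extent used as the KML bounding box; one .kmz per item lands in out_workspace.
def _example_convert_to_kml():
    extent = arcpy.Extent(-180.0, -90.0, 180.0, 90.0)  # hypothetical AOI
    items = {r'C:\data\roads.shp': 'roads'}
    converted, errors, skipped = convert_to_kml(items, r'C:\temp\kml_out', extent)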
def add_to_geodatabase(input_items, out_gdb, is_fds): """Adds items to a geodatabase.""" added = 0 skipped = 0 errors = 0 global processed_count global layer_name global existing_fields global new_fields global field_values for ds, out_name in input_items.iteritems(): try: # ----------------------------------------------- # If the item is a service layer, process and continue. # ----------------------------------------------- if ds.startswith('http'): try: service_layer = task_utils.ServiceLayer(ds) arcpy.env.overwriteOutput = True oid_groups = service_layer.object_ids out_features = None g = 0. group_cnt = service_layer.object_ids_cnt for group in oid_groups: g += 1 group = [oid for oid in group if oid] where = '{0} IN {1}'.format( service_layer.oid_field_name, tuple(group)) url = ds + "/query?where={}&outFields={}&returnGeometry=true&geometryType=esriGeometryPolygon&f=json".format( where, '*') feature_set = arcpy.FeatureSet() feature_set.load(url) if not out_features: out_features = arcpy.CopyFeatures_management( feature_set, task_utils.create_unique_name( out_name, out_gdb)) else: features = arcpy.CopyFeatures_management( feature_set, task_utils.create_unique_name( out_name, out_gdb)) arcpy.Append_management(features, out_features, 'NO_TEST') try: arcpy.Delete_management(features) except arcpy.ExecuteError: pass status_writer.send_percent( float(g) / group_cnt * 100, '', 'add_to_geodatabase') processed_count += 1. added += 1 status_writer.send_percent(processed_count / result_count, _('Added: {0}').format(ds), 'add_to_geodatabase') continue except Exception as ex: status_writer.send_state(status.STAT_WARNING, str(ex)) errors_reasons[ds] = ex.message errors += 1 continue # ------------------------------ # Is the input a mxd data frame. # ------------------------------ map_frame_name = task_utils.get_data_frame_name(ds) if map_frame_name: ds = ds.split('|')[0].strip() # ------------------------------- # Is the input a geometry feature # ------------------------------- if isinstance(out_name, list): increment = task_utils.get_increment(result_count) for row in out_name: try: name = os.path.join( out_gdb, arcpy.ValidateTableName(ds, out_gdb)) # Create the geometry if it exists. 
                    geom = None
                    try:
                        geo_json = row['[geo]']
                        geom = arcpy.AsShape(geo_json)
                        row.pop('[geo]')
                    except KeyError:
                        pass
                    if geom:
                        # The row has geometry: write it to a feature class.
                        if not arcpy.Exists(name):
                            if not arcpy.env.outputCoordinateSystem:
                                arcpy.env.outputCoordinateSystem = 4326
                            arcpy.CreateFeatureclass_management(out_gdb, os.path.basename(name), geom.type.upper())
                            layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer_{0}'.format(os.path.basename(name)))
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, out_gdb)
                                new_fields.append(valid_field)
                                field_values.append(value)
                                arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                        else:
                            # Recreate the feature class if the geometry type changed.
                            if geom.type.upper() != arcpy.Describe(name).shapeType.upper():
                                name = arcpy.CreateUniqueName(os.path.basename(name), out_gdb)
                                if not arcpy.env.outputCoordinateSystem:
                                    arcpy.env.outputCoordinateSystem = 4326
                                arcpy.CreateFeatureclass_management(out_gdb, os.path.basename(name), geom.type.upper())
                            layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer_{0}'.format(os.path.basename(name)))
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, out_gdb)
                                new_fields.append(valid_field)
                                field_values.append(value)
                                if valid_field not in existing_fields:
                                    arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                    else:
                        # No geometry: write the row to a plain table instead.
                        if not arcpy.Exists(name):
                            arcpy.CreateTable_management(out_gdb, os.path.basename(name))
                        view_name = arcpy.MakeTableView_management(name, 'tableview')
                        existing_fields = [f.name for f in arcpy.ListFields(view_name)]
                        new_fields = []
                        field_values = []
                        for field, value in row.iteritems():
                            valid_field = arcpy.ValidateFieldName(field, out_gdb)
                            new_fields.append(valid_field)
                            field_values.append(value)
                            if valid_field not in existing_fields:
                                arcpy.AddField_management(view_name, valid_field, 'TEXT')
                    if geom:
                        with arcpy.da.InsertCursor(layer_name, ['SHAPE@'] + new_fields) as icur:
                            icur.insertRow([geom] + field_values)
                    else:
                        with arcpy.da.InsertCursor(view_name, new_fields) as icur:
                            icur.insertRow(field_values)
                    processed_count += 1
                    if (processed_count % increment) == 0:
                        status_writer.send_percent(float(processed_count) / result_count,
                                                   _('Added: {0}').format(row['name']), 'add_to_geodatabase')
                    added += 1
                    continue
                except Exception as ex:
                    processed_count += 1
                    errors += 1
                    errors_reasons[name] = ex.message
                    continue
            continue

            # ------------------------------------------------
            # Check the data type and copy it into the output.
            # ------------------------------------------------
            dsc = arcpy.Describe(ds)
            if dsc.dataType == 'FeatureClass':
                if out_name == '':
                    arcpy.CopyFeatures_management(ds, task_utils.create_unique_name(dsc.name, out_gdb))
                else:
                    arcpy.CopyFeatures_management(ds, task_utils.create_unique_name(out_name, out_gdb))
            elif dsc.dataType == 'ShapeFile':
                if out_name == '':
                    arcpy.CopyFeatures_management(ds, task_utils.create_unique_name(dsc.name[:-4], out_gdb))
                else:
                    arcpy.CopyFeatures_management(ds, task_utils.create_unique_name(out_name, out_gdb))
            elif dsc.dataType == 'FeatureDataset':
                if not is_fds:
                    fds_name = os.path.basename(task_utils.create_unique_name(dsc.name, out_gdb))
                    fds = arcpy.CreateFeatureDataset_management(out_gdb, fds_name).getOutput(0)
                else:
                    fds = out_gdb
                arcpy.env.workspace = dsc.catalogPath
                for fc in arcpy.ListFeatureClasses():
                    name = os.path.basename(task_utils.create_unique_name(fc, out_gdb))
                    arcpy.CopyFeatures_management(fc, os.path.join(fds, name))
                arcpy.env.workspace = out_gdb
            elif dsc.dataType == 'RasterDataset':
                if is_fds:
                    out_gdb = os.path.dirname(out_gdb)
                if out_name == '':
                    arcpy.CopyRaster_management(ds, task_utils.create_unique_name(dsc.name, out_gdb))
                else:
                    arcpy.CopyRaster_management(ds, task_utils.create_unique_name(out_name, out_gdb))
            elif dsc.dataType == 'RasterCatalog':
                if is_fds:
                    out_gdb = os.path.dirname(out_gdb)
                if out_name == '':
                    arcpy.CopyRasterCatalogItems_management(ds, task_utils.create_unique_name(dsc.name, out_gdb))
                else:
                    arcpy.CopyRasterCatalogItems_management(ds, task_utils.create_unique_name(out_name, out_gdb))
            elif dsc.dataType == 'Layer':
                layer_from_file = arcpy.mapping.Layer(dsc.catalogPath)
                layers = arcpy.mapping.ListLayers(layer_from_file)
                for layer in layers:
                    if out_name == '':
                        name = task_utils.create_unique_name(layer.name, out_gdb)
                    else:
                        name = task_utils.create_unique_name(out_name, out_gdb)
                    if layer.isFeatureLayer:
                        arcpy.CopyFeatures_management(layer.dataSource, name)
                    elif layer.isRasterLayer:
                        if is_fds:
                            name = os.path.dirname(name)
                        arcpy.CopyRaster_management(layer.dataSource, name)
            elif dsc.dataType == 'CadDrawingDataset':
                arcpy.env.workspace = dsc.catalogPath
                cad_wks_name = os.path.splitext(dsc.name)[0]
                for cad_fc in arcpy.ListFeatureClasses():
                    arcpy.CopyFeatures_management(
                        cad_fc, task_utils.create_unique_name('{0}_{1}'.format(cad_wks_name, cad_fc), out_gdb))
                arcpy.env.workspace = out_gdb
            elif dsc.dataType == 'File':
                if dsc.catalogPath.endswith('.kml') or dsc.catalogPath.endswith('.kmz'):
                    name = os.path.splitext(dsc.name)[0]
                    temp_dir = tempfile.mkdtemp()
                    kml_layer = arcpy.KMLToLayer_conversion(dsc.catalogPath, temp_dir, name)
                    group_layer = arcpy.mapping.Layer(os.path.join(temp_dir, '{}.lyr'.format(name)))
                    for layer in arcpy.mapping.ListLayers(group_layer):
                        if layer.isFeatureLayer:
                            arcpy.CopyFeatures_management(layer, task_utils.create_unique_name(layer, out_gdb))
                        elif layer.isRasterLayer:
                            if is_fds:
                                out_gdb = os.path.dirname(out_gdb)
                            arcpy.CopyRaster_management(layer, task_utils.create_unique_name(layer, out_gdb))
                    # Clean up temp KML results.
                    arcpy.Delete_management(os.path.join(temp_dir, '{}.lyr'.format(name)))
                    arcpy.Delete_management(kml_layer)
                else:
                    processed_count += 1
                    status_writer.send_percent(processed_count / result_count,
                                               _('Invalid input type: {0}').format(dsc.name), 'add_to_geodatabase')
                    skipped += 1
                    skipped_reasons[ds] = _('Invalid input type: {0}').format(dsc.dataType)
                    continue
            elif dsc.dataType == 'MapDocument':
                mxd = arcpy.mapping.MapDocument(dsc.catalogPath)
                if map_frame_name:
                    df = arcpy.mapping.ListDataFrames(mxd, map_frame_name)[0]
                    layers = arcpy.mapping.ListLayers(mxd, data_frame=df)
                else:
                    layers = arcpy.mapping.ListLayers(mxd)
                for layer in layers:
                    if layer.isFeatureLayer:
                        arcpy.CopyFeatures_management(layer.dataSource,
                                                      task_utils.create_unique_name(layer.name, out_gdb))
                    elif layer.isRasterLayer:
                        if is_fds:
                            out_gdb = os.path.dirname(out_gdb)
                        arcpy.CopyRaster_management(layer.dataSource,
                                                    task_utils.create_unique_name(layer.name, out_gdb))
                table_views = arcpy.mapping.ListTableViews(mxd)
                if is_fds:
                    out_gdb = os.path.dirname(out_gdb)
                for table_view in table_views:
                    arcpy.CopyRows_management(table_view.dataSource,
                                              task_utils.create_unique_name(table_view.name, out_gdb))
                out_gdb = arcpy.env.workspace
            elif dsc.dataType.find('Table') > 0:
                if is_fds:
                    out_gdb = os.path.dirname(out_gdb)
                if out_name == '':
                    arcpy.CopyRows_management(ds, task_utils.create_unique_name(dsc.name, out_gdb))
                else:
                    arcpy.CopyRows_management(ds, task_utils.create_unique_name(out_name, out_gdb))
            else:
                # Try to copy any other types such as topologies, network datasets, etc.
                if is_fds:
                    out_gdb = os.path.dirname(out_gdb)
                arcpy.Copy_management(ds, task_utils.create_unique_name(dsc.name, out_gdb))
                out_gdb = arcpy.env.workspace

            processed_count += 1.0
            status_writer.send_percent(processed_count / result_count, _('Added: {0}').format(ds), 'add_to_geodatabase')
            status_writer.send_status(_('Added: {0}').format(ds))
            added += 1
        # Continue if an error. Process as many as possible.
        except Exception as ex:
            processed_count += 1
            status_writer.send_percent(processed_count / result_count, _('Skipped: {0}').format(ds), 'add_to_geodatabase')
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors_reasons[ds] = repr(ex)
            errors += 1
            continue
    return added, errors, skipped
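
# A minimal usage sketch for add_to_geodatabase (illustrative only -- the
# paths and input mapping below are hypothetical, and result_count,
# status_writer and the *_reasons dicts are assumed to be set up at module
# level, as the task framework normally does before calling in here):
#
#     out_gdb = arcpy.CreateFileGDB_management('C:/temp', 'results.gdb').getOutput(0)
#     # Keys are dataset paths; values are output names ('' keeps the source
#     # name). A value may also be a list of result rows whose '[geo]' key
#     # holds GeoJSON-style geometry, as produced by the search results.
#     items = {'C:/data/roads.shp': '', 'C:/data/city.gdb/parcels': 'parcels_copy'}
#     added, errors, skipped = add_to_geodatabase(items, out_gdb, is_fds=False)
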
def convert_to_kml(input_items, out_workspace, extent, show_progress=False):
    converted = 0
    errors = 0
    skipped = 0
    global processed_count
    global layer_name
    global existing_fields
    global new_fields
    global field_values

    arcpy.env.overwriteOutput = True
    for ds, out_name in input_items.iteritems():
        try:
            # ------------------------------------------------------
            # If the item is a service layer, process and continue.
            # ------------------------------------------------------
            if ds.startswith("http"):
                try:
                    service_layer = task_utils.ServiceLayer(ds, extent.JSON, "esriGeometryPolygon")
                    arcpy.env.overwriteOutput = True
                    oid_groups = service_layer.object_ids
                    out_features = None
                    g = 0.0
                    group_cnt = service_layer.object_ids_cnt
                    if not arcpy.Exists(os.path.join(out_workspace, "temp.gdb")):
                        temp_gdb = arcpy.CreateFileGDB_management(out_workspace, "temp.gdb")
                        temp_gdb = temp_gdb[0]
                    else:
                        temp_gdb = os.path.join(out_workspace, "temp.gdb")
                    for group in oid_groups:
                        g += 1
                        group = [oid for oid in group if oid]
                        # Build the IN list by hand; str(tuple(group)) would leave a
                        # trailing comma when the group holds a single object ID.
                        where = "{0} IN ({1})".format(service_layer.oid_field_name,
                                                      ",".join(str(oid) for oid in group))
                        url = ds + "/query?where={}&outFields={}&returnGeometry=true&f=json".format(where, "*")
                        feature_set = arcpy.FeatureSet()
                        try:
                            feature_set.load(url)
                        except Exception:
                            continue
                        if not out_features:
                            out_features = arcpy.CopyFeatures_management(
                                feature_set, task_utils.create_unique_name(out_name, temp_gdb))
                        else:
                            features = arcpy.CopyFeatures_management(
                                feature_set, task_utils.create_unique_name(out_name, temp_gdb))
                            arcpy.Append_management(features, out_features, "NO_TEST")
                            try:
                                arcpy.Delete_management(features)
                            except arcpy.ExecuteError:
                                pass
                        status_writer.send_percent(float(g) / group_cnt * 100, "", "convert_to_kml")
                    arcpy.MakeFeatureLayer_management(out_features, out_name)
                    arcpy.LayerToKML_conversion(out_name,
                                                "{0}.kmz".format(os.path.join(out_workspace, out_name)),
                                                1,
                                                boundary_box_extent=extent)
                    processed_count += 1.0
                    converted += 1
                    status_writer.send_percent(processed_count / result_count,
                                               _("Converted: {0}").format(ds), "convert_to_kml")
                    continue
                except Exception as ex:
                    status_writer.send_state(status.STAT_WARNING, str(ex))
                    errors += 1
                    errors_reasons[ds] = ex.message
                    continue

            # ------------------------------
            # Is the input a mxd data frame.
            # ------------------------------
            map_frame_name = task_utils.get_data_frame_name(ds)
            if map_frame_name:
                ds = ds.split("|")[0].strip()

            # --------------------------------
            # Is the input a geometry feature.
            # --------------------------------
            if isinstance(out_name, list):
                increment = task_utils.get_increment(result_count)
                for row in out_name:
                    try:
                        name = arcpy.ValidateTableName(ds, "in_memory")
                        name = os.path.join("in_memory", name)
                        # Create the geometry.
                        geo_json = row["[geo]"]
                        geom = arcpy.AsShape(geo_json)
                        row.pop("[geo]")
                        if not arcpy.Exists(name):
                            if not arcpy.env.outputCoordinateSystem:
                                arcpy.env.outputCoordinateSystem = 4326
                            layer_name = arcpy.CreateFeatureclass_management(
                                "in_memory", os.path.basename(name), geom.type.upper())
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, "in_memory")
                                new_fields.append(valid_field)
                                field_values.append(value)
                                arcpy.AddField_management(layer_name, valid_field, "TEXT")
                        else:
                            # Recreate the feature class if the geometry type changed.
                            if geom.type.upper() != arcpy.Describe(name).shapeType.upper():
                                name = arcpy.CreateUniqueName(os.path.basename(name), "in_memory")
                                if not arcpy.env.outputCoordinateSystem:
                                    arcpy.env.outputCoordinateSystem = 4326
                                layer_name = arcpy.CreateFeatureclass_management(
                                    "in_memory", os.path.basename(name), geom.type.upper())
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, "in_memory")
                                new_fields.append(valid_field)
                                field_values.append(value)
                                if valid_field not in existing_fields:
                                    arcpy.AddField_management(layer_name, valid_field, "TEXT")
                        with arcpy.da.InsertCursor(layer_name, ["SHAPE@"] + new_fields) as icur:
                            icur.insertRow([geom] + field_values)
                        arcpy.MakeFeatureLayer_management(layer_name, os.path.basename(name))
                        arcpy.LayerToKML_conversion(
                            os.path.basename(name),
                            "{0}.kmz".format(os.path.join(out_workspace, os.path.basename(name))),
                            1,
                            boundary_box_extent=extent)
                        if (processed_count % increment) == 0:
                            status_writer.send_percent(float(processed_count) / result_count,
                                                       _("Converted: {0}").format(row["name"]), "convert_to_kml")
                        processed_count += 1
                        converted += 1
                    except KeyError:
                        # No '[geo]' key, so there is nothing to convert.
                        processed_count += 1
                        skipped += 1
                        skipped_reasons[ds] = "No geographic information"
                        status_writer.send_state(status.STAT_WARNING,
                                                 _("No geographic information: {0}").format(ds))
                    except Exception as ex:
                        processed_count += 1
                        errors += 1
                        errors_reasons[ds] = ex.message
                        continue
                continue

            dsc = arcpy.Describe(ds)
            if os.path.exists("{0}.kmz".format(os.path.join(out_workspace, out_name))):
                out_name = os.path.basename(arcpy.CreateUniqueName(out_name + ".kmz", out_workspace))[:-4]

            if dsc.dataType == "FeatureClass":
                arcpy.MakeFeatureLayer_management(ds, dsc.name)
                if out_name == "":
                    out_name = dsc.name
                arcpy.LayerToKML_conversion(dsc.name,
                                            "{0}.kmz".format(os.path.join(out_workspace, out_name)),
                                            1,
                                            boundary_box_extent=extent)
                converted += 1
            elif dsc.dataType == "ShapeFile":
                arcpy.MakeFeatureLayer_management(ds, dsc.name[:-4])
                if out_name == "":
                    out_name = dsc.name[:-4]
                arcpy.LayerToKML_conversion(dsc.name[:-4],
                                            "{0}.kmz".format(os.path.join(out_workspace, out_name)),
                                            1,
                                            boundary_box_extent=extent)
                converted += 1
            elif dsc.dataType == "RasterDataset":
                arcpy.MakeRasterLayer_management(ds, dsc.name)
                if out_name == "":
                    out_name = dsc.name
                arcpy.LayerToKML_conversion(dsc.name,
                                            "{0}.kmz".format(os.path.join(out_workspace, out_name)),
                                            1,
                                            boundary_box_extent=extent)
                converted += 1
            elif dsc.dataType == "Layer":
                if out_name == "":
                    if dsc.name.endswith(".lyr"):
                        out_name = dsc.name[:-4]
                    else:
                        out_name = dsc.name
                arcpy.LayerToKML_conversion(ds,
                                            "{0}.kmz".format(os.path.join(out_workspace, out_name)),
                                            1,
                                            boundary_box_extent=extent)
                converted += 1
            elif dsc.dataType == "FeatureDataset":
                arcpy.env.workspace = ds
                for fc in arcpy.ListFeatureClasses():
                    arcpy.MakeFeatureLayer_management(fc, "tmp_lyr")
                    arcpy.LayerToKML_conversion("tmp_lyr",
                                                "{0}.kmz".format(os.path.join(out_workspace, fc)),
                                                1,
                                                boundary_box_extent=extent)
                    converted += 1
            elif dsc.dataType == "CadDrawingDataset":
                arcpy.env.workspace = dsc.catalogPath
                for cad_fc in arcpy.ListFeatureClasses():
                    if cad_fc.lower() == "annotation":
                        try:
                            cad_anno = arcpy.ImportCADAnnotation_conversion(
                                cad_fc, arcpy.CreateUniqueName("cadanno", arcpy.env.scratchGDB))
                        except arcpy.ExecuteError:
                            cad_anno = arcpy.ImportCADAnnotation_conversion(
                                cad_fc, arcpy.CreateUniqueName("cadanno", arcpy.env.scratchGDB), 1)
                        arcpy.MakeFeatureLayer_management(cad_anno, "cad_lyr")
                    else:
                        arcpy.MakeFeatureLayer_management(cad_fc, "cad_lyr")
                    name = "{0}_{1}".format(dsc.name[:-4], cad_fc)
                    arcpy.LayerToKML_conversion("cad_lyr",
                                                "{0}.kmz".format(os.path.join(out_workspace, name)),
                                                1,
                                                boundary_box_extent=extent)
                    converted += 1
            # Map document to KML.
            elif dsc.dataType == "MapDocument":
                mxd = arcpy.mapping.MapDocument(ds)
                if map_frame_name:
                    data_frames = arcpy.mapping.ListDataFrames(mxd, map_frame_name)
                else:
                    data_frames = arcpy.mapping.ListDataFrames(mxd)
                for df in data_frames:
                    name = "{0}_{1}".format(dsc.name[:-4], df.name)
                    arcpy.MapToKML_conversion(ds,
                                              df.name,
                                              "{0}.kmz".format(os.path.join(out_workspace, name)),
                                              extent_to_export=extent)
                    converted += 1
            else:
                processed_count += 1
                status_writer.send_percent(processed_count / result_count,
                                           _("Invalid input type: {0}").format(dsc.name), "convert_to_kml")
                skipped += 1
                skipped_reasons[ds] = _("Invalid input type: {0}").format(dsc.dataType)
                continue

            processed_count += 1
            status_writer.send_percent(processed_count / result_count, _("Converted: {0}").format(ds), "convert_to_kml")
            status_writer.send_status(_("Converted: {0}").format(ds))
        except Exception as ex:
            processed_count += 1
            status_writer.send_percent(processed_count / result_count, _("Skipped: {0}").format(ds), "convert_to_kml")
            status_writer.send_status(_("WARNING: {0}").format(repr(ex)))
            errors_reasons[ds] = repr(ex)
            errors += 1
    return converted, errors, skipped
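
# A minimal usage sketch for convert_to_kml (illustrative; the paths are
# hypothetical). The extent argument is assumed to be an arcpy.Extent or
# similar object: the service-layer branch reads extent.JSON, and the
# conversion calls pass it to LayerToKML_conversion as boundary_box_extent:
#
#     extent = arcpy.Extent(-117.2, 32.6, -116.8, 33.1)
#     items = {'C:/data/roads.shp': 'roads', 'C:/data/dem.tif': 'dem'}
#     converted, errors, skipped = convert_to_kml(items, 'C:/temp/kml_out', extent)
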
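
# For reference, a stripped-down sketch of the chunked object-ID download
# pattern that the service-layer branches above implement (hypothetical
# helper; task_utils.ServiceLayer normally supplies the OID field name and
# the OID groups):
#
#     def fetch_service_features(layer_url, oid_field, oid_groups, out_fc):
#         """Copy features from a REST feature layer in OID-sized chunks."""
#         out_features = None
#         for group in oid_groups:
#             where = '{0} IN ({1})'.format(oid_field, ','.join(str(o) for o in group))
#             fs = arcpy.FeatureSet()
#             fs.load('{0}/query?where={1}&outFields=*&returnGeometry=true&f=json'.format(layer_url, where))
#             if out_features is None:
#                 out_features = arcpy.CopyFeatures_management(fs, out_fc).getOutput(0)
#             else:
#                 chunk = arcpy.CopyFeatures_management(fs, out_fc + '_chunk').getOutput(0)
#                 arcpy.Append_management(chunk, out_features, 'NO_TEST')
#                 arcpy.Delete_management(chunk)
#         return out_features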