def create_lpk(data_location, lpk_name, additional_files=None):
    """Creates a layer package (.lpk) for all datasets in the data location.

    The package is written next to (one level above) ``data_location`` as
    ``<lpk_name>.lpk``, and a thumbnail PNG is generated from the first layer.

    :param data_location: location of the data to be packaged
    :param lpk_name: name of the layer package (without the .lpk extension)
    :param additional_files: optional list of additional files to include
        in the package
    """
    import arcpy

    # PackageLayer requires every layer to have a description; backfill
    # any existing layer files that are missing one.
    for lyr in glob.glob(os.path.join(data_location, '*.lyr')):
        layer = arcpy.mapping.Layer(lyr)
        if layer.description == '':
            layer.description = layer.name
            layer.save()

    # Save data to layer files.
    save_to_layer_file(data_location, False)

    # Re-glob: save_to_layer_file may have created new .lyr files.
    layer_files = glob.glob(os.path.join(data_location, '*.lyr'))
    if not layer_files:
        # Nothing to package -- avoid passing an empty list to
        # PackageLayer and an IndexError on the thumbnail below.
        return

    arcpy.PackageLayer_management(
        layer_files,
        os.path.join(os.path.dirname(data_location), '{0}.lpk'.format(lpk_name)),
        'PRESERVE',
        version='10',
        additional_files=additional_files)
    make_thumbnail(layer_files[0],
                   os.path.join(os.path.dirname(data_location), '_thumb.png'))
def make_layer_package(output_folder, intermediates_folder, analyses_folder,
                       inputs_folder, symbology_folder, layer_package_name,
                       clipping_network):
    """
    Makes a layer package for the project
    :param output_folder: The folder that we want to base our layer package off of
    :param intermediates_folder: Folder holding the intermediate layers
    :param analyses_folder: Folder holding the analyses layers
    :param inputs_folder: Folder holding the input layers
    :param symbology_folder: Folder holding the symbology .lyr files
    :param layer_package_name: The name of the layer package that we'll make
    :param clipping_network: What we want to clip our network to
        (NOTE(review): not referenced in this body -- presumably consumed
        by the helper functions or a leftover parameter; confirm.)
    :return:
    """
    # Normalize the package name: default it, then guarantee the extension.
    if layer_package_name == "" or layer_package_name is None:
        layer_package_name = "LayerPackage"
    if not layer_package_name.endswith(".lpk"):
        layer_package_name += ".lpk"

    arcpy.AddMessage("Assembling Layer Package...")

    empty_group_layer = os.path.join(symbology_folder, "EmptyGroupLayer.lyr")
    mxd = arcpy.mapping.MapDocument("CURRENT")
    df = arcpy.mapping.ListDataFrames(mxd)[0]

    # Build the grouped layer tree bottom-up: analyses + intermediates go
    # under "Output", which is then grouped with the inputs under the
    # package's own name.
    analyses_layer = get_analyses_layer(analyses_folder, empty_group_layer, df, mxd)
    inputs_layer = get_inputs_layer(empty_group_layer, inputs_folder, df, mxd)
    intermediates_layer = get_intermediates_layers(empty_group_layer, intermediates_folder, df, mxd)
    output_layer = group_layers(empty_group_layer, "Output",
                                [intermediates_layer, analyses_layer], df, mxd)
    # [:-4] strips the ".lpk" extension for the group layer's display name.
    output_layer = group_layers(empty_group_layer, layer_package_name[:-4],
                                [output_layer, inputs_layer], df, mxd,
                                remove_layer=False)

    layer_package = os.path.join(output_folder, layer_package_name)
    arcpy.AddMessage("Saving Layer Package...")
    arcpy.PackageLayer_management(output_layer, layer_package)
def makeLayerPackage(outputDataPath, pointLayer, upstreamLayer, downstreamLayer,
                     streamNetwork, demLayer, streamNetworkOrig):
    """
    Applies symbology to the output layers and packages them into a .lpkx
    :param outputDataPath: What output folder we're in
    :param pointLayer: The layer points output
    :param upstreamLayer: The layer of upstream impact probabilities
    :param downstreamLayer: The layer of downstream impact probabilities
    :param streamNetwork: The stream network in the project folder
        (NOTE(review): not referenced in this body; confirm it is needed)
    :param demLayer: The DEM layer we made earlier
    :param streamNetworkOrig: The stream network file in the inputs folder
    :return: None
    """
    # Symbology .lyr templates live next to this script.
    tribCodeFolder = os.path.dirname(os.path.abspath(__file__))
    symbologyFolder = os.path.join(tribCodeFolder, 'symbology')
    pointSymbology = os.path.join(symbologyFolder, "TribImpactPoints.lyr")
    upstreamSymbology = os.path.join(symbologyFolder, "TribImpactUpstream.lyr")
    downstreamSymbology = os.path.join(symbologyFolder, "TribImpactDownstream.lyr")

    # Apply symbology and re-save each layer file in place.
    arcpy.ApplySymbologyFromLayer_management(pointLayer, pointSymbology)
    arcpy.SaveToLayerFile_management(pointLayer, pointLayer)
    arcpy.ApplySymbologyFromLayer_management(upstreamLayer, upstreamSymbology)
    arcpy.SaveToLayerFile_management(upstreamLayer, upstreamLayer)
    arcpy.ApplySymbologyFromLayer_management(downstreamLayer, downstreamSymbology)
    arcpy.SaveToLayerFile_management(downstreamLayer, downstreamLayer)

    # Make a layer file for the original stream network alongside its source
    # ([:-4] swaps the 4-char extension, e.g. ".shp", for ".lyr").
    streamNetworkLayer = streamNetworkOrig[:-4] + '.lyr'
    arcpy.MakeFeatureLayer_management(streamNetworkOrig, streamNetworkLayer)
    arcpy.SaveToLayerFile_management(streamNetworkLayer, streamNetworkLayer)

    layerPackageFolder = makeFolder(outputDataPath, "03_LayerPackage")
    layerPackage = os.path.join(layerPackageFolder, "layerPackage.lpkx")
    layers = [pointLayer, upstreamLayer, downstreamLayer, demLayer, streamNetworkLayer]
    try:
        arcpy.PackageLayer_management(layers, layerPackage)
    except arcpy.ExecuteError:
        # Packaging is best-effort: a known ArcGIS 10.6 bug can make the
        # tool fail, so warn instead of aborting the whole run.
        arcpy.AddWarning(
            'We could not package the output into a single layer package. This is often a result of a ' +
            'known bug in ArcGIS 10.6. You may try packaging the outputs together yourself if you wish')
def ejecutar(path_1):
    """Build the NODOS point layer package and the Enlaces XY-to-line output.

    :param path_1: project root; "<path_1>/temp" is used as the workspace
        and "<path_1>\\data" receives the outputs.
    """
    # NOTE(review): ``conversion_dbf`` and ``sr`` are expected to be in
    # scope at module level -- confirm they are defined before this runs.
    workspace_dir = path_1 + "/temp"
    output_dir = path_1 + "\\data"

    arcpy.env.workspace = workspace_dir
    arcpy.env.overwriteOutput = True

    # Nodes: XY events from the conversion table, saved and packaged.
    arcpy.MakeXYEventLayer_management(conversion_dbf, "LONG_N", "LAT_N", "NODOS", sr)
    arcpy.SaveToLayerFile_management("NODOS", output_dir + "\\NODOS")
    arcpy.PackageLayer_management(output_dir + "\\NODOS.lyr", output_dir + "\\NODOS")

    # Links: geodesic lines between the A and B coordinate pairs.
    arcpy.XYToLine_management(workspace_dir + "\\conversion.dbf",
                              output_dir + "\\Enlaces",
                              "LONG_A", "LAT_A", "LONG_B", "LAT_B",
                              "GEODESIC", "PERMISIONA")
def makeLayerPackage(output_folder, intermediatesFolder, analysesFolder, inputsFolder, symbologyFolder, layerPackageName):
    """
    Makes a layer package for the project
    :param output_folder: The folder that we want to base our layer package off of
    :param layerPackageName: The name of the layer package that we'll make
    :return:
    """
    # Default the name when empty/None, then guarantee the extension.
    if not layerPackageName:
        layerPackageName = "LayerPackage"
    if not layerPackageName.endswith(".lpk"):
        layerPackageName += ".lpk"

    arcpy.AddMessage("Making Layer Package...")

    groupTemplate = os.path.join(symbologyFolder, "EmptyGroupLayer.lyr")
    mapDocument = arcpy.mapping.MapDocument("CURRENT")
    dataFrame = arcpy.mapping.ListDataFrames(mapDocument)[0]

    # Assemble the grouped layer tree, then group everything under the
    # package's own name ([:-4] drops the ".lpk" suffix).
    analysisLayers = findLayersInFolder(analysesFolder)
    inputsGroup = getInputsLayer(groupTemplate, inputsFolder, dataFrame, mapDocument)
    bratGroup = groupLayers(groupTemplate, "Beaver Restoration Assessment Tool - BRAT", analysisLayers, dataFrame, mapDocument)
    intermediatesGroup = getIntermediatesLayers(groupTemplate, intermediatesFolder, dataFrame, mapDocument)
    packageRoot = groupLayers(groupTemplate, "Output", [intermediatesGroup, bratGroup], dataFrame, mapDocument)
    packageRoot = groupLayers(groupTemplate, layerPackageName[:-4], [packageRoot, inputsGroup], dataFrame, mapDocument, removeLayer=False)

    arcpy.PackageLayer_management(packageRoot, os.path.join(output_folder, layerPackageName))
def execute(request):
    """Package inputs to an Esri map or layer package.

    Reads the task parameters out of the request, fetches the matching
    result documents from the search index in CHUNK_SIZE groups, stages
    them into a temp workspace, and packages them as either an .mpk
    (map package) or .lpk (layer package) depending on 'output_format'.

    :param request: json as a dict.
    """
    errors = 0
    skipped = 0
    layers = []
    files = []
    app_folder = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    parameters = request['params']
    out_format = task_utils.get_parameter_value(parameters, 'output_format', 'value')
    summary = task_utils.get_parameter_value(parameters, 'summary')
    tags = task_utils.get_parameter_value(parameters, 'tags')
    output_file_name = task_utils.get_parameter_value(parameters, 'output_file_name')
    if not output_file_name:
        output_file_name = 'package_results'

    # Get the clip region as an extent object.
    # Missing/invalid extent parameters simply mean "no clipping".
    clip_area = None
    try:
        clip_area_wkt = task_utils.get_parameter_value(parameters, 'processing_extent', 'wkt')
        clip_area = task_utils.get_clip_region(clip_area_wkt)
    except (KeyError, ValueError):
        pass

    # Temp workspace under the request folder; outputs land one level up.
    out_workspace = os.path.join(request['folder'], 'temp')
    if not os.path.exists(out_workspace):
        os.makedirs(out_workspace)

    num_results, response_index = task_utils.get_result_count(parameters)
    # if num_results > task_utils.CHUNK_SIZE:
    # Query the index for results in groups of 25.
    query_index = task_utils.QueryIndex(parameters[response_index])
    fl = query_index.fl
    # Index base URL comes from the command line (sys.argv[2] is 'key=url').
    query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1], '/select?&wt=json', fl)
    fq = query_index.get_fq()
    # Choose how to page through results: by filter query, by explicit ids,
    # or by plain row offsets.
    if fq:
        groups = task_utils.grouper(range(0, num_results), task_utils.CHUNK_SIZE, '')
        query += fq
    elif 'ids' in parameters[response_index]:
        groups = task_utils.grouper(list(parameters[response_index]['ids']), task_utils.CHUNK_SIZE, '')
    else:
        groups = task_utils.grouper(range(0, num_results), task_utils.CHUNK_SIZE, '')

    headers = {'x-access-token': task_utils.get_security_token(request['owner'])}
    status_writer.send_status(_('Starting to process...'))
    for group in groups:
        # Fetch this chunk of result docs from the index.
        if fq:
            results = requests.get(
                query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]),
                verify=verify_ssl, headers=headers)
        elif 'ids' in parameters[response_index]:
            results = requests.get(query + '{0}&ids={1}'.format(fl, ','.join(group)),
                                   verify=verify_ssl, headers=headers)
        else:
            results = requests.get(
                query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]),
                verify=verify_ssl, headers=headers)

        input_items = task_utils.get_input_items(results.json()['response']['docs'])
        if not input_items:
            # Fall back to the docs already embedded in the request.
            input_items = task_utils.get_input_items(
                parameters[response_index]['response']['docs'])
        # NOTE(review): these totals are reassigned (not accumulated) each
        # iteration, so only the last chunk's counts survive -- confirm
        # whether get_items aggregates internally.
        layers, files, errors, skipped = get_items(input_items, out_workspace)
    # else:
    #     input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])
    #     layers, files, errors, skipped = get_items(input_items, out_workspace)

    if errors == num_results:
        status_writer.send_state(status.STAT_FAILED, _('No results to package'))
        return

    try:
        if out_format == 'MPK':
            # Map package: stage a template MXD, add the layers, then run
            # PackageMap with version-specific keyword arguments.
            shutil.copyfile(
                os.path.join(app_folder, 'supportfiles', 'MapTemplate.mxd'),
                os.path.join(out_workspace, 'output.mxd'))
            mxd = arcpy.mapping.MapDocument(os.path.join(out_workspace, 'output.mxd'))
            # PackageMap requires a non-empty map description.
            if mxd.description == '':
                mxd.description = os.path.basename(mxd.filePath)
            df = arcpy.mapping.ListDataFrames(mxd)[0]
            for layer in layers:
                arcpy.mapping.AddLayer(df, layer)
            mxd.save()
            status_writer.send_status(
                _('Generating {0}. Large input {1} will take longer to process.'
                  .format('MPK', 'results')))
            # The PackageMap signature changed across ArcGIS releases
            # (10.0 lacks the runtime args; 10.1 capitalizes ArcGISRuntime).
            if arcpy.GetInstallInfo()['Version'] == '10.0':
                arcpy.PackageMap_management(
                    mxd.filePath,
                    os.path.join(os.path.dirname(out_workspace),
                                 '{0}.mpk'.format(output_file_name)),
                    'PRESERVE', extent=clip_area)
            elif arcpy.GetInstallInfo()['Version'] == '10.1':
                arcpy.PackageMap_management(
                    mxd.filePath,
                    os.path.join(os.path.dirname(out_workspace),
                                 '{0}.mpk'.format(output_file_name)),
                    'PRESERVE', extent=clip_area, ArcGISRuntime='RUNTIME',
                    version='10', additional_files=files, summary=summary, tags=tags)
            else:
                arcpy.PackageMap_management(
                    mxd.filePath,
                    os.path.join(os.path.dirname(out_workspace),
                                 '{0}.mpk'.format(output_file_name)),
                    'PRESERVE', extent=clip_area, arcgisruntime='RUNTIME',
                    version='10', additional_files=files, summary=summary, tags=tags)
            # Create a thumbnail size PNG of the mxd.
            task_utils.make_thumbnail(mxd, os.path.join(request['folder'], '_thumb.png'))
        else:
            # Layer package (.lpk) path.
            status_writer.send_status(
                _('Generating {0}. Large input {1} will take longer to process.'
                  .format('LPK', 'results')))
            # PackageLayer requires every layer to have a description.
            for layer in layers:
                if layer.description == '':
                    layer.description = layer.name
            if arcpy.GetInstallInfo()['Version'] == '10.0':
                arcpy.PackageLayer_management(
                    layers,
                    os.path.join(os.path.dirname(out_workspace),
                                 '{0}.lpk'.format(output_file_name)),
                    'PRESERVE', extent=clip_area, version='10')
            else:
                arcpy.PackageLayer_management(
                    layers,
                    os.path.join(os.path.dirname(out_workspace),
                                 '{0}.lpk'.format(output_file_name)),
                    'PRESERVE', extent=clip_area, version='10',
                    additional_files=files, summary=summary, tags=tags)
            # Create a thumbnail size PNG of the mxd.
            task_utils.make_thumbnail(layers[0],
                                      os.path.join(request['folder'], '_thumb.png'))
    except (RuntimeError, ValueError, arcpy.ExecuteError) as ex:
        status_writer.send_state(status.STAT_FAILED, repr(ex))
        return

    # Update state if necessary.
    if errors > 0 or skipped:
        status_writer.send_state(
            status.STAT_WARNING,
            _('{0} results could not be processed').format(errors + skipped))
    task_utils.report(os.path.join(request['folder'], '__report.json'),
                      num_results - (skipped + errors), skipped, errors,
                      errors_reasons, skipped_reasons)
def execute(self, parameters, messages):
    # -*- coding: utf-8 -*-
    """The source code of the tool.

    Converts an Excel coordinate table to a DBF, computes decimal-degree
    latitude/longitude from degree-minute-second fields, builds an XY event
    layer, saves it to a layer file, and packages it. A filtered copy
    (ATRIBUTO = V_ATRIBUTO) is then saved to its own layer file.
    """
    # Tool parameters: field name and field value to filter on.
    ATRIBUTO = parameters[0].valueAsText
    V_ATRIBUTO = parameters[1].valueAsText
    values = pr.prueba()
    messages.addMessage("Campo: " + ATRIBUTO)
    messages.addMessage("Valor de Campo: " + V_ATRIBUTO)
    messages.addMessage("{0} probando ".format(values))

    # Local variables:
    path = os.path.dirname(os.path.abspath(__file__))
    Datos_xls = "DATA_PRUEBA.xls"
    conversion = "conversion"
    conversion_dbf = "conversion.dbf"
    #Scripts = "C:\\Users\\EstChristianRafaelMa\\Desktop\\ARGIS_PASANTIA\\Datos"
    # NOTE(review): no path separator before the filename -- this resolves
    # to "<path>SaveToLayerFile.lyr", and the variable is never used here.
    SaveToLayerFile_lyr = path + "SaveToLayerFile.lyr"

    # Workspace setup.
    arcpy.env.workspace = path
    arcpy.env.overwriteOutput = True

    # Process: Excel To Table
    arcpy.ExcelToTable_conversion(Datos_xls, "conversion1")
    arcpy.TableToTable_conversion("conversion1.dbf", path, conversion)

    # Process: Add Field -- decimal-degree output columns.
    arcpy.AddField_management(conversion_dbf, "Latitud", "FLOAT", "", "", "", "",
                              "NULLABLE", "NON_REQUIRED", "")
    arcpy.AddField_management(conversion_dbf, "Longitud", "FLOAT", "", "", "", "",
                              "NULLABLE", "NON_REQUIRED", "")

    # Calculate parameters for the new fields: convert DMS to signed
    # decimal degrees (negative for South latitudes / West longitudes).
    expression = "getClass(!G!,!M!,!S!,!N_S!)"
    codeblock = """def getClass(G,M,S,N_S):
    if N_S == "S":
        return -1*(float(G)+float(float(M)/60.0)+float(float(S)/3600.0))
    else:
        return 1*(float(G)+float(float(M)/60.0)+float(float(S)/3600.0))"""
    expression1 = "getClass1(!G_1!,!M_1!,!S_1!,!E_O!)"
    codeblock1 = """def getClass1(G_1,M_1,S_1,E_O):
    if E_O == "O":
        return -1*(float(G_1)+float(float(M_1)/60.0)+float(float(S_1)/3600.0))
    else:
        return 1*(float(G_1)+float(float(M_1)/60.0)+float(float(S_1)/3600.0))"""
    arcpy.CalculateField_management("conversion.dbf", "Latitud", expression,
                                    "PYTHON_9.3", codeblock)
    arcpy.CalculateField_management("conversion.dbf", "Longitud", expression1,
                                    "PYTHON_9.3", codeblock1)

    # Original (unfiltered) output package: WGS 1984 spatial reference
    # built from a WKT string.
    datum = "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],\
PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],\
VERTCS['WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],\
PARAMETER['Vertical_Shift',0.0],PARAMETER['Direction',1.0],UNIT['Meter',1.0]];\
-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;\
0.001;0.001;IsHighPrecision"
    sr = arcpy.SpatialReference()
    sr.loadFromString(datum)
    arcpy.MakeXYEventLayer_management("conversion.dbf", "Longitud", "Latitud",
                                      "original", sr)
    arcpy.SaveToLayerFile_management("original", "salida_original")
    arcpy.PackageLayer_management("salida_original.lyr", "paquete_capa_original")

    # Process: Table to Table -- filter rows where ATRIBUTO = V_ATRIBUTO.
    expresion = arcpy.AddFieldDelimiters(
        arcpy.env.workspace, ATRIBUTO) + "=" + "'" + V_ATRIBUTO + "'"
    arcpy.TableToTable_conversion(conversion_dbf, path, "salida", expresion)

    salida_Layer = "salida_Layer"
    #salida_Layer=V_ATRIBUTO
    #salida_Layer="TOOL"
    arcpy.MakeXYEventLayer_management("salida.dbf", "Longitud", "Latitud",
                                      salida_Layer, sr)
    saved_Layer = "salidalyr"
    #saved_Layer=V_ATRIBUTO
    #saved_Layer="TOOL"

    # Process: Save To Layer File
    arcpy.SaveToLayerFile_management(salida_Layer, saved_Layer)
    #arcpy.PackageLayer_management("salidalyr.lyr","paquete_capa")
    #arcpy.RefreshActiveView()
    #arcpy.RefreshTOC()
    #arcpy.RefreshCatalog("SaveToLayerFile_lyr.lyr")
    return
def execute(self, parameters, messages):
    # -*- coding: utf-8 -*-
    """The source code of the tool.

    Filters the previously generated 'salida' table by ATRIBUTO = V_ATRIBUTO,
    rebuilds the XY event layer, saves/packages it, and optionally exports
    the result to a shapefile. (Python 2 syntax: octal literals, ArcMap-era
    arcpy.)
    """
    ATRIBUTO = parameters[0].valueAsText
    V_ATRIBUTO = parameters[1].valueAsText
    Create_Shapefile = parameters[2].valueAsText
    Address_Shapefile = parameters[3].valueAsText

    # Sanitize the value for use as an output dataset name: drop "." then
    # "&" characters (each removal logs "NO ASCII" to stdout).
    V_ATRIBUTO_NAME1 = ""
    for letra in V_ATRIBUTO:
        if letra == ".":
            print("NO ASCII")
        else:
            V_ATRIBUTO_NAME1 = V_ATRIBUTO_NAME1 + letra
    V_ATRIBUTO_NAME = ""
    for letra in V_ATRIBUTO_NAME1:
        if letra == "&":
            print("NO ASCII")
        else:
            V_ATRIBUTO_NAME = V_ATRIBUTO_NAME + letra

    messages.addMessage("Campo: " + ATRIBUTO)
    messages.addMessage("Valor de Campo: " + V_ATRIBUTO)

    # Local variables:
    path = os.path.dirname(os.path.abspath(__file__))
    path_file = path + "/data"
    # Recreate the data folder from scratch; if that fails, try a plain
    # mkdir (rmtree with ignore_errors never raises itself).
    try:
        shutil.rmtree(path_file, ignore_errors=True)
        os.mkdir(path_file, 0755)
    except Exception as e:
        print("No data")
        os.mkdir(path_file, 0755)

    salida_Layer = "salida_Layer"
    # NOTE(review): "\S" is not a valid escape; relies on Python leaving it
    # as backslash+S. The variable is never used in this body.
    SaveToLayerFile_lyr = path + "\SaveToLayerFile.lyr"

    # Snapshot the existing 'salida' table so we filter a stable copy.
    arcpy.TableToTable_conversion("salida.dbf", path, "temporal")
    conversion_dbf = "temporal.dbf"

    # Workspace setup (script folder first).
    arcpy.env.workspace = path
    arcpy.env.overwriteOutput = True

    # Process: Table to Table -- filter rows where ATRIBUTO = V_ATRIBUTO.
    expresion = arcpy.AddFieldDelimiters(
        arcpy.env.workspace, ATRIBUTO) + "=" + "'" + V_ATRIBUTO + "'"
    arcpy.TableToTable_conversion(conversion_dbf, path, "salida", expresion)

    # Process: Make XY Event Layer -- WGS 1984 spatial reference from WKT.
    datum = "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],\
PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],\
VERTCS['WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],\
PARAMETER['Vertical_Shift',0.0],PARAMETER['Direction',1.0],UNIT['Meter',1.0]];\
-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119522E-09;\
0.001;0.001;IsHighPrecision"
    sr = arcpy.SpatialReference()
    sr.loadFromString(datum)
    arcpy.MakeXYEventLayer_management("salida.dbf", "Longitud", "Latitud",
                                      salida_Layer, sr)
    saved_Layer = "salidalyr"

    # Process: Save To Layer File
    arcpy.SaveToLayerFile_management(salida_Layer, saved_Layer)
    arcpy.PackageLayer_management("salidalyr.lyr", "paquete_capa")

    # Second pass: same filter, but outputs named after the sanitized
    # value and written into the data folder.
    arcpy.env.workspace = path_file
    arcpy.env.overwriteOutput = True

    # Process: Table to Table
    expresion = arcpy.AddFieldDelimiters(
        arcpy.env.workspace, ATRIBUTO) + "=" + "'" + V_ATRIBUTO + "'"
    arcpy.TableToTable_conversion(conversion_dbf, path_file, V_ATRIBUTO_NAME,
                                  expresion)

    # Process: Make XY Event Layer
    arcpy.MakeXYEventLayer_management(path + "/salida.dbf", "Longitud", "Latitud",
                                      V_ATRIBUTO_NAME, sr)

    # Process: Save To Layer File
    arcpy.SaveToLayerFile_management(V_ATRIBUTO_NAME, V_ATRIBUTO_NAME)
    arcpy.PackageLayer_management(V_ATRIBUTO_NAME + ".lyr", V_ATRIBUTO_NAME)
    #arcpy.PackageLayer_management(HNOMBRE+".lyr","V_ATRIBUTO")
    #arcpy.RefreshActiveView()
    #arcpy.RefreshTOC()
    #arcpy.RefreshCatalog("SaveToLayerFile_lyr_filtrada.lyr")

    # Optional shapefile export when the user answered "SI".
    if Create_Shapefile.upper() == "SI":
        arcpy.FeatureClassToShapefile_conversion(
            [salida_Layer, "salida_original.lyr"], Address_Shapefile)
    return
def package_layers(layerPathList, outputFolder):
    """Bundle all layers in layerPathList into one SummaryLayers.lpk in outputFolder."""
    package_path = os.path.join(outputFolder, "SummaryLayers.lpk")
    arcpy.PackageLayer_management(layerPathList, package_path)