def FolderExtraction():
    """Extract each new permit's layer package (.lpk) into its own Google temp folder.

    Every sub-folder of ``NewPermitFolder`` is named after a permit ID (created
    manually before the script runs) and is expected to hold a single layer
    package.  The package is renamed to "<PermitID>.lpk" — ExtractPackage needs
    a predictable full path and the uploaded package names differ per submittal —
    and is then extracted into a new "<Google>/<PermitID>" folder.
    """
    LogMessage(" Start the FolderExtraction module.")
    # Loop through all the folders found in the "New" folder; each folder name
    # is the permit ID.  (Avoids shadowing the builtin name "file".)
    for permit_id in os.listdir(NewPermitFolder):
        try:
            LogMessage(permit_id)
            permit_dir = os.path.join(NewPermitFolder, permit_id)
            # Make the permit folder the active directory so the rename below
            # operates on the package file directly.
            os.chdir(permit_dir)
            # The permit folder normally contains exactly one file (the layer
            # package), but loop defensively over whatever is present.
            for package_name in os.listdir(permit_dir):
                LogMessage(" " + package_name)
                os.rename(package_name, permit_id + ".lpk")
                LogMessage(" Now extract package...")
                # Create folder in Google temp folder named after the PermitID.
                permit_folder = os.path.join(Google, permit_id)
                os.mkdir(permit_folder)
                arcpy.ExtractPackage_management(
                    os.path.join(permit_dir, permit_id + ".lpk"), permit_folder)
                LogMessage(" Package extracted...")
        except Exception as err:
            # os.mkdir raises when the destination folder already exists, which
            # is how an already-processed permit is detected.  The original bare
            # "except:" hid every other failure (bad package, permissions, ...)
            # behind the same message, so log the underlying error as well.
            LogMessage(" This package has already been extracted")
            LogMessage(" (" + repr(err) + ")")
    return
def execute(request):
    """Publish each input item as a map service on an ArcGIS Server.

    NOTE(review): the original docstring said "Deletes files.", which does not
    match the code — this task extracts packages / wraps datasets in a map
    document and publishes them via ``create_service``.

    :param request: json as a dict (expects 'params' and 'folder' keys)
    """
    errors_reasons = {}
    errors = 0
    published = 0
    # Application root: two levels above this module (holds 'supportfiles').
    app_folder = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    parameters = request['params']
    num_results, response_index = task_utils.get_result_count(parameters)
    input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])
    if num_results > task_utils.CHUNK_SIZE:
        status_writer.send_state(status.STAT_FAILED, 'Reduce results to 25 or less.')
        return
    # Publishing target and credentials supplied by the task parameters.
    url = task_utils.get_parameter_value(parameters, 'url', 'value')
    username = task_utils.get_parameter_value(parameters, 'username', 'value')
    password = task_utils.get_parameter_value(parameters, 'password', 'value')
    service_name = task_utils.get_parameter_value(parameters, 'service_name', 'value')
    folder_name = task_utils.get_parameter_value(parameters, 'folder_name', 'value')
    # Scratch area for extracted packages and the working map document.
    request_folder = os.path.join(request['folder'], 'temp')
    if not os.path.exists(request_folder):
        os.makedirs(request_folder)
    map_template = os.path.join(request_folder, 'output.mxd')
    shutil.copyfile(os.path.join(app_folder, 'supportfiles', 'MapTemplate.mxd'), map_template)
    for item in input_items:
        try:
            # Code required because of an Esri bug - cannot describe a map package (raises IOError).
            if item.endswith('.mpk'):
                status_writer.send_status(_('Extracting: {0}').format(item))
                arcpy.ExtractPackage_management(item, request_folder)
                # Packages extract into a version folder such as "v103".
                pkg_folder = os.path.join(request_folder, glob.glob1(request_folder, 'v*')[0])
                mxd_file = os.path.join(pkg_folder, glob.glob1(pkg_folder, '*.mxd')[0])
                mxd = arcpy.mapping.MapDocument(mxd_file)
                create_service(request_folder, mxd, url, username, password, service_name, folder_name)
            else:
                data_type = arcpy.Describe(item).dataType
                if data_type == 'MapDocument':
                    mxd = arcpy.mapping.MapDocument(item)
                    create_service(request_folder, mxd, url, username, password, service_name, folder_name)
                elif data_type == 'Layer':
                    if item.endswith('.lpk'):
                        # Layer packages are extracted first; publish the .lyr inside.
                        status_writer.send_status(_('Extracting: {0}').format(item))
                        arcpy.ExtractPackage_management(item, request_folder)
                        pkg_folder = os.path.join(request_folder, glob.glob1(request_folder, 'v*')[0])
                        item = os.path.join(pkg_folder, glob.glob1(pkg_folder, '*.lyr')[0])
                    layer = arcpy.mapping.Layer(item)
                    mxd = arcpy.mapping.MapDocument(map_template)
                    mxd.description = layer.name
                    mxd.tags = layer.name
                    mxd.save()
                    data_frame = arcpy.mapping.ListDataFrames(mxd)[0]
                    arcpy.mapping.AddLayer(data_frame, layer)
                    mxd.save()
                    create_service(request_folder, mxd, url, username, password, service_name, folder_name)
                elif data_type in ('FeatureClass', 'ShapeFile', 'RasterDataset'):
                    # Wrap the bare dataset in an in-memory layer, then in the template MXD.
                    if data_type == 'RasterDataset':
                        arcpy.MakeRasterLayer_management(item, os.path.basename(item))
                    else:
                        arcpy.MakeFeatureLayer_management(item, os.path.basename(item))
                    layer = arcpy.mapping.Layer(os.path.basename(item))
                    mxd = arcpy.mapping.MapDocument(map_template)
                    mxd.description = layer.name
                    mxd.tags = layer.name
                    mxd.save()
                    data_frame = arcpy.mapping.ListDataFrames(mxd)[0]
                    arcpy.mapping.AddLayer(data_frame, layer)
                    mxd.save()
                    create_service(request_folder, mxd, url, username, password, service_name, folder_name)
            published += 1
        except Exception as ex:
            # The original code had five byte-identical handlers
            # (task_utils.AnalyzeServiceException, requests.RequestException,
            # task_utils.PublishException, arcpy.ExecuteError, Exception); they
            # all subclass Exception, so one handler preserves the behavior.
            status_writer.send_state(status.STAT_FAILED, _(ex))
            errors_reasons[item] = repr(ex)
            errors += 1
        finally:
            # Keep the on-disk report current after every item.
            task_utils.report(os.path.join(request['folder'], '__report.json'),
                              published, 0, errors, errors_reasons)
def publishContourMaster(deploymentFolderPath, serverConnectionFilePath, serviceName, serviceFolder):
    """Publish the empty "master" 2ft contour map service and create its cache schema.

    Extracts the template map package (ContourConfig.EMPTY_MASTER_MPK) into a
    fresh temp folder, creates/stages/uploads a service definition to the given
    server connection, then creates an empty map-server cache using the
    predefined tiling scheme from ContourConfig.

    :param deploymentFolderPath: folder under which a "temp" working dir is (re)created
    :param serverConnectionFilePath: .ags connection file (or server folder path)
    :param serviceName: base service name; suffixed with CONTOUR_2FT_SERVICE_NAME
    :param serviceFolder: optional ArcGIS Server folder for the service
    """
    # Final service name is "<serviceName>_<CONTOUR_2FT_SERVICE_NAME>".
    serviceName = "{}_{}".format(serviceName, ContourConfig.CONTOUR_2FT_SERVICE_NAME)
    mpk = ContourConfig.EMPTY_MASTER_MPK  # template package containing emptyMaster.mxd
    cache_dir = ContourConfig.CACHE_FOLDER
    tilingScheme = ContourConfig.TILING_SCHEME
    # # Get input parameters
    # serverConnectionFilePath = arcpy.GetParameterAsText(0)
    # serviceName = arcpy.GetParameterAsText(1)
    # serviceFolder = arcpy.GetParameterAsText(2)
    Utility.printArguments(["deploymentFolderPath", "serverConnectionFilePath", "serviceName", "serviceFolder", "cache_dir", "Template MPK", "tilingScheme"],
                           [deploymentFolderPath, serverConnectionFilePath, serviceName, serviceFolder, cache_dir, mpk, tilingScheme],
                           "B04 PublishContourMaster")
    # Find the master MPK in the current directory
    # cwd = os.path.dirname(sys.argv[0])
    # Recreate a clean "temp" working folder under the deployment folder.
    temp = os.path.join(deploymentFolderPath, "temp")
    if os.path.exists(temp):
        try:
            shutil.rmtree(temp)
        except:
            # NOTE(review): bare except deliberately ignores cleanup failures,
            # but it also hides permission errors — consider logging here.
            pass
    if not os.path.exists(temp):
        try:
            os.mkdir(temp)
        except:
            # NOTE(review): bare except — a failed mkdir is silently ignored
            # and later steps will fail on the missing folder.
            pass
    # mpk = os.path.join(cwd + "\\emptyMaster.mpk")
    #-------------------------------------------------------------------------------
    #-------------------------------------------------------------------------------
    # The following path can be modified if needed
    # Path to the cache serviceFolder where project tiles will be stored for this service
    cacheFolder = cache_dir  # r"C:\arcgisserver\directories\arcgiscache"
    cacheDir = os.path.join(cache_dir, serviceName)
    if serviceFolder is not None and len(serviceFolder) > 0:
        cacheDir = os.path.join(cache_dir, "{}_{}".format(serviceFolder, serviceName))
    if os.path.exists(cacheDir):
        # A cache from a previous run exists: move it aside under a
        # zero-padded "_YYYYMMDD_HHMMSS" timestamp suffix.
        now = datetime.datetime.now()
        updatedCacheDir = "{}_{}{}{}_{}{}{}".format(cacheDir,
                                                    ("0000{}".format(now.year))[-4:],
                                                    ("00{}".format(now.month))[-2:],
                                                    ("00{}".format(now.day))[-2:],
                                                    ("00{}".format(now.hour))[-2:],
                                                    ("00{}".format(now.minute))[-2:],
                                                    ("00{}".format(now.second))[-2:])
        arcpy.AddMessage("The existing cache folder will be moved to: {0}".format(updatedCacheDir))
        shutil.move(cacheDir, updatedCacheDir)
    #-------------------------------------------------------------------------------
    #-------------------------------------------------------------------------------
    # Construct path for master contour service
    inputService = os.path.join(serverConnectionFilePath, serviceFolder, "{}.MapServer".format(serviceName))
    if serverConnectionFilePath.endswith(".ags"):
        # Strip the ".ags" extension so the service path is built from the
        # connection's folder-style path.
        inputService = os.path.join(serverConnectionFilePath[:-4], serviceFolder, "{}.MapServer".format(serviceName))
    arcpy.AddMessage("Location of new service will be: {0}".format(inputService))
    # Extract the master map package
    arcpy.ExtractPackage_management(mpk, temp)
    # Packages extract into a versioned "v103" sub-folder — TODO confirm this
    # matches the ArcGIS version that authored the MPK.
    mapDoc = os.path.join(temp, "v103", "emptyMaster.mxd")
    sddraft = os.path.join(temp, "{}.sddraft".format(serviceName))
    sd = os.path.join(temp, "{}.sd".format(serviceName))
    # tilingScheme = os.path.join(cwd + "\\NRCS_tilingScheme.xml")  # Cache template file
    arcpy.AddMessage("Creating Map Service Definition Draft {}".format(sddraft))
    # Create the SDDraft file for the empty master contour service
    analysis = arcpy.mapping.CreateMapSDDraft(mapDoc, sddraft, serviceName, "ARCGIS_SERVER", serverConnectionFilePath, folder_name=serviceFolder)
    # Check for analyzer errors
    if analysis['errors'] == {}:
        arcpy.AddMessage("Staging Map Service Definition {}".format(sd))
        arcpy.StageService_server(sddraft, sd)
        arcpy.AddMessage("Uploading Map Service Definition {} to {}".format(sd, serverConnectionFilePath))
        arcpy.UploadServiceDefinition_server(sd, serverConnectionFilePath)
        arcpy.AddMessage("Service publishing completed")
    else:
        # If the SDDraft analysis contained errors, display them
        # NOTE(review): execution still continues after an analysis failure —
        # the cache creation and os.remove(sd) below will then fail because no
        # .sd was ever staged.  Probably should return/raise here; confirm.
        arcpy.AddMessage("\nERROR\nThe following errors were encountered during analysis of the map document: " + str(analysis['errors']))
    os.remove(sddraft)
    # Create the empty cache schema for the master contour service
    arcpy.AddMessage("Creating map service cache at {}".format(cacheFolder))
    # List of input variables for map service properties
    # tilingSchemeType = "PREDEFINED"
    # scalesType = ""
    # tileOrigin = ""
    # numOfScales = ContourConfig.CONTOUR_SCALES_NUM
    scales = ContourConfig.CONTOUR_SCALES_STRING
    # dotsPerInch = "96"
    # tileSize = "256 x 256"
    # cacheTileFormat = "PNG"
    # tileCompressionQuality = "75"
    # storageFormat = "COMPACT"
    arcpy.CreateMapServerCache_server(input_service=inputService,
                                      service_cache_directory=cacheFolder,
                                      tiling_scheme_type="PREDEFINED",
                                      predefined_tiling_scheme=tilingScheme,
                                      # scales_type=scalesType ,
                                      # dots_per_inch=dotsPerInch,
                                      # num_of_scales=numOfScales,
                                      # tile_size=tileSize,
                                      # cache_tile_format=cacheTileFormat,
                                      # storage_format=storageFormat,
                                      # tile_compression_quality=tileCompressionQuality,
                                      scales=scales
                                      )
    # Clean up the Service Definition file from the temp serviceFolder
    os.remove(sd)
    arcpy.AddMessage("Operation complete")
log("Found upload mpk: " + uploadedMpkFileName) break if uploadedMpkFileName is None: raise Exception("No .mpk file found in: " + uploadPathRoot) uploadedPackage = path.join(uploadPathRoot, uploadedMpkFileName) extractedPackageName = "_ags_gra_" + newUuid + "_mpk" extractedPackage = path.join(outFolder, extractedPackageName) jsonFilePath = path.join(extractedPackage, "commondata", "userdata", outWebmapJsonFileName) # unpackage data and extract json file log('Attempting to extract: ' + uploadedPackage) log('Extracting to: ' + extractedPackage) arcpy.ExtractPackage_management(uploadedPackage, extractedPackage) jsonFile = open(jsonFilePath, "r") savedWebmapJson = jsonFile.read() # send webmap back to client resultObj["webmap"] = savedWebmapJson except Exception as e: log(e, True) finally: resultObjJson = json.dumps(resultObj) log("Result object: ") log(resultObjJson) arcpy.SetParameterAsText(0, resultObjJson)
#Execute arcpy.CreateFileGDB_management(interFolder, gdb_name) print "Step 1 completed at", datetime.datetime.now().strftime( "%A, %B %d %Y %I:%M:%S%p") ## --------------------------------------------------------------------------- ## 2. Extract Package ## Description: Extract the contents of the BLM package to the geodatabase print "\nStep 2 Extract package starts at", datetime.datetime.now().strftime( "%A, %B %d %Y %I:%M:%S%p") #Execute Extract Package arcpy.ExtractPackage_management('BLM_National_Surface_Management_Agency.lpk', interFolder + "\\Unpacked\\") print "Step 2 completed at", datetime.datetime.now().strftime( "%A, %B %d %Y %I:%M:%S%p") ## --------------------------------------------------------------------------- ## 3. Export Feature Class to geodatabase ## Description: Export US land management feature to the geodatabase print "\nStep 3 Export US land management feature to gdb starts at", datetime.datetime.now( ).strftime("%A, %B %d %Y %I:%M:%S%p") us_gdb = interFolder + "\\Unpacked\\v101\\sma_wm.gdb\\" feature = "SurfaceManagementAgency" fc = os.path.join(us_gdb, feature) arcpy.FeatureClassToShapefile_conversion(fc, out_gdb)
def execute(request):
    """Publish each input item as a map service via a server connection file.

    NOTE(review): the original docstring said "Deletes files.", which does not
    match the code — this task extracts packages / wraps datasets in a map
    document and publishes them via ``create_service``.

    :param request: json as a dict (expects 'params' and 'folder' keys)
    """
    errors_reasons = {}
    errors = 0
    published = 0
    # Application root: two levels above this module (holds 'supportfiles').
    app_folder = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    parameters = request['params']
    num_results, response_index = task_utils.get_result_count(parameters)
    input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])
    if num_results > task_utils.CHUNK_SIZE:
        status_writer.send_state(status.STAT_FAILED, 'Reduce results to 25 or less.')
        return
    server_conn = task_utils.get_parameter_value(parameters, 'server_connection_path', 'value')
    service_name = task_utils.get_parameter_value(parameters, 'service_name', 'value')
    folder_name = task_utils.get_parameter_value(parameters, 'folder_name', 'value')
    if not server_conn:
        status_writer.send_state(status.STAT_FAILED, _('A server path is required'))
        return
    # Scratch area for extracted packages and the working map document.
    request_folder = os.path.join(request['folder'], 'temp')
    if not os.path.exists(request_folder):
        os.makedirs(request_folder)
    map_template = os.path.join(request_folder, 'output.mxd')
    shutil.copyfile(os.path.join(app_folder, 'supportfiles', 'MapTemplate.mxd'), map_template)
    for item in input_items:
        try:
            # Code required because of an Esri bug - cannot describe a map package (raises IOError).
            if item.endswith('.mpk'):
                status_writer.send_status(_('Extracting: {0}').format(item))
                arcpy.ExtractPackage_management(item, request_folder)
                # Packages extract into a version folder such as "v103".
                pkg_folder = os.path.join(request_folder, glob.glob1(request_folder, 'v*')[0])
                mxd_file = os.path.join(pkg_folder, glob.glob1(pkg_folder, '*.mxd')[0])
                mxd = arcpy.mapping.MapDocument(mxd_file)
                create_service(request_folder, mxd, server_conn, service_name, folder_name)
            else:
                data_type = arcpy.Describe(item).dataType
                if data_type == 'MapDocument':
                    mxd = arcpy.mapping.MapDocument(item)
                    create_service(request_folder, mxd, server_conn, service_name, folder_name)
                elif data_type == 'Layer':
                    if item.endswith('.lpk'):
                        # Layer packages are extracted first; publish the .lyr inside.
                        status_writer.send_status(_('Extracting: {0}').format(item))
                        arcpy.ExtractPackage_management(item, request_folder)
                        pkg_folder = os.path.join(request_folder, glob.glob1(request_folder, 'v*')[0])
                        item = os.path.join(pkg_folder, glob.glob1(pkg_folder, '*.lyr')[0])
                    layer = arcpy.mapping.Layer(item)
                    mxd = arcpy.mapping.MapDocument(map_template)
                    mxd.description = layer.name
                    mxd.tags = layer.name
                    mxd.save()
                    data_frame = arcpy.mapping.ListDataFrames(mxd)[0]
                    arcpy.mapping.AddLayer(data_frame, layer)
                    mxd.save()
                    create_service(request_folder, mxd, server_conn, service_name, folder_name)
                elif data_type in ('FeatureClass', 'ShapeFile', 'RasterDataset'):
                    # Wrap the bare dataset in an in-memory layer, then in the template MXD.
                    if data_type == 'RasterDataset':
                        arcpy.MakeRasterLayer_management(item, os.path.basename(item))
                    else:
                        arcpy.MakeFeatureLayer_management(item, os.path.basename(item))
                    layer = arcpy.mapping.Layer(os.path.basename(item))
                    mxd = arcpy.mapping.MapDocument(map_template)
                    mxd.title = layer.name
                    data_frame = arcpy.mapping.ListDataFrames(mxd)[0]
                    arcpy.mapping.AddLayer(data_frame, layer)
                    mxd.save()
                    create_service(request_folder, mxd, server_conn, service_name, folder_name)
                elif data_type in ('CadDrawingDataset', 'FeatureDataset'):
                    # Add every feature class inside the dataset to the template MXD.
                    arcpy.env.workspace = item
                    mxd = arcpy.mapping.MapDocument(map_template)
                    data_frame = arcpy.mapping.ListDataFrames(mxd)[0]
                    # Loop-invariant: the dataset name does not change per feature class.
                    dataset_name = os.path.splitext(os.path.basename(item))[0]
                    for fc in arcpy.ListFeatureClasses():
                        l = arcpy.MakeFeatureLayer_management(fc, '{0}_{1}'.format(dataset_name, os.path.basename(fc)))
                        arcpy.mapping.AddLayer(data_frame, l.getOutput(0))
                    mxd.save()
                    arcpy.ResetEnvironments()
                    create_service(request_folder, mxd, server_conn, service_name, folder_name)
            published += 1
        except Exception as ex:
            # The original three handlers (task_utils.AnalyzeServiceException,
            # arcpy.ExecuteError, Exception) were byte-identical; one handler
            # preserves the behavior.
            errors += 1
            errors_reasons[item] = repr(ex)
            status_writer.send_state(status.STAT_FAILED)
        finally:
            # BUG FIX: the original did "if errors: errors_reasons[item] = repr(ex)"
            # here, which re-read `ex` from a *previous* iteration's handler —
            # overwriting a successful item's entry with a stale exception (and a
            # NameError on Python 3, where the except target is deleted).  Only
            # report a failure if THIS item actually recorded one.
            if item in errors_reasons:
                status_writer.send_status(_('FAIL: {0}').format(errors_reasons[item]))
            # Keep the on-disk report current after every item.
            task_utils.report(os.path.join(request['folder'], '__report.json'),
                              published, 0, errors, errors_reasons)